problem_id (string, 18-22 chars) | source (string, 1 class) | task_type (string, 1 class) | in_source_id (string, 13-58 chars) | prompt (string, 1.71k-18.9k chars) | golden_diff (string, 145-5.13k chars) | verification_info (string, 465-23.6k chars) | num_tokens_prompt (int64, 556-4.1k) | num_tokens_diff (int64, 47-1.02k) |
---|---|---|---|---|---|---|---|---|
gh_patches_debug_29507 | rasdani/github-patches | git_diff | translate__pootle-6487 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Paths dropdown missing some parent dirs
if a dir contains only directories, not any active stores its not shown in menu (on master)
</issue>
<code>
[start of pootle/core/paths.py]
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) Pootle contributors.
4 #
5 # This file is a part of the Pootle project. It is distributed under the GPL3
6 # or later license. See the LICENSE file for a copy of the license and the
7 # AUTHORS file for copyright and authorship information.
8
9 import posixpath
10
11 from pootle.core.decorators import persistent_property
12 from pootle.core.delegate import revision
13
14
15 class Paths(object):
16
17 def __init__(self, context, q, show_all=False):
18 self.context = context
19 self.q = q
20 self.show_all = show_all
21
22 @property
23 def rev_cache_key(self):
24 return revision.get(
25 self.context.directory.__class__)(
26 self.context.directory).get(key="stats")
27
28 @property
29 def cache_key(self):
30 return (
31 "%s.%s.%s"
32 % (self.q,
33 self.rev_cache_key,
34 self.show_all))
35
36 @property
37 def store_qs(self):
38 raise NotImplementedError
39
40 @property
41 def stores(self):
42 stores = self.store_qs.exclude(obsolete=True)
43 if not self.show_all:
44 stores = stores.exclude(
45 translation_project__project__disabled=True)
46 return stores.exclude(is_template=True).filter(
47 tp_path__contains=self.q).order_by()
48
49 @persistent_property
50 def paths(self):
51 stores = set(
52 st[1:]
53 for st
54 in self.stores.values_list("tp_path", flat=True))
55 dirs = set(
56 ("%s/" % posixpath.dirname(path))
57 for path
58 in stores
59 if (path.count("/") > 1
60 and self.q in path))
61 return sorted(
62 dirs | stores,
63 key=lambda path: (posixpath.dirname(path), posixpath.basename(path)))
64
[end of pootle/core/paths.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pootle/core/paths.py b/pootle/core/paths.py
--- a/pootle/core/paths.py
+++ b/pootle/core/paths.py
@@ -6,7 +6,11 @@
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
+import pathlib
import posixpath
+from hashlib import md5
+
+from django.utils.encoding import force_bytes
from pootle.core.decorators import persistent_property
from pootle.core.delegate import revision
@@ -29,7 +33,7 @@
def cache_key(self):
return (
"%s.%s.%s"
- % (self.q,
+ % (md5(force_bytes(self.q)).hexdigest(),
self.rev_cache_key,
self.show_all))
@@ -52,12 +56,17 @@
st[1:]
for st
in self.stores.values_list("tp_path", flat=True))
- dirs = set(
- ("%s/" % posixpath.dirname(path))
- for path
- in stores
- if (path.count("/") > 1
- and self.q in path))
+ dirs = set()
+ for store in stores:
+ if posixpath.dirname(store) in dirs:
+ continue
+ dirs = (
+ dirs
+ | (set(
+ "%s/" % str(p)
+ for p
+ in pathlib.PosixPath(store).parents
+ if str(p) != ".")))
return sorted(
dirs | stores,
key=lambda path: (posixpath.dirname(path), posixpath.basename(path)))
| {"golden_diff": "diff --git a/pootle/core/paths.py b/pootle/core/paths.py\n--- a/pootle/core/paths.py\n+++ b/pootle/core/paths.py\n@@ -6,7 +6,11 @@\n # or later license. See the LICENSE file for a copy of the license and the\n # AUTHORS file for copyright and authorship information.\n \n+import pathlib\n import posixpath\n+from hashlib import md5\n+\n+from django.utils.encoding import force_bytes\n \n from pootle.core.decorators import persistent_property\n from pootle.core.delegate import revision\n@@ -29,7 +33,7 @@\n def cache_key(self):\n return (\n \"%s.%s.%s\"\n- % (self.q,\n+ % (md5(force_bytes(self.q)).hexdigest(),\n self.rev_cache_key,\n self.show_all))\n \n@@ -52,12 +56,17 @@\n st[1:]\n for st\n in self.stores.values_list(\"tp_path\", flat=True))\n- dirs = set(\n- (\"%s/\" % posixpath.dirname(path))\n- for path\n- in stores\n- if (path.count(\"/\") > 1\n- and self.q in path))\n+ dirs = set()\n+ for store in stores:\n+ if posixpath.dirname(store) in dirs:\n+ continue\n+ dirs = (\n+ dirs\n+ | (set(\n+ \"%s/\" % str(p)\n+ for p\n+ in pathlib.PosixPath(store).parents\n+ if str(p) != \".\")))\n return sorted(\n dirs | stores,\n key=lambda path: (posixpath.dirname(path), posixpath.basename(path)))\n", "issue": "Paths dropdown missing some parent dirs\nif a dir contains only directories, not any active stores its not shown in menu (on master)\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport posixpath\n\nfrom pootle.core.decorators import persistent_property\nfrom pootle.core.delegate import revision\n\n\nclass Paths(object):\n\n def __init__(self, context, q, show_all=False):\n self.context = context\n self.q = q\n self.show_all = show_all\n\n @property\n def rev_cache_key(self):\n return revision.get(\n self.context.directory.__class__)(\n self.context.directory).get(key=\"stats\")\n\n @property\n def cache_key(self):\n return (\n \"%s.%s.%s\"\n % (self.q,\n self.rev_cache_key,\n self.show_all))\n\n @property\n def store_qs(self):\n raise NotImplementedError\n\n @property\n def stores(self):\n stores = self.store_qs.exclude(obsolete=True)\n if not self.show_all:\n stores = stores.exclude(\n translation_project__project__disabled=True)\n return stores.exclude(is_template=True).filter(\n tp_path__contains=self.q).order_by()\n\n @persistent_property\n def paths(self):\n stores = set(\n st[1:]\n for st\n in self.stores.values_list(\"tp_path\", flat=True))\n dirs = set(\n (\"%s/\" % posixpath.dirname(path))\n for path\n in stores\n if (path.count(\"/\") > 1\n and self.q in path))\n return sorted(\n dirs | stores,\n key=lambda path: (posixpath.dirname(path), posixpath.basename(path)))\n", "path": "pootle/core/paths.py"}]} | 1,078 | 370 |
gh_patches_debug_843 | rasdani/github-patches | git_diff | obspy__obspy-2148 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
FDSN routing client has a locale dependency
There's a dummy call to `time.strptime` in the module init that uses locale-specific formatting, which fails under locales that don't use the same names (ie. "Nov" for the 11th month of the year).
```
>>> import locale
>>> locale.setlocale(locale.LC_TIME, ('zh_CN', 'UTF-8'))
'zh_CN.UTF-8'
>>> from obspy.clients.fdsn.routing.routing_client import RoutingClient
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/workspace/anaconda/envs/django/lib/python2.7/site-packages/obspy/clients/fdsn/__init__.py", line 242, in <module>
from .routing.routing_client import RoutingClient # NOQA
File "/workspace/anaconda/envs/django/lib/python2.7/site-packages/obspy/clients/fdsn/routing/__init__.py", line 25, in <module>
time.strptime("30 Nov 00", "%d %b %y")
File "/workspace/anaconda/envs/django/lib/python2.7/_strptime.py", line 478, in _strptime_time
return _strptime(data_string, format)[0]
File "/workspace/anaconda/envs/django/lib/python2.7/_strptime.py", line 332, in _strptime
(data_string, format))
ValueError: time data u'30 Nov 00' does not match format u'%d %b %y'
```
I believe switching this to an ISO8601-like string would be locale-agnostic:
time.strptime("2000/11/30", "%Y/%m/%d")
</issue>
<code>
[start of obspy/clients/fdsn/routing/__init__.py]
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 """
4 obspy.clients.fdsn.routing - Routing services for FDSN web services
5 ===================================================================
6
7 :copyright:
8 The ObsPy Development Team ([email protected])
9 Celso G Reyes, 2017
10 IRIS-DMC
11 :license:
12 GNU Lesser General Public License, Version 3
13 (https://www.gnu.org/copyleft/lesser.html)
14 """
15 from __future__ import (absolute_import, division, print_function,
16 unicode_literals)
17 from future.builtins import * # NOQA
18
19
20 # Extremely ugly way to avoid a race condition the first time strptime is
21 # imported which is not thread safe...
22 #
23 # See https://bugs.python.org/issue7980
24 import time
25 time.strptime("30 Nov 00", "%d %b %y")
26
27
28 if __name__ == '__main__': # pragma: no cover
29 import doctest
30 doctest.testmod(exclude_empty=True)
31
[end of obspy/clients/fdsn/routing/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/obspy/clients/fdsn/routing/__init__.py b/obspy/clients/fdsn/routing/__init__.py
--- a/obspy/clients/fdsn/routing/__init__.py
+++ b/obspy/clients/fdsn/routing/__init__.py
@@ -22,7 +22,7 @@
#
# See https://bugs.python.org/issue7980
import time
-time.strptime("30 Nov 00", "%d %b %y")
+time.strptime("2000/11/30", "%Y/%m/%d")
if __name__ == '__main__': # pragma: no cover
| {"golden_diff": "diff --git a/obspy/clients/fdsn/routing/__init__.py b/obspy/clients/fdsn/routing/__init__.py\n--- a/obspy/clients/fdsn/routing/__init__.py\n+++ b/obspy/clients/fdsn/routing/__init__.py\n@@ -22,7 +22,7 @@\n #\n # See https://bugs.python.org/issue7980\n import time\n-time.strptime(\"30 Nov 00\", \"%d %b %y\")\n+time.strptime(\"2000/11/30\", \"%Y/%m/%d\")\n \n \n if __name__ == '__main__': # pragma: no cover\n", "issue": "FDSN routing client has a locale dependency\nThere's a dummy call to `time.strptime` in the module init that uses locale-specific formatting, which fails under locales that don't use the same names (ie. \"Nov\" for the 11th month of the year).\r\n\r\n```\r\n>>> import locale\r\n>>> locale.setlocale(locale.LC_TIME, ('zh_CN', 'UTF-8'))\r\n'zh_CN.UTF-8'\r\n>>> from obspy.clients.fdsn.routing.routing_client import RoutingClient\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"/workspace/anaconda/envs/django/lib/python2.7/site-packages/obspy/clients/fdsn/__init__.py\", line 242, in <module>\r\n from .routing.routing_client import RoutingClient # NOQA\r\n File \"/workspace/anaconda/envs/django/lib/python2.7/site-packages/obspy/clients/fdsn/routing/__init__.py\", line 25, in <module>\r\n time.strptime(\"30 Nov 00\", \"%d %b %y\")\r\n File \"/workspace/anaconda/envs/django/lib/python2.7/_strptime.py\", line 478, in _strptime_time\r\n return _strptime(data_string, format)[0]\r\n File \"/workspace/anaconda/envs/django/lib/python2.7/_strptime.py\", line 332, in _strptime\r\n (data_string, format))\r\nValueError: time data u'30 Nov 00' does not match format u'%d %b %y'\r\n```\r\n\r\nI believe switching this to an ISO8601-like string would be locale-agnostic:\r\n\r\n time.strptime(\"2000/11/30\", \"%Y/%m/%d\")\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"\nobspy.clients.fdsn.routing - Routing services for FDSN web services\n===================================================================\n\n:copyright:\n The ObsPy Development Team ([email protected])\n Celso G Reyes, 2017\n IRIS-DMC\n:license:\n GNU Lesser General Public License, Version 3\n (https://www.gnu.org/copyleft/lesser.html)\n\"\"\"\nfrom __future__ import (absolute_import, division, print_function,\n unicode_literals)\nfrom future.builtins import * # NOQA\n\n\n# Extremely ugly way to avoid a race condition the first time strptime is\n# imported which is not thread safe...\n#\n# See https://bugs.python.org/issue7980\nimport time\ntime.strptime(\"30 Nov 00\", \"%d %b %y\")\n\n\nif __name__ == '__main__': # pragma: no cover\n import doctest\n doctest.testmod(exclude_empty=True)\n", "path": "obspy/clients/fdsn/routing/__init__.py"}]} | 1,218 | 150 |
gh_patches_debug_23644 | rasdani/github-patches | git_diff | goauthentik__authentik-8926 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Unable to use Date prompt type in enrollment flow.
**Describe the bug**
I wanted to add a Date prompt in the enrollment flow, asking the user to provide their birthdate (`attributes.birthday`), just for verification purpose. But, after I added it to the flow and bind the prompt to a stage, when user filled their birthdate info in and click "Continue", authentik returned with the error "Request has been denied. Failed to update user. Please try again later.".
**To Reproduce**
Steps to reproduce the behavior:
1. Create a new prompt with Date format.
2. Bind that prompt to a prompt stage in an enrollment flow.
3. Go to the enrollment flow and fill all the information.
4. See error.
**Expected behavior**
The user will be written to authentik, and the verification email should be sent to the user.
**Screenshots**




**Logs**
From Server:
```
{"auth_via": "unauthenticated", "domain_url": "domain", "event": "/api/v3/flows/executor/nzid-register/?query=next%3D%252F", "host": "domain", "level": "info", "logger": "authentik.asgi", "method": "GET", "pid": 44, "remote": "x.x.x.x", "request_id": "94ca682b026941abaa8b853b09380512", "runtime": 610, "schema_name": "public", "scheme": "https", "status": 200, "timestamp": "2024-02-27T18:41:10.517363", "user": "", "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"}
```
I can't find the logs from worker (LOG_LEVEL already TRACE). Will send the logs asap.
**Version and Deployment (please complete the following information):**
- authentik version: [2024.2.1](https://goauthentik.io/docs/releases/2024.2)
- Deployment: docker-compose
**Additional context**
I tested the flow using Text type. It worked, but I still want client to render a date picker to user for easier input.
</issue>
<code>
[start of authentik/stages/user_write/stage.py]
1 """Write stage logic"""
2
3 from typing import Any
4
5 from django.contrib.auth import update_session_auth_hash
6 from django.db import transaction
7 from django.db.utils import IntegrityError, InternalError
8 from django.http import HttpRequest, HttpResponse
9 from django.utils.translation import gettext as _
10 from rest_framework.exceptions import ValidationError
11
12 from authentik.core.middleware import SESSION_KEY_IMPERSONATE_USER
13 from authentik.core.models import USER_ATTRIBUTE_SOURCES, User, UserSourceConnection, UserTypes
14 from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION
15 from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
16 from authentik.flows.stage import StageView
17 from authentik.flows.views.executor import FlowExecutorView
18 from authentik.lib.config import set_path_in_dict
19 from authentik.stages.password import BACKEND_INBUILT
20 from authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND
21 from authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT
22 from authentik.stages.user_write.models import UserCreationMode
23 from authentik.stages.user_write.signals import user_write
24
25 PLAN_CONTEXT_GROUPS = "groups"
26 PLAN_CONTEXT_USER_TYPE = "user_type"
27 PLAN_CONTEXT_USER_PATH = "user_path"
28
29
30 class UserWriteStageView(StageView):
31 """Finalise Enrollment flow by creating a user object."""
32
33 def __init__(self, executor: FlowExecutorView, **kwargs):
34 super().__init__(executor, **kwargs)
35 self.disallowed_user_attributes = [
36 "groups",
37 ]
38
39 @staticmethod
40 def write_attribute(user: User, key: str, value: Any):
41 """Allow use of attributes.foo.bar when writing to a user, with full
42 recursion"""
43 parts = key.replace("_", ".").split(".")
44 if len(parts) < 1: # pragma: no cover
45 return
46 # Function will always be called with a key like attributes.
47 # this is just a sanity check to ensure that is removed
48 if parts[0] == "attributes":
49 parts = parts[1:]
50 set_path_in_dict(user.attributes, ".".join(parts), value)
51
52 def ensure_user(self) -> tuple[User | None, bool]:
53 """Ensure a user exists"""
54 user_created = False
55 path = self.executor.plan.context.get(
56 PLAN_CONTEXT_USER_PATH, self.executor.current_stage.user_path_template
57 )
58 if path == "":
59 path = User.default_path()
60
61 try:
62 user_type = UserTypes(
63 self.executor.plan.context.get(
64 PLAN_CONTEXT_USER_TYPE,
65 self.executor.current_stage.user_type,
66 )
67 )
68 except ValueError:
69 user_type = self.executor.current_stage.user_type
70 if user_type == UserTypes.INTERNAL_SERVICE_ACCOUNT:
71 user_type = UserTypes.SERVICE_ACCOUNT
72
73 if not self.request.user.is_anonymous:
74 self.executor.plan.context.setdefault(PLAN_CONTEXT_PENDING_USER, self.request.user)
75 if (
76 PLAN_CONTEXT_PENDING_USER not in self.executor.plan.context
77 or self.executor.current_stage.user_creation_mode == UserCreationMode.ALWAYS_CREATE
78 ):
79 if self.executor.current_stage.user_creation_mode == UserCreationMode.NEVER_CREATE:
80 return None, False
81 self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = User(
82 is_active=not self.executor.current_stage.create_users_as_inactive,
83 path=path,
84 type=user_type,
85 )
86 self.executor.plan.context[PLAN_CONTEXT_AUTHENTICATION_BACKEND] = BACKEND_INBUILT
87 self.logger.debug(
88 "Created new user",
89 flow_slug=self.executor.flow.slug,
90 )
91 user_created = True
92 user: User = self.executor.plan.context[PLAN_CONTEXT_PENDING_USER]
93 return user, user_created
94
95 def update_user(self, user: User):
96 """Update `user` with data from plan context
97
98 Only simple attributes are updated, nothing which requires a foreign key or m2m"""
99 data: dict = self.executor.plan.context[PLAN_CONTEXT_PROMPT]
100 # This is always sent back but not written to the user
101 data.pop("component", None)
102 for key, value in data.items():
103 setter_name = f"set_{key}"
104 # Check if user has a setter for this key, like set_password
105 if hasattr(user, setter_name):
106 setter = getattr(user, setter_name)
107 if callable(setter):
108 setter(value)
109 elif key in self.disallowed_user_attributes:
110 self.logger.info("discarding key", key=key)
111 continue
112 # For exact attributes match, update the dictionary in place
113 elif key == "attributes":
114 user.attributes.update(value)
115 # If using dot notation, use the correct helper to update the nested value
116 elif key.startswith("attributes.") or key.startswith("attributes_"):
117 UserWriteStageView.write_attribute(user, key, value)
118 # User has this key already
119 elif hasattr(user, key):
120 setattr(user, key, value)
121 # If none of the cases above matched, we have an attribute that the user doesn't have,
122 # has no setter for, is not a nested attributes value and as such is invalid
123 else:
124 self.logger.info("discarding key", key=key)
125 continue
126 # Check if we're writing from a source, and save the source to the attributes
127 if PLAN_CONTEXT_SOURCES_CONNECTION in self.executor.plan.context:
128 if USER_ATTRIBUTE_SOURCES not in user.attributes or not isinstance(
129 user.attributes.get(USER_ATTRIBUTE_SOURCES), list
130 ):
131 user.attributes[USER_ATTRIBUTE_SOURCES] = []
132 connection: UserSourceConnection = self.executor.plan.context[
133 PLAN_CONTEXT_SOURCES_CONNECTION
134 ]
135 if connection.source.name not in user.attributes[USER_ATTRIBUTE_SOURCES]:
136 user.attributes[USER_ATTRIBUTE_SOURCES].append(connection.source.name)
137
138 def dispatch(self, request: HttpRequest) -> HttpResponse:
139 """Save data in the current flow to the currently pending user. If no user is pending,
140 a new user is created."""
141 if PLAN_CONTEXT_PROMPT not in self.executor.plan.context:
142 message = _("No Pending data.")
143 self.logger.debug(message)
144 return self.executor.stage_invalid(message)
145 data = self.executor.plan.context[PLAN_CONTEXT_PROMPT]
146 user, user_created = self.ensure_user()
147 if not user:
148 message = _("No user found and can't create new user.")
149 self.logger.info(message)
150 return self.executor.stage_invalid(message)
151 # Before we change anything, check if the user is the same as in the request
152 # and we're updating a password. In that case we need to update the session hash
153 # Also check that we're not currently impersonating, so we don't update the session
154 should_update_session = False
155 if (
156 any("password" in x for x in data.keys())
157 and self.request.user.pk == user.pk
158 and SESSION_KEY_IMPERSONATE_USER not in self.request.session
159 ):
160 should_update_session = True
161 try:
162 self.update_user(user)
163 except ValidationError as exc:
164 self.logger.warning("failed to update user", exc=exc)
165 return self.executor.stage_invalid(_("Failed to update user. Please try again later."))
166 # Extra check to prevent flows from saving a user with a blank username
167 if user.username == "":
168 self.logger.warning("Aborting write to empty username", user=user)
169 return self.executor.stage_invalid()
170 try:
171 with transaction.atomic():
172 user.save()
173 if self.executor.current_stage.create_users_group:
174 user.ak_groups.add(self.executor.current_stage.create_users_group)
175 if PLAN_CONTEXT_GROUPS in self.executor.plan.context:
176 user.ak_groups.add(*self.executor.plan.context[PLAN_CONTEXT_GROUPS])
177 except (IntegrityError, ValueError, TypeError, InternalError) as exc:
178 self.logger.warning("Failed to save user", exc=exc)
179 return self.executor.stage_invalid(_("Failed to update user. Please try again later."))
180 user_write.send(sender=self, request=request, user=user, data=data, created=user_created)
181 # Check if the password has been updated, and update the session auth hash
182 if should_update_session:
183 update_session_auth_hash(self.request, user)
184 self.logger.debug("Updated session hash", user=user)
185 self.logger.debug(
186 "Updated existing user",
187 user=user,
188 flow_slug=self.executor.flow.slug,
189 )
190 return self.executor.stage_ok()
191
[end of authentik/stages/user_write/stage.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/authentik/stages/user_write/stage.py b/authentik/stages/user_write/stage.py
--- a/authentik/stages/user_write/stage.py
+++ b/authentik/stages/user_write/stage.py
@@ -12,6 +12,7 @@
from authentik.core.middleware import SESSION_KEY_IMPERSONATE_USER
from authentik.core.models import USER_ATTRIBUTE_SOURCES, User, UserSourceConnection, UserTypes
from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION
+from authentik.events.utils import sanitize_item
from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER
from authentik.flows.stage import StageView
from authentik.flows.views.executor import FlowExecutorView
@@ -47,7 +48,7 @@
# this is just a sanity check to ensure that is removed
if parts[0] == "attributes":
parts = parts[1:]
- set_path_in_dict(user.attributes, ".".join(parts), value)
+ set_path_in_dict(user.attributes, ".".join(parts), sanitize_item(value))
def ensure_user(self) -> tuple[User | None, bool]:
"""Ensure a user exists"""
| {"golden_diff": "diff --git a/authentik/stages/user_write/stage.py b/authentik/stages/user_write/stage.py\n--- a/authentik/stages/user_write/stage.py\n+++ b/authentik/stages/user_write/stage.py\n@@ -12,6 +12,7 @@\n from authentik.core.middleware import SESSION_KEY_IMPERSONATE_USER\n from authentik.core.models import USER_ATTRIBUTE_SOURCES, User, UserSourceConnection, UserTypes\n from authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION\n+from authentik.events.utils import sanitize_item\n from authentik.flows.planner import PLAN_CONTEXT_PENDING_USER\n from authentik.flows.stage import StageView\n from authentik.flows.views.executor import FlowExecutorView\n@@ -47,7 +48,7 @@\n # this is just a sanity check to ensure that is removed\n if parts[0] == \"attributes\":\n parts = parts[1:]\n- set_path_in_dict(user.attributes, \".\".join(parts), value)\n+ set_path_in_dict(user.attributes, \".\".join(parts), sanitize_item(value))\n \n def ensure_user(self) -> tuple[User | None, bool]:\n \"\"\"Ensure a user exists\"\"\"\n", "issue": "Unable to use Date prompt type in enrollment flow.\n**Describe the bug**\r\nI wanted to add a Date prompt in the enrollment flow, asking the user to provide their birthdate (`attributes.birthday`), just for verification purpose. But, after I added it to the flow and bind the prompt to a stage, when user filled their birthdate info in and click \"Continue\", authentik returned with the error \"Request has been denied. Failed to update user. Please try again later.\".\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n\r\n1. Create a new prompt with Date format.\r\n2. Bind that prompt to a prompt stage in an enrollment flow.\r\n3. Go to the enrollment flow and fill all the information.\r\n4. See error.\r\n\r\n**Expected behavior**\r\nThe user will be written to authentik, and the verification email should be sent to the user.\r\n\r\n**Screenshots**\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n**Logs**\r\nFrom Server:\r\n```\r\n{\"auth_via\": \"unauthenticated\", \"domain_url\": \"domain\", \"event\": \"/api/v3/flows/executor/nzid-register/?query=next%3D%252F\", \"host\": \"domain\", \"level\": \"info\", \"logger\": \"authentik.asgi\", \"method\": \"GET\", \"pid\": 44, \"remote\": \"x.x.x.x\", \"request_id\": \"94ca682b026941abaa8b853b09380512\", \"runtime\": 610, \"schema_name\": \"public\", \"scheme\": \"https\", \"status\": 200, \"timestamp\": \"2024-02-27T18:41:10.517363\", \"user\": \"\", \"user_agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36\"}\r\n```\r\nI can't find the logs from worker (LOG_LEVEL already TRACE). Will send the logs asap.\r\n\r\n**Version and Deployment (please complete the following information):**\r\n\r\n- authentik version: [2024.2.1](https://goauthentik.io/docs/releases/2024.2)\r\n- Deployment: docker-compose\r\n\r\n**Additional context**\r\nI tested the flow using Text type. 
It worked, but I still want client to render a date picker to user for easier input.\r\n\n", "before_files": [{"content": "\"\"\"Write stage logic\"\"\"\n\nfrom typing import Any\n\nfrom django.contrib.auth import update_session_auth_hash\nfrom django.db import transaction\nfrom django.db.utils import IntegrityError, InternalError\nfrom django.http import HttpRequest, HttpResponse\nfrom django.utils.translation import gettext as _\nfrom rest_framework.exceptions import ValidationError\n\nfrom authentik.core.middleware import SESSION_KEY_IMPERSONATE_USER\nfrom authentik.core.models import USER_ATTRIBUTE_SOURCES, User, UserSourceConnection, UserTypes\nfrom authentik.core.sources.stage import PLAN_CONTEXT_SOURCES_CONNECTION\nfrom authentik.flows.planner import PLAN_CONTEXT_PENDING_USER\nfrom authentik.flows.stage import StageView\nfrom authentik.flows.views.executor import FlowExecutorView\nfrom authentik.lib.config import set_path_in_dict\nfrom authentik.stages.password import BACKEND_INBUILT\nfrom authentik.stages.password.stage import PLAN_CONTEXT_AUTHENTICATION_BACKEND\nfrom authentik.stages.prompt.stage import PLAN_CONTEXT_PROMPT\nfrom authentik.stages.user_write.models import UserCreationMode\nfrom authentik.stages.user_write.signals import user_write\n\nPLAN_CONTEXT_GROUPS = \"groups\"\nPLAN_CONTEXT_USER_TYPE = \"user_type\"\nPLAN_CONTEXT_USER_PATH = \"user_path\"\n\n\nclass UserWriteStageView(StageView):\n \"\"\"Finalise Enrollment flow by creating a user object.\"\"\"\n\n def __init__(self, executor: FlowExecutorView, **kwargs):\n super().__init__(executor, **kwargs)\n self.disallowed_user_attributes = [\n \"groups\",\n ]\n\n @staticmethod\n def write_attribute(user: User, key: str, value: Any):\n \"\"\"Allow use of attributes.foo.bar when writing to a user, with full\n recursion\"\"\"\n parts = key.replace(\"_\", \".\").split(\".\")\n if len(parts) < 1: # pragma: no cover\n return\n # Function will always be called with a key like attributes.\n # this is just a sanity check to ensure that is removed\n if parts[0] == \"attributes\":\n parts = parts[1:]\n set_path_in_dict(user.attributes, \".\".join(parts), value)\n\n def ensure_user(self) -> tuple[User | None, bool]:\n \"\"\"Ensure a user exists\"\"\"\n user_created = False\n path = self.executor.plan.context.get(\n PLAN_CONTEXT_USER_PATH, self.executor.current_stage.user_path_template\n )\n if path == \"\":\n path = User.default_path()\n\n try:\n user_type = UserTypes(\n self.executor.plan.context.get(\n PLAN_CONTEXT_USER_TYPE,\n self.executor.current_stage.user_type,\n )\n )\n except ValueError:\n user_type = self.executor.current_stage.user_type\n if user_type == UserTypes.INTERNAL_SERVICE_ACCOUNT:\n user_type = UserTypes.SERVICE_ACCOUNT\n\n if not self.request.user.is_anonymous:\n self.executor.plan.context.setdefault(PLAN_CONTEXT_PENDING_USER, self.request.user)\n if (\n PLAN_CONTEXT_PENDING_USER not in self.executor.plan.context\n or self.executor.current_stage.user_creation_mode == UserCreationMode.ALWAYS_CREATE\n ):\n if self.executor.current_stage.user_creation_mode == UserCreationMode.NEVER_CREATE:\n return None, False\n self.executor.plan.context[PLAN_CONTEXT_PENDING_USER] = User(\n is_active=not self.executor.current_stage.create_users_as_inactive,\n path=path,\n type=user_type,\n )\n self.executor.plan.context[PLAN_CONTEXT_AUTHENTICATION_BACKEND] = BACKEND_INBUILT\n self.logger.debug(\n \"Created new user\",\n flow_slug=self.executor.flow.slug,\n )\n user_created = True\n user: User = 
self.executor.plan.context[PLAN_CONTEXT_PENDING_USER]\n return user, user_created\n\n def update_user(self, user: User):\n \"\"\"Update `user` with data from plan context\n\n Only simple attributes are updated, nothing which requires a foreign key or m2m\"\"\"\n data: dict = self.executor.plan.context[PLAN_CONTEXT_PROMPT]\n # This is always sent back but not written to the user\n data.pop(\"component\", None)\n for key, value in data.items():\n setter_name = f\"set_{key}\"\n # Check if user has a setter for this key, like set_password\n if hasattr(user, setter_name):\n setter = getattr(user, setter_name)\n if callable(setter):\n setter(value)\n elif key in self.disallowed_user_attributes:\n self.logger.info(\"discarding key\", key=key)\n continue\n # For exact attributes match, update the dictionary in place\n elif key == \"attributes\":\n user.attributes.update(value)\n # If using dot notation, use the correct helper to update the nested value\n elif key.startswith(\"attributes.\") or key.startswith(\"attributes_\"):\n UserWriteStageView.write_attribute(user, key, value)\n # User has this key already\n elif hasattr(user, key):\n setattr(user, key, value)\n # If none of the cases above matched, we have an attribute that the user doesn't have,\n # has no setter for, is not a nested attributes value and as such is invalid\n else:\n self.logger.info(\"discarding key\", key=key)\n continue\n # Check if we're writing from a source, and save the source to the attributes\n if PLAN_CONTEXT_SOURCES_CONNECTION in self.executor.plan.context:\n if USER_ATTRIBUTE_SOURCES not in user.attributes or not isinstance(\n user.attributes.get(USER_ATTRIBUTE_SOURCES), list\n ):\n user.attributes[USER_ATTRIBUTE_SOURCES] = []\n connection: UserSourceConnection = self.executor.plan.context[\n PLAN_CONTEXT_SOURCES_CONNECTION\n ]\n if connection.source.name not in user.attributes[USER_ATTRIBUTE_SOURCES]:\n user.attributes[USER_ATTRIBUTE_SOURCES].append(connection.source.name)\n\n def dispatch(self, request: HttpRequest) -> HttpResponse:\n \"\"\"Save data in the current flow to the currently pending user. If no user is pending,\n a new user is created.\"\"\"\n if PLAN_CONTEXT_PROMPT not in self.executor.plan.context:\n message = _(\"No Pending data.\")\n self.logger.debug(message)\n return self.executor.stage_invalid(message)\n data = self.executor.plan.context[PLAN_CONTEXT_PROMPT]\n user, user_created = self.ensure_user()\n if not user:\n message = _(\"No user found and can't create new user.\")\n self.logger.info(message)\n return self.executor.stage_invalid(message)\n # Before we change anything, check if the user is the same as in the request\n # and we're updating a password. In that case we need to update the session hash\n # Also check that we're not currently impersonating, so we don't update the session\n should_update_session = False\n if (\n any(\"password\" in x for x in data.keys())\n and self.request.user.pk == user.pk\n and SESSION_KEY_IMPERSONATE_USER not in self.request.session\n ):\n should_update_session = True\n try:\n self.update_user(user)\n except ValidationError as exc:\n self.logger.warning(\"failed to update user\", exc=exc)\n return self.executor.stage_invalid(_(\"Failed to update user. 
Please try again later.\"))\n # Extra check to prevent flows from saving a user with a blank username\n if user.username == \"\":\n self.logger.warning(\"Aborting write to empty username\", user=user)\n return self.executor.stage_invalid()\n try:\n with transaction.atomic():\n user.save()\n if self.executor.current_stage.create_users_group:\n user.ak_groups.add(self.executor.current_stage.create_users_group)\n if PLAN_CONTEXT_GROUPS in self.executor.plan.context:\n user.ak_groups.add(*self.executor.plan.context[PLAN_CONTEXT_GROUPS])\n except (IntegrityError, ValueError, TypeError, InternalError) as exc:\n self.logger.warning(\"Failed to save user\", exc=exc)\n return self.executor.stage_invalid(_(\"Failed to update user. Please try again later.\"))\n user_write.send(sender=self, request=request, user=user, data=data, created=user_created)\n # Check if the password has been updated, and update the session auth hash\n if should_update_session:\n update_session_auth_hash(self.request, user)\n self.logger.debug(\"Updated session hash\", user=user)\n self.logger.debug(\n \"Updated existing user\",\n user=user,\n flow_slug=self.executor.flow.slug,\n )\n return self.executor.stage_ok()\n", "path": "authentik/stages/user_write/stage.py"}]} | 3,554 | 262 |
gh_patches_debug_8074 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-2456 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Spider academy is broken
During the global build at 2021-05-21-20-28-08, spider **academy** failed with **0 features** and **0 errors**.
Here's [the log](https://data.alltheplaces.xyz/runs/2021-05-21-20-28-08/logs/academy.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-21-20-28-08/output/academy.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-21-20-28-08/output/academy.geojson))
</issue>
<code>
[start of locations/spiders/academy.py]
1 # -*- coding: utf-8 -*-
2 import json
3 import re
4
5 import scrapy
6 from scrapy.utils.gz import gunzip
7
8 from locations.items import GeojsonPointItem
9 from locations.hours import OpeningHours
10
11
12 class AcademySpider(scrapy.Spider):
13 name = "academy"
14 item_attributes = {'brand': 'Academy Sports + Outdoors', 'brand_wikidata': 'Q4671380'}
15 allowed_domains = []
16 start_urls = [
17 'https://www.academy.com/sitemap_store_1.xml.gz',
18 ]
19
20 def parse(self, response):
21 body = gunzip(response.body)
22 body = scrapy.Selector(text=body)
23 body.remove_namespaces()
24 urls = body.xpath('//url/loc/text()').extract()
25 for path in urls:
26 store_url = re.compile(r'http://www.academy.com/shop/storelocator/.+?/.+?/store-\d+')
27 if re.search(store_url, path):
28 yield scrapy.Request(
29 path.strip(),
30 callback=self.parse_store
31 )
32
33 def parse_hours(self, hours):
34 opening_hours = OpeningHours()
35
36 for elem in hours:
37 day, open_time, close_time = re.search(r'([A-Za-z]+)\s([\d:]+)\s-\s([\d:]+)', elem).groups()
38 opening_hours.add_range(day=day[:2], open_time=open_time, close_time=close_time)
39
40 return opening_hours.as_opening_hours()
41
42 def parse_store(self, response):
43 properties = {
44 'ref': re.search(r'.+/(.+?)/?(?:\.html|$)', response.url).group(1),
45 'name': response.xpath('normalize-space(//h1[@itemprop="name"]//text())').extract_first(),
46 'addr_full': response.xpath('normalize-space(//span[@itemprop="streetAddress"]//text())').extract_first(),
47 'city': response.xpath('normalize-space(//span[@itemprop="addressLocality"]//text())').extract_first(),
48 'state': response.xpath('normalize-space(//span[@itemprop="addressRegion"]//text())').extract_first(),
49 'postcode': response.xpath('normalize-space(//span[@itemprop="postalCode"]//text())').extract_first(),
50 'phone': response.xpath('//a[@id="storePhone"]/text()').extract_first(),
51 'website': response.url,
52 'lat': float(response.xpath('//input[@id="params"]/@data-lat').extract_first()),
53 'lon': float(response.xpath('//input[@id="params"]/@data-lng').extract_first()),
54 }
55
56 properties['opening_hours'] = self.parse_hours(
57 response.xpath('//*[@itemprop="openingHours"]/@datetime').extract()
58 )
59
60 yield GeojsonPointItem(**properties)
61
[end of locations/spiders/academy.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/locations/spiders/academy.py b/locations/spiders/academy.py
--- a/locations/spiders/academy.py
+++ b/locations/spiders/academy.py
@@ -23,7 +23,7 @@
body.remove_namespaces()
urls = body.xpath('//url/loc/text()').extract()
for path in urls:
- store_url = re.compile(r'http://www.academy.com/shop/storelocator/.+?/.+?/store-\d+')
+ store_url = re.compile(r'https://www.academy.com/shop/storelocator/.+?/.+?/store-\d+')
if re.search(store_url, path):
yield scrapy.Request(
path.strip(),
| {"golden_diff": "diff --git a/locations/spiders/academy.py b/locations/spiders/academy.py\n--- a/locations/spiders/academy.py\n+++ b/locations/spiders/academy.py\n@@ -23,7 +23,7 @@\n body.remove_namespaces()\n urls = body.xpath('//url/loc/text()').extract()\n for path in urls:\n- store_url = re.compile(r'http://www.academy.com/shop/storelocator/.+?/.+?/store-\\d+')\n+ store_url = re.compile(r'https://www.academy.com/shop/storelocator/.+?/.+?/store-\\d+')\n if re.search(store_url, path):\n yield scrapy.Request(\n path.strip(),\n", "issue": "Spider academy is broken\nDuring the global build at 2021-05-21-20-28-08, spider **academy** failed with **0 features** and **0 errors**.\n\nHere's [the log](https://data.alltheplaces.xyz/runs/2021-05-21-20-28-08/logs/academy.log) and [the output](https://data.alltheplaces.xyz/runs/2021-05-21-20-28-08/output/academy.geojson) ([on a map](https://data.alltheplaces.xyz/map.html?show=https://data.alltheplaces.xyz/runs/2021-05-21-20-28-08/output/academy.geojson))\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport json\nimport re\n\nimport scrapy\nfrom scrapy.utils.gz import gunzip\n\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\n\nclass AcademySpider(scrapy.Spider):\n name = \"academy\"\n item_attributes = {'brand': 'Academy Sports + Outdoors', 'brand_wikidata': 'Q4671380'}\n allowed_domains = []\n start_urls = [\n 'https://www.academy.com/sitemap_store_1.xml.gz',\n ]\n\n def parse(self, response):\n body = gunzip(response.body)\n body = scrapy.Selector(text=body)\n body.remove_namespaces()\n urls = body.xpath('//url/loc/text()').extract()\n for path in urls:\n store_url = re.compile(r'http://www.academy.com/shop/storelocator/.+?/.+?/store-\\d+')\n if re.search(store_url, path):\n yield scrapy.Request(\n path.strip(),\n callback=self.parse_store\n )\n\n def parse_hours(self, hours):\n opening_hours = OpeningHours()\n\n for elem in hours:\n day, open_time, close_time = re.search(r'([A-Za-z]+)\\s([\\d:]+)\\s-\\s([\\d:]+)', elem).groups()\n opening_hours.add_range(day=day[:2], open_time=open_time, close_time=close_time)\n\n return opening_hours.as_opening_hours()\n\n def parse_store(self, response):\n properties = {\n 'ref': re.search(r'.+/(.+?)/?(?:\\.html|$)', response.url).group(1),\n 'name': response.xpath('normalize-space(//h1[@itemprop=\"name\"]//text())').extract_first(),\n 'addr_full': response.xpath('normalize-space(//span[@itemprop=\"streetAddress\"]//text())').extract_first(),\n 'city': response.xpath('normalize-space(//span[@itemprop=\"addressLocality\"]//text())').extract_first(),\n 'state': response.xpath('normalize-space(//span[@itemprop=\"addressRegion\"]//text())').extract_first(),\n 'postcode': response.xpath('normalize-space(//span[@itemprop=\"postalCode\"]//text())').extract_first(),\n 'phone': response.xpath('//a[@id=\"storePhone\"]/text()').extract_first(),\n 'website': response.url,\n 'lat': float(response.xpath('//input[@id=\"params\"]/@data-lat').extract_first()),\n 'lon': float(response.xpath('//input[@id=\"params\"]/@data-lng').extract_first()),\n }\n \n properties['opening_hours'] = self.parse_hours(\n response.xpath('//*[@itemprop=\"openingHours\"]/@datetime').extract()\n )\n \n yield GeojsonPointItem(**properties)\n", "path": "locations/spiders/academy.py"}]} | 1,436 | 160 |
gh_patches_debug_14582 | rasdani/github-patches | git_diff | liqd__a4-product-139 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Translations incomplete
- partner page
</issue>
<code>
[start of liqd_product/apps/contrib/management/commands/makemessages.py]
1 from os import path
2
3 from django.conf import settings
4 from django.core.management.commands import makemessages
5
6
7 def get_module_dir(name):
8 module = __import__(name)
9 return path.dirname(module.__file__)
10
11
12 class Command(makemessages.Command):
13 msgmerge_options = (
14 makemessages.Command.msgmerge_options + ['--no-fuzzy-matching']
15 )
16
17 def handle(self, *args, **options):
18 if options['domain'] == 'djangojs':
19 if options['extensions'] is None:
20 options['extensions'] = ['js', 'jsx']
21 return super().handle(*args, **options)
22
23 def find_files(self, root):
24 a4js_paths = super().find_files(path.join(
25 settings.BASE_DIR, 'node_modules', 'adhocracy4', 'adhocracy4'
26 ))
27 a4_paths = super().find_files(get_module_dir('adhocracy4'))
28 liqd_product_paths = super().find_files(
29 path.relpath(get_module_dir('liqd_product'))
30 )
31
32 return a4js_paths + a4_paths + liqd_product_paths
33
[end of liqd_product/apps/contrib/management/commands/makemessages.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/liqd_product/apps/contrib/management/commands/makemessages.py b/liqd_product/apps/contrib/management/commands/makemessages.py
--- a/liqd_product/apps/contrib/management/commands/makemessages.py
+++ b/liqd_product/apps/contrib/management/commands/makemessages.py
@@ -25,8 +25,15 @@
settings.BASE_DIR, 'node_modules', 'adhocracy4', 'adhocracy4'
))
a4_paths = super().find_files(get_module_dir('adhocracy4'))
+ mbjs_paths = super().find_files(path.join(
+ settings.BASE_DIR, 'node_modules', 'a4-meinberlin', 'meinberlin'
+ ))
+ mb_paths = super().find_files(get_module_dir('meinberlin'))
+
liqd_product_paths = super().find_files(
path.relpath(get_module_dir('liqd_product'))
)
- return a4js_paths + a4_paths + liqd_product_paths
+ return a4js_paths + a4_paths + \
+ mbjs_paths + mb_paths + \
+ liqd_product_paths
| {"golden_diff": "diff --git a/liqd_product/apps/contrib/management/commands/makemessages.py b/liqd_product/apps/contrib/management/commands/makemessages.py\n--- a/liqd_product/apps/contrib/management/commands/makemessages.py\n+++ b/liqd_product/apps/contrib/management/commands/makemessages.py\n@@ -25,8 +25,15 @@\n settings.BASE_DIR, 'node_modules', 'adhocracy4', 'adhocracy4'\n ))\n a4_paths = super().find_files(get_module_dir('adhocracy4'))\n+ mbjs_paths = super().find_files(path.join(\n+ settings.BASE_DIR, 'node_modules', 'a4-meinberlin', 'meinberlin'\n+ ))\n+ mb_paths = super().find_files(get_module_dir('meinberlin'))\n+\n liqd_product_paths = super().find_files(\n path.relpath(get_module_dir('liqd_product'))\n )\n \n- return a4js_paths + a4_paths + liqd_product_paths\n+ return a4js_paths + a4_paths + \\\n+ mbjs_paths + mb_paths + \\\n+ liqd_product_paths\n", "issue": "Translations incomplete\n- partner page\n", "before_files": [{"content": "from os import path\n\nfrom django.conf import settings\nfrom django.core.management.commands import makemessages\n\n\ndef get_module_dir(name):\n module = __import__(name)\n return path.dirname(module.__file__)\n\n\nclass Command(makemessages.Command):\n msgmerge_options = (\n makemessages.Command.msgmerge_options + ['--no-fuzzy-matching']\n )\n\n def handle(self, *args, **options):\n if options['domain'] == 'djangojs':\n if options['extensions'] is None:\n options['extensions'] = ['js', 'jsx']\n return super().handle(*args, **options)\n\n def find_files(self, root):\n a4js_paths = super().find_files(path.join(\n settings.BASE_DIR, 'node_modules', 'adhocracy4', 'adhocracy4'\n ))\n a4_paths = super().find_files(get_module_dir('adhocracy4'))\n liqd_product_paths = super().find_files(\n path.relpath(get_module_dir('liqd_product'))\n )\n\n return a4js_paths + a4_paths + liqd_product_paths\n", "path": "liqd_product/apps/contrib/management/commands/makemessages.py"}]} | 857 | 255 |
gh_patches_debug_4529 | rasdani/github-patches | git_diff | dotkom__onlineweb4-578 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Usernames are forced to lowercase on register.
Just witnessed a registration process with a username starting with a capital letter. When user tried to log in after e-mail verification, he could not log in. Changing capital to lowercase did the trick. This is however very unintuitive, so capital letters should be allowed in username, or atleast there should be a notification or warning.
</issue>
<code>
[start of apps/authentication/views.py]
1 # -*- coding: utf-8 -*-
2
3 import uuid
4 import re
5
6 from django.contrib import auth
7 from django.contrib import messages
8 from django.core.mail import send_mail
9 from django.shortcuts import render, redirect, get_object_or_404
10 from django.http import HttpResponseRedirect
11 from django.utils.translation import ugettext as _
12 from django.views.decorators.debug import sensitive_post_parameters
13
14 from django.conf import settings
15 from apps.authentication.forms import (LoginForm, RegisterForm,
16 RecoveryForm, ChangePasswordForm)
17 from apps.authentication.models import OnlineUser as User, RegisterToken, Email
18
19
20 @sensitive_post_parameters()
21 def login(request):
22 redirect_url = request.REQUEST.get('next', '')
23 if request.method == 'POST':
24 form = LoginForm(request.POST)
25 if form.login(request):
26 messages.success(request, _(u'Du er nå logget inn.'))
27 if redirect_url:
28 return HttpResponseRedirect(redirect_url)
29 return HttpResponseRedirect('/')
30 else: form = LoginForm(request.POST, auto_id=True)
31 else:
32 form = LoginForm()
33
34 response_dict = { 'form' : form, 'next' : redirect_url}
35 return render(request, 'auth/login.html', response_dict)
36
37
38 def logout(request):
39 auth.logout(request)
40 messages.success(request, _(u'Du er nå logget ut.'))
41 return HttpResponseRedirect('/')
42
43
44 @sensitive_post_parameters()
45 def register(request):
46 if request.user.is_authenticated():
47 messages.error(request, _(u'Registrering av ny konto krever at du er logget ut.'))
48 return HttpResponseRedirect('/')
49 else:
50 if request.method == 'POST':
51 form = RegisterForm(request.POST)
52 if form.is_valid():
53 cleaned = form.cleaned_data
54
55 # Create user
56 user = User(
57 username=cleaned['username'].lower(),
58 first_name=cleaned['first_name'].title(),
59 last_name=cleaned['last_name'].title(),
60 )
61 # Set remaining fields
62 user.phone_number=cleaned['phone']
63 user.address=cleaned['address'].title()
64 user.zip_code=cleaned['zip_code']
65 # Store password properly
66 user.set_password(cleaned['password'])
67 # Users need to be manually activated
68 user.is_active = False
69 user.save()
70
71 # Set email address
72 email = Email(
73 user=user,
74 email=cleaned['email'],
75 )
76 email.primary = True
77 email.save()
78
79 # Create the registration token
80 token = uuid.uuid4().hex
81 rt = RegisterToken(user=user, email=cleaned['email'], token=token)
82 rt.save()
83
84 email_message = _(u"""
85 En konto har blitt registrert på online.ntnu.no med denne epostadressen. Dersom du ikke
86 har utført denne handlingen ber vi deg se bort fra denne eposten.
87
88 For å bruke denne kontoen kreves det at du verifiserer epostadressen. Du kan gjøre
89 dette ved å besøke linken under.
90
91 http://%s/auth/verify/%s/
92
93 Denne lenken vil være gyldig i 24 timer. Dersom du behøver å få tilsendt en ny lenke
94 kan dette gjøres med funksjonen for å gjenopprette passord.
95 """) % (request.META['HTTP_HOST'], token)
96
97 send_mail(_(u'Verifiser din konto'), email_message, settings.DEFAULT_FROM_EMAIL, [email.email,])
98
99 messages.success(request, _(u'Registreringen var vellykket. Se tilsendt epost for verifiseringsinstrukser.'))
100
101 return HttpResponseRedirect('/')
102 else:
103 form = RegisterForm(request.POST, auto_id=True)
104 else:
105 form = RegisterForm()
106
107 return render(request, 'auth/register.html', {'form': form, })
108
109
110 def verify(request, token):
111 rt = get_object_or_404(RegisterToken, token=token)
112
113 if rt.is_valid:
114 email = get_object_or_404(Email, email=rt.email)
115 email.verified = True
116 email.save()
117
118 user = getattr(rt, 'user')
119
120 # If it is a stud email, set the ntnu_username for user
121 if re.match(r'[^@][email protected]', rt.email):
122 user.ntnu_username = rt.email.split("@")[0]
123
124 user_activated = False
125 if not user.is_active:
126 user.is_active = True
127 user_activated = True
128
129 user.save()
130 rt.delete()
131
132 if user_activated:
133 messages.success(request, _(u'Bruker %s ble aktivert. Du kan nå logge inn.') % user.username)
134 return redirect('auth_login')
135 else:
136 messages.success(request, _(u'Eposten %s er nå verifisert.') % email)
137 return redirect('profiles')
138 else:
139 messages.error(request, _(u'Denne lenken er utløpt. Bruk gjenopprett passord for å få tilsendt en ny lenke.'))
140 return HttpResponseRedirect('/')
141
142
143 def recover(request):
144 if request.user.is_authenticated():
145 messages.error(request, _(u'Gjenoppretning av passord krever at du er logget ut.'))
146 return HttpResponseRedirect('/')
147 else:
148 if request.method == 'POST':
149 form = RecoveryForm(request.POST)
150 if form.is_valid():
151 email_string = form.cleaned_data['email']
152 emails = Email.objects.filter(email=email_string)
153
154 if len(emails) == 0:
155 messages.error(request, _(u'Denne eposten er ikke registrert i våre systemer.'))
156 return HttpResponseRedirect('/')
157
158 email = emails[0]
159
160 # Create the registration token
161 token = uuid.uuid4().hex
162 rt = RegisterToken(user=email.user, email=email.email, token=token)
163 rt.save()
164
165 email_message = _(u"""
166 Vi har mottat forespørsel om å gjenopprette passordet for kontoen bundet til %s.
167 Dersom du ikke har bedt om denne handlingen ber vi deg se bort fra denne eposten.
168
169 Brukernavn: %s
170
171 Hvis du ønsker å gjennomføre en gjenoppretning av passord, bruk lenken under.
172
173 http://%s/auth/set_password/%s/
174
175 Denne lenken vil være gyldig i 24 timer. Dersom du behøver å få tilsendt en ny lenke
176 kan dette gjøres med funksjonen for å gjenopprette passord.
177 """) % (email.email, email.user.username, request.META['HTTP_HOST'], token)
178
179 send_mail(_(u'Gjenoppretning av passord'), email_message, settings.DEFAULT_FROM_EMAIL, [email.email,])
180
181 messages.success(request, _(u'En lenke for gjenoppretning har blitt sendt til %s.') % email.email)
182
183 return HttpResponseRedirect('/')
184 else:
185 form = RecoveryForm(request.POST, auto_id=True)
186 else:
187 form = RecoveryForm()
188
189 return render(request, 'auth/recover.html', {'form': form})
190
191
192 @sensitive_post_parameters()
193 def set_password(request, token=None):
194 if request.user.is_authenticated():
195 return HttpResponseRedirect('/')
196 else:
197 rt = get_object_or_404(RegisterToken, token=token)
198
199 if rt.is_valid:
200 if request.method == 'POST':
201 form = ChangePasswordForm(request.POST, auto_id=True)
202 if form.is_valid():
203 user = getattr(rt, 'user')
204
205 user.is_active = True
206 user.set_password(form.cleaned_data['new_password'])
207 user.save()
208
209 rt.delete()
210
211 messages.success(request, _(u'Bruker %s har gjennomført vellykket gjenoppretning av passord. Du kan nå logge inn.') % user.username)
212
213 return HttpResponseRedirect('/')
214 else:
215
216 form = ChangePasswordForm()
217
218 messages.success(request, _(u'Lenken er akseptert. Vennligst skriv inn ønsket passord.'))
219
220 return render(request, 'auth/set_password.html', {'form': form, 'token': token})
221
222 else:
223 messages.error(request, _(u'Lenken er utløpt. Vennligst bruk gjenoppretning av passord for å få tilsendt en ny lenke.'))
224 return HttpResponseRedirect('/')
225
[end of apps/authentication/views.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/apps/authentication/views.py b/apps/authentication/views.py
--- a/apps/authentication/views.py
+++ b/apps/authentication/views.py
@@ -54,7 +54,7 @@
# Create user
user = User(
- username=cleaned['username'].lower(),
+ username=cleaned['username'],
first_name=cleaned['first_name'].title(),
last_name=cleaned['last_name'].title(),
)
| {"golden_diff": "diff --git a/apps/authentication/views.py b/apps/authentication/views.py\n--- a/apps/authentication/views.py\n+++ b/apps/authentication/views.py\n@@ -54,7 +54,7 @@\n \n # Create user\n user = User(\n- username=cleaned['username'].lower(), \n+ username=cleaned['username'], \n first_name=cleaned['first_name'].title(), \n last_name=cleaned['last_name'].title(),\n )\n", "issue": "Usernames are forced to lowercase on register.\nJust witnessed a registration process with a username starting with a capital letter. When user tried to log in after e-mail verification, he could not log in. Changing capital to lowercase did the trick. This is however very unintuitive, so capital letters should be allowed in username, or atleast there should be a notification or warning.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nimport uuid\nimport re\n\nfrom django.contrib import auth\nfrom django.contrib import messages\nfrom django.core.mail import send_mail\nfrom django.shortcuts import render, redirect, get_object_or_404\nfrom django.http import HttpResponseRedirect\nfrom django.utils.translation import ugettext as _\nfrom django.views.decorators.debug import sensitive_post_parameters\n\nfrom django.conf import settings\nfrom apps.authentication.forms import (LoginForm, RegisterForm, \n RecoveryForm, ChangePasswordForm)\nfrom apps.authentication.models import OnlineUser as User, RegisterToken, Email\n\n\n@sensitive_post_parameters()\ndef login(request):\n redirect_url = request.REQUEST.get('next', '')\n if request.method == 'POST':\n form = LoginForm(request.POST)\n if form.login(request):\n messages.success(request, _(u'Du er n\u00e5 logget inn.'))\n if redirect_url:\n return HttpResponseRedirect(redirect_url)\n return HttpResponseRedirect('/')\n else: form = LoginForm(request.POST, auto_id=True)\n else:\n form = LoginForm()\n\n response_dict = { 'form' : form, 'next' : redirect_url}\n return render(request, 'auth/login.html', response_dict)\n\n\ndef logout(request):\n auth.logout(request)\n messages.success(request, _(u'Du er n\u00e5 logget ut.'))\n return HttpResponseRedirect('/')\n\n\n@sensitive_post_parameters()\ndef register(request):\n if request.user.is_authenticated():\n messages.error(request, _(u'Registrering av ny konto krever at du er logget ut.'))\n return HttpResponseRedirect('/')\n else:\n if request.method == 'POST':\n form = RegisterForm(request.POST)\n if form.is_valid():\n cleaned = form.cleaned_data\n\n # Create user\n user = User(\n username=cleaned['username'].lower(), \n first_name=cleaned['first_name'].title(), \n last_name=cleaned['last_name'].title(),\n )\n # Set remaining fields\n user.phone_number=cleaned['phone']\n user.address=cleaned['address'].title()\n user.zip_code=cleaned['zip_code']\n # Store password properly\n user.set_password(cleaned['password'])\n # Users need to be manually activated\n user.is_active = False\n user.save()\n\n # Set email address\n email = Email(\n user=user,\n email=cleaned['email'],\n )\n email.primary = True\n email.save() \n\n # Create the registration token\n token = uuid.uuid4().hex\n rt = RegisterToken(user=user, email=cleaned['email'], token=token)\n rt.save()\n\n email_message = _(u\"\"\"\nEn konto har blitt registrert p\u00e5 online.ntnu.no med denne epostadressen. Dersom du ikke\nhar utf\u00f8rt denne handlingen ber vi deg se bort fra denne eposten.\n\nFor \u00e5 bruke denne kontoen kreves det at du verifiserer epostadressen. 
Du kan gj\u00f8re\ndette ved \u00e5 bes\u00f8ke linken under.\n\nhttp://%s/auth/verify/%s/\n\nDenne lenken vil v\u00e6re gyldig i 24 timer. Dersom du beh\u00f8ver \u00e5 f\u00e5 tilsendt en ny lenke\nkan dette gj\u00f8res med funksjonen for \u00e5 gjenopprette passord.\n\"\"\") % (request.META['HTTP_HOST'], token)\n\n send_mail(_(u'Verifiser din konto'), email_message, settings.DEFAULT_FROM_EMAIL, [email.email,])\n\n messages.success(request, _(u'Registreringen var vellykket. Se tilsendt epost for verifiseringsinstrukser.'))\n\n return HttpResponseRedirect('/') \n else:\n form = RegisterForm(request.POST, auto_id=True)\n else:\n form = RegisterForm()\n\n return render(request, 'auth/register.html', {'form': form, })\n\n\ndef verify(request, token):\n rt = get_object_or_404(RegisterToken, token=token)\n \n if rt.is_valid:\n email = get_object_or_404(Email, email=rt.email)\n email.verified = True\n email.save()\n \n user = getattr(rt, 'user')\n\n # If it is a stud email, set the ntnu_username for user\n if re.match(r'[^@][email protected]', rt.email):\n user.ntnu_username = rt.email.split(\"@\")[0]\n\n user_activated = False\n if not user.is_active:\n user.is_active = True\n user_activated = True\n\n user.save()\n rt.delete()\n\n if user_activated:\n messages.success(request, _(u'Bruker %s ble aktivert. Du kan n\u00e5 logge inn.') % user.username)\n return redirect('auth_login')\n else:\n messages.success(request, _(u'Eposten %s er n\u00e5 verifisert.') % email)\n return redirect('profiles')\n else:\n messages.error(request, _(u'Denne lenken er utl\u00f8pt. Bruk gjenopprett passord for \u00e5 f\u00e5 tilsendt en ny lenke.'))\n return HttpResponseRedirect('/') \n \n\ndef recover(request):\n if request.user.is_authenticated():\n messages.error(request, _(u'Gjenoppretning av passord krever at du er logget ut.'))\n return HttpResponseRedirect('/')\n else:\n if request.method == 'POST':\n form = RecoveryForm(request.POST)\n if form.is_valid():\n email_string = form.cleaned_data['email']\n emails = Email.objects.filter(email=email_string)\n\n if len(emails) == 0:\n messages.error(request, _(u'Denne eposten er ikke registrert i v\u00e5re systemer.'))\n return HttpResponseRedirect('/') \n\n email = emails[0]\n \n # Create the registration token\n token = uuid.uuid4().hex\n rt = RegisterToken(user=email.user, email=email.email, token=token)\n rt.save()\n\n email_message = _(u\"\"\"\nVi har mottat foresp\u00f8rsel om \u00e5 gjenopprette passordet for kontoen bundet til %s.\nDersom du ikke har bedt om denne handlingen ber vi deg se bort fra denne eposten.\n\nBrukernavn: %s\n\nHvis du \u00f8nsker \u00e5 gjennomf\u00f8re en gjenoppretning av passord, bruk lenken under.\n\nhttp://%s/auth/set_password/%s/\n\nDenne lenken vil v\u00e6re gyldig i 24 timer. 
Dersom du beh\u00f8ver \u00e5 f\u00e5 tilsendt en ny lenke\nkan dette gj\u00f8res med funksjonen for \u00e5 gjenopprette passord.\n\"\"\") % (email.email, email.user.username, request.META['HTTP_HOST'], token)\n\n send_mail(_(u'Gjenoppretning av passord'), email_message, settings.DEFAULT_FROM_EMAIL, [email.email,])\n\n messages.success(request, _(u'En lenke for gjenoppretning har blitt sendt til %s.') % email.email)\n\n return HttpResponseRedirect('/') \n else:\n form = RecoveryForm(request.POST, auto_id=True)\n else:\n form = RecoveryForm()\n\n return render(request, 'auth/recover.html', {'form': form})\n\n\n@sensitive_post_parameters()\ndef set_password(request, token=None): \n if request.user.is_authenticated():\n return HttpResponseRedirect('/')\n else:\n rt = get_object_or_404(RegisterToken, token=token)\n \n if rt.is_valid:\n if request.method == 'POST':\n form = ChangePasswordForm(request.POST, auto_id=True)\n if form.is_valid():\n user = getattr(rt, 'user')\n\n user.is_active = True\n user.set_password(form.cleaned_data['new_password'])\n user.save()\n \n rt.delete()\n\n messages.success(request, _(u'Bruker %s har gjennomf\u00f8rt vellykket gjenoppretning av passord. Du kan n\u00e5 logge inn.') % user.username)\n \n return HttpResponseRedirect('/') \n else:\n \n form = ChangePasswordForm()\n\n messages.success(request, _(u'Lenken er akseptert. Vennligst skriv inn \u00f8nsket passord.'))\n\n return render(request, 'auth/set_password.html', {'form': form, 'token': token})\n\n else:\n messages.error(request, _(u'Lenken er utl\u00f8pt. Vennligst bruk gjenoppretning av passord for \u00e5 f\u00e5 tilsendt en ny lenke.'))\n return HttpResponseRedirect('/') \n", "path": "apps/authentication/views.py"}]} | 3,030 | 100 |
gh_patches_debug_30492 | rasdani/github-patches | git_diff | holoviz__hvplot-1015 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Support Python 3.11
Python 3.11 provides major performance improvements that I would like to take advantage of when using the HoloViz ecosystem.
Panel and HoloViews already support Python 3.11 according to PyPI.
Please support Python 3.11. Thanks.
</issue>
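For context, declaring support for a new CPython release in a setuptools project usually involves two pieces: advertising it (a trove classifier and/or `python_requires`) and making sure every optional dependency is actually installable on that interpreter. The sketch below only illustrates how optional extras can be gated on the running interpreter with `sys.version_info`; the package names and version pins are placeholders, not a statement about which hvplot dependencies support 3.11.

```python
import sys

# Extras that install everywhere (placeholder names).
examples = [
    "geopandas",
    "xarray >=0.18.2",
]

# Hypothetical gate: only include packages that may lag behind a brand-new
# CPython release when running on an older interpreter.
if sys.version_info < (3, 11):
    examples += [
        "numba >=0.51.0",
    ]

print(examples)
```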
<code>
[start of setup.py]
1 import json
2 import os
3 import sys
4 import shutil
5
6 from setuptools import setup, find_packages
7
8 import pyct.build
9
10 def get_setup_version(reponame):
11 """
12 Helper to get the current version from either git describe or the
13 .version file (if available).
14 """
15 basepath = os.path.split(__file__)[0]
16 version_file_path = os.path.join(basepath, reponame, '.version')
17 try:
18 from param import version
19 except:
20 version = None
21 if version is not None:
22 return version.Version.setup_version(basepath, reponame, archive_commit="$Format:%h$")
23 else:
24 print("WARNING: param>=1.6.0 unavailable. If you are installing a package, this warning can safely be ignored. If you are creating a package or otherwise operating in a git repository, you should install param>=1.6.0.")
25 return json.load(open(version_file_path))['version_string']
26
27
28 ########## dependencies ##########
29
30 install_requires = [
31 'bokeh >=1.0.0',
32 'colorcet >=2',
33 'holoviews >=1.11.0',
34 'pandas',
35 'numpy>=1.15',
36 'packaging',
37 'panel >=0.11.0',
38 ]
39
40 _examples = [
41 'geoviews >=1.6.0',
42 'numba >=0.51.0',
43 'geopandas',
44 'xarray >=0.18.2',
45 'networkx >=2.6.3',
46 'streamz >=0.3.0',
47 'intake >=0.6.5',
48 'intake-parquet >=0.2.3',
49 'intake-xarray >=0.5.0',
50 'dask >=2021.3.0',
51 'datashader >=0.6.5',
52 'notebook >=5.4',
53 'rasterio',
54 'cartopy',
55 'pyproj',
56 's3fs >=2022.1.0',
57 'scipy >=1.5.3',
58 'pillow >=8.2.0',
59 'selenium >=3.141.0',
60 'spatialpandas >=0.4.3',
61 'scikit-image >=0.17.2',
62 'python-snappy >=0.6.0',
63 'pooch >=1.6.0',
64 'fiona',
65 'rioxarray',
66 # Extra dependency of cartopy on Python 3.6 only
67 'pyepsg',
68 'matplotlib',
69 'plotly',
70 'pygraphviz',
71 'ipykernel <6.18.0' # temporary
72 ]
73
74 extras_require = {
75 'tests': [
76 'codecov',
77 'flake8',
78 'parameterized',
79 'pytest',
80 'pytest-cov',
81 'nbsmoke >=0.2.0',
82 'numpy >=1.7',
83 'matplotlib',
84 'plotly',
85 'xarray',
86 'pooch',
87 'scipy',
88 'ipywidgets',
89 'pre-commit',
90 ],
91 'examples': _examples,
92 'doc': _examples + [
93 'nbsite >=0.7.2rc2',
94 'pydata-sphinx-theme <0.10',
95 'sphinx-copybutton',
96 'sphinx-design',
97 ]
98 }
99
100 # until pyproject.toml/equivalent is widely supported (setup_requires
101 # doesn't work well with pip)
102 extras_require['build'] = [
103 'param >=1.7.0',
104 'pyct >=0.4.4',
105 'setuptools >=30.3.0' # should make this pip now
106 ]
107
108 extras_require['all'] = sorted(set(sum(extras_require.values(), [])))
109
110 ########## metadata for setuptools ##########
111
112 setup_args = dict(
113 name='hvplot',
114 version=get_setup_version("hvplot"),
115 description='A high-level plotting API for the PyData ecosystem built on HoloViews.',
116 long_description=open("README.md", mode="r", encoding="utf-8").read(),
117 long_description_content_type="text/markdown",
118 author= "Philipp Rudiger",
119 author_email= "[email protected]",
120 maintainer="HoloViz developers",
121 maintainer_email="[email protected]",
122 packages=find_packages(),
123 include_package_data=True,
124 platforms=['Windows', 'Mac OS X', 'Linux'],
125 license='BSD',
126 url='https://hvplot.pyviz.org',
127 classifiers = [
128 "License :: OSI Approved :: BSD License",
129 "Development Status :: 5 - Production/Stable",
130 "Programming Language :: Python :: 3.6",
131 "Programming Language :: Python :: 3.7",
132 "Programming Language :: Python :: 3.8",
133 "Programming Language :: Python :: 3.9",
134 "Programming Language :: Python :: 3.10",
135 "Operating System :: OS Independent",
136 "Intended Audience :: Science/Research",
137 "Intended Audience :: Developers",
138 "Natural Language :: English",
139 "Topic :: Scientific/Engineering",
140 "Topic :: Software Development :: Libraries"],
141 python_requires=">=3.6",
142 install_requires=install_requires,
143 extras_require=extras_require,
144 tests_require=extras_require['tests'],
145 entry_points={
146 'console_scripts': [],
147 'pandas_plotting_backends': [
148 'holoviews = hvplot:plotting',
149 ],
150 },
151 )
152
153
154 if __name__ == '__main__':
155 example_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
156 'hvplot','examples')
157
158 if 'develop' not in sys.argv and 'egg_info' not in sys.argv:
159 pyct.build.examples(example_path, __file__, force=True)
160
161 setup(**setup_args)
162
163 if os.path.isdir(example_path):
164 shutil.rmtree(example_path)
165
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -38,8 +38,7 @@
]
_examples = [
- 'geoviews >=1.6.0',
- 'numba >=0.51.0',
+ 'geoviews >=1.9.0',
'geopandas',
'xarray >=0.18.2',
'networkx >=2.6.3',
@@ -48,7 +47,6 @@
'intake-parquet >=0.2.3',
'intake-xarray >=0.5.0',
'dask >=2021.3.0',
- 'datashader >=0.6.5',
'notebook >=5.4',
'rasterio',
'cartopy',
@@ -57,7 +55,6 @@
'scipy >=1.5.3',
'pillow >=8.2.0',
'selenium >=3.141.0',
- 'spatialpandas >=0.4.3',
'scikit-image >=0.17.2',
'python-snappy >=0.6.0',
'pooch >=1.6.0',
@@ -71,6 +68,14 @@
'ipykernel <6.18.0' # temporary
]
+# Packages not working on python 3.11 because of numba
+if sys.version_info < (3, 11):
+ _examples += [
+ 'numba >=0.51.0',
+ 'datashader >=0.6.5',
+ 'spatialpandas >=0.4.3',
+ ]
+
extras_require = {
'tests': [
'codecov',
@@ -132,6 +137,7 @@
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -38,8 +38,7 @@\n ]\n \n _examples = [\n- 'geoviews >=1.6.0',\n- 'numba >=0.51.0',\n+ 'geoviews >=1.9.0',\n 'geopandas',\n 'xarray >=0.18.2',\n 'networkx >=2.6.3',\n@@ -48,7 +47,6 @@\n 'intake-parquet >=0.2.3',\n 'intake-xarray >=0.5.0',\n 'dask >=2021.3.0',\n- 'datashader >=0.6.5',\n 'notebook >=5.4',\n 'rasterio',\n 'cartopy',\n@@ -57,7 +55,6 @@\n 'scipy >=1.5.3',\n 'pillow >=8.2.0',\n 'selenium >=3.141.0',\n- 'spatialpandas >=0.4.3',\n 'scikit-image >=0.17.2',\n 'python-snappy >=0.6.0',\n 'pooch >=1.6.0',\n@@ -71,6 +68,14 @@\n 'ipykernel <6.18.0' # temporary\n ]\n \n+# Packages not working on python 3.11 because of numba\n+if sys.version_info < (3, 11):\n+ _examples += [\n+ 'numba >=0.51.0',\n+ 'datashader >=0.6.5',\n+ 'spatialpandas >=0.4.3',\n+ ]\n+\n extras_require = {\n 'tests': [\n 'codecov',\n@@ -132,6 +137,7 @@\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n+ \"Programming Language :: Python :: 3.11\",\n \"Operating System :: OS Independent\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Developers\",\n", "issue": "Support Python 3.11\nPython 3.11 provides major performance improvement that I would like to take advantage of when using the HoloViz ecosystem.\r\n\r\nPanel and HoloViews already support python 3.11 according to Pypi.\r\n\r\nPlease support python 3.11. Thanks.\n", "before_files": [{"content": "import json\nimport os\nimport sys\nimport shutil\n\nfrom setuptools import setup, find_packages\n\nimport pyct.build\n\ndef get_setup_version(reponame):\n \"\"\"\n Helper to get the current version from either git describe or the\n .version file (if available).\n \"\"\"\n basepath = os.path.split(__file__)[0]\n version_file_path = os.path.join(basepath, reponame, '.version')\n try:\n from param import version\n except:\n version = None\n if version is not None:\n return version.Version.setup_version(basepath, reponame, archive_commit=\"$Format:%h$\")\n else:\n print(\"WARNING: param>=1.6.0 unavailable. If you are installing a package, this warning can safely be ignored. 
If you are creating a package or otherwise operating in a git repository, you should install param>=1.6.0.\")\n return json.load(open(version_file_path))['version_string']\n\n\n########## dependencies ##########\n\ninstall_requires = [\n 'bokeh >=1.0.0',\n 'colorcet >=2',\n 'holoviews >=1.11.0',\n 'pandas',\n 'numpy>=1.15',\n 'packaging',\n 'panel >=0.11.0',\n]\n\n_examples = [\n 'geoviews >=1.6.0',\n 'numba >=0.51.0',\n 'geopandas',\n 'xarray >=0.18.2',\n 'networkx >=2.6.3',\n 'streamz >=0.3.0',\n 'intake >=0.6.5',\n 'intake-parquet >=0.2.3',\n 'intake-xarray >=0.5.0',\n 'dask >=2021.3.0',\n 'datashader >=0.6.5',\n 'notebook >=5.4',\n 'rasterio',\n 'cartopy',\n 'pyproj',\n 's3fs >=2022.1.0',\n 'scipy >=1.5.3',\n 'pillow >=8.2.0',\n 'selenium >=3.141.0',\n 'spatialpandas >=0.4.3',\n 'scikit-image >=0.17.2',\n 'python-snappy >=0.6.0',\n 'pooch >=1.6.0',\n 'fiona',\n 'rioxarray',\n # Extra dependency of cartopy on Python 3.6 only\n 'pyepsg',\n 'matplotlib',\n 'plotly',\n 'pygraphviz',\n 'ipykernel <6.18.0' # temporary\n]\n\nextras_require = {\n 'tests': [\n 'codecov',\n 'flake8',\n 'parameterized',\n 'pytest',\n 'pytest-cov',\n 'nbsmoke >=0.2.0',\n 'numpy >=1.7',\n 'matplotlib',\n 'plotly',\n 'xarray',\n 'pooch',\n 'scipy',\n 'ipywidgets',\n 'pre-commit',\n ],\n 'examples': _examples,\n 'doc': _examples + [\n 'nbsite >=0.7.2rc2',\n 'pydata-sphinx-theme <0.10',\n 'sphinx-copybutton',\n 'sphinx-design',\n ]\n}\n\n# until pyproject.toml/equivalent is widely supported (setup_requires\n# doesn't work well with pip)\nextras_require['build'] = [\n 'param >=1.7.0',\n 'pyct >=0.4.4',\n 'setuptools >=30.3.0' # should make this pip now\n]\n\nextras_require['all'] = sorted(set(sum(extras_require.values(), [])))\n\n########## metadata for setuptools ##########\n\nsetup_args = dict(\n name='hvplot',\n version=get_setup_version(\"hvplot\"),\n description='A high-level plotting API for the PyData ecosystem built on HoloViews.',\n long_description=open(\"README.md\", mode=\"r\", encoding=\"utf-8\").read(),\n long_description_content_type=\"text/markdown\",\n author= \"Philipp Rudiger\",\n author_email= \"[email protected]\",\n maintainer=\"HoloViz developers\",\n maintainer_email=\"[email protected]\",\n packages=find_packages(),\n include_package_data=True,\n platforms=['Windows', 'Mac OS X', 'Linux'],\n license='BSD',\n url='https://hvplot.pyviz.org',\n classifiers = [\n \"License :: OSI Approved :: BSD License\",\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Operating System :: OS Independent\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Developers\",\n \"Natural Language :: English\",\n \"Topic :: Scientific/Engineering\",\n \"Topic :: Software Development :: Libraries\"],\n python_requires=\">=3.6\",\n install_requires=install_requires,\n extras_require=extras_require,\n tests_require=extras_require['tests'],\n entry_points={\n 'console_scripts': [],\n 'pandas_plotting_backends': [\n 'holoviews = hvplot:plotting',\n ],\n },\n)\n\n\nif __name__ == '__main__':\n example_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),\n 'hvplot','examples')\n\n if 'develop' not in sys.argv and 'egg_info' not in sys.argv:\n pyct.build.examples(example_path, __file__, force=True)\n\n setup(**setup_args)\n\n if os.path.isdir(example_path):\n 
shutil.rmtree(example_path)\n", "path": "setup.py"}]} | 2,280 | 506 |
gh_patches_debug_27098 | rasdani/github-patches | git_diff | docker__docker-py-2846 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Bring back support OpenSSH Identityfile
# Description
Support for identity file was added in #2534
and then removed during refactoring in #2691.
# Proposal
Add it back :) in PR #2846
### General Info
```bash
❯❯❯ docker --version
Docker version 20.10.6, build 370c289
❯❯❯ docker-compose version
docker-compose version 1.29.2, build 5becea4c
docker-py version: 5.0.0
CPython version: 3.9.0
OpenSSL version: OpenSSL 1.1.1h 22 Sep 2020
```
MacOS 11.3.1
</issue>
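For readers who have not used paramiko's OpenSSH-config support: `SSHConfig.lookup()` returns a dict-like object whose keys are lowercased option names, and when an `IdentityFile` directive matches, its `identityfile` entry is a list of key paths that `SSHClient.connect()` accepts through its `key_filename` parameter. The sketch below is a standalone illustration of that mapping, not the docker-py implementation; the host name is made up.

```python
import os

import paramiko


def connect_kwargs(hostname):
    """Build paramiko connect() kwargs from ~/.ssh/config (illustrative only)."""
    params = {"hostname": hostname}
    config_path = os.path.expanduser("~/.ssh/config")
    if os.path.exists(config_path):
        conf = paramiko.SSHConfig()
        with open(config_path) as f:
            conf.parse(f)
        host_config = conf.lookup(hostname)
        if "identityfile" in host_config:
            # paramiko yields a list of paths; connect() takes it as-is.
            params["key_filename"] = host_config["identityfile"]
    return params


# client = paramiko.SSHClient()
# client.set_missing_host_key_policy(paramiko.WarningPolicy())
# client.connect(**connect_kwargs("my-docker-host"))
```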
<code>
[start of docker/transport/sshconn.py]
1 import paramiko
2 import queue
3 import urllib.parse
4 import requests.adapters
5 import logging
6 import os
7 import signal
8 import socket
9 import subprocess
10
11 from docker.transport.basehttpadapter import BaseHTTPAdapter
12 from .. import constants
13
14 import http.client as httplib
15
16 try:
17 import requests.packages.urllib3 as urllib3
18 except ImportError:
19 import urllib3
20
21 RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
22
23
24 class SSHSocket(socket.socket):
25 def __init__(self, host):
26 super().__init__(
27 socket.AF_INET, socket.SOCK_STREAM)
28 self.host = host
29 self.port = None
30 self.user = None
31 if ':' in self.host:
32 self.host, self.port = self.host.split(':')
33 if '@' in self.host:
34 self.user, self.host = self.host.split('@')
35
36 self.proc = None
37
38 def connect(self, **kwargs):
39 args = ['ssh']
40 if self.user:
41 args = args + ['-l', self.user]
42
43 if self.port:
44 args = args + ['-p', self.port]
45
46 args = args + ['--', self.host, 'docker system dial-stdio']
47
48 preexec_func = None
49 if not constants.IS_WINDOWS_PLATFORM:
50 def f():
51 signal.signal(signal.SIGINT, signal.SIG_IGN)
52 preexec_func = f
53
54 env = dict(os.environ)
55
56 # drop LD_LIBRARY_PATH and SSL_CERT_FILE
57 env.pop('LD_LIBRARY_PATH', None)
58 env.pop('SSL_CERT_FILE', None)
59
60 self.proc = subprocess.Popen(
61 ' '.join(args),
62 env=env,
63 shell=True,
64 stdout=subprocess.PIPE,
65 stdin=subprocess.PIPE,
66 preexec_fn=None if constants.IS_WINDOWS_PLATFORM else preexec_func)
67
68 def _write(self, data):
69 if not self.proc or self.proc.stdin.closed:
70 raise Exception('SSH subprocess not initiated.'
71 'connect() must be called first.')
72 written = self.proc.stdin.write(data)
73 self.proc.stdin.flush()
74 return written
75
76 def sendall(self, data):
77 self._write(data)
78
79 def send(self, data):
80 return self._write(data)
81
82 def recv(self, n):
83 if not self.proc:
84 raise Exception('SSH subprocess not initiated.'
85 'connect() must be called first.')
86 return self.proc.stdout.read(n)
87
88 def makefile(self, mode):
89 if not self.proc:
90 self.connect()
91 self.proc.stdout.channel = self
92
93 return self.proc.stdout
94
95 def close(self):
96 if not self.proc or self.proc.stdin.closed:
97 return
98 self.proc.stdin.write(b'\n\n')
99 self.proc.stdin.flush()
100 self.proc.terminate()
101
102
103 class SSHConnection(httplib.HTTPConnection):
104 def __init__(self, ssh_transport=None, timeout=60, host=None):
105 super().__init__(
106 'localhost', timeout=timeout
107 )
108 self.ssh_transport = ssh_transport
109 self.timeout = timeout
110 self.ssh_host = host
111
112 def connect(self):
113 if self.ssh_transport:
114 sock = self.ssh_transport.open_session()
115 sock.settimeout(self.timeout)
116 sock.exec_command('docker system dial-stdio')
117 else:
118 sock = SSHSocket(self.ssh_host)
119 sock.settimeout(self.timeout)
120 sock.connect()
121
122 self.sock = sock
123
124
125 class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
126 scheme = 'ssh'
127
128 def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None):
129 super().__init__(
130 'localhost', timeout=timeout, maxsize=maxsize
131 )
132 self.ssh_transport = None
133 self.timeout = timeout
134 if ssh_client:
135 self.ssh_transport = ssh_client.get_transport()
136 self.ssh_host = host
137
138 def _new_conn(self):
139 return SSHConnection(self.ssh_transport, self.timeout, self.ssh_host)
140
141 # When re-using connections, urllib3 calls fileno() on our
142 # SSH channel instance, quickly overloading our fd limit. To avoid this,
143 # we override _get_conn
144 def _get_conn(self, timeout):
145 conn = None
146 try:
147 conn = self.pool.get(block=self.block, timeout=timeout)
148
149 except AttributeError: # self.pool is None
150 raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
151
152 except queue.Empty:
153 if self.block:
154 raise urllib3.exceptions.EmptyPoolError(
155 self,
156 "Pool reached maximum size and no more "
157 "connections are allowed."
158 )
159 pass # Oh well, we'll create a new connection then
160
161 return conn or self._new_conn()
162
163
164 class SSHHTTPAdapter(BaseHTTPAdapter):
165
166 __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [
167 'pools', 'timeout', 'ssh_client', 'ssh_params', 'max_pool_size'
168 ]
169
170 def __init__(self, base_url, timeout=60,
171 pool_connections=constants.DEFAULT_NUM_POOLS,
172 max_pool_size=constants.DEFAULT_MAX_POOL_SIZE,
173 shell_out=False):
174 self.ssh_client = None
175 if not shell_out:
176 self._create_paramiko_client(base_url)
177 self._connect()
178
179 self.ssh_host = base_url
180 if base_url.startswith('ssh://'):
181 self.ssh_host = base_url[len('ssh://'):]
182
183 self.timeout = timeout
184 self.max_pool_size = max_pool_size
185 self.pools = RecentlyUsedContainer(
186 pool_connections, dispose_func=lambda p: p.close()
187 )
188 super().__init__()
189
190 def _create_paramiko_client(self, base_url):
191 logging.getLogger("paramiko").setLevel(logging.WARNING)
192 self.ssh_client = paramiko.SSHClient()
193 base_url = urllib.parse.urlparse(base_url)
194 self.ssh_params = {
195 "hostname": base_url.hostname,
196 "port": base_url.port,
197 "username": base_url.username
198 }
199 ssh_config_file = os.path.expanduser("~/.ssh/config")
200 if os.path.exists(ssh_config_file):
201 conf = paramiko.SSHConfig()
202 with open(ssh_config_file) as f:
203 conf.parse(f)
204 host_config = conf.lookup(base_url.hostname)
205 self.ssh_conf = host_config
206 if 'proxycommand' in host_config:
207 self.ssh_params["sock"] = paramiko.ProxyCommand(
208 self.ssh_conf['proxycommand']
209 )
210 if 'hostname' in host_config:
211 self.ssh_params['hostname'] = host_config['hostname']
212 if base_url.port is None and 'port' in host_config:
213 self.ssh_params['port'] = self.ssh_conf['port']
214 if base_url.username is None and 'user' in host_config:
215 self.ssh_params['username'] = self.ssh_conf['user']
216
217 self.ssh_client.load_system_host_keys()
218 self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())
219
220 def _connect(self):
221 if self.ssh_client:
222 self.ssh_client.connect(**self.ssh_params)
223
224 def get_connection(self, url, proxies=None):
225 if not self.ssh_client:
226 return SSHConnectionPool(
227 ssh_client=self.ssh_client,
228 timeout=self.timeout,
229 maxsize=self.max_pool_size,
230 host=self.ssh_host
231 )
232 with self.pools.lock:
233 pool = self.pools.get(url)
234 if pool:
235 return pool
236
237 # Connection is closed try a reconnect
238 if self.ssh_client and not self.ssh_client.get_transport():
239 self._connect()
240
241 pool = SSHConnectionPool(
242 ssh_client=self.ssh_client,
243 timeout=self.timeout,
244 maxsize=self.max_pool_size,
245 host=self.ssh_host
246 )
247 self.pools[url] = pool
248
249 return pool
250
251 def close(self):
252 super().close()
253 if self.ssh_client:
254 self.ssh_client.close()
255
[end of docker/transport/sshconn.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py
--- a/docker/transport/sshconn.py
+++ b/docker/transport/sshconn.py
@@ -205,7 +205,6 @@
with open(ssh_config_file) as f:
conf.parse(f)
host_config = conf.lookup(base_url.hostname)
- self.ssh_conf = host_config
if 'proxycommand' in host_config:
self.ssh_params["sock"] = paramiko.ProxyCommand(
self.ssh_conf['proxycommand']
@@ -213,9 +212,11 @@
if 'hostname' in host_config:
self.ssh_params['hostname'] = host_config['hostname']
if base_url.port is None and 'port' in host_config:
- self.ssh_params['port'] = self.ssh_conf['port']
+ self.ssh_params['port'] = host_config['port']
if base_url.username is None and 'user' in host_config:
- self.ssh_params['username'] = self.ssh_conf['user']
+ self.ssh_params['username'] = host_config['user']
+ if 'identityfile' in host_config:
+ self.ssh_params['key_filename'] = host_config['identityfile']
self.ssh_client.load_system_host_keys()
self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())
| {"golden_diff": "diff --git a/docker/transport/sshconn.py b/docker/transport/sshconn.py\n--- a/docker/transport/sshconn.py\n+++ b/docker/transport/sshconn.py\n@@ -205,7 +205,6 @@\n with open(ssh_config_file) as f:\n conf.parse(f)\n host_config = conf.lookup(base_url.hostname)\n- self.ssh_conf = host_config\n if 'proxycommand' in host_config:\n self.ssh_params[\"sock\"] = paramiko.ProxyCommand(\n self.ssh_conf['proxycommand']\n@@ -213,9 +212,11 @@\n if 'hostname' in host_config:\n self.ssh_params['hostname'] = host_config['hostname']\n if base_url.port is None and 'port' in host_config:\n- self.ssh_params['port'] = self.ssh_conf['port']\n+ self.ssh_params['port'] = host_config['port']\n if base_url.username is None and 'user' in host_config:\n- self.ssh_params['username'] = self.ssh_conf['user']\n+ self.ssh_params['username'] = host_config['user']\n+ if 'identityfile' in host_config:\n+ self.ssh_params['key_filename'] = host_config['identityfile']\n \n self.ssh_client.load_system_host_keys()\n self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())\n", "issue": "Bring back support OpenSSH Identityfile\n# Description\r\n\r\nSupport for identity file was added in - #2534\r\nAnd then removed during refactoring in - #2691 \r\n\r\n# Proposal\r\n\r\nAdd it back :) in PR #2846\r\n\r\n### General Info\r\n\r\n```bash\r\n\u276f\u276f\u276f docker --version\r\nDocker version 20.10.6, build 370c289\r\n\r\n\u276f\u276f\u276f docker-compose version\r\ndocker-compose version 1.29.2, build 5becea4c\r\ndocker-py version: 5.0.0\r\nCPython version: 3.9.0\r\nOpenSSL version: OpenSSL 1.1.1h 22 Sep 2020\r\n```\r\nMacOS 11.3.1\r\n\n", "before_files": [{"content": "import paramiko\nimport queue\nimport urllib.parse\nimport requests.adapters\nimport logging\nimport os\nimport signal\nimport socket\nimport subprocess\n\nfrom docker.transport.basehttpadapter import BaseHTTPAdapter\nfrom .. 
import constants\n\nimport http.client as httplib\n\ntry:\n import requests.packages.urllib3 as urllib3\nexcept ImportError:\n import urllib3\n\nRecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer\n\n\nclass SSHSocket(socket.socket):\n def __init__(self, host):\n super().__init__(\n socket.AF_INET, socket.SOCK_STREAM)\n self.host = host\n self.port = None\n self.user = None\n if ':' in self.host:\n self.host, self.port = self.host.split(':')\n if '@' in self.host:\n self.user, self.host = self.host.split('@')\n\n self.proc = None\n\n def connect(self, **kwargs):\n args = ['ssh']\n if self.user:\n args = args + ['-l', self.user]\n\n if self.port:\n args = args + ['-p', self.port]\n\n args = args + ['--', self.host, 'docker system dial-stdio']\n\n preexec_func = None\n if not constants.IS_WINDOWS_PLATFORM:\n def f():\n signal.signal(signal.SIGINT, signal.SIG_IGN)\n preexec_func = f\n\n env = dict(os.environ)\n\n # drop LD_LIBRARY_PATH and SSL_CERT_FILE\n env.pop('LD_LIBRARY_PATH', None)\n env.pop('SSL_CERT_FILE', None)\n\n self.proc = subprocess.Popen(\n ' '.join(args),\n env=env,\n shell=True,\n stdout=subprocess.PIPE,\n stdin=subprocess.PIPE,\n preexec_fn=None if constants.IS_WINDOWS_PLATFORM else preexec_func)\n\n def _write(self, data):\n if not self.proc or self.proc.stdin.closed:\n raise Exception('SSH subprocess not initiated.'\n 'connect() must be called first.')\n written = self.proc.stdin.write(data)\n self.proc.stdin.flush()\n return written\n\n def sendall(self, data):\n self._write(data)\n\n def send(self, data):\n return self._write(data)\n\n def recv(self, n):\n if not self.proc:\n raise Exception('SSH subprocess not initiated.'\n 'connect() must be called first.')\n return self.proc.stdout.read(n)\n\n def makefile(self, mode):\n if not self.proc:\n self.connect()\n self.proc.stdout.channel = self\n\n return self.proc.stdout\n\n def close(self):\n if not self.proc or self.proc.stdin.closed:\n return\n self.proc.stdin.write(b'\\n\\n')\n self.proc.stdin.flush()\n self.proc.terminate()\n\n\nclass SSHConnection(httplib.HTTPConnection):\n def __init__(self, ssh_transport=None, timeout=60, host=None):\n super().__init__(\n 'localhost', timeout=timeout\n )\n self.ssh_transport = ssh_transport\n self.timeout = timeout\n self.ssh_host = host\n\n def connect(self):\n if self.ssh_transport:\n sock = self.ssh_transport.open_session()\n sock.settimeout(self.timeout)\n sock.exec_command('docker system dial-stdio')\n else:\n sock = SSHSocket(self.ssh_host)\n sock.settimeout(self.timeout)\n sock.connect()\n\n self.sock = sock\n\n\nclass SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):\n scheme = 'ssh'\n\n def __init__(self, ssh_client=None, timeout=60, maxsize=10, host=None):\n super().__init__(\n 'localhost', timeout=timeout, maxsize=maxsize\n )\n self.ssh_transport = None\n self.timeout = timeout\n if ssh_client:\n self.ssh_transport = ssh_client.get_transport()\n self.ssh_host = host\n\n def _new_conn(self):\n return SSHConnection(self.ssh_transport, self.timeout, self.ssh_host)\n\n # When re-using connections, urllib3 calls fileno() on our\n # SSH channel instance, quickly overloading our fd limit. 
To avoid this,\n # we override _get_conn\n def _get_conn(self, timeout):\n conn = None\n try:\n conn = self.pool.get(block=self.block, timeout=timeout)\n\n except AttributeError: # self.pool is None\n raise urllib3.exceptions.ClosedPoolError(self, \"Pool is closed.\")\n\n except queue.Empty:\n if self.block:\n raise urllib3.exceptions.EmptyPoolError(\n self,\n \"Pool reached maximum size and no more \"\n \"connections are allowed.\"\n )\n pass # Oh well, we'll create a new connection then\n\n return conn or self._new_conn()\n\n\nclass SSHHTTPAdapter(BaseHTTPAdapter):\n\n __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [\n 'pools', 'timeout', 'ssh_client', 'ssh_params', 'max_pool_size'\n ]\n\n def __init__(self, base_url, timeout=60,\n pool_connections=constants.DEFAULT_NUM_POOLS,\n max_pool_size=constants.DEFAULT_MAX_POOL_SIZE,\n shell_out=False):\n self.ssh_client = None\n if not shell_out:\n self._create_paramiko_client(base_url)\n self._connect()\n\n self.ssh_host = base_url\n if base_url.startswith('ssh://'):\n self.ssh_host = base_url[len('ssh://'):]\n\n self.timeout = timeout\n self.max_pool_size = max_pool_size\n self.pools = RecentlyUsedContainer(\n pool_connections, dispose_func=lambda p: p.close()\n )\n super().__init__()\n\n def _create_paramiko_client(self, base_url):\n logging.getLogger(\"paramiko\").setLevel(logging.WARNING)\n self.ssh_client = paramiko.SSHClient()\n base_url = urllib.parse.urlparse(base_url)\n self.ssh_params = {\n \"hostname\": base_url.hostname,\n \"port\": base_url.port,\n \"username\": base_url.username\n }\n ssh_config_file = os.path.expanduser(\"~/.ssh/config\")\n if os.path.exists(ssh_config_file):\n conf = paramiko.SSHConfig()\n with open(ssh_config_file) as f:\n conf.parse(f)\n host_config = conf.lookup(base_url.hostname)\n self.ssh_conf = host_config\n if 'proxycommand' in host_config:\n self.ssh_params[\"sock\"] = paramiko.ProxyCommand(\n self.ssh_conf['proxycommand']\n )\n if 'hostname' in host_config:\n self.ssh_params['hostname'] = host_config['hostname']\n if base_url.port is None and 'port' in host_config:\n self.ssh_params['port'] = self.ssh_conf['port']\n if base_url.username is None and 'user' in host_config:\n self.ssh_params['username'] = self.ssh_conf['user']\n\n self.ssh_client.load_system_host_keys()\n self.ssh_client.set_missing_host_key_policy(paramiko.WarningPolicy())\n\n def _connect(self):\n if self.ssh_client:\n self.ssh_client.connect(**self.ssh_params)\n\n def get_connection(self, url, proxies=None):\n if not self.ssh_client:\n return SSHConnectionPool(\n ssh_client=self.ssh_client,\n timeout=self.timeout,\n maxsize=self.max_pool_size,\n host=self.ssh_host\n )\n with self.pools.lock:\n pool = self.pools.get(url)\n if pool:\n return pool\n\n # Connection is closed try a reconnect\n if self.ssh_client and not self.ssh_client.get_transport():\n self._connect()\n\n pool = SSHConnectionPool(\n ssh_client=self.ssh_client,\n timeout=self.timeout,\n maxsize=self.max_pool_size,\n host=self.ssh_host\n )\n self.pools[url] = pool\n\n return pool\n\n def close(self):\n super().close()\n if self.ssh_client:\n self.ssh_client.close()\n", "path": "docker/transport/sshconn.py"}]} | 3,095 | 309 |
gh_patches_debug_6438 | rasdani/github-patches | git_diff | quantumlib__Cirq-1937 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ValueError for PhasedXPowGate with integer phase_exponent
`PhasedXPowGate._circuit_diagram_info_` contains a format string that throws a `ValueError` when `phase_exponent` is an `int`. I know the type annotation specifies `float` but mypy allows int.
```
c = cirq.Circuit.from_ops(
cirq.PhasedXPowGate(phase_exponent=1)(cirq.LineQubit(0)),
)
print(c)
```
Output:
```
...
~/docs/cirq/cirq/ops/phased_x_gate.py in _circuit_diagram_info_(self, args)
154 else:
155 s = 'PhasedX({{:.{}}})'.format(args.precision).format(
--> 156 self.phase_exponent)
157 return protocols.CircuitDiagramInfo(
158 wire_symbols=(s,),
ValueError: Precision not allowed in integer format specifier
```
</issue>
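The traceback comes straight from Python's format-spec rules rather than anything cirq-specific: a precision in the general format spec (e.g. `'{:.3}'`) is valid for floats and strings but rejected for ints, while an explicit float presentation type such as `f` converts the int first. A quick standalone demonstration of that behaviour (illustrative only, not a claim about how the diagram code should be fixed):

```python
try:
    "{:.3}".format(1)            # int + bare precision
except ValueError as e:
    print(e)                     # Precision not allowed in integer format specifier

print("{:.3}".format(float(1)))  # 1.0   -- the same value as a float is fine
print("{:.3f}".format(1))        # 1.000 -- the 'f' presentation type converts the int
```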
<code>
[start of cirq/ops/phased_x_gate.py]
1 # Copyright 2018 The Cirq Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """An `XPowGate` conjugated by `ZPowGate`s."""
16 from typing import Union, Sequence, Tuple, Optional, cast
17
18 import math
19 import numpy as np
20 import sympy
21
22 import cirq
23 from cirq import value, protocols
24 from cirq._compat import proper_repr
25 from cirq.ops import gate_features, raw_types, op_tree, common_gates
26 from cirq.type_workarounds import NotImplementedType
27
28
29 @value.value_equality(manual_cls=True)
30 class PhasedXPowGate(gate_features.SingleQubitGate):
31 """A gate equivalent to the circuit ───Z^-p───X^t───Z^p───."""
32
33 def __init__(self,
34 *,
35 phase_exponent: Union[float, sympy.Symbol],
36 exponent: Union[float, sympy.Symbol] = 1.0,
37 global_shift: float = 0.0) -> None:
38 """
39 Args:
40 phase_exponent: The exponent on the Z gates conjugating the X gate.
41 exponent: The exponent on the X gate conjugated by Zs.
42 global_shift: How much to shift the operation's eigenvalues at
43 exponent=1.
44 """
45 self._phase_exponent = value.canonicalize_half_turns(phase_exponent)
46 self._exponent = exponent
47 self._global_shift = global_shift
48
49 def _qasm_(self,
50 args: protocols.QasmArgs,
51 qubits: Tuple[raw_types.Qid, ...]) -> Optional[str]:
52 if cirq.is_parameterized(self):
53 return None
54
55 args.validate_version('2.0')
56
57 e = cast(float, value.canonicalize_half_turns(self._exponent))
58 p = cast(float, self.phase_exponent)
59 epsilon = 10**-args.precision
60
61 if abs(e + 0.5) <= epsilon:
62 return args.format('u2({0:half_turns}, {1:half_turns}) {2};\n',
63 p + 0.5, -p - 0.5, qubits[0])
64
65 if abs(e - 0.5) <= epsilon:
66 return args.format('u2({0:half_turns}, {1:half_turns}) {2};\n',
67 p - 0.5, -p + 0.5, qubits[0])
68
69 return args.format(
70 'u3({0:half_turns}, {1:half_turns}, {2:half_turns}) {3};\n',
71 -e, p + 0.5, -p - 0.5, qubits[0])
72
73 def _decompose_(self, qubits: Sequence[raw_types.Qid]
74 ) -> op_tree.OP_TREE:
75 assert len(qubits) == 1
76 q = qubits[0]
77 z = cirq.Z(q)**self._phase_exponent
78 x = cirq.X(q)**self._exponent
79 if protocols.is_parameterized(z):
80 return NotImplemented
81 return z**-1, x, z
82
83 @property
84 def exponent(self) -> Union[float, sympy.Symbol]:
85 """The exponent on the central X gate conjugated by the Z gates."""
86 return self._exponent
87
88 @property
89 def phase_exponent(self) -> Union[float, sympy.Symbol]:
90 """The exponent on the Z gates conjugating the X gate."""
91 return self._phase_exponent
92
93 def __pow__(self, exponent: Union[float, sympy.Symbol]) -> 'PhasedXPowGate':
94 new_exponent = protocols.mul(self._exponent, exponent, NotImplemented)
95 if new_exponent is NotImplemented:
96 return NotImplemented
97 return PhasedXPowGate(phase_exponent=self._phase_exponent,
98 exponent=new_exponent,
99 global_shift=self._global_shift)
100
101 def _trace_distance_bound_(self) -> Optional[float]:
102 if self._is_parameterized_():
103 return None
104 return abs(np.sin(self._exponent * 0.5 * np.pi))
105
106 def _unitary_(self) -> Union[np.ndarray, NotImplementedType]:
107 """See `cirq.SupportsUnitary`."""
108 if self._is_parameterized_():
109 return NotImplemented
110 z = protocols.unitary(cirq.Z**self._phase_exponent)
111 x = protocols.unitary(cirq.X**self._exponent)
112 p = np.exp(1j * np.pi * self._global_shift * self._exponent)
113 return np.dot(np.dot(z, x), np.conj(z)) * p
114
115 def _pauli_expansion_(self) -> value.LinearDict[str]:
116 if self._is_parameterized_():
117 return NotImplemented
118 phase_angle = np.pi * self._phase_exponent / 2
119 angle = np.pi * self._exponent / 2
120 phase = 1j**(2 * self._exponent * (self._global_shift + 0.5))
121 return value.LinearDict({
122 'I': phase * np.cos(angle),
123 'X': -1j * phase * np.sin(angle) * np.cos(2 * phase_angle),
124 'Y': -1j * phase * np.sin(angle) * np.sin(2 * phase_angle),
125 })
126
127 def _is_parameterized_(self) -> bool:
128 """See `cirq.SupportsParameterization`."""
129 return (protocols.is_parameterized(self._exponent) or
130 protocols.is_parameterized(self._phase_exponent))
131
132 def _resolve_parameters_(self, param_resolver) -> 'PhasedXPowGate':
133 """See `cirq.SupportsParameterization`."""
134 return PhasedXPowGate(
135 phase_exponent=param_resolver.value_of(self._phase_exponent),
136 exponent=param_resolver.value_of(self._exponent),
137 global_shift=self._global_shift)
138
139 def _phase_by_(self, phase_turns, qubit_index):
140 """See `cirq.SupportsPhase`."""
141 assert qubit_index == 0
142 return PhasedXPowGate(
143 exponent=self._exponent,
144 phase_exponent=self._phase_exponent + phase_turns * 2,
145 global_shift=self._global_shift)
146
147 def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs
148 ) -> protocols.CircuitDiagramInfo:
149 """See `cirq.SupportsCircuitDiagramInfo`."""
150
151 if (isinstance(self.phase_exponent, sympy.Symbol) or
152 args.precision is None):
153 s = 'PhasedX({})'.format(self.phase_exponent)
154 else:
155 s = 'PhasedX({{:.{}}})'.format(args.precision).format(
156 self.phase_exponent)
157 return protocols.CircuitDiagramInfo(
158 wire_symbols=(s,),
159 exponent=value.canonicalize_half_turns(self._exponent))
160
161 def __str__(self):
162 info = protocols.circuit_diagram_info(self)
163 if info.exponent == 1:
164 return info.wire_symbols[0]
165 return '{}^{}'.format(info.wire_symbols[0], info.exponent)
166
167 def __repr__(self):
168 args = ['phase_exponent={}'.format(proper_repr(self.phase_exponent))]
169 if self.exponent != 1:
170 args.append('exponent={}'.format(proper_repr(self.exponent)))
171 if self._global_shift != 0:
172 args.append('global_shift={!r}'.format(self._global_shift))
173 return 'cirq.PhasedXPowGate({})'.format(', '.join(args))
174
175 def _period(self):
176 exponents = [self._global_shift, 1 + self._global_shift]
177 real_periods = [abs(2/e) for e in exponents if e != 0]
178 int_periods = [int(np.round(e)) for e in real_periods]
179 if any(i != r for i, r in zip(real_periods, int_periods)):
180 return None
181 if len(int_periods) == 1:
182 return int_periods[0]
183 return int_periods[0] * int_periods[1] / math.gcd(*int_periods)
184
185 @property
186 def _canonical_exponent(self):
187 period = self._period()
188 if not period or isinstance(self._exponent, sympy.Symbol):
189 return self._exponent
190
191 return self._exponent % period
192
193 def _value_equality_values_cls_(self):
194 if self.phase_exponent == 0:
195 return common_gates.XPowGate
196 if self.phase_exponent == 0.5:
197 return common_gates.YPowGate
198 return PhasedXPowGate
199
200 def _value_equality_values_(self):
201 if self.phase_exponent == 0:
202 return common_gates.XPowGate(
203 exponent=self._exponent,
204 global_shift=self._global_shift)._value_equality_values_()
205 if self.phase_exponent == 0.5:
206 return common_gates.YPowGate(
207 exponent=self._exponent,
208 global_shift=self._global_shift)._value_equality_values_()
209 return self.phase_exponent, self._canonical_exponent, self._global_shift
210
[end of cirq/ops/phased_x_gate.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/cirq/ops/phased_x_gate.py b/cirq/ops/phased_x_gate.py
--- a/cirq/ops/phased_x_gate.py
+++ b/cirq/ops/phased_x_gate.py
@@ -152,7 +152,7 @@
args.precision is None):
s = 'PhasedX({})'.format(self.phase_exponent)
else:
- s = 'PhasedX({{:.{}}})'.format(args.precision).format(
+ s = 'PhasedX({{:.{}f}})'.format(args.precision).format(
self.phase_exponent)
return protocols.CircuitDiagramInfo(
wire_symbols=(s,),
| {"golden_diff": "diff --git a/cirq/ops/phased_x_gate.py b/cirq/ops/phased_x_gate.py\n--- a/cirq/ops/phased_x_gate.py\n+++ b/cirq/ops/phased_x_gate.py\n@@ -152,7 +152,7 @@\n args.precision is None):\n s = 'PhasedX({})'.format(self.phase_exponent)\n else:\n- s = 'PhasedX({{:.{}}})'.format(args.precision).format(\n+ s = 'PhasedX({{:.{}f}})'.format(args.precision).format(\n self.phase_exponent)\n return protocols.CircuitDiagramInfo(\n wire_symbols=(s,),\n", "issue": "ValueError for PhasedXPowGate with integer phase_exponent\n`PhasedXPowGate._circuit_diagram_info_` contains a format string that throws a `ValueError` when `phase_exponent` is an `int`. I know the type annotation specifies `float` but mypy allows int.\r\n\r\n```\r\nc = cirq.Circuit.from_ops(\r\n cirq.PhasedXPowGate(phase_exponent=1)(cirq.LineQubit(0)),\r\n)\r\nprint(c)\r\n```\r\nOutput:\r\n```\r\n...\r\n~/docs/cirq/cirq/ops/phased_x_gate.py in _circuit_diagram_info_(self, args)\r\n 154 else:\r\n 155 s = 'PhasedX({{:.{}}})'.format(args.precision).format(\r\n--> 156 self.phase_exponent)\r\n 157 return protocols.CircuitDiagramInfo(\r\n 158 wire_symbols=(s,),\r\n\r\nValueError: Precision not allowed in integer format specifier\r\n```\n", "before_files": [{"content": "# Copyright 2018 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"An `XPowGate` conjugated by `ZPowGate`s.\"\"\"\nfrom typing import Union, Sequence, Tuple, Optional, cast\n\nimport math\nimport numpy as np\nimport sympy\n\nimport cirq\nfrom cirq import value, protocols\nfrom cirq._compat import proper_repr\nfrom cirq.ops import gate_features, raw_types, op_tree, common_gates\nfrom cirq.type_workarounds import NotImplementedType\n\n\[email protected]_equality(manual_cls=True)\nclass PhasedXPowGate(gate_features.SingleQubitGate):\n \"\"\"A gate equivalent to the circuit \u2500\u2500\u2500Z^-p\u2500\u2500\u2500X^t\u2500\u2500\u2500Z^p\u2500\u2500\u2500.\"\"\"\n\n def __init__(self,\n *,\n phase_exponent: Union[float, sympy.Symbol],\n exponent: Union[float, sympy.Symbol] = 1.0,\n global_shift: float = 0.0) -> None:\n \"\"\"\n Args:\n phase_exponent: The exponent on the Z gates conjugating the X gate.\n exponent: The exponent on the X gate conjugated by Zs.\n global_shift: How much to shift the operation's eigenvalues at\n exponent=1.\n \"\"\"\n self._phase_exponent = value.canonicalize_half_turns(phase_exponent)\n self._exponent = exponent\n self._global_shift = global_shift\n\n def _qasm_(self,\n args: protocols.QasmArgs,\n qubits: Tuple[raw_types.Qid, ...]) -> Optional[str]:\n if cirq.is_parameterized(self):\n return None\n\n args.validate_version('2.0')\n\n e = cast(float, value.canonicalize_half_turns(self._exponent))\n p = cast(float, self.phase_exponent)\n epsilon = 10**-args.precision\n\n if abs(e + 0.5) <= epsilon:\n return args.format('u2({0:half_turns}, {1:half_turns}) {2};\\n',\n p + 0.5, -p - 0.5, qubits[0])\n\n if abs(e - 0.5) <= epsilon:\n return args.format('u2({0:half_turns}, {1:half_turns}) {2};\\n',\n p - 0.5, -p + 0.5, 
qubits[0])\n\n return args.format(\n 'u3({0:half_turns}, {1:half_turns}, {2:half_turns}) {3};\\n',\n -e, p + 0.5, -p - 0.5, qubits[0])\n\n def _decompose_(self, qubits: Sequence[raw_types.Qid]\n ) -> op_tree.OP_TREE:\n assert len(qubits) == 1\n q = qubits[0]\n z = cirq.Z(q)**self._phase_exponent\n x = cirq.X(q)**self._exponent\n if protocols.is_parameterized(z):\n return NotImplemented\n return z**-1, x, z\n\n @property\n def exponent(self) -> Union[float, sympy.Symbol]:\n \"\"\"The exponent on the central X gate conjugated by the Z gates.\"\"\"\n return self._exponent\n\n @property\n def phase_exponent(self) -> Union[float, sympy.Symbol]:\n \"\"\"The exponent on the Z gates conjugating the X gate.\"\"\"\n return self._phase_exponent\n\n def __pow__(self, exponent: Union[float, sympy.Symbol]) -> 'PhasedXPowGate':\n new_exponent = protocols.mul(self._exponent, exponent, NotImplemented)\n if new_exponent is NotImplemented:\n return NotImplemented\n return PhasedXPowGate(phase_exponent=self._phase_exponent,\n exponent=new_exponent,\n global_shift=self._global_shift)\n\n def _trace_distance_bound_(self) -> Optional[float]:\n if self._is_parameterized_():\n return None\n return abs(np.sin(self._exponent * 0.5 * np.pi))\n\n def _unitary_(self) -> Union[np.ndarray, NotImplementedType]:\n \"\"\"See `cirq.SupportsUnitary`.\"\"\"\n if self._is_parameterized_():\n return NotImplemented\n z = protocols.unitary(cirq.Z**self._phase_exponent)\n x = protocols.unitary(cirq.X**self._exponent)\n p = np.exp(1j * np.pi * self._global_shift * self._exponent)\n return np.dot(np.dot(z, x), np.conj(z)) * p\n\n def _pauli_expansion_(self) -> value.LinearDict[str]:\n if self._is_parameterized_():\n return NotImplemented\n phase_angle = np.pi * self._phase_exponent / 2\n angle = np.pi * self._exponent / 2\n phase = 1j**(2 * self._exponent * (self._global_shift + 0.5))\n return value.LinearDict({\n 'I': phase * np.cos(angle),\n 'X': -1j * phase * np.sin(angle) * np.cos(2 * phase_angle),\n 'Y': -1j * phase * np.sin(angle) * np.sin(2 * phase_angle),\n })\n\n def _is_parameterized_(self) -> bool:\n \"\"\"See `cirq.SupportsParameterization`.\"\"\"\n return (protocols.is_parameterized(self._exponent) or\n protocols.is_parameterized(self._phase_exponent))\n\n def _resolve_parameters_(self, param_resolver) -> 'PhasedXPowGate':\n \"\"\"See `cirq.SupportsParameterization`.\"\"\"\n return PhasedXPowGate(\n phase_exponent=param_resolver.value_of(self._phase_exponent),\n exponent=param_resolver.value_of(self._exponent),\n global_shift=self._global_shift)\n\n def _phase_by_(self, phase_turns, qubit_index):\n \"\"\"See `cirq.SupportsPhase`.\"\"\"\n assert qubit_index == 0\n return PhasedXPowGate(\n exponent=self._exponent,\n phase_exponent=self._phase_exponent + phase_turns * 2,\n global_shift=self._global_shift)\n\n def _circuit_diagram_info_(self, args: protocols.CircuitDiagramInfoArgs\n ) -> protocols.CircuitDiagramInfo:\n \"\"\"See `cirq.SupportsCircuitDiagramInfo`.\"\"\"\n\n if (isinstance(self.phase_exponent, sympy.Symbol) or\n args.precision is None):\n s = 'PhasedX({})'.format(self.phase_exponent)\n else:\n s = 'PhasedX({{:.{}}})'.format(args.precision).format(\n self.phase_exponent)\n return protocols.CircuitDiagramInfo(\n wire_symbols=(s,),\n exponent=value.canonicalize_half_turns(self._exponent))\n\n def __str__(self):\n info = protocols.circuit_diagram_info(self)\n if info.exponent == 1:\n return info.wire_symbols[0]\n return '{}^{}'.format(info.wire_symbols[0], info.exponent)\n\n def __repr__(self):\n args = 
['phase_exponent={}'.format(proper_repr(self.phase_exponent))]\n if self.exponent != 1:\n args.append('exponent={}'.format(proper_repr(self.exponent)))\n if self._global_shift != 0:\n args.append('global_shift={!r}'.format(self._global_shift))\n return 'cirq.PhasedXPowGate({})'.format(', '.join(args))\n\n def _period(self):\n exponents = [self._global_shift, 1 + self._global_shift]\n real_periods = [abs(2/e) for e in exponents if e != 0]\n int_periods = [int(np.round(e)) for e in real_periods]\n if any(i != r for i, r in zip(real_periods, int_periods)):\n return None\n if len(int_periods) == 1:\n return int_periods[0]\n return int_periods[0] * int_periods[1] / math.gcd(*int_periods)\n\n @property\n def _canonical_exponent(self):\n period = self._period()\n if not period or isinstance(self._exponent, sympy.Symbol):\n return self._exponent\n\n return self._exponent % period\n\n def _value_equality_values_cls_(self):\n if self.phase_exponent == 0:\n return common_gates.XPowGate\n if self.phase_exponent == 0.5:\n return common_gates.YPowGate\n return PhasedXPowGate\n\n def _value_equality_values_(self):\n if self.phase_exponent == 0:\n return common_gates.XPowGate(\n exponent=self._exponent,\n global_shift=self._global_shift)._value_equality_values_()\n if self.phase_exponent == 0.5:\n return common_gates.YPowGate(\n exponent=self._exponent,\n global_shift=self._global_shift)._value_equality_values_()\n return self.phase_exponent, self._canonical_exponent, self._global_shift\n", "path": "cirq/ops/phased_x_gate.py"}]} | 3,393 | 150 |
gh_patches_debug_41554 | rasdani/github-patches | git_diff | mozilla__bugbug-644 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Remove fallback on downloading xz files
It was added in #524 because the current task has xz files as artifacts. Once we make a successful deployment, we can switch fully to zst and remove the xz fallback.
</issue>
<code>
[start of bugbug/db.py]
1 # -*- coding: utf-8 -*-
2 # This Source Code Form is subject to the terms of the Mozilla Public
3 # License, v. 2.0. If a copy of the MPL was not distributed with this file,
4 # You can obtain one at http://mozilla.org/MPL/2.0/.
5
6 import gzip
7 import io
8 import json
9 import lzma
10 import os
11 import pickle
12 import shutil
13 from contextlib import contextmanager
14 from urllib.parse import urljoin
15
16 import requests
17 import zstandard
18
19 from bugbug import utils
20
21 DATABASES = {}
22
23
24 def register(path, url, version, support_files=[]):
25 DATABASES[path] = {"url": url, "version": version, "support_files": support_files}
26
27 # Create DB parent directory.
28 parent_dir = os.path.dirname(path)
29 if not os.path.exists(parent_dir):
30 os.makedirs(parent_dir, exist_ok=True)
31
32 if not os.path.exists(f"{path}.version"):
33 with open(f"{path}.version", "w") as f:
34 f.write(str(version))
35
36
37 def is_old_version(path):
38 with open(f"{path}.version", "r") as f:
39 prev_version = int(f.read())
40
41 return DATABASES[path]["version"] > prev_version
42
43
44 def extract_file(path):
45 path, compression_type = os.path.splitext(path)
46
47 with open(path, "wb") as output_f:
48 if compression_type == ".zst":
49 dctx = zstandard.ZstdDecompressor()
50 with open(f"{path}.zst", "rb") as input_f:
51 dctx.copy_stream(input_f, output_f)
52
53 elif compression_type == ".xz":
54 with lzma.open(f"{path}.xz") as input_f:
55 shutil.copyfileobj(input_f, output_f)
56
57 else:
58 assert False, f"Unexpected compression type: {compression_type}"
59
60
61 def download_support_file(path, file_name):
62 try:
63 url = urljoin(DATABASES[path]["url"], file_name)
64 path = os.path.join(os.path.dirname(path), file_name)
65
66 print(f"Downloading {url} to {path}")
67 utils.download_check_etag(url, path)
68
69 if path.endswith(".zst") or path.endswith(".xz"):
70 extract_file(path)
71 except requests.exceptions.HTTPError:
72 try:
73 url = f"{os.path.splitext(url)[0]}.xz"
74 path = f"{os.path.splitext(path)[0]}.xz"
75
76 print(f"Downloading {url} to {path}")
77 utils.download_check_etag(url, path)
78
79 extract_file(path)
80
81 except requests.exceptions.HTTPError:
82 print(f"{file_name} is not yet available to download for {path}")
83
84
85 def download_version(path):
86 download_support_file(path, f"{os.path.basename(path)}.version")
87
88
89 # Download and extract databases.
90 def download(path, force=False, support_files_too=False):
91 if os.path.exists(path) and not force:
92 return
93
94 zst_path = f"{path}.zst"
95 xz_path = f"{path}.xz"
96
97 # Only download if the file is not there yet.
98 if (not os.path.exists(zst_path) and not os.path.exists(xz_path)) or force:
99 url = DATABASES[path]["url"]
100 try:
101 path_compressed = zst_path
102 print(f"Downloading {url} to {path_compressed}")
103 utils.download_check_etag(url, path_compressed)
104
105 except requests.exceptions.HTTPError:
106 try:
107 url_xz = f"{os.path.splitext(url)[0]}.xz"
108 path_compressed = xz_path
109 print(f"Downloading {url_xz} to {path_compressed} instead")
110 utils.download_check_etag(url_xz, path_compressed)
111
112 except requests.exceptions.HTTPError:
113 print(f"{url} is not yet available to download")
114 raise
115
116 else:
117 if os.path.exists(zst_path) or not os.path.exists(xz_path):
118 path_compressed = zst_path
119 else:
120 path_compressed = xz_path
121
122 extract_file(path_compressed)
123
124 if support_files_too:
125 for support_file in DATABASES[path]["support_files"]:
126 download_support_file(path, support_file)
127
128
129 def last_modified(path):
130 url = DATABASES[path]["url"]
131 last_modified = utils.get_last_modified(url)
132
133 if last_modified is None:
134 base_url = os.path.splitext(url)[0]
135 last_modified = utils.get_last_modified(f"{base_url}.xz")
136
137 if last_modified is None:
138 raise Exception("Last-Modified is not available")
139
140 return last_modified
141
142
143 class Store:
144 def __init__(self, fh):
145 self.fh = fh
146
147
148 class JSONStore(Store):
149 def write(self, elems):
150 for elem in elems:
151 self.fh.write((json.dumps(elem) + "\n").encode("utf-8"))
152
153 def read(self):
154 for line in io.TextIOWrapper(self.fh, encoding="utf-8"):
155 yield json.loads(line)
156
157
158 class PickleStore(Store):
159 def write(self, elems):
160 for elem in elems:
161 self.fh.write(pickle.dumps(elem))
162
163 def read(self):
164 try:
165 while True:
166 yield pickle.load(self.fh)
167 except EOFError:
168 pass
169
170
171 COMPRESSION_FORMATS = ["gz", "zstd"]
172 SERIALIZATION_FORMATS = {"json": JSONStore, "pickle": PickleStore}
173
174
175 @contextmanager
176 def _db_open(path, mode):
177 parts = str(path).split(".")
178 assert len(parts) > 1, "Extension needed to figure out serialization format"
179 if len(parts) == 2:
180 db_format = parts[-1]
181 compression = None
182 else:
183 db_format = parts[-2]
184 compression = parts[-1]
185
186 assert compression is None or compression in COMPRESSION_FORMATS
187 assert db_format in SERIALIZATION_FORMATS
188
189 store_constructor = SERIALIZATION_FORMATS[db_format]
190
191 if compression == "gz":
192 with gzip.GzipFile(path, mode) as f:
193 yield store_constructor(f)
194 elif compression == "zstd":
195 if "w" in mode or "a" in mode:
196 cctx = zstandard.ZstdCompressor()
197 with open(path, mode) as f:
198 with cctx.stream_writer(f) as writer:
199 yield store_constructor(writer)
200 else:
201 dctx = zstandard.ZstdDecompressor()
202 with open(path, mode) as f:
203 with dctx.stream_reader(f) as reader:
204 yield store_constructor(reader)
205 else:
206 with open(path, mode) as f:
207 yield store_constructor(f)
208
209
210 def read(path):
211 assert path in DATABASES
212
213 if not os.path.exists(path):
214 return ()
215
216 with _db_open(path, "rb") as store:
217 for elem in store.read():
218 yield elem
219
220
221 def write(path, elems):
222 assert path in DATABASES
223
224 with _db_open(path, "wb") as store:
225 store.write(elems)
226
227
228 def append(path, elems):
229 assert path in DATABASES
230
231 with _db_open(path, "ab") as store:
232 store.write(elems)
233
234
235 def delete(path, match):
236 assert path in DATABASES
237
238 dirname, basename = os.path.split(path)
239 new_path = os.path.join(dirname, f"new_{basename}")
240
241 def matching_elems(store):
242 for elem in store.read():
243 if not match(elem):
244 yield elem
245
246 with _db_open(new_path, "wb") as wstore:
247 with _db_open(path, "rb") as rstore:
248 wstore.write(matching_elems(rstore))
249
250 os.unlink(path)
251 os.rename(new_path, path)
252
[end of bugbug/db.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bugbug/db.py b/bugbug/db.py
--- a/bugbug/db.py
+++ b/bugbug/db.py
@@ -6,10 +6,8 @@
import gzip
import io
import json
-import lzma
import os
import pickle
-import shutil
from contextlib import contextmanager
from urllib.parse import urljoin
@@ -49,11 +47,6 @@
dctx = zstandard.ZstdDecompressor()
with open(f"{path}.zst", "rb") as input_f:
dctx.copy_stream(input_f, output_f)
-
- elif compression_type == ".xz":
- with lzma.open(f"{path}.xz") as input_f:
- shutil.copyfileobj(input_f, output_f)
-
else:
assert False, f"Unexpected compression type: {compression_type}"
@@ -66,20 +59,10 @@
print(f"Downloading {url} to {path}")
utils.download_check_etag(url, path)
- if path.endswith(".zst") or path.endswith(".xz"):
+ if path.endswith(".zst"):
extract_file(path)
except requests.exceptions.HTTPError:
- try:
- url = f"{os.path.splitext(url)[0]}.xz"
- path = f"{os.path.splitext(path)[0]}.xz"
-
- print(f"Downloading {url} to {path}")
- utils.download_check_etag(url, path)
-
- extract_file(path)
-
- except requests.exceptions.HTTPError:
- print(f"{file_name} is not yet available to download for {path}")
+ print(f"{file_name} is not yet available to download for {path}")
def download_version(path):
@@ -92,34 +75,19 @@
return
zst_path = f"{path}.zst"
- xz_path = f"{path}.xz"
# Only download if the file is not there yet.
- if (not os.path.exists(zst_path) and not os.path.exists(xz_path)) or force:
+ if not os.path.exists(zst_path) or force:
url = DATABASES[path]["url"]
try:
- path_compressed = zst_path
- print(f"Downloading {url} to {path_compressed}")
- utils.download_check_etag(url, path_compressed)
+ print(f"Downloading {url} to {zst_path}")
+ utils.download_check_etag(url, zst_path)
except requests.exceptions.HTTPError:
- try:
- url_xz = f"{os.path.splitext(url)[0]}.xz"
- path_compressed = xz_path
- print(f"Downloading {url_xz} to {path_compressed} instead")
- utils.download_check_etag(url_xz, path_compressed)
-
- except requests.exceptions.HTTPError:
- print(f"{url} is not yet available to download")
- raise
+ print(f"{url} is not yet available to download")
+ raise
- else:
- if os.path.exists(zst_path) or not os.path.exists(xz_path):
- path_compressed = zst_path
- else:
- path_compressed = xz_path
-
- extract_file(path_compressed)
+ extract_file(zst_path)
if support_files_too:
for support_file in DATABASES[path]["support_files"]:
@@ -130,10 +98,6 @@
url = DATABASES[path]["url"]
last_modified = utils.get_last_modified(url)
- if last_modified is None:
- base_url = os.path.splitext(url)[0]
- last_modified = utils.get_last_modified(f"{base_url}.xz")
-
if last_modified is None:
raise Exception("Last-Modified is not available")
| {"golden_diff": "diff --git a/bugbug/db.py b/bugbug/db.py\n--- a/bugbug/db.py\n+++ b/bugbug/db.py\n@@ -6,10 +6,8 @@\n import gzip\n import io\n import json\n-import lzma\n import os\n import pickle\n-import shutil\n from contextlib import contextmanager\n from urllib.parse import urljoin\n \n@@ -49,11 +47,6 @@\n dctx = zstandard.ZstdDecompressor()\n with open(f\"{path}.zst\", \"rb\") as input_f:\n dctx.copy_stream(input_f, output_f)\n-\n- elif compression_type == \".xz\":\n- with lzma.open(f\"{path}.xz\") as input_f:\n- shutil.copyfileobj(input_f, output_f)\n-\n else:\n assert False, f\"Unexpected compression type: {compression_type}\"\n \n@@ -66,20 +59,10 @@\n print(f\"Downloading {url} to {path}\")\n utils.download_check_etag(url, path)\n \n- if path.endswith(\".zst\") or path.endswith(\".xz\"):\n+ if path.endswith(\".zst\"):\n extract_file(path)\n except requests.exceptions.HTTPError:\n- try:\n- url = f\"{os.path.splitext(url)[0]}.xz\"\n- path = f\"{os.path.splitext(path)[0]}.xz\"\n-\n- print(f\"Downloading {url} to {path}\")\n- utils.download_check_etag(url, path)\n-\n- extract_file(path)\n-\n- except requests.exceptions.HTTPError:\n- print(f\"{file_name} is not yet available to download for {path}\")\n+ print(f\"{file_name} is not yet available to download for {path}\")\n \n \n def download_version(path):\n@@ -92,34 +75,19 @@\n return\n \n zst_path = f\"{path}.zst\"\n- xz_path = f\"{path}.xz\"\n \n # Only download if the file is not there yet.\n- if (not os.path.exists(zst_path) and not os.path.exists(xz_path)) or force:\n+ if not os.path.exists(zst_path) or force:\n url = DATABASES[path][\"url\"]\n try:\n- path_compressed = zst_path\n- print(f\"Downloading {url} to {path_compressed}\")\n- utils.download_check_etag(url, path_compressed)\n+ print(f\"Downloading {url} to {zst_path}\")\n+ utils.download_check_etag(url, zst_path)\n \n except requests.exceptions.HTTPError:\n- try:\n- url_xz = f\"{os.path.splitext(url)[0]}.xz\"\n- path_compressed = xz_path\n- print(f\"Downloading {url_xz} to {path_compressed} instead\")\n- utils.download_check_etag(url_xz, path_compressed)\n-\n- except requests.exceptions.HTTPError:\n- print(f\"{url} is not yet available to download\")\n- raise\n+ print(f\"{url} is not yet available to download\")\n+ raise\n \n- else:\n- if os.path.exists(zst_path) or not os.path.exists(xz_path):\n- path_compressed = zst_path\n- else:\n- path_compressed = xz_path\n-\n- extract_file(path_compressed)\n+ extract_file(zst_path)\n \n if support_files_too:\n for support_file in DATABASES[path][\"support_files\"]:\n@@ -130,10 +98,6 @@\n url = DATABASES[path][\"url\"]\n last_modified = utils.get_last_modified(url)\n \n- if last_modified is None:\n- base_url = os.path.splitext(url)[0]\n- last_modified = utils.get_last_modified(f\"{base_url}.xz\")\n-\n if last_modified is None:\n raise Exception(\"Last-Modified is not available\")\n", "issue": "Remove fallback on downloading xz files\nIt's added in #524 because the current task have xz as artifacts. Once we make a successful deployment, we can switch to zst fully and remove the xz fallback.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. 
If a copy of the MPL was not distributed with this file,\n# You can obtain one at http://mozilla.org/MPL/2.0/.\n\nimport gzip\nimport io\nimport json\nimport lzma\nimport os\nimport pickle\nimport shutil\nfrom contextlib import contextmanager\nfrom urllib.parse import urljoin\n\nimport requests\nimport zstandard\n\nfrom bugbug import utils\n\nDATABASES = {}\n\n\ndef register(path, url, version, support_files=[]):\n DATABASES[path] = {\"url\": url, \"version\": version, \"support_files\": support_files}\n\n # Create DB parent directory.\n parent_dir = os.path.dirname(path)\n if not os.path.exists(parent_dir):\n os.makedirs(parent_dir, exist_ok=True)\n\n if not os.path.exists(f\"{path}.version\"):\n with open(f\"{path}.version\", \"w\") as f:\n f.write(str(version))\n\n\ndef is_old_version(path):\n with open(f\"{path}.version\", \"r\") as f:\n prev_version = int(f.read())\n\n return DATABASES[path][\"version\"] > prev_version\n\n\ndef extract_file(path):\n path, compression_type = os.path.splitext(path)\n\n with open(path, \"wb\") as output_f:\n if compression_type == \".zst\":\n dctx = zstandard.ZstdDecompressor()\n with open(f\"{path}.zst\", \"rb\") as input_f:\n dctx.copy_stream(input_f, output_f)\n\n elif compression_type == \".xz\":\n with lzma.open(f\"{path}.xz\") as input_f:\n shutil.copyfileobj(input_f, output_f)\n\n else:\n assert False, f\"Unexpected compression type: {compression_type}\"\n\n\ndef download_support_file(path, file_name):\n try:\n url = urljoin(DATABASES[path][\"url\"], file_name)\n path = os.path.join(os.path.dirname(path), file_name)\n\n print(f\"Downloading {url} to {path}\")\n utils.download_check_etag(url, path)\n\n if path.endswith(\".zst\") or path.endswith(\".xz\"):\n extract_file(path)\n except requests.exceptions.HTTPError:\n try:\n url = f\"{os.path.splitext(url)[0]}.xz\"\n path = f\"{os.path.splitext(path)[0]}.xz\"\n\n print(f\"Downloading {url} to {path}\")\n utils.download_check_etag(url, path)\n\n extract_file(path)\n\n except requests.exceptions.HTTPError:\n print(f\"{file_name} is not yet available to download for {path}\")\n\n\ndef download_version(path):\n download_support_file(path, f\"{os.path.basename(path)}.version\")\n\n\n# Download and extract databases.\ndef download(path, force=False, support_files_too=False):\n if os.path.exists(path) and not force:\n return\n\n zst_path = f\"{path}.zst\"\n xz_path = f\"{path}.xz\"\n\n # Only download if the file is not there yet.\n if (not os.path.exists(zst_path) and not os.path.exists(xz_path)) or force:\n url = DATABASES[path][\"url\"]\n try:\n path_compressed = zst_path\n print(f\"Downloading {url} to {path_compressed}\")\n utils.download_check_etag(url, path_compressed)\n\n except requests.exceptions.HTTPError:\n try:\n url_xz = f\"{os.path.splitext(url)[0]}.xz\"\n path_compressed = xz_path\n print(f\"Downloading {url_xz} to {path_compressed} instead\")\n utils.download_check_etag(url_xz, path_compressed)\n\n except requests.exceptions.HTTPError:\n print(f\"{url} is not yet available to download\")\n raise\n\n else:\n if os.path.exists(zst_path) or not os.path.exists(xz_path):\n path_compressed = zst_path\n else:\n path_compressed = xz_path\n\n extract_file(path_compressed)\n\n if support_files_too:\n for support_file in DATABASES[path][\"support_files\"]:\n download_support_file(path, support_file)\n\n\ndef last_modified(path):\n url = DATABASES[path][\"url\"]\n last_modified = utils.get_last_modified(url)\n\n if last_modified is None:\n base_url = os.path.splitext(url)[0]\n last_modified = 
utils.get_last_modified(f\"{base_url}.xz\")\n\n if last_modified is None:\n raise Exception(\"Last-Modified is not available\")\n\n return last_modified\n\n\nclass Store:\n def __init__(self, fh):\n self.fh = fh\n\n\nclass JSONStore(Store):\n def write(self, elems):\n for elem in elems:\n self.fh.write((json.dumps(elem) + \"\\n\").encode(\"utf-8\"))\n\n def read(self):\n for line in io.TextIOWrapper(self.fh, encoding=\"utf-8\"):\n yield json.loads(line)\n\n\nclass PickleStore(Store):\n def write(self, elems):\n for elem in elems:\n self.fh.write(pickle.dumps(elem))\n\n def read(self):\n try:\n while True:\n yield pickle.load(self.fh)\n except EOFError:\n pass\n\n\nCOMPRESSION_FORMATS = [\"gz\", \"zstd\"]\nSERIALIZATION_FORMATS = {\"json\": JSONStore, \"pickle\": PickleStore}\n\n\n@contextmanager\ndef _db_open(path, mode):\n parts = str(path).split(\".\")\n assert len(parts) > 1, \"Extension needed to figure out serialization format\"\n if len(parts) == 2:\n db_format = parts[-1]\n compression = None\n else:\n db_format = parts[-2]\n compression = parts[-1]\n\n assert compression is None or compression in COMPRESSION_FORMATS\n assert db_format in SERIALIZATION_FORMATS\n\n store_constructor = SERIALIZATION_FORMATS[db_format]\n\n if compression == \"gz\":\n with gzip.GzipFile(path, mode) as f:\n yield store_constructor(f)\n elif compression == \"zstd\":\n if \"w\" in mode or \"a\" in mode:\n cctx = zstandard.ZstdCompressor()\n with open(path, mode) as f:\n with cctx.stream_writer(f) as writer:\n yield store_constructor(writer)\n else:\n dctx = zstandard.ZstdDecompressor()\n with open(path, mode) as f:\n with dctx.stream_reader(f) as reader:\n yield store_constructor(reader)\n else:\n with open(path, mode) as f:\n yield store_constructor(f)\n\n\ndef read(path):\n assert path in DATABASES\n\n if not os.path.exists(path):\n return ()\n\n with _db_open(path, \"rb\") as store:\n for elem in store.read():\n yield elem\n\n\ndef write(path, elems):\n assert path in DATABASES\n\n with _db_open(path, \"wb\") as store:\n store.write(elems)\n\n\ndef append(path, elems):\n assert path in DATABASES\n\n with _db_open(path, \"ab\") as store:\n store.write(elems)\n\n\ndef delete(path, match):\n assert path in DATABASES\n\n dirname, basename = os.path.split(path)\n new_path = os.path.join(dirname, f\"new_{basename}\")\n\n def matching_elems(store):\n for elem in store.read():\n if not match(elem):\n yield elem\n\n with _db_open(new_path, \"wb\") as wstore:\n with _db_open(path, \"rb\") as rstore:\n wstore.write(matching_elems(rstore))\n\n os.unlink(path)\n os.rename(new_path, path)\n", "path": "bugbug/db.py"}]} | 2,927 | 843 |
gh_patches_debug_8000 | rasdani/github-patches | git_diff | arviz-devs__arviz-203 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Changing the order of plotting and file load seems to cause netcdf errors
For some reason it seems that changing the order of plotting and loading data causes failures; I'm not entirely sure why yet. This only occurs for me on Ubuntu 16.04 with the latest version of arviz. It does not occur on my macOS laptop.
Load, Load, Plot, Plot doesn't work. Stack trace attached:
[stack_trace.txt](https://github.com/arviz-devs/arviz/files/2349232/stack_trace.txt)
```
import arviz as az
az.style.use('arviz-darkgrid')
non_centered = az.load_arviz_data('non_centered_eight')
centered = az.load_arviz_data('centered_eight')
az.violintraceplot(non_centered, var_names=["mu", "tau"], textsize=8)
az.violintraceplot(centered, var_names=["mu", "tau"], textsize=8)
```
Load, Plot, Load, Plot works
```
import arviz as az
az.style.use('arviz-darkgrid')
non_centered = az.load_arviz_data('non_centered_eight')
az.violintraceplot(non_centered, var_names=["mu", "tau"], textsize=8)
centered = az.load_arviz_data('centered_eight')
az.violintraceplot(centered, var_names=["mu", "tau"], textsize=8)
```
</issue>
<code>
[start of arviz/inference_data.py]
1 """Data structure for using netcdf groups with xarray."""
2 import netCDF4 as nc
3 import xarray as xr
4
5
6 class InferenceData():
7 """Container for accessing netCDF files using xarray."""
8
9 def __init__(self, *_, **kwargs):
10 """Initialize InferenceData object from keyword xarray datasets.
11
12 Examples
13 --------
14 InferenceData(posterior=posterior, prior=prior)
15
16 Parameters
17 ----------
18 kwargs :
19 Keyword arguments of xarray datasets
20 """
21 self._groups = []
22 for key, dataset in kwargs.items():
23 if dataset is None:
24 continue
25 elif not isinstance(dataset, xr.Dataset):
26 raise ValueError('Arguments to InferenceData must be xarray Datasets '
27 '(argument "{}" was type "{}")'.format(key, type(dataset)))
28 setattr(self, key, dataset)
29 self._groups.append(key)
30
31 def __repr__(self):
32 """Make string representation of object."""
33 return 'Inference data with groups:\n\t> {options}'.format(
34 options='\n\t> '.join(self._groups)
35 )
36
37 @staticmethod
38 def from_netcdf(filename):
39 """Initialize object from a netcdf file.
40
41 Expects that the file will have groups, each of which can be loaded by xarray.
42
43 Parameters
44 ----------
45 filename : str
46 location of netcdf file
47
48 Returns
49 -------
50 InferenceData object
51 """
52 groups = {}
53 for group in nc.Dataset(filename, mode='r').groups:
54 groups[group] = xr.open_dataset(filename, group=group, autoclose=True)
55 return InferenceData(**groups)
56
57 def to_netcdf(self, filename):
58 """Write InferenceData to file using netcdf4.
59
60 Parameters
61 ----------
62 filename : str
63 Location to write to
64
65 Returns
66 -------
67 str
68 Location of netcdf file
69 """
70 mode = 'w' # overwrite first, then append
71 for group in self._groups:
72 data = getattr(self, group)
73 data.to_netcdf(filename, mode=mode, group=group)
74 data.close()
75 mode = 'a'
76 return filename
77
[end of arviz/inference_data.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/arviz/inference_data.py b/arviz/inference_data.py
--- a/arviz/inference_data.py
+++ b/arviz/inference_data.py
@@ -50,8 +50,12 @@
InferenceData object
"""
groups = {}
- for group in nc.Dataset(filename, mode='r').groups:
- groups[group] = xr.open_dataset(filename, group=group, autoclose=True)
+ with nc.Dataset(filename, mode='r') as data:
+ data_groups = list(data.groups)
+
+ for group in data_groups:
+ with xr.open_dataset(filename, group=group) as data:
+ groups[group] = data
return InferenceData(**groups)
def to_netcdf(self, filename):
| {"golden_diff": "diff --git a/arviz/inference_data.py b/arviz/inference_data.py\n--- a/arviz/inference_data.py\n+++ b/arviz/inference_data.py\n@@ -50,8 +50,12 @@\n InferenceData object\n \"\"\"\n groups = {}\n- for group in nc.Dataset(filename, mode='r').groups:\n- groups[group] = xr.open_dataset(filename, group=group, autoclose=True)\n+ with nc.Dataset(filename, mode='r') as data:\n+ data_groups = list(data.groups)\n+\n+ for group in data_groups:\n+ with xr.open_dataset(filename, group=group) as data:\n+ groups[group] = data\n return InferenceData(**groups)\n \n def to_netcdf(self, filename):\n", "issue": "Changing the order of plotting and file load seems to cause netcdf errors\nFor some reason it seems that changing the order of plotting and loading data causes failures, not entirely sure why yet. This is only occurring for me on Ubuntu 16.04 with the latest version of arviz. It does not occur on my osx laptop\r\n\r\n\r\nLoad, Load, Plot, Plot doesn't work. Stack trace attached\r\n[stack_trace.txt](https://github.com/arviz-devs/arviz/files/2349232/stack_trace.txt)\r\n\r\n```\r\nimport arviz as az\r\naz.style.use('arviz-darkgrid')\r\n\r\nnon_centered = az.load_arviz_data('non_centered_eight')\r\ncentered = az.load_arviz_data('centered_eight')\r\naz.violintraceplot(non_centered, var_names=[\"mu\", \"tau\"], textsize=8)\r\naz.violintraceplot(centered, var_names=[\"mu\", \"tau\"], textsize=8)\r\n```\r\n\r\nLoad, Plot, Load, Plot works\r\n\r\n```\r\nimport arviz as az\r\naz.style.use('arviz-darkgrid')\r\n\r\nnon_centered = az.load_arviz_data('non_centered_eight')\r\naz.violintraceplot(non_centered, var_names=[\"mu\", \"tau\"], textsize=8)\r\n\r\ncentered = az.load_arviz_data('centered_eight')\r\naz.violintraceplot(centered, var_names=[\"mu\", \"tau\"], textsize=8)\r\n```\n", "before_files": [{"content": "\"\"\"Data structure for using netcdf groups with xarray.\"\"\"\nimport netCDF4 as nc\nimport xarray as xr\n\n\nclass InferenceData():\n \"\"\"Container for accessing netCDF files using xarray.\"\"\"\n\n def __init__(self, *_, **kwargs):\n \"\"\"Initialize InferenceData object from keyword xarray datasets.\n\n Examples\n --------\n InferenceData(posterior=posterior, prior=prior)\n\n Parameters\n ----------\n kwargs :\n Keyword arguments of xarray datasets\n \"\"\"\n self._groups = []\n for key, dataset in kwargs.items():\n if dataset is None:\n continue\n elif not isinstance(dataset, xr.Dataset):\n raise ValueError('Arguments to InferenceData must be xarray Datasets '\n '(argument \"{}\" was type \"{}\")'.format(key, type(dataset)))\n setattr(self, key, dataset)\n self._groups.append(key)\n\n def __repr__(self):\n \"\"\"Make string representation of object.\"\"\"\n return 'Inference data with groups:\\n\\t> {options}'.format(\n options='\\n\\t> '.join(self._groups)\n )\n\n @staticmethod\n def from_netcdf(filename):\n \"\"\"Initialize object from a netcdf file.\n\n Expects that the file will have groups, each of which can be loaded by xarray.\n\n Parameters\n ----------\n filename : str\n location of netcdf file\n\n Returns\n -------\n InferenceData object\n \"\"\"\n groups = {}\n for group in nc.Dataset(filename, mode='r').groups:\n groups[group] = xr.open_dataset(filename, group=group, autoclose=True)\n return InferenceData(**groups)\n\n def to_netcdf(self, filename):\n \"\"\"Write InferenceData to file using netcdf4.\n\n Parameters\n ----------\n filename : str\n Location to write to\n\n Returns\n -------\n str\n Location of netcdf file\n \"\"\"\n mode = 'w' # overwrite first, then 
append\n for group in self._groups:\n data = getattr(self, group)\n data.to_netcdf(filename, mode=mode, group=group)\n data.close()\n mode = 'a'\n return filename\n", "path": "arviz/inference_data.py"}]} | 1,467 | 169 |
gh_patches_debug_1379 | rasdani/github-patches | git_diff | zulip__zulip-29412 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Go to newly created stream (with first-time modal)
Even after #29154, users find it hard to navigate to a newly created stream. To address this, we should:
1. Take the user directly to the stream they just created. To avoid a potentially confusing interleaved view, we should go to the most recent topic in the stream (currently "stream events", but might be "general chat" in the future).
2. The first time that a user creates a stream, show an explanatory modal (wording to be finalized when working on the PR):
----
## Stream **#{stream name}** created!
You will now see the stream you created. If you'd like to go back to stream settings, click on the name of the stream at the top of your Zulip window, or use the **back** button in your browser or desktop app.
[Continue]
---
Since we are changing the behavior, it's fine to show this once to existing users.
[CZO thread](https://chat.zulip.org/#narrow/stream/137-feedback/topic/user.20research.3A.20going.20to.20a.20new.20stream/near/1744305)
</issue>
<code>
[start of zerver/lib/hotspots.py]
1 # See https://zulip.readthedocs.io/en/latest/subsystems/hotspots.html
2 # for documentation on this subsystem.
3 from dataclasses import dataclass
4 from typing import Any, Dict, List, Optional, Union
5
6 from django.conf import settings
7 from django.utils.translation import gettext_lazy
8 from django_stubs_ext import StrPromise
9
10 from zerver.models import OnboardingStep, UserProfile
11
12
13 @dataclass
14 class Hotspot:
15 name: str
16 title: Optional[StrPromise]
17 description: Optional[StrPromise]
18 has_trigger: bool = False
19
20 def to_dict(self, delay: float = 0) -> Dict[str, Union[str, float, bool]]:
21 return {
22 "type": "hotspot",
23 "name": self.name,
24 "title": str(self.title),
25 "description": str(self.description),
26 "delay": delay,
27 "has_trigger": self.has_trigger,
28 }
29
30
31 INTRO_HOTSPOTS: List[Hotspot] = [
32 Hotspot(
33 name="intro_streams",
34 title=gettext_lazy("Catch up on a stream"),
35 description=gettext_lazy(
36 "Messages sent to a stream are seen by everyone subscribed "
37 "to that stream. Try clicking on one of the stream links below."
38 ),
39 ),
40 Hotspot(
41 name="intro_topics",
42 title=gettext_lazy("Topics"),
43 description=gettext_lazy(
44 "Every message has a topic. Topics keep conversations "
45 "easy to follow, and make it easy to reply to conversations that start "
46 "while you are offline."
47 ),
48 ),
49 Hotspot(
50 # In theory, this should be renamed to intro_personal, since
51 # it's no longer attached to the gear menu, but renaming these
52 # requires a migration that is not worth doing at this time.
53 name="intro_gear",
54 title=gettext_lazy("Settings"),
55 description=gettext_lazy("Go to Settings to configure your notifications and preferences."),
56 ),
57 Hotspot(
58 name="intro_compose",
59 title=gettext_lazy("Compose"),
60 description=gettext_lazy(
61 "Click here to start a new conversation. Pick a topic "
62 "(2-3 words is best), and give it a go!"
63 ),
64 ),
65 ]
66
67
68 NON_INTRO_HOTSPOTS: List[Hotspot] = []
69
70
71 @dataclass
72 class OneTimeNotice:
73 name: str
74
75 def to_dict(self) -> Dict[str, str]:
76 return {
77 "type": "one_time_notice",
78 "name": self.name,
79 }
80
81
82 ONE_TIME_NOTICES: List[OneTimeNotice] = [
83 OneTimeNotice(
84 name="visibility_policy_banner",
85 ),
86 OneTimeNotice(
87 name="intro_inbox_view_modal",
88 ),
89 OneTimeNotice(
90 name="intro_recent_view_modal",
91 ),
92 ]
93
94 # We would most likely implement new hotspots in the future that aren't
95 # a part of the initial tutorial. To that end, classifying them into
96 # categories which are aggregated in ALL_HOTSPOTS, seems like a good start.
97 ALL_HOTSPOTS = [*INTRO_HOTSPOTS, *NON_INTRO_HOTSPOTS]
98 ALL_ONBOARDING_STEPS: List[Union[Hotspot, OneTimeNotice]] = [*ALL_HOTSPOTS, *ONE_TIME_NOTICES]
99
100
101 def get_next_onboarding_steps(user: UserProfile) -> List[Dict[str, Any]]:
102 # For manual testing, it can be convenient to set
103 # ALWAYS_SEND_ALL_HOTSPOTS=True in `zproject/dev_settings.py` to
104 # make it easy to click on all of the hotspots.
105 #
106 # Since this is just for development purposes, it's convenient for us to send
107 # all the hotspots rather than any specific category.
108 if settings.ALWAYS_SEND_ALL_HOTSPOTS:
109 return [hotspot.to_dict() for hotspot in ALL_HOTSPOTS]
110
111 # If a Zulip server has disabled the tutorial, never send hotspots.
112 if not settings.TUTORIAL_ENABLED:
113 return []
114
115 seen_onboarding_steps = frozenset(
116 OnboardingStep.objects.filter(user=user).values_list("onboarding_step", flat=True)
117 )
118
119 onboarding_steps: List[Dict[str, Any]] = [hotspot.to_dict() for hotspot in NON_INTRO_HOTSPOTS]
120
121 for one_time_notice in ONE_TIME_NOTICES:
122 if one_time_notice.name in seen_onboarding_steps:
123 continue
124 onboarding_steps.append(one_time_notice.to_dict())
125
126 if user.tutorial_status == UserProfile.TUTORIAL_FINISHED:
127 return onboarding_steps
128
129 for hotspot in INTRO_HOTSPOTS:
130 if hotspot.name in seen_onboarding_steps:
131 continue
132
133 onboarding_steps.append(hotspot.to_dict(delay=0.5))
134 return onboarding_steps
135
136 user.tutorial_status = UserProfile.TUTORIAL_FINISHED
137 user.save(update_fields=["tutorial_status"])
138 return onboarding_steps
139
140
141 def copy_hotspots(source_profile: UserProfile, target_profile: UserProfile) -> None:
142 for userhotspot in frozenset(OnboardingStep.objects.filter(user=source_profile)):
143 OnboardingStep.objects.create(
144 user=target_profile,
145 onboarding_step=userhotspot.onboarding_step,
146 timestamp=userhotspot.timestamp,
147 )
148
149 target_profile.tutorial_status = source_profile.tutorial_status
150 target_profile.onboarding_steps = source_profile.onboarding_steps
151 target_profile.save(update_fields=["tutorial_status", "onboarding_steps"])
152
[end of zerver/lib/hotspots.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/zerver/lib/hotspots.py b/zerver/lib/hotspots.py
--- a/zerver/lib/hotspots.py
+++ b/zerver/lib/hotspots.py
@@ -89,6 +89,9 @@
OneTimeNotice(
name="intro_recent_view_modal",
),
+ OneTimeNotice(
+ name="first_stream_created_banner",
+ ),
]
# We would most likely implement new hotspots in the future that aren't
| {"golden_diff": "diff --git a/zerver/lib/hotspots.py b/zerver/lib/hotspots.py\n--- a/zerver/lib/hotspots.py\n+++ b/zerver/lib/hotspots.py\n@@ -89,6 +89,9 @@\n OneTimeNotice(\n name=\"intro_recent_view_modal\",\n ),\n+ OneTimeNotice(\n+ name=\"first_stream_created_banner\",\n+ ),\n ]\n \n # We would most likely implement new hotspots in the future that aren't\n", "issue": "Go to newly created stream (with first-time modal)\nEven after #29154, users find it hard to navigate to a newly created stream. To address this, we should:\r\n\r\n1. Take the user directly to the stream they just created. To avoid a potentially confusing interleaved view, we should go to the most recent topic in the stream (currently \"stream events\", but might be \"general chat\" in the future).\r\n2. The first time that a user creates a stream, show an explanatory modal (wording to be finalized when working on the PR):\r\n\r\n----\r\n\r\n## Stream **#{stream name}** created!\r\n\r\nYou will now see the stream you created. If you'd like to go back to stream settings, click on the name of the stream at the top of your Zulip window, or use the **back** button in your browser or desktop app.\r\n\r\n[Continue]\r\n\r\n---\r\n\r\nSince we are changing the behavior, it's fine to show this once to existing users.\r\n\r\n[CZO thread](https://chat.zulip.org/#narrow/stream/137-feedback/topic/user.20research.3A.20going.20to.20a.20new.20stream/near/1744305)\n", "before_files": [{"content": "# See https://zulip.readthedocs.io/en/latest/subsystems/hotspots.html\n# for documentation on this subsystem.\nfrom dataclasses import dataclass\nfrom typing import Any, Dict, List, Optional, Union\n\nfrom django.conf import settings\nfrom django.utils.translation import gettext_lazy\nfrom django_stubs_ext import StrPromise\n\nfrom zerver.models import OnboardingStep, UserProfile\n\n\n@dataclass\nclass Hotspot:\n name: str\n title: Optional[StrPromise]\n description: Optional[StrPromise]\n has_trigger: bool = False\n\n def to_dict(self, delay: float = 0) -> Dict[str, Union[str, float, bool]]:\n return {\n \"type\": \"hotspot\",\n \"name\": self.name,\n \"title\": str(self.title),\n \"description\": str(self.description),\n \"delay\": delay,\n \"has_trigger\": self.has_trigger,\n }\n\n\nINTRO_HOTSPOTS: List[Hotspot] = [\n Hotspot(\n name=\"intro_streams\",\n title=gettext_lazy(\"Catch up on a stream\"),\n description=gettext_lazy(\n \"Messages sent to a stream are seen by everyone subscribed \"\n \"to that stream. Try clicking on one of the stream links below.\"\n ),\n ),\n Hotspot(\n name=\"intro_topics\",\n title=gettext_lazy(\"Topics\"),\n description=gettext_lazy(\n \"Every message has a topic. Topics keep conversations \"\n \"easy to follow, and make it easy to reply to conversations that start \"\n \"while you are offline.\"\n ),\n ),\n Hotspot(\n # In theory, this should be renamed to intro_personal, since\n # it's no longer attached to the gear menu, but renaming these\n # requires a migration that is not worth doing at this time.\n name=\"intro_gear\",\n title=gettext_lazy(\"Settings\"),\n description=gettext_lazy(\"Go to Settings to configure your notifications and preferences.\"),\n ),\n Hotspot(\n name=\"intro_compose\",\n title=gettext_lazy(\"Compose\"),\n description=gettext_lazy(\n \"Click here to start a new conversation. 
Pick a topic \"\n \"(2-3 words is best), and give it a go!\"\n ),\n ),\n]\n\n\nNON_INTRO_HOTSPOTS: List[Hotspot] = []\n\n\n@dataclass\nclass OneTimeNotice:\n name: str\n\n def to_dict(self) -> Dict[str, str]:\n return {\n \"type\": \"one_time_notice\",\n \"name\": self.name,\n }\n\n\nONE_TIME_NOTICES: List[OneTimeNotice] = [\n OneTimeNotice(\n name=\"visibility_policy_banner\",\n ),\n OneTimeNotice(\n name=\"intro_inbox_view_modal\",\n ),\n OneTimeNotice(\n name=\"intro_recent_view_modal\",\n ),\n]\n\n# We would most likely implement new hotspots in the future that aren't\n# a part of the initial tutorial. To that end, classifying them into\n# categories which are aggregated in ALL_HOTSPOTS, seems like a good start.\nALL_HOTSPOTS = [*INTRO_HOTSPOTS, *NON_INTRO_HOTSPOTS]\nALL_ONBOARDING_STEPS: List[Union[Hotspot, OneTimeNotice]] = [*ALL_HOTSPOTS, *ONE_TIME_NOTICES]\n\n\ndef get_next_onboarding_steps(user: UserProfile) -> List[Dict[str, Any]]:\n # For manual testing, it can be convenient to set\n # ALWAYS_SEND_ALL_HOTSPOTS=True in `zproject/dev_settings.py` to\n # make it easy to click on all of the hotspots.\n #\n # Since this is just for development purposes, it's convenient for us to send\n # all the hotspots rather than any specific category.\n if settings.ALWAYS_SEND_ALL_HOTSPOTS:\n return [hotspot.to_dict() for hotspot in ALL_HOTSPOTS]\n\n # If a Zulip server has disabled the tutorial, never send hotspots.\n if not settings.TUTORIAL_ENABLED:\n return []\n\n seen_onboarding_steps = frozenset(\n OnboardingStep.objects.filter(user=user).values_list(\"onboarding_step\", flat=True)\n )\n\n onboarding_steps: List[Dict[str, Any]] = [hotspot.to_dict() for hotspot in NON_INTRO_HOTSPOTS]\n\n for one_time_notice in ONE_TIME_NOTICES:\n if one_time_notice.name in seen_onboarding_steps:\n continue\n onboarding_steps.append(one_time_notice.to_dict())\n\n if user.tutorial_status == UserProfile.TUTORIAL_FINISHED:\n return onboarding_steps\n\n for hotspot in INTRO_HOTSPOTS:\n if hotspot.name in seen_onboarding_steps:\n continue\n\n onboarding_steps.append(hotspot.to_dict(delay=0.5))\n return onboarding_steps\n\n user.tutorial_status = UserProfile.TUTORIAL_FINISHED\n user.save(update_fields=[\"tutorial_status\"])\n return onboarding_steps\n\n\ndef copy_hotspots(source_profile: UserProfile, target_profile: UserProfile) -> None:\n for userhotspot in frozenset(OnboardingStep.objects.filter(user=source_profile)):\n OnboardingStep.objects.create(\n user=target_profile,\n onboarding_step=userhotspot.onboarding_step,\n timestamp=userhotspot.timestamp,\n )\n\n target_profile.tutorial_status = source_profile.tutorial_status\n target_profile.onboarding_steps = source_profile.onboarding_steps\n target_profile.save(update_fields=[\"tutorial_status\", \"onboarding_steps\"])\n", "path": "zerver/lib/hotspots.py"}]} | 2,325 | 103 |
gh_patches_debug_12545 | rasdani/github-patches | git_diff | systemd__mkosi-2228 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Debian tools which use /etc/alternatives break
Regression caused by https://github.com/systemd/mkosi/pull/2201, already reported in https://github.com/systemd/mkosi/issues/2206.
When putting a keyring in `/etc/apt/trusted.gpg.d`, `apt-key` will eventually be called, which in turn calls `awk`. On Debian, that is installed as:
```
lrwxrwxrwx 1 65534 65534 21 Jun 17 2022 /usr/bin/awk -> /etc/alternatives/awk
```
Since `/etc/alternatives` is not mounted, the call to awk fails and thus apt thinks there was no valid key found.
</issue>
<code>
[start of mkosi/bubblewrap.py]
1 # SPDX-License-Identifier: LGPL-2.1+
2 import contextlib
3 import enum
4 import logging
5 import os
6 import subprocess
7 import sys
8 from collections.abc import Mapping, Sequence
9 from pathlib import Path
10 from typing import Optional
11
12 from mkosi.log import ARG_DEBUG_SHELL
13 from mkosi.mounts import finalize_passwd_mounts, mount_overlay
14 from mkosi.run import find_binary, log_process_failure, run
15 from mkosi.state import MkosiState
16 from mkosi.types import _FILE, CompletedProcess, PathString
17 from mkosi.util import flatten, one_zero
18
19
20 # https://github.com/torvalds/linux/blob/master/include/uapi/linux/capability.h
21 class Capability(enum.Enum):
22 CAP_NET_ADMIN = 12
23
24
25 def have_effective_cap(capability: Capability) -> bool:
26 for line in Path("/proc/self/status").read_text().splitlines():
27 if line.startswith("CapEff:"):
28 hexcap = line.removeprefix("CapEff:").strip()
29 break
30 else:
31 logging.warning(f"\"CapEff:\" not found in /proc/self/status, assuming we don't have {capability}")
32 return False
33
34 return (int(hexcap, 16) & (1 << capability.value)) != 0
35
36
37 def finalize_mounts(state: MkosiState) -> list[str]:
38 mounts = [
39 ((state.config.tools_tree or Path("/")) / subdir, Path("/") / subdir, True)
40 for subdir in (
41 Path("etc/pki"),
42 Path("etc/ssl"),
43 Path("etc/crypto-policies"),
44 Path("etc/ca-certificates"),
45 Path("etc/pacman.d/gnupg"),
46 Path("var/lib/ca-certificates"),
47 )
48 if ((state.config.tools_tree or Path("/")) / subdir).exists()
49 ]
50
51 mounts += [
52 (d, d, False)
53 for d in (state.workspace, state.config.cache_dir, state.config.output_dir, state.config.build_dir)
54 if d
55 ]
56
57 mounts += [(d, d, True) for d in state.config.extra_search_paths]
58
59 return flatten(
60 ["--ro-bind" if readonly else "--bind", os.fspath(src), os.fspath(target)]
61 for src, target, readonly
62 in sorted(set(mounts), key=lambda s: s[1])
63 )
64
65
66 def bwrap(
67 state: MkosiState,
68 cmd: Sequence[PathString],
69 *,
70 network: bool = False,
71 devices: bool = False,
72 options: Sequence[PathString] = (),
73 log: bool = True,
74 scripts: Optional[Path] = None,
75 env: Mapping[str, str] = {},
76 stdin: _FILE = None,
77 stdout: _FILE = None,
78 stderr: _FILE = None,
79 input: Optional[str] = None,
80 check: bool = True,
81 ) -> CompletedProcess:
82 cmdline: list[PathString] = [
83 "bwrap",
84 "--ro-bind", "/usr", "/usr",
85 "--ro-bind-try", "/nix/store", "/nix/store",
86 # This mount is writable so bwrap can create extra directories or symlinks inside of it as needed. This isn't a
87 # problem as the package manager directory is created by mkosi and thrown away when the build finishes.
88 "--bind", state.pkgmngr / "etc", "/etc",
89 "--bind", "/var/tmp", "/var/tmp",
90 "--bind", "/tmp", "/tmp",
91 "--bind", Path.cwd(), Path.cwd(),
92 "--chdir", Path.cwd(),
93 "--unshare-pid",
94 "--unshare-ipc",
95 "--unshare-cgroup",
96 *(["--unshare-net"] if not network and have_effective_cap(Capability.CAP_NET_ADMIN) else []),
97 "--die-with-parent",
98 "--proc", "/proc",
99 "--setenv", "SYSTEMD_OFFLINE", one_zero(network),
100 ]
101
102 if devices:
103 cmdline += [
104 "--bind", "/sys", "/sys",
105 "--dev-bind", "/dev", "/dev",
106 ]
107 else:
108 cmdline += ["--dev", "/dev"]
109
110 for p in Path("/").iterdir():
111 if p.is_symlink():
112 cmdline += ["--symlink", p.readlink(), p]
113
114 if network:
115 cmdline += ["--bind", "/etc/resolv.conf", "/etc/resolv.conf"]
116
117 cmdline += finalize_mounts(state) + [
118 "--setenv", "PATH", f"{scripts or ''}:{os.environ['PATH']}",
119 *options,
120 "sh", "-c", "chmod 1777 /dev/shm && exec $0 \"$@\"",
121 ]
122
123 if setpgid := find_binary("setpgid"):
124 cmdline += [setpgid, "--foreground", "--"]
125
126 try:
127 with (
128 mount_overlay([Path("/usr"), state.pkgmngr / "usr"], where=Path("/usr"), lazy=True)
129 if (state.pkgmngr / "usr").exists()
130 else contextlib.nullcontext()
131 ):
132 return run(
133 [*cmdline, *cmd],
134 env=env,
135 log=False,
136 stdin=stdin,
137 stdout=stdout,
138 stderr=stderr,
139 input=input,
140 check=check,
141 )
142 except subprocess.CalledProcessError as e:
143 if log:
144 log_process_failure([os.fspath(s) for s in cmd], e.returncode)
145 if ARG_DEBUG_SHELL.get():
146 run([*cmdline, "sh"], stdin=sys.stdin, check=False, env=env, log=False)
147 raise e
148
149
150 def apivfs_cmd(root: Path) -> list[PathString]:
151 cmdline: list[PathString] = [
152 "bwrap",
153 "--dev-bind", "/", "/",
154 "--chdir", Path.cwd(),
155 "--tmpfs", root / "run",
156 "--tmpfs", root / "tmp",
157 "--bind", os.getenv("TMPDIR", "/var/tmp"), root / "var/tmp",
158 "--proc", root / "proc",
159 "--dev", root / "dev",
160 # APIVFS generally means chrooting is going to happen so unset TMPDIR just to be safe.
161 "--unsetenv", "TMPDIR",
162 ]
163
164 if (root / "etc/machine-id").exists():
165 # Make sure /etc/machine-id is not overwritten by any package manager post install scripts.
166 cmdline += ["--ro-bind", root / "etc/machine-id", root / "etc/machine-id"]
167
168 cmdline += finalize_passwd_mounts(root)
169
170 if setpgid := find_binary("setpgid"):
171 cmdline += [setpgid, "--foreground", "--"]
172
173 chmod = f"chmod 1777 {root / 'tmp'} {root / 'var/tmp'} {root / 'dev/shm'}"
174 # Make sure anything running in the root directory thinks it's in a container. $container can't always be
175 # accessed so we write /run/host/container-manager as well which is always accessible.
176 container = f"mkdir {root}/run/host && echo mkosi >{root}/run/host/container-manager"
177
178 cmdline += ["sh", "-c", f"{chmod} && {container} && exec $0 \"$@\""]
179
180 return cmdline
181
182
183 def chroot_cmd(root: Path, *, resolve: bool = False, options: Sequence[PathString] = ()) -> list[PathString]:
184 cmdline: list[PathString] = [
185 "sh", "-c",
186 # No exec here because we need to clean up the /work directory afterwards.
187 f"trap 'rm -rf {root / 'work'}' EXIT && mkdir -p {root / 'work'} && chown 777 {root / 'work'} && $0 \"$@\"",
188 "bwrap",
189 "--dev-bind", root, "/",
190 "--setenv", "container", "mkosi",
191 "--setenv", "HOME", "/",
192 "--setenv", "PATH", "/work/scripts:/usr/bin:/usr/sbin",
193 ]
194
195 if resolve:
196 p = Path("etc/resolv.conf")
197 if (root / p).is_symlink():
198 # For each component in the target path, bubblewrap will try to create it if it doesn't exist
199 # yet. If a component in the path is a dangling symlink, bubblewrap will end up calling
200 # mkdir(symlink) which obviously fails if multiple components of the dangling symlink path don't
201 # exist yet. As a workaround, we resolve the symlink ourselves so that bubblewrap will correctly
202 # create all missing components in the target path.
203 p = p.parent / (root / p).readlink()
204
205 cmdline += ["--ro-bind", "/etc/resolv.conf", Path("/") / p]
206
207 cmdline += [*options]
208
209 if setpgid := find_binary("setpgid", root=root):
210 cmdline += [setpgid, "--foreground", "--"]
211
212 return apivfs_cmd(root) + cmdline
213
[end of mkosi/bubblewrap.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mkosi/bubblewrap.py b/mkosi/bubblewrap.py
--- a/mkosi/bubblewrap.py
+++ b/mkosi/bubblewrap.py
@@ -86,6 +86,7 @@
# This mount is writable so bwrap can create extra directories or symlinks inside of it as needed. This isn't a
# problem as the package manager directory is created by mkosi and thrown away when the build finishes.
"--bind", state.pkgmngr / "etc", "/etc",
+ "--ro-bind-try", "/etc/alternatives", "/etc/alternatives",
"--bind", "/var/tmp", "/var/tmp",
"--bind", "/tmp", "/tmp",
"--bind", Path.cwd(), Path.cwd(),
| {"golden_diff": "diff --git a/mkosi/bubblewrap.py b/mkosi/bubblewrap.py\n--- a/mkosi/bubblewrap.py\n+++ b/mkosi/bubblewrap.py\n@@ -86,6 +86,7 @@\n # This mount is writable so bwrap can create extra directories or symlinks inside of it as needed. This isn't a\n # problem as the package manager directory is created by mkosi and thrown away when the build finishes.\n \"--bind\", state.pkgmngr / \"etc\", \"/etc\",\n+ \"--ro-bind-try\", \"/etc/alternatives\", \"/etc/alternatives\",\n \"--bind\", \"/var/tmp\", \"/var/tmp\",\n \"--bind\", \"/tmp\", \"/tmp\",\n \"--bind\", Path.cwd(), Path.cwd(),\n", "issue": "Debian tools which use /etc/alternatives break\nRegression caused by https://github.com/systemd/mkosi/pull/2201, already reported in https://github.com/systemd/mkosi/issues/2206.\r\n\r\nWhen putting a keyring in `/etc/apt/trusted.gpg.d`, eventually `apt-key` will be called, which will make a call to `awk`. On debian that is installed as\r\n```\r\nlrwxrwxrwx 1 65534 65534 21 Jun 17 2022 /usr/bin/awk -> /etc/alternatives/awk\r\n```\r\nSince `/etc/alternatives` is not mounted, the call to awk fails and thus apt thinks there was no valid key found.\n", "before_files": [{"content": "# SPDX-License-Identifier: LGPL-2.1+\nimport contextlib\nimport enum\nimport logging\nimport os\nimport subprocess\nimport sys\nfrom collections.abc import Mapping, Sequence\nfrom pathlib import Path\nfrom typing import Optional\n\nfrom mkosi.log import ARG_DEBUG_SHELL\nfrom mkosi.mounts import finalize_passwd_mounts, mount_overlay\nfrom mkosi.run import find_binary, log_process_failure, run\nfrom mkosi.state import MkosiState\nfrom mkosi.types import _FILE, CompletedProcess, PathString\nfrom mkosi.util import flatten, one_zero\n\n\n# https://github.com/torvalds/linux/blob/master/include/uapi/linux/capability.h\nclass Capability(enum.Enum):\n CAP_NET_ADMIN = 12\n\n\ndef have_effective_cap(capability: Capability) -> bool:\n for line in Path(\"/proc/self/status\").read_text().splitlines():\n if line.startswith(\"CapEff:\"):\n hexcap = line.removeprefix(\"CapEff:\").strip()\n break\n else:\n logging.warning(f\"\\\"CapEff:\\\" not found in /proc/self/status, assuming we don't have {capability}\")\n return False\n\n return (int(hexcap, 16) & (1 << capability.value)) != 0\n\n\ndef finalize_mounts(state: MkosiState) -> list[str]:\n mounts = [\n ((state.config.tools_tree or Path(\"/\")) / subdir, Path(\"/\") / subdir, True)\n for subdir in (\n Path(\"etc/pki\"),\n Path(\"etc/ssl\"),\n Path(\"etc/crypto-policies\"),\n Path(\"etc/ca-certificates\"),\n Path(\"etc/pacman.d/gnupg\"),\n Path(\"var/lib/ca-certificates\"),\n )\n if ((state.config.tools_tree or Path(\"/\")) / subdir).exists()\n ]\n\n mounts += [\n (d, d, False)\n for d in (state.workspace, state.config.cache_dir, state.config.output_dir, state.config.build_dir)\n if d\n ]\n\n mounts += [(d, d, True) for d in state.config.extra_search_paths]\n\n return flatten(\n [\"--ro-bind\" if readonly else \"--bind\", os.fspath(src), os.fspath(target)]\n for src, target, readonly\n in sorted(set(mounts), key=lambda s: s[1])\n )\n\n\ndef bwrap(\n state: MkosiState,\n cmd: Sequence[PathString],\n *,\n network: bool = False,\n devices: bool = False,\n options: Sequence[PathString] = (),\n log: bool = True,\n scripts: Optional[Path] = None,\n env: Mapping[str, str] = {},\n stdin: _FILE = None,\n stdout: _FILE = None,\n stderr: _FILE = None,\n input: Optional[str] = None,\n check: bool = True,\n) -> CompletedProcess:\n cmdline: list[PathString] = [\n \"bwrap\",\n \"--ro-bind\", 
\"/usr\", \"/usr\",\n \"--ro-bind-try\", \"/nix/store\", \"/nix/store\",\n # This mount is writable so bwrap can create extra directories or symlinks inside of it as needed. This isn't a\n # problem as the package manager directory is created by mkosi and thrown away when the build finishes.\n \"--bind\", state.pkgmngr / \"etc\", \"/etc\",\n \"--bind\", \"/var/tmp\", \"/var/tmp\",\n \"--bind\", \"/tmp\", \"/tmp\",\n \"--bind\", Path.cwd(), Path.cwd(),\n \"--chdir\", Path.cwd(),\n \"--unshare-pid\",\n \"--unshare-ipc\",\n \"--unshare-cgroup\",\n *([\"--unshare-net\"] if not network and have_effective_cap(Capability.CAP_NET_ADMIN) else []),\n \"--die-with-parent\",\n \"--proc\", \"/proc\",\n \"--setenv\", \"SYSTEMD_OFFLINE\", one_zero(network),\n ]\n\n if devices:\n cmdline += [\n \"--bind\", \"/sys\", \"/sys\",\n \"--dev-bind\", \"/dev\", \"/dev\",\n ]\n else:\n cmdline += [\"--dev\", \"/dev\"]\n\n for p in Path(\"/\").iterdir():\n if p.is_symlink():\n cmdline += [\"--symlink\", p.readlink(), p]\n\n if network:\n cmdline += [\"--bind\", \"/etc/resolv.conf\", \"/etc/resolv.conf\"]\n\n cmdline += finalize_mounts(state) + [\n \"--setenv\", \"PATH\", f\"{scripts or ''}:{os.environ['PATH']}\",\n *options,\n \"sh\", \"-c\", \"chmod 1777 /dev/shm && exec $0 \\\"$@\\\"\",\n ]\n\n if setpgid := find_binary(\"setpgid\"):\n cmdline += [setpgid, \"--foreground\", \"--\"]\n\n try:\n with (\n mount_overlay([Path(\"/usr\"), state.pkgmngr / \"usr\"], where=Path(\"/usr\"), lazy=True)\n if (state.pkgmngr / \"usr\").exists()\n else contextlib.nullcontext()\n ):\n return run(\n [*cmdline, *cmd],\n env=env,\n log=False,\n stdin=stdin,\n stdout=stdout,\n stderr=stderr,\n input=input,\n check=check,\n )\n except subprocess.CalledProcessError as e:\n if log:\n log_process_failure([os.fspath(s) for s in cmd], e.returncode)\n if ARG_DEBUG_SHELL.get():\n run([*cmdline, \"sh\"], stdin=sys.stdin, check=False, env=env, log=False)\n raise e\n\n\ndef apivfs_cmd(root: Path) -> list[PathString]:\n cmdline: list[PathString] = [\n \"bwrap\",\n \"--dev-bind\", \"/\", \"/\",\n \"--chdir\", Path.cwd(),\n \"--tmpfs\", root / \"run\",\n \"--tmpfs\", root / \"tmp\",\n \"--bind\", os.getenv(\"TMPDIR\", \"/var/tmp\"), root / \"var/tmp\",\n \"--proc\", root / \"proc\",\n \"--dev\", root / \"dev\",\n # APIVFS generally means chrooting is going to happen so unset TMPDIR just to be safe.\n \"--unsetenv\", \"TMPDIR\",\n ]\n\n if (root / \"etc/machine-id\").exists():\n # Make sure /etc/machine-id is not overwritten by any package manager post install scripts.\n cmdline += [\"--ro-bind\", root / \"etc/machine-id\", root / \"etc/machine-id\"]\n\n cmdline += finalize_passwd_mounts(root)\n\n if setpgid := find_binary(\"setpgid\"):\n cmdline += [setpgid, \"--foreground\", \"--\"]\n\n chmod = f\"chmod 1777 {root / 'tmp'} {root / 'var/tmp'} {root / 'dev/shm'}\"\n # Make sure anything running in the root directory thinks it's in a container. 
$container can't always be\n # accessed so we write /run/host/container-manager as well which is always accessible.\n container = f\"mkdir {root}/run/host && echo mkosi >{root}/run/host/container-manager\"\n\n cmdline += [\"sh\", \"-c\", f\"{chmod} && {container} && exec $0 \\\"$@\\\"\"]\n\n return cmdline\n\n\ndef chroot_cmd(root: Path, *, resolve: bool = False, options: Sequence[PathString] = ()) -> list[PathString]:\n cmdline: list[PathString] = [\n \"sh\", \"-c\",\n # No exec here because we need to clean up the /work directory afterwards.\n f\"trap 'rm -rf {root / 'work'}' EXIT && mkdir -p {root / 'work'} && chown 777 {root / 'work'} && $0 \\\"$@\\\"\",\n \"bwrap\",\n \"--dev-bind\", root, \"/\",\n \"--setenv\", \"container\", \"mkosi\",\n \"--setenv\", \"HOME\", \"/\",\n \"--setenv\", \"PATH\", \"/work/scripts:/usr/bin:/usr/sbin\",\n ]\n\n if resolve:\n p = Path(\"etc/resolv.conf\")\n if (root / p).is_symlink():\n # For each component in the target path, bubblewrap will try to create it if it doesn't exist\n # yet. If a component in the path is a dangling symlink, bubblewrap will end up calling\n # mkdir(symlink) which obviously fails if multiple components of the dangling symlink path don't\n # exist yet. As a workaround, we resolve the symlink ourselves so that bubblewrap will correctly\n # create all missing components in the target path.\n p = p.parent / (root / p).readlink()\n\n cmdline += [\"--ro-bind\", \"/etc/resolv.conf\", Path(\"/\") / p]\n\n cmdline += [*options]\n\n if setpgid := find_binary(\"setpgid\", root=root):\n cmdline += [setpgid, \"--foreground\", \"--\"]\n\n return apivfs_cmd(root) + cmdline\n", "path": "mkosi/bubblewrap.py"}]} | 3,181 | 168 |
gh_patches_debug_7903 | rasdani/github-patches | git_diff | xonsh__xonsh-825 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
CTRL-C during config wizard results in loss of input to terminal
I was running bash in a Terminator window; when I first started xonsh, I was asked if I wanted to run the config wizard. I chose yes, but eventually decided I didn't want to finish, so I hit CTRL-C. This eventually got me back to bash, but when I pressed keys, the keys didn't show up in the terminal window. I had to close the window and open a new one.
</issue>
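Keystrokes no longer echoing after an interrupted interactive step is the classic sign of a program being killed while the terminal is in a modified mode and never restoring it. As a hedged, illustrative sketch only — POSIX-specific, not xonsh's actual code, and `run_step`/`step` are hypothetical names — the usual guard looks like this:

import sys
import termios

def run_step(step):
    """Run an interactive step, always restoring the terminal settings."""
    fd = sys.stdin.fileno()
    saved = termios.tcgetattr(fd)            # snapshot the current tty modes
    try:
        step()                               # e.g. an interactive wizard
    except KeyboardInterrupt:
        print("\nInterrupted.")
    finally:
        # restore echo and the other saved modes no matter how step() exited
        termios.tcsetattr(fd, termios.TCSADRAIN, saved)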
<code>
[start of xonsh/main.py]
1 # -*- coding: utf-8 -*-
2 """The main xonsh script."""
3 import os
4 import sys
5 import enum
6 import builtins
7 from argparse import ArgumentParser, ArgumentTypeError
8 from contextlib import contextmanager
9
10 try:
11 from setproctitle import setproctitle
12 except ImportError:
13 setproctitle = None
14
15 from xonsh import __version__
16 from xonsh.shell import Shell
17 from xonsh.pretty import pprint, pretty
18 from xonsh.proc import HiddenCompletedCommand
19 from xonsh.jobs import ignore_sigtstp
20 from xonsh.tools import HAVE_PYGMENTS, setup_win_unicode_console, print_color, ON_WINDOWS
21
22 if HAVE_PYGMENTS:
23 import pygments
24 from xonsh import pyghooks
25
26
27 def path_argument(s):
28 """Return a path only if the path is actually legal
29
30 This is very similar to argparse.FileType, except that it doesn't return
31 an open file handle, but rather simply validates the path."""
32
33 s = os.path.abspath(os.path.expanduser(s))
34 if not os.path.isfile(s):
35 raise ArgumentTypeError('"%s" must be a valid path to a file' % s)
36 return s
37
38
39 parser = ArgumentParser(description='xonsh', add_help=False)
40 parser.add_argument('-h', '--help',
41 dest='help',
42 action='store_true',
43 default=False,
44 help='show help and exit')
45 parser.add_argument('-V', '--version',
46 dest='version',
47 action='store_true',
48 default=False,
49 help='show version information and exit')
50 parser.add_argument('-c',
51 help="Run a single command and exit",
52 dest='command',
53 required=False,
54 default=None)
55 parser.add_argument('-i', '--interactive',
56 help='force running in interactive mode',
57 dest='force_interactive',
58 action='store_true',
59 default=False)
60 parser.add_argument('-l', '--login',
61 help='run as a login shell',
62 dest='login',
63 action='store_true',
64 default=False)
65 parser.add_argument('--config-path',
66 help='specify a custom static configuration file',
67 dest='config_path',
68 default=None,
69 type=path_argument)
70 parser.add_argument('--no-rc',
71 help="Do not load the .xonshrc files",
72 dest='norc',
73 action='store_true',
74 default=False)
75 parser.add_argument('-D',
76 dest='defines',
77 help='define an environment variable, in the form of '
78 '-DNAME=VAL. May be used many times.',
79 metavar='ITEM',
80 nargs='*',
81 default=None)
82 parser.add_argument('--shell-type',
83 help='What kind of shell should be used. '
84 'Possible options: readline, prompt_toolkit, random. '
85 'Warning! If set this overrides $SHELL_TYPE variable.',
86 dest='shell_type',
87 choices=('readline', 'prompt_toolkit', 'best', 'random'),
88 default=None)
89 parser.add_argument('file',
90 metavar='script-file',
91 help='If present, execute the script in script-file'
92 ' and exit',
93 nargs='?',
94 default=None)
95 parser.add_argument('args',
96 metavar='args',
97 help='Additional arguments to the script specified '
98 'by script-file',
99 nargs='*',
100 default=[])
101
102
103 def arg_undoers():
104 au = {
105 '-h': (lambda args: setattr(args, 'help', False)),
106 '-V': (lambda args: setattr(args, 'version', False)),
107 '-c': (lambda args: setattr(args, 'command', None)),
108 '-i': (lambda args: setattr(args, 'force_interactive', False)),
109 '-l': (lambda args: setattr(args, 'login', False)),
110 '-c': (lambda args: setattr(args, 'command', None)),
111 '--config-path': (lambda args: delattr(args, 'config_path')),
112 '--no-rc': (lambda args: setattr(args, 'norc', False)),
113 '-D': (lambda args: setattr(args, 'defines', None)),
114 '--shell-type': (lambda args: setattr(args, 'shell_type', None)),
115 }
116 au['--help'] = au['-h']
117 au['--version'] = au['-V']
118 au['--interactive'] = au['-i']
119 au['--login'] = au['-l']
120
121 return au
122
123 def undo_args(args):
124 """Undoes missaligned args."""
125 au = arg_undoers()
126 for a in args.args:
127 if a in au:
128 au[a](args)
129 else:
130 for k in au:
131 if a.startswith(k):
132 au[k](args)
133
134 def _pprint_displayhook(value):
135 if value is None or isinstance(value, HiddenCompletedCommand):
136 return
137 builtins._ = None # Set '_' to None to avoid recursion
138 if HAVE_PYGMENTS:
139 s = pretty(value) # color case
140 lexer = pyghooks.XonshLexer()
141 tokens = list(pygments.lex(s, lexer=lexer))
142 print_color(tokens)
143 else:
144 pprint(value) # black & white case
145 builtins._ = value
146
147 class XonshMode(enum.Enum):
148 single_command = 0
149 script_from_file = 1
150 script_from_stdin = 2
151 interactive = 3
152
153 def premain(argv=None):
154 """Setup for main xonsh entry point, returns parsed arguments."""
155 if setproctitle is not None:
156 setproctitle(' '.join(['xonsh'] + sys.argv[1:]))
157 args, other = parser.parse_known_args(argv)
158 if args.file is not None:
159 real_argv = (argv or sys.argv)
160 i = real_argv.index(args.file)
161 args.args = real_argv[i+1:]
162 undo_args(args)
163 if args.help:
164 parser.print_help()
165 exit()
166 if args.version:
167 version = '/'.join(('xonsh', __version__)),
168 print(version)
169 exit()
170 shell_kwargs = {'shell_type': args.shell_type,
171 'completer': False,
172 'login': False}
173 if args.login:
174 shell_kwargs['login'] = True
175 if args.config_path is None:
176 shell_kwargs['config'] = args.config_path
177 if args.norc:
178 shell_kwargs['rc'] = ()
179 setattr(sys, 'displayhook', _pprint_displayhook)
180 if args.command is not None:
181 args.mode = XonshMode.single_command
182 shell_kwargs['shell_type'] = 'none'
183 elif args.file is not None:
184 args.mode = XonshMode.script_from_file
185 shell_kwargs['shell_type'] = 'none'
186 elif not sys.stdin.isatty() and not args.force_interactive:
187 args.mode = XonshMode.script_from_stdin
188 shell_kwargs['shell_type'] = 'none'
189 else:
190 args.mode = XonshMode.interactive
191 shell_kwargs['completer'] = True
192 shell_kwargs['login'] = True
193 shell = builtins.__xonsh_shell__ = Shell(**shell_kwargs)
194 from xonsh import imphooks
195 env = builtins.__xonsh_env__
196 env['XONSH_LOGIN'] = shell_kwargs['login']
197 if args.defines is not None:
198 env.update([x.split('=', 1) for x in args.defines])
199 env['XONSH_INTERACTIVE'] = False
200 if ON_WINDOWS:
201 setup_win_unicode_console(env.get('WIN_UNICODE_CONSOLE', True))
202 return args
203
204
205 def main(argv=None):
206 """Main entry point for xonsh cli."""
207 args = premain(argv)
208 env = builtins.__xonsh_env__
209 shell = builtins.__xonsh_shell__
210 if args.mode == XonshMode.single_command:
211 # run a single command and exit
212 shell.default(args.command)
213 elif args.mode == XonshMode.script_from_file:
214 # run a script contained in a file
215 if os.path.isfile(args.file):
216 with open(args.file) as f:
217 code = f.read()
218 code = code if code.endswith('\n') else code + '\n'
219 sys.argv = args.args
220 env['ARGS'] = [args.file] + args.args
221 code = shell.execer.compile(code, mode='exec', glbs=shell.ctx,
222 filename=args.file)
223 shell.execer.exec(code, mode='exec', glbs=shell.ctx)
224 else:
225 print('xonsh: {0}: No such file or directory.'.format(args.file))
226 elif args.mode == XonshMode.script_from_stdin:
227 # run a script given on stdin
228 code = sys.stdin.read()
229 code = code if code.endswith('\n') else code + '\n'
230 code = shell.execer.compile(code, mode='exec', glbs=shell.ctx,
231 filename='<stdin>')
232 shell.execer.exec(code, mode='exec', glbs=shell.ctx)
233 else:
234 # otherwise, enter the shell
235 env['XONSH_INTERACTIVE'] = True
236 ignore_sigtstp()
237 if not env['LOADED_CONFIG'] and not any(env['LOADED_RC_FILES']):
238 print('Could not find xonsh configuration or run control files.')
239 code = '$[xonfig wizard --confirm]'
240 shell.execer.exec(code, mode='single', glbs=shell.ctx)
241 shell.cmdloop()
242 postmain(args)
243
244
245 def postmain(args=None):
246 """Teardown for main xonsh entry point, accepts parsed arguments."""
247 if ON_WINDOWS:
248 setup_win_unicode_console(enable=False)
249 del builtins.__xonsh_shell__
250
251
252 @contextmanager
253 def main_context(argv=None):
254 """Generator that runs pre- and post-main() functions. This has two iterations.
255 The first yields the shell. The second returns None but cleans
256 up the shell.
257 """
258 args = premain(argv)
259 yield builtins.__xonsh_shell__
260 postmain(args)
261
262
263
264 if __name__ == '__main__':
265 main()
266
[end of xonsh/main.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/xonsh/main.py b/xonsh/main.py
--- a/xonsh/main.py
+++ b/xonsh/main.py
@@ -236,8 +236,8 @@
ignore_sigtstp()
if not env['LOADED_CONFIG'] and not any(env['LOADED_RC_FILES']):
print('Could not find xonsh configuration or run control files.')
- code = '$[xonfig wizard --confirm]'
- shell.execer.exec(code, mode='single', glbs=shell.ctx)
+ from xonsh import xonfig # lazy import
+ xonfig.main(['wizard', '--confirm'])
shell.cmdloop()
postmain(args)
| {"golden_diff": "diff --git a/xonsh/main.py b/xonsh/main.py\n--- a/xonsh/main.py\n+++ b/xonsh/main.py\n@@ -236,8 +236,8 @@\n ignore_sigtstp()\n if not env['LOADED_CONFIG'] and not any(env['LOADED_RC_FILES']):\n print('Could not find xonsh configuration or run control files.')\n- code = '$[xonfig wizard --confirm]'\n- shell.execer.exec(code, mode='single', glbs=shell.ctx)\n+ from xonsh import xonfig # lazy import\n+ xonfig.main(['wizard', '--confirm'])\n shell.cmdloop()\n postmain(args)\n", "issue": "CTRL-C during config wizard results in loss of input to terminal\nRunning bash in a Terminator window, when I first started xonsh, I was asked if I wanted to run the config wizard. I chose yes, but then eventually decided I didn't want to finish, so I hit CTRL-C. This eventually got me back to bash, but when I pressed keys, the keys didn't show up in the terminal windows. I had to close the window and open a new one.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"The main xonsh script.\"\"\"\nimport os\nimport sys\nimport enum\nimport builtins\nfrom argparse import ArgumentParser, ArgumentTypeError\nfrom contextlib import contextmanager\n\ntry:\n from setproctitle import setproctitle\nexcept ImportError:\n setproctitle = None\n\nfrom xonsh import __version__\nfrom xonsh.shell import Shell\nfrom xonsh.pretty import pprint, pretty\nfrom xonsh.proc import HiddenCompletedCommand\nfrom xonsh.jobs import ignore_sigtstp\nfrom xonsh.tools import HAVE_PYGMENTS, setup_win_unicode_console, print_color, ON_WINDOWS\n\nif HAVE_PYGMENTS:\n import pygments\n from xonsh import pyghooks\n\n\ndef path_argument(s):\n \"\"\"Return a path only if the path is actually legal\n\n This is very similar to argparse.FileType, except that it doesn't return\n an open file handle, but rather simply validates the path.\"\"\"\n\n s = os.path.abspath(os.path.expanduser(s))\n if not os.path.isfile(s):\n raise ArgumentTypeError('\"%s\" must be a valid path to a file' % s)\n return s\n\n\nparser = ArgumentParser(description='xonsh', add_help=False)\nparser.add_argument('-h', '--help',\n dest='help',\n action='store_true',\n default=False,\n help='show help and exit')\nparser.add_argument('-V', '--version',\n dest='version',\n action='store_true',\n default=False,\n help='show version information and exit')\nparser.add_argument('-c',\n help=\"Run a single command and exit\",\n dest='command',\n required=False,\n default=None)\nparser.add_argument('-i', '--interactive',\n help='force running in interactive mode',\n dest='force_interactive',\n action='store_true',\n default=False)\nparser.add_argument('-l', '--login',\n help='run as a login shell',\n dest='login',\n action='store_true',\n default=False)\nparser.add_argument('--config-path',\n help='specify a custom static configuration file',\n dest='config_path',\n default=None,\n type=path_argument)\nparser.add_argument('--no-rc',\n help=\"Do not load the .xonshrc files\",\n dest='norc',\n action='store_true',\n default=False)\nparser.add_argument('-D',\n dest='defines',\n help='define an environment variable, in the form of '\n '-DNAME=VAL. May be used many times.',\n metavar='ITEM',\n nargs='*',\n default=None)\nparser.add_argument('--shell-type',\n help='What kind of shell should be used. '\n 'Possible options: readline, prompt_toolkit, random. '\n 'Warning! 
If set this overrides $SHELL_TYPE variable.',\n dest='shell_type',\n choices=('readline', 'prompt_toolkit', 'best', 'random'),\n default=None)\nparser.add_argument('file',\n metavar='script-file',\n help='If present, execute the script in script-file'\n ' and exit',\n nargs='?',\n default=None)\nparser.add_argument('args',\n metavar='args',\n help='Additional arguments to the script specified '\n 'by script-file',\n nargs='*',\n default=[])\n\n\ndef arg_undoers():\n au = {\n '-h': (lambda args: setattr(args, 'help', False)),\n '-V': (lambda args: setattr(args, 'version', False)),\n '-c': (lambda args: setattr(args, 'command', None)),\n '-i': (lambda args: setattr(args, 'force_interactive', False)),\n '-l': (lambda args: setattr(args, 'login', False)),\n '-c': (lambda args: setattr(args, 'command', None)),\n '--config-path': (lambda args: delattr(args, 'config_path')),\n '--no-rc': (lambda args: setattr(args, 'norc', False)),\n '-D': (lambda args: setattr(args, 'defines', None)),\n '--shell-type': (lambda args: setattr(args, 'shell_type', None)),\n }\n au['--help'] = au['-h']\n au['--version'] = au['-V']\n au['--interactive'] = au['-i']\n au['--login'] = au['-l']\n\n return au\n\ndef undo_args(args):\n \"\"\"Undoes missaligned args.\"\"\"\n au = arg_undoers()\n for a in args.args:\n if a in au:\n au[a](args)\n else:\n for k in au:\n if a.startswith(k):\n au[k](args)\n\ndef _pprint_displayhook(value):\n if value is None or isinstance(value, HiddenCompletedCommand):\n return\n builtins._ = None # Set '_' to None to avoid recursion\n if HAVE_PYGMENTS:\n s = pretty(value) # color case\n lexer = pyghooks.XonshLexer()\n tokens = list(pygments.lex(s, lexer=lexer))\n print_color(tokens)\n else:\n pprint(value) # black & white case\n builtins._ = value\n\nclass XonshMode(enum.Enum):\n single_command = 0\n script_from_file = 1\n script_from_stdin = 2\n interactive = 3\n\ndef premain(argv=None):\n \"\"\"Setup for main xonsh entry point, returns parsed arguments.\"\"\"\n if setproctitle is not None:\n setproctitle(' '.join(['xonsh'] + sys.argv[1:]))\n args, other = parser.parse_known_args(argv)\n if args.file is not None:\n real_argv = (argv or sys.argv)\n i = real_argv.index(args.file)\n args.args = real_argv[i+1:]\n undo_args(args)\n if args.help:\n parser.print_help()\n exit()\n if args.version:\n version = '/'.join(('xonsh', __version__)),\n print(version)\n exit()\n shell_kwargs = {'shell_type': args.shell_type,\n 'completer': False,\n 'login': False}\n if args.login:\n shell_kwargs['login'] = True\n if args.config_path is None:\n shell_kwargs['config'] = args.config_path\n if args.norc:\n shell_kwargs['rc'] = ()\n setattr(sys, 'displayhook', _pprint_displayhook)\n if args.command is not None:\n args.mode = XonshMode.single_command\n shell_kwargs['shell_type'] = 'none'\n elif args.file is not None:\n args.mode = XonshMode.script_from_file\n shell_kwargs['shell_type'] = 'none'\n elif not sys.stdin.isatty() and not args.force_interactive:\n args.mode = XonshMode.script_from_stdin\n shell_kwargs['shell_type'] = 'none'\n else:\n args.mode = XonshMode.interactive\n shell_kwargs['completer'] = True\n shell_kwargs['login'] = True\n shell = builtins.__xonsh_shell__ = Shell(**shell_kwargs)\n from xonsh import imphooks\n env = builtins.__xonsh_env__\n env['XONSH_LOGIN'] = shell_kwargs['login']\n if args.defines is not None:\n env.update([x.split('=', 1) for x in args.defines])\n env['XONSH_INTERACTIVE'] = False\n if ON_WINDOWS:\n setup_win_unicode_console(env.get('WIN_UNICODE_CONSOLE', True))\n return 
args\n\n\ndef main(argv=None):\n \"\"\"Main entry point for xonsh cli.\"\"\"\n args = premain(argv)\n env = builtins.__xonsh_env__\n shell = builtins.__xonsh_shell__\n if args.mode == XonshMode.single_command:\n # run a single command and exit\n shell.default(args.command)\n elif args.mode == XonshMode.script_from_file:\n # run a script contained in a file\n if os.path.isfile(args.file):\n with open(args.file) as f:\n code = f.read()\n code = code if code.endswith('\\n') else code + '\\n'\n sys.argv = args.args\n env['ARGS'] = [args.file] + args.args\n code = shell.execer.compile(code, mode='exec', glbs=shell.ctx,\n filename=args.file)\n shell.execer.exec(code, mode='exec', glbs=shell.ctx)\n else:\n print('xonsh: {0}: No such file or directory.'.format(args.file))\n elif args.mode == XonshMode.script_from_stdin:\n # run a script given on stdin\n code = sys.stdin.read()\n code = code if code.endswith('\\n') else code + '\\n'\n code = shell.execer.compile(code, mode='exec', glbs=shell.ctx,\n filename='<stdin>')\n shell.execer.exec(code, mode='exec', glbs=shell.ctx)\n else:\n # otherwise, enter the shell\n env['XONSH_INTERACTIVE'] = True\n ignore_sigtstp()\n if not env['LOADED_CONFIG'] and not any(env['LOADED_RC_FILES']):\n print('Could not find xonsh configuration or run control files.')\n code = '$[xonfig wizard --confirm]'\n shell.execer.exec(code, mode='single', glbs=shell.ctx)\n shell.cmdloop()\n postmain(args)\n\n\ndef postmain(args=None):\n \"\"\"Teardown for main xonsh entry point, accepts parsed arguments.\"\"\"\n if ON_WINDOWS:\n setup_win_unicode_console(enable=False)\n del builtins.__xonsh_shell__\n\n\n@contextmanager\ndef main_context(argv=None):\n \"\"\"Generator that runs pre- and post-main() functions. This has two iterations.\n The first yields the shell. The second returns None but cleans\n up the shell.\n \"\"\"\n args = premain(argv)\n yield builtins.__xonsh_shell__\n postmain(args)\n\n\n\nif __name__ == '__main__':\n main()\n", "path": "xonsh/main.py"}]} | 3,488 | 153 |
gh_patches_debug_12200 | rasdani/github-patches | git_diff | elastic__apm-agent-python-1822 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
urllib3 v2.0 compatibility
urllib3 v2.0 came out during PyCon US 2023 and is resulting in hanging tests on Windows and test failures on Linux. We need to update our code/tests to ensure compatibility with this new version.
</issue>
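Supporting both major releases usually means gating the version-specific code paths on the installed urllib3 version. A minimal sketch of that idea (assuming only that urllib3 exposes `__version__`; the helper name is illustrative, not part of the agent):

import urllib3

def urllib3_major() -> int:
    # "2.0.2" -> 2, "1.26.15" -> 1
    return int(urllib3.__version__.split(".")[0])

# Tests and instrumentation can then branch on the major version wherever
# urllib3 2.0 changed behaviour, instead of assuming the 1.x layout.
print("urllib3 major version:", urllib3_major())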
<code>
[start of elasticapm/instrumentation/packages/urllib3.py]
1 # BSD 3-Clause License
2 #
3 # Copyright (c) 2019, Elasticsearch BV
4 # All rights reserved.
5 #
6 # Redistribution and use in source and binary forms, with or without
7 # modification, are permitted provided that the following conditions are met:
8 #
9 # * Redistributions of source code must retain the above copyright notice, this
10 # list of conditions and the following disclaimer.
11 #
12 # * Redistributions in binary form must reproduce the above copyright notice,
13 # this list of conditions and the following disclaimer in the documentation
14 # and/or other materials provided with the distribution.
15 #
16 # * Neither the name of the copyright holder nor the names of its
17 # contributors may be used to endorse or promote products derived from
18 # this software without specific prior written permission.
19 #
20 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
31 import itertools
32
33 from elasticapm.conf import constants
34 from elasticapm.instrumentation.packages.base import AbstractInstrumentedModule
35 from elasticapm.traces import DroppedSpan, capture_span, execution_context
36 from elasticapm.utils import default_ports
37 from elasticapm.utils.disttracing import TracingOptions
38
39
40 def _set_disttracing_headers(headers, trace_parent, transaction):
41 trace_parent_str = trace_parent.to_string()
42 headers[constants.TRACEPARENT_HEADER_NAME] = trace_parent_str
43 if transaction.tracer.config.use_elastic_traceparent_header:
44 headers[constants.TRACEPARENT_LEGACY_HEADER_NAME] = trace_parent_str
45 if trace_parent.tracestate:
46 headers[constants.TRACESTATE_HEADER_NAME] = trace_parent.tracestate
47
48
49 def update_headers(args, kwargs, instance, transaction, trace_parent):
50 """
51 The headers might be in 3 different places: as 4th positional argument, as "headers" keyword argument,
52 or, if none of the former two are provided, as instance variable on the HTTPConnection object.
53
54 If the headers are in the positional arguments tuple, a new tuple with updated headers will be returned.
55 If they are in the keyword arguments or on the instance, an updated kwargs dict will be returned
56
57 :param args: list of positional arguments
58 :param kwargs: dict of keyword arguments
59 :param instance: the HTTPConnection instance
60 :param transaction: the Transaction object
61 :param trace_parent: the TraceParent object
62 :return: an (args, kwargs) tuple
63 """
64 if len(args) >= 4 and args[3]:
65 headers = args[3].copy()
66 args = tuple(itertools.chain((args[:3]), (headers,), args[4:]))
67 elif "headers" in kwargs and kwargs["headers"]:
68 headers = kwargs["headers"].copy()
69 kwargs["headers"] = headers
70 else:
71 headers = instance.headers.copy() if instance.headers else {}
72 # we don't want to change the instance headers, so we'll cheat and
73 # set the headers as keywords. This slightly changes how the wrapped
74 # method is called compared to uninstrumented code.
75 kwargs["headers"] = headers
76 _set_disttracing_headers(headers, trace_parent, transaction)
77 return args, kwargs
78
79
80 class Urllib3Instrumentation(AbstractInstrumentedModule):
81 name = "urllib3"
82
83 instrument_list = [
84 ("urllib3.connectionpool", "HTTPConnectionPool.urlopen"),
85 # packages that vendor or vendored urllib3 in the past
86 ("requests.packages.urllib3.connectionpool", "HTTPConnectionPool.urlopen"),
87 ("botocore.vendored.requests.packages.urllib3.connectionpool", "HTTPConnectionPool.urlopen"),
88 ]
89
90 def call(self, module, method, wrapped, instance, args, kwargs):
91 if "method" in kwargs:
92 method = kwargs["method"]
93 else:
94 method = args[0]
95
96 host = instance.host
97
98 if instance.port != default_ports.get(instance.scheme):
99 host += ":" + str(instance.port)
100
101 if "url" in kwargs:
102 url = kwargs["url"]
103 else:
104 url = args[1]
105
106 signature = method.upper() + " " + host
107
108 if url.startswith("/"):
109 url = "%s://%s%s" % (instance.scheme, host, url)
110
111 transaction = execution_context.get_transaction()
112
113 with capture_span(
114 signature,
115 span_type="external",
116 span_subtype="http",
117 extra={"http": {"url": url}},
118 leaf=True,
119 ) as span:
120 # if urllib3 has been called in a leaf span, this span might be a DroppedSpan.
121 leaf_span = span
122 while isinstance(leaf_span, DroppedSpan):
123 leaf_span = leaf_span.parent
124
125 parent_id = leaf_span.id if leaf_span else transaction.id
126 trace_parent = transaction.trace_parent.copy_from(
127 span_id=parent_id, trace_options=TracingOptions(recorded=True)
128 )
129 args, kwargs = update_headers(args, kwargs, instance, transaction, trace_parent)
130 if leaf_span:
131 leaf_span.dist_tracing_propagated = True
132 response = wrapped(*args, **kwargs)
133 if response:
134 if span.context:
135 span.context["http"]["status_code"] = response.status
136 span.set_success() if response.status < 400 else span.set_failure()
137 return response
138
139 def mutate_unsampled_call_args(self, module, method, wrapped, instance, args, kwargs, transaction):
140 # since we don't have a span, we set the span id to the transaction id
141 trace_parent = transaction.trace_parent.copy_from(
142 span_id=transaction.id, trace_options=TracingOptions(recorded=False)
143 )
144 return update_headers(args, kwargs, instance, transaction, trace_parent)
145
[end of elasticapm/instrumentation/packages/urllib3.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/elasticapm/instrumentation/packages/urllib3.py b/elasticapm/instrumentation/packages/urllib3.py
--- a/elasticapm/instrumentation/packages/urllib3.py
+++ b/elasticapm/instrumentation/packages/urllib3.py
@@ -61,7 +61,12 @@
:param trace_parent: the TraceParent object
:return: an (args, kwargs) tuple
"""
- if len(args) >= 4 and args[3]:
+ from urllib3._version import __version__ as urllib3_version
+
+ if urllib3_version.startswith("2") and len(args) >= 5 and args[4]:
+ headers = args[4].copy()
+ args = tuple(itertools.chain((args[:4]), (headers,), args[5:]))
+ elif len(args) >= 4 and args[3]:
headers = args[3].copy()
args = tuple(itertools.chain((args[:3]), (headers,), args[4:]))
elif "headers" in kwargs and kwargs["headers"]:
| {"golden_diff": "diff --git a/elasticapm/instrumentation/packages/urllib3.py b/elasticapm/instrumentation/packages/urllib3.py\n--- a/elasticapm/instrumentation/packages/urllib3.py\n+++ b/elasticapm/instrumentation/packages/urllib3.py\n@@ -61,7 +61,12 @@\n :param trace_parent: the TraceParent object\n :return: an (args, kwargs) tuple\n \"\"\"\n- if len(args) >= 4 and args[3]:\n+ from urllib3._version import __version__ as urllib3_version\n+\n+ if urllib3_version.startswith(\"2\") and len(args) >= 5 and args[4]:\n+ headers = args[4].copy()\n+ args = tuple(itertools.chain((args[:4]), (headers,), args[5:]))\n+ elif len(args) >= 4 and args[3]:\n headers = args[3].copy()\n args = tuple(itertools.chain((args[:3]), (headers,), args[4:]))\n elif \"headers\" in kwargs and kwargs[\"headers\"]:\n", "issue": "urllib3 v2.0 compatibility\nurllib3 v2.0 came out during PyConUS 2023 and is resulting in hanging tests on windows, and test failures on Linux. We need to update our code/tests to ensure compatibility for this new version.\n", "before_files": [{"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\nimport itertools\n\nfrom elasticapm.conf import constants\nfrom elasticapm.instrumentation.packages.base import AbstractInstrumentedModule\nfrom elasticapm.traces import DroppedSpan, capture_span, execution_context\nfrom elasticapm.utils import default_ports\nfrom elasticapm.utils.disttracing import TracingOptions\n\n\ndef _set_disttracing_headers(headers, trace_parent, transaction):\n trace_parent_str = trace_parent.to_string()\n headers[constants.TRACEPARENT_HEADER_NAME] = trace_parent_str\n if transaction.tracer.config.use_elastic_traceparent_header:\n headers[constants.TRACEPARENT_LEGACY_HEADER_NAME] = trace_parent_str\n if trace_parent.tracestate:\n headers[constants.TRACESTATE_HEADER_NAME] = trace_parent.tracestate\n\n\ndef update_headers(args, kwargs, instance, transaction, trace_parent):\n \"\"\"\n The headers might be in 3 different places: as 4th positional argument, as \"headers\" keyword argument,\n or, if none of the former two are provided, as instance variable on the HTTPConnection object.\n\n If the headers are in the positional arguments tuple, a new tuple with updated headers will be returned.\n If they are in the keyword arguments or on the instance, an updated kwargs dict will be returned\n\n :param args: list of positional arguments\n :param kwargs: dict of keyword arguments\n :param instance: the HTTPConnection instance\n :param transaction: the Transaction object\n :param trace_parent: the TraceParent object\n :return: an (args, kwargs) tuple\n \"\"\"\n if len(args) >= 4 and args[3]:\n headers = args[3].copy()\n args = tuple(itertools.chain((args[:3]), (headers,), args[4:]))\n elif \"headers\" in kwargs and kwargs[\"headers\"]:\n headers = kwargs[\"headers\"].copy()\n kwargs[\"headers\"] = headers\n else:\n headers = instance.headers.copy() if instance.headers else {}\n # we don't want to change the instance headers, so we'll cheat and\n # set the headers as keywords. 
This slightly changes how the wrapped\n # method is called compared to uninstrumented code.\n kwargs[\"headers\"] = headers\n _set_disttracing_headers(headers, trace_parent, transaction)\n return args, kwargs\n\n\nclass Urllib3Instrumentation(AbstractInstrumentedModule):\n name = \"urllib3\"\n\n instrument_list = [\n (\"urllib3.connectionpool\", \"HTTPConnectionPool.urlopen\"),\n # packages that vendor or vendored urllib3 in the past\n (\"requests.packages.urllib3.connectionpool\", \"HTTPConnectionPool.urlopen\"),\n (\"botocore.vendored.requests.packages.urllib3.connectionpool\", \"HTTPConnectionPool.urlopen\"),\n ]\n\n def call(self, module, method, wrapped, instance, args, kwargs):\n if \"method\" in kwargs:\n method = kwargs[\"method\"]\n else:\n method = args[0]\n\n host = instance.host\n\n if instance.port != default_ports.get(instance.scheme):\n host += \":\" + str(instance.port)\n\n if \"url\" in kwargs:\n url = kwargs[\"url\"]\n else:\n url = args[1]\n\n signature = method.upper() + \" \" + host\n\n if url.startswith(\"/\"):\n url = \"%s://%s%s\" % (instance.scheme, host, url)\n\n transaction = execution_context.get_transaction()\n\n with capture_span(\n signature,\n span_type=\"external\",\n span_subtype=\"http\",\n extra={\"http\": {\"url\": url}},\n leaf=True,\n ) as span:\n # if urllib3 has been called in a leaf span, this span might be a DroppedSpan.\n leaf_span = span\n while isinstance(leaf_span, DroppedSpan):\n leaf_span = leaf_span.parent\n\n parent_id = leaf_span.id if leaf_span else transaction.id\n trace_parent = transaction.trace_parent.copy_from(\n span_id=parent_id, trace_options=TracingOptions(recorded=True)\n )\n args, kwargs = update_headers(args, kwargs, instance, transaction, trace_parent)\n if leaf_span:\n leaf_span.dist_tracing_propagated = True\n response = wrapped(*args, **kwargs)\n if response:\n if span.context:\n span.context[\"http\"][\"status_code\"] = response.status\n span.set_success() if response.status < 400 else span.set_failure()\n return response\n\n def mutate_unsampled_call_args(self, module, method, wrapped, instance, args, kwargs, transaction):\n # since we don't have a span, we set the span id to the transaction id\n trace_parent = transaction.trace_parent.copy_from(\n span_id=transaction.id, trace_options=TracingOptions(recorded=False)\n )\n return update_headers(args, kwargs, instance, transaction, trace_parent)\n", "path": "elasticapm/instrumentation/packages/urllib3.py"}]} | 2,316 | 241 |
gh_patches_debug_14600 | rasdani/github-patches | git_diff | ipython__ipython-10277 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
typing should not be a dependency for python>=3.5
https://github.com/ipython/ipython/commit/3ff1be2ea8ef180a6f17a6a03a3f8452303b9abe added `typing` to `install_requires`. This package is built into Python >= 3.5, so it should be protected by an environment marker (just like `pathlib2`) in setup.py.
</issue>
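The setup.py in this repository already expresses conditional dependencies through PEP 508 environment markers in `extras_require` (see the `pathlib2` entry below), so the same convention can carry `typing`. A hedged sketch of the idea — the marker spelling and exact version bound here are illustrative, not the maintainers' final choice:

install_requires = [
    'setuptools>=18.5',
    'decorator',
    # ... unconditional dependencies only; no 'typing' here
]

extras_require = {
    # install the typing backport only on interpreters that lack it
    ':python_version < "3.5"': ['typing'],
    ':python_version == "3.3"': ['pathlib2'],
}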
<code>
[start of setup.py]
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 """Setup script for IPython.
4
5 Under Posix environments it works like a typical setup.py script.
6 Under Windows, the command sdist is not supported, since IPython
7 requires utilities which are not available under Windows."""
8
9 #-----------------------------------------------------------------------------
10 # Copyright (c) 2008-2011, IPython Development Team.
11 # Copyright (c) 2001-2007, Fernando Perez <[email protected]>
12 # Copyright (c) 2001, Janko Hauser <[email protected]>
13 # Copyright (c) 2001, Nathaniel Gray <[email protected]>
14 #
15 # Distributed under the terms of the Modified BSD License.
16 #
17 # The full license is in the file COPYING.rst, distributed with this software.
18 #-----------------------------------------------------------------------------
19
20 #-----------------------------------------------------------------------------
21 # Minimal Python version sanity check
22 #-----------------------------------------------------------------------------
23 from __future__ import print_function
24
25 import sys
26
27 # This check is also made in IPython/__init__, don't forget to update both when
28 # changing Python version requirements.
29 if sys.version_info < (3,3):
30 error = """
31 IPython 6.0+ does not support Python 2.6, 2.7, 3.0, 3.1, or 3.2.
32 When using Python 2.7, please install IPython 5.x LTS Long Term Support version.
33 Beginning with IPython 6.0, Python 3.3 and above is required.
34
35 See IPython `README.rst` file for more information:
36
37 https://github.com/ipython/ipython/blob/master/README.rst
38
39 Python {} detected.
40
41 """.format(sys.version_info)
42
43 print(error, file=sys.stderr)
44 sys.exit(1)
45
46 # At least we're on the python version we need, move on.
47
48 #-------------------------------------------------------------------------------
49 # Imports
50 #-------------------------------------------------------------------------------
51
52 # Stdlib imports
53 import os
54
55 from glob import glob
56
57 # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
58 # update it when the contents of directories change.
59 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
60
61 from distutils.core import setup
62
63 # Our own imports
64 from setupbase import target_update
65
66 from setupbase import (
67 setup_args,
68 find_packages,
69 find_package_data,
70 check_package_data_first,
71 find_entry_points,
72 build_scripts_entrypt,
73 find_data_files,
74 git_prebuild,
75 install_symlinked,
76 install_lib_symlink,
77 install_scripts_for_symlink,
78 unsymlink,
79 )
80
81 isfile = os.path.isfile
82 pjoin = os.path.join
83
84 #-------------------------------------------------------------------------------
85 # Handle OS specific things
86 #-------------------------------------------------------------------------------
87
88 if os.name in ('nt','dos'):
89 os_name = 'windows'
90 else:
91 os_name = os.name
92
93 # Under Windows, 'sdist' has not been supported. Now that the docs build with
94 # Sphinx it might work, but let's not turn it on until someone confirms that it
95 # actually works.
96 if os_name == 'windows' and 'sdist' in sys.argv:
97 print('The sdist command is not available under Windows. Exiting.')
98 sys.exit(1)
99
100
101 #-------------------------------------------------------------------------------
102 # Things related to the IPython documentation
103 #-------------------------------------------------------------------------------
104
105 # update the manuals when building a source dist
106 if len(sys.argv) >= 2 and sys.argv[1] in ('sdist','bdist_rpm'):
107
108 # List of things to be updated. Each entry is a triplet of args for
109 # target_update()
110 to_update = [
111 ('docs/man/ipython.1.gz',
112 ['docs/man/ipython.1'],
113 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz'),
114 ]
115
116
117 [ target_update(*t) for t in to_update ]
118
119 #---------------------------------------------------------------------------
120 # Find all the packages, package data, and data_files
121 #---------------------------------------------------------------------------
122
123 packages = find_packages()
124 package_data = find_package_data()
125
126 data_files = find_data_files()
127
128 setup_args['packages'] = packages
129 setup_args['package_data'] = package_data
130 setup_args['data_files'] = data_files
131
132 #---------------------------------------------------------------------------
133 # custom distutils commands
134 #---------------------------------------------------------------------------
135 # imports here, so they are after setuptools import if there was one
136 from distutils.command.sdist import sdist
137 from distutils.command.upload import upload
138
139 class UploadWindowsInstallers(upload):
140
141 description = "Upload Windows installers to PyPI (only used from tools/release_windows.py)"
142 user_options = upload.user_options + [
143 ('files=', 'f', 'exe file (or glob) to upload')
144 ]
145 def initialize_options(self):
146 upload.initialize_options(self)
147 meta = self.distribution.metadata
148 base = '{name}-{version}'.format(
149 name=meta.get_name(),
150 version=meta.get_version()
151 )
152 self.files = os.path.join('dist', '%s.*.exe' % base)
153
154 def run(self):
155 for dist_file in glob(self.files):
156 self.upload_file('bdist_wininst', 'any', dist_file)
157
158 setup_args['cmdclass'] = {
159 'build_py': \
160 check_package_data_first(git_prebuild('IPython')),
161 'sdist' : git_prebuild('IPython', sdist),
162 'upload_wininst' : UploadWindowsInstallers,
163 'symlink': install_symlinked,
164 'install_lib_symlink': install_lib_symlink,
165 'install_scripts_sym': install_scripts_for_symlink,
166 'unsymlink': unsymlink,
167 }
168
169
170 #---------------------------------------------------------------------------
171 # Handle scripts, dependencies, and setuptools specific things
172 #---------------------------------------------------------------------------
173
174 # For some commands, use setuptools. Note that we do NOT list install here!
175 # If you want a setuptools-enhanced install, just run 'setupegg.py install'
176 needs_setuptools = set(('develop', 'release', 'bdist_egg', 'bdist_rpm',
177 'bdist', 'bdist_dumb', 'bdist_wininst', 'bdist_wheel',
178 'egg_info', 'easy_install', 'upload', 'install_egg_info',
179 ))
180
181 if len(needs_setuptools.intersection(sys.argv)) > 0:
182 import setuptools
183
184 # This dict is used for passing extra arguments that are setuptools
185 # specific to setup
186 setuptools_extra_args = {}
187
188 # setuptools requirements
189
190 extras_require = dict(
191 parallel = ['ipyparallel'],
192 qtconsole = ['qtconsole'],
193 doc = ['Sphinx>=1.3'],
194 test = ['nose>=0.10.1', 'requests', 'testpath', 'pygments', 'nbformat', 'ipykernel'],
195 terminal = [],
196 kernel = ['ipykernel'],
197 nbformat = ['nbformat'],
198 notebook = ['notebook', 'ipywidgets'],
199 nbconvert = ['nbconvert'],
200 )
201
202 install_requires = [
203 'setuptools>=18.5',
204 'jedi>=0.10',
205 'typing',
206 'decorator',
207 'pickleshare',
208 'simplegeneric>0.8',
209 'traitlets>=4.2',
210 'prompt_toolkit>=1.0.4,<2.0.0',
211 'pygments',
212 ]
213
214 # Platform-specific dependencies:
215 # This is the correct way to specify these,
216 # but requires pip >= 6. pip < 6 ignores these.
217
218 extras_require.update({
219 'test:python_version >= "3.4"': ['numpy'],
220 ':python_version == "3.3"': ['pathlib2'],
221 ':sys_platform != "win32"': ['pexpect'],
222 ':sys_platform == "darwin"': ['appnope'],
223 ':sys_platform == "win32"': ['colorama'],
224 ':sys_platform == "win32" and python_version < "3.6"': ['win_unicode_console>=0.5'],
225 })
226 # FIXME: re-specify above platform dependencies for pip < 6
227 # These would result in non-portable bdists.
228 if not any(arg.startswith('bdist') for arg in sys.argv):
229 if sys.platform == 'darwin':
230 install_requires.extend(['appnope'])
231
232 if not sys.platform.startswith('win'):
233 install_requires.append('pexpect')
234
235 # workaround pypa/setuptools#147, where setuptools misspells
236 # platform_python_implementation as python_implementation
237 if 'setuptools' in sys.modules:
238 for key in list(extras_require):
239 if 'platform_python_implementation' in key:
240 new_key = key.replace('platform_python_implementation', 'python_implementation')
241 extras_require[new_key] = extras_require.pop(key)
242
243 everything = set()
244 for key, deps in extras_require.items():
245 if ':' not in key:
246 everything.update(deps)
247 extras_require['all'] = everything
248
249 if 'setuptools' in sys.modules:
250 setuptools_extra_args['python_requires'] = '>=3.3'
251 setuptools_extra_args['zip_safe'] = False
252 setuptools_extra_args['entry_points'] = {
253 'console_scripts': find_entry_points(),
254 'pygments.lexers': [
255 'ipythonconsole = IPython.lib.lexers:IPythonConsoleLexer',
256 'ipython = IPython.lib.lexers:IPythonLexer',
257 'ipython3 = IPython.lib.lexers:IPython3Lexer',
258 ],
259 }
260 setup_args['extras_require'] = extras_require
261 requires = setup_args['install_requires'] = install_requires
262
263 # Script to be run by the windows binary installer after the default setup
264 # routine, to add shortcuts and similar windows-only things. Windows
265 # post-install scripts MUST reside in the scripts/ dir, otherwise distutils
266 # doesn't find them.
267 if 'bdist_wininst' in sys.argv:
268 if len(sys.argv) > 2 and \
269 ('sdist' in sys.argv or 'bdist_rpm' in sys.argv):
270 print("ERROR: bdist_wininst must be run alone. Exiting.", file=sys.stderr)
271 sys.exit(1)
272 setup_args['data_files'].append(
273 ['Scripts', ('scripts/ipython.ico', 'scripts/ipython_nb.ico')])
274 setup_args['scripts'] = [pjoin('scripts','ipython_win_post_install.py')]
275 setup_args['options'] = {"bdist_wininst":
276 {"install_script":
277 "ipython_win_post_install.py"}}
278
279 else:
280 # scripts has to be a non-empty list, or install_scripts isn't called
281 setup_args['scripts'] = [e.split('=')[0].strip() for e in find_entry_points()]
282
283 setup_args['cmdclass']['build_scripts'] = build_scripts_entrypt
284
285 #---------------------------------------------------------------------------
286 # Do the actual setup now
287 #---------------------------------------------------------------------------
288
289 setup_args.update(setuptools_extra_args)
290
291
292
293 def main():
294 setup(**setup_args)
295
296 if __name__ == '__main__':
297 main()
298
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -202,7 +202,6 @@
install_requires = [
'setuptools>=18.5',
'jedi>=0.10',
- 'typing',
'decorator',
'pickleshare',
'simplegeneric>0.8',
@@ -218,6 +217,7 @@
extras_require.update({
'test:python_version >= "3.4"': ['numpy'],
':python_version == "3.3"': ['pathlib2'],
+ ':python_version <= "3.4"': ['typing'],
':sys_platform != "win32"': ['pexpect'],
':sys_platform == "darwin"': ['appnope'],
':sys_platform == "win32"': ['colorama'],
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -202,7 +202,6 @@\n install_requires = [\n 'setuptools>=18.5',\n 'jedi>=0.10',\n- 'typing',\n 'decorator',\n 'pickleshare',\n 'simplegeneric>0.8',\n@@ -218,6 +217,7 @@\n extras_require.update({\n 'test:python_version >= \"3.4\"': ['numpy'],\n ':python_version == \"3.3\"': ['pathlib2'],\n+ ':python_version <= \"3.4\"': ['typing'],\n ':sys_platform != \"win32\"': ['pexpect'],\n ':sys_platform == \"darwin\"': ['appnope'],\n ':sys_platform == \"win32\"': ['colorama'],\n", "issue": "typing should not be a dependency for python>=3.5\nhttps://github.com/ipython/ipython/commit/3ff1be2ea8ef180a6f17a6a03a3f8452303b9abe added `typing` as an `install_requires`. This package is builtin for python>=3.5, so it should be protected by an environment marker (just like `pathlib2`) in setup.py.\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\"\"\"Setup script for IPython.\n\nUnder Posix environments it works like a typical setup.py script.\nUnder Windows, the command sdist is not supported, since IPython\nrequires utilities which are not available under Windows.\"\"\"\n\n#-----------------------------------------------------------------------------\n# Copyright (c) 2008-2011, IPython Development Team.\n# Copyright (c) 2001-2007, Fernando Perez <[email protected]>\n# Copyright (c) 2001, Janko Hauser <[email protected]>\n# Copyright (c) 2001, Nathaniel Gray <[email protected]>\n#\n# Distributed under the terms of the Modified BSD License.\n#\n# The full license is in the file COPYING.rst, distributed with this software.\n#-----------------------------------------------------------------------------\n\n#-----------------------------------------------------------------------------\n# Minimal Python version sanity check\n#-----------------------------------------------------------------------------\nfrom __future__ import print_function\n\nimport sys\n\n# This check is also made in IPython/__init__, don't forget to update both when\n# changing Python version requirements.\nif sys.version_info < (3,3):\n error = \"\"\"\nIPython 6.0+ does not support Python 2.6, 2.7, 3.0, 3.1, or 3.2.\nWhen using Python 2.7, please install IPython 5.x LTS Long Term Support version.\nBeginning with IPython 6.0, Python 3.3 and above is required.\n\nSee IPython `README.rst` file for more information:\n\n https://github.com/ipython/ipython/blob/master/README.rst\n\nPython {} detected.\n\n\"\"\".format(sys.version_info)\n\n print(error, file=sys.stderr)\n sys.exit(1)\n\n# At least we're on the python version we need, move on.\n\n#-------------------------------------------------------------------------------\n# Imports\n#-------------------------------------------------------------------------------\n\n# Stdlib imports\nimport os\n\nfrom glob import glob\n\n# BEFORE importing distutils, remove MANIFEST. 
distutils doesn't properly\n# update it when the contents of directories change.\nif os.path.exists('MANIFEST'): os.remove('MANIFEST')\n\nfrom distutils.core import setup\n\n# Our own imports\nfrom setupbase import target_update\n\nfrom setupbase import (\n setup_args,\n find_packages,\n find_package_data,\n check_package_data_first,\n find_entry_points,\n build_scripts_entrypt,\n find_data_files,\n git_prebuild,\n install_symlinked,\n install_lib_symlink,\n install_scripts_for_symlink,\n unsymlink,\n)\n\nisfile = os.path.isfile\npjoin = os.path.join\n\n#-------------------------------------------------------------------------------\n# Handle OS specific things\n#-------------------------------------------------------------------------------\n\nif os.name in ('nt','dos'):\n os_name = 'windows'\nelse:\n os_name = os.name\n\n# Under Windows, 'sdist' has not been supported. Now that the docs build with\n# Sphinx it might work, but let's not turn it on until someone confirms that it\n# actually works.\nif os_name == 'windows' and 'sdist' in sys.argv:\n print('The sdist command is not available under Windows. Exiting.')\n sys.exit(1)\n\n\n#-------------------------------------------------------------------------------\n# Things related to the IPython documentation\n#-------------------------------------------------------------------------------\n\n# update the manuals when building a source dist\nif len(sys.argv) >= 2 and sys.argv[1] in ('sdist','bdist_rpm'):\n\n # List of things to be updated. Each entry is a triplet of args for\n # target_update()\n to_update = [\n ('docs/man/ipython.1.gz',\n ['docs/man/ipython.1'],\n 'cd docs/man && gzip -9c ipython.1 > ipython.1.gz'),\n ]\n\n\n [ target_update(*t) for t in to_update ]\n\n#---------------------------------------------------------------------------\n# Find all the packages, package data, and data_files\n#---------------------------------------------------------------------------\n\npackages = find_packages()\npackage_data = find_package_data()\n\ndata_files = find_data_files()\n\nsetup_args['packages'] = packages\nsetup_args['package_data'] = package_data\nsetup_args['data_files'] = data_files\n\n#---------------------------------------------------------------------------\n# custom distutils commands\n#---------------------------------------------------------------------------\n# imports here, so they are after setuptools import if there was one\nfrom distutils.command.sdist import sdist\nfrom distutils.command.upload import upload\n\nclass UploadWindowsInstallers(upload):\n\n description = \"Upload Windows installers to PyPI (only used from tools/release_windows.py)\"\n user_options = upload.user_options + [\n ('files=', 'f', 'exe file (or glob) to upload')\n ]\n def initialize_options(self):\n upload.initialize_options(self)\n meta = self.distribution.metadata\n base = '{name}-{version}'.format(\n name=meta.get_name(),\n version=meta.get_version()\n )\n self.files = os.path.join('dist', '%s.*.exe' % base)\n\n def run(self):\n for dist_file in glob(self.files):\n self.upload_file('bdist_wininst', 'any', dist_file)\n\nsetup_args['cmdclass'] = {\n 'build_py': \\\n check_package_data_first(git_prebuild('IPython')),\n 'sdist' : git_prebuild('IPython', sdist),\n 'upload_wininst' : UploadWindowsInstallers,\n 'symlink': install_symlinked,\n 'install_lib_symlink': install_lib_symlink,\n 'install_scripts_sym': install_scripts_for_symlink,\n 'unsymlink': unsymlink,\n}\n\n\n#---------------------------------------------------------------------------\n# 
Handle scripts, dependencies, and setuptools specific things\n#---------------------------------------------------------------------------\n\n# For some commands, use setuptools. Note that we do NOT list install here!\n# If you want a setuptools-enhanced install, just run 'setupegg.py install'\nneeds_setuptools = set(('develop', 'release', 'bdist_egg', 'bdist_rpm',\n 'bdist', 'bdist_dumb', 'bdist_wininst', 'bdist_wheel',\n 'egg_info', 'easy_install', 'upload', 'install_egg_info',\n ))\n\nif len(needs_setuptools.intersection(sys.argv)) > 0:\n import setuptools\n\n# This dict is used for passing extra arguments that are setuptools\n# specific to setup\nsetuptools_extra_args = {}\n\n# setuptools requirements\n\nextras_require = dict(\n parallel = ['ipyparallel'],\n qtconsole = ['qtconsole'],\n doc = ['Sphinx>=1.3'],\n test = ['nose>=0.10.1', 'requests', 'testpath', 'pygments', 'nbformat', 'ipykernel'],\n terminal = [],\n kernel = ['ipykernel'],\n nbformat = ['nbformat'],\n notebook = ['notebook', 'ipywidgets'],\n nbconvert = ['nbconvert'],\n)\n\ninstall_requires = [\n 'setuptools>=18.5',\n 'jedi>=0.10',\n 'typing',\n 'decorator',\n 'pickleshare',\n 'simplegeneric>0.8',\n 'traitlets>=4.2',\n 'prompt_toolkit>=1.0.4,<2.0.0',\n 'pygments',\n]\n\n# Platform-specific dependencies:\n# This is the correct way to specify these,\n# but requires pip >= 6. pip < 6 ignores these.\n\nextras_require.update({\n 'test:python_version >= \"3.4\"': ['numpy'],\n ':python_version == \"3.3\"': ['pathlib2'],\n ':sys_platform != \"win32\"': ['pexpect'],\n ':sys_platform == \"darwin\"': ['appnope'],\n ':sys_platform == \"win32\"': ['colorama'],\n ':sys_platform == \"win32\" and python_version < \"3.6\"': ['win_unicode_console>=0.5'],\n})\n# FIXME: re-specify above platform dependencies for pip < 6\n# These would result in non-portable bdists.\nif not any(arg.startswith('bdist') for arg in sys.argv):\n if sys.platform == 'darwin':\n install_requires.extend(['appnope'])\n\n if not sys.platform.startswith('win'):\n install_requires.append('pexpect')\n\n # workaround pypa/setuptools#147, where setuptools misspells\n # platform_python_implementation as python_implementation\n if 'setuptools' in sys.modules:\n for key in list(extras_require):\n if 'platform_python_implementation' in key:\n new_key = key.replace('platform_python_implementation', 'python_implementation')\n extras_require[new_key] = extras_require.pop(key)\n\neverything = set()\nfor key, deps in extras_require.items():\n if ':' not in key:\n everything.update(deps)\nextras_require['all'] = everything\n\nif 'setuptools' in sys.modules:\n setuptools_extra_args['python_requires'] = '>=3.3'\n setuptools_extra_args['zip_safe'] = False\n setuptools_extra_args['entry_points'] = {\n 'console_scripts': find_entry_points(),\n 'pygments.lexers': [\n 'ipythonconsole = IPython.lib.lexers:IPythonConsoleLexer',\n 'ipython = IPython.lib.lexers:IPythonLexer',\n 'ipython3 = IPython.lib.lexers:IPython3Lexer',\n ],\n }\n setup_args['extras_require'] = extras_require\n requires = setup_args['install_requires'] = install_requires\n\n # Script to be run by the windows binary installer after the default setup\n # routine, to add shortcuts and similar windows-only things. Windows\n # post-install scripts MUST reside in the scripts/ dir, otherwise distutils\n # doesn't find them.\n if 'bdist_wininst' in sys.argv:\n if len(sys.argv) > 2 and \\\n ('sdist' in sys.argv or 'bdist_rpm' in sys.argv):\n print(\"ERROR: bdist_wininst must be run alone. 
Exiting.\", file=sys.stderr)\n sys.exit(1)\n setup_args['data_files'].append(\n ['Scripts', ('scripts/ipython.ico', 'scripts/ipython_nb.ico')])\n setup_args['scripts'] = [pjoin('scripts','ipython_win_post_install.py')]\n setup_args['options'] = {\"bdist_wininst\":\n {\"install_script\":\n \"ipython_win_post_install.py\"}}\n\nelse:\n # scripts has to be a non-empty list, or install_scripts isn't called\n setup_args['scripts'] = [e.split('=')[0].strip() for e in find_entry_points()]\n\n setup_args['cmdclass']['build_scripts'] = build_scripts_entrypt\n\n#---------------------------------------------------------------------------\n# Do the actual setup now\n#---------------------------------------------------------------------------\n\nsetup_args.update(setuptools_extra_args)\n\n\n\ndef main():\n setup(**setup_args)\n\nif __name__ == '__main__':\n main()\n", "path": "setup.py"}]} | 3,794 | 192 |
gh_patches_debug_17226 | rasdani/github-patches | git_diff | openshift__openshift-ansible-5098 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Error: unknown command "drain" for "oc" in « Drain Node for Kubelet upgrade » action
The « [Drain Node for Kubelet upgrade](https://github.com/openshift/openshift-ansible/blob/release-3.6/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml#L27) » action executes `oadm drain ...`, but this command doesn't exist:
```
# oadm drain
Error: unknown command "drain" for "oc"
Run 'oc --help' for usage.
```
with this version:
```
# oc version
oc v3.6.0+c4dd4cf
kubernetes v1.6.1+5115d708d7
features: Basic-Auth GSSAPI Kerberos SPNEGO
```
To fix it, I need to add `adm` like this `oadm adm drain ...`
```
diff --git a/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml b/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
index c93a5d8..a21fb7f 100644
--- a/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
+++ b/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml
@@ -26,7 +26,7 @@
- name: Drain Node for Kubelet upgrade
command: >
- {{ hostvars[groups.oo_first_master.0].openshift.common.admin_binary }} drain {{ openshift.node.nodename | lower }} --config={{ openshift.common.config_base }}/master/admin.kubeconfig --force --delete-local-data --ignore-daemonsets
+ {{ hostvars[groups.oo_first_master.0].openshift.common.client_binary }} adm drain {{ openshift.node.nodename | lower }} --config={{ openshift.common.config_base }}/master/admin.kubeconfig --force --delete-local-data --ignore-daemonsets
delegate_to: "{{ groups.oo_first_master.0 }}"
register: l_upgrade_nodes_drain_result
until: not l_upgrade_nodes_drain_result | failed
```
</issue>
<code>
[start of roles/openshift_cli/library/openshift_container_binary_sync.py]
1 #!/usr/bin/python
2 # -*- coding: utf-8 -*-
3 # pylint: disable=missing-docstring,invalid-name
4
5 import random
6 import tempfile
7 import shutil
8 import os.path
9
10 # pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import
11 from ansible.module_utils.basic import * # noqa: F403
12
13
14 DOCUMENTATION = '''
15 ---
16 module: openshift_container_binary_sync
17 short_description: Copies OpenShift binaries out of the given image tag to host system.
18 '''
19
20
21 class BinarySyncError(Exception):
22 def __init__(self, msg):
23 super(BinarySyncError, self).__init__(msg)
24 self.msg = msg
25
26
27 # pylint: disable=too-few-public-methods,too-many-instance-attributes
28 class BinarySyncer(object):
29 """
30 Syncs the openshift, oc, oadm, and kubectl binaries/symlinks out of
31 a container onto the host system.
32 """
33
34 def __init__(self, module, image, tag, backend):
35 self.module = module
36 self.changed = False
37 self.output = []
38 self.bin_dir = '/usr/local/bin'
39 self.image = image
40 self.tag = tag
41 self.backend = backend
42 self.temp_dir = None # TBD
43
44 def sync(self):
45 if self.backend == 'atomic':
46 return self._sync_atomic()
47
48 return self._sync_docker()
49
50 def _sync_atomic(self):
51 self.temp_dir = tempfile.mkdtemp()
52 temp_dir_mount = tempfile.mkdtemp()
53 try:
54 image_spec = '%s:%s' % (self.image, self.tag)
55 rc, stdout, stderr = self.module.run_command(['atomic', 'mount',
56 '--storage', "ostree",
57 image_spec, temp_dir_mount])
58 if rc:
59 raise BinarySyncError("Error mounting image. stdout=%s, stderr=%s" %
60 (stdout, stderr))
61 for i in ["openshift", "oc"]:
62 src_file = os.path.join(temp_dir_mount, "usr/bin", i)
63 shutil.copy(src_file, self.temp_dir)
64
65 self._sync_binaries()
66 finally:
67 self.module.run_command(['atomic', 'umount', temp_dir_mount])
68 shutil.rmtree(temp_dir_mount)
69 shutil.rmtree(self.temp_dir)
70
71 def _sync_docker(self):
72 container_name = "openshift-cli-%s" % random.randint(1, 100000)
73 rc, stdout, stderr = self.module.run_command(['docker', 'create', '--name',
74 container_name, '%s:%s' % (self.image, self.tag)])
75 if rc:
76 raise BinarySyncError("Error creating temporary docker container. stdout=%s, stderr=%s" %
77 (stdout, stderr))
78 self.output.append(stdout)
79 try:
80 self.temp_dir = tempfile.mkdtemp()
81 self.output.append("Using temp dir: %s" % self.temp_dir)
82
83 rc, stdout, stderr = self.module.run_command(['docker', 'cp', "%s:/usr/bin/openshift" % container_name,
84 self.temp_dir])
85 if rc:
86 raise BinarySyncError("Error copying file from docker container: stdout=%s, stderr=%s" %
87 (stdout, stderr))
88
89 rc, stdout, stderr = self.module.run_command(['docker', 'cp', "%s:/usr/bin/oc" % container_name,
90 self.temp_dir])
91 if rc:
92 raise BinarySyncError("Error copying file from docker container: stdout=%s, stderr=%s" %
93 (stdout, stderr))
94
95 self._sync_binaries()
96 finally:
97 shutil.rmtree(self.temp_dir)
98 self.module.run_command(['docker', 'rm', container_name])
99
100 def _sync_binaries(self):
101 self._sync_binary('openshift')
102
103 # In older versions, oc was a symlink to openshift:
104 if os.path.islink(os.path.join(self.temp_dir, 'oc')):
105 self._sync_symlink('oc', 'openshift')
106 else:
107 self._sync_binary('oc')
108
109 # Ensure correct symlinks created:
110 self._sync_symlink('kubectl', 'openshift')
111 self._sync_symlink('oadm', 'openshift')
112
113 def _sync_symlink(self, binary_name, link_to):
114 """ Ensure the given binary name exists and links to the expected binary. """
115
116 # The symlink we are creating:
117 link_path = os.path.join(self.bin_dir, binary_name)
118
119 # The expected file we should be linking to:
120 link_dest = os.path.join(self.bin_dir, link_to)
121
122 if not os.path.exists(link_path) or \
123 not os.path.islink(link_path) or \
124 os.path.realpath(link_path) != os.path.realpath(link_dest):
125 if os.path.exists(link_path):
126 os.remove(link_path)
127 os.symlink(link_to, os.path.join(self.bin_dir, binary_name))
128 self.output.append("Symlinked %s to %s." % (link_path, link_dest))
129 self.changed = True
130
131 def _sync_binary(self, binary_name):
132 src_path = os.path.join(self.temp_dir, binary_name)
133 dest_path = os.path.join(self.bin_dir, binary_name)
134 incoming_checksum = self.module.run_command(['sha256sum', src_path])[1]
135 if not os.path.exists(dest_path) or self.module.run_command(['sha256sum', dest_path])[1] != incoming_checksum:
136 shutil.move(src_path, dest_path)
137 self.output.append("Moved %s to %s." % (src_path, dest_path))
138 self.changed = True
139
140
141 def main():
142 module = AnsibleModule( # noqa: F405
143 argument_spec=dict(
144 image=dict(required=True),
145 tag=dict(required=True),
146 backend=dict(required=True),
147 ),
148 supports_check_mode=True
149 )
150
151 image = module.params['image']
152 tag = module.params['tag']
153 backend = module.params['backend']
154
155 if backend not in ["docker", "atomic"]:
156 module.fail_json(msg="unknown backend")
157
158 binary_syncer = BinarySyncer(module, image, tag, backend)
159
160 try:
161 binary_syncer.sync()
162 except BinarySyncError as ex:
163 module.fail_json(msg=ex.msg)
164
165 return module.exit_json(changed=binary_syncer.changed,
166 output=binary_syncer.output)
167
168
169 if __name__ == '__main__':
170 main()
171
[end of roles/openshift_cli/library/openshift_container_binary_sync.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/roles/openshift_cli/library/openshift_container_binary_sync.py b/roles/openshift_cli/library/openshift_container_binary_sync.py
--- a/roles/openshift_cli/library/openshift_container_binary_sync.py
+++ b/roles/openshift_cli/library/openshift_container_binary_sync.py
@@ -133,6 +133,11 @@
dest_path = os.path.join(self.bin_dir, binary_name)
incoming_checksum = self.module.run_command(['sha256sum', src_path])[1]
if not os.path.exists(dest_path) or self.module.run_command(['sha256sum', dest_path])[1] != incoming_checksum:
+
+ # See: https://github.com/openshift/openshift-ansible/issues/4965
+ if os.path.islink(dest_path):
+ os.unlink(dest_path)
+ self.output.append('Removed old symlink {} before copying binary.'.format(dest_path))
shutil.move(src_path, dest_path)
self.output.append("Moved %s to %s." % (src_path, dest_path))
self.changed = True
| {"golden_diff": "diff --git a/roles/openshift_cli/library/openshift_container_binary_sync.py b/roles/openshift_cli/library/openshift_container_binary_sync.py\n--- a/roles/openshift_cli/library/openshift_container_binary_sync.py\n+++ b/roles/openshift_cli/library/openshift_container_binary_sync.py\n@@ -133,6 +133,11 @@\n dest_path = os.path.join(self.bin_dir, binary_name)\n incoming_checksum = self.module.run_command(['sha256sum', src_path])[1]\n if not os.path.exists(dest_path) or self.module.run_command(['sha256sum', dest_path])[1] != incoming_checksum:\n+\n+ # See: https://github.com/openshift/openshift-ansible/issues/4965\n+ if os.path.islink(dest_path):\n+ os.unlink(dest_path)\n+ self.output.append('Removed old symlink {} before copying binary.'.format(dest_path))\n shutil.move(src_path, dest_path)\n self.output.append(\"Moved %s to %s.\" % (src_path, dest_path))\n self.changed = True\n", "issue": "Error: unknown command \"drain\" for \"oc\" in \u00ab Drain Node for Kubelet upgrade \u00bb action\n\u00ab [Drain Node for Kubelet upgrade](https://github.com/openshift/openshift-ansible/blob/release-3.6/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml#L27) \u00bb action execute: `oadm drain ...` but this command don't exists:\r\n\r\n```\r\n# oadm drain\r\nError: unknown command \"drain\" for \"oc\"\r\nRun 'oc --help' for usage.\r\n```\r\n\r\nwith this version:\r\n\r\n```\r\n# oc version\r\noc v3.6.0+c4dd4cf\r\nkubernetes v1.6.1+5115d708d7\r\nfeatures: Basic-Auth GSSAPI Kerberos SPNEGO\r\n```\r\n\r\nTo fix it, I need to add `adm` like this `oadm adm drain ...`\r\n\r\n```\r\ndiff --git a/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml b/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml\r\nindex c93a5d8..a21fb7f 100644\r\n--- a/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml\r\n+++ b/playbooks/common/openshift-cluster/upgrades/upgrade_nodes.yml\r\n@@ -26,7 +26,7 @@\r\n\r\n - name: Drain Node for Kubelet upgrade\r\n command: >\r\n- {{ hostvars[groups.oo_first_master.0].openshift.common.admin_binary }} drain {{ openshift.node.nodename | lower }} --config={{ openshift.common.config_base }}/master/admin.kubeconfig --force --delete-local-data --ignore-daemonsets\r\n+ {{ hostvars[groups.oo_first_master.0].openshift.common.client_binary }} adm drain {{ openshift.node.nodename | lower }} --config={{ openshift.common.config_base }}/master/admin.kubeconfig --force --delete-local-data --ignore-daemonsets\r\n delegate_to: \"{{ groups.oo_first_master.0 }}\"\r\n register: l_upgrade_nodes_drain_result\r\n until: not l_upgrade_nodes_drain_result | failed\r\n```\n", "before_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n# pylint: disable=missing-docstring,invalid-name\n\nimport random\nimport tempfile\nimport shutil\nimport os.path\n\n# pylint: disable=redefined-builtin,wildcard-import,unused-wildcard-import\nfrom ansible.module_utils.basic import * # noqa: F403\n\n\nDOCUMENTATION = '''\n---\nmodule: openshift_container_binary_sync\nshort_description: Copies OpenShift binaries out of the given image tag to host system.\n'''\n\n\nclass BinarySyncError(Exception):\n def __init__(self, msg):\n super(BinarySyncError, self).__init__(msg)\n self.msg = msg\n\n\n# pylint: disable=too-few-public-methods,too-many-instance-attributes\nclass BinarySyncer(object):\n \"\"\"\n Syncs the openshift, oc, oadm, and kubectl binaries/symlinks out of\n a container onto the host system.\n \"\"\"\n\n def __init__(self, module, image, tag, backend):\n 
self.module = module\n self.changed = False\n self.output = []\n self.bin_dir = '/usr/local/bin'\n self.image = image\n self.tag = tag\n self.backend = backend\n self.temp_dir = None # TBD\n\n def sync(self):\n if self.backend == 'atomic':\n return self._sync_atomic()\n\n return self._sync_docker()\n\n def _sync_atomic(self):\n self.temp_dir = tempfile.mkdtemp()\n temp_dir_mount = tempfile.mkdtemp()\n try:\n image_spec = '%s:%s' % (self.image, self.tag)\n rc, stdout, stderr = self.module.run_command(['atomic', 'mount',\n '--storage', \"ostree\",\n image_spec, temp_dir_mount])\n if rc:\n raise BinarySyncError(\"Error mounting image. stdout=%s, stderr=%s\" %\n (stdout, stderr))\n for i in [\"openshift\", \"oc\"]:\n src_file = os.path.join(temp_dir_mount, \"usr/bin\", i)\n shutil.copy(src_file, self.temp_dir)\n\n self._sync_binaries()\n finally:\n self.module.run_command(['atomic', 'umount', temp_dir_mount])\n shutil.rmtree(temp_dir_mount)\n shutil.rmtree(self.temp_dir)\n\n def _sync_docker(self):\n container_name = \"openshift-cli-%s\" % random.randint(1, 100000)\n rc, stdout, stderr = self.module.run_command(['docker', 'create', '--name',\n container_name, '%s:%s' % (self.image, self.tag)])\n if rc:\n raise BinarySyncError(\"Error creating temporary docker container. stdout=%s, stderr=%s\" %\n (stdout, stderr))\n self.output.append(stdout)\n try:\n self.temp_dir = tempfile.mkdtemp()\n self.output.append(\"Using temp dir: %s\" % self.temp_dir)\n\n rc, stdout, stderr = self.module.run_command(['docker', 'cp', \"%s:/usr/bin/openshift\" % container_name,\n self.temp_dir])\n if rc:\n raise BinarySyncError(\"Error copying file from docker container: stdout=%s, stderr=%s\" %\n (stdout, stderr))\n\n rc, stdout, stderr = self.module.run_command(['docker', 'cp', \"%s:/usr/bin/oc\" % container_name,\n self.temp_dir])\n if rc:\n raise BinarySyncError(\"Error copying file from docker container: stdout=%s, stderr=%s\" %\n (stdout, stderr))\n\n self._sync_binaries()\n finally:\n shutil.rmtree(self.temp_dir)\n self.module.run_command(['docker', 'rm', container_name])\n\n def _sync_binaries(self):\n self._sync_binary('openshift')\n\n # In older versions, oc was a symlink to openshift:\n if os.path.islink(os.path.join(self.temp_dir, 'oc')):\n self._sync_symlink('oc', 'openshift')\n else:\n self._sync_binary('oc')\n\n # Ensure correct symlinks created:\n self._sync_symlink('kubectl', 'openshift')\n self._sync_symlink('oadm', 'openshift')\n\n def _sync_symlink(self, binary_name, link_to):\n \"\"\" Ensure the given binary name exists and links to the expected binary. 
\"\"\"\n\n # The symlink we are creating:\n link_path = os.path.join(self.bin_dir, binary_name)\n\n # The expected file we should be linking to:\n link_dest = os.path.join(self.bin_dir, link_to)\n\n if not os.path.exists(link_path) or \\\n not os.path.islink(link_path) or \\\n os.path.realpath(link_path) != os.path.realpath(link_dest):\n if os.path.exists(link_path):\n os.remove(link_path)\n os.symlink(link_to, os.path.join(self.bin_dir, binary_name))\n self.output.append(\"Symlinked %s to %s.\" % (link_path, link_dest))\n self.changed = True\n\n def _sync_binary(self, binary_name):\n src_path = os.path.join(self.temp_dir, binary_name)\n dest_path = os.path.join(self.bin_dir, binary_name)\n incoming_checksum = self.module.run_command(['sha256sum', src_path])[1]\n if not os.path.exists(dest_path) or self.module.run_command(['sha256sum', dest_path])[1] != incoming_checksum:\n shutil.move(src_path, dest_path)\n self.output.append(\"Moved %s to %s.\" % (src_path, dest_path))\n self.changed = True\n\n\ndef main():\n module = AnsibleModule( # noqa: F405\n argument_spec=dict(\n image=dict(required=True),\n tag=dict(required=True),\n backend=dict(required=True),\n ),\n supports_check_mode=True\n )\n\n image = module.params['image']\n tag = module.params['tag']\n backend = module.params['backend']\n\n if backend not in [\"docker\", \"atomic\"]:\n module.fail_json(msg=\"unknown backend\")\n\n binary_syncer = BinarySyncer(module, image, tag, backend)\n\n try:\n binary_syncer.sync()\n except BinarySyncError as ex:\n module.fail_json(msg=ex.msg)\n\n return module.exit_json(changed=binary_syncer.changed,\n output=binary_syncer.output)\n\n\nif __name__ == '__main__':\n main()\n", "path": "roles/openshift_cli/library/openshift_container_binary_sync.py"}]} | 2,816 | 242 |
gh_patches_debug_4413 | rasdani/github-patches | git_diff | mlflow__mlflow-11426 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[DOC-FIX] Add JFrog MLFlow plugin in order to store your artifacts on a preferred JFrog repository
### Willingness to contribute
Yes. I can contribute a documentation fix independently.
### URL(s) with the issue
https://mlflow.org/docs/latest/plugins.html#artifact-repository-plugins
### Description of proposal (what needs changing)
Include details about the [mlflow-jfrog-plugin](https://pypi.org/project/mlflow-jfrog-plugin), and also provide simplified installation instructions, such as `pip install mlflow[jfrog]`
</issue>
<code>
[start of dev/pyproject.py]
1 from __future__ import annotations
2
3 import re
4 import shutil
5 import subprocess
6 from pathlib import Path
7
8 import toml
9
10 SEPARATOR = """
11 # Package metadata: can't be updated manually, use dev/pyproject.py
12 # -----------------------------------------------------------------
13 # Dev tool settings: can be updated manually
14
15 """
16
17
18 def read_requirements(path: Path) -> list[str]:
19 lines = (l.strip() for l in path.read_text().splitlines())
20 return [l for l in lines if l and not l.startswith("#")]
21
22
23 def build(skinny: bool) -> None:
24 skinny_requirements = read_requirements(Path("requirements", "skinny-requirements.txt"))
25 core_requirements = read_requirements(Path("requirements", "core-requirements.txt"))
26 gateways_requirements = read_requirements(Path("requirements", "gateway-requirements.txt"))
27 version = re.search(
28 r'^VERSION = "([a-z0-9\.]+)"$', Path("mlflow", "version.py").read_text(), re.MULTILINE
29 ).group(1)
30 python_version = Path("requirements", "python-version.txt").read_text().strip()
31 data = {
32 "build-system": {
33 "requires": ["setuptools"],
34 "build-backend": "setuptools.build_meta",
35 },
36 "project": {
37 "name": "mlflow" if not skinny else "mlflow-skinny",
38 "version": version,
39 "maintainers": [
40 {"name": "Databricks", "email": "[email protected] "}
41 ],
42 "description": (
43 "MLflow is an open source platform for the complete machine learning lifecycle"
44 ),
45 "readme": "README.rst",
46 "license": {
47 "file": "LICENSE.txt",
48 },
49 "keywords": ["mlflow", "ai", "databricks"],
50 "classifiers": [
51 "Development Status :: 5 - Production/Stable",
52 "Intended Audience :: Developers",
53 "Intended Audience :: End Users/Desktop",
54 "Intended Audience :: Science/Research",
55 "Intended Audience :: Information Technology",
56 "Topic :: Scientific/Engineering :: Artificial Intelligence",
57 "Topic :: Software Development :: Libraries :: Python Modules",
58 "License :: OSI Approved :: Apache Software License",
59 "Operating System :: OS Independent",
60 f"Programming Language :: Python :: {python_version}",
61 ],
62 "requires-python": f">={python_version}",
63 "dependencies": sorted(
64 skinny_requirements if skinny else skinny_requirements + core_requirements
65 ),
66 "optional-dependencies": {
67 "extras": [
68 # Required to log artifacts and models to HDFS artifact locations
69 "pyarrow",
70 # Required to sign outgoing request with SigV4 signature
71 "requests-auth-aws-sigv4",
72 # Required to log artifacts and models to AWS S3 artifact locations
73 "boto3",
74 "botocore",
75 # Required to log artifacts and models to GCS artifact locations
76 "google-cloud-storage>=1.30.0",
77 "azureml-core>=1.2.0",
78 # Required to log artifacts to SFTP artifact locations
79 "pysftp",
80 # Required by the mlflow.projects module, when running projects against
81 # a remote Kubernetes cluster
82 "kubernetes",
83 # Required to serve models through MLServer
84 # NOTE: remove the upper version pin once protobuf is no longer pinned in
85 # mlserver. Reference issue: https://github.com/SeldonIO/MLServer/issues/1089
86 "mlserver>=1.2.0,!=1.3.1,<1.4.0",
87 "mlserver-mlflow>=1.2.0,!=1.3.1,<1.4.0",
88 "virtualenv",
89 # Required for exporting metrics from the MLflow server to Prometheus
90 # as part of the MLflow server monitoring add-on
91 "prometheus-flask-exporter",
92 ],
93 "databricks": [
94 # Required to write model artifacts to unity catalog locations
95 "azure-storage-file-datalake>12",
96 "google-cloud-storage>=1.30.0",
97 "boto3>1",
98 "botocore",
99 ],
100 "gateway": gateways_requirements,
101 "genai": gateways_requirements,
102 "sqlserver": ["mlflow-dbstore"],
103 "aliyun-oss": ["aliyunstoreplugin"],
104 "xethub": ["mlflow-xethub"],
105 },
106 "urls": {
107 "homepage": "https://mlflow.org",
108 "issues": "https://github.com/mlflow/mlflow/issues",
109 "documentation": "https://mlflow.org/docs/latest/index.html",
110 "repository": "https://github.com/mlflow/mlflow",
111 },
112 "scripts": {
113 "mlflow": "mlflow.cli:cli",
114 },
115 "entry-points": {
116 "mlflow.app": {
117 "basic-auth": "mlflow.server.auth:create_app",
118 },
119 "mlflow.app.client": {
120 "basic-auth": "mlflow.server.auth.client:AuthServiceClient",
121 },
122 "mlflow.deployments": {
123 "databricks": "mlflow.deployments.databricks",
124 "http": "mlflow.deployments.mlflow",
125 "https": "mlflow.deployments.mlflow",
126 "openai": "mlflow.deployments.openai",
127 },
128 },
129 },
130 "tool": {
131 "setuptools": {
132 "packages": {
133 "find": {
134 "where": ["."],
135 "include": ["mlflow", "mlflow.*"],
136 "exclude": ["tests", "tests.*"],
137 }
138 },
139 "package-data": {
140 "mlflow": [
141 "store/db_migrations/alembic.ini",
142 "temporary_db_migrations_for_pre_1_users/alembic.ini",
143 "pypi_package_index.json",
144 "pyspark/ml/log_model_allowlist.txt",
145 "server/auth/basic_auth.ini",
146 "server/auth/db/migrations/alembic.ini",
147 "recipes/resources/**/*",
148 "recipes/cards/templates/**/*",
149 ]
150 + ([] if skinny else ["models/container/**/*", "server/js/build/**/*"])
151 },
152 }
153 },
154 }
155
156 if skinny:
157 out_path = "pyproject.skinny.toml"
158 with Path(out_path).open("w") as f:
159 f.write(toml.dumps(data))
160 else:
161 out_path = "pyproject.toml"
162 original = Path(out_path).read_text().split(SEPARATOR)[1]
163 with Path(out_path).open("w") as f:
164 f.write(toml.dumps(data))
165 f.write(SEPARATOR)
166 f.write(original)
167
168 if taplo := shutil.which("taplo"):
169 subprocess.run([taplo, "fmt", out_path], check=True)
170
171
172 def main() -> None:
173 if shutil.which("taplo") is None:
174 print(
175 "taplo is required to generate pyproject.toml. "
176 "Please install it by following the instructions at "
177 "https://taplo.tamasfe.dev/cli/introduction.html."
178 )
179 return
180 build(skinny=False)
181 build(skinny=True)
182
183
184 if __name__ == "__main__":
185 main()
186
[end of dev/pyproject.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/dev/pyproject.py b/dev/pyproject.py
--- a/dev/pyproject.py
+++ b/dev/pyproject.py
@@ -102,6 +102,7 @@
"sqlserver": ["mlflow-dbstore"],
"aliyun-oss": ["aliyunstoreplugin"],
"xethub": ["mlflow-xethub"],
+ "jfrog": ["mlflow-jfrog-plugin"],
},
"urls": {
"homepage": "https://mlflow.org",
| {"golden_diff": "diff --git a/dev/pyproject.py b/dev/pyproject.py\n--- a/dev/pyproject.py\n+++ b/dev/pyproject.py\n@@ -102,6 +102,7 @@\n \"sqlserver\": [\"mlflow-dbstore\"],\n \"aliyun-oss\": [\"aliyunstoreplugin\"],\n \"xethub\": [\"mlflow-xethub\"],\n+ \"jfrog\": [\"mlflow-jfrog-plugin\"],\n },\n \"urls\": {\n \"homepage\": \"https://mlflow.org\",\n", "issue": "[DOC-FIX] Add JFrog MLFlow plugin in order to store your artifacts on a preferred JFrog repository\n### Willingness to contribute\n\nYes. I can contribute a documentation fix independently.\n\n### URL(s) with the issue\n\nhttps://mlflow.org/docs/latest/plugins.html#artifact-repository-plugins\n\n### Description of proposal (what needs changing)\n\nInclude details about the [mlflow-jfrog-plugin](https://pypi.org/project/mlflow-jfrog-plugin), and also provide simplified installation instructions, such as `pip install mlflow[jfrog]`\n", "before_files": [{"content": "from __future__ import annotations\n\nimport re\nimport shutil\nimport subprocess\nfrom pathlib import Path\n\nimport toml\n\nSEPARATOR = \"\"\"\n# Package metadata: can't be updated manually, use dev/pyproject.py\n# -----------------------------------------------------------------\n# Dev tool settings: can be updated manually\n\n\"\"\"\n\n\ndef read_requirements(path: Path) -> list[str]:\n lines = (l.strip() for l in path.read_text().splitlines())\n return [l for l in lines if l and not l.startswith(\"#\")]\n\n\ndef build(skinny: bool) -> None:\n skinny_requirements = read_requirements(Path(\"requirements\", \"skinny-requirements.txt\"))\n core_requirements = read_requirements(Path(\"requirements\", \"core-requirements.txt\"))\n gateways_requirements = read_requirements(Path(\"requirements\", \"gateway-requirements.txt\"))\n version = re.search(\n r'^VERSION = \"([a-z0-9\\.]+)\"$', Path(\"mlflow\", \"version.py\").read_text(), re.MULTILINE\n ).group(1)\n python_version = Path(\"requirements\", \"python-version.txt\").read_text().strip()\n data = {\n \"build-system\": {\n \"requires\": [\"setuptools\"],\n \"build-backend\": \"setuptools.build_meta\",\n },\n \"project\": {\n \"name\": \"mlflow\" if not skinny else \"mlflow-skinny\",\n \"version\": version,\n \"maintainers\": [\n {\"name\": \"Databricks\", \"email\": \"[email protected] \"}\n ],\n \"description\": (\n \"MLflow is an open source platform for the complete machine learning lifecycle\"\n ),\n \"readme\": \"README.rst\",\n \"license\": {\n \"file\": \"LICENSE.txt\",\n },\n \"keywords\": [\"mlflow\", \"ai\", \"databricks\"],\n \"classifiers\": [\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: End Users/Desktop\",\n \"Intended Audience :: Science/Research\",\n \"Intended Audience :: Information Technology\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n f\"Programming Language :: Python :: {python_version}\",\n ],\n \"requires-python\": f\">={python_version}\",\n \"dependencies\": sorted(\n skinny_requirements if skinny else skinny_requirements + core_requirements\n ),\n \"optional-dependencies\": {\n \"extras\": [\n # Required to log artifacts and models to HDFS artifact locations\n \"pyarrow\",\n # Required to sign outgoing request with SigV4 signature\n \"requests-auth-aws-sigv4\",\n # Required to log artifacts and models to AWS S3 artifact locations\n 
\"boto3\",\n \"botocore\",\n # Required to log artifacts and models to GCS artifact locations\n \"google-cloud-storage>=1.30.0\",\n \"azureml-core>=1.2.0\",\n # Required to log artifacts to SFTP artifact locations\n \"pysftp\",\n # Required by the mlflow.projects module, when running projects against\n # a remote Kubernetes cluster\n \"kubernetes\",\n # Required to serve models through MLServer\n # NOTE: remove the upper version pin once protobuf is no longer pinned in\n # mlserver. Reference issue: https://github.com/SeldonIO/MLServer/issues/1089\n \"mlserver>=1.2.0,!=1.3.1,<1.4.0\",\n \"mlserver-mlflow>=1.2.0,!=1.3.1,<1.4.0\",\n \"virtualenv\",\n # Required for exporting metrics from the MLflow server to Prometheus\n # as part of the MLflow server monitoring add-on\n \"prometheus-flask-exporter\",\n ],\n \"databricks\": [\n # Required to write model artifacts to unity catalog locations\n \"azure-storage-file-datalake>12\",\n \"google-cloud-storage>=1.30.0\",\n \"boto3>1\",\n \"botocore\",\n ],\n \"gateway\": gateways_requirements,\n \"genai\": gateways_requirements,\n \"sqlserver\": [\"mlflow-dbstore\"],\n \"aliyun-oss\": [\"aliyunstoreplugin\"],\n \"xethub\": [\"mlflow-xethub\"],\n },\n \"urls\": {\n \"homepage\": \"https://mlflow.org\",\n \"issues\": \"https://github.com/mlflow/mlflow/issues\",\n \"documentation\": \"https://mlflow.org/docs/latest/index.html\",\n \"repository\": \"https://github.com/mlflow/mlflow\",\n },\n \"scripts\": {\n \"mlflow\": \"mlflow.cli:cli\",\n },\n \"entry-points\": {\n \"mlflow.app\": {\n \"basic-auth\": \"mlflow.server.auth:create_app\",\n },\n \"mlflow.app.client\": {\n \"basic-auth\": \"mlflow.server.auth.client:AuthServiceClient\",\n },\n \"mlflow.deployments\": {\n \"databricks\": \"mlflow.deployments.databricks\",\n \"http\": \"mlflow.deployments.mlflow\",\n \"https\": \"mlflow.deployments.mlflow\",\n \"openai\": \"mlflow.deployments.openai\",\n },\n },\n },\n \"tool\": {\n \"setuptools\": {\n \"packages\": {\n \"find\": {\n \"where\": [\".\"],\n \"include\": [\"mlflow\", \"mlflow.*\"],\n \"exclude\": [\"tests\", \"tests.*\"],\n }\n },\n \"package-data\": {\n \"mlflow\": [\n \"store/db_migrations/alembic.ini\",\n \"temporary_db_migrations_for_pre_1_users/alembic.ini\",\n \"pypi_package_index.json\",\n \"pyspark/ml/log_model_allowlist.txt\",\n \"server/auth/basic_auth.ini\",\n \"server/auth/db/migrations/alembic.ini\",\n \"recipes/resources/**/*\",\n \"recipes/cards/templates/**/*\",\n ]\n + ([] if skinny else [\"models/container/**/*\", \"server/js/build/**/*\"])\n },\n }\n },\n }\n\n if skinny:\n out_path = \"pyproject.skinny.toml\"\n with Path(out_path).open(\"w\") as f:\n f.write(toml.dumps(data))\n else:\n out_path = \"pyproject.toml\"\n original = Path(out_path).read_text().split(SEPARATOR)[1]\n with Path(out_path).open(\"w\") as f:\n f.write(toml.dumps(data))\n f.write(SEPARATOR)\n f.write(original)\n\n if taplo := shutil.which(\"taplo\"):\n subprocess.run([taplo, \"fmt\", out_path], check=True)\n\n\ndef main() -> None:\n if shutil.which(\"taplo\") is None:\n print(\n \"taplo is required to generate pyproject.toml. \"\n \"Please install it by following the instructions at \"\n \"https://taplo.tamasfe.dev/cli/introduction.html.\"\n )\n return\n build(skinny=False)\n build(skinny=True)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "dev/pyproject.py"}]} | 2,666 | 110 |
gh_patches_debug_6914 | rasdani/github-patches | git_diff | saulpw__visidata-1717 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Create New Table from Clipboard
It would be nice to have a command which can create a new table directly from the clipboard.
For example, suppose I have tab separated data in my clipboard.
I would like to be able to execute a visidata command to load a table from the clipboard.
The command might ask me what filetype I am loading (tsv in this case).
Then it would load the data directly from the clipboard.
This would be particularly handy for quick file format conversions. For example, if I copy a table from OneNote and want to convert it to a Markdown table, I could just import it into VisiData from the clipboard, and copy it back to the clipboard as a Markdown table.
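For context, the TSV-to-Markdown half of that round trip is simple enough to sketch with the standard library alone; the clipboard is stood in by a literal string here, since the point of the request is that VisiData would handle the clipboard part:
```
import csv
import io

tsv_text = "name\tage\nAda\t36\nGrace\t45\n"  # stand-in for clipboard contents
rows = list(csv.reader(io.StringIO(tsv_text), delimiter="\t"))

header, body = rows[0], rows[1:]
lines = ["| " + " | ".join(header) + " |",
         "| " + " | ".join("---" for _ in header) + " |"]
lines += ["| " + " | ".join(row) + " |" for row in body]
print("\n".join(lines))  # a Markdown table ready to paste back
```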
</issue>
<code>
[start of visidata/features/open_syspaste.py]
1 from visidata import vd, BaseSheet, Path
2
3
4 @BaseSheet.api
5 def open_syspaste(sheet, filetype='tsv'):
6 import io
7
8 v = vd.sysclipValue().strip() or vd.fail('nothing to open')
9
10 p = Path('syspaste'+'.'+filetype, fp=io.BytesIO(v.encode('utf-8')))
11 return vd.openSource(p, filetype=filetype)
12
13
14 BaseSheet.addCommand('', 'open-syspaste', 'vd.push(open_syspaste())', 'open clipboard as tsv')
15
[end of visidata/features/open_syspaste.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/visidata/features/open_syspaste.py b/visidata/features/open_syspaste.py
--- a/visidata/features/open_syspaste.py
+++ b/visidata/features/open_syspaste.py
@@ -1,3 +1,7 @@
+'''
+Load new table from system clipboard
+'''
+
from visidata import vd, BaseSheet, Path
@@ -11,4 +15,4 @@
return vd.openSource(p, filetype=filetype)
-BaseSheet.addCommand('', 'open-syspaste', 'vd.push(open_syspaste())', 'open clipboard as tsv')
+BaseSheet.addCommand('gShift+P', 'open-syspaste', 'vd.push(open_syspaste(filetype=vd.input("paste as filetype: ", value="tsv")))', 'open clipboard as filetype')
| {"golden_diff": "diff --git a/visidata/features/open_syspaste.py b/visidata/features/open_syspaste.py\n--- a/visidata/features/open_syspaste.py\n+++ b/visidata/features/open_syspaste.py\n@@ -1,3 +1,7 @@\n+'''\n+Load new table from system clipboard\n+'''\n+\n from visidata import vd, BaseSheet, Path\n \n \n@@ -11,4 +15,4 @@\n return vd.openSource(p, filetype=filetype)\n \n \n-BaseSheet.addCommand('', 'open-syspaste', 'vd.push(open_syspaste())', 'open clipboard as tsv')\n+BaseSheet.addCommand('gShift+P', 'open-syspaste', 'vd.push(open_syspaste(filetype=vd.input(\"paste as filetype: \", value=\"tsv\")))', 'open clipboard as filetype')\n", "issue": "Create New Table from Clipboard\nIt would be nice to have a command which can create a new table directly from the clipboard.\r\n\r\nFor example, suppose I have tab separated data in my clipboard.\r\nI would like to be able to execute a visidata command to load a table from the clipboard.\r\nThe command might ask me what filetype I am loading (tsv in this case).\r\nThen it would load the data directly from the clipboard.\r\n\r\nThis would be particularly handy for quick file format conversions. e.g. if I copy a table from OneNote and want to convert it to a Markdown table, I could just import it into Visidata form the clipboard, and copy it back to the clipboard as a markdown table.\n", "before_files": [{"content": "from visidata import vd, BaseSheet, Path\n\n\[email protected]\ndef open_syspaste(sheet, filetype='tsv'):\n import io\n\n v = vd.sysclipValue().strip() or vd.fail('nothing to open')\n\n p = Path('syspaste'+'.'+filetype, fp=io.BytesIO(v.encode('utf-8')))\n return vd.openSource(p, filetype=filetype)\n\n\nBaseSheet.addCommand('', 'open-syspaste', 'vd.push(open_syspaste())', 'open clipboard as tsv')\n", "path": "visidata/features/open_syspaste.py"}]} | 820 | 176 |
gh_patches_debug_39835 | rasdani/github-patches | git_diff | NVIDIA__NVFlare-330 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add an event_type argument in AnalyticsSender
Right now AnalyticsSender sends the default event type "analytix_log_stats".
However, in the ConvertToFedEvent and TBAnalyticsReceiver JSON configurations, the user has to list "analytix_log_stats" in the `events` setting for anything to work.
It's not natural for the user to know where this event is coming from.
If we add an "event_type" argument to AnalyticsSender, the event sending, converting, and receiving can be configured as matching pairs.
That would be easier for people to understand.
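For illustration, a matched pairing might then look roughly like the sketch below. This assumes the proposed `event_type` argument exists on AnalyticsSender (it does not today) and only shows the receiver's `events` list rather than a concrete receiver class; the `fed.`-prefixed entry is the federated form that ConvertToFedEvent produces.
```
from nvflare.app_common.widgets.streaming import AnalyticsSender

CUSTOM_EVENT = "my_training_stats"  # hypothetical event name

# Sender fires the chosen event type instead of the hard-coded "analytix_log_stats".
sender = AnalyticsSender(event_type=CUSTOM_EVENT)

# A receiver such as TBAnalyticsReceiver (an AnalyticsReceiver subclass) would then
# subscribe to the same pair of events in its configuration:
receiver_events = [CUSTOM_EVENT, f"fed.{CUSTOM_EVENT}"]
```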
</issue>
<code>
[start of nvflare/app_common/widgets/streaming.py]
1 # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from abc import ABC, abstractmethod
16 from threading import Lock
17 from typing import List, Optional
18
19 from nvflare.apis.analytix import AnalyticsData, AnalyticsDataType
20 from nvflare.apis.dxo import DXO
21 from nvflare.apis.event_type import EventType
22 from nvflare.apis.fl_component import FLComponent
23 from nvflare.apis.fl_constant import EventScope, FLContextKey, ReservedKey
24 from nvflare.apis.fl_context import FLContext
25 from nvflare.apis.shareable import Shareable
26 from nvflare.app_common.app_event_type import AppEventType
27 from nvflare.widgets.widget import Widget
28
29
30 def send_analytic_dxo(
31 comp: FLComponent, dxo: DXO, fl_ctx: FLContext, event_type: str = AppEventType.ANALYTIC_EVENT_TYPE
32 ):
33 """Sends analytic dxo.
34
35 Args:
36 comp (FLComponent): An FLComponent.
37 dxo (DXO): analytic data in dxo.
38 fl_ctx (FLContext): fl context info.
39 event_type (str): Event type.
40 """
41 if not isinstance(comp, FLComponent):
42 raise TypeError(f"expect comp to be an instance of FLComponent, but got {type(comp)}")
43 if not isinstance(dxo, DXO):
44 raise TypeError(f"expect dxo to be an instance of DXO, but got {type(dxo)}")
45 if not isinstance(fl_ctx, FLContext):
46 raise TypeError(f"expect fl_ctx to be an instance of FLContext, but got {type(fl_ctx)}")
47
48 fl_ctx.set_prop(key=FLContextKey.EVENT_DATA, value=dxo.to_shareable(), private=True, sticky=False)
49 comp.fire_event(event_type=event_type, fl_ctx=fl_ctx)
50
51
52 def create_analytic_dxo(tag: str, value, data_type: AnalyticsDataType, **kwargs) -> DXO:
53 """Creates the analytic DXO.
54
55 Args:
56 tag (str): the tag associated with this value.
57 value: the analytic data.
58 data_type (AnalyticsDataType): analytic data type.
59 kwargs: additional arguments to be passed into the receiver side's function.
60
61 Returns:
62 A DXO object that contains the analytic data.
63 """
64 data = AnalyticsData(tag=tag, value=value, data_type=data_type, kwargs=kwargs)
65 dxo = data.to_dxo()
66 return dxo
67
68
69 class AnalyticsSender(Widget):
70 def __init__(self):
71 """Sends analytics data.
72
73 This class implements some common methods follows signatures from PyTorch SummaryWriter.
74 It provides a convenient way for Learner to use.
75 """
76 super().__init__()
77 self.engine = None
78
79 def handle_event(self, event_type: str, fl_ctx: FLContext):
80 if event_type == EventType.START_RUN:
81 self.engine = fl_ctx.get_engine()
82
83 def _add(
84 self,
85 tag: str,
86 value,
87 data_type: AnalyticsDataType,
88 global_step: Optional[int] = None,
89 kwargs: Optional[dict] = None,
90 ):
91 kwargs = kwargs if kwargs else {}
92 if global_step:
93 if not isinstance(global_step, int):
94 raise TypeError(f"Expect global step to be an instance of int, but got {type(global_step)}")
95 kwargs["global_step"] = global_step
96 dxo = create_analytic_dxo(tag=tag, value=value, data_type=data_type, **kwargs)
97 with self.engine.new_context() as fl_ctx:
98 send_analytic_dxo(self, dxo=dxo, fl_ctx=fl_ctx)
99
100 def add_scalar(self, tag: str, scalar: float, global_step: Optional[int] = None, **kwargs):
101 """Sends a scalar.
102
103 Args:
104 tag (str): Data identifier.
105 scalar (float): Value to send.
106 global_step (optional, int): Global step value.
107 **kwargs: Additional arguments to pass to the receiver side.
108 """
109 self._add(tag=tag, value=scalar, data_type=AnalyticsDataType.SCALAR, global_step=global_step, kwargs=kwargs)
110
111 def add_scalars(self, tag: str, scalars: dict, global_step: Optional[int] = None, **kwargs):
112 """Sends scalars.
113
114 Args:
115 tag (str): The parent name for the tags.
116 scalars (dict): Key-value pair storing the tag and corresponding values.
117 global_step (optional, int): Global step value.
118 **kwargs: Additional arguments to pass to the receiver side.
119 """
120 self._add(tag=tag, value=scalars, data_type=AnalyticsDataType.SCALARS, global_step=global_step, kwargs=kwargs)
121
122 def add_text(self, tag: str, text: str, global_step: Optional[int] = None, **kwargs):
123 """Sends a text.
124
125 Args:
126 tag (str): Data identifier.
127 text (str): String to send.
128 global_step (optional, int): Global step value.
129 **kwargs: Additional arguments to pass to the receiver side.
130 """
131 self._add(tag=tag, value=text, data_type=AnalyticsDataType.TEXT, global_step=global_step, kwargs=kwargs)
132
133 def add_image(self, tag: str, image, global_step: Optional[int] = None, **kwargs):
134 """Sends an image.
135
136 Args:
137 tag (str): Data identifier.
138 image: Image to send.
139 global_step (optional, int): Global step value.
140 **kwargs: Additional arguments to pass to the receiver side.
141 """
142 self._add(tag=tag, value=image, data_type=AnalyticsDataType.IMAGE, global_step=global_step, kwargs=kwargs)
143
144 def flush(self):
145 """Flushes out the message.
146
147 This is doing nothing, it is defined for mimic the PyTorch SummaryWriter behavior.
148 """
149 pass
150
151 def close(self):
152 """Close resources."""
153 if self.engine:
154 self.engine = None
155
156
157 class AnalyticsReceiver(Widget, ABC):
158 def __init__(self, events: Optional[List[str]] = None):
159 """Receives analytic data.
160
161 Args:
162 events (optional, List[str]): A list of event that this receiver will handle.
163 """
164 super().__init__()
165 if events is None:
166 events = [AppEventType.ANALYTIC_EVENT_TYPE, f"fed.{AppEventType.ANALYTIC_EVENT_TYPE}"]
167 self.events = events
168 self._save_lock = Lock()
169 self._end = False
170
171 @abstractmethod
172 def initialize(self, fl_ctx: FLContext):
173 """Initializes the receiver.
174
175 Args:
176 fl_ctx (FLContext): fl context.
177 """
178 pass
179
180 @abstractmethod
181 def save(self, fl_ctx: FLContext, shareable: Shareable, record_origin: str):
182 """Saves the received data.
183
184 Args:
185 fl_ctx (FLContext): fl context.
186 shareable (Shareable): the received message.
187 record_origin (str): the sender of this message / record.
188 """
189 pass
190
191 @abstractmethod
192 def finalize(self, fl_ctx: FLContext):
193 """Finalizes the receiver.
194
195 Args:
196 fl_ctx (FLContext): fl context.
197 """
198 pass
199
200 def handle_event(self, event_type: str, fl_ctx: FLContext):
201 if event_type == EventType.START_RUN:
202 self.initialize(fl_ctx)
203 elif event_type in self.events:
204 if self._end:
205 self.log_debug(fl_ctx, f"Already received end run event, drop event {event_type}.", fire_event=False)
206 return
207 data = fl_ctx.get_prop(FLContextKey.EVENT_DATA, None)
208 if data is None:
209 self.log_error(fl_ctx, "Missing event data.", fire_event=False)
210 return
211 if not isinstance(data, Shareable):
212 self.log_error(
213 fl_ctx, f"Expect data to be an instance of Shareable but got {type(data)}", fire_event=False
214 )
215 return
216
217 # if fed event use peer name to save
218 if fl_ctx.get_prop(FLContextKey.EVENT_SCOPE) == EventScope.FEDERATION:
219 record_origin = data.get_peer_prop(ReservedKey.IDENTITY_NAME, None)
220 else:
221 record_origin = fl_ctx.get_identity_name()
222
223 if record_origin is None:
224 self.log_error(fl_ctx, "record_origin can't be None.", fire_event=False)
225 return
226 with self._save_lock:
227 self.save(shareable=data, fl_ctx=fl_ctx, record_origin=record_origin)
228 elif event_type == EventType.END_RUN:
229 self._end = True
230 self.finalize(fl_ctx)
231
[end of nvflare/app_common/widgets/streaming.py]
[start of nvflare/app_common/app_event_type.py]
1 # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15
16 class AppEventType(object):
17 """Defines application events."""
18
19 START_ROUND = "_start_round"
20 END_ROUND = "_end_round"
21
22 BEFORE_AGGREGATION = "_before_aggregation"
23 END_AGGREGATION = "_end_aggregation"
24
25 SUBMIT_LOCAL_BEST_MODEL = "_submit_local_best_model"
26 SERVER_RECEIVE_BEST_MODEL = "_server_receive_best_model"
27 RECEIVE_VALIDATION_MODEL = "_receive_validation_model"
28 SEND_VALIDATION_RESULTS = "_send_validation_results"
29 RECEIVE_VALIDATION_RESULTS = "_receive_validation_results"
30
31 BEFORE_INITIALIZE = "_before_initialize"
32 AFTER_INITIALIZE = "_after_initialize"
33 BEFORE_TRAIN = "_before_train"
34 AFTER_TRAIN = "_after_train"
35
36 BEFORE_SHAREABLE_TO_LEARNABLE = "_before_model_update"
37 AFTER_SHAREABLE_TO_LEARNABLE = "_after_model_update"
38 BEFORE_LEARNABLE_PERSIST = "_before_save_model"
39 AFTER_LEARNABLE_PERSIST = "_after_save_model"
40 BEFORE_SEND_BEST_MODEL = "_before_send_best_model"
41 AFTER_SEND_BEST_MODEL = "_after_send_best_model"
42 LOCAL_BEST_MODEL_AVAILABLE = "_local_best_model_available"
43 GLOBAL_BEST_MODEL_AVAILABLE = "_global_best_model_available"
44 BEFORE_GET_VALIDATION_MODELS = "_before_get_validation_models"
45 AFTER_GET_VALIDATION_MODELS = "_after_get_validation_models"
46 SEND_MODEL_FOR_VALIDATION = "_send_model_for_validation"
47 BEFORE_VALIDATE_MODEL = "_before_validate_model"
48 AFTER_VALIDATE_MODEL = "_after_validate_model"
49 BEFORE_SUBMIT_VALIDATION_RESULTS = "_before_submit_validation_results"
50 AFTER_SUBMIT_VALIDATION_RESULTS = "_after_submit_validation_results"
51
52 # Events
53 ROUND_STARTED = "_round_started"
54 ROUND_DONE = "_round_done"
55 INITIAL_MODEL_LOADED = "_initial_model_loaded"
56 BEFORE_TRAIN_TASK = "_before_train_task"
57 RECEIVE_CONTRIBUTION = "_receive_contribution"
58 AFTER_CONTRIBUTION_ACCEPT = "_after_contribution_accept"
59 AFTER_AGGREGATION = "_after_aggregation"
60 BEFORE_CONTRIBUTION_ACCEPT = "_before_contribution_accept"
61 GLOBAL_WEIGHTS_UPDATED = "_global_weights_updated"
62 TRAINING_STARTED = "_training_started"
63 TRAINING_FINISHED = "_training_finished"
64 TRAIN_DONE = "_train_done"
65
66 CROSS_VAL_INIT = "_cross_val_init"
67 VALIDATION_RESULT_RECEIVED = "_validation_result_received"
68 RECEIVE_BEST_MODEL = "_receive_best_model"
69
70 ANALYTIC_EVENT_TYPE = "analytix_log_stats"
71 LOGGING_EVENT_TYPE = "analytix_logging"
72
[end of nvflare/app_common/app_event_type.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/nvflare/app_common/app_event_type.py b/nvflare/app_common/app_event_type.py
--- a/nvflare/app_common/app_event_type.py
+++ b/nvflare/app_common/app_event_type.py
@@ -66,6 +66,3 @@
CROSS_VAL_INIT = "_cross_val_init"
VALIDATION_RESULT_RECEIVED = "_validation_result_received"
RECEIVE_BEST_MODEL = "_receive_best_model"
-
- ANALYTIC_EVENT_TYPE = "analytix_log_stats"
- LOGGING_EVENT_TYPE = "analytix_logging"
diff --git a/nvflare/app_common/widgets/streaming.py b/nvflare/app_common/widgets/streaming.py
--- a/nvflare/app_common/widgets/streaming.py
+++ b/nvflare/app_common/widgets/streaming.py
@@ -23,13 +23,12 @@
from nvflare.apis.fl_constant import EventScope, FLContextKey, ReservedKey
from nvflare.apis.fl_context import FLContext
from nvflare.apis.shareable import Shareable
-from nvflare.app_common.app_event_type import AppEventType
from nvflare.widgets.widget import Widget
+ANALYTIC_EVENT_TYPE = "analytix_log_stats"
-def send_analytic_dxo(
- comp: FLComponent, dxo: DXO, fl_ctx: FLContext, event_type: str = AppEventType.ANALYTIC_EVENT_TYPE
-):
+
+def send_analytic_dxo(comp: FLComponent, dxo: DXO, fl_ctx: FLContext, event_type: str = ANALYTIC_EVENT_TYPE):
"""Sends analytic dxo.
Args:
@@ -67,14 +66,19 @@
class AnalyticsSender(Widget):
- def __init__(self):
+ def __init__(self, event_type=ANALYTIC_EVENT_TYPE):
"""Sends analytics data.
- This class implements some common methods follows signatures from PyTorch SummaryWriter.
- It provides a convenient way for Learner to use.
+ Note::
+ This class implements some common methods follows signatures from PyTorch SummaryWriter.
+ It provides a convenient way for Learner to use.
+
+ Args:
+ event_type (str): event type to fire.
"""
super().__init__()
self.engine = None
+ self.event_type = event_type
def handle_event(self, event_type: str, fl_ctx: FLContext):
if event_type == EventType.START_RUN:
@@ -95,7 +99,7 @@
kwargs["global_step"] = global_step
dxo = create_analytic_dxo(tag=tag, value=value, data_type=data_type, **kwargs)
with self.engine.new_context() as fl_ctx:
- send_analytic_dxo(self, dxo=dxo, fl_ctx=fl_ctx)
+ send_analytic_dxo(self, dxo=dxo, fl_ctx=fl_ctx, event_type=self.event_type)
def add_scalar(self, tag: str, scalar: float, global_step: Optional[int] = None, **kwargs):
"""Sends a scalar.
@@ -163,7 +167,7 @@
"""
super().__init__()
if events is None:
- events = [AppEventType.ANALYTIC_EVENT_TYPE, f"fed.{AppEventType.ANALYTIC_EVENT_TYPE}"]
+ events = [ANALYTIC_EVENT_TYPE, f"fed.{ANALYTIC_EVENT_TYPE}"]
self.events = events
self._save_lock = Lock()
self._end = False
| {"golden_diff": "diff --git a/nvflare/app_common/app_event_type.py b/nvflare/app_common/app_event_type.py\n--- a/nvflare/app_common/app_event_type.py\n+++ b/nvflare/app_common/app_event_type.py\n@@ -66,6 +66,3 @@\n CROSS_VAL_INIT = \"_cross_val_init\"\n VALIDATION_RESULT_RECEIVED = \"_validation_result_received\"\n RECEIVE_BEST_MODEL = \"_receive_best_model\"\n-\n- ANALYTIC_EVENT_TYPE = \"analytix_log_stats\"\n- LOGGING_EVENT_TYPE = \"analytix_logging\"\ndiff --git a/nvflare/app_common/widgets/streaming.py b/nvflare/app_common/widgets/streaming.py\n--- a/nvflare/app_common/widgets/streaming.py\n+++ b/nvflare/app_common/widgets/streaming.py\n@@ -23,13 +23,12 @@\n from nvflare.apis.fl_constant import EventScope, FLContextKey, ReservedKey\n from nvflare.apis.fl_context import FLContext\n from nvflare.apis.shareable import Shareable\n-from nvflare.app_common.app_event_type import AppEventType\n from nvflare.widgets.widget import Widget\n \n+ANALYTIC_EVENT_TYPE = \"analytix_log_stats\"\n \n-def send_analytic_dxo(\n- comp: FLComponent, dxo: DXO, fl_ctx: FLContext, event_type: str = AppEventType.ANALYTIC_EVENT_TYPE\n-):\n+\n+def send_analytic_dxo(comp: FLComponent, dxo: DXO, fl_ctx: FLContext, event_type: str = ANALYTIC_EVENT_TYPE):\n \"\"\"Sends analytic dxo.\n \n Args:\n@@ -67,14 +66,19 @@\n \n \n class AnalyticsSender(Widget):\n- def __init__(self):\n+ def __init__(self, event_type=ANALYTIC_EVENT_TYPE):\n \"\"\"Sends analytics data.\n \n- This class implements some common methods follows signatures from PyTorch SummaryWriter.\n- It provides a convenient way for Learner to use.\n+ Note::\n+ This class implements some common methods follows signatures from PyTorch SummaryWriter.\n+ It provides a convenient way for Learner to use.\n+\n+ Args:\n+ event_type (str): event type to fire.\n \"\"\"\n super().__init__()\n self.engine = None\n+ self.event_type = event_type\n \n def handle_event(self, event_type: str, fl_ctx: FLContext):\n if event_type == EventType.START_RUN:\n@@ -95,7 +99,7 @@\n kwargs[\"global_step\"] = global_step\n dxo = create_analytic_dxo(tag=tag, value=value, data_type=data_type, **kwargs)\n with self.engine.new_context() as fl_ctx:\n- send_analytic_dxo(self, dxo=dxo, fl_ctx=fl_ctx)\n+ send_analytic_dxo(self, dxo=dxo, fl_ctx=fl_ctx, event_type=self.event_type)\n \n def add_scalar(self, tag: str, scalar: float, global_step: Optional[int] = None, **kwargs):\n \"\"\"Sends a scalar.\n@@ -163,7 +167,7 @@\n \"\"\"\n super().__init__()\n if events is None:\n- events = [AppEventType.ANALYTIC_EVENT_TYPE, f\"fed.{AppEventType.ANALYTIC_EVENT_TYPE}\"]\n+ events = [ANALYTIC_EVENT_TYPE, f\"fed.{ANALYTIC_EVENT_TYPE}\"]\n self.events = events\n self._save_lock = Lock()\n self._end = False\n", "issue": "Add an event_type argument in AnalyticsSender\nRight now AnalyticsSender is sending default event type \"analytix_log_stats\".\r\n\r\nHowever, in the ConvertToFedEvent and TBAnalyticsReceiver JSON configuration, the user needs to config the \"analytix_log_stats\" in the events to work.\r\nIt's not natural for user to know where this event is coming from.\r\nIf we add an \"event_type\" argument in the AnalyticsSender, we can make the event sending, converting and receiving in pairs.\r\nThat would be easier for people to understand.\n", "before_files": [{"content": "# Copyright (c) 2021-2022, NVIDIA CORPORATION. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom abc import ABC, abstractmethod\nfrom threading import Lock\nfrom typing import List, Optional\n\nfrom nvflare.apis.analytix import AnalyticsData, AnalyticsDataType\nfrom nvflare.apis.dxo import DXO\nfrom nvflare.apis.event_type import EventType\nfrom nvflare.apis.fl_component import FLComponent\nfrom nvflare.apis.fl_constant import EventScope, FLContextKey, ReservedKey\nfrom nvflare.apis.fl_context import FLContext\nfrom nvflare.apis.shareable import Shareable\nfrom nvflare.app_common.app_event_type import AppEventType\nfrom nvflare.widgets.widget import Widget\n\n\ndef send_analytic_dxo(\n comp: FLComponent, dxo: DXO, fl_ctx: FLContext, event_type: str = AppEventType.ANALYTIC_EVENT_TYPE\n):\n \"\"\"Sends analytic dxo.\n\n Args:\n comp (FLComponent): An FLComponent.\n dxo (DXO): analytic data in dxo.\n fl_ctx (FLContext): fl context info.\n event_type (str): Event type.\n \"\"\"\n if not isinstance(comp, FLComponent):\n raise TypeError(f\"expect comp to be an instance of FLComponent, but got {type(comp)}\")\n if not isinstance(dxo, DXO):\n raise TypeError(f\"expect dxo to be an instance of DXO, but got {type(dxo)}\")\n if not isinstance(fl_ctx, FLContext):\n raise TypeError(f\"expect fl_ctx to be an instance of FLContext, but got {type(fl_ctx)}\")\n\n fl_ctx.set_prop(key=FLContextKey.EVENT_DATA, value=dxo.to_shareable(), private=True, sticky=False)\n comp.fire_event(event_type=event_type, fl_ctx=fl_ctx)\n\n\ndef create_analytic_dxo(tag: str, value, data_type: AnalyticsDataType, **kwargs) -> DXO:\n \"\"\"Creates the analytic DXO.\n\n Args:\n tag (str): the tag associated with this value.\n value: the analytic data.\n data_type (AnalyticsDataType): analytic data type.\n kwargs: additional arguments to be passed into the receiver side's function.\n\n Returns:\n A DXO object that contains the analytic data.\n \"\"\"\n data = AnalyticsData(tag=tag, value=value, data_type=data_type, kwargs=kwargs)\n dxo = data.to_dxo()\n return dxo\n\n\nclass AnalyticsSender(Widget):\n def __init__(self):\n \"\"\"Sends analytics data.\n\n This class implements some common methods follows signatures from PyTorch SummaryWriter.\n It provides a convenient way for Learner to use.\n \"\"\"\n super().__init__()\n self.engine = None\n\n def handle_event(self, event_type: str, fl_ctx: FLContext):\n if event_type == EventType.START_RUN:\n self.engine = fl_ctx.get_engine()\n\n def _add(\n self,\n tag: str,\n value,\n data_type: AnalyticsDataType,\n global_step: Optional[int] = None,\n kwargs: Optional[dict] = None,\n ):\n kwargs = kwargs if kwargs else {}\n if global_step:\n if not isinstance(global_step, int):\n raise TypeError(f\"Expect global step to be an instance of int, but got {type(global_step)}\")\n kwargs[\"global_step\"] = global_step\n dxo = create_analytic_dxo(tag=tag, value=value, data_type=data_type, **kwargs)\n with self.engine.new_context() as fl_ctx:\n send_analytic_dxo(self, dxo=dxo, fl_ctx=fl_ctx)\n\n def add_scalar(self, tag: 
str, scalar: float, global_step: Optional[int] = None, **kwargs):\n \"\"\"Sends a scalar.\n\n Args:\n tag (str): Data identifier.\n scalar (float): Value to send.\n global_step (optional, int): Global step value.\n **kwargs: Additional arguments to pass to the receiver side.\n \"\"\"\n self._add(tag=tag, value=scalar, data_type=AnalyticsDataType.SCALAR, global_step=global_step, kwargs=kwargs)\n\n def add_scalars(self, tag: str, scalars: dict, global_step: Optional[int] = None, **kwargs):\n \"\"\"Sends scalars.\n\n Args:\n tag (str): The parent name for the tags.\n scalars (dict): Key-value pair storing the tag and corresponding values.\n global_step (optional, int): Global step value.\n **kwargs: Additional arguments to pass to the receiver side.\n \"\"\"\n self._add(tag=tag, value=scalars, data_type=AnalyticsDataType.SCALARS, global_step=global_step, kwargs=kwargs)\n\n def add_text(self, tag: str, text: str, global_step: Optional[int] = None, **kwargs):\n \"\"\"Sends a text.\n\n Args:\n tag (str): Data identifier.\n text (str): String to send.\n global_step (optional, int): Global step value.\n **kwargs: Additional arguments to pass to the receiver side.\n \"\"\"\n self._add(tag=tag, value=text, data_type=AnalyticsDataType.TEXT, global_step=global_step, kwargs=kwargs)\n\n def add_image(self, tag: str, image, global_step: Optional[int] = None, **kwargs):\n \"\"\"Sends an image.\n\n Args:\n tag (str): Data identifier.\n image: Image to send.\n global_step (optional, int): Global step value.\n **kwargs: Additional arguments to pass to the receiver side.\n \"\"\"\n self._add(tag=tag, value=image, data_type=AnalyticsDataType.IMAGE, global_step=global_step, kwargs=kwargs)\n\n def flush(self):\n \"\"\"Flushes out the message.\n\n This is doing nothing, it is defined for mimic the PyTorch SummaryWriter behavior.\n \"\"\"\n pass\n\n def close(self):\n \"\"\"Close resources.\"\"\"\n if self.engine:\n self.engine = None\n\n\nclass AnalyticsReceiver(Widget, ABC):\n def __init__(self, events: Optional[List[str]] = None):\n \"\"\"Receives analytic data.\n\n Args:\n events (optional, List[str]): A list of event that this receiver will handle.\n \"\"\"\n super().__init__()\n if events is None:\n events = [AppEventType.ANALYTIC_EVENT_TYPE, f\"fed.{AppEventType.ANALYTIC_EVENT_TYPE}\"]\n self.events = events\n self._save_lock = Lock()\n self._end = False\n\n @abstractmethod\n def initialize(self, fl_ctx: FLContext):\n \"\"\"Initializes the receiver.\n\n Args:\n fl_ctx (FLContext): fl context.\n \"\"\"\n pass\n\n @abstractmethod\n def save(self, fl_ctx: FLContext, shareable: Shareable, record_origin: str):\n \"\"\"Saves the received data.\n\n Args:\n fl_ctx (FLContext): fl context.\n shareable (Shareable): the received message.\n record_origin (str): the sender of this message / record.\n \"\"\"\n pass\n\n @abstractmethod\n def finalize(self, fl_ctx: FLContext):\n \"\"\"Finalizes the receiver.\n\n Args:\n fl_ctx (FLContext): fl context.\n \"\"\"\n pass\n\n def handle_event(self, event_type: str, fl_ctx: FLContext):\n if event_type == EventType.START_RUN:\n self.initialize(fl_ctx)\n elif event_type in self.events:\n if self._end:\n self.log_debug(fl_ctx, f\"Already received end run event, drop event {event_type}.\", fire_event=False)\n return\n data = fl_ctx.get_prop(FLContextKey.EVENT_DATA, None)\n if data is None:\n self.log_error(fl_ctx, \"Missing event data.\", fire_event=False)\n return\n if not isinstance(data, Shareable):\n self.log_error(\n fl_ctx, f\"Expect data to be an instance of Shareable but got 
{type(data)}\", fire_event=False\n )\n return\n\n # if fed event use peer name to save\n if fl_ctx.get_prop(FLContextKey.EVENT_SCOPE) == EventScope.FEDERATION:\n record_origin = data.get_peer_prop(ReservedKey.IDENTITY_NAME, None)\n else:\n record_origin = fl_ctx.get_identity_name()\n\n if record_origin is None:\n self.log_error(fl_ctx, \"record_origin can't be None.\", fire_event=False)\n return\n with self._save_lock:\n self.save(shareable=data, fl_ctx=fl_ctx, record_origin=record_origin)\n elif event_type == EventType.END_RUN:\n self._end = True\n self.finalize(fl_ctx)\n", "path": "nvflare/app_common/widgets/streaming.py"}, {"content": "# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nclass AppEventType(object):\n \"\"\"Defines application events.\"\"\"\n\n START_ROUND = \"_start_round\"\n END_ROUND = \"_end_round\"\n\n BEFORE_AGGREGATION = \"_before_aggregation\"\n END_AGGREGATION = \"_end_aggregation\"\n\n SUBMIT_LOCAL_BEST_MODEL = \"_submit_local_best_model\"\n SERVER_RECEIVE_BEST_MODEL = \"_server_receive_best_model\"\n RECEIVE_VALIDATION_MODEL = \"_receive_validation_model\"\n SEND_VALIDATION_RESULTS = \"_send_validation_results\"\n RECEIVE_VALIDATION_RESULTS = \"_receive_validation_results\"\n\n BEFORE_INITIALIZE = \"_before_initialize\"\n AFTER_INITIALIZE = \"_after_initialize\"\n BEFORE_TRAIN = \"_before_train\"\n AFTER_TRAIN = \"_after_train\"\n\n BEFORE_SHAREABLE_TO_LEARNABLE = \"_before_model_update\"\n AFTER_SHAREABLE_TO_LEARNABLE = \"_after_model_update\"\n BEFORE_LEARNABLE_PERSIST = \"_before_save_model\"\n AFTER_LEARNABLE_PERSIST = \"_after_save_model\"\n BEFORE_SEND_BEST_MODEL = \"_before_send_best_model\"\n AFTER_SEND_BEST_MODEL = \"_after_send_best_model\"\n LOCAL_BEST_MODEL_AVAILABLE = \"_local_best_model_available\"\n GLOBAL_BEST_MODEL_AVAILABLE = \"_global_best_model_available\"\n BEFORE_GET_VALIDATION_MODELS = \"_before_get_validation_models\"\n AFTER_GET_VALIDATION_MODELS = \"_after_get_validation_models\"\n SEND_MODEL_FOR_VALIDATION = \"_send_model_for_validation\"\n BEFORE_VALIDATE_MODEL = \"_before_validate_model\"\n AFTER_VALIDATE_MODEL = \"_after_validate_model\"\n BEFORE_SUBMIT_VALIDATION_RESULTS = \"_before_submit_validation_results\"\n AFTER_SUBMIT_VALIDATION_RESULTS = \"_after_submit_validation_results\"\n\n # Events\n ROUND_STARTED = \"_round_started\"\n ROUND_DONE = \"_round_done\"\n INITIAL_MODEL_LOADED = \"_initial_model_loaded\"\n BEFORE_TRAIN_TASK = \"_before_train_task\"\n RECEIVE_CONTRIBUTION = \"_receive_contribution\"\n AFTER_CONTRIBUTION_ACCEPT = \"_after_contribution_accept\"\n AFTER_AGGREGATION = \"_after_aggregation\"\n BEFORE_CONTRIBUTION_ACCEPT = \"_before_contribution_accept\"\n GLOBAL_WEIGHTS_UPDATED = \"_global_weights_updated\"\n TRAINING_STARTED = \"_training_started\"\n TRAINING_FINISHED = \"_training_finished\"\n TRAIN_DONE = \"_train_done\"\n\n CROSS_VAL_INIT = \"_cross_val_init\"\n VALIDATION_RESULT_RECEIVED = \"_validation_result_received\"\n 
RECEIVE_BEST_MODEL = \"_receive_best_model\"\n\n ANALYTIC_EVENT_TYPE = \"analytix_log_stats\"\n LOGGING_EVENT_TYPE = \"analytix_logging\"\n", "path": "nvflare/app_common/app_event_type.py"}]} | 4,090 | 777 |
gh_patches_debug_1873 | rasdani/github-patches | git_diff | mdn__kuma-5855 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Escalated number of errors from Google Search Console: Breadcrumbs
[Google Search Console](https://search.google.com/search-console/breadcrumbs/drilldown?resource_id=https%3A%2F%2Fdeveloper.mozilla.org%2F&item_key=CgwICRoIcG9zaXRpb24QAxgP&hl=en) emailed us about a big increase in indexing "errors" about breadcrumbs.
<img width="1174" alt="Screen Shot 2019-09-20 at 1 47 54 PM" src="https://user-images.githubusercontent.com/26739/65347578-4a118c80-dbad-11e9-8bda-8df0bd7871de.png">
The code that produces our breadcrumbs (on the Wiki) [hasn't changed in years](https://github.com/mozilla/kuma/blame/master/kuma/wiki/jinja2/wiki/includes/document_macros.html).
</issue>
<code>
[start of kuma/wiki/templatetags/jinja_helpers.py]
1 # -*- coding: utf-8 -*-
2 import difflib
3 import json
4 import re
5
6 import jinja2
7 import six
8 from constance import config
9 from cssselect.parser import SelectorSyntaxError
10 from django.conf import settings
11 from django.core.serializers.json import DjangoJSONEncoder
12 from django.template import loader
13 from django.utils import lru_cache
14 from django.utils.html import conditional_escape
15 from django.utils.six.moves.urllib.parse import urlsplit, urlunparse
16 from django.utils.translation import ugettext
17 from django_jinja import library
18
19 from kuma.core.urlresolvers import reverse
20 from kuma.core.utils import order_params, urlparams
21 from kuma.core.utils import safer_pyquery as pq
22
23 from ..constants import DIFF_WRAP_COLUMN
24 from ..content import clean_content
25 from ..utils import tidy_content
26
27
28 def get_compare_url(doc, from_id, to_id):
29 return order_params(urlparams(
30 reverse('wiki.compare_revisions', args=[doc.slug], locale=doc.locale),
31 **{'from': from_id, 'to': to_id}
32 ))
33
34
35 @library.filter
36 def bugize_text(content):
37 content = jinja2.escape(content)
38 regex = re.compile(r'(bug)\s+#?(\d+)', re.IGNORECASE)
39 content = regex.sub(
40 jinja2.Markup('<a href="https://bugzilla.mozilla.org/'
41 'show_bug.cgi?id=\\2" '
42 'target="_blank" rel="noopener">\\1 \\2</a>'),
43 content)
44 return content
45
46
47 @library.global_function
48 def format_comment(rev, previous_revision=None, load_previous=True):
49 """
50 Format comment for HTML display, with Bugzilla links and slug changes.
51
52 Keyword Arguments:
53 rev - The revision
54 previous_revision - The previous revision (default None)
55 load_previous - Try loading previous revision if None (default True)
56 """
57 if previous_revision is None and load_previous:
58 previous_revision = rev.previous
59 comment = bugize_text(rev.comment if rev.comment else "")
60
61 # If a page move, say so
62 if previous_revision and previous_revision.slug != rev.slug:
63 comment += jinja2.Markup(
64 '<span class="slug-change">'
65 '<span>%s</span>'
66 ' <i class="icon-long-arrow-right" aria-hidden="true"></i> '
67 '<span>%s</span></span>') % (previous_revision.slug, rev.slug)
68
69 return comment
70
71
72 @library.global_function
73 def revisions_unified_diff(from_revision, to_revision):
74 """
75 Given the two revisions generate a diff between their tidied
76 content in the unified diff format.
77 """
78 if from_revision is None or to_revision is None:
79 return "Diff is unavailable."
80
81 fromfile = '[%s] #%s' % (from_revision.document.locale, from_revision.id)
82 tofile = '[%s] #%s' % (to_revision.document.locale, to_revision.id)
83
84 tidy_from = from_revision.get_tidied_content()
85 tidy_to = to_revision.get_tidied_content()
86
87 return u'\n'.join(difflib.unified_diff(
88 tidy_from.splitlines(),
89 tidy_to.splitlines(),
90 fromfile=fromfile,
91 tofile=tofile,
92 ))
93
94
95 @library.global_function
96 def diff_table(content_from, content_to, prev_id, curr_id, tidy=False):
97 """
98 Creates an HTML diff of the passed in content_from and content_to.
99 """
100 if tidy:
101 content_from, errors = tidy_content(content_from)
102 content_to, errors = tidy_content(content_to)
103
104 html_diff = difflib.HtmlDiff(wrapcolumn=DIFF_WRAP_COLUMN)
105 try:
106 diff = html_diff.make_table(content_from.splitlines(),
107 content_to.splitlines(),
108 ugettext('Revision %s') % prev_id,
109 ugettext('Revision %s') % curr_id,
110 context=True,
111 numlines=config.DIFF_CONTEXT_LINES)
112 except RuntimeError:
113 # some diffs hit a max recursion error
114 message = ugettext(u'There was an error generating the content.')
115 diff = '<div class="warning"><p>%s</p></div>' % message
116 return jinja2.Markup(diff)
117
118
119 @library.global_function
120 def tag_diff_table(prev_tags, curr_tags, prev_id, curr_id):
121 html_diff = difflib.HtmlDiff(wrapcolumn=DIFF_WRAP_COLUMN)
122
123 diff = html_diff.make_table([prev_tags], [curr_tags],
124 ugettext('Revision %s') % prev_id,
125 ugettext('Revision %s') % curr_id)
126
127 # Simple formatting update: 784877
128 diff = diff.replace('",', '"<br />').replace('<td', '<td valign="top"')
129 return jinja2.Markup(diff)
130
131
132 @library.global_function
133 def colorize_diff(diff):
134 # we're doing something horrible here because this will show up
135 # in feed reader and other clients that don't load CSS files
136 diff = diff.replace('<span class="diff_add"', '<span class="diff_add" '
137 'style="background-color: #afa; text-decoration: none;"')
138 diff = diff.replace('<span class="diff_sub"', '<span class="diff_sub" '
139 'style="background-color: #faa; text-decoration: none;"')
140 diff = diff.replace('<span class="diff_chg"', '<span class="diff_chg" '
141 'style="background-color: #fe0; text-decoration: none;"')
142 return diff
143
144
145 @library.filter
146 def wiki_bleach(val):
147 return jinja2.Markup(clean_content(val))
148
149
150 @library.filter
151 def selector_content_find(document, selector):
152 """
153 Provided a selector, returns the relevant content from the document
154 """
155 content = ''
156 try:
157 page = pq(document.rendered_html)
158 except ValueError:
159 # pass errors during construction
160 pass
161 try:
162 content = page.find(selector).text()
163 except SelectorSyntaxError:
164 # pass errors during find/select
165 pass
166 return content
167
168
169 def _recursive_escape(value, esc=conditional_escape):
170 """
171 Recursively escapes strings in an object.
172
173 Traverses dict, list and tuples. These are the data structures supported
174 by the JSON encoder.
175 """
176 if isinstance(value, dict):
177 return type(value)((esc(k), _recursive_escape(v))
178 for (k, v) in value.iteritems())
179 elif isinstance(value, (list, tuple)):
180 return type(value)(_recursive_escape(v) for v in value)
181 elif isinstance(value, six.string_types):
182 return esc(value)
183 elif isinstance(value, (int, long, float)) or value in (True, False, None):
184 return value
185 # We've exhausted all the types acceptable by the default JSON encoder.
186 # Django's improved JSON encoder handles a few other types, all of which
187 # are represented by strings. For these types, we apply JSON encoding
188 # immediately and then escape the result.
189 return esc(DjangoJSONEncoder().default(value))
190
191
192 @library.filter
193 def tojson(value):
194 """
195 Returns the JSON representation of the value.
196 """
197 try:
198 # If value contains custom subclasses of int, str, datetime, etc.
199 # arbitrary exceptions may be raised during escaping or serialization.
200 result = json.dumps(_recursive_escape(value), cls=DjangoJSONEncoder)
201 except Exception:
202 return ''
203 return jinja2.Markup(result)
204
205
206 @library.filter
207 def absolutify(url, for_wiki_site=False):
208 """Joins settings.SITE_URL with a URL path."""
209 if url.startswith('http'):
210 return url
211
212 if for_wiki_site:
213 site_url = settings.WIKI_SITE_URL
214 else:
215 site_url = settings.SITE_URL
216
217 site = urlsplit(site_url)
218 parts = urlsplit(url)
219 scheme = site.scheme
220 netloc = site.netloc
221 path = parts.path
222 query = parts.query
223 fragment = parts.fragment
224
225 if path == '':
226 path = '/'
227
228 return urlunparse([scheme, netloc, path, None, query, fragment])
229
230
231 @library.global_function
232 def wiki_url(path):
233 """
234 Create a URL pointing to Kuma.
235 Look for a wiki page in the current locale, or default to given path
236 """
237 if '#' in path:
238 slug, fragment = path.split('#', 1)
239 else:
240 slug = path
241 fragment = ''
242 new_path = reverse('wiki.document', args=[slug])
243 if fragment:
244 new_path += '#' + fragment
245 return new_path
246
247
248 @library.global_function
249 @lru_cache.lru_cache()
250 def include_svg(path, title=None, title_id=None):
251 """
252 Embded an SVG file by path, optionally changing the title,
253 and adding an id
254 """
255 svg = loader.get_template(path).render()
256 if (title):
257 svg_parsed = pq(svg, namespaces={'svg': 'http://www.w3.org/2000/svg'})
258 svg_parsed('svg|title')[0].text = title
259 if (title_id):
260 svg_parsed('svg|title').attr['id'] = title_id
261 svg_out = svg_parsed.outerHtml()
262 else:
263 svg_out = svg
264 return jinja2.Markup(svg_out)
265
[end of kuma/wiki/templatetags/jinja_helpers.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kuma/wiki/templatetags/jinja_helpers.py b/kuma/wiki/templatetags/jinja_helpers.py
--- a/kuma/wiki/templatetags/jinja_helpers.py
+++ b/kuma/wiki/templatetags/jinja_helpers.py
@@ -262,3 +262,12 @@
else:
svg_out = svg
return jinja2.Markup(svg_out)
+
+
[email protected]
+def length_plus_one(lengthy):
+ """Useful when you want to do something like
+ `{{ somelist|length_plus_one }}` and you want it to output the
+ Python equivalent of `len(somelist) + 1`.
+ """
+ return len(lengthy) + 1
| {"golden_diff": "diff --git a/kuma/wiki/templatetags/jinja_helpers.py b/kuma/wiki/templatetags/jinja_helpers.py\n--- a/kuma/wiki/templatetags/jinja_helpers.py\n+++ b/kuma/wiki/templatetags/jinja_helpers.py\n@@ -262,3 +262,12 @@\n else:\n svg_out = svg\n return jinja2.Markup(svg_out)\n+\n+\[email protected]\n+def length_plus_one(lengthy):\n+ \"\"\"Useful when you want to do something like\n+ `{{ somelist|length_plus_one }}` and you want it to output the\n+ Python equivalent of `len(somelist) + 1`.\n+ \"\"\"\n+ return len(lengthy) + 1\n", "issue": "Escalated number of errors from Google Search Console: Breadcrumbs\n[Google Search Console](https://search.google.com/search-console/breadcrumbs/drilldown?resource_id=https%3A%2F%2Fdeveloper.mozilla.org%2F&item_key=CgwICRoIcG9zaXRpb24QAxgP&hl=en) emailed us about a big increase in indexing \"errors\" about breadcrumbs.\r\n<img width=\"1174\" alt=\"Screen Shot 2019-09-20 at 1 47 54 PM\" src=\"https://user-images.githubusercontent.com/26739/65347578-4a118c80-dbad-11e9-8bda-8df0bd7871de.png\">\r\n\r\nThe code that produces our breadcrumbs (on the Wiki) [hasn't changed in years](https://github.com/mozilla/kuma/blame/master/kuma/wiki/jinja2/wiki/includes/document_macros.html).\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nimport difflib\nimport json\nimport re\n\nimport jinja2\nimport six\nfrom constance import config\nfrom cssselect.parser import SelectorSyntaxError\nfrom django.conf import settings\nfrom django.core.serializers.json import DjangoJSONEncoder\nfrom django.template import loader\nfrom django.utils import lru_cache\nfrom django.utils.html import conditional_escape\nfrom django.utils.six.moves.urllib.parse import urlsplit, urlunparse\nfrom django.utils.translation import ugettext\nfrom django_jinja import library\n\nfrom kuma.core.urlresolvers import reverse\nfrom kuma.core.utils import order_params, urlparams\nfrom kuma.core.utils import safer_pyquery as pq\n\nfrom ..constants import DIFF_WRAP_COLUMN\nfrom ..content import clean_content\nfrom ..utils import tidy_content\n\n\ndef get_compare_url(doc, from_id, to_id):\n return order_params(urlparams(\n reverse('wiki.compare_revisions', args=[doc.slug], locale=doc.locale),\n **{'from': from_id, 'to': to_id}\n ))\n\n\[email protected]\ndef bugize_text(content):\n content = jinja2.escape(content)\n regex = re.compile(r'(bug)\\s+#?(\\d+)', re.IGNORECASE)\n content = regex.sub(\n jinja2.Markup('<a href=\"https://bugzilla.mozilla.org/'\n 'show_bug.cgi?id=\\\\2\" '\n 'target=\"_blank\" rel=\"noopener\">\\\\1 \\\\2</a>'),\n content)\n return content\n\n\[email protected]_function\ndef format_comment(rev, previous_revision=None, load_previous=True):\n \"\"\"\n Format comment for HTML display, with Bugzilla links and slug changes.\n\n Keyword Arguments:\n rev - The revision\n previous_revision - The previous revision (default None)\n load_previous - Try loading previous revision if None (default True)\n \"\"\"\n if previous_revision is None and load_previous:\n previous_revision = rev.previous\n comment = bugize_text(rev.comment if rev.comment else \"\")\n\n # If a page move, say so\n if previous_revision and previous_revision.slug != rev.slug:\n comment += jinja2.Markup(\n '<span class=\"slug-change\">'\n '<span>%s</span>'\n ' <i class=\"icon-long-arrow-right\" aria-hidden=\"true\"></i> '\n '<span>%s</span></span>') % (previous_revision.slug, rev.slug)\n\n return comment\n\n\[email protected]_function\ndef revisions_unified_diff(from_revision, to_revision):\n \"\"\"\n Given the two 
revisions generate a diff between their tidied\n content in the unified diff format.\n \"\"\"\n if from_revision is None or to_revision is None:\n return \"Diff is unavailable.\"\n\n fromfile = '[%s] #%s' % (from_revision.document.locale, from_revision.id)\n tofile = '[%s] #%s' % (to_revision.document.locale, to_revision.id)\n\n tidy_from = from_revision.get_tidied_content()\n tidy_to = to_revision.get_tidied_content()\n\n return u'\\n'.join(difflib.unified_diff(\n tidy_from.splitlines(),\n tidy_to.splitlines(),\n fromfile=fromfile,\n tofile=tofile,\n ))\n\n\[email protected]_function\ndef diff_table(content_from, content_to, prev_id, curr_id, tidy=False):\n \"\"\"\n Creates an HTML diff of the passed in content_from and content_to.\n \"\"\"\n if tidy:\n content_from, errors = tidy_content(content_from)\n content_to, errors = tidy_content(content_to)\n\n html_diff = difflib.HtmlDiff(wrapcolumn=DIFF_WRAP_COLUMN)\n try:\n diff = html_diff.make_table(content_from.splitlines(),\n content_to.splitlines(),\n ugettext('Revision %s') % prev_id,\n ugettext('Revision %s') % curr_id,\n context=True,\n numlines=config.DIFF_CONTEXT_LINES)\n except RuntimeError:\n # some diffs hit a max recursion error\n message = ugettext(u'There was an error generating the content.')\n diff = '<div class=\"warning\"><p>%s</p></div>' % message\n return jinja2.Markup(diff)\n\n\[email protected]_function\ndef tag_diff_table(prev_tags, curr_tags, prev_id, curr_id):\n html_diff = difflib.HtmlDiff(wrapcolumn=DIFF_WRAP_COLUMN)\n\n diff = html_diff.make_table([prev_tags], [curr_tags],\n ugettext('Revision %s') % prev_id,\n ugettext('Revision %s') % curr_id)\n\n # Simple formatting update: 784877\n diff = diff.replace('\",', '\"<br />').replace('<td', '<td valign=\"top\"')\n return jinja2.Markup(diff)\n\n\[email protected]_function\ndef colorize_diff(diff):\n # we're doing something horrible here because this will show up\n # in feed reader and other clients that don't load CSS files\n diff = diff.replace('<span class=\"diff_add\"', '<span class=\"diff_add\" '\n 'style=\"background-color: #afa; text-decoration: none;\"')\n diff = diff.replace('<span class=\"diff_sub\"', '<span class=\"diff_sub\" '\n 'style=\"background-color: #faa; text-decoration: none;\"')\n diff = diff.replace('<span class=\"diff_chg\"', '<span class=\"diff_chg\" '\n 'style=\"background-color: #fe0; text-decoration: none;\"')\n return diff\n\n\[email protected]\ndef wiki_bleach(val):\n return jinja2.Markup(clean_content(val))\n\n\[email protected]\ndef selector_content_find(document, selector):\n \"\"\"\n Provided a selector, returns the relevant content from the document\n \"\"\"\n content = ''\n try:\n page = pq(document.rendered_html)\n except ValueError:\n # pass errors during construction\n pass\n try:\n content = page.find(selector).text()\n except SelectorSyntaxError:\n # pass errors during find/select\n pass\n return content\n\n\ndef _recursive_escape(value, esc=conditional_escape):\n \"\"\"\n Recursively escapes strings in an object.\n\n Traverses dict, list and tuples. 
These are the data structures supported\n by the JSON encoder.\n \"\"\"\n if isinstance(value, dict):\n return type(value)((esc(k), _recursive_escape(v))\n for (k, v) in value.iteritems())\n elif isinstance(value, (list, tuple)):\n return type(value)(_recursive_escape(v) for v in value)\n elif isinstance(value, six.string_types):\n return esc(value)\n elif isinstance(value, (int, long, float)) or value in (True, False, None):\n return value\n # We've exhausted all the types acceptable by the default JSON encoder.\n # Django's improved JSON encoder handles a few other types, all of which\n # are represented by strings. For these types, we apply JSON encoding\n # immediately and then escape the result.\n return esc(DjangoJSONEncoder().default(value))\n\n\[email protected]\ndef tojson(value):\n \"\"\"\n Returns the JSON representation of the value.\n \"\"\"\n try:\n # If value contains custom subclasses of int, str, datetime, etc.\n # arbitrary exceptions may be raised during escaping or serialization.\n result = json.dumps(_recursive_escape(value), cls=DjangoJSONEncoder)\n except Exception:\n return ''\n return jinja2.Markup(result)\n\n\[email protected]\ndef absolutify(url, for_wiki_site=False):\n \"\"\"Joins settings.SITE_URL with a URL path.\"\"\"\n if url.startswith('http'):\n return url\n\n if for_wiki_site:\n site_url = settings.WIKI_SITE_URL\n else:\n site_url = settings.SITE_URL\n\n site = urlsplit(site_url)\n parts = urlsplit(url)\n scheme = site.scheme\n netloc = site.netloc\n path = parts.path\n query = parts.query\n fragment = parts.fragment\n\n if path == '':\n path = '/'\n\n return urlunparse([scheme, netloc, path, None, query, fragment])\n\n\[email protected]_function\ndef wiki_url(path):\n \"\"\"\n Create a URL pointing to Kuma.\n Look for a wiki page in the current locale, or default to given path\n \"\"\"\n if '#' in path:\n slug, fragment = path.split('#', 1)\n else:\n slug = path\n fragment = ''\n new_path = reverse('wiki.document', args=[slug])\n if fragment:\n new_path += '#' + fragment\n return new_path\n\n\[email protected]_function\n@lru_cache.lru_cache()\ndef include_svg(path, title=None, title_id=None):\n \"\"\"\n Embded an SVG file by path, optionally changing the title,\n and adding an id\n \"\"\"\n svg = loader.get_template(path).render()\n if (title):\n svg_parsed = pq(svg, namespaces={'svg': 'http://www.w3.org/2000/svg'})\n svg_parsed('svg|title')[0].text = title\n if (title_id):\n svg_parsed('svg|title').attr['id'] = title_id\n svg_out = svg_parsed.outerHtml()\n else:\n svg_out = svg\n return jinja2.Markup(svg_out)\n", "path": "kuma/wiki/templatetags/jinja_helpers.py"}]} | 3,466 | 171 |
gh_patches_debug_9866 | rasdani/github-patches | git_diff | microsoft__ptvsd-882 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
pyfile adds an additional new line in every line of code
## Environment data
- PTVSD version: master
- OS and version: any
- Python version (& distribution if applicable, e.g. Anaconda): 3.6
- Using VS Code or Visual Studio:
## Actual behavior
code:
```python
@pyfile
def foo():
print('one')
print('two')
```
The file generated by pyfile, `foo.py`:
```python
print('one')

print('two')

```
## Expected behavior
The file generated by pyfile, `foo.py`:
```python
print('one')
print('two')
```
Having the extra blank lines makes it confusing to set breakpoints in tests.
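A minimal sketch of the likely mechanism, based on the `pyfile` fixture shown below (the snippet itself is illustrative, not code from the repo): `inspect.getsourcelines()` returns lines that already end in `"\n"`, so joining them with `'\n'` adds a second line break between every pair of lines.

```python
# Hypothetical illustration: what the pyfile fixture holds after dedenting the body.
body = ["print('one')\n", "print('two')\n"]   # each line already carries its own "\n"

print(repr('\n'.join(body)))   # "print('one')\n\nprint('two')\n"  -> extra blank line
print(repr(''.join(body)))     # "print('one')\nprint('two')\n"    -> the expected content
```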
</issue>
<code>
[start of pytests/conftest.py]
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License. See LICENSE in the project root
3 # for license information.
4
5 from __future__ import print_function, with_statement, absolute_import
6
7 import inspect
8 import pytest
9 import threading
10 import types
11
12 from .helpers.session import DebugSession
13
14
15 @pytest.fixture
16 def daemon():
17 """Provides a factory function for daemon threads. The returned thread is
18 started immediately, and it must not be alive by the time the test returns.
19 """
20
21 daemons = []
22
23 def factory(func, name_suffix=''):
24 name = func.__name__ + name_suffix
25 thread = threading.Thread(target=func, name=name)
26 thread.daemon = True
27 daemons.append(thread)
28 thread.start()
29 return thread
30
31 yield factory
32
33 for thread in daemons:
34 assert not thread.is_alive()
35
36
37 @pytest.fixture
38 def pyfile(request, tmpdir):
39 """A fixture providing a factory function that generates .py files.
40
41 The returned factory takes a single function with an empty argument list,
42 generates a temporary file that contains the code corresponding to the
43 function body, and returns the full path to the generated file. Idiomatic
44 use is as a decorator, e.g.:
45
46 @pyfile
47 def script_file():
48 print('fizz')
49 print('buzz')
50
51 will produce a temporary file named script_file.py containing:
52
53 print('fizz')
54 print('buzz')
55
56 and the variable script_file will contain the path to that file.
57
58 In order for the factory to be able to extract the function body properly,
59 function header ("def") must all be on a single line, with nothing after
60 the colon but whitespace.
61 """
62
63 def factory(source):
64 assert isinstance(source, types.FunctionType)
65 name = source.__name__
66 source, _ = inspect.getsourcelines(source)
67
68 # First, find the "def" line.
69 def_lineno = 0
70 for line in source:
71 line = line.strip()
72 if line.startswith('def') and line.endswith(':'):
73 break
74 def_lineno += 1
75 else:
76 raise ValueError('Failed to locate function header.')
77
78 # Remove everything up to and including "def".
79 source = source[def_lineno + 1:]
80 assert source
81
82 # Now we need to adjust indentation. Compute how much the first line of
83 # the body is indented by, then dedent all lines by that amount.
84 line = source[0]
85 indent = len(line) - len(line.lstrip())
86 source = [line[indent:] for line in source]
87 source = '\n'.join(source)
88
89 tmpfile = tmpdir.join(name + '.py')
90 assert not tmpfile.check()
91 tmpfile.write(source)
92 return tmpfile.strpath
93
94 return factory
95
96
97 @pytest.fixture(params=[
98 'launch', 'attach_socket' # 'attach_pid'
99 ])
100 def debug_session(request):
101 session = DebugSession(request.param)
102 yield session
103 try:
104 session.wait_for_exit()
105 finally:
106 session.stop()
107
108
109
[end of pytests/conftest.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pytests/conftest.py b/pytests/conftest.py
--- a/pytests/conftest.py
+++ b/pytests/conftest.py
@@ -84,7 +84,7 @@
line = source[0]
indent = len(line) - len(line.lstrip())
source = [line[indent:] for line in source]
- source = '\n'.join(source)
+ source = ''.join(source)
tmpfile = tmpdir.join(name + '.py')
assert not tmpfile.check()
@@ -104,5 +104,3 @@
session.wait_for_exit()
finally:
session.stop()
-
-
| {"golden_diff": "diff --git a/pytests/conftest.py b/pytests/conftest.py\n--- a/pytests/conftest.py\n+++ b/pytests/conftest.py\n@@ -84,7 +84,7 @@\n line = source[0]\n indent = len(line) - len(line.lstrip())\n source = [line[indent:] for line in source]\n- source = '\\n'.join(source)\n+ source = ''.join(source)\n \n tmpfile = tmpdir.join(name + '.py')\n assert not tmpfile.check()\n@@ -104,5 +104,3 @@\n session.wait_for_exit()\n finally:\n session.stop()\n-\n-\n", "issue": "pyfile adds an additional new line in every line of code\n## Environment data\r\n\r\n- PTVSD version: master\r\n- OS and version: any\r\n- Python version (& distribution if applicable, e.g. Anaconda): 3.6\r\n- Using VS Code or Visual Studio:\r\n\r\n## Actual behavior\r\n\r\ncode:\r\n```python\r\n@pyfile\r\ndef foo():\r\n print('one')\r\n print('two')\r\n```\r\nThe file generated by pyfile, `foo.py`:\r\n```python\r\nprint('one')\r\n\r\nprint('two')\r\n\r\n```\r\n\r\n## Expected behavior\r\n\r\nThe file generated by pyfile, `foo.py`:\r\n```python\r\nprint('one')\r\nprint('two')\r\n```\r\n\r\nHaving the extra blank lines makes it confusing to set breakpoints in tests.\r\n\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import print_function, with_statement, absolute_import\n\nimport inspect\nimport pytest\nimport threading\nimport types\n\nfrom .helpers.session import DebugSession\n\n\[email protected]\ndef daemon():\n \"\"\"Provides a factory function for daemon threads. The returned thread is\n started immediately, and it must not be alive by the time the test returns.\n \"\"\"\n\n daemons = []\n\n def factory(func, name_suffix=''):\n name = func.__name__ + name_suffix\n thread = threading.Thread(target=func, name=name)\n thread.daemon = True\n daemons.append(thread)\n thread.start()\n return thread\n\n yield factory\n\n for thread in daemons:\n assert not thread.is_alive()\n\n\[email protected]\ndef pyfile(request, tmpdir):\n \"\"\"A fixture providing a factory function that generates .py files.\n\n The returned factory takes a single function with an empty argument list,\n generates a temporary file that contains the code corresponding to the\n function body, and returns the full path to the generated file. Idiomatic\n use is as a decorator, e.g.:\n\n @pyfile\n def script_file():\n print('fizz')\n print('buzz')\n\n will produce a temporary file named script_file.py containing:\n\n print('fizz')\n print('buzz')\n\n and the variable script_file will contain the path to that file.\n\n In order for the factory to be able to extract the function body properly,\n function header (\"def\") must all be on a single line, with nothing after\n the colon but whitespace.\n \"\"\"\n\n def factory(source):\n assert isinstance(source, types.FunctionType)\n name = source.__name__\n source, _ = inspect.getsourcelines(source)\n\n # First, find the \"def\" line.\n def_lineno = 0\n for line in source:\n line = line.strip()\n if line.startswith('def') and line.endswith(':'):\n break\n def_lineno += 1\n else:\n raise ValueError('Failed to locate function header.')\n\n # Remove everything up to and including \"def\".\n source = source[def_lineno + 1:]\n assert source\n\n # Now we need to adjust indentation. 
Compute how much the first line of\n # the body is indented by, then dedent all lines by that amount.\n line = source[0]\n indent = len(line) - len(line.lstrip())\n source = [line[indent:] for line in source]\n source = '\\n'.join(source)\n\n tmpfile = tmpdir.join(name + '.py')\n assert not tmpfile.check()\n tmpfile.write(source)\n return tmpfile.strpath\n\n return factory\n\n\[email protected](params=[\n 'launch', 'attach_socket' # 'attach_pid'\n])\ndef debug_session(request):\n session = DebugSession(request.param)\n yield session\n try:\n session.wait_for_exit()\n finally:\n session.stop()\n\n\n", "path": "pytests/conftest.py"}]} | 1,594 | 147 |
gh_patches_debug_22998 | rasdani/github-patches | git_diff | pyqtgraph__pyqtgraph-2510 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ExportDialog Drawn Off Screen
Depending on the size of the scene, the export dialog box can be drawn fully or partially off screen. This is due to the implementation of the `show` command, which allows moving the box to negative pixel indices.
Problem Code:
https://github.com/pyqtgraph/pyqtgraph/blob/a5f48ec5b58a10260195f1424309f7374a85ece7/pyqtgraph/GraphicsScene/exportDialog.py#L57-L62
To fix this, the position calculation can be clipped using `max`, and the `setGeometry` command can be changed to `move` to account for the size of the window's frame.
Potential Fix:
```python
if not self.shown:
self.shown = True
vcenter = self.scene.getViewWidget().geometry().center()
x = max(0, int(vcenter.x() - self.width() / 2))
y = max(0, int(vcenter.y() - self.height() / 2))
self.move(x, y)
```
I can't say I understand the motivation for moving the dialog box in the first place, but at least with this modification the dialog box is always accessible with the mouse.
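To make the effect concrete, here is the same clamping logic as a standalone sketch (illustrative only; the helper name and the 241x367 dialog size from `exportDialogTemplate_generic.py` are not part of the proposed patch):

```python
# Illustrative helper: clamp the dialog's top-left corner so it never goes negative.
def clamped_top_left(center_x, center_y, dialog_w, dialog_h):
    x = max(0, int(center_x - dialog_w / 2))
    y = max(0, int(center_y - dialog_h / 2))
    return x, y

# A view whose center sits at (80, 60), with the default 241x367 export dialog:
print(clamped_top_left(80, 60, 241, 367))   # (0, 0) instead of (-40, -123), i.e. off screen
```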
</issue>
<code>
[start of pyqtgraph/GraphicsScene/exportDialog.py]
1 from .. import exporters as exporters
2 from .. import functions as fn
3 from ..graphicsItems.PlotItem import PlotItem
4 from ..graphicsItems.ViewBox import ViewBox
5 from ..Qt import QtCore, QtWidgets
6
7 from . import exportDialogTemplate_generic as ui_template
8
9 class FormatExportListWidgetItem(QtWidgets.QListWidgetItem):
10 def __init__(self, expClass, *args, **kwargs):
11 QtWidgets.QListWidgetItem.__init__(self, *args, **kwargs)
12 self.expClass = expClass
13
14
15 class ExportDialog(QtWidgets.QWidget):
16 def __init__(self, scene):
17 QtWidgets.QWidget.__init__(self)
18 self.setVisible(False)
19 self.setWindowTitle("Export")
20 self.shown = False
21 self.currentExporter = None
22 self.scene = scene
23
24 self.selectBox = QtWidgets.QGraphicsRectItem()
25 self.selectBox.setPen(fn.mkPen('y', width=3, style=QtCore.Qt.PenStyle.DashLine))
26 self.selectBox.hide()
27 self.scene.addItem(self.selectBox)
28
29 self.ui = ui_template.Ui_Form()
30 self.ui.setupUi(self)
31
32 self.ui.closeBtn.clicked.connect(self.close)
33 self.ui.exportBtn.clicked.connect(self.exportClicked)
34 self.ui.copyBtn.clicked.connect(self.copyClicked)
35 self.ui.itemTree.currentItemChanged.connect(self.exportItemChanged)
36 self.ui.formatList.currentItemChanged.connect(self.exportFormatChanged)
37
38
39 def show(self, item=None):
40 if item is not None:
41 ## Select next exportable parent of the item originally clicked on
42 while not isinstance(item, ViewBox) and not isinstance(item, PlotItem) and item is not None:
43 item = item.parentItem()
44 ## if this is a ViewBox inside a PlotItem, select the parent instead.
45 if isinstance(item, ViewBox) and isinstance(item.parentItem(), PlotItem):
46 item = item.parentItem()
47 self.updateItemList(select=item)
48 self.setVisible(True)
49 self.activateWindow()
50 self.raise_()
51 self.selectBox.setVisible(True)
52
53 if not self.shown:
54 self.shown = True
55 vcenter = self.scene.getViewWidget().geometry().center()
56 self.setGeometry(int(vcenter.x() - self.width() / 2),
57 int(vcenter.y() - self.height() / 2),
58 self.width(), self.height())
59
60 def updateItemList(self, select=None):
61 self.ui.itemTree.clear()
62 si = QtWidgets.QTreeWidgetItem(["Entire Scene"])
63 si.gitem = self.scene
64 self.ui.itemTree.addTopLevelItem(si)
65 self.ui.itemTree.setCurrentItem(si)
66 si.setExpanded(True)
67 for child in self.scene.items():
68 if child.parentItem() is None:
69 self.updateItemTree(child, si, select=select)
70
71 def updateItemTree(self, item, treeItem, select=None):
72 si = None
73 if isinstance(item, ViewBox):
74 si = QtWidgets.QTreeWidgetItem(['ViewBox'])
75 elif isinstance(item, PlotItem):
76 si = QtWidgets.QTreeWidgetItem(['Plot'])
77
78 if si is not None:
79 si.gitem = item
80 treeItem.addChild(si)
81 treeItem = si
82 if si.gitem is select:
83 self.ui.itemTree.setCurrentItem(si)
84
85 for ch in item.childItems():
86 self.updateItemTree(ch, treeItem, select=select)
87
88
89 def exportItemChanged(self, item, prev):
90 if item is None:
91 return
92 if item.gitem is self.scene:
93 newBounds = self.scene.views()[0].viewRect()
94 else:
95 newBounds = item.gitem.sceneBoundingRect()
96 self.selectBox.setRect(newBounds)
97 self.selectBox.show()
98 self.updateFormatList()
99
100 def updateFormatList(self):
101 current = self.ui.formatList.currentItem()
102
103 self.ui.formatList.clear()
104 gotCurrent = False
105 for exp in exporters.listExporters():
106 item = FormatExportListWidgetItem(exp, QtCore.QCoreApplication.translate('Exporter', exp.Name))
107 self.ui.formatList.addItem(item)
108 if item == current:
109 self.ui.formatList.setCurrentRow(self.ui.formatList.count() - 1)
110 gotCurrent = True
111
112 if not gotCurrent:
113 self.ui.formatList.setCurrentRow(0)
114
115 def exportFormatChanged(self, item, prev):
116 if item is None:
117 self.currentExporter = None
118 self.ui.paramTree.clear()
119 return
120 expClass = item.expClass
121 exp = expClass(item=self.ui.itemTree.currentItem().gitem)
122
123 params = exp.parameters()
124
125 if params is None:
126 self.ui.paramTree.clear()
127 else:
128 self.ui.paramTree.setParameters(params)
129 self.currentExporter = exp
130 self.ui.copyBtn.setEnabled(exp.allowCopy)
131
132 def exportClicked(self):
133 self.selectBox.hide()
134 self.currentExporter.export()
135
136 def copyClicked(self):
137 self.selectBox.hide()
138 self.currentExporter.export(copy=True)
139
140 def close(self):
141 self.selectBox.setVisible(False)
142 self.setVisible(False)
143
144 def closeEvent(self, event):
145 self.close()
146 super().closeEvent(event)
147
[end of pyqtgraph/GraphicsScene/exportDialog.py]
[start of pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py]
1 # Form implementation generated from reading ui file '../pyqtgraph/GraphicsScene/exportDialogTemplate.ui'
2 #
3 # Created by: PyQt6 UI code generator 6.1.0
4 #
5 # WARNING: Any manual changes made to this file will be lost when pyuic6 is
6 # run again. Do not edit this file unless you know what you are doing.
7
8
9 from ..Qt import QtCore, QtGui, QtWidgets
10
11
12 class Ui_Form(object):
13 def setupUi(self, Form):
14 Form.setObjectName("Form")
15 Form.resize(241, 367)
16 self.gridLayout = QtWidgets.QGridLayout(Form)
17 self.gridLayout.setSpacing(0)
18 self.gridLayout.setObjectName("gridLayout")
19 self.label = QtWidgets.QLabel(Form)
20 self.label.setObjectName("label")
21 self.gridLayout.addWidget(self.label, 0, 0, 1, 3)
22 self.itemTree = QtWidgets.QTreeWidget(Form)
23 self.itemTree.setObjectName("itemTree")
24 self.itemTree.headerItem().setText(0, "1")
25 self.itemTree.header().setVisible(False)
26 self.gridLayout.addWidget(self.itemTree, 1, 0, 1, 3)
27 self.label_2 = QtWidgets.QLabel(Form)
28 self.label_2.setObjectName("label_2")
29 self.gridLayout.addWidget(self.label_2, 2, 0, 1, 3)
30 self.formatList = QtWidgets.QListWidget(Form)
31 self.formatList.setObjectName("formatList")
32 self.gridLayout.addWidget(self.formatList, 3, 0, 1, 3)
33 self.exportBtn = QtWidgets.QPushButton(Form)
34 self.exportBtn.setObjectName("exportBtn")
35 self.gridLayout.addWidget(self.exportBtn, 6, 1, 1, 1)
36 self.closeBtn = QtWidgets.QPushButton(Form)
37 self.closeBtn.setObjectName("closeBtn")
38 self.gridLayout.addWidget(self.closeBtn, 6, 2, 1, 1)
39 self.paramTree = ParameterTree(Form)
40 self.paramTree.setColumnCount(2)
41 self.paramTree.setObjectName("paramTree")
42 self.paramTree.headerItem().setText(0, "1")
43 self.paramTree.header().setVisible(False)
44 self.gridLayout.addWidget(self.paramTree, 5, 0, 1, 3)
45 self.label_3 = QtWidgets.QLabel(Form)
46 self.label_3.setObjectName("label_3")
47 self.gridLayout.addWidget(self.label_3, 4, 0, 1, 3)
48 self.copyBtn = QtWidgets.QPushButton(Form)
49 self.copyBtn.setObjectName("copyBtn")
50 self.gridLayout.addWidget(self.copyBtn, 6, 0, 1, 1)
51
52 self.retranslateUi(Form)
53 QtCore.QMetaObject.connectSlotsByName(Form)
54
55 def retranslateUi(self, Form):
56 _translate = QtCore.QCoreApplication.translate
57 Form.setWindowTitle(_translate("Form", "Export"))
58 self.label.setText(_translate("Form", "Item to export:"))
59 self.label_2.setText(_translate("Form", "Export format"))
60 self.exportBtn.setText(_translate("Form", "Export"))
61 self.closeBtn.setText(_translate("Form", "Close"))
62 self.label_3.setText(_translate("Form", "Export options"))
63 self.copyBtn.setText(_translate("Form", "Copy"))
64 from ..parametertree import ParameterTree
65
[end of pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pyqtgraph/GraphicsScene/exportDialog.py b/pyqtgraph/GraphicsScene/exportDialog.py
--- a/pyqtgraph/GraphicsScene/exportDialog.py
+++ b/pyqtgraph/GraphicsScene/exportDialog.py
@@ -49,13 +49,12 @@
self.activateWindow()
self.raise_()
self.selectBox.setVisible(True)
-
if not self.shown:
self.shown = True
vcenter = self.scene.getViewWidget().geometry().center()
- self.setGeometry(int(vcenter.x() - self.width() / 2),
- int(vcenter.y() - self.height() / 2),
- self.width(), self.height())
+ x = max(0, int(vcenter.x() - self.width() / 2))
+ y = max(0, int(vcenter.y() - self.height() / 2))
+ self.move(x, y)
def updateItemList(self, select=None):
self.ui.itemTree.clear()
diff --git a/pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py b/pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py
--- a/pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py
+++ b/pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py
@@ -5,7 +5,6 @@
# WARNING: Any manual changes made to this file will be lost when pyuic6 is
# run again. Do not edit this file unless you know what you are doing.
-
from ..Qt import QtCore, QtGui, QtWidgets
| {"golden_diff": "diff --git a/pyqtgraph/GraphicsScene/exportDialog.py b/pyqtgraph/GraphicsScene/exportDialog.py\n--- a/pyqtgraph/GraphicsScene/exportDialog.py\n+++ b/pyqtgraph/GraphicsScene/exportDialog.py\n@@ -49,13 +49,12 @@\n self.activateWindow()\n self.raise_()\n self.selectBox.setVisible(True)\n- \n if not self.shown:\n self.shown = True\n vcenter = self.scene.getViewWidget().geometry().center()\n- self.setGeometry(int(vcenter.x() - self.width() / 2),\n- int(vcenter.y() - self.height() / 2),\n- self.width(), self.height())\n+ x = max(0, int(vcenter.x() - self.width() / 2))\n+ y = max(0, int(vcenter.y() - self.height() / 2))\n+ self.move(x, y)\n \n def updateItemList(self, select=None):\n self.ui.itemTree.clear()\ndiff --git a/pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py b/pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py\n--- a/pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py\n+++ b/pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py\n@@ -5,7 +5,6 @@\n # WARNING: Any manual changes made to this file will be lost when pyuic6 is\n # run again. Do not edit this file unless you know what you are doing.\n \n-\n from ..Qt import QtCore, QtGui, QtWidgets\n", "issue": "ExportDialog Drawn Off Screen\nDepending on the size of the scene the export dialog box can be drawn off or partially off screen. This is due to an implementation of the `show` command that allows moving the box to negative pixel indices.\r\n\r\nProblem Code:\r\nhttps://github.com/pyqtgraph/pyqtgraph/blob/a5f48ec5b58a10260195f1424309f7374a85ece7/pyqtgraph/GraphicsScene/exportDialog.py#L57-L62\r\n\r\nTo fix this, the position calculation can be clipped using `max`, and the `setGeometry` command can be changed to `move` to account for the size of the window's frame.\r\n\r\nPotential Fix:\r\n```python\r\n if not self.shown:\r\n self.shown = True\r\n vcenter = self.scene.getViewWidget().geometry().center()\r\n x = max(0, int(vcenter.x() - self.width() / 2))\r\n y = max(0, int(vcenter.y() - self.height() / 2))\r\n self.move(x, y)\r\n```\r\n\r\nI can't say I understand the motivation for moving the dialog box in the first place, but atleast with this modification the dialog box is always accessible with the mouse.\n", "before_files": [{"content": "from .. import exporters as exporters\nfrom .. import functions as fn\nfrom ..graphicsItems.PlotItem import PlotItem\nfrom ..graphicsItems.ViewBox import ViewBox\nfrom ..Qt import QtCore, QtWidgets\n\nfrom . 
import exportDialogTemplate_generic as ui_template\n\nclass FormatExportListWidgetItem(QtWidgets.QListWidgetItem):\n def __init__(self, expClass, *args, **kwargs):\n QtWidgets.QListWidgetItem.__init__(self, *args, **kwargs)\n self.expClass = expClass\n\n\nclass ExportDialog(QtWidgets.QWidget):\n def __init__(self, scene):\n QtWidgets.QWidget.__init__(self)\n self.setVisible(False)\n self.setWindowTitle(\"Export\")\n self.shown = False\n self.currentExporter = None\n self.scene = scene\n\n self.selectBox = QtWidgets.QGraphicsRectItem()\n self.selectBox.setPen(fn.mkPen('y', width=3, style=QtCore.Qt.PenStyle.DashLine))\n self.selectBox.hide()\n self.scene.addItem(self.selectBox)\n \n self.ui = ui_template.Ui_Form()\n self.ui.setupUi(self)\n \n self.ui.closeBtn.clicked.connect(self.close)\n self.ui.exportBtn.clicked.connect(self.exportClicked)\n self.ui.copyBtn.clicked.connect(self.copyClicked)\n self.ui.itemTree.currentItemChanged.connect(self.exportItemChanged)\n self.ui.formatList.currentItemChanged.connect(self.exportFormatChanged)\n \n\n def show(self, item=None):\n if item is not None:\n ## Select next exportable parent of the item originally clicked on\n while not isinstance(item, ViewBox) and not isinstance(item, PlotItem) and item is not None:\n item = item.parentItem()\n ## if this is a ViewBox inside a PlotItem, select the parent instead.\n if isinstance(item, ViewBox) and isinstance(item.parentItem(), PlotItem):\n item = item.parentItem()\n self.updateItemList(select=item)\n self.setVisible(True)\n self.activateWindow()\n self.raise_()\n self.selectBox.setVisible(True)\n \n if not self.shown:\n self.shown = True\n vcenter = self.scene.getViewWidget().geometry().center()\n self.setGeometry(int(vcenter.x() - self.width() / 2),\n int(vcenter.y() - self.height() / 2),\n self.width(), self.height())\n \n def updateItemList(self, select=None):\n self.ui.itemTree.clear()\n si = QtWidgets.QTreeWidgetItem([\"Entire Scene\"])\n si.gitem = self.scene\n self.ui.itemTree.addTopLevelItem(si)\n self.ui.itemTree.setCurrentItem(si)\n si.setExpanded(True)\n for child in self.scene.items():\n if child.parentItem() is None:\n self.updateItemTree(child, si, select=select)\n \n def updateItemTree(self, item, treeItem, select=None):\n si = None\n if isinstance(item, ViewBox):\n si = QtWidgets.QTreeWidgetItem(['ViewBox'])\n elif isinstance(item, PlotItem):\n si = QtWidgets.QTreeWidgetItem(['Plot'])\n \n if si is not None:\n si.gitem = item\n treeItem.addChild(si)\n treeItem = si\n if si.gitem is select:\n self.ui.itemTree.setCurrentItem(si)\n \n for ch in item.childItems():\n self.updateItemTree(ch, treeItem, select=select)\n \n \n def exportItemChanged(self, item, prev):\n if item is None:\n return\n if item.gitem is self.scene:\n newBounds = self.scene.views()[0].viewRect()\n else:\n newBounds = item.gitem.sceneBoundingRect()\n self.selectBox.setRect(newBounds)\n self.selectBox.show()\n self.updateFormatList()\n \n def updateFormatList(self):\n current = self.ui.formatList.currentItem()\n\n self.ui.formatList.clear()\n gotCurrent = False\n for exp in exporters.listExporters():\n item = FormatExportListWidgetItem(exp, QtCore.QCoreApplication.translate('Exporter', exp.Name))\n self.ui.formatList.addItem(item)\n if item == current:\n self.ui.formatList.setCurrentRow(self.ui.formatList.count() - 1)\n gotCurrent = True\n \n if not gotCurrent:\n self.ui.formatList.setCurrentRow(0)\n \n def exportFormatChanged(self, item, prev):\n if item is None:\n self.currentExporter = None\n self.ui.paramTree.clear()\n 
return\n expClass = item.expClass\n exp = expClass(item=self.ui.itemTree.currentItem().gitem)\n\n params = exp.parameters()\n\n if params is None:\n self.ui.paramTree.clear()\n else:\n self.ui.paramTree.setParameters(params)\n self.currentExporter = exp\n self.ui.copyBtn.setEnabled(exp.allowCopy)\n \n def exportClicked(self):\n self.selectBox.hide()\n self.currentExporter.export()\n \n def copyClicked(self):\n self.selectBox.hide()\n self.currentExporter.export(copy=True)\n \n def close(self):\n self.selectBox.setVisible(False)\n self.setVisible(False)\n\n def closeEvent(self, event):\n self.close()\n super().closeEvent(event)\n", "path": "pyqtgraph/GraphicsScene/exportDialog.py"}, {"content": "# Form implementation generated from reading ui file '../pyqtgraph/GraphicsScene/exportDialogTemplate.ui'\n#\n# Created by: PyQt6 UI code generator 6.1.0\n#\n# WARNING: Any manual changes made to this file will be lost when pyuic6 is\n# run again. Do not edit this file unless you know what you are doing.\n\n\nfrom ..Qt import QtCore, QtGui, QtWidgets\n\n\nclass Ui_Form(object):\n def setupUi(self, Form):\n Form.setObjectName(\"Form\")\n Form.resize(241, 367)\n self.gridLayout = QtWidgets.QGridLayout(Form)\n self.gridLayout.setSpacing(0)\n self.gridLayout.setObjectName(\"gridLayout\")\n self.label = QtWidgets.QLabel(Form)\n self.label.setObjectName(\"label\")\n self.gridLayout.addWidget(self.label, 0, 0, 1, 3)\n self.itemTree = QtWidgets.QTreeWidget(Form)\n self.itemTree.setObjectName(\"itemTree\")\n self.itemTree.headerItem().setText(0, \"1\")\n self.itemTree.header().setVisible(False)\n self.gridLayout.addWidget(self.itemTree, 1, 0, 1, 3)\n self.label_2 = QtWidgets.QLabel(Form)\n self.label_2.setObjectName(\"label_2\")\n self.gridLayout.addWidget(self.label_2, 2, 0, 1, 3)\n self.formatList = QtWidgets.QListWidget(Form)\n self.formatList.setObjectName(\"formatList\")\n self.gridLayout.addWidget(self.formatList, 3, 0, 1, 3)\n self.exportBtn = QtWidgets.QPushButton(Form)\n self.exportBtn.setObjectName(\"exportBtn\")\n self.gridLayout.addWidget(self.exportBtn, 6, 1, 1, 1)\n self.closeBtn = QtWidgets.QPushButton(Form)\n self.closeBtn.setObjectName(\"closeBtn\")\n self.gridLayout.addWidget(self.closeBtn, 6, 2, 1, 1)\n self.paramTree = ParameterTree(Form)\n self.paramTree.setColumnCount(2)\n self.paramTree.setObjectName(\"paramTree\")\n self.paramTree.headerItem().setText(0, \"1\")\n self.paramTree.header().setVisible(False)\n self.gridLayout.addWidget(self.paramTree, 5, 0, 1, 3)\n self.label_3 = QtWidgets.QLabel(Form)\n self.label_3.setObjectName(\"label_3\")\n self.gridLayout.addWidget(self.label_3, 4, 0, 1, 3)\n self.copyBtn = QtWidgets.QPushButton(Form)\n self.copyBtn.setObjectName(\"copyBtn\")\n self.gridLayout.addWidget(self.copyBtn, 6, 0, 1, 1)\n\n self.retranslateUi(Form)\n QtCore.QMetaObject.connectSlotsByName(Form)\n\n def retranslateUi(self, Form):\n _translate = QtCore.QCoreApplication.translate\n Form.setWindowTitle(_translate(\"Form\", \"Export\"))\n self.label.setText(_translate(\"Form\", \"Item to export:\"))\n self.label_2.setText(_translate(\"Form\", \"Export format\"))\n self.exportBtn.setText(_translate(\"Form\", \"Export\"))\n self.closeBtn.setText(_translate(\"Form\", \"Close\"))\n self.label_3.setText(_translate(\"Form\", \"Export options\"))\n self.copyBtn.setText(_translate(\"Form\", \"Copy\"))\nfrom ..parametertree import ParameterTree\n", "path": "pyqtgraph/GraphicsScene/exportDialogTemplate_generic.py"}]} | 3,088 | 322 |
gh_patches_debug_37290 | rasdani/github-patches | git_diff | sql-machine-learning__elasticdl-1563 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Master needs to relaunch a failed ps/worker pod for OOM workaround
TensorFlow 2.0 has memory leak issues in eager mode. A ps/worker pod may therefore fail due to OOM after running for a while.
To work around it, the master can delete the failed pod so it can be relaunched.
We can remove this workaround after all memory leak issues are resolved.
</issue>
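The workaround described above comes down to one extra check in the master's pod-event handling: a pod that was MODIFIED into the `Failed` phase (for example, OOM-killed) should be treated like a deleted pod so the existing relaunch path can start a replacement. The sketch below illustrates only that check and is not the project's implementation; it uses plain dicts in place of the Kubernetes client objects that `InstanceManager._event_cb` receives, and `should_relaunch` is a hypothetical helper name.

```python
def should_relaunch(event: dict) -> bool:
    """Treat a MODIFIED -> Failed pod event like a deletion trigger."""
    evt_obj = event.get("object")
    evt_type = event.get("type")
    if not evt_obj or evt_obj.get("kind") != "Pod":
        return False
    phase = evt_obj.get("status", {}).get("phase")
    return evt_type == "MODIFIED" and phase == "Failed"


if __name__ == "__main__":
    oom_event = {
        "type": "MODIFIED",
        "object": {"kind": "Pod", "status": {"phase": "Failed"}},
    }
    print(should_relaunch(oom_event))  # True -> delete the pod so it relaunches
```

Deleting such a pod (rather than leaving it in `Failed`) matters because, in the listing below, the relaunch logic only fires on `DELETED` events.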
<code>
[start of elasticdl/python/master/k8s_instance_manager.py]
1 import copy
2 import itertools
3 import threading
4 from collections import Counter
5
6 from elasticdl.python.common import k8s_client as k8s
7 from elasticdl.python.common.log_utils import default_logger as logger
8
9 _SERVICE_ADDR_SEP = ","
10
11
12 class InstanceManager(object):
13 def __init__(
14 self,
15 task_d,
16 num_workers=1,
17 worker_command=None,
18 worker_args=None,
19 worker_resource_request="cpu=1,memory=4096Mi",
20 worker_resource_limit="cpu=1,memory=4096Mi",
21 worker_pod_priority=None,
22 num_ps=0,
23 ps_command=None,
24 ps_args=None,
25 ps_resource_request="cpu=1,memory=4096Mi",
26 ps_resource_limit="cpu=1,memory=4096Mi",
27 ps_pod_priority=None,
28 volume=None,
29 image_pull_policy=None,
30 restart_policy="Never",
31 envs=None,
32 **kwargs
33 ):
34 self._num_workers = num_workers
35 self._worker_command = worker_command
36 self._worker_args = worker_args
37 self._worker_resource_request = worker_resource_request
38 self._worker_resource_limit = worker_resource_limit
39 self._worker_pod_priority = worker_pod_priority
40
41 self._num_ps = num_ps
42 self._ps_command = ps_command
43 self._ps_args = ps_args
44 self._ps_resource_request = ps_resource_request
45 self._ps_resource_limit = ps_resource_limit
46 self._ps_pod_priority = ps_pod_priority
47
48 self._restart_policy = restart_policy
49 self._volume = volume
50 self._image_pull_policy = image_pull_policy
51 self._envs = envs
52 self._task_d = task_d
53 self._next_worker_id = itertools.count().__next__
54
55 # Protects followed variables, which are accessed from event_cb.
56 self._lock = threading.Lock()
57 # worker id to (pod name, phase) mapping
58 # phase: None/Pending/Running/Succeeded/Failed/Unknown
59 # None: worker was just launched, haven't received event yet.
60 # Pending: worker pod not started yet
61 # Running: worker pod is running
62 # Succeeded: worker pod finishes all tasks and terminates with
63 # no issue.
64 # Failed: worker pod is killed for some reason
65 # Unknown: unknown
66 self._worker_pods_phase = {}
67 # pod name to worker id mapping
68 self._worker_pod_name_to_id = {}
69
70 self._relaunch_deleted_live_worker = True
71
72 self._ps_pods_phase = {}
73 self._ps_pod_name_to_id = {}
74 self._relaunch_deleted_live_ps = True
75
76 self._k8s_client = k8s.Client(event_callback=self._event_cb, **kwargs)
77 self._ps_addrs = self._get_addrs(
78 self._num_ps, self._k8s_client.get_ps_service_address
79 )
80 # TODO: Select a worker address to be used for broadcasting model
81 # parameters under allreduce-strategy.
82 self._worker_addrs = self._get_addrs(
83 self._num_workers, self._k8s_client.get_worker_service_address
84 )
85
86 def _start_worker(self, worker_id):
87 logger.info("Starting worker: %d" % worker_id)
88 with self._lock:
89 pod = self._k8s_client.create_worker(
90 worker_id=worker_id,
91 resource_requests=self._worker_resource_request,
92 resource_limits=self._worker_resource_limit,
93 pod_priority=self._worker_pod_priority,
94 volume=self._volume,
95 image_pull_policy=self._image_pull_policy,
96 command=self._worker_command,
97 args=self._worker_args
98 + ["--worker_id", str(worker_id)]
99 + ["--ps_addrs", self._ps_addrs],
100 restart_policy=self._restart_policy,
101 ps_addrs=self._ps_addrs,
102 envs=copy.deepcopy(self._envs),
103 )
104 name = pod.metadata.name
105 self._worker_pod_name_to_id[name] = worker_id
106 self._worker_pods_phase[worker_id] = (name, None)
107 self._k8s_client.create_worker_service(worker_id)
108
109 def _start_ps(self, ps_id):
110 logger.info("Starting PS: %d" % ps_id)
111 with self._lock:
112 pod = self._k8s_client.create_ps(
113 ps_id=ps_id,
114 resource_requests=self._ps_resource_request,
115 resource_limits=self._ps_resource_limit,
116 pod_priority=self._ps_pod_priority,
117 volume=self._volume,
118 image_pull_policy=self._image_pull_policy,
119 command=self._ps_command,
120 args=self._ps_args + ["--ps_id", str(ps_id)],
121 restart_policy=self._restart_policy,
122 envs=copy.deepcopy(self._envs),
123 )
124 name = pod.metadata.name
125 self._ps_pod_name_to_id[name] = ps_id
126 self._ps_pods_phase[ps_id] = (name, None)
127 self._k8s_client.create_ps_service(ps_id)
128
129 def _get_addrs(self, num_addrs, addr_get_fn):
130 addrs = []
131 for addr_id in range(num_addrs):
132 addrs.append(addr_get_fn(addr_id))
133 return _SERVICE_ADDR_SEP.join(addrs)
134
135 @staticmethod
136 def _update_addr(old_addr, new_addr, addrs, addr_get_fn):
137 addrs_list = addrs.split(_SERVICE_ADDR_SEP)
138 addrs_list[addrs_list.index(addr_get_fn(old_addr))] = addr_get_fn(
139 new_addr
140 )
141 return _SERVICE_ADDR_SEP.join(addrs_list)
142
143 def update_status(self, status):
144 master_name = self._k8s_client.get_master_pod_name()
145 self._k8s_client.patch_labels_to_pod(
146 master_name, labels_dict={"status": status}
147 )
148
149 def start_workers(self):
150 for _ in range(self._num_workers):
151 self._start_worker(self._next_worker_id())
152
153 def start_parameter_servers(self):
154 for i in range(self._num_ps):
155 self._start_ps(i)
156
157 def _remove_worker(self, worker_id):
158 logger.info("Removing worker: %d", worker_id)
159 with self._lock:
160 if worker_id not in self._worker_pods_phase:
161 logger.error("Unknown worker id: %s" % worker_id)
162 return
163
164 # TODO: change _k8s_client to accept pod name instead of worker id.
165 self._k8s_client.delete_worker(worker_id)
166
167 def _remove_ps(self, ps_id):
168 logger.info("Removing PS: %d", ps_id)
169 with self._lock:
170 if ps_id not in self._ps_pods_phase:
171 logger.error("Unknown PS id: %s" % ps_id)
172 return
173
174 self._k8s_client.delete_ps(ps_id)
175
176 def stop_relaunch_and_remove_workers(self):
177 with self._lock:
178 self._relaunch_deleted_live_worker = False
179 for worker_id in self._worker_pods_phase:
180 self._k8s_client.delete_worker(worker_id)
181
182 def stop_relaunch_and_remove_all_ps(self):
183 with self._lock:
184 self._relaunch_deleted_live_ps = False
185 for ps_id in self._ps_pods_phase:
186 self._k8s_client.delete_ps(ps_id)
187
188 def get_worker_counter(self):
189 with self._lock:
190 return Counter([v for _, v in self._worker_pods_phase.values()])
191
192 def get_ps_counter(self):
193 with self._lock:
194 return Counter([v for _, v in self._ps_pods_phase.values()])
195
196 def _event_cb(self, event):
197 evt_obj = event.get("object")
198 evt_type = event.get("type")
199 if not evt_obj or not evt_type:
200 logger.error("Event doesn't have object or type: %s" % event)
201 return
202
203 if evt_obj.kind != "Pod":
204 # We only care about pod related events
205 return
206
207 pod_name = evt_obj.metadata.name
208 phase = evt_obj.status.phase
209 logger.info(
210 "Got event %s, phase %s for pod: %s" % (evt_type, phase, pod_name)
211 )
212 if pod_name == self._k8s_client.get_master_pod_name():
213 # No need to care about master pod
214 return
215
216 relaunch_worker = False
217 relaunch_ps = False
218 worker_id = -1
219 ps_id = -1
220 with self._lock:
221 if pod_name in self._worker_pod_name_to_id:
222 worker_id = self._worker_pod_name_to_id.get(pod_name)
223 self._worker_pods_phase[worker_id] = (pod_name, phase)
224 if evt_type == "DELETED":
225 del self._worker_pods_phase[worker_id]
226 del self._worker_pod_name_to_id[pod_name]
227 self._task_d.recover_tasks(worker_id)
228
229 # If a deleted pod was not "Succeeded", relaunch a worker.
230 relaunch_worker = (
231 self._relaunch_deleted_live_worker
232 and phase != "Succeeded"
233 )
234
235 elif pod_name in self._ps_pod_name_to_id:
236 ps_id = self._ps_pod_name_to_id.get(pod_name)
237 self._ps_pods_phase[ps_id] = (pod_name, phase)
238 if evt_type == "DELETED":
239 del self._ps_pods_phase[ps_id]
240 del self._ps_pod_name_to_id[pod_name]
241 relaunch_ps = self._relaunch_deleted_live_ps
242 else:
243 logger.error("Unknown pod name: %s" % pod_name)
244 return
245
246 if relaunch_worker and worker_id >= 0:
247 logger.info("Relaunching worker.")
248 new_worker_id = self._next_worker_id()
249 self._start_worker(new_worker_id)
250 self._update_addr(
251 worker_id,
252 new_worker_id,
253 self._worker_addrs,
254 addr_get_fn=self._k8s_client.get_worker_service_address,
255 )
256 elif relaunch_ps:
257 logger.info("Relaunching ps.")
258 # Note: the ID and service address for relaunched parameter
259 # server are intentionally left unchanged to support fault
260 # tolerance.
261 self._start_ps(ps_id)
262
263 @property
264 def ps_addrs(self):
265 return self._ps_addrs
266
[end of elasticdl/python/master/k8s_instance_manager.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/elasticdl/python/master/k8s_instance_manager.py b/elasticdl/python/master/k8s_instance_manager.py
--- a/elasticdl/python/master/k8s_instance_manager.py
+++ b/elasticdl/python/master/k8s_instance_manager.py
@@ -73,6 +73,8 @@
self._ps_pod_name_to_id = {}
self._relaunch_deleted_live_ps = True
+ self._failed_pods = []
+
self._k8s_client = k8s.Client(event_callback=self._event_cb, **kwargs)
self._ps_addrs = self._get_addrs(
self._num_ps, self._k8s_client.get_ps_service_address
@@ -218,10 +220,18 @@
worker_id = -1
ps_id = -1
with self._lock:
+ if pod_name in self._failed_pods:
+ return
if pod_name in self._worker_pod_name_to_id:
worker_id = self._worker_pod_name_to_id.get(pod_name)
self._worker_pods_phase[worker_id] = (pod_name, phase)
- if evt_type == "DELETED":
+ # Workaround for memory leak issues in tf eager mode.
+ # A pod may fail due to OOM from tf eager mode memory leak.
+ failed_pod = False
+ if evt_type == "MODIFIED" and phase == "Failed":
+ self._failed_pods.append(pod_name)
+ failed_pod = True
+ if evt_type == "DELETED" or failed_pod:
del self._worker_pods_phase[worker_id]
del self._worker_pod_name_to_id[pod_name]
self._task_d.recover_tasks(worker_id)
@@ -235,7 +245,13 @@
elif pod_name in self._ps_pod_name_to_id:
ps_id = self._ps_pod_name_to_id.get(pod_name)
self._ps_pods_phase[ps_id] = (pod_name, phase)
- if evt_type == "DELETED":
+ # Workaround for memory leak issues in tf eager mode.
+ # A pod may fail due to OOM from tf eager mode memory leak.
+ failed_pod = False
+ if evt_type == "MODIFIED" and phase == "Failed":
+ self._failed_pods.append(pod_name)
+ failed_pod = True
+ if evt_type == "DELETED" or failed_pod:
del self._ps_pods_phase[ps_id]
del self._ps_pod_name_to_id[pod_name]
relaunch_ps = self._relaunch_deleted_live_ps
| {"golden_diff": "diff --git a/elasticdl/python/master/k8s_instance_manager.py b/elasticdl/python/master/k8s_instance_manager.py\n--- a/elasticdl/python/master/k8s_instance_manager.py\n+++ b/elasticdl/python/master/k8s_instance_manager.py\n@@ -73,6 +73,8 @@\n self._ps_pod_name_to_id = {}\n self._relaunch_deleted_live_ps = True\n \n+ self._failed_pods = []\n+\n self._k8s_client = k8s.Client(event_callback=self._event_cb, **kwargs)\n self._ps_addrs = self._get_addrs(\n self._num_ps, self._k8s_client.get_ps_service_address\n@@ -218,10 +220,18 @@\n worker_id = -1\n ps_id = -1\n with self._lock:\n+ if pod_name in self._failed_pods:\n+ return\n if pod_name in self._worker_pod_name_to_id:\n worker_id = self._worker_pod_name_to_id.get(pod_name)\n self._worker_pods_phase[worker_id] = (pod_name, phase)\n- if evt_type == \"DELETED\":\n+ # Workaround for memory leak issues in tf eager mode.\n+ # A pod may fail due to OOM from tf eager mode memory leak.\n+ failed_pod = False\n+ if evt_type == \"MODIFIED\" and phase == \"Failed\":\n+ self._failed_pods.append(pod_name)\n+ failed_pod = True\n+ if evt_type == \"DELETED\" or failed_pod:\n del self._worker_pods_phase[worker_id]\n del self._worker_pod_name_to_id[pod_name]\n self._task_d.recover_tasks(worker_id)\n@@ -235,7 +245,13 @@\n elif pod_name in self._ps_pod_name_to_id:\n ps_id = self._ps_pod_name_to_id.get(pod_name)\n self._ps_pods_phase[ps_id] = (pod_name, phase)\n- if evt_type == \"DELETED\":\n+ # Workaround for memory leak issues in tf eager mode.\n+ # A pod may fail due to OOM from tf eager mode memory leak.\n+ failed_pod = False\n+ if evt_type == \"MODIFIED\" and phase == \"Failed\":\n+ self._failed_pods.append(pod_name)\n+ failed_pod = True\n+ if evt_type == \"DELETED\" or failed_pod:\n del self._ps_pods_phase[ps_id]\n del self._ps_pod_name_to_id[pod_name]\n relaunch_ps = self._relaunch_deleted_live_ps\n", "issue": "Master needs to relaunch a failed ps/worker pod for OOM workaround\nTensorFlow 2.0 has memory leak issues in the eager mode. 
A ps/worker pod may fail due to OOM after running for a while.\r\n\r\nTo work around it, the master can delete the failed pod so it can be relaunched.\r\n\r\nWe can remove this workaround after all memory leak issues are resolved.\r\n\n", "before_files": [{"content": "import copy\nimport itertools\nimport threading\nfrom collections import Counter\n\nfrom elasticdl.python.common import k8s_client as k8s\nfrom elasticdl.python.common.log_utils import default_logger as logger\n\n_SERVICE_ADDR_SEP = \",\"\n\n\nclass InstanceManager(object):\n def __init__(\n self,\n task_d,\n num_workers=1,\n worker_command=None,\n worker_args=None,\n worker_resource_request=\"cpu=1,memory=4096Mi\",\n worker_resource_limit=\"cpu=1,memory=4096Mi\",\n worker_pod_priority=None,\n num_ps=0,\n ps_command=None,\n ps_args=None,\n ps_resource_request=\"cpu=1,memory=4096Mi\",\n ps_resource_limit=\"cpu=1,memory=4096Mi\",\n ps_pod_priority=None,\n volume=None,\n image_pull_policy=None,\n restart_policy=\"Never\",\n envs=None,\n **kwargs\n ):\n self._num_workers = num_workers\n self._worker_command = worker_command\n self._worker_args = worker_args\n self._worker_resource_request = worker_resource_request\n self._worker_resource_limit = worker_resource_limit\n self._worker_pod_priority = worker_pod_priority\n\n self._num_ps = num_ps\n self._ps_command = ps_command\n self._ps_args = ps_args\n self._ps_resource_request = ps_resource_request\n self._ps_resource_limit = ps_resource_limit\n self._ps_pod_priority = ps_pod_priority\n\n self._restart_policy = restart_policy\n self._volume = volume\n self._image_pull_policy = image_pull_policy\n self._envs = envs\n self._task_d = task_d\n self._next_worker_id = itertools.count().__next__\n\n # Protects followed variables, which are accessed from event_cb.\n self._lock = threading.Lock()\n # worker id to (pod name, phase) mapping\n # phase: None/Pending/Running/Succeeded/Failed/Unknown\n # None: worker was just launched, haven't received event yet.\n # Pending: worker pod not started yet\n # Running: worker pod is running\n # Succeeded: worker pod finishes all tasks and terminates with\n # no issue.\n # Failed: worker pod is killed for some reason\n # Unknown: unknown\n self._worker_pods_phase = {}\n # pod name to worker id mapping\n self._worker_pod_name_to_id = {}\n\n self._relaunch_deleted_live_worker = True\n\n self._ps_pods_phase = {}\n self._ps_pod_name_to_id = {}\n self._relaunch_deleted_live_ps = True\n\n self._k8s_client = k8s.Client(event_callback=self._event_cb, **kwargs)\n self._ps_addrs = self._get_addrs(\n self._num_ps, self._k8s_client.get_ps_service_address\n )\n # TODO: Select a worker address to be used for broadcasting model\n # parameters under allreduce-strategy.\n self._worker_addrs = self._get_addrs(\n self._num_workers, self._k8s_client.get_worker_service_address\n )\n\n def _start_worker(self, worker_id):\n logger.info(\"Starting worker: %d\" % worker_id)\n with self._lock:\n pod = self._k8s_client.create_worker(\n worker_id=worker_id,\n resource_requests=self._worker_resource_request,\n resource_limits=self._worker_resource_limit,\n pod_priority=self._worker_pod_priority,\n volume=self._volume,\n image_pull_policy=self._image_pull_policy,\n command=self._worker_command,\n args=self._worker_args\n + [\"--worker_id\", str(worker_id)]\n + [\"--ps_addrs\", self._ps_addrs],\n restart_policy=self._restart_policy,\n ps_addrs=self._ps_addrs,\n envs=copy.deepcopy(self._envs),\n )\n name = pod.metadata.name\n self._worker_pod_name_to_id[name] = worker_id\n 
self._worker_pods_phase[worker_id] = (name, None)\n self._k8s_client.create_worker_service(worker_id)\n\n def _start_ps(self, ps_id):\n logger.info(\"Starting PS: %d\" % ps_id)\n with self._lock:\n pod = self._k8s_client.create_ps(\n ps_id=ps_id,\n resource_requests=self._ps_resource_request,\n resource_limits=self._ps_resource_limit,\n pod_priority=self._ps_pod_priority,\n volume=self._volume,\n image_pull_policy=self._image_pull_policy,\n command=self._ps_command,\n args=self._ps_args + [\"--ps_id\", str(ps_id)],\n restart_policy=self._restart_policy,\n envs=copy.deepcopy(self._envs),\n )\n name = pod.metadata.name\n self._ps_pod_name_to_id[name] = ps_id\n self._ps_pods_phase[ps_id] = (name, None)\n self._k8s_client.create_ps_service(ps_id)\n\n def _get_addrs(self, num_addrs, addr_get_fn):\n addrs = []\n for addr_id in range(num_addrs):\n addrs.append(addr_get_fn(addr_id))\n return _SERVICE_ADDR_SEP.join(addrs)\n\n @staticmethod\n def _update_addr(old_addr, new_addr, addrs, addr_get_fn):\n addrs_list = addrs.split(_SERVICE_ADDR_SEP)\n addrs_list[addrs_list.index(addr_get_fn(old_addr))] = addr_get_fn(\n new_addr\n )\n return _SERVICE_ADDR_SEP.join(addrs_list)\n\n def update_status(self, status):\n master_name = self._k8s_client.get_master_pod_name()\n self._k8s_client.patch_labels_to_pod(\n master_name, labels_dict={\"status\": status}\n )\n\n def start_workers(self):\n for _ in range(self._num_workers):\n self._start_worker(self._next_worker_id())\n\n def start_parameter_servers(self):\n for i in range(self._num_ps):\n self._start_ps(i)\n\n def _remove_worker(self, worker_id):\n logger.info(\"Removing worker: %d\", worker_id)\n with self._lock:\n if worker_id not in self._worker_pods_phase:\n logger.error(\"Unknown worker id: %s\" % worker_id)\n return\n\n # TODO: change _k8s_client to accept pod name instead of worker id.\n self._k8s_client.delete_worker(worker_id)\n\n def _remove_ps(self, ps_id):\n logger.info(\"Removing PS: %d\", ps_id)\n with self._lock:\n if ps_id not in self._ps_pods_phase:\n logger.error(\"Unknown PS id: %s\" % ps_id)\n return\n\n self._k8s_client.delete_ps(ps_id)\n\n def stop_relaunch_and_remove_workers(self):\n with self._lock:\n self._relaunch_deleted_live_worker = False\n for worker_id in self._worker_pods_phase:\n self._k8s_client.delete_worker(worker_id)\n\n def stop_relaunch_and_remove_all_ps(self):\n with self._lock:\n self._relaunch_deleted_live_ps = False\n for ps_id in self._ps_pods_phase:\n self._k8s_client.delete_ps(ps_id)\n\n def get_worker_counter(self):\n with self._lock:\n return Counter([v for _, v in self._worker_pods_phase.values()])\n\n def get_ps_counter(self):\n with self._lock:\n return Counter([v for _, v in self._ps_pods_phase.values()])\n\n def _event_cb(self, event):\n evt_obj = event.get(\"object\")\n evt_type = event.get(\"type\")\n if not evt_obj or not evt_type:\n logger.error(\"Event doesn't have object or type: %s\" % event)\n return\n\n if evt_obj.kind != \"Pod\":\n # We only care about pod related events\n return\n\n pod_name = evt_obj.metadata.name\n phase = evt_obj.status.phase\n logger.info(\n \"Got event %s, phase %s for pod: %s\" % (evt_type, phase, pod_name)\n )\n if pod_name == self._k8s_client.get_master_pod_name():\n # No need to care about master pod\n return\n\n relaunch_worker = False\n relaunch_ps = False\n worker_id = -1\n ps_id = -1\n with self._lock:\n if pod_name in self._worker_pod_name_to_id:\n worker_id = self._worker_pod_name_to_id.get(pod_name)\n self._worker_pods_phase[worker_id] = (pod_name, phase)\n if 
evt_type == \"DELETED\":\n del self._worker_pods_phase[worker_id]\n del self._worker_pod_name_to_id[pod_name]\n self._task_d.recover_tasks(worker_id)\n\n # If a deleted pod was not \"Succeeded\", relaunch a worker.\n relaunch_worker = (\n self._relaunch_deleted_live_worker\n and phase != \"Succeeded\"\n )\n\n elif pod_name in self._ps_pod_name_to_id:\n ps_id = self._ps_pod_name_to_id.get(pod_name)\n self._ps_pods_phase[ps_id] = (pod_name, phase)\n if evt_type == \"DELETED\":\n del self._ps_pods_phase[ps_id]\n del self._ps_pod_name_to_id[pod_name]\n relaunch_ps = self._relaunch_deleted_live_ps\n else:\n logger.error(\"Unknown pod name: %s\" % pod_name)\n return\n\n if relaunch_worker and worker_id >= 0:\n logger.info(\"Relaunching worker.\")\n new_worker_id = self._next_worker_id()\n self._start_worker(new_worker_id)\n self._update_addr(\n worker_id,\n new_worker_id,\n self._worker_addrs,\n addr_get_fn=self._k8s_client.get_worker_service_address,\n )\n elif relaunch_ps:\n logger.info(\"Relaunching ps.\")\n # Note: the ID and service address for relaunched parameter\n # server are intentionally left unchanged to support fault\n # tolerance.\n self._start_ps(ps_id)\n\n @property\n def ps_addrs(self):\n return self._ps_addrs\n", "path": "elasticdl/python/master/k8s_instance_manager.py"}]} | 3,581 | 582 |
gh_patches_debug_42547 | rasdani/github-patches | git_diff | searxng__searxng-2917 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
YaCy Public Instance
<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF SearXNG -->
**Working URL to the engine**
<!-- Please check if the engine is responding correctly before submitting it. -->
https://yacy.searchlab.eu/
**Why do you want to add this engine?**
<!-- What's special about this engine? Is it open source or libre? -->
Currently, support for YaCy seems limited to local nodes. Using a public instance of YaCy maintained by the team that develops it would let end users query YaCy through their SearXNG instance of choice without having to set it up locally.
**Features of this engine**
<!-- Features of this engine: Doesn't track its users, fast, easy to integrate, ... -->
YaCy uses a novel peer-to-peer technology that collectively powers a decentralized web crawler. It does not rely on SEO or a single entity. It is a federated service that prioritizes its index by user query and manual feedback.
**How can SearXNG fetch the information from this engine?**
<!-- List API URL, example code (using the correct markdown) and more
that could be useful for the developers in order to implement this engine.
If you don't know what to write, let this part blank. -->
Hover the mouse over 'search interfaces' in the top right; links to the numerous types of API calls are listed at the bottom.
**Applicable category of this engine**
<!-- Where should this new engine fit in SearXNG? Current categories in SearXNG:
general, files, images, it, map, music, news, science, social media and videos.
You can add multiple categories at the same time. -->
general, images, and potentially more. Click 'more' next to the text and image options on the homepage.
**Additional context**
<!-- Add any other context about this engine here. -->
</issue>
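For the "how can SearXNG fetch" question above, note that the existing engine already builds a `yacysearch.json` request, so querying a public peer is mostly a question of which `base_url` that request is combined with. The snippet below is a rough sketch of that URL construction, mirroring the `search_url` template in `searx/engines/yacy.py` further down; the query text and result limit are arbitrary example values, and whether a public peer honours `resource=global` for anonymous clients is an untested assumption.

```python
from urllib.parse import urlencode

base_url = "https://yacy.searchlab.eu"  # public instance from this issue
query = urlencode({"query": "free software"})
offset, limit = 0, 5

url = (
    f"{base_url}/yacysearch.json?{query}"
    f"&startRecord={offset}"
    f"&maximumRecords={limit}"
    "&contentdom=text"
    "&resource=global"
)
print(url)
# -> https://yacy.searchlab.eu/yacysearch.json?query=free+software&startRecord=0&maximumRecords=5&contentdom=text&resource=global
```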
<code>
[start of searx/engines/yacy.py]
1 # SPDX-License-Identifier: AGPL-3.0-or-later
2 # lint: pylint
3 """YaCy_ is a free distributed search engine, built on the principles of
4 peer-to-peer (P2P) networks.
5
6 API: Dev:APIyacysearch_
7
8 Releases:
9
10 - https://github.com/yacy/yacy_search_server/tags
11 - https://download.yacy.net/
12
13 .. _Yacy: https://yacy.net/
14 .. _Dev:APIyacysearch: https://wiki.yacy.net/index.php/Dev:APIyacysearch
15
16 Configuration
17 =============
18
19 The engine has the following (additional) settings:
20
21 .. code:: yaml
22
23 - name: yacy
24 engine: yacy
25 shortcut: ya
26 base_url: http://localhost:8090
27 # Yacy search mode. 'global' or 'local'.
28 search_mode: 'global'
29 number_of_results: 5
30 http_digest_auth_user: ""
31 http_digest_auth_pass: ""
32
33
34 Implementations
35 ===============
36 """
37 # pylint: disable=fixme
38
39 from json import loads
40 from urllib.parse import urlencode
41 from dateutil import parser
42
43 from httpx import DigestAuth
44
45 from searx.utils import html_to_text
46
47 # about
48 about = {
49 "website": 'https://yacy.net/',
50 "wikidata_id": 'Q1759675',
51 "official_api_documentation": 'https://wiki.yacy.net/index.php/Dev:API',
52 "use_official_api": True,
53 "require_api_key": False,
54 "results": 'JSON',
55 }
56
57 # engine dependent config
58 categories = ['general', 'images'] # TODO , 'music', 'videos', 'files'
59 paging = True
60 number_of_results = 5
61 http_digest_auth_user = ""
62 http_digest_auth_pass = ""
63 search_mode = 'global'
64 """Yacy search mode ``global`` or ``local``. By default, Yacy operates in ``global``
65 mode.
66
67 ``global``
68 Peer-to-Peer search
69
70 ``local``
71 Privacy or Stealth mode, restricts the search to local yacy instance.
72 """
73 # search-url
74 base_url = 'http://localhost:8090'
75 search_url = (
76 '/yacysearch.json?{query}'
77 '&startRecord={offset}'
78 '&maximumRecords={limit}'
79 '&contentdom={search_type}'
80 '&resource={resource}'
81 )
82
83 # yacy specific type-definitions
84 search_types = {'general': 'text', 'images': 'image', 'files': 'app', 'music': 'audio', 'videos': 'video'}
85
86
87 def request(query, params):
88 offset = (params['pageno'] - 1) * number_of_results
89 search_type = search_types.get(params.get('category'), '0')
90
91 params['url'] = base_url + search_url.format(
92 query=urlencode({'query': query}),
93 offset=offset,
94 limit=number_of_results,
95 search_type=search_type,
96 resource=search_mode,
97 )
98
99 if http_digest_auth_user and http_digest_auth_pass:
100 params['auth'] = DigestAuth(http_digest_auth_user, http_digest_auth_pass)
101
102 # add language tag if specified
103 if params['language'] != 'all':
104 params['url'] += '&lr=lang_' + params['language'].split('-')[0]
105
106 return params
107
108
109 def response(resp):
110 results = []
111
112 raw_search_results = loads(resp.text)
113
114 # return empty array if there are no results
115 if not raw_search_results:
116 return []
117
118 search_results = raw_search_results.get('channels', [])
119
120 if len(search_results) == 0:
121 return []
122
123 for result in search_results[0].get('items', []):
124 # parse image results
125 if resp.search_params.get('category') == 'images':
126 result_url = ''
127 if 'url' in result:
128 result_url = result['url']
129 elif 'link' in result:
130 result_url = result['link']
131 else:
132 continue
133
134 # append result
135 results.append(
136 {
137 'url': result_url,
138 'title': result['title'],
139 'content': '',
140 'img_src': result['image'],
141 'template': 'images.html',
142 }
143 )
144
145 # parse general results
146 else:
147 publishedDate = parser.parse(result['pubDate'])
148
149 # append result
150 results.append(
151 {
152 'url': result['link'],
153 'title': result['title'],
154 'content': html_to_text(result['description']),
155 'publishedDate': publishedDate,
156 }
157 )
158
159 # TODO parse video, audio and file results
160
161 return results
162
[end of searx/engines/yacy.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/searx/engines/yacy.py b/searx/engines/yacy.py
--- a/searx/engines/yacy.py
+++ b/searx/engines/yacy.py
@@ -18,17 +18,27 @@
The engine has the following (additional) settings:
+- :py:obj:`http_digest_auth_user`
+- :py:obj:`http_digest_auth_pass`
+- :py:obj:`search_mode`
+- :py:obj:`search_type`
+
.. code:: yaml
- - name: yacy
- engine: yacy
- shortcut: ya
- base_url: http://localhost:8090
- # Yacy search mode. 'global' or 'local'.
- search_mode: 'global'
- number_of_results: 5
- http_digest_auth_user: ""
- http_digest_auth_pass: ""
+ - name: yacy
+ engine: yacy
+ categories: general
+ search_type: text
+ base_url: https://yacy.searchlab.eu
+ shortcut: ya
+
+ - name: yacy images
+ engine: yacy
+ categories: images
+ search_type: image
+ base_url: https://yacy.searchlab.eu
+ shortcut: yai
+ disabled: true
Implementations
@@ -55,11 +65,14 @@
}
# engine dependent config
-categories = ['general', 'images'] # TODO , 'music', 'videos', 'files'
+categories = ['general']
paging = True
-number_of_results = 5
+number_of_results = 10
http_digest_auth_user = ""
+"""HTTP digest user for the local YACY instance"""
http_digest_auth_pass = ""
+"""HTTP digest password for the local YACY instance"""
+
search_mode = 'global'
"""Yacy search mode ``global`` or ``local``. By default, Yacy operates in ``global``
mode.
@@ -70,8 +83,13 @@
``local``
Privacy or Stealth mode, restricts the search to local yacy instance.
"""
+search_type = 'text'
+"""One of ``text``, ``image`` / The search-types ``app``, ``audio`` and
+``video`` are not yet implemented (Pull-Requests are welcome).
+"""
+
# search-url
-base_url = 'http://localhost:8090'
+base_url = 'https://yacy.searchlab.eu'
search_url = (
'/yacysearch.json?{query}'
'&startRecord={offset}'
@@ -80,13 +98,19 @@
'&resource={resource}'
)
-# yacy specific type-definitions
-search_types = {'general': 'text', 'images': 'image', 'files': 'app', 'music': 'audio', 'videos': 'video'}
+
+def init(_):
+ valid_types = [
+ 'text',
+ 'image',
+ # 'app', 'audio', 'video',
+ ]
+ if search_type not in valid_types:
+ raise ValueError('search_type "%s" is not one of %s' % (search_type, valid_types))
def request(query, params):
offset = (params['pageno'] - 1) * number_of_results
- search_type = search_types.get(params.get('category'), '0')
params['url'] = base_url + search_url.format(
query=urlencode({'query': query}),
@@ -122,7 +146,7 @@
for result in search_results[0].get('items', []):
# parse image results
- if resp.search_params.get('category') == 'images':
+ if search_type == 'image':
result_url = ''
if 'url' in result:
result_url = result['url']
@@ -144,12 +168,14 @@
# parse general results
else:
- publishedDate = parser.parse(result['pubDate'])
+ publishedDate = None
+ if 'pubDate' in result:
+ publishedDate = parser.parse(result['pubDate'])
# append result
results.append(
{
- 'url': result['link'],
+ 'url': result['link'] or '',
'title': result['title'],
'content': html_to_text(result['description']),
'publishedDate': publishedDate,
| {"golden_diff": "diff --git a/searx/engines/yacy.py b/searx/engines/yacy.py\n--- a/searx/engines/yacy.py\n+++ b/searx/engines/yacy.py\n@@ -18,17 +18,27 @@\n \n The engine has the following (additional) settings:\n \n+- :py:obj:`http_digest_auth_user`\n+- :py:obj:`http_digest_auth_pass`\n+- :py:obj:`search_mode`\n+- :py:obj:`search_type`\n+\n .. code:: yaml\n \n- - name: yacy\n- engine: yacy\n- shortcut: ya\n- base_url: http://localhost:8090\n- # Yacy search mode. 'global' or 'local'.\n- search_mode: 'global'\n- number_of_results: 5\n- http_digest_auth_user: \"\"\n- http_digest_auth_pass: \"\"\n+ - name: yacy\n+ engine: yacy\n+ categories: general\n+ search_type: text\n+ base_url: https://yacy.searchlab.eu\n+ shortcut: ya\n+\n+ - name: yacy images\n+ engine: yacy\n+ categories: images\n+ search_type: image\n+ base_url: https://yacy.searchlab.eu\n+ shortcut: yai\n+ disabled: true\n \n \n Implementations\n@@ -55,11 +65,14 @@\n }\n \n # engine dependent config\n-categories = ['general', 'images'] # TODO , 'music', 'videos', 'files'\n+categories = ['general']\n paging = True\n-number_of_results = 5\n+number_of_results = 10\n http_digest_auth_user = \"\"\n+\"\"\"HTTP digest user for the local YACY instance\"\"\"\n http_digest_auth_pass = \"\"\n+\"\"\"HTTP digest password for the local YACY instance\"\"\"\n+\n search_mode = 'global'\n \"\"\"Yacy search mode ``global`` or ``local``. By default, Yacy operates in ``global``\n mode.\n@@ -70,8 +83,13 @@\n ``local``\n Privacy or Stealth mode, restricts the search to local yacy instance.\n \"\"\"\n+search_type = 'text'\n+\"\"\"One of ``text``, ``image`` / The search-types ``app``, ``audio`` and\n+``video`` are not yet implemented (Pull-Requests are welcome).\n+\"\"\"\n+\n # search-url\n-base_url = 'http://localhost:8090'\n+base_url = 'https://yacy.searchlab.eu'\n search_url = (\n '/yacysearch.json?{query}'\n '&startRecord={offset}'\n@@ -80,13 +98,19 @@\n '&resource={resource}'\n )\n \n-# yacy specific type-definitions\n-search_types = {'general': 'text', 'images': 'image', 'files': 'app', 'music': 'audio', 'videos': 'video'}\n+\n+def init(_):\n+ valid_types = [\n+ 'text',\n+ 'image',\n+ # 'app', 'audio', 'video',\n+ ]\n+ if search_type not in valid_types:\n+ raise ValueError('search_type \"%s\" is not one of %s' % (search_type, valid_types))\n \n \n def request(query, params):\n offset = (params['pageno'] - 1) * number_of_results\n- search_type = search_types.get(params.get('category'), '0')\n \n params['url'] = base_url + search_url.format(\n query=urlencode({'query': query}),\n@@ -122,7 +146,7 @@\n \n for result in search_results[0].get('items', []):\n # parse image results\n- if resp.search_params.get('category') == 'images':\n+ if search_type == 'image':\n result_url = ''\n if 'url' in result:\n result_url = result['url']\n@@ -144,12 +168,14 @@\n \n # parse general results\n else:\n- publishedDate = parser.parse(result['pubDate'])\n+ publishedDate = None\n+ if 'pubDate' in result:\n+ publishedDate = parser.parse(result['pubDate'])\n \n # append result\n results.append(\n {\n- 'url': result['link'],\n+ 'url': result['link'] or '',\n 'title': result['title'],\n 'content': html_to_text(result['description']),\n 'publishedDate': publishedDate,\n", "issue": "YaCy Public Instance \n<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF SearXNG -->\r\n\r\n**Working URL to the engine**\r\n<!-- Please check if the engine is responding correctly before submitting it. 
-->\r\nhttps://yacy.searchlab.eu/\r\n**Why do you want to add this engine?**\r\n<!-- What's special about this engine? Is it open source or libre? -->\r\nCurrently it seems support for YaCy is limited to local nodes, using a public instance of YaCy maintained by the team that develops it is a solution for end users who would like to query yacy through their searxng instance of choice without it being set up locally.\r\n**Features of this engine**\r\n<!-- Features of this engine: Doesn't track its users, fast, easy to integrate, ... -->\r\nYaCy uses a novel peer-to-peer technology which collectively powers a decentralized webcrawler. It does not rely on SEO or a single entity. It is a federated service that prioritzes index by user query and manual feedback.\r\n**How can SearXNG fetch the information from this engine?**\r\n<!-- List API URL, example code (using the correct markdown) and more\r\nthat could be useful for the developers in order to implement this engine.\r\nIf you don't know what to write, let this part blank. -->\r\nHover mouse over 'search interfaces' in top right, at the bottom will be links to numerous types of api calls\r\n**Applicable category of this engine**\r\n<!-- Where should this new engine fit in SearXNG? Current categories in SearXNG:\r\ngeneral, files, images, it, map, music, news, science, social media and videos.\r\nYou can add multiple categories at the same time. -->\r\ngeneral, images, potentially more. Click 'more' next to text and image options at homepage.\r\n**Additional context**\r\n<!-- Add any other context about this engine here. -->\r\n\n", "before_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n# lint: pylint\n\"\"\"YaCy_ is a free distributed search engine, built on the principles of\npeer-to-peer (P2P) networks.\n\nAPI: Dev:APIyacysearch_\n\nReleases:\n\n- https://github.com/yacy/yacy_search_server/tags\n- https://download.yacy.net/\n\n.. _Yacy: https://yacy.net/\n.. _Dev:APIyacysearch: https://wiki.yacy.net/index.php/Dev:APIyacysearch\n\nConfiguration\n=============\n\nThe engine has the following (additional) settings:\n\n.. code:: yaml\n\n - name: yacy\n engine: yacy\n shortcut: ya\n base_url: http://localhost:8090\n # Yacy search mode. 'global' or 'local'.\n search_mode: 'global'\n number_of_results: 5\n http_digest_auth_user: \"\"\n http_digest_auth_pass: \"\"\n\n\nImplementations\n===============\n\"\"\"\n# pylint: disable=fixme\n\nfrom json import loads\nfrom urllib.parse import urlencode\nfrom dateutil import parser\n\nfrom httpx import DigestAuth\n\nfrom searx.utils import html_to_text\n\n# about\nabout = {\n \"website\": 'https://yacy.net/',\n \"wikidata_id\": 'Q1759675',\n \"official_api_documentation\": 'https://wiki.yacy.net/index.php/Dev:API',\n \"use_official_api\": True,\n \"require_api_key\": False,\n \"results\": 'JSON',\n}\n\n# engine dependent config\ncategories = ['general', 'images'] # TODO , 'music', 'videos', 'files'\npaging = True\nnumber_of_results = 5\nhttp_digest_auth_user = \"\"\nhttp_digest_auth_pass = \"\"\nsearch_mode = 'global'\n\"\"\"Yacy search mode ``global`` or ``local``. 
By default, Yacy operates in ``global``\nmode.\n\n``global``\n Peer-to-Peer search\n\n``local``\n Privacy or Stealth mode, restricts the search to local yacy instance.\n\"\"\"\n# search-url\nbase_url = 'http://localhost:8090'\nsearch_url = (\n '/yacysearch.json?{query}'\n '&startRecord={offset}'\n '&maximumRecords={limit}'\n '&contentdom={search_type}'\n '&resource={resource}'\n)\n\n# yacy specific type-definitions\nsearch_types = {'general': 'text', 'images': 'image', 'files': 'app', 'music': 'audio', 'videos': 'video'}\n\n\ndef request(query, params):\n offset = (params['pageno'] - 1) * number_of_results\n search_type = search_types.get(params.get('category'), '0')\n\n params['url'] = base_url + search_url.format(\n query=urlencode({'query': query}),\n offset=offset,\n limit=number_of_results,\n search_type=search_type,\n resource=search_mode,\n )\n\n if http_digest_auth_user and http_digest_auth_pass:\n params['auth'] = DigestAuth(http_digest_auth_user, http_digest_auth_pass)\n\n # add language tag if specified\n if params['language'] != 'all':\n params['url'] += '&lr=lang_' + params['language'].split('-')[0]\n\n return params\n\n\ndef response(resp):\n results = []\n\n raw_search_results = loads(resp.text)\n\n # return empty array if there are no results\n if not raw_search_results:\n return []\n\n search_results = raw_search_results.get('channels', [])\n\n if len(search_results) == 0:\n return []\n\n for result in search_results[0].get('items', []):\n # parse image results\n if resp.search_params.get('category') == 'images':\n result_url = ''\n if 'url' in result:\n result_url = result['url']\n elif 'link' in result:\n result_url = result['link']\n else:\n continue\n\n # append result\n results.append(\n {\n 'url': result_url,\n 'title': result['title'],\n 'content': '',\n 'img_src': result['image'],\n 'template': 'images.html',\n }\n )\n\n # parse general results\n else:\n publishedDate = parser.parse(result['pubDate'])\n\n # append result\n results.append(\n {\n 'url': result['link'],\n 'title': result['title'],\n 'content': html_to_text(result['description']),\n 'publishedDate': publishedDate,\n }\n )\n\n # TODO parse video, audio and file results\n\n return results\n", "path": "searx/engines/yacy.py"}]} | 2,334 | 985 |
gh_patches_debug_41564 | rasdani/github-patches | git_diff | ethereum__web3.py-3238 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Deprecate `miner` namespace support and tests
### What was wrong?
Geth removed support for its `miner` API since it is no longer needed after The Merge™. See: https://geth.ethereum.org/docs/interacting-with-geth/rpc/ns-miner
### How can it be fixed?
Add deprecation warnings to the methods within the miner namespace, and change the tests to check for the deprecation warnings. We can then either remove the methods as Geth stops supporting them, or wait until v7 to remove them.
</issue>
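One way to read "add deprecation warnings" is to reuse the descriptor pattern that already exists as `DeprecatedMethod` in `web3/method.py` (listed below). The miner methods have no replacement to point at, though, so the wrapper would need a free-form message instead of an old-name/new-name pair. The class below is a hypothetical sketch of that variant, not web3.py's actual API; the usage comment invents illustrative names.

```python
import warnings
from typing import Any


class DeprecatedNamespaceMethod:
    """Hypothetical descriptor: warn, then defer to the wrapped Method.

    Shaped after ``DeprecatedMethod`` in ``web3/method.py`` below, but it
    carries a free-form ``msg`` because the miner RPCs have no successor.
    """

    def __init__(self, method: Any, name: str, msg: str) -> None:
        self.method = method
        self.name = name
        self.msg = msg

    def __get__(self, obj: Any = None, obj_type: Any = None) -> Any:
        warnings.warn(
            f"{self.name} is deprecated: {self.msg}",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.method.__get__(obj, obj_type)


# Illustrative usage against the Method objects in web3/_utils/miner.py:
# set_extra = DeprecatedNamespaceMethod(
#     set_extra_method, "set_extra", msg="the Geth miner namespace was removed"
# )
```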
<code>
[start of web3/method.py]
1 import functools
2 from typing import (
3 TYPE_CHECKING,
4 Any,
5 Callable,
6 Dict,
7 Generic,
8 List,
9 Optional,
10 Sequence,
11 Tuple,
12 Type,
13 TypeVar,
14 Union,
15 )
16 import warnings
17
18 from eth_utils.curried import (
19 to_tuple,
20 )
21 from eth_utils.toolz import (
22 pipe,
23 )
24
25 from web3._utils.method_formatters import (
26 get_error_formatters,
27 get_null_result_formatters,
28 get_request_formatters,
29 get_result_formatters,
30 )
31 from web3._utils.rpc_abi import (
32 RPC,
33 )
34 from web3.exceptions import (
35 Web3ValidationError,
36 )
37 from web3.types import (
38 RPCEndpoint,
39 TReturn,
40 )
41
42 if TYPE_CHECKING:
43 from web3 import Web3 # noqa: F401
44 from web3.module import Module # noqa: F401
45
46
47 Munger = Callable[..., Any]
48
49
50 @to_tuple
51 def _apply_request_formatters(
52 params: Any, request_formatters: Dict[RPCEndpoint, Callable[..., TReturn]]
53 ) -> Tuple[Any, ...]:
54 if request_formatters:
55 formatted_params = pipe(params, request_formatters)
56 return formatted_params
57 return params
58
59
60 def _set_mungers(
61 mungers: Optional[Sequence[Munger]], is_property: bool
62 ) -> Sequence[Any]:
63 if is_property and mungers:
64 raise Web3ValidationError("Mungers cannot be used with a property.")
65
66 return (
67 mungers
68 if mungers
69 else [default_munger] if is_property else [default_root_munger]
70 )
71
72
73 def default_munger(_module: "Module", *args: Any, **kwargs: Any) -> Tuple[()]:
74 if args or kwargs:
75 raise Web3ValidationError("Parameters cannot be passed to a property.")
76 return ()
77
78
79 def default_root_munger(_module: "Module", *args: Any) -> List[Any]:
80 return [*args]
81
82
83 TFunc = TypeVar("TFunc", bound=Callable[..., Any])
84
85
86 class Method(Generic[TFunc]):
87 """Method object for web3 module methods
88
89 Calls to the Method go through these steps:
90
91 1. input munging - includes normalization, parameter checking, early parameter
92 formatting. Any processing on the input parameters that need to happen before
93 json_rpc method string selection occurs.
94
95 A note about mungers: The first (root) munger should reflect the desired
96 api function arguments. In other words, if the api function wants to
97 behave as: `get_balance(account, block_identifier=None)`, the root munger
98 should accept these same arguments, with the addition of the module as
99 the first argument e.g.:
100
101 ```
102 def get_balance_root_munger(module, account, block_identifier=None):
103 if block_identifier is None:
104 block_identifier = DEFAULT_BLOCK
105 return module, [account, block_identifier]
106 ```
107
108 all mungers should return an argument list.
109
110 if no munger is provided, a default munger expecting no method arguments
111 will be used.
112
113 2. method selection - The json_rpc_method argument can be method string or a
114 function that returns a method string. If a callable is provided the processed
115 method inputs are passed to the method selection function, and the returned
116 method string is used.
117
118 3. request and response formatters are set - formatters are retrieved
119 using the json rpc method string.
120
121 4. After the parameter processing from steps 1-3 the request is made using
122 the calling function returned by the module attribute ``retrieve_caller_fn``
123 and the response formatters are applied to the output.
124 """
125
126 def __init__(
127 self,
128 json_rpc_method: Optional[RPCEndpoint] = None,
129 mungers: Optional[Sequence[Munger]] = None,
130 request_formatters: Optional[Callable[..., TReturn]] = None,
131 result_formatters: Optional[Callable[..., TReturn]] = None,
132 null_result_formatters: Optional[Callable[..., TReturn]] = None,
133 method_choice_depends_on_args: Optional[Callable[..., RPCEndpoint]] = None,
134 is_property: bool = False,
135 ):
136 self.json_rpc_method = json_rpc_method
137 self.mungers = _set_mungers(mungers, is_property)
138 self.request_formatters = request_formatters or get_request_formatters
139 self.result_formatters = result_formatters or get_result_formatters
140 self.null_result_formatters = (
141 null_result_formatters or get_null_result_formatters
142 )
143 self.method_choice_depends_on_args = method_choice_depends_on_args
144 self.is_property = is_property
145
146 def __get__(
147 self, obj: Optional["Module"] = None, obj_type: Optional[Type["Module"]] = None
148 ) -> TFunc:
149 if obj is None:
150 raise TypeError(
151 "Direct calls to methods are not supported. "
152 "Methods must be called from an module instance, "
153 "usually attached to a web3 instance."
154 )
155 return obj.retrieve_caller_fn(self)
156
157 @property
158 def method_selector_fn(
159 self,
160 ) -> Callable[..., Union[RPCEndpoint, Callable[..., RPCEndpoint]]]:
161 """Gets the method selector from the config."""
162 if callable(self.json_rpc_method):
163 return self.json_rpc_method
164 elif isinstance(self.json_rpc_method, (str,)):
165 return lambda *_: self.json_rpc_method
166 raise ValueError(
167 "``json_rpc_method`` config invalid. May be a string or function"
168 )
169
170 def input_munger(self, module: "Module", args: Any, kwargs: Any) -> List[Any]:
171 # This function takes the input parameters and munges them.
172 # See the test_process_params test in ``tests/core/method-class/test_method.py``
173 # for an example with multiple mungers.
174 return functools.reduce(
175 lambda args, munger: munger(module, *args, **kwargs), self.mungers, args
176 )
177
178 def process_params(self, module: "Module", *args: Any, **kwargs: Any) -> Tuple[
179 Tuple[Union[RPCEndpoint, Callable[..., RPCEndpoint]], Tuple[Any, ...]],
180 Tuple[
181 Union[TReturn, Dict[str, Callable[..., Any]]],
182 Callable[..., Any],
183 Union[TReturn, Callable[..., Any]],
184 ],
185 ]:
186 params = self.input_munger(module, args, kwargs)
187
188 if self.method_choice_depends_on_args:
189 # If the method choice depends on the args that get passed in,
190 # the first parameter determines which method needs to be called
191 self.json_rpc_method = self.method_choice_depends_on_args(value=params[0])
192
193 pending_or_latest_filter_methods = [
194 RPC.eth_newPendingTransactionFilter,
195 RPC.eth_newBlockFilter,
196 ]
197 if self.json_rpc_method in pending_or_latest_filter_methods:
198 # For pending or latest filter methods, use params to determine
199 # which method to call, but don't pass them through with the request
200 params = []
201
202 method = self.method_selector_fn()
203 response_formatters = (
204 self.result_formatters(method, module),
205 get_error_formatters(method),
206 self.null_result_formatters(method),
207 )
208 request = (
209 method,
210 _apply_request_formatters(params, self.request_formatters(method)),
211 )
212 return request, response_formatters
213
214
215 class DeprecatedMethod:
216 def __init__(
217 self, method: Method[Callable[..., Any]], old_name: str, new_name: str
218 ) -> None:
219 self.method = method
220 self.old_name = old_name
221 self.new_name = new_name
222
223 def __get__(
224 self, obj: Optional["Module"] = None, obj_type: Optional[Type["Module"]] = None
225 ) -> Any:
226 warnings.warn(
227 f"{self.old_name} is deprecated in favor of {self.new_name}",
228 category=DeprecationWarning,
229 )
230 return self.method.__get__(obj, obj_type)
231
[end of web3/method.py]
[start of web3/_utils/miner.py]
1 from typing import (
2 Callable,
3 )
4
5 from eth_typing import (
6 ChecksumAddress,
7 )
8
9 from web3._utils.rpc_abi import (
10 RPC,
11 )
12 from web3.method import (
13 Method,
14 default_root_munger,
15 )
16 from web3.types import (
17 BlockNumber,
18 Wei,
19 )
20
21 make_dag: Method[Callable[[BlockNumber], bool]] = Method(
22 RPC.miner_makeDag,
23 mungers=[default_root_munger],
24 )
25
26
27 set_extra: Method[Callable[[str], bool]] = Method(
28 RPC.miner_setExtra,
29 mungers=[default_root_munger],
30 )
31
32
33 set_etherbase: Method[Callable[[ChecksumAddress], bool]] = Method(
34 RPC.miner_setEtherbase,
35 mungers=[default_root_munger],
36 )
37
38
39 set_gas_price: Method[Callable[[Wei], bool]] = Method(
40 RPC.miner_setGasPrice,
41 mungers=[default_root_munger],
42 )
43
44
45 start: Method[Callable[[int], bool]] = Method(
46 RPC.miner_start,
47 mungers=[default_root_munger],
48 )
49
50
51 stop: Method[Callable[[], bool]] = Method(
52 RPC.miner_stop,
53 is_property=True,
54 )
55
56
57 start_auto_dag: Method[Callable[[], bool]] = Method(
58 RPC.miner_startAutoDag,
59 is_property=True,
60 )
61
62
63 stop_auto_dag: Method[Callable[[], bool]] = Method(
64 RPC.miner_stopAutoDag,
65 is_property=True,
66 )
67
[end of web3/_utils/miner.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/web3/_utils/miner.py b/web3/_utils/miner.py
--- a/web3/_utils/miner.py
+++ b/web3/_utils/miner.py
@@ -10,6 +10,7 @@
RPC,
)
from web3.method import (
+ DeprecatedMethod,
Method,
default_root_munger,
)
@@ -18,49 +19,74 @@
Wei,
)
-make_dag: Method[Callable[[BlockNumber], bool]] = Method(
+#
+# The Geth client deprecated the miner namespace.
+#
+
+_make_dag: Method[Callable[[BlockNumber], bool]] = Method(
RPC.miner_makeDag,
mungers=[default_root_munger],
)
+make_dag = DeprecatedMethod(
+ _make_dag, "make_dag", msg="All mining methods have been deprecated"
+)
-set_extra: Method[Callable[[str], bool]] = Method(
+_set_extra: Method[Callable[[str], bool]] = Method(
RPC.miner_setExtra,
mungers=[default_root_munger],
)
+set_extra = DeprecatedMethod(
+ _set_extra, "set_extra", msg="All mining methods have been deprecated"
+)
-set_etherbase: Method[Callable[[ChecksumAddress], bool]] = Method(
+_set_etherbase: Method[Callable[[ChecksumAddress], bool]] = Method(
RPC.miner_setEtherbase,
mungers=[default_root_munger],
)
+set_etherbase = DeprecatedMethod(
+ _set_etherbase, "set_etherbase", msg="All mining methods have been deprecated"
+)
-set_gas_price: Method[Callable[[Wei], bool]] = Method(
+_set_gas_price: Method[Callable[[Wei], bool]] = Method(
RPC.miner_setGasPrice,
mungers=[default_root_munger],
)
+set_gas_price = DeprecatedMethod(
+ _set_gas_price, "set_gas_price", msg="All mining methods have been deprecated"
+)
-start: Method[Callable[[int], bool]] = Method(
+_start: Method[Callable[[int], bool]] = Method(
RPC.miner_start,
mungers=[default_root_munger],
)
+start = DeprecatedMethod(_start, "start", msg="All mining methods have been deprecated")
-stop: Method[Callable[[], bool]] = Method(
+_stop: Method[Callable[[], bool]] = Method(
RPC.miner_stop,
is_property=True,
)
+stop = DeprecatedMethod(_stop, "stop", msg="All mining methods have been deprecated")
-start_auto_dag: Method[Callable[[], bool]] = Method(
+_start_auto_dag: Method[Callable[[], bool]] = Method(
RPC.miner_startAutoDag,
is_property=True,
)
+start_auto_dag = DeprecatedMethod(
+ _start_auto_dag, "start_auto_dag", msg="All mining methods have been deprecated"
+)
-stop_auto_dag: Method[Callable[[], bool]] = Method(
+_stop_auto_dag: Method[Callable[[], bool]] = Method(
RPC.miner_stopAutoDag,
is_property=True,
)
+
+stop_auto_dag = DeprecatedMethod(
+ _stop_auto_dag, "stop_auto_dag", msg="All mining methods have been deprecated"
+)
diff --git a/web3/method.py b/web3/method.py
--- a/web3/method.py
+++ b/web3/method.py
@@ -214,17 +214,26 @@
class DeprecatedMethod:
def __init__(
- self, method: Method[Callable[..., Any]], old_name: str, new_name: str
+ self,
+ method: Method[Callable[..., Any]],
+ old_name: str,
+ new_name: Optional[str] = None,
+ msg: Optional[str] = None,
) -> None:
self.method = method
self.old_name = old_name
self.new_name = new_name
+ self.msg = msg
def __get__(
self, obj: Optional["Module"] = None, obj_type: Optional[Type["Module"]] = None
) -> Any:
+ message = f"{self.old_name} is deprecated in favor of {self.new_name}"
+ if self.msg is not None:
+ message = self.msg
+
warnings.warn(
- f"{self.old_name} is deprecated in favor of {self.new_name}",
+ message,
category=DeprecationWarning,
)
return self.method.__get__(obj, obj_type)
| {"golden_diff": "diff --git a/web3/_utils/miner.py b/web3/_utils/miner.py\n--- a/web3/_utils/miner.py\n+++ b/web3/_utils/miner.py\n@@ -10,6 +10,7 @@\n RPC,\n )\n from web3.method import (\n+ DeprecatedMethod,\n Method,\n default_root_munger,\n )\n@@ -18,49 +19,74 @@\n Wei,\n )\n \n-make_dag: Method[Callable[[BlockNumber], bool]] = Method(\n+#\n+# The Geth client deprecated the miner namespace.\n+#\n+\n+_make_dag: Method[Callable[[BlockNumber], bool]] = Method(\n RPC.miner_makeDag,\n mungers=[default_root_munger],\n )\n \n+make_dag = DeprecatedMethod(\n+ _make_dag, \"make_dag\", msg=\"All mining methods have been deprecated\"\n+)\n \n-set_extra: Method[Callable[[str], bool]] = Method(\n+_set_extra: Method[Callable[[str], bool]] = Method(\n RPC.miner_setExtra,\n mungers=[default_root_munger],\n )\n \n+set_extra = DeprecatedMethod(\n+ _set_extra, \"set_extra\", msg=\"All mining methods have been deprecated\"\n+)\n \n-set_etherbase: Method[Callable[[ChecksumAddress], bool]] = Method(\n+_set_etherbase: Method[Callable[[ChecksumAddress], bool]] = Method(\n RPC.miner_setEtherbase,\n mungers=[default_root_munger],\n )\n \n+set_etherbase = DeprecatedMethod(\n+ _set_etherbase, \"set_etherbase\", msg=\"All mining methods have been deprecated\"\n+)\n \n-set_gas_price: Method[Callable[[Wei], bool]] = Method(\n+_set_gas_price: Method[Callable[[Wei], bool]] = Method(\n RPC.miner_setGasPrice,\n mungers=[default_root_munger],\n )\n \n+set_gas_price = DeprecatedMethod(\n+ _set_gas_price, \"set_gas_price\", msg=\"All mining methods have been deprecated\"\n+)\n \n-start: Method[Callable[[int], bool]] = Method(\n+_start: Method[Callable[[int], bool]] = Method(\n RPC.miner_start,\n mungers=[default_root_munger],\n )\n \n+start = DeprecatedMethod(_start, \"start\", msg=\"All mining methods have been deprecated\")\n \n-stop: Method[Callable[[], bool]] = Method(\n+_stop: Method[Callable[[], bool]] = Method(\n RPC.miner_stop,\n is_property=True,\n )\n \n+stop = DeprecatedMethod(_stop, \"stop\", msg=\"All mining methods have been deprecated\")\n \n-start_auto_dag: Method[Callable[[], bool]] = Method(\n+_start_auto_dag: Method[Callable[[], bool]] = Method(\n RPC.miner_startAutoDag,\n is_property=True,\n )\n \n+start_auto_dag = DeprecatedMethod(\n+ _start_auto_dag, \"start_auto_dag\", msg=\"All mining methods have been deprecated\"\n+)\n \n-stop_auto_dag: Method[Callable[[], bool]] = Method(\n+_stop_auto_dag: Method[Callable[[], bool]] = Method(\n RPC.miner_stopAutoDag,\n is_property=True,\n )\n+\n+stop_auto_dag = DeprecatedMethod(\n+ _stop_auto_dag, \"stop_auto_dag\", msg=\"All mining methods have been deprecated\"\n+)\ndiff --git a/web3/method.py b/web3/method.py\n--- a/web3/method.py\n+++ b/web3/method.py\n@@ -214,17 +214,26 @@\n \n class DeprecatedMethod:\n def __init__(\n- self, method: Method[Callable[..., Any]], old_name: str, new_name: str\n+ self,\n+ method: Method[Callable[..., Any]],\n+ old_name: str,\n+ new_name: Optional[str] = None,\n+ msg: Optional[str] = None,\n ) -> None:\n self.method = method\n self.old_name = old_name\n self.new_name = new_name\n+ self.msg = msg\n \n def __get__(\n self, obj: Optional[\"Module\"] = None, obj_type: Optional[Type[\"Module\"]] = None\n ) -> Any:\n+ message = f\"{self.old_name} is deprecated in favor of {self.new_name}\"\n+ if self.msg is not None:\n+ message = self.msg\n+\n warnings.warn(\n- f\"{self.old_name} is deprecated in favor of {self.new_name}\",\n+ message,\n category=DeprecationWarning,\n )\n return self.method.__get__(obj, obj_type)\n", "issue": 
"Deprecate `miner` namespace support and tests\n\r\n### What was wrong?\r\n\r\nGeth removed support for their `miner` API since it's no longer needed after The Merge \u2122\ufe0f . See: https://geth.ethereum.org/docs/interacting-with-geth/rpc/ns-miner\r\n\r\n\r\n### How can it be fixed?\r\n\r\nAdd deprecation warnings to the methods within the miner namespace, and change the tests to check for the deprecation warnings. We can then either remove the methods as Geth stops supporting these, or wait until v7 to remove.\r\n\n", "before_files": [{"content": "import functools\nfrom typing import (\n TYPE_CHECKING,\n Any,\n Callable,\n Dict,\n Generic,\n List,\n Optional,\n Sequence,\n Tuple,\n Type,\n TypeVar,\n Union,\n)\nimport warnings\n\nfrom eth_utils.curried import (\n to_tuple,\n)\nfrom eth_utils.toolz import (\n pipe,\n)\n\nfrom web3._utils.method_formatters import (\n get_error_formatters,\n get_null_result_formatters,\n get_request_formatters,\n get_result_formatters,\n)\nfrom web3._utils.rpc_abi import (\n RPC,\n)\nfrom web3.exceptions import (\n Web3ValidationError,\n)\nfrom web3.types import (\n RPCEndpoint,\n TReturn,\n)\n\nif TYPE_CHECKING:\n from web3 import Web3 # noqa: F401\n from web3.module import Module # noqa: F401\n\n\nMunger = Callable[..., Any]\n\n\n@to_tuple\ndef _apply_request_formatters(\n params: Any, request_formatters: Dict[RPCEndpoint, Callable[..., TReturn]]\n) -> Tuple[Any, ...]:\n if request_formatters:\n formatted_params = pipe(params, request_formatters)\n return formatted_params\n return params\n\n\ndef _set_mungers(\n mungers: Optional[Sequence[Munger]], is_property: bool\n) -> Sequence[Any]:\n if is_property and mungers:\n raise Web3ValidationError(\"Mungers cannot be used with a property.\")\n\n return (\n mungers\n if mungers\n else [default_munger] if is_property else [default_root_munger]\n )\n\n\ndef default_munger(_module: \"Module\", *args: Any, **kwargs: Any) -> Tuple[()]:\n if args or kwargs:\n raise Web3ValidationError(\"Parameters cannot be passed to a property.\")\n return ()\n\n\ndef default_root_munger(_module: \"Module\", *args: Any) -> List[Any]:\n return [*args]\n\n\nTFunc = TypeVar(\"TFunc\", bound=Callable[..., Any])\n\n\nclass Method(Generic[TFunc]):\n \"\"\"Method object for web3 module methods\n\n Calls to the Method go through these steps:\n\n 1. input munging - includes normalization, parameter checking, early parameter\n formatting. Any processing on the input parameters that need to happen before\n json_rpc method string selection occurs.\n\n A note about mungers: The first (root) munger should reflect the desired\n api function arguments. In other words, if the api function wants to\n behave as: `get_balance(account, block_identifier=None)`, the root munger\n should accept these same arguments, with the addition of the module as\n the first argument e.g.:\n\n ```\n def get_balance_root_munger(module, account, block_identifier=None):\n if block_identifier is None:\n block_identifier = DEFAULT_BLOCK\n return module, [account, block_identifier]\n ```\n\n all mungers should return an argument list.\n\n if no munger is provided, a default munger expecting no method arguments\n will be used.\n\n 2. method selection - The json_rpc_method argument can be method string or a\n function that returns a method string. If a callable is provided the processed\n method inputs are passed to the method selection function, and the returned\n method string is used.\n\n 3. 
request and response formatters are set - formatters are retrieved\n using the json rpc method string.\n\n 4. After the parameter processing from steps 1-3 the request is made using\n the calling function returned by the module attribute ``retrieve_caller_fn``\n and the response formatters are applied to the output.\n \"\"\"\n\n def __init__(\n self,\n json_rpc_method: Optional[RPCEndpoint] = None,\n mungers: Optional[Sequence[Munger]] = None,\n request_formatters: Optional[Callable[..., TReturn]] = None,\n result_formatters: Optional[Callable[..., TReturn]] = None,\n null_result_formatters: Optional[Callable[..., TReturn]] = None,\n method_choice_depends_on_args: Optional[Callable[..., RPCEndpoint]] = None,\n is_property: bool = False,\n ):\n self.json_rpc_method = json_rpc_method\n self.mungers = _set_mungers(mungers, is_property)\n self.request_formatters = request_formatters or get_request_formatters\n self.result_formatters = result_formatters or get_result_formatters\n self.null_result_formatters = (\n null_result_formatters or get_null_result_formatters\n )\n self.method_choice_depends_on_args = method_choice_depends_on_args\n self.is_property = is_property\n\n def __get__(\n self, obj: Optional[\"Module\"] = None, obj_type: Optional[Type[\"Module\"]] = None\n ) -> TFunc:\n if obj is None:\n raise TypeError(\n \"Direct calls to methods are not supported. \"\n \"Methods must be called from an module instance, \"\n \"usually attached to a web3 instance.\"\n )\n return obj.retrieve_caller_fn(self)\n\n @property\n def method_selector_fn(\n self,\n ) -> Callable[..., Union[RPCEndpoint, Callable[..., RPCEndpoint]]]:\n \"\"\"Gets the method selector from the config.\"\"\"\n if callable(self.json_rpc_method):\n return self.json_rpc_method\n elif isinstance(self.json_rpc_method, (str,)):\n return lambda *_: self.json_rpc_method\n raise ValueError(\n \"``json_rpc_method`` config invalid. 
May be a string or function\"\n )\n\n def input_munger(self, module: \"Module\", args: Any, kwargs: Any) -> List[Any]:\n # This function takes the input parameters and munges them.\n # See the test_process_params test in ``tests/core/method-class/test_method.py``\n # for an example with multiple mungers.\n return functools.reduce(\n lambda args, munger: munger(module, *args, **kwargs), self.mungers, args\n )\n\n def process_params(self, module: \"Module\", *args: Any, **kwargs: Any) -> Tuple[\n Tuple[Union[RPCEndpoint, Callable[..., RPCEndpoint]], Tuple[Any, ...]],\n Tuple[\n Union[TReturn, Dict[str, Callable[..., Any]]],\n Callable[..., Any],\n Union[TReturn, Callable[..., Any]],\n ],\n ]:\n params = self.input_munger(module, args, kwargs)\n\n if self.method_choice_depends_on_args:\n # If the method choice depends on the args that get passed in,\n # the first parameter determines which method needs to be called\n self.json_rpc_method = self.method_choice_depends_on_args(value=params[0])\n\n pending_or_latest_filter_methods = [\n RPC.eth_newPendingTransactionFilter,\n RPC.eth_newBlockFilter,\n ]\n if self.json_rpc_method in pending_or_latest_filter_methods:\n # For pending or latest filter methods, use params to determine\n # which method to call, but don't pass them through with the request\n params = []\n\n method = self.method_selector_fn()\n response_formatters = (\n self.result_formatters(method, module),\n get_error_formatters(method),\n self.null_result_formatters(method),\n )\n request = (\n method,\n _apply_request_formatters(params, self.request_formatters(method)),\n )\n return request, response_formatters\n\n\nclass DeprecatedMethod:\n def __init__(\n self, method: Method[Callable[..., Any]], old_name: str, new_name: str\n ) -> None:\n self.method = method\n self.old_name = old_name\n self.new_name = new_name\n\n def __get__(\n self, obj: Optional[\"Module\"] = None, obj_type: Optional[Type[\"Module\"]] = None\n ) -> Any:\n warnings.warn(\n f\"{self.old_name} is deprecated in favor of {self.new_name}\",\n category=DeprecationWarning,\n )\n return self.method.__get__(obj, obj_type)\n", "path": "web3/method.py"}, {"content": "from typing import (\n Callable,\n)\n\nfrom eth_typing import (\n ChecksumAddress,\n)\n\nfrom web3._utils.rpc_abi import (\n RPC,\n)\nfrom web3.method import (\n Method,\n default_root_munger,\n)\nfrom web3.types import (\n BlockNumber,\n Wei,\n)\n\nmake_dag: Method[Callable[[BlockNumber], bool]] = Method(\n RPC.miner_makeDag,\n mungers=[default_root_munger],\n)\n\n\nset_extra: Method[Callable[[str], bool]] = Method(\n RPC.miner_setExtra,\n mungers=[default_root_munger],\n)\n\n\nset_etherbase: Method[Callable[[ChecksumAddress], bool]] = Method(\n RPC.miner_setEtherbase,\n mungers=[default_root_munger],\n)\n\n\nset_gas_price: Method[Callable[[Wei], bool]] = Method(\n RPC.miner_setGasPrice,\n mungers=[default_root_munger],\n)\n\n\nstart: Method[Callable[[int], bool]] = Method(\n RPC.miner_start,\n mungers=[default_root_munger],\n)\n\n\nstop: Method[Callable[[], bool]] = Method(\n RPC.miner_stop,\n is_property=True,\n)\n\n\nstart_auto_dag: Method[Callable[[], bool]] = Method(\n RPC.miner_startAutoDag,\n is_property=True,\n)\n\n\nstop_auto_dag: Method[Callable[[], bool]] = Method(\n RPC.miner_stopAutoDag,\n is_property=True,\n)\n", "path": "web3/_utils/miner.py"}]} | 3,478 | 1,005 |
gh_patches_debug_17332 | rasdani/github-patches | git_diff | napalm-automation__napalm-704 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`base.clitools.cl_napalm:main` entrypoint still uses PIP internals
https://github.com/napalm-automation/napalm/blob/f45fc4f8e9108cacd76ce8f5cd9182df45129480/napalm/base/clitools/cl_napalm.py#L146
Should switch to `pkg_resources.find_distributions()` or similar.
</issue>
<code>
[start of napalm/base/clitools/cl_napalm.py]
1 # Python3 support
2 from __future__ import print_function
3 from __future__ import unicode_literals
4
5 # import helpers
6 from napalm.base import get_network_driver
7 from napalm.base.clitools import helpers
8
9 # stdlib
10 import pip
11 import json
12 import logging
13 import argparse
14 import getpass
15 from functools import wraps
16
17
18 def debugging(name):
19 def real_decorator(func):
20 @wraps(func)
21 def wrapper(*args, **kwargs):
22 censor_parameters = ["password"]
23 censored_kwargs = {k: v if k not in censor_parameters else "*******"
24 for k, v in kwargs.items()}
25 logger.debug("{} - Calling with args: {}, {}".format(name, args, censored_kwargs))
26 try:
27 r = func(*args, **kwargs)
28 logger.debug("{} - Successful".format(name))
29 return r
30 except NotImplementedError:
31 if name not in ["pre_connection_tests", "connection_tests",
32 "post_connection_tests"]:
33 logger.debug("{} - Not implemented".format(name))
34 except Exception as e:
35 logger.error("{} - Failed: {}".format(name, e))
36 print("\n================= Traceback =================\n")
37 raise
38 return wrapper
39 return real_decorator
40
41
42 logger = logging.getLogger('napalm')
43
44
45 def build_help():
46 parser = argparse.ArgumentParser(
47 description='Command line tool to handle configuration on devices using NAPALM.'
48 'The script will print the diff on the screen',
49 epilog='Automate all the things!!!'
50 )
51 parser.add_argument(
52 dest='hostname',
53 action='store',
54 help='Host where you want to deploy the configuration.'
55 )
56 parser.add_argument(
57 '--user', '-u',
58 dest='user',
59 action='store',
60 default=getpass.getuser(),
61 help='User for authenticating to the host. Default: user running the script.'
62 )
63 parser.add_argument(
64 '--password', '-p',
65 dest='password',
66 action='store',
67 help='Password for authenticating to the host.'
68 'If you do not provide a password in the CLI you will be prompted.',
69 )
70 parser.add_argument(
71 '--vendor', '-v',
72 dest='vendor',
73 action='store',
74 required=True,
75 help='Host Operating System.'
76 )
77 parser.add_argument(
78 '--optional_args', '-o',
79 dest='optional_args',
80 action='store',
81 help='String with comma separated key=value pairs passed via optional_args to the driver.',
82 )
83 parser.add_argument(
84 '--debug',
85 dest='debug',
86 action='store_true',
87 help='Enables debug mode; more verbosity.'
88 )
89 subparser = parser.add_subparsers(title='actions')
90
91 config = subparser.add_parser('configure', help='Perform a configuration operation')
92 config.set_defaults(which='config')
93 config.add_argument(
94 dest='config_file',
95 action='store',
96 help='File containing the configuration you want to deploy.'
97 )
98 config.add_argument(
99 '--strategy', '-s',
100 dest='strategy',
101 action='store',
102 choices=['replace', 'merge'],
103 default='replace',
104 help='Strategy to use to deploy configuration. Default: replace.'
105 )
106 config.add_argument(
107 '--dry-run', '-d',
108 dest='dry_run',
109 action='store_true',
110 default=None,
111 help='Only returns diff, it does not deploy the configuration.',
112 )
113
114 call = subparser.add_parser('call', help='Call a napalm method')
115 call.set_defaults(which='call')
116 call.add_argument(
117 dest='method',
118 action='store',
119 help='Run this method'
120 )
121 call.add_argument(
122 '--method-kwargs', '-k',
123 dest='method_kwargs',
124 action='store',
125 help='kwargs to pass to the method. For example: "destination=1.1.1.1,protocol=bgp"'
126 )
127
128 validate = subparser.add_parser('validate', help='Validate configuration/state')
129 validate.set_defaults(which='validate')
130 validate.add_argument(
131 dest='validation_file',
132 action='store',
133 help='Validation file containing resources derised states'
134 )
135 args = parser.parse_args()
136
137 if args.password is None:
138 password = getpass.getpass('Enter password: ')
139 setattr(args, 'password', password)
140
141 return args
142
143
144 def check_installed_packages():
145 logger.debug("Gathering napalm packages")
146 installed_packages = pip.get_installed_distributions()
147 napalm_packages = sorted(["{}=={}".format(i.key, i.version)
148 for i in installed_packages if i.key.startswith("napalm")])
149 for n in napalm_packages:
150 logger.debug(n)
151
152
153 @debugging("get_network_driver")
154 def call_get_network_driver(vendor):
155 return get_network_driver(vendor)
156
157
158 @debugging("__init__")
159 def call_instantiating_object(driver, *args, **kwargs):
160 return driver(*args, **kwargs)
161
162
163 @debugging("pre_connection_tests")
164 def call_pre_connection(driver):
165 driver.pre_connection_tests()
166
167
168 @debugging("connection_tests")
169 def call_connection(device):
170 device.connection_tests()
171
172
173 @debugging("post_connection_tests")
174 def call_post_connection(device):
175 device.post_connection_tests()
176
177
178 @debugging("get_facts")
179 def call_facts(device):
180 facts = device.get_facts()
181 logger.debug("Gathered facts:\n{}".format(json.dumps(facts, indent=4)))
182 print(json.dumps(facts, indent=4))
183
184
185 @debugging("close")
186 def call_close(device):
187 return device.close()
188
189
190 @debugging("open")
191 def call_open_device(device):
192 return device.open()
193
194
195 @debugging("load_replace_candidate")
196 def call_load_replace_candidate(device, *args, **kwargs):
197 return device.load_replace_candidate(*args, **kwargs)
198
199
200 @debugging("load_merge_candidate")
201 def call_load_merge_candidate(device, *args, **kwargs):
202 return device.load_merge_candidate(*args, **kwargs)
203
204
205 @debugging("compare_config")
206 def call_compare_config(device, *args, **kwargs):
207 diff = device.compare_config(*args, **kwargs)
208 logger.debug("Gathered diff:")
209 print(diff)
210 return diff
211
212
213 @debugging("commit_config")
214 def call_commit_config(device, *args, **kwargs):
215 return device.commit_config(*args, **kwargs)
216
217
218 def configuration_change(device, config_file, strategy, dry_run):
219 if strategy == 'replace':
220 strategy_method = call_load_replace_candidate
221 elif strategy == 'merge':
222 strategy_method = call_load_merge_candidate
223
224 strategy_method(device, filename=config_file)
225
226 diff = call_compare_config(device)
227
228 if not dry_run:
229 call_commit_config(device)
230 return diff
231
232
233 @debugging("method")
234 def call_getter(device, method, **kwargs):
235 logger.debug("{} - Attempting to resolve method".format(method))
236 func = getattr(device, method)
237 logger.debug("{} - Attempting to call method with kwargs: {}".format(method, kwargs))
238 r = func(**kwargs)
239 logger.debug("{} - Response".format(method))
240 print(json.dumps(r, indent=4))
241
242
243 @debugging("compliance_report")
244 def call_compliance_report(device, validation_file):
245 result = device.compliance_report(validation_file)
246 print(json.dumps(result, indent=4))
247 return result
248
249
250 def run_tests(args):
251 driver = call_get_network_driver(args.vendor)
252 optional_args = helpers.parse_optional_args(args.optional_args)
253
254 device = call_instantiating_object(driver, args.hostname, args.user, password=args.password,
255 timeout=60, optional_args=optional_args)
256
257 if args.debug:
258 call_pre_connection(device)
259
260 call_open_device(device)
261
262 if args.debug:
263 call_connection(device)
264 call_facts(device)
265
266 if args.which == 'call':
267 method_kwargs = helpers.parse_optional_args(args.method_kwargs)
268 call_getter(device, args.method, **method_kwargs)
269 elif args.which == 'config':
270 configuration_change(device, args.config_file, args.strategy, args.dry_run)
271 elif args.which == 'validate':
272 call_compliance_report(device, args.validation_file)
273
274 call_close(device)
275
276 if args.debug:
277 call_post_connection(device)
278
279
280 def main():
281 args = build_help()
282 helpers.configure_logging(logger, debug=args.debug)
283 logger.debug("Starting napalm's debugging tool")
284 check_installed_packages()
285 run_tests(args)
286
287
288 if __name__ == '__main__':
289 main()
290
[end of napalm/base/clitools/cl_napalm.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/napalm/base/clitools/cl_napalm.py b/napalm/base/clitools/cl_napalm.py
--- a/napalm/base/clitools/cl_napalm.py
+++ b/napalm/base/clitools/cl_napalm.py
@@ -7,11 +7,11 @@
from napalm.base.clitools import helpers
# stdlib
-import pip
import json
import logging
import argparse
import getpass
+import pkg_resources
from functools import wraps
@@ -143,9 +143,8 @@
def check_installed_packages():
logger.debug("Gathering napalm packages")
- installed_packages = pip.get_installed_distributions()
napalm_packages = sorted(["{}=={}".format(i.key, i.version)
- for i in installed_packages if i.key.startswith("napalm")])
+ for i in pkg_resources.working_set if i.key.startswith("napalm")])
for n in napalm_packages:
logger.debug(n)
| {"golden_diff": "diff --git a/napalm/base/clitools/cl_napalm.py b/napalm/base/clitools/cl_napalm.py\n--- a/napalm/base/clitools/cl_napalm.py\n+++ b/napalm/base/clitools/cl_napalm.py\n@@ -7,11 +7,11 @@\n from napalm.base.clitools import helpers\n \n # stdlib\n-import pip\n import json\n import logging\n import argparse\n import getpass\n+import pkg_resources\n from functools import wraps\n \n \n@@ -143,9 +143,8 @@\n \n def check_installed_packages():\n logger.debug(\"Gathering napalm packages\")\n- installed_packages = pip.get_installed_distributions()\n napalm_packages = sorted([\"{}=={}\".format(i.key, i.version)\n- for i in installed_packages if i.key.startswith(\"napalm\")])\n+ for i in pkg_resources.working_set if i.key.startswith(\"napalm\")])\n for n in napalm_packages:\n logger.debug(n)\n", "issue": "`base.clitools.cl_napalm:main` entrypoint still uses PIP internals\nhttps://github.com/napalm-automation/napalm/blob/f45fc4f8e9108cacd76ce8f5cd9182df45129480/napalm/base/clitools/cl_napalm.py#L146\r\n\r\nShould switch to `pkg_resources.find_distributions()` or similar.\n", "before_files": [{"content": "# Python3 support\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\n# import helpers\nfrom napalm.base import get_network_driver\nfrom napalm.base.clitools import helpers\n\n# stdlib\nimport pip\nimport json\nimport logging\nimport argparse\nimport getpass\nfrom functools import wraps\n\n\ndef debugging(name):\n def real_decorator(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n censor_parameters = [\"password\"]\n censored_kwargs = {k: v if k not in censor_parameters else \"*******\"\n for k, v in kwargs.items()}\n logger.debug(\"{} - Calling with args: {}, {}\".format(name, args, censored_kwargs))\n try:\n r = func(*args, **kwargs)\n logger.debug(\"{} - Successful\".format(name))\n return r\n except NotImplementedError:\n if name not in [\"pre_connection_tests\", \"connection_tests\",\n \"post_connection_tests\"]:\n logger.debug(\"{} - Not implemented\".format(name))\n except Exception as e:\n logger.error(\"{} - Failed: {}\".format(name, e))\n print(\"\\n================= Traceback =================\\n\")\n raise\n return wrapper\n return real_decorator\n\n\nlogger = logging.getLogger('napalm')\n\n\ndef build_help():\n parser = argparse.ArgumentParser(\n description='Command line tool to handle configuration on devices using NAPALM.'\n 'The script will print the diff on the screen',\n epilog='Automate all the things!!!'\n )\n parser.add_argument(\n dest='hostname',\n action='store',\n help='Host where you want to deploy the configuration.'\n )\n parser.add_argument(\n '--user', '-u',\n dest='user',\n action='store',\n default=getpass.getuser(),\n help='User for authenticating to the host. 
Default: user running the script.'\n )\n parser.add_argument(\n '--password', '-p',\n dest='password',\n action='store',\n help='Password for authenticating to the host.'\n 'If you do not provide a password in the CLI you will be prompted.',\n )\n parser.add_argument(\n '--vendor', '-v',\n dest='vendor',\n action='store',\n required=True,\n help='Host Operating System.'\n )\n parser.add_argument(\n '--optional_args', '-o',\n dest='optional_args',\n action='store',\n help='String with comma separated key=value pairs passed via optional_args to the driver.',\n )\n parser.add_argument(\n '--debug',\n dest='debug',\n action='store_true',\n help='Enables debug mode; more verbosity.'\n )\n subparser = parser.add_subparsers(title='actions')\n\n config = subparser.add_parser('configure', help='Perform a configuration operation')\n config.set_defaults(which='config')\n config.add_argument(\n dest='config_file',\n action='store',\n help='File containing the configuration you want to deploy.'\n )\n config.add_argument(\n '--strategy', '-s',\n dest='strategy',\n action='store',\n choices=['replace', 'merge'],\n default='replace',\n help='Strategy to use to deploy configuration. Default: replace.'\n )\n config.add_argument(\n '--dry-run', '-d',\n dest='dry_run',\n action='store_true',\n default=None,\n help='Only returns diff, it does not deploy the configuration.',\n )\n\n call = subparser.add_parser('call', help='Call a napalm method')\n call.set_defaults(which='call')\n call.add_argument(\n dest='method',\n action='store',\n help='Run this method'\n )\n call.add_argument(\n '--method-kwargs', '-k',\n dest='method_kwargs',\n action='store',\n help='kwargs to pass to the method. For example: \"destination=1.1.1.1,protocol=bgp\"'\n )\n\n validate = subparser.add_parser('validate', help='Validate configuration/state')\n validate.set_defaults(which='validate')\n validate.add_argument(\n dest='validation_file',\n action='store',\n help='Validation file containing resources derised states'\n )\n args = parser.parse_args()\n\n if args.password is None:\n password = getpass.getpass('Enter password: ')\n setattr(args, 'password', password)\n\n return args\n\n\ndef check_installed_packages():\n logger.debug(\"Gathering napalm packages\")\n installed_packages = pip.get_installed_distributions()\n napalm_packages = sorted([\"{}=={}\".format(i.key, i.version)\n for i in installed_packages if i.key.startswith(\"napalm\")])\n for n in napalm_packages:\n logger.debug(n)\n\n\n@debugging(\"get_network_driver\")\ndef call_get_network_driver(vendor):\n return get_network_driver(vendor)\n\n\n@debugging(\"__init__\")\ndef call_instantiating_object(driver, *args, **kwargs):\n return driver(*args, **kwargs)\n\n\n@debugging(\"pre_connection_tests\")\ndef call_pre_connection(driver):\n driver.pre_connection_tests()\n\n\n@debugging(\"connection_tests\")\ndef call_connection(device):\n device.connection_tests()\n\n\n@debugging(\"post_connection_tests\")\ndef call_post_connection(device):\n device.post_connection_tests()\n\n\n@debugging(\"get_facts\")\ndef call_facts(device):\n facts = device.get_facts()\n logger.debug(\"Gathered facts:\\n{}\".format(json.dumps(facts, indent=4)))\n print(json.dumps(facts, indent=4))\n\n\n@debugging(\"close\")\ndef call_close(device):\n return device.close()\n\n\n@debugging(\"open\")\ndef call_open_device(device):\n return device.open()\n\n\n@debugging(\"load_replace_candidate\")\ndef call_load_replace_candidate(device, *args, **kwargs):\n return device.load_replace_candidate(*args, 
**kwargs)\n\n\n@debugging(\"load_merge_candidate\")\ndef call_load_merge_candidate(device, *args, **kwargs):\n return device.load_merge_candidate(*args, **kwargs)\n\n\n@debugging(\"compare_config\")\ndef call_compare_config(device, *args, **kwargs):\n diff = device.compare_config(*args, **kwargs)\n logger.debug(\"Gathered diff:\")\n print(diff)\n return diff\n\n\n@debugging(\"commit_config\")\ndef call_commit_config(device, *args, **kwargs):\n return device.commit_config(*args, **kwargs)\n\n\ndef configuration_change(device, config_file, strategy, dry_run):\n if strategy == 'replace':\n strategy_method = call_load_replace_candidate\n elif strategy == 'merge':\n strategy_method = call_load_merge_candidate\n\n strategy_method(device, filename=config_file)\n\n diff = call_compare_config(device)\n\n if not dry_run:\n call_commit_config(device)\n return diff\n\n\n@debugging(\"method\")\ndef call_getter(device, method, **kwargs):\n logger.debug(\"{} - Attempting to resolve method\".format(method))\n func = getattr(device, method)\n logger.debug(\"{} - Attempting to call method with kwargs: {}\".format(method, kwargs))\n r = func(**kwargs)\n logger.debug(\"{} - Response\".format(method))\n print(json.dumps(r, indent=4))\n\n\n@debugging(\"compliance_report\")\ndef call_compliance_report(device, validation_file):\n result = device.compliance_report(validation_file)\n print(json.dumps(result, indent=4))\n return result\n\n\ndef run_tests(args):\n driver = call_get_network_driver(args.vendor)\n optional_args = helpers.parse_optional_args(args.optional_args)\n\n device = call_instantiating_object(driver, args.hostname, args.user, password=args.password,\n timeout=60, optional_args=optional_args)\n\n if args.debug:\n call_pre_connection(device)\n\n call_open_device(device)\n\n if args.debug:\n call_connection(device)\n call_facts(device)\n\n if args.which == 'call':\n method_kwargs = helpers.parse_optional_args(args.method_kwargs)\n call_getter(device, args.method, **method_kwargs)\n elif args.which == 'config':\n configuration_change(device, args.config_file, args.strategy, args.dry_run)\n elif args.which == 'validate':\n call_compliance_report(device, args.validation_file)\n\n call_close(device)\n\n if args.debug:\n call_post_connection(device)\n\n\ndef main():\n args = build_help()\n helpers.configure_logging(logger, debug=args.debug)\n logger.debug(\"Starting napalm's debugging tool\")\n check_installed_packages()\n run_tests(args)\n\n\nif __name__ == '__main__':\n main()\n", "path": "napalm/base/clitools/cl_napalm.py"}]} | 3,238 | 219 |
gh_patches_debug_8312 | rasdani/github-patches | git_diff | opensearch-project__opensearch-build-672 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[Bug]: The location specified in bundle-manifest (manifest.yml) are invalid
### Describe the bug
A build manifest is bundled inside the OpenSearch bundle tarball (manifest.yml) which contains all the information about the components used to build the bundle. The bundle manifest contains a key `location` whose value is invalid for all components.
```
build:
architecture: x64
id: '317'
location: https://ci.opensearch.org/ci/bundles/1.1.0/317/opensearch-1.1.0-linux-x64.tar.gz
name: OpenSearch
version: 1.1.0
components:
- commit_id: 15e9f137622d878b79103df8f82d78d782b686a1
location: https://ci.opensearch.org/ci/builds/1.1.0/317/bundle/opensearch-min-1.1.0-linux-x64.tar.gz
name: OpenSearch
ref: '1.1'
repository: https://github.com/opensearch-project/OpenSearch.git
```
### To reproduce
Download the bundle. Untar it and see the manifest.yml
Try accessing the location url to download individual component. It will give `Access Denied` error
### Expected behavior
The URLs should be valid. Each component should be downloadable from the given location url
### Screenshots
_No response_
### Host / Environment
_No response_
### Additional context
_No response_
### Relevant log output
_No response_
</issue>
<code>
[start of bundle-workflow/src/assemble_workflow/bundle_recorder.py]
1 # SPDX-License-Identifier: Apache-2.0
2 #
3 # The OpenSearch Contributors require contributions made to
4 # this file be licensed under the Apache-2.0 license or a
5 # compatible open source license.
6
7 import os
8 from urllib.parse import urljoin
9
10 from manifests.bundle_manifest import BundleManifest
11
12
13 class BundleRecorder:
14 def __init__(self, build, output_dir, artifacts_dir):
15 self.output_dir = output_dir
16 self.build_id = build.id
17 self.public_url = os.getenv("PUBLIC_ARTIFACT_URL", None)
18 self.version = build.version
19 self.tar_name = self.__get_tar_name(build)
20 self.artifacts_dir = artifacts_dir
21 self.bundle_manifest = self.BundleManifestBuilder(
22 build.id,
23 build.name,
24 build.version,
25 build.architecture,
26 self.__get_tar_location(),
27 )
28
29 def __get_tar_name(self, build):
30 parts = [build.name.lower(), build.version, "linux", build.architecture]
31 return "-".join(parts) + ".tar.gz"
32
33 def __get_public_url_path(self, folder, rel_path):
34 path = "{}/{}/{}/{}".format(folder, self.version, self.build_id, rel_path)
35 return urljoin(self.public_url, path)
36
37 def __get_location(self, folder_name, rel_path, abs_path):
38 if self.public_url:
39 return self.__get_public_url_path(folder_name, rel_path)
40 return abs_path
41
42 # Assembled bundles are expected to be served from a separate "bundles" folder
43 # Example: https://artifacts.opensearch.org/bundles/1.0.0/<build-id
44 def __get_tar_location(self):
45 return self.__get_location(
46 "bundles", self.tar_name, os.path.join(self.output_dir, self.tar_name)
47 )
48
49 # Build artifacts are expected to be served from a "builds" folder
50 # Example: https://artifacts.opensearch.org/builds/1.0.0/<build-id>
51 def __get_component_location(self, component_rel_path):
52 abs_path = os.path.join(self.artifacts_dir, component_rel_path)
53 return self.__get_location("builds", component_rel_path, abs_path)
54
55 def record_component(self, component, rel_path):
56 self.bundle_manifest.append_component(
57 component.name,
58 component.repository,
59 component.ref,
60 component.commit_id,
61 self.__get_component_location(rel_path),
62 )
63
64 def get_manifest(self):
65 return self.bundle_manifest.to_manifest()
66
67 def write_manifest(self, folder):
68 manifest_path = os.path.join(folder, "manifest.yml")
69 self.get_manifest().to_file(manifest_path)
70
71 class BundleManifestBuilder:
72 def __init__(self, build_id, name, version, arch, location):
73 self.data = {}
74 self.data["build"] = {}
75 self.data["build"]["id"] = build_id
76 self.data["build"]["name"] = name
77 self.data["build"]["version"] = str(version)
78 self.data["build"]["architecture"] = arch
79 self.data["build"]["location"] = location
80 self.data["schema-version"] = "1.0"
81 # We need to store components as a hash so that we can append artifacts by component name
82 # When we convert to a BundleManifest this will get converted back into a list
83 self.data["components"] = []
84
85 def append_component(self, name, repository_url, ref, commit_id, location):
86 component = {
87 "name": name,
88 "repository": repository_url,
89 "ref": ref,
90 "commit_id": commit_id,
91 "location": location,
92 }
93 self.data["components"].append(component)
94
95 def to_manifest(self):
96 return BundleManifest(self.data)
97
[end of bundle-workflow/src/assemble_workflow/bundle_recorder.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bundle-workflow/src/assemble_workflow/bundle_recorder.py b/bundle-workflow/src/assemble_workflow/bundle_recorder.py
--- a/bundle-workflow/src/assemble_workflow/bundle_recorder.py
+++ b/bundle-workflow/src/assemble_workflow/bundle_recorder.py
@@ -31,8 +31,8 @@
return "-".join(parts) + ".tar.gz"
def __get_public_url_path(self, folder, rel_path):
- path = "{}/{}/{}/{}".format(folder, self.version, self.build_id, rel_path)
- return urljoin(self.public_url, path)
+ path = "/".join((folder, self.version, self.build_id, rel_path))
+ return urljoin(self.public_url + "/", path)
def __get_location(self, folder_name, rel_path, abs_path):
if self.public_url:
| {"golden_diff": "diff --git a/bundle-workflow/src/assemble_workflow/bundle_recorder.py b/bundle-workflow/src/assemble_workflow/bundle_recorder.py\n--- a/bundle-workflow/src/assemble_workflow/bundle_recorder.py\n+++ b/bundle-workflow/src/assemble_workflow/bundle_recorder.py\n@@ -31,8 +31,8 @@\n return \"-\".join(parts) + \".tar.gz\"\n \n def __get_public_url_path(self, folder, rel_path):\n- path = \"{}/{}/{}/{}\".format(folder, self.version, self.build_id, rel_path)\n- return urljoin(self.public_url, path)\n+ path = \"/\".join((folder, self.version, self.build_id, rel_path))\n+ return urljoin(self.public_url + \"/\", path)\n \n def __get_location(self, folder_name, rel_path, abs_path):\n if self.public_url:\n", "issue": "[Bug]: The location specified in bundle-manifest (manifest.yml) are invalid\n### Describe the bug\n\nA build manifest is bundled inside the OpenSearch bundle tarball (manifest.yml) which contains all the information about the components used to build the bundle. The bundle manifest contains a key `location` whose value is invalid for all components.\r\n\r\n```\r\nbuild:\r\n architecture: x64\r\n id: '317'\r\n location: https://ci.opensearch.org/ci/bundles/1.1.0/317/opensearch-1.1.0-linux-x64.tar.gz\r\n name: OpenSearch\r\n version: 1.1.0\r\ncomponents:\r\n- commit_id: 15e9f137622d878b79103df8f82d78d782b686a1\r\n location: https://ci.opensearch.org/ci/builds/1.1.0/317/bundle/opensearch-min-1.1.0-linux-x64.tar.gz\r\n name: OpenSearch\r\n ref: '1.1'\r\n repository: https://github.com/opensearch-project/OpenSearch.git\r\n```\n\n### To reproduce\n\nDownload the bundle. Untar it and see the manifest.yml\r\n\r\nTry accessing the location url to download individual component. It will give `Access Denied` error\n\n### Expected behavior\n\nThe URLs should be valid. 
Each component should be downloadable from the given location url\n\n### Screenshots\n\n_No response_\n\n### Host / Environment\n\n_No response_\n\n### Additional context\n\n_No response_\n\n### Relevant log output\n\n_No response_\n", "before_files": [{"content": "# SPDX-License-Identifier: Apache-2.0\n#\n# The OpenSearch Contributors require contributions made to\n# this file be licensed under the Apache-2.0 license or a\n# compatible open source license.\n\nimport os\nfrom urllib.parse import urljoin\n\nfrom manifests.bundle_manifest import BundleManifest\n\n\nclass BundleRecorder:\n def __init__(self, build, output_dir, artifacts_dir):\n self.output_dir = output_dir\n self.build_id = build.id\n self.public_url = os.getenv(\"PUBLIC_ARTIFACT_URL\", None)\n self.version = build.version\n self.tar_name = self.__get_tar_name(build)\n self.artifacts_dir = artifacts_dir\n self.bundle_manifest = self.BundleManifestBuilder(\n build.id,\n build.name,\n build.version,\n build.architecture,\n self.__get_tar_location(),\n )\n\n def __get_tar_name(self, build):\n parts = [build.name.lower(), build.version, \"linux\", build.architecture]\n return \"-\".join(parts) + \".tar.gz\"\n\n def __get_public_url_path(self, folder, rel_path):\n path = \"{}/{}/{}/{}\".format(folder, self.version, self.build_id, rel_path)\n return urljoin(self.public_url, path)\n\n def __get_location(self, folder_name, rel_path, abs_path):\n if self.public_url:\n return self.__get_public_url_path(folder_name, rel_path)\n return abs_path\n\n # Assembled bundles are expected to be served from a separate \"bundles\" folder\n # Example: https://artifacts.opensearch.org/bundles/1.0.0/<build-id\n def __get_tar_location(self):\n return self.__get_location(\n \"bundles\", self.tar_name, os.path.join(self.output_dir, self.tar_name)\n )\n\n # Build artifacts are expected to be served from a \"builds\" folder\n # Example: https://artifacts.opensearch.org/builds/1.0.0/<build-id>\n def __get_component_location(self, component_rel_path):\n abs_path = os.path.join(self.artifacts_dir, component_rel_path)\n return self.__get_location(\"builds\", component_rel_path, abs_path)\n\n def record_component(self, component, rel_path):\n self.bundle_manifest.append_component(\n component.name,\n component.repository,\n component.ref,\n component.commit_id,\n self.__get_component_location(rel_path),\n )\n\n def get_manifest(self):\n return self.bundle_manifest.to_manifest()\n\n def write_manifest(self, folder):\n manifest_path = os.path.join(folder, \"manifest.yml\")\n self.get_manifest().to_file(manifest_path)\n\n class BundleManifestBuilder:\n def __init__(self, build_id, name, version, arch, location):\n self.data = {}\n self.data[\"build\"] = {}\n self.data[\"build\"][\"id\"] = build_id\n self.data[\"build\"][\"name\"] = name\n self.data[\"build\"][\"version\"] = str(version)\n self.data[\"build\"][\"architecture\"] = arch\n self.data[\"build\"][\"location\"] = location\n self.data[\"schema-version\"] = \"1.0\"\n # We need to store components as a hash so that we can append artifacts by component name\n # When we convert to a BundleManifest this will get converted back into a list\n self.data[\"components\"] = []\n\n def append_component(self, name, repository_url, ref, commit_id, location):\n component = {\n \"name\": name,\n \"repository\": repository_url,\n \"ref\": ref,\n \"commit_id\": commit_id,\n \"location\": location,\n }\n self.data[\"components\"].append(component)\n\n def to_manifest(self):\n return BundleManifest(self.data)\n", "path": 
"bundle-workflow/src/assemble_workflow/bundle_recorder.py"}]} | 1,892 | 191 |
gh_patches_debug_43991 | rasdani/github-patches | git_diff | boto__boto-280 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
505 HTTP Version Not Supported
Occasionally getting a 505 with CloudFormation requests. Is this just an AWS issue?
Some query params omitted for privacy:
```
send: 'POST / HTTP/1.1\r\nHost: cloudformation.us-east-1.amazonaws.com\r\nAccept-Encoding: identity\r\nContent-Length: 12588\r\nContent-Type: application/x-www-form-urlencoded; charset=UTF-8\r\nUser-Agent: Boto/2.0 (linux2)\r\n\r\nAWSAccessKeyId=XXXXXXXXXXXXXXXXXXX&Action=CreateStack&ContentType=JSON&DisableRollback=false&Timestamp=2011-07-26T13%3A26%3A42Z&Version=2010-05-15&Signature=XXXXXXXXXXXXXXXXXXXXXXXXXXX'
reply: 'HTTP/1.1 505 HTTP Version Not Supported\r\n'
header: Date: Tue, 26 Jul 2011 13:26:42 GMT
header: Connection: close
```
</issue>
<code>
[start of boto/cloudformation/connection.py]
1 # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
2 #
3 # Permission is hereby granted, free of charge, to any person obtaining a
4 # copy of this software and associated documentation files (the
5 # "Software"), to deal in the Software without restriction, including
6 # without limitation the rights to use, copy, modify, merge, publish, dis-
7 # tribute, sublicense, and/or sell copies of the Software, and to permit
8 # persons to whom the Software is furnished to do so, subject to the fol-
9 # lowing conditions:
10 #
11 # The above copyright notice and this permission notice shall be included
12 # in all copies or substantial portions of the Software.
13 #
14 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
16 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
17 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
18 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 # IN THE SOFTWARE.
21
22 try:
23 import simplejson
24 except:
25 import json
26
27 import boto
28 from boto.cloudformation.stack import Stack, StackSummary
29 from boto.cloudformation.stack import StackResource, StackResourceSummary
30 from boto.cloudformation.template import Template
31 from boto.connection import AWSQueryConnection
32 from boto.regioninfo import RegionInfo
33
34 class CloudFormationConnection(AWSQueryConnection):
35
36 """
37 A Connection to the CloudFormation Service.
38 """
39 DefaultRegionName = 'us-east-1'
40 DefaultRegionEndpoint = 'cloudformation.us-east-1.amazonaws.com'
41 APIVersion = '2010-05-15'
42
43 valid_states = ("CREATE_IN_PROGRESS", "CREATE_FAILED", "CREATE_COMPLETE",
44 "ROLLBACK_IN_PROGRESS", "ROLLBACK_FAILED", "ROLLBACK_COMPLETE",
45 "DELETE_IN_PROGRESS", "DELETE_FAILED", "DELETE_COMPLETE")
46
47 def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
48 is_secure=True, port=None, proxy=None, proxy_port=None,
49 proxy_user=None, proxy_pass=None, debug=0,
50 https_connection_factory=None, region=None, path='/', converter=None):
51 if not region:
52 region = RegionInfo(self, self.DefaultRegionName,
53 self.DefaultRegionEndpoint, CloudFormationConnection)
54 self.region = region
55 AWSQueryConnection.__init__(self, aws_access_key_id, aws_secret_access_key,
56 is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,
57 self.region.endpoint, debug, https_connection_factory, path)
58
59 def _required_auth_capability(self):
60 return ['cloudformation']
61
62 def encode_bool(self, v):
63 v = bool(v)
64 return {True: "true", False: "false"}[v]
65
66 def create_stack(self, stack_name, template_body=None, template_url=None,
67 parameters=[], notification_arns=[], disable_rollback=False,
68 timeout_in_minutes=None):
69 params = {'ContentType': "JSON", 'StackName': stack_name,
70 'DisableRollback': self.encode_bool(disable_rollback)}
71 if template_body:
72 params['TemplateBody'] = template_body
73 if template_url:
74 params['TemplateURL'] = template_url
75 if template_body and template_url:
76 boto.log.warning("If both TemplateBody and TemplateURL are"
77 " specified, only TemplateBody will be honored by the API")
78 if len(parameters) > 0:
79 for i, (key, value) in enumerate(parameters):
80 params['Parameters.member.%d.ParameterKey' % (i+1)] = key
81 params['Parameters.member.%d.ParameterValue' % (i+1)] = value
82 if len(notification_arns) > 0:
83 self.build_list_params(params, notification_arns, "NotificationARNs.member")
84 if timeout_in_minutes:
85 params['TimeoutInMinutes'] = int(timeout_in_minutes)
86
87 response = self.make_request('CreateStack', params, '/', 'POST')
88 body = response.read()
89 if response.status == 200:
90 body = json.loads(body)
91 return body['CreateStackResponse']['CreateStackResult']['StackId']
92 else:
93 boto.log.error('%s %s' % (response.status, response.reason))
94 boto.log.error('%s' % body)
95 raise self.ResponseError(response.status, response.reason, body)
96
97 def delete_stack(self, stack_name_or_id):
98 params = {'ContentType': "JSON", 'StackName': stack_name_or_id}
99 response = self.make_request('DeleteStack', params, '/', 'GET')
100 body = response.read()
101 if response.status == 200:
102 return json.loads(body)
103 else:
104 boto.log.error('%s %s' % (response.status, response.reason))
105 boto.log.error('%s' % body)
106 raise self.ResponseError(response.status, response.reason, body)
107
108 def describe_stack_events(self, stack_name_or_id=None, next_token=None):
109 params = {'ContentType' : 'JSON'}
110 if stack_name_or_id:
111 params['StackName'] = stack_name_or_id
112 if next_token:
113 params['NextToken'] = next_token
114 response = self.make_request('DescribeStackEvents', params, '/', 'GET')
115 body = response.read()
116 if response.status == 200:
117 return json.loads(body)
118 else:
119 boto.log.error('%s %s' % (response.status, response.reason))
120 boto.log.error('%s' % body)
121 raise self.ResponseError(response.status, response.reason, body)
122
123 def describe_stack_resource(self, stack_name_or_id, logical_resource_id):
124 params = {'ContentType': "JSON", 'StackName': stack_name_or_id,
125 'LogicalResourceId': logical_resource_id}
126 response = self.make_request('DescribeStackResource', params, '/', 'GET')
127 body = response.read()
128 if response.status == 200:
129 return json.loads(body)
130 else:
131 boto.log.error('%s %s' % (response.status, response.reason))
132 boto.log.error('%s' % body)
133 raise self.ResponseError(response.status, response.reason, body)
134
135 def describe_stack_resources(self, stack_name_or_id=None,
136 logical_resource_id=None,
137 physical_resource_id=None):
138 params = {}
139 if stack_name_or_id:
140 params['StackName'] = stack_name_or_id
141 if logical_resource_id:
142 params['LogicalResourceId'] = logical_resource_id
143 if physical_resource_id:
144 params['PhysicalResourceId'] = physical_resource_id
145 return self.get_list('DescribeStackResources', params, [('member',
146 StackResource)])
147
148 def describe_stacks(self, stack_name_or_id=None):
149 params = {}
150 if stack_name_or_id:
151 params['StackName'] = stack_name_or_id
152 return self.get_list('DescribeStacks', params, [('member', Stack)])
153
154 def get_template(self, stack_name_or_id):
155 params = {'ContentType': "JSON", 'StackName': stack_name_or_id}
156 response = self.make_request('GetTemplate', params, '/', 'GET')
157 body = response.read()
158 if response.status == 200:
159 return json.loads(body)
160 else:
161 boto.log.error('%s %s' % (response.status, response.reason))
162 boto.log.error('%s' % body)
163 raise self.ResponseError(response.status, response.reason, body)
164
165 def list_stack_resources(self, stack_name_or_id, next_token=None):
166 params = {'StackName': stack_name_or_id}
167 if next_token:
168 params['NextToken'] = next_token
169 return self.get_list('ListStackResources', params, [('member',
170 StackResourceSummary)])
171
172 def list_stacks(self, stack_status_filters=[], next_token=None):
173 params = {}
174 if next_token:
175 params['NextToken'] = next_token
176 if len(stack_status_filters) > 0:
177 self.build_list_params(params, stack_status_filters,
178 "StackStatusFilter.member")
179
180 return self.get_list('ListStacks', params, [('member',
181 StackSummary)])
182
183 def validate_template(self, template_body=None, template_url=None):
184 params = {}
185 if template_body:
186 params['TemplateBody'] = template_body
187 if template_url:
188 params['TemplateUrl'] = template_url
189 if template_body and template_url:
190 boto.log.warning("If both TemplateBody and TemplateURL are"
191 " specified, only TemplateBody will be honored by the API")
192 return self.get_object('ValidateTemplate', params, Template)
193
[end of boto/cloudformation/connection.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/boto/cloudformation/connection.py b/boto/cloudformation/connection.py
--- a/boto/cloudformation/connection.py
+++ b/boto/cloudformation/connection.py
@@ -25,7 +25,7 @@
import json
import boto
-from boto.cloudformation.stack import Stack, StackSummary
+from boto.cloudformation.stack import Stack, StackSummary, StackEvent
from boto.cloudformation.stack import StackResource, StackResourceSummary
from boto.cloudformation.template import Template
from boto.connection import AWSQueryConnection
@@ -66,6 +66,41 @@
def create_stack(self, stack_name, template_body=None, template_url=None,
parameters=[], notification_arns=[], disable_rollback=False,
timeout_in_minutes=None):
+ """
+ Creates a CloudFormation Stack as specified by the template.
+
+ :type stack_name: string
+ :param stack_name: The name of the Stack, must be unique amoung running
+ Stacks
+
+ :type template_body: string
+ :param template_body: The template body (JSON string)
+
+ :type template_url: string
+ :param template_url: An S3 URL of a stored template JSON document. If
+ both the template_body and template_url are
+ specified, the template_body takes precedence
+
+ :type parameters: list of tuples
+ :param parameters: A list of (key, value) pairs for template input
+ parameters.
+
+ :type notification_arns: list of strings
+ :param notification_arns: A list of SNS topics to send Stack event
+ notifications to
+
+ :type disable_rollback: bool
+ :param disable_rollback: Indicates whether or not to rollback on
+ failure
+
+ :type timeout_in_minutes: int
+ :param timeout_in_minutes: Maximum amount of time to let the Stack
+ spend creating itself. If this timeout is exceeded,
+ the Stack will enter the CREATE_FAILED state
+
+ :rtype: string
+ :return: The unique Stack ID
+ """
params = {'ContentType': "JSON", 'StackName': stack_name,
'DisableRollback': self.encode_bool(disable_rollback)}
if template_body:
@@ -96,6 +131,7 @@
def delete_stack(self, stack_name_or_id):
params = {'ContentType': "JSON", 'StackName': stack_name_or_id}
+ # TODO: change this to get_status ?
response = self.make_request('DeleteStack', params, '/', 'GET')
body = response.read()
if response.status == 200:
@@ -106,19 +142,13 @@
raise self.ResponseError(response.status, response.reason, body)
def describe_stack_events(self, stack_name_or_id=None, next_token=None):
- params = {'ContentType' : 'JSON'}
+ params = {}
if stack_name_or_id:
params['StackName'] = stack_name_or_id
if next_token:
params['NextToken'] = next_token
- response = self.make_request('DescribeStackEvents', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ return self.get_list('DescribeStackEvents', params, [('member',
+ StackEvent)])
def describe_stack_resource(self, stack_name_or_id, logical_resource_id):
params = {'ContentType': "JSON", 'StackName': stack_name_or_id,
@@ -189,4 +219,5 @@
if template_body and template_url:
boto.log.warning("If both TemplateBody and TemplateURL are"
" specified, only TemplateBody will be honored by the API")
- return self.get_object('ValidateTemplate', params, Template)
+ return self.get_object('ValidateTemplate', params, Template,
+ verb="POST")
| {"golden_diff": "diff --git a/boto/cloudformation/connection.py b/boto/cloudformation/connection.py\n--- a/boto/cloudformation/connection.py\n+++ b/boto/cloudformation/connection.py\n@@ -25,7 +25,7 @@\n import json\n \n import boto\n-from boto.cloudformation.stack import Stack, StackSummary\n+from boto.cloudformation.stack import Stack, StackSummary, StackEvent\n from boto.cloudformation.stack import StackResource, StackResourceSummary\n from boto.cloudformation.template import Template\n from boto.connection import AWSQueryConnection\n@@ -66,6 +66,41 @@\n def create_stack(self, stack_name, template_body=None, template_url=None,\n parameters=[], notification_arns=[], disable_rollback=False,\n timeout_in_minutes=None):\n+ \"\"\"\n+ Creates a CloudFormation Stack as specified by the template.\n+\n+ :type stack_name: string\n+ :param stack_name: The name of the Stack, must be unique amoung running\n+ Stacks\n+\n+ :type template_body: string\n+ :param template_body: The template body (JSON string)\n+\n+ :type template_url: string\n+ :param template_url: An S3 URL of a stored template JSON document. If\n+ both the template_body and template_url are\n+ specified, the template_body takes precedence\n+\n+ :type parameters: list of tuples\n+ :param parameters: A list of (key, value) pairs for template input\n+ parameters.\n+\n+ :type notification_arns: list of strings\n+ :param notification_arns: A list of SNS topics to send Stack event\n+ notifications to\n+\n+ :type disable_rollback: bool\n+ :param disable_rollback: Indicates whether or not to rollback on\n+ failure\n+\n+ :type timeout_in_minutes: int\n+ :param timeout_in_minutes: Maximum amount of time to let the Stack\n+ spend creating itself. If this timeout is exceeded,\n+ the Stack will enter the CREATE_FAILED state\n+\n+ :rtype: string\n+ :return: The unique Stack ID\n+ \"\"\"\n params = {'ContentType': \"JSON\", 'StackName': stack_name,\n 'DisableRollback': self.encode_bool(disable_rollback)}\n if template_body:\n@@ -96,6 +131,7 @@\n \n def delete_stack(self, stack_name_or_id):\n params = {'ContentType': \"JSON\", 'StackName': stack_name_or_id}\n+ # TODO: change this to get_status ?\n response = self.make_request('DeleteStack', params, '/', 'GET')\n body = response.read()\n if response.status == 200:\n@@ -106,19 +142,13 @@\n raise self.ResponseError(response.status, response.reason, body)\n \n def describe_stack_events(self, stack_name_or_id=None, next_token=None):\n- params = {'ContentType' : 'JSON'}\n+ params = {}\n if stack_name_or_id:\n params['StackName'] = stack_name_or_id\n if next_token:\n params['NextToken'] = next_token\n- response = self.make_request('DescribeStackEvents', params, '/', 'GET')\n- body = response.read()\n- if response.status == 200:\n- return json.loads(body)\n- else:\n- boto.log.error('%s %s' % (response.status, response.reason))\n- boto.log.error('%s' % body)\n- raise self.ResponseError(response.status, response.reason, body)\n+ return self.get_list('DescribeStackEvents', params, [('member',\n+ StackEvent)])\n \n def describe_stack_resource(self, stack_name_or_id, logical_resource_id):\n params = {'ContentType': \"JSON\", 'StackName': stack_name_or_id,\n@@ -189,4 +219,5 @@\n if template_body and template_url:\n boto.log.warning(\"If both TemplateBody and TemplateURL are\"\n \" specified, only TemplateBody will be honored by the API\")\n- return self.get_object('ValidateTemplate', params, Template)\n+ return self.get_object('ValidateTemplate', params, Template,\n+ verb=\"POST\")\n", "issue": "505 HTTP Version 
Not Supported\nOccasionally getting a 505 with CloudFormation requests. Is this just an AWS issue?\n\nSome query params omitted for privacy:\n\n```\nsend: 'POST / HTTP/1.1\\r\\nHost: cloudformation.us-east-1.amazonaws.com\\r\\nAccept-Encoding: identity\\r\\nContent-Length: 12588\\r\\nContent-Type: application/x-www-form-urlencoded; charset=UTF-8\\r\\nUser-Agent: Boto/2.0 (linux2)\\r\\n\\r\\nAWSAccessKeyId=XXXXXXXXXXXXXXXXXXX&Action=CreateStack&ContentType=JSON&DisableRollback=false&Timestamp=2011-07-26T13%3A26%3A42Z&Version=2010-05-15&Signature=XXXXXXXXXXXXXXXXXXXXXXXXXXX'\nreply: 'HTTP/1.1 505 HTTP Version Not Supported\\r\\n'\nheader: Date: Tue, 26 Jul 2011 13:26:42 GMT\nheader: Connection: close\n```\n\n", "before_files": [{"content": "# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/\n#\n# Permission is hereby granted, free of charge, to any person obtaining a\n# copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish, dis-\n# tribute, sublicense, and/or sell copies of the Software, and to permit\n# persons to whom the Software is furnished to do so, subject to the fol-\n# lowing conditions:\n#\n# The above copyright notice and this permission notice shall be included\n# in all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-\n# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT\n# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, \n# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n# IN THE SOFTWARE.\n\ntry:\n import simplejson\nexcept:\n import json\n\nimport boto\nfrom boto.cloudformation.stack import Stack, StackSummary\nfrom boto.cloudformation.stack import StackResource, StackResourceSummary\nfrom boto.cloudformation.template import Template\nfrom boto.connection import AWSQueryConnection\nfrom boto.regioninfo import RegionInfo\n\nclass CloudFormationConnection(AWSQueryConnection):\n\n \"\"\"\n A Connection to the CloudFormation Service.\n \"\"\"\n DefaultRegionName = 'us-east-1'\n DefaultRegionEndpoint = 'cloudformation.us-east-1.amazonaws.com'\n APIVersion = '2010-05-15'\n\n valid_states = (\"CREATE_IN_PROGRESS\", \"CREATE_FAILED\", \"CREATE_COMPLETE\",\n \"ROLLBACK_IN_PROGRESS\", \"ROLLBACK_FAILED\", \"ROLLBACK_COMPLETE\",\n \"DELETE_IN_PROGRESS\", \"DELETE_FAILED\", \"DELETE_COMPLETE\")\n\n def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,\n is_secure=True, port=None, proxy=None, proxy_port=None,\n proxy_user=None, proxy_pass=None, debug=0,\n https_connection_factory=None, region=None, path='/', converter=None):\n if not region:\n region = RegionInfo(self, self.DefaultRegionName,\n self.DefaultRegionEndpoint, CloudFormationConnection)\n self.region = region\n AWSQueryConnection.__init__(self, aws_access_key_id, aws_secret_access_key,\n is_secure, port, proxy, proxy_port, proxy_user, proxy_pass,\n self.region.endpoint, debug, https_connection_factory, path)\n\n def _required_auth_capability(self):\n return ['cloudformation']\n\n def encode_bool(self, v):\n v = bool(v)\n return {True: \"true\", False: \"false\"}[v]\n\n def create_stack(self, stack_name, template_body=None, template_url=None,\n parameters=[], 
notification_arns=[], disable_rollback=False,\n timeout_in_minutes=None):\n params = {'ContentType': \"JSON\", 'StackName': stack_name,\n 'DisableRollback': self.encode_bool(disable_rollback)}\n if template_body:\n params['TemplateBody'] = template_body\n if template_url:\n params['TemplateURL'] = template_url\n if template_body and template_url:\n boto.log.warning(\"If both TemplateBody and TemplateURL are\"\n \" specified, only TemplateBody will be honored by the API\")\n if len(parameters) > 0:\n for i, (key, value) in enumerate(parameters):\n params['Parameters.member.%d.ParameterKey' % (i+1)] = key\n params['Parameters.member.%d.ParameterValue' % (i+1)] = value\n if len(notification_arns) > 0:\n self.build_list_params(params, notification_arns, \"NotificationARNs.member\")\n if timeout_in_minutes:\n params['TimeoutInMinutes'] = int(timeout_in_minutes)\n\n response = self.make_request('CreateStack', params, '/', 'POST')\n body = response.read()\n if response.status == 200:\n body = json.loads(body)\n return body['CreateStackResponse']['CreateStackResult']['StackId']\n else:\n boto.log.error('%s %s' % (response.status, response.reason))\n boto.log.error('%s' % body)\n raise self.ResponseError(response.status, response.reason, body)\n\n def delete_stack(self, stack_name_or_id):\n params = {'ContentType': \"JSON\", 'StackName': stack_name_or_id}\n response = self.make_request('DeleteStack', params, '/', 'GET')\n body = response.read()\n if response.status == 200:\n return json.loads(body)\n else:\n boto.log.error('%s %s' % (response.status, response.reason))\n boto.log.error('%s' % body)\n raise self.ResponseError(response.status, response.reason, body)\n\n def describe_stack_events(self, stack_name_or_id=None, next_token=None):\n params = {'ContentType' : 'JSON'}\n if stack_name_or_id:\n params['StackName'] = stack_name_or_id\n if next_token:\n params['NextToken'] = next_token\n response = self.make_request('DescribeStackEvents', params, '/', 'GET')\n body = response.read()\n if response.status == 200:\n return json.loads(body)\n else:\n boto.log.error('%s %s' % (response.status, response.reason))\n boto.log.error('%s' % body)\n raise self.ResponseError(response.status, response.reason, body)\n\n def describe_stack_resource(self, stack_name_or_id, logical_resource_id):\n params = {'ContentType': \"JSON\", 'StackName': stack_name_or_id,\n 'LogicalResourceId': logical_resource_id}\n response = self.make_request('DescribeStackResource', params, '/', 'GET')\n body = response.read()\n if response.status == 200:\n return json.loads(body)\n else:\n boto.log.error('%s %s' % (response.status, response.reason))\n boto.log.error('%s' % body)\n raise self.ResponseError(response.status, response.reason, body)\n\n def describe_stack_resources(self, stack_name_or_id=None,\n logical_resource_id=None,\n physical_resource_id=None):\n params = {}\n if stack_name_or_id:\n params['StackName'] = stack_name_or_id\n if logical_resource_id:\n params['LogicalResourceId'] = logical_resource_id\n if physical_resource_id:\n params['PhysicalResourceId'] = physical_resource_id\n return self.get_list('DescribeStackResources', params, [('member',\n StackResource)])\n\n def describe_stacks(self, stack_name_or_id=None):\n params = {}\n if stack_name_or_id:\n params['StackName'] = stack_name_or_id\n return self.get_list('DescribeStacks', params, [('member', Stack)])\n\n def get_template(self, stack_name_or_id):\n params = {'ContentType': \"JSON\", 'StackName': stack_name_or_id}\n response = self.make_request('GetTemplate', 
params, '/', 'GET')\n body = response.read()\n if response.status == 200:\n return json.loads(body)\n else:\n boto.log.error('%s %s' % (response.status, response.reason))\n boto.log.error('%s' % body)\n raise self.ResponseError(response.status, response.reason, body)\n\n def list_stack_resources(self, stack_name_or_id, next_token=None):\n params = {'StackName': stack_name_or_id}\n if next_token:\n params['NextToken'] = next_token\n return self.get_list('ListStackResources', params, [('member',\n StackResourceSummary)])\n\n def list_stacks(self, stack_status_filters=[], next_token=None):\n params = {}\n if next_token:\n params['NextToken'] = next_token\n if len(stack_status_filters) > 0:\n self.build_list_params(params, stack_status_filters,\n \"StackStatusFilter.member\")\n\n return self.get_list('ListStacks', params, [('member',\n StackSummary)])\n\n def validate_template(self, template_body=None, template_url=None):\n params = {}\n if template_body:\n params['TemplateBody'] = template_body\n if template_url:\n params['TemplateUrl'] = template_url\n if template_body and template_url:\n boto.log.warning(\"If both TemplateBody and TemplateURL are\"\n \" specified, only TemplateBody will be honored by the API\")\n return self.get_object('ValidateTemplate', params, Template)\n", "path": "boto/cloudformation/connection.py"}]} | 3,097 | 910 |
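The golden diff above routes `DescribeStackEvents` through `get_list()` so callers receive parsed `StackEvent` objects instead of a raw JSON body, and it switches `ValidateTemplate` to a POST request. A minimal usage sketch of the patched call follows; the connection setup and the stack name are hypothetical and not taken from the row above.

```
from boto.cloudformation.connection import CloudFormationConnection

# Hypothetical connection -- assumes AWS credentials are available from the
# environment or boto config, as elsewhere in boto.
conn = CloudFormationConnection()

# After the patch this returns a list-like result of StackEvent objects
# (one per stack event), matching the other Describe*/List* helpers, rather
# than the json.loads() dictionary it returned before.
events = conn.describe_stack_events(stack_name_or_id="my-stack")
for event in events:
    print(event)
```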
gh_patches_debug_5840 | rasdani/github-patches | git_diff | OpenNMT__OpenNMT-tf-11 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
AttributeError: 'str' object has no attribute 'update'
```
Traceback (most recent call last):
File "/home/soul/anaconda2/lib/python2.7/runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "/home/soul/anaconda2/lib/python2.7/runpy.py", line 72, in _run_code
exec code in run_globals
File "/home/soul/projects/opennmt-tf/OpenNMT-tf/bin/main.py", line 275, in <module>
main()
File "/home/soul/projects/opennmt-tf/OpenNMT-tf/bin/main.py", line 225, in main
config = load_config(args.config)
File "opennmt/config.py", line 48, in load_config
config[section].update(subconfig[section])
AttributeError: 'str' object has no attribute 'update'
```
The attribute that caused it was "model_dir", where its value was a string.
The config file that I used:
```
# The directory where models and summaries will be saved. It is created if it does not exist.
model_dir: enfr
data:
train_features_file: data/enfr/src-train.txt
train_labels_file: data/enfr/tgt-train.txt
eval_features_file: data/enfr/src-val.txt
eval_labels_file: data/enfr/tgt-val.txt
# (optional) Models may require additional resource files (e.g. vocabularies).
source_words_vocabulary: data/enfr/src-vocab.txt
target_words_vocabulary: data/enfr/tgt-vocab.txt
# Model and optimization parameters.
params:
# The optimizer class name in tf.train or tf.contrib.opt.
optimizer: AdamOptimizer
learning_rate: 0.1
# (optional) Maximum gradients norm (default: None).
clip_gradients: 5.0
# (optional) The type of learning rate decay (default: None). See:
# * https://www.tensorflow.org/versions/master/api_guides/python/train#Decaying_the_learning_rate
# * opennmt/utils/decay.py
# This value may change the semantics of other decay options. See the documentation or the code.
decay_type: exponential_decay
# (optional unless decay_type is set) The learning rate decay rate.
decay_rate: 0.9
# (optional unless decay_type is set) Decay every this many steps.
decay_steps: 10000
# (optional) If true, the learning rate is decayed in a staircase fashion (default: True).
staircase: true
# (optional) After how many steps to start the decay (default: 0).
start_decay_steps: 50000
# (optional) Stop decay when this learning rate value is reached (default: 0).
minimum_learning_rate: 0.0001
# (optional) Width of the beam search (default: 1).
beam_width: 5
# (optional) Length penaly weight to apply on hypotheses (default: 0).
length_penalty: 0.2
# (optional) Maximum decoding iterations before stopping (default: 250).
maximum_iterations: 200
# Training options.
train:
batch_size: 64
# (optional) Save a checkpoint every this many steps.
save_checkpoints_steps: 5000
# (optional) How many checkpoints to keep on disk.
keep_checkpoint_max: 3
# (optional) Save summaries every this many steps.
save_summary_steps: 100
# (optional) Train for this many steps. If not set, train forever.
train_steps: 1000000
# (optional) Evaluate every this many seconds (default: 3600).
eval_delay: 7200
# (optional) Save evaluation predictions in model_dir/eval/.
save_eval_predictions: false
# (optional) The maximum length of feature sequences during training (default: None).
maximum_features_length: 70
# (optional) The maximum length of label sequences during training (default: None).
maximum_labels_length: 70
# (optional) The number of buckets by sequence length to improve training efficiency (default: 5).
num_buckets: 5
# (optional) The number of threads to use for processing data in parallel (default: number of logical cores).
num_parallel_process_calls: 4
# (optional) The data pre-fetch buffer size, e.g. for shuffling examples (default: batch_size * 1000).
buffer_size: 10000
# (optional) Inference options.
infer:
# (optional) The batch size to use (default: 1).
batch_size: 10
# (optional) The number of threads to use for processing data in parallel (default: number of logical cores).
num_parallel_process_calls: 8
# (optional) The data pre-fetch buffer size when processing data in parallel (default: batch_size * 10).
buffer_size: 100
# (optional) For compatible models, the number of hypotheses to output (default: 1).
n_best: 1
```
</issue>
<code>
[start of opennmt/config.py]
1 """Defines functions related to configuration files."""
2
3 from importlib import import_module
4
5 import yaml
6
7
8 def load_model_module(path):
9 """Loads a model configuration file.
10
11 Args:
12 path: The relative path to the configuration file.
13
14 Returns:
15 A Python module.
16 """
17 module, _ = path.rsplit(".", 1)
18 module = module.replace("/", ".")
19 module = import_module(module)
20
21 if not hasattr(module, "model"):
22 raise ImportError("No model defined in {}".format(path))
23
24 return module
25
26 def load_config(config_paths, config=None):
27 """Loads configuration files.
28
29 Args:
30 config_paths: A list of configuration files.
31 config: A (possibly non empty) config dictionary to fill.
32
33 Returns:
34 The configuration dictionary.
35 """
36 if config is None:
37 config = {}
38
39 for config_path in config_paths:
40 with open(config_path) as config_file:
41 subconfig = yaml.load(config_file.read())
42
43 # Add or update section in main configuration.
44 for section in subconfig:
45 if section in config:
46 config[section].update(subconfig[section])
47 else:
48 config[section] = subconfig[section]
49
50 return config
51
[end of opennmt/config.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opennmt/config.py b/opennmt/config.py
--- a/opennmt/config.py
+++ b/opennmt/config.py
@@ -43,7 +43,10 @@
# Add or update section in main configuration.
for section in subconfig:
if section in config:
- config[section].update(subconfig[section])
+ if isinstance(config[section], dict):
+ config[section].update(subconfig[section])
+ else:
+ config[section] = subconfig[section]
else:
config[section] = subconfig[section]
| {"golden_diff": "diff --git a/opennmt/config.py b/opennmt/config.py\n--- a/opennmt/config.py\n+++ b/opennmt/config.py\n@@ -43,7 +43,10 @@\n # Add or update section in main configuration.\n for section in subconfig:\n if section in config:\n- config[section].update(subconfig[section])\n+ if isinstance(config[section], dict):\n+ config[section].update(subconfig[section])\n+ else:\n+ config[section] = subconfig[section]\n else:\n config[section] = subconfig[section]\n", "issue": "AttributeError: 'str' object has no attribute 'update'\n```\r\nTraceback (most recent call last):\r\n File \"/home/soul/anaconda2/lib/python2.7/runpy.py\", line 174, in _run_module_as_main\r\n \"__main__\", fname, loader, pkg_name)\r\n File \"/home/soul/anaconda2/lib/python2.7/runpy.py\", line 72, in _run_code\r\n exec code in run_globals\r\n File \"/home/soul/projects/opennmt-tf/OpenNMT-tf/bin/main.py\", line 275, in <module>\r\n main()\r\n File \"/home/soul/projects/opennmt-tf/OpenNMT-tf/bin/main.py\", line 225, in main\r\n config = load_config(args.config)\r\n File \"opennmt/config.py\", line 48, in load_config\r\n config[section].update(subconfig[section])\r\nAttributeError: 'str' object has no attribute 'update'\r\n```\r\nThe attribute that caused it was \"model_dir\", where its value was a string.\r\n\r\nThe config file that I used:\r\n```\r\n# The directory where models and summaries will be saved. It is created if it does not exist.\r\nmodel_dir: enfr\r\n\r\ndata:\r\n train_features_file: data/enfr/src-train.txt\r\n train_labels_file: data/enfr/tgt-train.txt\r\n eval_features_file: data/enfr/src-val.txt\r\n eval_labels_file: data/enfr/tgt-val.txt\r\n\r\n # (optional) Models may require additional resource files (e.g. vocabularies).\r\n source_words_vocabulary: data/enfr/src-vocab.txt\r\n target_words_vocabulary: data/enfr/tgt-vocab.txt\r\n\r\n# Model and optimization parameters.\r\nparams:\r\n # The optimizer class name in tf.train or tf.contrib.opt.\r\n optimizer: AdamOptimizer\r\n learning_rate: 0.1\r\n\r\n # (optional) Maximum gradients norm (default: None).\r\n clip_gradients: 5.0\r\n # (optional) The type of learning rate decay (default: None). See:\r\n # * https://www.tensorflow.org/versions/master/api_guides/python/train#Decaying_the_learning_rate\r\n # * opennmt/utils/decay.py\r\n # This value may change the semantics of other decay options. 
See the documentation or the code.\r\n decay_type: exponential_decay\r\n # (optional unless decay_type is set) The learning rate decay rate.\r\n decay_rate: 0.9\r\n # (optional unless decay_type is set) Decay every this many steps.\r\n decay_steps: 10000\r\n # (optional) If true, the learning rate is decayed in a staircase fashion (default: True).\r\n staircase: true\r\n # (optional) After how many steps to start the decay (default: 0).\r\n start_decay_steps: 50000\r\n # (optional) Stop decay when this learning rate value is reached (default: 0).\r\n minimum_learning_rate: 0.0001\r\n # (optional) Width of the beam search (default: 1).\r\n beam_width: 5\r\n # (optional) Length penaly weight to apply on hypotheses (default: 0).\r\n length_penalty: 0.2\r\n # (optional) Maximum decoding iterations before stopping (default: 250).\r\n maximum_iterations: 200\r\n\r\n# Training options.\r\ntrain:\r\n batch_size: 64\r\n\r\n # (optional) Save a checkpoint every this many steps.\r\n save_checkpoints_steps: 5000\r\n # (optional) How many checkpoints to keep on disk.\r\n keep_checkpoint_max: 3\r\n # (optional) Save summaries every this many steps.\r\n save_summary_steps: 100\r\n # (optional) Train for this many steps. If not set, train forever.\r\n train_steps: 1000000\r\n # (optional) Evaluate every this many seconds (default: 3600).\r\n eval_delay: 7200\r\n # (optional) Save evaluation predictions in model_dir/eval/.\r\n save_eval_predictions: false\r\n # (optional) The maximum length of feature sequences during training (default: None).\r\n maximum_features_length: 70\r\n # (optional) The maximum length of label sequences during training (default: None).\r\n maximum_labels_length: 70\r\n # (optional) The number of buckets by sequence length to improve training efficiency (default: 5).\r\n num_buckets: 5\r\n # (optional) The number of threads to use for processing data in parallel (default: number of logical cores).\r\n num_parallel_process_calls: 4\r\n # (optional) The data pre-fetch buffer size, e.g. 
for shuffling examples (default: batch_size * 1000).\r\n buffer_size: 10000\r\n\r\n# (optional) Inference options.\r\ninfer:\r\n # (optional) The batch size to use (default: 1).\r\n batch_size: 10\r\n # (optional) The number of threads to use for processing data in parallel (default: number of logical cores).\r\n num_parallel_process_calls: 8\r\n # (optional) The data pre-fetch buffer size when processing data in parallel (default: batch_size * 10).\r\n buffer_size: 100\r\n # (optional) For compatible models, the number of hypotheses to output (default: 1).\r\n n_best: 1\r\n```\n", "before_files": [{"content": "\"\"\"Defines functions related to configuration files.\"\"\"\n\nfrom importlib import import_module\n\nimport yaml\n\n\ndef load_model_module(path):\n \"\"\"Loads a model configuration file.\n\n Args:\n path: The relative path to the configuration file.\n\n Returns:\n A Python module.\n \"\"\"\n module, _ = path.rsplit(\".\", 1)\n module = module.replace(\"/\", \".\")\n module = import_module(module)\n\n if not hasattr(module, \"model\"):\n raise ImportError(\"No model defined in {}\".format(path))\n\n return module\n\ndef load_config(config_paths, config=None):\n \"\"\"Loads configuration files.\n\n Args:\n config_paths: A list of configuration files.\n config: A (possibly non empty) config dictionary to fill.\n\n Returns:\n The configuration dictionary.\n \"\"\"\n if config is None:\n config = {}\n\n for config_path in config_paths:\n with open(config_path) as config_file:\n subconfig = yaml.load(config_file.read())\n\n # Add or update section in main configuration.\n for section in subconfig:\n if section in config:\n config[section].update(subconfig[section])\n else:\n config[section] = subconfig[section]\n\n return config\n", "path": "opennmt/config.py"}]} | 2,076 | 123 |
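The golden diff above guards the section merge with an `isinstance()` check so that scalar top-level values such as `model_dir` are overwritten rather than merged. Below is a standalone reproduction of the failure mode and of the fixed merge logic; the two dictionaries only mirror the shape of the YAML in the issue and are otherwise made up.

```
base = {"model_dir": "enfr", "params": {"learning_rate": 0.1}}
override = {"model_dir": "enfr_v2", "params": {"beam_width": 5}}

for section in override:
    if section in base:
        if isinstance(base[section], dict):
            # Nested sections (e.g. params) are merged key by key.
            base[section].update(override[section])
        else:
            # Scalar values (e.g. model_dir) are simply replaced; calling
            # .update() on them is what raised
            # AttributeError: 'str' object has no attribute 'update'
            base[section] = override[section]
    else:
        base[section] = override[section]

print(base)
# {'model_dir': 'enfr_v2', 'params': {'learning_rate': 0.1, 'beam_width': 5}}
```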
gh_patches_debug_18459 | rasdani/github-patches | git_diff | Qiskit__qiskit-738 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
String formating bug in Coupling
The descriptions of qubits are tuples (e.g. `('q',0)`) that are not automatically converted to strings, so interpolating one with `"%s not in coupling graph" % q1` raises `TypeError: not all arguments converted during string formatting`.
### Informations
- **Qiskit Terra version**: 0.6.0
- **Python version**: 3.6.1
- **Operating system**: MacOS
### What is the current behavior?
```
In [1]: from qiskit.mapper._coupling import Coupling
In [2]: graph = Coupling({0: [1, 2], 1: [2]})
In [3]: graph.distance(('q0',0),('q1',1))
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-3-88a8c8c0c516> in <module>()
----> 1 graph.distance(('q0',0),('q1',1))
/Users/luciano.bello/repos/qiskit-terra/qiskit/mapper/_coupling.py in distance(self, q1, q2)
168 raise CouplingError("distance has not been computed")
169 if q1 not in self.qubits:
--> 170 raise CouplingError("%s not in coupling graph" % q1)
171 if q2 not in self.qubits:
172 raise CouplingError("%s not in coupling graph" % q2)
TypeError: not all arguments converted during string formatting
```
### What is the expected behavior?
```
In [1]: from qiskit.mapper._coupling import Coupling
In [2]: graph = Coupling({0: [1, 2], 1: [2]})
In [3]: graph.distance(('q0',0),('q1',1))
---------------------------------------------------------------------------
CouplingError Traceback (most recent call last)
<ipython-input-3-88a8c8c0c516> in <module>()
----> 1 graph.distance(('q0',0),('q1',1))
/Users/luciano.bello/repos/qiskit-terra/qiskit/mapper/_coupling.py in distance(self, q1, q2)
169 raise CouplingError("distance has not been computed")
170 if q1 not in self.qubits:
--> 171 raise CouplingError("%s not in coupling graph" % (q1,))
172 if q2 not in self.qubits:
173 raise CouplingError("%s not in coupling graph" % (q2,))
CouplingError: "('q0', 0) not in coupling graph"
```
### Suggested solutions
#738
</issue>
<code>
[start of qiskit/mapper/_coupling.py]
1 # -*- coding: utf-8 -*-
2
3 # Copyright 2017, IBM.
4 #
5 # This source code is licensed under the Apache License, Version 2.0 found in
6 # the LICENSE.txt file in the root directory of this source tree.
7
8 """
9 Directed graph object for representing coupling between qubits.
10
11 The nodes of the graph correspond to named qubits and the directed edges
12 indicate which qubits are coupled and the permitted direction of CNOT gates.
13 The object has a distance function that can be used to map quantum circuits
14 onto a device with this coupling.
15 """
16 from collections import OrderedDict
17 import networkx as nx
18 from ._couplingerror import CouplingError
19
20
21 def coupling_dict2list(couplingdict):
22 """Convert coupling map dictionary into list.
23
24 Example dictionary format: {0: [1, 2], 1: [2]}
25 Example list format: [[0, 1], [0, 2], [1, 2]]
26
27 We do not do any checking of the input.
28
29 Return coupling map in list format.
30 """
31 if not couplingdict:
32 return None
33 couplinglist = []
34 for ctl, tgtlist in couplingdict.items():
35 for tgt in tgtlist:
36 couplinglist.append([ctl, tgt])
37 return couplinglist
38
39
40 def coupling_list2dict(couplinglist):
41 """Convert coupling map list into dictionary.
42
43 Example list format: [[0, 1], [0, 2], [1, 2]]
44 Example dictionary format: {0: [1, 2], 1: [2]}
45
46 We do not do any checking of the input.
47
48 Return coupling map in dict format.
49 """
50 if not couplinglist:
51 return None
52 couplingdict = {}
53 for pair in couplinglist:
54 if pair[0] in couplingdict:
55 couplingdict[pair[0]].append(pair[1])
56 else:
57 couplingdict[pair[0]] = [pair[1]]
58 return couplingdict
59
60
61 class Coupling:
62 """
63 Directed graph specifying fixed coupling.
64
65 Nodes correspond to qubits and directed edges correspond to permitted
66 CNOT gates
67 """
68 # pylint: disable=invalid-name
69
70 def __init__(self, couplingdict=None):
71 """
72 Create coupling graph.
73
74 By default, the coupling graph has no nodes. The optional couplingdict
75 specifies the graph as an adjacency list. For example,
76 couplingdict = {0: [1, 2], 1: [2]}.
77 """
78 # self.qubits is dict from qubit (regname,idx) tuples to node indices
79 self.qubits = OrderedDict()
80 # self.index_to_qubit is a dict from node indices to qubits
81 self.index_to_qubit = {}
82 # self.node_counter is integer counter for labeling nodes
83 self.node_counter = 0
84 # self.G is the coupling digraph
85 self.G = nx.DiGraph()
86 # self.dist is a dict of dicts from node pairs to distances
87 # it must be computed, it is the distance on the digraph
88 self.dist = None
89 # Add edges to the graph if the couplingdict is present
90 if couplingdict is not None:
91 for v0, alist in couplingdict.items():
92 for v1 in alist:
93 regname = "q"
94 self.add_edge((regname, v0), (regname, v1))
95 self.compute_distance()
96
97 def size(self):
98 """Return the number of qubits in this graph."""
99 return len(self.qubits)
100
101 def get_qubits(self):
102 """Return the qubits in this graph as a sorted (qreg, index) tuples."""
103 return sorted(list(self.qubits.keys()))
104
105 def get_edges(self):
106 """Return a list of edges in the coupling graph.
107
108 Each edge is a pair of qubits and each qubit is a tuple (qreg, index).
109 """
110 return list(map(lambda x: (self.index_to_qubit[x[0]],
111 self.index_to_qubit[x[1]]), self.G.edges()))
112
113 def add_qubit(self, name):
114 """
115 Add a qubit to the coupling graph.
116
117 name = tuple (regname, idx) for qubit
118 """
119 if name in self.qubits:
120 raise CouplingError("%s already in coupling graph" % name)
121 if not isinstance(name, tuple):
122 raise CouplingError("name %s is not a tuple")
123 if not (isinstance(name[0], str) and isinstance(name[1], int)):
124 raise CouplingError("name %s is not of the right form, it must"
125 " be: (regname, idx)")
126
127 self.node_counter += 1
128 self.G.add_node(self.node_counter)
129 self.G.node[self.node_counter]["name"] = name
130 self.qubits[name] = self.node_counter
131 self.index_to_qubit[self.node_counter] = name
132
133 def add_edge(self, s_name, d_name):
134 """
135 Add directed edge to coupling graph.
136
137 s_name = source qubit tuple
138 d_name = destination qubit tuple
139 """
140 if s_name not in self.qubits:
141 self.add_qubit(s_name)
142 if d_name not in self.qubits:
143 self.add_qubit(d_name)
144 self.G.add_edge(self.qubits[s_name], self.qubits[d_name])
145
146 def connected(self):
147 """
148 Test if the graph is connected.
149
150 Return True if connected, False otherwise
151 """
152 try:
153 return nx.is_weakly_connected(self.G)
154 except nx.exception.NetworkXException:
155 return False
156
157 def compute_distance(self):
158 """
159 Compute the undirected distance function on pairs of nodes.
160
161 The distance map self.dist is computed from the graph using
162 all_pairs_shortest_path_length.
163 """
164 if not self.connected():
165 raise CouplingError("coupling graph not connected")
166 lengths = dict(nx.all_pairs_shortest_path_length(self.G.to_undirected()))
167 self.dist = {}
168 for i in self.qubits.keys():
169 self.dist[i] = {}
170 for j in self.qubits.keys():
171 self.dist[i][j] = lengths[self.qubits[i]][self.qubits[j]]
172
173 def distance(self, q1, q2):
174 """Return the undirected distance between qubit q1 to qubit q2."""
175 if self.dist is None:
176 raise CouplingError("distance has not been computed")
177 if q1 not in self.qubits:
178 raise CouplingError("%s not in coupling graph" % q1)
179 if q2 not in self.qubits:
180 raise CouplingError("%s not in coupling graph" % q2)
181 return self.dist[q1][q2]
182
183 def __str__(self):
184 """Return a string representation of the coupling graph."""
185 s = ""
186 if self.qubits:
187 s += "qubits: "
188 s += ", ".join(["%s[%d] @ %d" % (k[0], k[1], v)
189 for k, v in self.qubits.items()])
190 if self.get_edges():
191 s += "\nedges: "
192 s += ", ".join(
193 ["%s[%d]-%s[%d]" % (e[0][0], e[0][1], e[1][0], e[1][1])
194 for e in self.get_edges()])
195 return s
196
[end of qiskit/mapper/_coupling.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/qiskit/mapper/_coupling.py b/qiskit/mapper/_coupling.py
--- a/qiskit/mapper/_coupling.py
+++ b/qiskit/mapper/_coupling.py
@@ -65,6 +65,7 @@
Nodes correspond to qubits and directed edges correspond to permitted
CNOT gates
"""
+
# pylint: disable=invalid-name
def __init__(self, couplingdict=None):
@@ -175,9 +176,9 @@
if self.dist is None:
raise CouplingError("distance has not been computed")
if q1 not in self.qubits:
- raise CouplingError("%s not in coupling graph" % q1)
+ raise CouplingError("%s not in coupling graph" % (q1,))
if q2 not in self.qubits:
- raise CouplingError("%s not in coupling graph" % q2)
+ raise CouplingError("%s not in coupling graph" % (q2,))
return self.dist[q1][q2]
def __str__(self):
| {"golden_diff": "diff --git a/qiskit/mapper/_coupling.py b/qiskit/mapper/_coupling.py\n--- a/qiskit/mapper/_coupling.py\n+++ b/qiskit/mapper/_coupling.py\n@@ -65,6 +65,7 @@\n Nodes correspond to qubits and directed edges correspond to permitted\n CNOT gates\n \"\"\"\n+\n # pylint: disable=invalid-name\n \n def __init__(self, couplingdict=None):\n@@ -175,9 +176,9 @@\n if self.dist is None:\n raise CouplingError(\"distance has not been computed\")\n if q1 not in self.qubits:\n- raise CouplingError(\"%s not in coupling graph\" % q1)\n+ raise CouplingError(\"%s not in coupling graph\" % (q1,))\n if q2 not in self.qubits:\n- raise CouplingError(\"%s not in coupling graph\" % q2)\n+ raise CouplingError(\"%s not in coupling graph\" % (q2,))\n return self.dist[q1][q2]\n \n def __str__(self):\n", "issue": "String formating bug in Coupling\nThe descriptions of qubits are tuple (e.g. `('q',0)`) that are not automatically converted to strings when `\"%s not in coupling graph\" % q1`, a `TypeError: not all arguments converted during string formatting` raises.\r\n\r\n### Informations\r\n\r\n- **Qiskit Terra version**: 0.6.0\r\n- **Python version**: 3.6.1\r\n- **Operating system**: MacOS\r\n\r\n### What is the current behavior?\r\n```\r\nIn [1]: from qiskit.mapper._coupling import Coupling\r\nIn [2]: graph = Coupling({0: [1, 2], 1: [2]})\r\nIn [3]: graph.distance(('q0',0),('q1',1))\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n<ipython-input-3-88a8c8c0c516> in <module>()\r\n----> 1 graph.distance(('q0',0),('q1',1))\r\n\r\n/Users/luciano.bello/repos/qiskit-terra/qiskit/mapper/_coupling.py in distance(self, q1, q2)\r\n 168 raise CouplingError(\"distance has not been computed\")\r\n 169 if q1 not in self.qubits:\r\n--> 170 raise CouplingError(\"%s not in coupling graph\" % q1)\r\n 171 if q2 not in self.qubits:\r\n 172 raise CouplingError(\"%s not in coupling graph\" % q2)\r\n\r\nTypeError: not all arguments converted during string formatting\r\n```\r\n\r\n### What is the expected behavior?\r\n```\r\nIn [1]: from qiskit.mapper._coupling import Coupling\r\nIn [2]: graph = Coupling({0: [1, 2], 1: [2]})\r\nIn [3]: graph.distance(('q0',0),('q1',1))\r\n---------------------------------------------------------------------------\r\nCouplingError Traceback (most recent call last)\r\n<ipython-input-3-88a8c8c0c516> in <module>()\r\n----> 1 graph.distance(('q0',0),('q1',1))\r\n\r\n/Users/luciano.bello/repos/qiskit-terra/qiskit/mapper/_coupling.py in distance(self, q1, q2)\r\n 169 raise CouplingError(\"distance has not been computed\")\r\n 170 if q1 not in self.qubits:\r\n--> 171 raise CouplingError(\"%s not in coupling graph\" % (q1,))\r\n 172 if q2 not in self.qubits:\r\n 173 raise CouplingError(\"%s not in coupling graph\" % (q2,))\r\n\r\nCouplingError: \"('q0', 0) not in coupling graph\"\r\n```\r\n\r\n### Suggested solutions\r\n\r\n#738 \r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright 2017, IBM.\n#\n# This source code is licensed under the Apache License, Version 2.0 found in\n# the LICENSE.txt file in the root directory of this source tree.\n\n\"\"\"\nDirected graph object for representing coupling between qubits.\n\nThe nodes of the graph correspond to named qubits and the directed edges\nindicate which qubits are coupled and the permitted direction of CNOT gates.\nThe object has a distance function that can be used to map quantum circuits\nonto a device with this coupling.\n\"\"\"\nfrom 
collections import OrderedDict\nimport networkx as nx\nfrom ._couplingerror import CouplingError\n\n\ndef coupling_dict2list(couplingdict):\n \"\"\"Convert coupling map dictionary into list.\n\n Example dictionary format: {0: [1, 2], 1: [2]}\n Example list format: [[0, 1], [0, 2], [1, 2]]\n\n We do not do any checking of the input.\n\n Return coupling map in list format.\n \"\"\"\n if not couplingdict:\n return None\n couplinglist = []\n for ctl, tgtlist in couplingdict.items():\n for tgt in tgtlist:\n couplinglist.append([ctl, tgt])\n return couplinglist\n\n\ndef coupling_list2dict(couplinglist):\n \"\"\"Convert coupling map list into dictionary.\n\n Example list format: [[0, 1], [0, 2], [1, 2]]\n Example dictionary format: {0: [1, 2], 1: [2]}\n\n We do not do any checking of the input.\n\n Return coupling map in dict format.\n \"\"\"\n if not couplinglist:\n return None\n couplingdict = {}\n for pair in couplinglist:\n if pair[0] in couplingdict:\n couplingdict[pair[0]].append(pair[1])\n else:\n couplingdict[pair[0]] = [pair[1]]\n return couplingdict\n\n\nclass Coupling:\n \"\"\"\n Directed graph specifying fixed coupling.\n\n Nodes correspond to qubits and directed edges correspond to permitted\n CNOT gates\n \"\"\"\n # pylint: disable=invalid-name\n\n def __init__(self, couplingdict=None):\n \"\"\"\n Create coupling graph.\n\n By default, the coupling graph has no nodes. The optional couplingdict\n specifies the graph as an adjacency list. For example,\n couplingdict = {0: [1, 2], 1: [2]}.\n \"\"\"\n # self.qubits is dict from qubit (regname,idx) tuples to node indices\n self.qubits = OrderedDict()\n # self.index_to_qubit is a dict from node indices to qubits\n self.index_to_qubit = {}\n # self.node_counter is integer counter for labeling nodes\n self.node_counter = 0\n # self.G is the coupling digraph\n self.G = nx.DiGraph()\n # self.dist is a dict of dicts from node pairs to distances\n # it must be computed, it is the distance on the digraph\n self.dist = None\n # Add edges to the graph if the couplingdict is present\n if couplingdict is not None:\n for v0, alist in couplingdict.items():\n for v1 in alist:\n regname = \"q\"\n self.add_edge((regname, v0), (regname, v1))\n self.compute_distance()\n\n def size(self):\n \"\"\"Return the number of qubits in this graph.\"\"\"\n return len(self.qubits)\n\n def get_qubits(self):\n \"\"\"Return the qubits in this graph as a sorted (qreg, index) tuples.\"\"\"\n return sorted(list(self.qubits.keys()))\n\n def get_edges(self):\n \"\"\"Return a list of edges in the coupling graph.\n\n Each edge is a pair of qubits and each qubit is a tuple (qreg, index).\n \"\"\"\n return list(map(lambda x: (self.index_to_qubit[x[0]],\n self.index_to_qubit[x[1]]), self.G.edges()))\n\n def add_qubit(self, name):\n \"\"\"\n Add a qubit to the coupling graph.\n\n name = tuple (regname, idx) for qubit\n \"\"\"\n if name in self.qubits:\n raise CouplingError(\"%s already in coupling graph\" % name)\n if not isinstance(name, tuple):\n raise CouplingError(\"name %s is not a tuple\")\n if not (isinstance(name[0], str) and isinstance(name[1], int)):\n raise CouplingError(\"name %s is not of the right form, it must\"\n \" be: (regname, idx)\")\n\n self.node_counter += 1\n self.G.add_node(self.node_counter)\n self.G.node[self.node_counter][\"name\"] = name\n self.qubits[name] = self.node_counter\n self.index_to_qubit[self.node_counter] = name\n\n def add_edge(self, s_name, d_name):\n \"\"\"\n Add directed edge to coupling graph.\n\n s_name = source qubit tuple\n d_name = 
destination qubit tuple\n \"\"\"\n if s_name not in self.qubits:\n self.add_qubit(s_name)\n if d_name not in self.qubits:\n self.add_qubit(d_name)\n self.G.add_edge(self.qubits[s_name], self.qubits[d_name])\n\n def connected(self):\n \"\"\"\n Test if the graph is connected.\n\n Return True if connected, False otherwise\n \"\"\"\n try:\n return nx.is_weakly_connected(self.G)\n except nx.exception.NetworkXException:\n return False\n\n def compute_distance(self):\n \"\"\"\n Compute the undirected distance function on pairs of nodes.\n\n The distance map self.dist is computed from the graph using\n all_pairs_shortest_path_length.\n \"\"\"\n if not self.connected():\n raise CouplingError(\"coupling graph not connected\")\n lengths = dict(nx.all_pairs_shortest_path_length(self.G.to_undirected()))\n self.dist = {}\n for i in self.qubits.keys():\n self.dist[i] = {}\n for j in self.qubits.keys():\n self.dist[i][j] = lengths[self.qubits[i]][self.qubits[j]]\n\n def distance(self, q1, q2):\n \"\"\"Return the undirected distance between qubit q1 to qubit q2.\"\"\"\n if self.dist is None:\n raise CouplingError(\"distance has not been computed\")\n if q1 not in self.qubits:\n raise CouplingError(\"%s not in coupling graph\" % q1)\n if q2 not in self.qubits:\n raise CouplingError(\"%s not in coupling graph\" % q2)\n return self.dist[q1][q2]\n\n def __str__(self):\n \"\"\"Return a string representation of the coupling graph.\"\"\"\n s = \"\"\n if self.qubits:\n s += \"qubits: \"\n s += \", \".join([\"%s[%d] @ %d\" % (k[0], k[1], v)\n for k, v in self.qubits.items()])\n if self.get_edges():\n s += \"\\nedges: \"\n s += \", \".join(\n [\"%s[%d]-%s[%d]\" % (e[0][0], e[0][1], e[1][0], e[1][1])\n for e in self.get_edges()])\n return s\n", "path": "qiskit/mapper/_coupling.py"}]} | 3,259 | 242 |
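The golden diff above fixes the error message by wrapping the qubit tuple in a one-element tuple before `%`-interpolation. The underlying Python behaviour can be shown in isolation; the qubit name below is made up.

```
q1 = ("q0", 0)

# A bare tuple on the right of % is unpacked into multiple arguments, so a
# single %s placeholder gets two values and Python raises
# "TypeError: not all arguments converted during string formatting":
#
#     "%s not in coupling graph" % q1
#
# Wrapping it in a one-element tuple keeps it as a single %s argument:
message = "%s not in coupling graph" % (q1,)
print(message)  # ('q0', 0) not in coupling graph
```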
gh_patches_debug_32697 | rasdani/github-patches | git_diff | dask__distributed-6898 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Support for multiple protocols or heterogenous protocols
I have a use case where the `client<=>scheduler` communication needs to be proxied, so we use the [websocket protocol](
https://github.com/dask/distributed/blob/main/distributed/comm/ws.py).
However, doing so means that `scheduler<=>worker` and `worker<=>worker` communication must also use websockets as we assume consistency.
This results in performance issues on systems with high-performance networking like Infiniband or NVLink. We usually use [UCX](https://github.com/dask/distributed/blob/main/distributed/comm/ucx.py) for `worker<=>worker` communication to get the best out of the hardware.
I wanted to start a discussion around a solution for this. The two things that spring to mind are:
- Add support for multiple protocols simultaneously.
- Allow for one protocol for `client<=>scheduler` and a different one for `worker<=>worker`.
</issue>
<code>
[start of distributed/cli/dask_scheduler.py]
1 from __future__ import annotations
2
3 import asyncio
4 import atexit
5 import gc
6 import logging
7 import os
8 import re
9 import sys
10 import warnings
11
12 import click
13
14 from distributed import Scheduler
15 from distributed._signals import wait_for_signals
16 from distributed.preloading import validate_preload_argv
17 from distributed.proctitle import (
18 enable_proctitle_on_children,
19 enable_proctitle_on_current,
20 )
21
22 logger = logging.getLogger("distributed.scheduler")
23
24
25 pem_file_option_type = click.Path(exists=True, resolve_path=True)
26
27
28 @click.command(context_settings=dict(ignore_unknown_options=True))
29 @click.option("--host", type=str, default="", help="URI, IP or hostname of this server")
30 @click.option("--port", type=int, default=None, help="Serving port")
31 @click.option(
32 "--interface",
33 type=str,
34 default=None,
35 help="Preferred network interface like 'eth0' or 'ib0'",
36 )
37 @click.option(
38 "--protocol", type=str, default=None, help="Protocol like tcp, tls, or ucx"
39 )
40 @click.option(
41 "--tls-ca-file",
42 type=pem_file_option_type,
43 default=None,
44 help="CA cert(s) file for TLS (in PEM format)",
45 )
46 @click.option(
47 "--tls-cert",
48 type=pem_file_option_type,
49 default=None,
50 help="certificate file for TLS (in PEM format)",
51 )
52 @click.option(
53 "--tls-key",
54 type=pem_file_option_type,
55 default=None,
56 help="private key file for TLS (in PEM format)",
57 )
58 # XXX default port (or URI) values should be centralized somewhere
59 @click.option(
60 "--bokeh-port", type=int, default=None, help="Deprecated. See --dashboard-address"
61 )
62 @click.option(
63 "--dashboard-address",
64 type=str,
65 default=":8787",
66 show_default=True,
67 help="Address on which to listen for diagnostics dashboard",
68 )
69 @click.option(
70 "--dashboard/--no-dashboard",
71 "dashboard",
72 default=True,
73 required=False,
74 help="Launch the Dashboard [default: --dashboard]",
75 )
76 @click.option(
77 "--bokeh/--no-bokeh",
78 "bokeh",
79 default=None,
80 required=False,
81 help="Deprecated. See --dashboard/--no-dashboard.",
82 )
83 @click.option(
84 "--jupyter/--no-jupyter",
85 "jupyter",
86 default=False,
87 required=False,
88 help="Start a Jupyter Server in the same process. Warning: This will make"
89 "it possible for anyone with access to your dashboard address to run"
90 "Python code",
91 )
92 @click.option("--show/--no-show", default=False, help="Show web UI [default: --show]")
93 @click.option(
94 "--dashboard-prefix", type=str, default="", help="Prefix for the dashboard app"
95 )
96 @click.option(
97 "--use-xheaders",
98 type=bool,
99 default=False,
100 show_default=True,
101 help="User xheaders in dashboard app for ssl termination in header",
102 )
103 @click.option("--pid-file", type=str, default="", help="File to write the process PID")
104 @click.option(
105 "--scheduler-file",
106 type=str,
107 default="",
108 help="File to write connection information. "
109 "This may be a good way to share connection information if your "
110 "cluster is on a shared network file system.",
111 )
112 @click.option(
113 "--preload",
114 type=str,
115 multiple=True,
116 is_eager=True,
117 help="Module that should be loaded by the scheduler process "
118 'like "foo.bar" or "/path/to/foo.py".',
119 )
120 @click.argument(
121 "preload_argv", nargs=-1, type=click.UNPROCESSED, callback=validate_preload_argv
122 )
123 @click.option(
124 "--idle-timeout",
125 default=None,
126 type=str,
127 help="Time of inactivity after which to kill the scheduler",
128 )
129 @click.version_option()
130 def main(
131 host,
132 port,
133 bokeh_port,
134 show,
135 dashboard,
136 bokeh,
137 dashboard_prefix,
138 use_xheaders,
139 pid_file,
140 tls_ca_file,
141 tls_cert,
142 tls_key,
143 dashboard_address,
144 jupyter,
145 **kwargs,
146 ):
147 g0, g1, g2 = gc.get_threshold() # https://github.com/dask/distributed/issues/1653
148 gc.set_threshold(g0 * 3, g1 * 3, g2 * 3)
149
150 enable_proctitle_on_current()
151 enable_proctitle_on_children()
152
153 if bokeh_port is not None:
154 warnings.warn(
155 "The --bokeh-port flag has been renamed to --dashboard-address. "
156 "Consider adding ``--dashboard-address :%d`` " % bokeh_port
157 )
158 dashboard_address = bokeh_port
159 if bokeh is not None:
160 warnings.warn(
161 "The --bokeh/--no-bokeh flag has been renamed to --dashboard/--no-dashboard. "
162 )
163 dashboard = bokeh
164
165 if port is None and (not host or not re.search(r":\d", host)):
166 port = 8786
167
168 sec = {
169 k: v
170 for k, v in [
171 ("tls_ca_file", tls_ca_file),
172 ("tls_scheduler_cert", tls_cert),
173 ("tls_scheduler_key", tls_key),
174 ]
175 if v is not None
176 }
177
178 if not host and (tls_ca_file or tls_cert or tls_key):
179 host = "tls://"
180
181 if pid_file:
182 with open(pid_file, "w") as f:
183 f.write(str(os.getpid()))
184
185 def del_pid_file():
186 if os.path.exists(pid_file):
187 os.remove(pid_file)
188
189 atexit.register(del_pid_file)
190
191 if sys.platform.startswith("linux"):
192 import resource # module fails importing on Windows
193
194 soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
195 limit = max(soft, hard // 2)
196 resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))
197
198 async def run():
199 logger.info("-" * 47)
200
201 scheduler = Scheduler(
202 security=sec,
203 host=host,
204 port=port,
205 dashboard=dashboard,
206 dashboard_address=dashboard_address,
207 http_prefix=dashboard_prefix,
208 jupyter=jupyter,
209 **kwargs,
210 )
211 logger.info("-" * 47)
212
213 async def wait_for_scheduler_to_finish():
214 """Wait for the scheduler to initialize and finish"""
215 await scheduler
216 await scheduler.finished()
217
218 async def wait_for_signals_and_close():
219 """Wait for SIGINT or SIGTERM and close the scheduler upon receiving one of those signals"""
220 await wait_for_signals()
221 await scheduler.close()
222
223 wait_for_signals_and_close_task = asyncio.create_task(
224 wait_for_signals_and_close()
225 )
226 wait_for_scheduler_to_finish_task = asyncio.create_task(
227 wait_for_scheduler_to_finish()
228 )
229
230 done, _ = await asyncio.wait(
231 [wait_for_signals_and_close_task, wait_for_scheduler_to_finish_task],
232 return_when=asyncio.FIRST_COMPLETED,
233 )
234 # Re-raise exceptions from done tasks
235 [task.result() for task in done]
236 logger.info("Stopped scheduler at %r", scheduler.address)
237
238 try:
239 asyncio.run(run())
240 finally:
241 logger.info("End scheduler")
242
243
244 if __name__ == "__main__":
245 main() # pragma: no cover
246
[end of distributed/cli/dask_scheduler.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/distributed/cli/dask_scheduler.py b/distributed/cli/dask_scheduler.py
--- a/distributed/cli/dask_scheduler.py
+++ b/distributed/cli/dask_scheduler.py
@@ -27,7 +27,7 @@
@click.command(context_settings=dict(ignore_unknown_options=True))
@click.option("--host", type=str, default="", help="URI, IP or hostname of this server")
[email protected]("--port", type=int, default=None, help="Serving port")
[email protected]("--port", type=str, default=None, help="Serving port")
@click.option(
"--interface",
type=str,
@@ -130,6 +130,8 @@
def main(
host,
port,
+ protocol,
+ interface,
bokeh_port,
show,
dashboard,
@@ -162,8 +164,29 @@
)
dashboard = bokeh
+ if interface and "," in interface:
+ interface = interface.split(",")
+
+ if protocol and "," in protocol:
+ protocol = protocol.split(",")
+
+ if port:
+ if "," in port:
+ port = [int(p) for p in port.split(",")]
+ else:
+ port = int(port)
+
if port is None and (not host or not re.search(r":\d", host)):
- port = 8786
+ if isinstance(protocol, list):
+ port = [8786] + [0] * (len(protocol) - 1)
+ else:
+ port = 8786
+
+ if isinstance(protocol, list) or isinstance(port, list):
+ if (not isinstance(protocol, list) or not isinstance(port, list)) or len(
+ port
+ ) != len(protocol):
+ raise ValueError("--protocol and --port must both be lists of equal length")
sec = {
k: v
@@ -202,6 +225,8 @@
security=sec,
host=host,
port=port,
+ protocol=protocol,
+ interface=interface,
dashboard=dashboard,
dashboard_address=dashboard_address,
http_prefix=dashboard_prefix,
| {"golden_diff": "diff --git a/distributed/cli/dask_scheduler.py b/distributed/cli/dask_scheduler.py\n--- a/distributed/cli/dask_scheduler.py\n+++ b/distributed/cli/dask_scheduler.py\n@@ -27,7 +27,7 @@\n \n @click.command(context_settings=dict(ignore_unknown_options=True))\n @click.option(\"--host\", type=str, default=\"\", help=\"URI, IP or hostname of this server\")\[email protected](\"--port\", type=int, default=None, help=\"Serving port\")\[email protected](\"--port\", type=str, default=None, help=\"Serving port\")\n @click.option(\n \"--interface\",\n type=str,\n@@ -130,6 +130,8 @@\n def main(\n host,\n port,\n+ protocol,\n+ interface,\n bokeh_port,\n show,\n dashboard,\n@@ -162,8 +164,29 @@\n )\n dashboard = bokeh\n \n+ if interface and \",\" in interface:\n+ interface = interface.split(\",\")\n+\n+ if protocol and \",\" in protocol:\n+ protocol = protocol.split(\",\")\n+\n+ if port:\n+ if \",\" in port:\n+ port = [int(p) for p in port.split(\",\")]\n+ else:\n+ port = int(port)\n+\n if port is None and (not host or not re.search(r\":\\d\", host)):\n- port = 8786\n+ if isinstance(protocol, list):\n+ port = [8786] + [0] * (len(protocol) - 1)\n+ else:\n+ port = 8786\n+\n+ if isinstance(protocol, list) or isinstance(port, list):\n+ if (not isinstance(protocol, list) or not isinstance(port, list)) or len(\n+ port\n+ ) != len(protocol):\n+ raise ValueError(\"--protocol and --port must both be lists of equal length\")\n \n sec = {\n k: v\n@@ -202,6 +225,8 @@\n security=sec,\n host=host,\n port=port,\n+ protocol=protocol,\n+ interface=interface,\n dashboard=dashboard,\n dashboard_address=dashboard_address,\n http_prefix=dashboard_prefix,\n", "issue": "Support for multiple protocols or heterogenous protocols\nI have a use case where the `client<=>scheduler` communication needs to be proxied, so we use the [websocket protocol](\r\nhttps://github.com/dask/distributed/blob/main/distributed/comm/ws.py).\r\n\r\nHowever, doing so means that `scheduler<=>worker` and `worker<=>worker` communication must also use websockets as we assume consistency. \r\n\r\nThis results in performance issues on systems with high-performance networking like Infiniband or NVLink. We usually use [UCX](https://github.com/dask/distributed/blob/main/distributed/comm/ucx.py) for `worker<=>worker` communication to get the best out of the hardware.\r\n\r\nI wanted to start a discussion around a solution for this. 
The two things that spring to mind are:\r\n- Add support for multiple protocols simultaneously.\r\n- Allow for one protocol for `client<=>scheduler` and a different one for `worker<=>worker`.\n", "before_files": [{"content": "from __future__ import annotations\n\nimport asyncio\nimport atexit\nimport gc\nimport logging\nimport os\nimport re\nimport sys\nimport warnings\n\nimport click\n\nfrom distributed import Scheduler\nfrom distributed._signals import wait_for_signals\nfrom distributed.preloading import validate_preload_argv\nfrom distributed.proctitle import (\n enable_proctitle_on_children,\n enable_proctitle_on_current,\n)\n\nlogger = logging.getLogger(\"distributed.scheduler\")\n\n\npem_file_option_type = click.Path(exists=True, resolve_path=True)\n\n\[email protected](context_settings=dict(ignore_unknown_options=True))\[email protected](\"--host\", type=str, default=\"\", help=\"URI, IP or hostname of this server\")\[email protected](\"--port\", type=int, default=None, help=\"Serving port\")\[email protected](\n \"--interface\",\n type=str,\n default=None,\n help=\"Preferred network interface like 'eth0' or 'ib0'\",\n)\[email protected](\n \"--protocol\", type=str, default=None, help=\"Protocol like tcp, tls, or ucx\"\n)\[email protected](\n \"--tls-ca-file\",\n type=pem_file_option_type,\n default=None,\n help=\"CA cert(s) file for TLS (in PEM format)\",\n)\[email protected](\n \"--tls-cert\",\n type=pem_file_option_type,\n default=None,\n help=\"certificate file for TLS (in PEM format)\",\n)\[email protected](\n \"--tls-key\",\n type=pem_file_option_type,\n default=None,\n help=\"private key file for TLS (in PEM format)\",\n)\n# XXX default port (or URI) values should be centralized somewhere\[email protected](\n \"--bokeh-port\", type=int, default=None, help=\"Deprecated. See --dashboard-address\"\n)\[email protected](\n \"--dashboard-address\",\n type=str,\n default=\":8787\",\n show_default=True,\n help=\"Address on which to listen for diagnostics dashboard\",\n)\[email protected](\n \"--dashboard/--no-dashboard\",\n \"dashboard\",\n default=True,\n required=False,\n help=\"Launch the Dashboard [default: --dashboard]\",\n)\[email protected](\n \"--bokeh/--no-bokeh\",\n \"bokeh\",\n default=None,\n required=False,\n help=\"Deprecated. See --dashboard/--no-dashboard.\",\n)\[email protected](\n \"--jupyter/--no-jupyter\",\n \"jupyter\",\n default=False,\n required=False,\n help=\"Start a Jupyter Server in the same process. Warning: This will make\"\n \"it possible for anyone with access to your dashboard address to run\"\n \"Python code\",\n)\[email protected](\"--show/--no-show\", default=False, help=\"Show web UI [default: --show]\")\[email protected](\n \"--dashboard-prefix\", type=str, default=\"\", help=\"Prefix for the dashboard app\"\n)\[email protected](\n \"--use-xheaders\",\n type=bool,\n default=False,\n show_default=True,\n help=\"User xheaders in dashboard app for ssl termination in header\",\n)\[email protected](\"--pid-file\", type=str, default=\"\", help=\"File to write the process PID\")\[email protected](\n \"--scheduler-file\",\n type=str,\n default=\"\",\n help=\"File to write connection information. 
\"\n \"This may be a good way to share connection information if your \"\n \"cluster is on a shared network file system.\",\n)\[email protected](\n \"--preload\",\n type=str,\n multiple=True,\n is_eager=True,\n help=\"Module that should be loaded by the scheduler process \"\n 'like \"foo.bar\" or \"/path/to/foo.py\".',\n)\[email protected](\n \"preload_argv\", nargs=-1, type=click.UNPROCESSED, callback=validate_preload_argv\n)\[email protected](\n \"--idle-timeout\",\n default=None,\n type=str,\n help=\"Time of inactivity after which to kill the scheduler\",\n)\[email protected]_option()\ndef main(\n host,\n port,\n bokeh_port,\n show,\n dashboard,\n bokeh,\n dashboard_prefix,\n use_xheaders,\n pid_file,\n tls_ca_file,\n tls_cert,\n tls_key,\n dashboard_address,\n jupyter,\n **kwargs,\n):\n g0, g1, g2 = gc.get_threshold() # https://github.com/dask/distributed/issues/1653\n gc.set_threshold(g0 * 3, g1 * 3, g2 * 3)\n\n enable_proctitle_on_current()\n enable_proctitle_on_children()\n\n if bokeh_port is not None:\n warnings.warn(\n \"The --bokeh-port flag has been renamed to --dashboard-address. \"\n \"Consider adding ``--dashboard-address :%d`` \" % bokeh_port\n )\n dashboard_address = bokeh_port\n if bokeh is not None:\n warnings.warn(\n \"The --bokeh/--no-bokeh flag has been renamed to --dashboard/--no-dashboard. \"\n )\n dashboard = bokeh\n\n if port is None and (not host or not re.search(r\":\\d\", host)):\n port = 8786\n\n sec = {\n k: v\n for k, v in [\n (\"tls_ca_file\", tls_ca_file),\n (\"tls_scheduler_cert\", tls_cert),\n (\"tls_scheduler_key\", tls_key),\n ]\n if v is not None\n }\n\n if not host and (tls_ca_file or tls_cert or tls_key):\n host = \"tls://\"\n\n if pid_file:\n with open(pid_file, \"w\") as f:\n f.write(str(os.getpid()))\n\n def del_pid_file():\n if os.path.exists(pid_file):\n os.remove(pid_file)\n\n atexit.register(del_pid_file)\n\n if sys.platform.startswith(\"linux\"):\n import resource # module fails importing on Windows\n\n soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)\n limit = max(soft, hard // 2)\n resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))\n\n async def run():\n logger.info(\"-\" * 47)\n\n scheduler = Scheduler(\n security=sec,\n host=host,\n port=port,\n dashboard=dashboard,\n dashboard_address=dashboard_address,\n http_prefix=dashboard_prefix,\n jupyter=jupyter,\n **kwargs,\n )\n logger.info(\"-\" * 47)\n\n async def wait_for_scheduler_to_finish():\n \"\"\"Wait for the scheduler to initialize and finish\"\"\"\n await scheduler\n await scheduler.finished()\n\n async def wait_for_signals_and_close():\n \"\"\"Wait for SIGINT or SIGTERM and close the scheduler upon receiving one of those signals\"\"\"\n await wait_for_signals()\n await scheduler.close()\n\n wait_for_signals_and_close_task = asyncio.create_task(\n wait_for_signals_and_close()\n )\n wait_for_scheduler_to_finish_task = asyncio.create_task(\n wait_for_scheduler_to_finish()\n )\n\n done, _ = await asyncio.wait(\n [wait_for_signals_and_close_task, wait_for_scheduler_to_finish_task],\n return_when=asyncio.FIRST_COMPLETED,\n )\n # Re-raise exceptions from done tasks\n [task.result() for task in done]\n logger.info(\"Stopped scheduler at %r\", scheduler.address)\n\n try:\n asyncio.run(run())\n finally:\n logger.info(\"End scheduler\")\n\n\nif __name__ == \"__main__\":\n main() # pragma: no cover\n", "path": "distributed/cli/dask_scheduler.py"}]} | 2,969 | 482 |
gh_patches_debug_30228 | rasdani/github-patches | git_diff | netket__netket-1256 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`QGTJacobian***(holomorphic=True)` for R->C models returns wrong output. Should error.
cc @attila-i-szabo
I agree that `holomorphic=True` is wrong in that case, but it should rather throw an error than give wrong results...
It took me a while to notice that this was the cause of some wrong optimisations.
```python
import netket as nk
import jax
L = 20
g = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)
hi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes)
ha = nk.operator.Ising(hilbert=hi, graph=g, h=1.0)
ma = nk.models.RBMModPhase(alpha=1, param_dtype=float)
sa = nk.sampler.MetropolisLocal(hi, n_chains=16)
vs = nk.vqs.MCState(sa, ma, n_samples=1000, n_discard_per_chain=100)
qgt_holo = nk.optimizer.qgt.QGTJacobianDense(holomorphic=True)
qgt_nonholo = nk.optimizer.qgt.QGTJacobianDense(holomorphic=False)
qgt_otf = nk.optimizer.qgt.QGTOnTheFly()
S_holo = vs.quantum_geometric_tensor(qgt_holo)
S_nonholo = vs.quantum_geometric_tensor(qgt_nonholo)
S_otf = vs.quantum_geometric_tensor(qgt_otf)
F = vs.parameters
r_holo = S_holo@F
r_nonholo = S_nonholo@F
r_otf = S_otf@F
jax.tree_map(lambda x,y:x-y, r_holo, r_nonholo)
```
</issue>
<code>
[start of netket/optimizer/qgt/qgt_jacobian_common.py]
1 # Copyright 2021 The NetKet Authors - All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from functools import partial
16 import warnings
17 from textwrap import dedent
18
19 import jax
20
21 import netket.jax as nkjax
22
23
24 @partial(jax.jit, static_argnums=(0, 4, 5))
25 def _choose_jacobian_mode(apply_fun, pars, model_state, samples, mode, holomorphic):
26 homogeneous_vars = nkjax.tree_ishomogeneous(pars)
27
28 if holomorphic is True:
29 if not homogeneous_vars:
30 warnings.warn(
31 dedent(
32 """The ansatz has non homogeneous variables, which might not behave well with the
33 holomorhic implementation.
34 Use `holomorphic=False` or mode='complex' for more accurate results but
35 lower performance.
36 """
37 )
38 )
39 mode = "holomorphic"
40 else:
41 leaf_iscomplex = nkjax.tree_leaf_iscomplex(pars)
42 complex_output = nkjax.is_complex(
43 jax.eval_shape(
44 apply_fun,
45 {"params": pars, **model_state},
46 samples.reshape(-1, samples.shape[-1]),
47 )
48 )
49
50 if complex_output:
51 if leaf_iscomplex:
52 if holomorphic is None:
53 warnings.warn(
54 dedent(
55 """
56 Complex-to-Complex model detected. Defaulting to `holomorphic=False` for
57 the implementation of QGTJacobianDense.
58 If your model is holomorphic, specify `holomorphic=True` to use a more
59 performant implementation.
60 To suppress this warning specify `holomorphic`.
61 """
62 ),
63 UserWarning,
64 )
65 mode = "complex"
66 else:
67 mode = "complex"
68 else:
69 mode = "real"
70
71 if mode == "real":
72 return 0
73 elif mode == "complex":
74 return 1
75 elif mode == "holomorphic":
76 return 2
77 else:
78 raise ValueError(f"unknown mode {mode}")
79
80
81 def choose_jacobian_mode(afun, pars, state, samples, *, mode, holomorphic):
82 """
83 Select an implementation of Jacobian
84 """
85 i = _choose_jacobian_mode(afun, pars, state, samples, mode, holomorphic).item()
86 if i == 0:
87 return "real"
88 elif i == 1:
89 return "complex"
90 elif i == 2:
91 return "holomorphic"
92 else:
93 raise ValueError(f"unknown mode {i}")
94
[end of netket/optimizer/qgt/qgt_jacobian_common.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/netket/optimizer/qgt/qgt_jacobian_common.py b/netket/optimizer/qgt/qgt_jacobian_common.py
--- a/netket/optimizer/qgt/qgt_jacobian_common.py
+++ b/netket/optimizer/qgt/qgt_jacobian_common.py
@@ -24,21 +24,37 @@
@partial(jax.jit, static_argnums=(0, 4, 5))
def _choose_jacobian_mode(apply_fun, pars, model_state, samples, mode, holomorphic):
homogeneous_vars = nkjax.tree_ishomogeneous(pars)
+ leaf_iscomplex = nkjax.tree_leaf_iscomplex(pars)
if holomorphic is True:
- if not homogeneous_vars:
+ if homogeneous_vars and leaf_iscomplex:
+ ## all complex parameters
+ mode = "holomorphic"
+ elif homogeneous_vars and not leaf_iscomplex:
+ # all real parameters
+ raise ValueError(
+ dedent(
+ """
+ A function with real parameters cannot be holomorphic.
+
+ Please remove the kw-arg `holomorphic=True`.
+ """
+ )
+ )
+ else:
+ # mixed complex and real parameters
warnings.warn(
dedent(
"""The ansatz has non homogeneous variables, which might not behave well with the
holomorhic implementation.
+
Use `holomorphic=False` or mode='complex' for more accurate results but
lower performance.
"""
)
)
- mode = "holomorphic"
+ mode = "holomorphic"
else:
- leaf_iscomplex = nkjax.tree_leaf_iscomplex(pars)
complex_output = nkjax.is_complex(
jax.eval_shape(
apply_fun,
| {"golden_diff": "diff --git a/netket/optimizer/qgt/qgt_jacobian_common.py b/netket/optimizer/qgt/qgt_jacobian_common.py\n--- a/netket/optimizer/qgt/qgt_jacobian_common.py\n+++ b/netket/optimizer/qgt/qgt_jacobian_common.py\n@@ -24,21 +24,37 @@\n @partial(jax.jit, static_argnums=(0, 4, 5))\n def _choose_jacobian_mode(apply_fun, pars, model_state, samples, mode, holomorphic):\n homogeneous_vars = nkjax.tree_ishomogeneous(pars)\n+ leaf_iscomplex = nkjax.tree_leaf_iscomplex(pars)\n \n if holomorphic is True:\n- if not homogeneous_vars:\n+ if homogeneous_vars and leaf_iscomplex:\n+ ## all complex parameters\n+ mode = \"holomorphic\"\n+ elif homogeneous_vars and not leaf_iscomplex:\n+ # all real parameters\n+ raise ValueError(\n+ dedent(\n+ \"\"\"\n+ A function with real parameters cannot be holomorphic. \n+ \n+ Please remove the kw-arg `holomorphic=True`.\n+ \"\"\"\n+ )\n+ )\n+ else:\n+ # mixed complex and real parameters\n warnings.warn(\n dedent(\n \"\"\"The ansatz has non homogeneous variables, which might not behave well with the\n holomorhic implementation.\n+\n Use `holomorphic=False` or mode='complex' for more accurate results but\n lower performance.\n \"\"\"\n )\n )\n- mode = \"holomorphic\"\n+ mode = \"holomorphic\"\n else:\n- leaf_iscomplex = nkjax.tree_leaf_iscomplex(pars)\n complex_output = nkjax.is_complex(\n jax.eval_shape(\n apply_fun,\n", "issue": "`QGTJacobian***(holomorphic=True)` for R->C models returns wrong output. Should error.\ncc @attila-i-szabo\r\n\r\nI agree that `holomorphic=True`\u00a0is wrong in that case, but it should rather throw an error than give wrong results...\r\nIt took me a while to notice that this was the cause of some wrong optimisations.\r\n\r\n```python\r\nimport netket as nk\r\nimport jax\r\n\r\nL = 20\r\ng = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)\r\nhi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes)\r\nha = nk.operator.Ising(hilbert=hi, graph=g, h=1.0)\r\nma = nk.models.RBMModPhase(alpha=1, param_dtype=float)\r\nsa = nk.sampler.MetropolisLocal(hi, n_chains=16)\r\nvs = nk.vqs.MCState(sa, ma, n_samples=1000, n_discard_per_chain=100)\r\n\r\nqgt_holo = nk.optimizer.qgt.QGTJacobianDense(holomorphic=True)\r\nqgt_nonholo = nk.optimizer.qgt.QGTJacobianDense(holomorphic=False)\r\nqgt_otf = nk.optimizer.qgt.QGTOnTheFly()\r\n\r\nS_holo = vs.quantum_geometric_tensor(qgt_holo)\r\nS_nonholo = vs.quantum_geometric_tensor(qgt_nonholo)\r\nS_otf = vs.quantum_geometric_tensor(qgt_otf)\r\n\r\nF = vs.parameters\r\n\r\nr_holo = S_holo@F\r\nr_nonholo = S_nonholo@F\r\nr_otf = S_otf@F\r\n\r\njax.tree_map(lambda x,y:x-y, r_holo, r_nonholo)\r\n```\n`QGTJacobian***(holomorphic=True)` for R->C models returns wrong output. 
Should error.\ncc @attila-i-szabo\r\n\r\nI agree that `holomorphic=True`\u00a0is wrong in that case, but it should rather throw an error than give wrong results...\r\nIt took me a while to notice that this was the cause of some wrong optimisations.\r\n\r\n```python\r\nimport netket as nk\r\nimport jax\r\n\r\nL = 20\r\ng = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)\r\nhi = nk.hilbert.Spin(s=1 / 2, N=g.n_nodes)\r\nha = nk.operator.Ising(hilbert=hi, graph=g, h=1.0)\r\nma = nk.models.RBMModPhase(alpha=1, param_dtype=float)\r\nsa = nk.sampler.MetropolisLocal(hi, n_chains=16)\r\nvs = nk.vqs.MCState(sa, ma, n_samples=1000, n_discard_per_chain=100)\r\n\r\nqgt_holo = nk.optimizer.qgt.QGTJacobianDense(holomorphic=True)\r\nqgt_nonholo = nk.optimizer.qgt.QGTJacobianDense(holomorphic=False)\r\nqgt_otf = nk.optimizer.qgt.QGTOnTheFly()\r\n\r\nS_holo = vs.quantum_geometric_tensor(qgt_holo)\r\nS_nonholo = vs.quantum_geometric_tensor(qgt_nonholo)\r\nS_otf = vs.quantum_geometric_tensor(qgt_otf)\r\n\r\nF = vs.parameters\r\n\r\nr_holo = S_holo@F\r\nr_nonholo = S_nonholo@F\r\nr_otf = S_otf@F\r\n\r\njax.tree_map(lambda x,y:x-y, r_holo, r_nonholo)\r\n```\n", "before_files": [{"content": "# Copyright 2021 The NetKet Authors - All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom functools import partial\nimport warnings\nfrom textwrap import dedent\n\nimport jax\n\nimport netket.jax as nkjax\n\n\n@partial(jax.jit, static_argnums=(0, 4, 5))\ndef _choose_jacobian_mode(apply_fun, pars, model_state, samples, mode, holomorphic):\n homogeneous_vars = nkjax.tree_ishomogeneous(pars)\n\n if holomorphic is True:\n if not homogeneous_vars:\n warnings.warn(\n dedent(\n \"\"\"The ansatz has non homogeneous variables, which might not behave well with the\n holomorhic implementation.\n Use `holomorphic=False` or mode='complex' for more accurate results but\n lower performance.\n \"\"\"\n )\n )\n mode = \"holomorphic\"\n else:\n leaf_iscomplex = nkjax.tree_leaf_iscomplex(pars)\n complex_output = nkjax.is_complex(\n jax.eval_shape(\n apply_fun,\n {\"params\": pars, **model_state},\n samples.reshape(-1, samples.shape[-1]),\n )\n )\n\n if complex_output:\n if leaf_iscomplex:\n if holomorphic is None:\n warnings.warn(\n dedent(\n \"\"\"\n Complex-to-Complex model detected. 
Defaulting to `holomorphic=False` for\n the implementation of QGTJacobianDense.\n If your model is holomorphic, specify `holomorphic=True` to use a more\n performant implementation.\n To suppress this warning specify `holomorphic`.\n \"\"\"\n ),\n UserWarning,\n )\n mode = \"complex\"\n else:\n mode = \"complex\"\n else:\n mode = \"real\"\n\n if mode == \"real\":\n return 0\n elif mode == \"complex\":\n return 1\n elif mode == \"holomorphic\":\n return 2\n else:\n raise ValueError(f\"unknown mode {mode}\")\n\n\ndef choose_jacobian_mode(afun, pars, state, samples, *, mode, holomorphic):\n \"\"\"\n Select an implementation of Jacobian\n \"\"\"\n i = _choose_jacobian_mode(afun, pars, state, samples, mode, holomorphic).item()\n if i == 0:\n return \"real\"\n elif i == 1:\n return \"complex\"\n elif i == 2:\n return \"holomorphic\"\n else:\n raise ValueError(f\"unknown mode {i}\")\n", "path": "netket/optimizer/qgt/qgt_jacobian_common.py"}]} | 2,145 | 380 |
gh_patches_debug_63339 | rasdani/github-patches | git_diff | sanic-org__sanic-2452 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Error Handler mismatch warning
The warning for error handler mismatch is triggering on v22.3 accidentally when setting `FALLBACK_ERROR_FORMAT`.
```python
app.config.FALLBACK_ERROR_FORMAT = "text"
@app.get("/")
async def handler(request: Request):
1 / 0
```
This can be resolved as follows:
```python
@classmethod
def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):
if error_handler._fallback is not _default:
if config._FALLBACK_ERROR_FORMAT == error_handler._fallback: # <<<<< This line needs this change
return error_handler.fallback
error_logger.warning(
"Conflicting error fallback values were found in the "
"error handler and in the app.config while handling an "
"exception. Using the value from app.config."
)
return config.FALLBACK_ERROR_FORMAT
```
https://github.com/sanic-org/sanic/blob/5d683c6ea4b615e80c51d80189436437b824cce6/sanic/handlers.py#L79
</issue>
<code>
[start of sanic/handlers.py]
1 from __future__ import annotations
2
3 from typing import Dict, List, Optional, Tuple, Type, Union
4
5 from sanic.config import Config
6 from sanic.errorpages import (
7 DEFAULT_FORMAT,
8 BaseRenderer,
9 TextRenderer,
10 exception_response,
11 )
12 from sanic.exceptions import (
13 ContentRangeError,
14 HeaderNotFound,
15 InvalidRangeType,
16 SanicException,
17 )
18 from sanic.helpers import Default, _default
19 from sanic.log import deprecation, error_logger
20 from sanic.models.handler_types import RouteHandler
21 from sanic.response import text
22
23
24 class ErrorHandler:
25 """
26 Provide :class:`sanic.app.Sanic` application with a mechanism to handle
27 and process any and all uncaught exceptions in a way the application
28 developer will set fit.
29
30 This error handling framework is built into the core that can be extended
31 by the developers to perform a wide range of tasks from recording the error
32 stats to reporting them to an external service that can be used for
33 realtime alerting system.
34
35 """
36
37 def __init__(
38 self,
39 fallback: Union[str, Default] = _default,
40 base: Type[BaseRenderer] = TextRenderer,
41 ):
42 self.cached_handlers: Dict[
43 Tuple[Type[BaseException], Optional[str]], Optional[RouteHandler]
44 ] = {}
45 self.debug = False
46 self._fallback = fallback
47 self.base = base
48
49 if fallback is not _default:
50 self._warn_fallback_deprecation()
51
52 @property
53 def fallback(self): # no cov
54 # This is for backwards compat and can be removed in v22.6
55 if self._fallback is _default:
56 return DEFAULT_FORMAT
57 return self._fallback
58
59 @fallback.setter
60 def fallback(self, value: str): # no cov
61 self._warn_fallback_deprecation()
62 if not isinstance(value, str):
63 raise SanicException(
64 f"Cannot set error handler fallback to: value={value}"
65 )
66 self._fallback = value
67
68 @staticmethod
69 def _warn_fallback_deprecation():
70 deprecation(
71 "Setting the ErrorHandler fallback value directly is "
72 "deprecated and no longer supported. This feature will "
73 "be removed in v22.6. Instead, use "
74 "app.config.FALLBACK_ERROR_FORMAT.",
75 22.6,
76 )
77
78 @classmethod
79 def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):
80 if error_handler._fallback is not _default:
81 if config._FALLBACK_ERROR_FORMAT is _default:
82 return error_handler.fallback
83
84 error_logger.warning(
85 "Conflicting error fallback values were found in the "
86 "error handler and in the app.config while handling an "
87 "exception. Using the value from app.config."
88 )
89 return config.FALLBACK_ERROR_FORMAT
90
91 @classmethod
92 def finalize(
93 cls,
94 error_handler: ErrorHandler,
95 config: Config,
96 fallback: Optional[str] = None,
97 ):
98 if fallback:
99 deprecation(
100 "Setting the ErrorHandler fallback value via finalize() "
101 "is deprecated and no longer supported. This feature will "
102 "be removed in v22.6. Instead, use "
103 "app.config.FALLBACK_ERROR_FORMAT.",
104 22.6,
105 )
106
107 if not fallback:
108 fallback = config.FALLBACK_ERROR_FORMAT
109
110 if fallback != DEFAULT_FORMAT:
111 if error_handler._fallback is not _default:
112 error_logger.warning(
113 f"Setting the fallback value to {fallback}. This changes "
114 "the current non-default value "
115 f"'{error_handler._fallback}'."
116 )
117 error_handler._fallback = fallback
118
119 if not isinstance(error_handler, cls):
120 error_logger.warning(
121 f"Error handler is non-conforming: {type(error_handler)}"
122 )
123
124 def _full_lookup(self, exception, route_name: Optional[str] = None):
125 return self.lookup(exception, route_name)
126
127 def add(self, exception, handler, route_names: Optional[List[str]] = None):
128 """
129 Add a new exception handler to an already existing handler object.
130
131 :param exception: Type of exception that need to be handled
132 :param handler: Reference to the method that will handle the exception
133
134 :type exception: :class:`sanic.exceptions.SanicException` or
135 :class:`Exception`
136 :type handler: ``function``
137
138 :return: None
139 """
140 if route_names:
141 for route in route_names:
142 self.cached_handlers[(exception, route)] = handler
143 else:
144 self.cached_handlers[(exception, None)] = handler
145
146 def lookup(self, exception, route_name: Optional[str] = None):
147 """
148 Lookup the existing instance of :class:`ErrorHandler` and fetch the
149 registered handler for a specific type of exception.
150
151 This method leverages a dict lookup to speedup the retrieval process.
152
153 :param exception: Type of exception
154
155 :type exception: :class:`sanic.exceptions.SanicException` or
156 :class:`Exception`
157
158 :return: Registered function if found ``None`` otherwise
159 """
160 exception_class = type(exception)
161
162 for name in (route_name, None):
163 exception_key = (exception_class, name)
164 handler = self.cached_handlers.get(exception_key)
165 if handler:
166 return handler
167
168 for name in (route_name, None):
169 for ancestor in type.mro(exception_class):
170 exception_key = (ancestor, name)
171 if exception_key in self.cached_handlers:
172 handler = self.cached_handlers[exception_key]
173 self.cached_handlers[
174 (exception_class, route_name)
175 ] = handler
176 return handler
177
178 if ancestor is BaseException:
179 break
180 self.cached_handlers[(exception_class, route_name)] = None
181 handler = None
182 return handler
183
184 _lookup = _full_lookup
185
186 def response(self, request, exception):
187 """Fetches and executes an exception handler and returns a response
188 object
189
190 :param request: Instance of :class:`sanic.request.Request`
191 :param exception: Exception to handle
192
193 :type request: :class:`sanic.request.Request`
194 :type exception: :class:`sanic.exceptions.SanicException` or
195 :class:`Exception`
196
197 :return: Wrap the return value obtained from :func:`default`
198 or registered handler for that type of exception.
199 """
200 route_name = request.name if request else None
201 handler = self._lookup(exception, route_name)
202 response = None
203 try:
204 if handler:
205 response = handler(request, exception)
206 if response is None:
207 response = self.default(request, exception)
208 except Exception:
209 try:
210 url = repr(request.url)
211 except AttributeError: # no cov
212 url = "unknown"
213 response_message = (
214 "Exception raised in exception handler " '"%s" for uri: %s'
215 )
216 error_logger.exception(response_message, handler.__name__, url)
217
218 if self.debug:
219 return text(response_message % (handler.__name__, url), 500)
220 else:
221 return text("An error occurred while handling an error", 500)
222 return response
223
224 def default(self, request, exception):
225 """
226 Provide a default behavior for the objects of :class:`ErrorHandler`.
227 If a developer chooses to extent the :class:`ErrorHandler` they can
228 provide a custom implementation for this method to behave in a way
229 they see fit.
230
231 :param request: Incoming request
232 :param exception: Exception object
233
234 :type request: :class:`sanic.request.Request`
235 :type exception: :class:`sanic.exceptions.SanicException` or
236 :class:`Exception`
237 :return:
238 """
239 self.log(request, exception)
240 fallback = ErrorHandler._get_fallback_value(self, request.app.config)
241 return exception_response(
242 request,
243 exception,
244 debug=self.debug,
245 base=self.base,
246 fallback=fallback,
247 )
248
249 @staticmethod
250 def log(request, exception):
251 quiet = getattr(exception, "quiet", False)
252 noisy = getattr(request.app.config, "NOISY_EXCEPTIONS", False)
253 if quiet is False or noisy is True:
254 try:
255 url = repr(request.url)
256 except AttributeError: # no cov
257 url = "unknown"
258
259 error_logger.exception(
260 "Exception occurred while handling uri: %s", url
261 )
262
263
264 class ContentRangeHandler:
265 """
266 A mechanism to parse and process the incoming request headers to
267 extract the content range information.
268
269 :param request: Incoming api request
270 :param stats: Stats related to the content
271
272 :type request: :class:`sanic.request.Request`
273 :type stats: :class:`posix.stat_result`
274
275 :ivar start: Content Range start
276 :ivar end: Content Range end
277 :ivar size: Length of the content
278 :ivar total: Total size identified by the :class:`posix.stat_result`
279 instance
280 :ivar ContentRangeHandler.headers: Content range header ``dict``
281 """
282
283 __slots__ = ("start", "end", "size", "total", "headers")
284
285 def __init__(self, request, stats):
286 self.total = stats.st_size
287 _range = request.headers.getone("range", None)
288 if _range is None:
289 raise HeaderNotFound("Range Header Not Found")
290 unit, _, value = tuple(map(str.strip, _range.partition("=")))
291 if unit != "bytes":
292 raise InvalidRangeType(
293 "%s is not a valid Range Type" % (unit,), self
294 )
295 start_b, _, end_b = tuple(map(str.strip, value.partition("-")))
296 try:
297 self.start = int(start_b) if start_b else None
298 except ValueError:
299 raise ContentRangeError(
300 "'%s' is invalid for Content Range" % (start_b,), self
301 )
302 try:
303 self.end = int(end_b) if end_b else None
304 except ValueError:
305 raise ContentRangeError(
306 "'%s' is invalid for Content Range" % (end_b,), self
307 )
308 if self.end is None:
309 if self.start is None:
310 raise ContentRangeError(
311 "Invalid for Content Range parameters", self
312 )
313 else:
314 # this case represents `Content-Range: bytes 5-`
315 self.end = self.total - 1
316 else:
317 if self.start is None:
318 # this case represents `Content-Range: bytes -5`
319 self.start = self.total - self.end
320 self.end = self.total - 1
321 if self.start >= self.end:
322 raise ContentRangeError(
323 "Invalid for Content Range parameters", self
324 )
325 self.size = self.end - self.start + 1
326 self.headers = {
327 "Content-Range": "bytes %s-%s/%s"
328 % (self.start, self.end, self.total)
329 }
330
331 def __bool__(self):
332 return self.size > 0
333
[end of sanic/handlers.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sanic/handlers.py b/sanic/handlers.py
--- a/sanic/handlers.py
+++ b/sanic/handlers.py
@@ -78,7 +78,7 @@
@classmethod
def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):
if error_handler._fallback is not _default:
- if config._FALLBACK_ERROR_FORMAT is _default:
+ if config._FALLBACK_ERROR_FORMAT == error_handler._fallback:
return error_handler.fallback
error_logger.warning(
| {"golden_diff": "diff --git a/sanic/handlers.py b/sanic/handlers.py\n--- a/sanic/handlers.py\n+++ b/sanic/handlers.py\n@@ -78,7 +78,7 @@\n @classmethod\n def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):\n if error_handler._fallback is not _default:\n- if config._FALLBACK_ERROR_FORMAT is _default:\n+ if config._FALLBACK_ERROR_FORMAT == error_handler._fallback:\n return error_handler.fallback\n \n error_logger.warning(\n", "issue": "Error Handler mismatch warning\nThe warning for error handler mismatch is triggering on v22.3 accidentally when setting `FALLBACK_ERROR_FORMAT`.\r\n\r\n```python\r\napp.config.FALLBACK_ERROR_FORMAT = \"text\"\r\n\r\n\r\[email protected](\"/\")\r\nasync def handler(request: Request):\r\n 1 / 0\r\n```\r\n\r\nThis can be resolved as follows:\r\n\r\n```python\r\n @classmethod\r\n def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):\r\n if error_handler._fallback is not _default:\r\n if config._FALLBACK_ERROR_FORMAT == error_handler._fallback: # <<<<< This line needs this change\r\n return error_handler.fallback\r\n\r\n error_logger.warning(\r\n \"Conflicting error fallback values were found in the \"\r\n \"error handler and in the app.config while handling an \"\r\n \"exception. Using the value from app.config.\"\r\n )\r\n return config.FALLBACK_ERROR_FORMAT\r\n```\r\n\r\nhttps://github.com/sanic-org/sanic/blob/5d683c6ea4b615e80c51d80189436437b824cce6/sanic/handlers.py#L79\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nfrom sanic.config import Config\nfrom sanic.errorpages import (\n DEFAULT_FORMAT,\n BaseRenderer,\n TextRenderer,\n exception_response,\n)\nfrom sanic.exceptions import (\n ContentRangeError,\n HeaderNotFound,\n InvalidRangeType,\n SanicException,\n)\nfrom sanic.helpers import Default, _default\nfrom sanic.log import deprecation, error_logger\nfrom sanic.models.handler_types import RouteHandler\nfrom sanic.response import text\n\n\nclass ErrorHandler:\n \"\"\"\n Provide :class:`sanic.app.Sanic` application with a mechanism to handle\n and process any and all uncaught exceptions in a way the application\n developer will set fit.\n\n This error handling framework is built into the core that can be extended\n by the developers to perform a wide range of tasks from recording the error\n stats to reporting them to an external service that can be used for\n realtime alerting system.\n\n \"\"\"\n\n def __init__(\n self,\n fallback: Union[str, Default] = _default,\n base: Type[BaseRenderer] = TextRenderer,\n ):\n self.cached_handlers: Dict[\n Tuple[Type[BaseException], Optional[str]], Optional[RouteHandler]\n ] = {}\n self.debug = False\n self._fallback = fallback\n self.base = base\n\n if fallback is not _default:\n self._warn_fallback_deprecation()\n\n @property\n def fallback(self): # no cov\n # This is for backwards compat and can be removed in v22.6\n if self._fallback is _default:\n return DEFAULT_FORMAT\n return self._fallback\n\n @fallback.setter\n def fallback(self, value: str): # no cov\n self._warn_fallback_deprecation()\n if not isinstance(value, str):\n raise SanicException(\n f\"Cannot set error handler fallback to: value={value}\"\n )\n self._fallback = value\n\n @staticmethod\n def _warn_fallback_deprecation():\n deprecation(\n \"Setting the ErrorHandler fallback value directly is \"\n \"deprecated and no longer supported. This feature will \"\n \"be removed in v22.6. 
Instead, use \"\n \"app.config.FALLBACK_ERROR_FORMAT.\",\n 22.6,\n )\n\n @classmethod\n def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):\n if error_handler._fallback is not _default:\n if config._FALLBACK_ERROR_FORMAT is _default:\n return error_handler.fallback\n\n error_logger.warning(\n \"Conflicting error fallback values were found in the \"\n \"error handler and in the app.config while handling an \"\n \"exception. Using the value from app.config.\"\n )\n return config.FALLBACK_ERROR_FORMAT\n\n @classmethod\n def finalize(\n cls,\n error_handler: ErrorHandler,\n config: Config,\n fallback: Optional[str] = None,\n ):\n if fallback:\n deprecation(\n \"Setting the ErrorHandler fallback value via finalize() \"\n \"is deprecated and no longer supported. This feature will \"\n \"be removed in v22.6. Instead, use \"\n \"app.config.FALLBACK_ERROR_FORMAT.\",\n 22.6,\n )\n\n if not fallback:\n fallback = config.FALLBACK_ERROR_FORMAT\n\n if fallback != DEFAULT_FORMAT:\n if error_handler._fallback is not _default:\n error_logger.warning(\n f\"Setting the fallback value to {fallback}. This changes \"\n \"the current non-default value \"\n f\"'{error_handler._fallback}'.\"\n )\n error_handler._fallback = fallback\n\n if not isinstance(error_handler, cls):\n error_logger.warning(\n f\"Error handler is non-conforming: {type(error_handler)}\"\n )\n\n def _full_lookup(self, exception, route_name: Optional[str] = None):\n return self.lookup(exception, route_name)\n\n def add(self, exception, handler, route_names: Optional[List[str]] = None):\n \"\"\"\n Add a new exception handler to an already existing handler object.\n\n :param exception: Type of exception that need to be handled\n :param handler: Reference to the method that will handle the exception\n\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n :type handler: ``function``\n\n :return: None\n \"\"\"\n if route_names:\n for route in route_names:\n self.cached_handlers[(exception, route)] = handler\n else:\n self.cached_handlers[(exception, None)] = handler\n\n def lookup(self, exception, route_name: Optional[str] = None):\n \"\"\"\n Lookup the existing instance of :class:`ErrorHandler` and fetch the\n registered handler for a specific type of exception.\n\n This method leverages a dict lookup to speedup the retrieval process.\n\n :param exception: Type of exception\n\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n\n :return: Registered function if found ``None`` otherwise\n \"\"\"\n exception_class = type(exception)\n\n for name in (route_name, None):\n exception_key = (exception_class, name)\n handler = self.cached_handlers.get(exception_key)\n if handler:\n return handler\n\n for name in (route_name, None):\n for ancestor in type.mro(exception_class):\n exception_key = (ancestor, name)\n if exception_key in self.cached_handlers:\n handler = self.cached_handlers[exception_key]\n self.cached_handlers[\n (exception_class, route_name)\n ] = handler\n return handler\n\n if ancestor is BaseException:\n break\n self.cached_handlers[(exception_class, route_name)] = None\n handler = None\n return handler\n\n _lookup = _full_lookup\n\n def response(self, request, exception):\n \"\"\"Fetches and executes an exception handler and returns a response\n object\n\n :param request: Instance of :class:`sanic.request.Request`\n :param exception: Exception to handle\n\n :type request: :class:`sanic.request.Request`\n :type exception: 
:class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n\n :return: Wrap the return value obtained from :func:`default`\n or registered handler for that type of exception.\n \"\"\"\n route_name = request.name if request else None\n handler = self._lookup(exception, route_name)\n response = None\n try:\n if handler:\n response = handler(request, exception)\n if response is None:\n response = self.default(request, exception)\n except Exception:\n try:\n url = repr(request.url)\n except AttributeError: # no cov\n url = \"unknown\"\n response_message = (\n \"Exception raised in exception handler \" '\"%s\" for uri: %s'\n )\n error_logger.exception(response_message, handler.__name__, url)\n\n if self.debug:\n return text(response_message % (handler.__name__, url), 500)\n else:\n return text(\"An error occurred while handling an error\", 500)\n return response\n\n def default(self, request, exception):\n \"\"\"\n Provide a default behavior for the objects of :class:`ErrorHandler`.\n If a developer chooses to extent the :class:`ErrorHandler` they can\n provide a custom implementation for this method to behave in a way\n they see fit.\n\n :param request: Incoming request\n :param exception: Exception object\n\n :type request: :class:`sanic.request.Request`\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n :return:\n \"\"\"\n self.log(request, exception)\n fallback = ErrorHandler._get_fallback_value(self, request.app.config)\n return exception_response(\n request,\n exception,\n debug=self.debug,\n base=self.base,\n fallback=fallback,\n )\n\n @staticmethod\n def log(request, exception):\n quiet = getattr(exception, \"quiet\", False)\n noisy = getattr(request.app.config, \"NOISY_EXCEPTIONS\", False)\n if quiet is False or noisy is True:\n try:\n url = repr(request.url)\n except AttributeError: # no cov\n url = \"unknown\"\n\n error_logger.exception(\n \"Exception occurred while handling uri: %s\", url\n )\n\n\nclass ContentRangeHandler:\n \"\"\"\n A mechanism to parse and process the incoming request headers to\n extract the content range information.\n\n :param request: Incoming api request\n :param stats: Stats related to the content\n\n :type request: :class:`sanic.request.Request`\n :type stats: :class:`posix.stat_result`\n\n :ivar start: Content Range start\n :ivar end: Content Range end\n :ivar size: Length of the content\n :ivar total: Total size identified by the :class:`posix.stat_result`\n instance\n :ivar ContentRangeHandler.headers: Content range header ``dict``\n \"\"\"\n\n __slots__ = (\"start\", \"end\", \"size\", \"total\", \"headers\")\n\n def __init__(self, request, stats):\n self.total = stats.st_size\n _range = request.headers.getone(\"range\", None)\n if _range is None:\n raise HeaderNotFound(\"Range Header Not Found\")\n unit, _, value = tuple(map(str.strip, _range.partition(\"=\")))\n if unit != \"bytes\":\n raise InvalidRangeType(\n \"%s is not a valid Range Type\" % (unit,), self\n )\n start_b, _, end_b = tuple(map(str.strip, value.partition(\"-\")))\n try:\n self.start = int(start_b) if start_b else None\n except ValueError:\n raise ContentRangeError(\n \"'%s' is invalid for Content Range\" % (start_b,), self\n )\n try:\n self.end = int(end_b) if end_b else None\n except ValueError:\n raise ContentRangeError(\n \"'%s' is invalid for Content Range\" % (end_b,), self\n )\n if self.end is None:\n if self.start is None:\n raise ContentRangeError(\n \"Invalid for Content Range parameters\", self\n )\n else:\n # this case represents 
`Content-Range: bytes 5-`\n self.end = self.total - 1\n else:\n if self.start is None:\n # this case represents `Content-Range: bytes -5`\n self.start = self.total - self.end\n self.end = self.total - 1\n if self.start >= self.end:\n raise ContentRangeError(\n \"Invalid for Content Range parameters\", self\n )\n self.size = self.end - self.start + 1\n self.headers = {\n \"Content-Range\": \"bytes %s-%s/%s\"\n % (self.start, self.end, self.total)\n }\n\n def __bool__(self):\n return self.size > 0\n", "path": "sanic/handlers.py"}]} | 4,084 | 123 |
gh_patches_debug_28949 | rasdani/github-patches | git_diff | google__mobly-417 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`SnippetEvent` should be loggable
Right now logging the event object directly does not show the content of the event, which makes debugging difficult.
`logging.info(event)` should log the full content of the event.
</issue>
<code>
[start of setup.py]
1 # Copyright 2016 Google Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import platform
16 import setuptools
17 from setuptools.command import test
18 import sys
19
20 install_requires = [
21 'future',
22 # mock-1.0.1 is the last version compatible with setuptools <17.1,
23 # which is what comes with Ubuntu 14.04 LTS.
24 'mock<=1.0.1',
25 'portpicker',
26 'psutil',
27 'pytz',
28 'pyyaml',
29 'timeout_decorator',
30 'pyserial'
31 ]
32
33 if sys.version_info < (3, ):
34 install_requires.extend([
35 'enum34',
36 # "futures" is needed for py2 compatibility and it only works in 2.7
37 'futures',
38 ])
39
40 if platform.system() == 'Windows':
41 install_requires.append('pywin32')
42
43
44 class PyTest(test.test):
45 """Class used to execute unit tests using PyTest. This allows us to execute
46 unit tests without having to install the package.
47 """
48
49 def finalize_options(self):
50 test.test.finalize_options(self)
51 self.test_args = ['-x', "tests"]
52 self.test_suite = True
53
54 def run_tests(self):
55 import pytest
56 errno = pytest.main(self.test_args)
57 sys.exit(errno)
58
59
60 def main():
61 setuptools.setup(
62 name='mobly',
63 version='1.7.1',
64 maintainer = 'Ang Li',
65 maintainer_email = '[email protected]',
66 description='Automation framework for special end-to-end test cases',
67 license='Apache2.0',
68 url = 'https://github.com/google/mobly',
69 download_url = 'https://github.com/google/mobly/tarball/1.7.1',
70 packages=setuptools.find_packages(),
71 include_package_data=False,
72 scripts=['tools/sl4a_shell.py', 'tools/snippet_shell.py'],
73 tests_require=['pytest'],
74 install_requires=install_requires,
75 cmdclass={'test': PyTest}, )
76
77
78 if __name__ == '__main__':
79 main()
80
[end of setup.py]
[start of mobly/controllers/android_device_lib/snippet_event.py]
1 # Copyright 2017 Google Inc.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15
16 def from_dict(event_dict):
17 """Create a SnippetEvent object from a dictionary.
18
19 Args:
20 event_dict: a dictionary representing an event.
21
22 Returns:
23 A SnippetEvent object.
24 """
25 return SnippetEvent(
26 callback_id=event_dict['callbackId'],
27 name=event_dict['name'],
28 creation_time=event_dict['time'],
29 data=event_dict['data'])
30
31
32 class SnippetEvent(object):
33 """The class that represents callback events for mobly snippet library.
34
35 Attributes:
36 callback_id: string, the callback ID associated with the event.
37 name: string, the name of the event.
38 creation_time: int, the epoch time when the event is created on the
39 Rpc server side.
40 data: dictionary, the data held by the event. Can be None.
41 """
42
43 def __init__(self, callback_id, name, creation_time, data):
44 self.callback_id = callback_id
45 self.name = name
46 self.creation_time = creation_time
47 self.data = data
48
[end of mobly/controllers/android_device_lib/snippet_event.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mobly/controllers/android_device_lib/snippet_event.py b/mobly/controllers/android_device_lib/snippet_event.py
--- a/mobly/controllers/android_device_lib/snippet_event.py
+++ b/mobly/controllers/android_device_lib/snippet_event.py
@@ -3,9 +3,9 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -45,3 +45,8 @@
self.name = name
self.creation_time = creation_time
self.data = data
+
+ def __repr__(self):
+ return ('SnippetEvent(callback_id: %s, name: %s, creation_time: %s, '
+ 'data: %s)') % (self.callback_id, self.name,
+ self.creation_time, self.data)
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -70,7 +70,7 @@
packages=setuptools.find_packages(),
include_package_data=False,
scripts=['tools/sl4a_shell.py', 'tools/snippet_shell.py'],
- tests_require=['pytest'],
+ tests_require=['pytest', 'testfixtures'],
install_requires=install_requires,
cmdclass={'test': PyTest}, )
| {"golden_diff": "diff --git a/mobly/controllers/android_device_lib/snippet_event.py b/mobly/controllers/android_device_lib/snippet_event.py\n--- a/mobly/controllers/android_device_lib/snippet_event.py\n+++ b/mobly/controllers/android_device_lib/snippet_event.py\n@@ -3,9 +3,9 @@\n # Licensed under the Apache License, Version 2.0 (the \"License\");\n # you may not use this file except in compliance with the License.\n # You may obtain a copy of the License at\n-# \n+#\n # http://www.apache.org/licenses/LICENSE-2.0\n-# \n+#\n # Unless required by applicable law or agreed to in writing, software\n # distributed under the License is distributed on an \"AS IS\" BASIS,\n # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n@@ -45,3 +45,8 @@\n self.name = name\n self.creation_time = creation_time\n self.data = data\n+\n+ def __repr__(self):\n+ return ('SnippetEvent(callback_id: %s, name: %s, creation_time: %s, '\n+ 'data: %s)') % (self.callback_id, self.name,\n+ self.creation_time, self.data)\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -70,7 +70,7 @@\n packages=setuptools.find_packages(),\n include_package_data=False,\n scripts=['tools/sl4a_shell.py', 'tools/snippet_shell.py'],\n- tests_require=['pytest'],\n+ tests_require=['pytest', 'testfixtures'],\n install_requires=install_requires,\n cmdclass={'test': PyTest}, )\n", "issue": "`SnippetEvent` should be loggable\nRight now logging event object directly does not show the content of the event, which makes debugging difficult.\r\n`logging.info(event)` should log the full content of the event.\n", "before_files": [{"content": "# Copyright 2016 Google Inc.\n# \n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n# http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport platform\nimport setuptools\nfrom setuptools.command import test\nimport sys\n\ninstall_requires = [\n 'future',\n # mock-1.0.1 is the last version compatible with setuptools <17.1,\n # which is what comes with Ubuntu 14.04 LTS.\n 'mock<=1.0.1',\n 'portpicker',\n 'psutil',\n 'pytz',\n 'pyyaml',\n 'timeout_decorator',\n 'pyserial'\n]\n\nif sys.version_info < (3, ):\n install_requires.extend([\n 'enum34',\n # \"futures\" is needed for py2 compatibility and it only works in 2.7\n 'futures',\n ])\n\nif platform.system() == 'Windows':\n install_requires.append('pywin32')\n\n\nclass PyTest(test.test):\n \"\"\"Class used to execute unit tests using PyTest. 
This allows us to execute\n unit tests without having to install the package.\n \"\"\"\n\n def finalize_options(self):\n test.test.finalize_options(self)\n self.test_args = ['-x', \"tests\"]\n self.test_suite = True\n\n def run_tests(self):\n import pytest\n errno = pytest.main(self.test_args)\n sys.exit(errno)\n\n\ndef main():\n setuptools.setup(\n name='mobly',\n version='1.7.1',\n maintainer = 'Ang Li',\n maintainer_email = '[email protected]',\n description='Automation framework for special end-to-end test cases',\n license='Apache2.0',\n url = 'https://github.com/google/mobly',\n download_url = 'https://github.com/google/mobly/tarball/1.7.1',\n packages=setuptools.find_packages(),\n include_package_data=False,\n scripts=['tools/sl4a_shell.py', 'tools/snippet_shell.py'],\n tests_require=['pytest'],\n install_requires=install_requires,\n cmdclass={'test': PyTest}, )\n\n\nif __name__ == '__main__':\n main()\n", "path": "setup.py"}, {"content": "# Copyright 2017 Google Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n# \n# http://www.apache.org/licenses/LICENSE-2.0\n# \n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\ndef from_dict(event_dict):\n \"\"\"Create a SnippetEvent object from a dictionary.\n\n Args:\n event_dict: a dictionary representing an event.\n\n Returns:\n A SnippetEvent object.\n \"\"\"\n return SnippetEvent(\n callback_id=event_dict['callbackId'],\n name=event_dict['name'],\n creation_time=event_dict['time'],\n data=event_dict['data'])\n\n\nclass SnippetEvent(object):\n \"\"\"The class that represents callback events for mobly snippet library.\n\n Attributes:\n callback_id: string, the callback ID associated with the event.\n name: string, the name of the event.\n creation_time: int, the epoch time when the event is created on the\n Rpc server side.\n data: dictionary, the data held by the event. Can be None.\n \"\"\"\n\n def __init__(self, callback_id, name, creation_time, data):\n self.callback_id = callback_id\n self.name = name\n self.creation_time = creation_time\n self.data = data\n", "path": "mobly/controllers/android_device_lib/snippet_event.py"}]} | 1,763 | 364 |
gh_patches_debug_36492 | rasdani/github-patches | git_diff | yt-dlp__yt-dlp-5445 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Stripchat No Video Formats Found
### DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE
- [X] I understand that I will be **blocked** if I remove or skip any mandatory\* field
### Checklist
- [X] I'm reporting a broken site
- [X] I've verified that I'm running yt-dlp version **2022.10.04** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)
- [X] I've checked that all provided URLs are playable in a browser with the same IP and same login details
- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)
- [X] I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. DO NOT post duplicates
- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)
- [X] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required
### Region
Most parts of the world
### Provide a description that is worded well enough to be understood
When downloading, it would say that there are no video formats found. Stopped working since today, site usable/playable via browser.
### Provide verbose output that clearly demonstrates the problem
- [X] Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)
- [X] Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below
### Complete Verbose Output
```shell
[debug] Command-line config: ['-vU', 'https://stripchat.com/sexgreat']
[debug] Encodings: locale cp1252, fs utf-8, pref cp1252, out utf-8, error utf-8, screen utf-8
[debug] yt-dlp version 2022.10.04 [4e0511f] (win32_exe)
[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.20348-SP0
[debug] Checking exe version: ffmpeg -bsfs
[debug] Checking exe version: ffprobe -bsfs
[debug] exe versions: ffmpeg N-108625-g28ac2279ad-20221012 (setts), ffprobe N-108625-g28ac2279ad-20221012
[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.09.24, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3
[debug] Proxy map: {}
[debug] Loaded 1690 extractors
[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest
Latest version: 2022.10.04, Current version: 2022.10.04
yt-dlp is up to date (2022.10.04)
[debug] [Stripchat] Extracting URL: https://stripchat.com/sexgreat
[Stripchat] sexgreat: Downloading webpage
[Stripchat] sexgreat: Downloading m3u8 information
WARNING: [Stripchat] Failed to download m3u8 information: <urlopen error [Errno 11001] getaddrinfo failed>
ERROR: [Stripchat] sexgreat: No video formats found!; please report this issue on https://github.com/yt-dlp/yt-dlp/issues?q= , filling out the appropriate issue template. Confirm you are on the latest version using yt-dlp -U
Traceback (most recent call last):
File "yt_dlp\YoutubeDL.py", line 1477, in wrapper
File "yt_dlp\YoutubeDL.py", line 1574, in __extract_info
File "yt_dlp\YoutubeDL.py", line 1632, in process_ie_result
File "yt_dlp\YoutubeDL.py", line 2569, in process_video_result
File "yt_dlp\YoutubeDL.py", line 1027, in raise_no_formats
yt_dlp.utils.ExtractorError: [Stripchat] sexgreat: No video formats found!; please report this issue on https://github.com/yt-dlp/yt-dlp/issues?q= , filling out the appropriate issue template. Confirm you are on the latest version using yt-dlp -U
```
</issue>
<code>
[start of yt_dlp/extractor/stripchat.py]
1 from .common import InfoExtractor
2 from ..compat import (
3 compat_str,
4 )
5 from ..utils import (
6 ExtractorError,
7 lowercase_escape,
8 try_get,
9 )
10
11
12 class StripchatIE(InfoExtractor):
13 _VALID_URL = r'https?://stripchat\.com/(?P<id>[^/?#]+)'
14 _TESTS = [{
15 'url': 'https://stripchat.com/feel_me',
16 'info_dict': {
17 'id': 'feel_me',
18 'ext': 'mp4',
19 'title': 're:^feel_me [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
20 'description': str,
21 'is_live': True,
22 'age_limit': 18,
23 },
24 'skip': 'Room is offline',
25 }, {
26 'url': 'https://stripchat.com/Rakhijaan@xh',
27 'only_matching': True
28 }]
29
30 def _real_extract(self, url):
31 video_id = self._match_id(url)
32 webpage = self._download_webpage(url, video_id, headers=self.geo_verification_headers())
33
34 data = self._parse_json(
35 self._search_regex(
36 r'<script\b[^>]*>\s*window\.__PRELOADED_STATE__\s*=(?P<value>.*?)<\/script>',
37 webpage, 'data', default='{}', group='value'),
38 video_id, transform_source=lowercase_escape, fatal=False)
39 if not data:
40 raise ExtractorError('Unable to find configuration for stream.')
41
42 if try_get(data, lambda x: x['viewCam']['show'], dict):
43 raise ExtractorError('Model is in private show', expected=True)
44 elif not try_get(data, lambda x: x['viewCam']['model']['isLive'], bool):
45 raise ExtractorError('Model is offline', expected=True)
46
47 server = try_get(data, lambda x: x['viewCam']['viewServers']['flashphoner-hls'], compat_str)
48 host = try_get(data, lambda x: x['config']['data']['hlsStreamHost'], compat_str)
49 model_id = try_get(data, lambda x: x['viewCam']['model']['id'], int)
50
51 formats = self._extract_m3u8_formats(
52 'https://b-%s.%s/hls/%d/%d.m3u8' % (server, host, model_id, model_id),
53 video_id, ext='mp4', m3u8_id='hls', fatal=False, live=True)
54 self._sort_formats(formats)
55
56 return {
57 'id': video_id,
58 'title': video_id,
59 'description': self._og_search_description(webpage),
60 'is_live': True,
61 'formats': formats,
62 # Stripchat declares the RTA meta-tag, but in an non-standard format so _rta_search() can't be used
63 'age_limit': 18,
64 }
65
[end of yt_dlp/extractor/stripchat.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/yt_dlp/extractor/stripchat.py b/yt_dlp/extractor/stripchat.py
--- a/yt_dlp/extractor/stripchat.py
+++ b/yt_dlp/extractor/stripchat.py
@@ -1,22 +1,15 @@
from .common import InfoExtractor
-from ..compat import (
- compat_str,
-)
-from ..utils import (
- ExtractorError,
- lowercase_escape,
- try_get,
-)
+from ..utils import ExtractorError, lowercase_escape, traverse_obj
class StripchatIE(InfoExtractor):
_VALID_URL = r'https?://stripchat\.com/(?P<id>[^/?#]+)'
_TESTS = [{
- 'url': 'https://stripchat.com/feel_me',
+ 'url': 'https://stripchat.com/Joselin_Flower',
'info_dict': {
- 'id': 'feel_me',
+ 'id': 'Joselin_Flower',
'ext': 'mp4',
- 'title': 're:^feel_me [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
+ 'title': 're:^Joselin_Flower [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'description': str,
'is_live': True,
'age_limit': 18,
@@ -39,18 +32,22 @@
if not data:
raise ExtractorError('Unable to find configuration for stream.')
- if try_get(data, lambda x: x['viewCam']['show'], dict):
+ if traverse_obj(data, ('viewCam', 'show'), expected_type=dict):
raise ExtractorError('Model is in private show', expected=True)
- elif not try_get(data, lambda x: x['viewCam']['model']['isLive'], bool):
+ elif not traverse_obj(data, ('viewCam', 'model', 'isLive'), expected_type=bool):
raise ExtractorError('Model is offline', expected=True)
- server = try_get(data, lambda x: x['viewCam']['viewServers']['flashphoner-hls'], compat_str)
- host = try_get(data, lambda x: x['config']['data']['hlsStreamHost'], compat_str)
- model_id = try_get(data, lambda x: x['viewCam']['model']['id'], int)
+ server = traverse_obj(data, ('viewCam', 'viewServers', 'flashphoner-hls'), expected_type=str)
+ model_id = traverse_obj(data, ('viewCam', 'model', 'id'), expected_type=int)
+
+ for host in traverse_obj(data, (
+ 'config', 'data', (('featuresV2', 'hlsFallback', 'fallbackDomains', ...), 'hlsStreamHost'))):
+ formats = self._extract_m3u8_formats(
+ f'https://b-{server}.{host}/hls/{model_id}/{model_id}.m3u8',
+ video_id, ext='mp4', m3u8_id='hls', fatal=False, live=True)
+ if formats:
+ break
- formats = self._extract_m3u8_formats(
- 'https://b-%s.%s/hls/%d/%d.m3u8' % (server, host, model_id, model_id),
- video_id, ext='mp4', m3u8_id='hls', fatal=False, live=True)
self._sort_formats(formats)
 
         return {
| {"golden_diff": "diff --git a/yt_dlp/extractor/stripchat.py b/yt_dlp/extractor/stripchat.py\n--- a/yt_dlp/extractor/stripchat.py\n+++ b/yt_dlp/extractor/stripchat.py\n@@ -1,22 +1,15 @@\n from .common import InfoExtractor\n-from ..compat import (\n- compat_str,\n-)\n-from ..utils import (\n- ExtractorError,\n- lowercase_escape,\n- try_get,\n-)\n+from ..utils import ExtractorError, lowercase_escape, traverse_obj\n \n \n class StripchatIE(InfoExtractor):\n _VALID_URL = r'https?://stripchat\\.com/(?P<id>[^/?#]+)'\n _TESTS = [{\n- 'url': 'https://stripchat.com/feel_me',\n+ 'url': 'https://stripchat.com/Joselin_Flower',\n 'info_dict': {\n- 'id': 'feel_me',\n+ 'id': 'Joselin_Flower',\n 'ext': 'mp4',\n- 'title': 're:^feel_me [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',\n+ 'title': 're:^Joselin_Flower [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',\n 'description': str,\n 'is_live': True,\n 'age_limit': 18,\n@@ -39,18 +32,22 @@\n if not data:\n raise ExtractorError('Unable to find configuration for stream.')\n \n- if try_get(data, lambda x: x['viewCam']['show'], dict):\n+ if traverse_obj(data, ('viewCam', 'show'), expected_type=dict):\n raise ExtractorError('Model is in private show', expected=True)\n- elif not try_get(data, lambda x: x['viewCam']['model']['isLive'], bool):\n+ elif not traverse_obj(data, ('viewCam', 'model', 'isLive'), expected_type=bool):\n raise ExtractorError('Model is offline', expected=True)\n \n- server = try_get(data, lambda x: x['viewCam']['viewServers']['flashphoner-hls'], compat_str)\n- host = try_get(data, lambda x: x['config']['data']['hlsStreamHost'], compat_str)\n- model_id = try_get(data, lambda x: x['viewCam']['model']['id'], int)\n+ server = traverse_obj(data, ('viewCam', 'viewServers', 'flashphoner-hls'), expected_type=str)\n+ model_id = traverse_obj(data, ('viewCam', 'model', 'id'), expected_type=int)\n+\n+ for host in traverse_obj(data, (\n+ 'config', 'data', (('featuresV2', 'hlsFallback', 'fallbackDomains', ...), 'hlsStreamHost'))):\n+ formats = self._extract_m3u8_formats(\n+ f'https://b-{server}.{host}/hls/{model_id}/{model_id}.m3u8',\n+ video_id, ext='mp4', m3u8_id='hls', fatal=False, live=True)\n+ if formats:\n+ break\n \n- formats = self._extract_m3u8_formats(\n- 'https://b-%s.%s/hls/%d/%d.m3u8' % (server, host, model_id, model_id),\n- video_id, ext='mp4', m3u8_id='hls', fatal=False, live=True)\n self._sort_formats(formats)\n \n return {\n", "issue": "Stripchat No Video Formats Found\n### DO NOT REMOVE OR SKIP THE ISSUE TEMPLATE\n\n- [X] I understand that I will be **blocked** if I remove or skip any mandatory\\* field\n\n### Checklist\n\n- [X] I'm reporting a broken site\n- [X] I've verified that I'm running yt-dlp version **2022.10.04** ([update instructions](https://github.com/yt-dlp/yt-dlp#update)) or later (specify commit)\n- [X] I've checked that all provided URLs are playable in a browser with the same IP and same login details\n- [X] I've checked that all URLs and arguments with special characters are [properly quoted or escaped](https://github.com/yt-dlp/yt-dlp/wiki/FAQ#video-url-contains-an-ampersand--and-im-getting-some-strange-output-1-2839-or-v-is-not-recognized-as-an-internal-or-external-command)\n- [X] I've searched the [bugtracker](https://github.com/yt-dlp/yt-dlp/issues?q=) for similar issues **including closed ones**. 
DO NOT post duplicates\n- [X] I've read the [guidelines for opening an issue](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#opening-an-issue)\n- [X] I've read about [sharing account credentials](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#are-you-willing-to-share-account-details-if-needed) and I'm willing to share it if required\n\n### Region\n\nMost parts of the world\n\n### Provide a description that is worded well enough to be understood\n\nWhen downloading, it would say that there are no video formats found. Stopped working since today, site usable/playable via browser.\n\n### Provide verbose output that clearly demonstrates the problem\n\n- [X] Run **your** yt-dlp command with **-vU** flag added (`yt-dlp -vU <your command line>`)\n- [X] Copy the WHOLE output (starting with `[debug] Command-line config`) and insert it below\n\n### Complete Verbose Output\n\n```shell\n[debug] Command-line config: ['-vU', 'https://stripchat.com/sexgreat']\r\n[debug] Encodings: locale cp1252, fs utf-8, pref cp1252, out utf-8, error utf-8, screen utf-8\r\n[debug] yt-dlp version 2022.10.04 [4e0511f] (win32_exe)\r\n[debug] Python 3.8.10 (CPython 64bit) - Windows-10-10.0.20348-SP0\r\n[debug] Checking exe version: ffmpeg -bsfs\r\n[debug] Checking exe version: ffprobe -bsfs\r\n[debug] exe versions: ffmpeg N-108625-g28ac2279ad-20221012 (setts), ffprobe N-108625-g28ac2279ad-20221012\r\n[debug] Optional libraries: Cryptodome-3.15.0, brotli-1.0.9, certifi-2022.09.24, mutagen-1.45.1, sqlite3-2.6.0, websockets-10.3\r\n[debug] Proxy map: {}\r\n[debug] Loaded 1690 extractors\r\n[debug] Fetching release info: https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest\r\nLatest version: 2022.10.04, Current version: 2022.10.04\r\nyt-dlp is up to date (2022.10.04)\r\n[debug] [Stripchat] Extracting URL: https://stripchat.com/sexgreat\r\n[Stripchat] sexgreat: Downloading webpage\r\n[Stripchat] sexgreat: Downloading m3u8 information\r\nWARNING: [Stripchat] Failed to download m3u8 information: <urlopen error [Errno 11001] getaddrinfo failed>\r\nERROR: [Stripchat] sexgreat: No video formats found!; please report this issue on https://github.com/yt-dlp/yt-dlp/issues?q= , filling out the appropriate issue template. Confirm you are on the latest version using yt-dlp -U\r\nTraceback (most recent call last):\r\n File \"yt_dlp\\YoutubeDL.py\", line 1477, in wrapper\r\n File \"yt_dlp\\YoutubeDL.py\", line 1574, in __extract_info\r\n File \"yt_dlp\\YoutubeDL.py\", line 1632, in process_ie_result\r\n File \"yt_dlp\\YoutubeDL.py\", line 2569, in process_video_result\r\n File \"yt_dlp\\YoutubeDL.py\", line 1027, in raise_no_formats\r\nyt_dlp.utils.ExtractorError: [Stripchat] sexgreat: No video formats found!; please report this issue on https://github.com/yt-dlp/yt-dlp/issues?q= , filling out the appropriate issue template. 
Confirm you are on the latest version using yt-dlp -U\n```\n\n", "before_files": [{"content": "from .common import InfoExtractor\nfrom ..compat import (\n compat_str,\n)\nfrom ..utils import (\n ExtractorError,\n lowercase_escape,\n try_get,\n)\n\n\nclass StripchatIE(InfoExtractor):\n _VALID_URL = r'https?://stripchat\\.com/(?P<id>[^/?#]+)'\n _TESTS = [{\n 'url': 'https://stripchat.com/feel_me',\n 'info_dict': {\n 'id': 'feel_me',\n 'ext': 'mp4',\n 'title': 're:^feel_me [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',\n 'description': str,\n 'is_live': True,\n 'age_limit': 18,\n },\n 'skip': 'Room is offline',\n }, {\n 'url': 'https://stripchat.com/Rakhijaan@xh',\n 'only_matching': True\n }]\n\n def _real_extract(self, url):\n video_id = self._match_id(url)\n webpage = self._download_webpage(url, video_id, headers=self.geo_verification_headers())\n\n data = self._parse_json(\n self._search_regex(\n r'<script\\b[^>]*>\\s*window\\.__PRELOADED_STATE__\\s*=(?P<value>.*?)<\\/script>',\n webpage, 'data', default='{}', group='value'),\n video_id, transform_source=lowercase_escape, fatal=False)\n if not data:\n raise ExtractorError('Unable to find configuration for stream.')\n\n if try_get(data, lambda x: x['viewCam']['show'], dict):\n raise ExtractorError('Model is in private show', expected=True)\n elif not try_get(data, lambda x: x['viewCam']['model']['isLive'], bool):\n raise ExtractorError('Model is offline', expected=True)\n\n server = try_get(data, lambda x: x['viewCam']['viewServers']['flashphoner-hls'], compat_str)\n host = try_get(data, lambda x: x['config']['data']['hlsStreamHost'], compat_str)\n model_id = try_get(data, lambda x: x['viewCam']['model']['id'], int)\n\n formats = self._extract_m3u8_formats(\n 'https://b-%s.%s/hls/%d/%d.m3u8' % (server, host, model_id, model_id),\n video_id, ext='mp4', m3u8_id='hls', fatal=False, live=True)\n self._sort_formats(formats)\n\n return {\n 'id': video_id,\n 'title': video_id,\n 'description': self._og_search_description(webpage),\n 'is_live': True,\n 'formats': formats,\n # Stripchat declares the RTA meta-tag, but in an non-standard format so _rta_search() can't be used\n 'age_limit': 18,\n }\n", "path": "yt_dlp/extractor/stripchat.py"}]} | 2,508 | 813 |
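A side note on the Stripchat record above: the patch collects every candidate HLS host with a branching `traverse_obj` path and tries each one until an m3u8 manifest resolves, instead of relying on the single hard-coded `hlsStreamHost` that stopped resolving. The sketch below illustrates roughly what that path is expected to yield; the payload shape and host names are invented, only the key names come from the diff.

```python
from yt_dlp.utils import traverse_obj

# Invented payload shaped like the keys referenced in the patch.
data = {'config': {'data': {
    'featuresV2': {'hlsFallback': {'fallbackDomains': ['edge-a.example', 'edge-b.example']}},
    'hlsStreamHost': 'primary.example',
}}}

hosts = traverse_obj(data, (
    'config', 'data', (('featuresV2', 'hlsFallback', 'fallbackDomains', ...), 'hlsStreamHost')))

# Expected to produce the fallback domains followed by the primary host,
# roughly ['edge-a.example', 'edge-b.example', 'primary.example'],
# so the extractor can try each host until one serves the HLS manifest.
print(hosts)
```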
gh_patches_debug_11487 | rasdani/github-patches | git_diff | facebookresearch__nevergrad-712 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Optimizer always trying the same points
## Steps to reproduce
1. Set up an optimizer with a parametrization
2. Run optimizer.minimize()
3. Print values being tested
## Observed Results
* What happened? This could be a description, log output, etc.
The arguments of the objective function are always the same
## Expected Results
* What did you expect to happen?
The optimizer tries new values
## Relevant Code
```
from concurrent import futures
import nevergrad as ng
import numpy as np
class myobj:
def __init__(self):
self.params = ng.p.Dict(
kernel=ng.p.Log(lower=3, upper=2000).set_integer_casting()
)
def get_optimizer(self, parametrization, dictA):
optimizer = ng.optimizers.NGO(
parametrization=parametrization,
budget=dictA["budget"],
num_workers=dictA["workers"],
)
return optimizer
def fn(self, params, dictA, X, Y):
print(params)
kvalue = params['kernel']
print(kvalue)
return kvalue * np.mean((Y - X)**2)
def optimize_params(self, dictA, X, Y):
# breakpoint()
parametrization = ng.p.Instrumentation(
self.params, dictA=dictA, X=X, Y=Y
)
optimizer = self.get_optimizer(parametrization, dictA)
with futures.ThreadPoolExecutor(
max_workers=optimizer.num_workers
) as executor:
r = optimizer.minimize(
self.fn, executor=executor, batch_mode=False
)
return r
obj = myobj()
dictA = {'budget':5, 'workers':2}
recom = obj.optimize_params(dictA, np.zeros(3), np.ones(3))
print('recommendation: ')
print(*recom.args)
```
I am sure I am doing something wrong, but I can not detect what. Any helpful hand is welcome.
<!--- In any case, don't hesitate to join and ask questions if you need on Nevergrad users Facebook group https://www.facebook.com/groups/nevergradusers/ -->
</issue>
<code>
[start of nevergrad/optimization/mutations.py]
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
2 #
3 # This source code is licensed under the MIT license found in the
4 # LICENSE file in the root directory of this source tree.
5
6 from typing import Optional, Any
7 import numpy as np
8 from ..common.typetools import ArrayLike
9 from . import utils
10
11
12 class Mutator:
13 """Class defining mutations, and holding a random state used for random generation.
14 """
15
16 def __init__(self, random_state: np.random.RandomState) -> None:
17 self.random_state = random_state
18
19 def doerr_discrete_mutation(self, parent: ArrayLike) -> ArrayLike:
20 """Mutation as in the fast 1+1-ES, Doerr et al. The exponent is 1.5.
21 """
22 dimension = len(parent)
23 if dimension < 5:
24 return self.discrete_mutation(parent)
25 return self.doubledoerr_discrete_mutation(parent, max_ratio=.5)
26
27 def doubledoerr_discrete_mutation(self, parent: ArrayLike, max_ratio: float = 1.) -> ArrayLike:
28 """Doerr's recommendation above can mutate up to half variables
29 in average.
30 In our high-arity context, we might need more than that.
31
32 Parameters
33 ----------
34 parent: array-like
35 the point to mutate
36 max_ratio: float (between 0 and 1)
37 the maximum mutation ratio (careful: this is not an exact ratio)
38 """
39 assert 0 <= max_ratio <= 1
40 dimension = len(parent)
41 max_mutations = max(2, int(max_ratio * dimension))
42 p = 1. / np.arange(1, max_mutations)**1.5
43 p /= np.sum(p)
44 u = self.random_state.choice(np.arange(1, max_mutations), p=p)
45 return self.portfolio_discrete_mutation(parent, u=u)
46
47 def portfolio_discrete_mutation(self, parent: ArrayLike, u: Optional[int] = None) -> ArrayLike:
48 """Mutation discussed in
49 https://arxiv.org/pdf/1606.05551v1.pdf
50 We mutate a randomly drawn number of variables in average.
51 """
52 dimension = len(parent)
53 if u is None:
54 u = 1 if dimension == 1 else int(self.random_state.randint(1, dimension))
55 boolean_vector = [True for _ in parent]
56 while all(boolean_vector) and dimension != 1:
57 boolean_vector = [self.random_state.rand() > (float(u) / dimension) for _ in parent]
58 return [s if b else self.random_state.normal(0., 1.) for (b, s) in zip(boolean_vector, parent)]
59
60 def discrete_mutation(self, parent: ArrayLike) -> ArrayLike:
61 dimension = len(parent)
62 boolean_vector = [True for _ in parent]
63 while all(boolean_vector):
64 boolean_vector = [self.random_state.rand() > (1. / dimension) for _ in parent]
65 return [s if b else self.random_state.normal(0., 1.) for (b, s) in zip(boolean_vector, parent)]
66
67 def crossover(self, parent: ArrayLike, donor: ArrayLike) -> ArrayLike:
68 mix = [self.random_state.choice([d, p]) for (p, d) in zip(parent, donor)]
69 return self.discrete_mutation(mix)
70
71 def get_roulette(self, archive: utils.Archive[utils.MultiValue], num: Optional[int] = None) -> Any:
72 """Apply a roulette tournament selection.
73 """
74 if num is None:
75 num = int(.999 + np.sqrt(len(archive)))
76 # the following sort makes the line deterministic, and function seedable, at the cost of complexity!
77 my_keys = sorted(archive.bytesdict.keys())
78 my_keys_indices = self.random_state.choice(len(my_keys), size=min(num, len(my_keys)), replace=False)
79 my_keys = [my_keys[i] for i in my_keys_indices]
80 # best pessimistic value in a random set of keys
81 return np.frombuffer(min(my_keys, key=lambda x: archive.bytesdict[x].pessimistic_confidence_bound))
82
[end of nevergrad/optimization/mutations.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/nevergrad/optimization/mutations.py b/nevergrad/optimization/mutations.py
--- a/nevergrad/optimization/mutations.py
+++ b/nevergrad/optimization/mutations.py
@@ -52,6 +52,8 @@
dimension = len(parent)
if u is None:
u = 1 if dimension == 1 else int(self.random_state.randint(1, dimension))
+ if dimension == 1: # corner case.
+ return self.random_state.normal(0., 1., size=1) # type: ignore
boolean_vector = [True for _ in parent]
while all(boolean_vector) and dimension != 1:
boolean_vector = [self.random_state.rand() > (float(u) / dimension) for _ in parent]
| {"golden_diff": "diff --git a/nevergrad/optimization/mutations.py b/nevergrad/optimization/mutations.py\n--- a/nevergrad/optimization/mutations.py\n+++ b/nevergrad/optimization/mutations.py\n@@ -52,6 +52,8 @@\n dimension = len(parent)\n if u is None:\n u = 1 if dimension == 1 else int(self.random_state.randint(1, dimension))\n+ if dimension == 1: # corner case.\n+ return self.random_state.normal(0., 1., size=1) # type: ignore\n boolean_vector = [True for _ in parent]\n while all(boolean_vector) and dimension != 1:\n boolean_vector = [self.random_state.rand() > (float(u) / dimension) for _ in parent]\n", "issue": "Optimizer always trying the same points \n## Steps to reproduce\r\n\r\n 1. Set up an optimizer with a parametrization\r\n 2. Run optimizer.minimize()\r\n 3. Print values being tested\r\n\r\n## Observed Results\r\n\r\n * What happened? This could be a description, log output, etc.\r\nThe argument of the objective function are always the same\r\n\r\n## Expected Results\r\n\r\n * What did you expect to happen?\r\nThe optimizer tries new values\r\n## Relevant Code\r\n\r\n ```\r\nfrom concurrent import futures\r\n\r\nimport nevergrad as ng\r\nimport numpy as np\r\n\r\n\r\nclass myobj:\r\n def __init__(self):\r\n self.params = ng.p.Dict(\r\n kernel=ng.p.Log(lower=3, upper=2000).set_integer_casting()\r\n )\r\n\r\n def get_optimizer(self, parametrization, dictA):\r\n optimizer = ng.optimizers.NGO(\r\n parametrization=parametrization,\r\n budget=dictA[\"budget\"],\r\n num_workers=dictA[\"workers\"],\r\n )\r\n return optimizer\r\n\r\n def fn(self, params, dictA, X, Y):\r\n print(params)\r\n kvalue = params['kernel']\r\n print(kvalue)\r\n return kvalue * np.mean((Y - X)**2)\r\n\r\n def optimize_params(self, dictA, X, Y):\r\n # breakpoint()\r\n parametrization = ng.p.Instrumentation(\r\n self.params, dictA=dictA, X=X, Y=Y\r\n )\r\n optimizer = self.get_optimizer(parametrization, dictA)\r\n with futures.ThreadPoolExecutor(\r\n max_workers=optimizer.num_workers\r\n ) as executor:\r\n r = optimizer.minimize(\r\n self.fn, executor=executor, batch_mode=False\r\n )\r\n return r\r\n\r\n\r\nobj = myobj()\r\ndictA = {'budget':5, 'workers':2}\r\nrecom = obj.optimize_params(dictA, np.zeros(3), np.ones(3))\r\nprint('recommendation: ')\r\nprint(*recom.args)\r\n\r\n ```\r\n\r\nI am sure I am doing something wrong, but I can not detect what. Any helpful hand is welcome.\r\n\r\n<!--- In any case, don't hesitate to join and ask questions if you need on Nevergrad users Facebook group https://www.facebook.com/groups/nevergradusers/ -->\r\n\n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nfrom typing import Optional, Any\nimport numpy as np\nfrom ..common.typetools import ArrayLike\nfrom . import utils\n\n\nclass Mutator:\n \"\"\"Class defining mutations, and holding a random state used for random generation.\n \"\"\"\n\n def __init__(self, random_state: np.random.RandomState) -> None:\n self.random_state = random_state\n\n def doerr_discrete_mutation(self, parent: ArrayLike) -> ArrayLike:\n \"\"\"Mutation as in the fast 1+1-ES, Doerr et al. The exponent is 1.5.\n \"\"\"\n dimension = len(parent)\n if dimension < 5:\n return self.discrete_mutation(parent)\n return self.doubledoerr_discrete_mutation(parent, max_ratio=.5)\n\n def doubledoerr_discrete_mutation(self, parent: ArrayLike, max_ratio: float = 1.) 
-> ArrayLike:\n \"\"\"Doerr's recommendation above can mutate up to half variables\n in average.\n In our high-arity context, we might need more than that.\n\n Parameters\n ----------\n parent: array-like\n the point to mutate\n max_ratio: float (between 0 and 1)\n the maximum mutation ratio (careful: this is not an exact ratio)\n \"\"\"\n assert 0 <= max_ratio <= 1\n dimension = len(parent)\n max_mutations = max(2, int(max_ratio * dimension))\n p = 1. / np.arange(1, max_mutations)**1.5\n p /= np.sum(p)\n u = self.random_state.choice(np.arange(1, max_mutations), p=p)\n return self.portfolio_discrete_mutation(parent, u=u)\n\n def portfolio_discrete_mutation(self, parent: ArrayLike, u: Optional[int] = None) -> ArrayLike:\n \"\"\"Mutation discussed in\n https://arxiv.org/pdf/1606.05551v1.pdf\n We mutate a randomly drawn number of variables in average.\n \"\"\"\n dimension = len(parent)\n if u is None:\n u = 1 if dimension == 1 else int(self.random_state.randint(1, dimension))\n boolean_vector = [True for _ in parent]\n while all(boolean_vector) and dimension != 1:\n boolean_vector = [self.random_state.rand() > (float(u) / dimension) for _ in parent]\n return [s if b else self.random_state.normal(0., 1.) for (b, s) in zip(boolean_vector, parent)]\n\n def discrete_mutation(self, parent: ArrayLike) -> ArrayLike:\n dimension = len(parent)\n boolean_vector = [True for _ in parent]\n while all(boolean_vector):\n boolean_vector = [self.random_state.rand() > (1. / dimension) for _ in parent]\n return [s if b else self.random_state.normal(0., 1.) for (b, s) in zip(boolean_vector, parent)]\n\n def crossover(self, parent: ArrayLike, donor: ArrayLike) -> ArrayLike:\n mix = [self.random_state.choice([d, p]) for (p, d) in zip(parent, donor)]\n return self.discrete_mutation(mix)\n\n def get_roulette(self, archive: utils.Archive[utils.MultiValue], num: Optional[int] = None) -> Any:\n \"\"\"Apply a roulette tournament selection.\n \"\"\"\n if num is None:\n num = int(.999 + np.sqrt(len(archive)))\n # the following sort makes the line deterministic, and function seedable, at the cost of complexity!\n my_keys = sorted(archive.bytesdict.keys())\n my_keys_indices = self.random_state.choice(len(my_keys), size=min(num, len(my_keys)), replace=False)\n my_keys = [my_keys[i] for i in my_keys_indices]\n # best pessimistic value in a random set of keys\n return np.frombuffer(min(my_keys, key=lambda x: archive.bytesdict[x].pessimistic_confidence_bound))\n", "path": "nevergrad/optimization/mutations.py"}]} | 2,079 | 175 |
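A side note on the nevergrad record above, explaining why the symptom is "always trying the same points": for a one-dimensional parametrization, `portfolio_discrete_mutation` (lines 52-58 of the listing) never flips its single boolean, because the loop guard `while all(boolean_vector) and dimension != 1` is skipped when `dimension == 1`, so the parent is returned unchanged on every ask. A minimal standalone sketch of that pre-fix behaviour:

```python
import numpy as np

rng = np.random.RandomState(0)
parent = [0.3]   # dimension == 1
u = 1            # the only possible draw for a 1-D parent

boolean_vector = [True]
# The guard `all(boolean_vector) and dimension != 1` is False for dimension 1,
# so boolean_vector stays [True] ...
mutated = [s if b else rng.normal(0., 1.) for (b, s) in zip(boolean_vector, parent)]
assert mutated == parent  # ... and the "mutation" hands back the parent untouched.
```

The patch short-circuits this corner case by returning a fresh normal sample for the single coordinate.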
gh_patches_debug_5700 | rasdani/github-patches | git_diff | psychopy__psychopy-1325 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
psychopyApp won't start with Matplotlib 1.5 installed
See http://discourse.psychopy.org/t/mac-specific-help/1540/3
We need to figure out
- whether this problem is Anaconda-specific (and would require fixing upstream)
- whether this problem is Mac-specific
</issue>
<code>
[start of psychopy/app/psychopyApp.py]
1 #!/usr/bin/env python2
2
3 # Part of the PsychoPy library
4 # Copyright (C) 2015 Jonathan Peirce
5 # Distributed under the terms of the GNU General Public License (GPL).
6
7 from __future__ import absolute_import, print_function
8
9 import sys
10 from psychopy.app._psychopyApp import PsychoPyApp, __version__
11
12 # NB the PsychoPyApp classes moved to _psychopyApp.py as of version 1.78.00
13 # to allow for better upgrading possibilities from the mac app bundle. this
14 # file now used solely as a launcher for the app, not as the app itself.
15
16 if __name__ == '__main__':
17 if '-x' in sys.argv:
18 # run a .py script from the command line using StandAlone python
19 targetScript = sys.argv[sys.argv.index('-x') + 1]
20 from psychopy import core
21 import os
22 core.shellCall([sys.executable, os.path.abspath(targetScript)])
23 sys.exit()
24 if '-v' in sys.argv or '--version' in sys.argv:
25 info = 'PsychoPy2, version %s (c)Jonathan Peirce 2015, GNU GPL license'
26 print(info % __version__)
27 sys.exit()
28 if '-h' in sys.argv or '--help' in sys.argv:
29 print("""Starts the PsychoPy2 application.
30
31 Usage: python PsychoPy.py [options] [file]
32
33 Without options or files provided this starts PsychoPy using prefs to
34 decide on the view(s) to open. If optional [file] is provided action
35 depends on the type of the [file]:
36
37 Python script 'file.py' -- opens coder
38
39 Experiment design 'file.psyexp' -- opens builder
40
41 Options:
42 -c, --coder, coder opens coder view only
43 -b, --builder, builder opens builder view only
44 -x script.py execute script.py using StandAlone python
45
46 -v, --version prints version and exits
47 -h, --help prints this help and exit
48
49 --firstrun launches configuration wizard
50 --no-splash suppresses splash screen
51
52 """)
53 sys.exit()
54
55 else:
56 showSplash = True
57 if '--no-splash' in sys.argv:
58 showSplash = False
59 del sys.argv[sys.argv.index('--no-splash')]
60 app = PsychoPyApp(0, showSplash=showSplash)
61 app.MainLoop()
62
[end of psychopy/app/psychopyApp.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/psychopy/app/psychopyApp.py b/psychopy/app/psychopyApp.py
--- a/psychopy/app/psychopyApp.py
+++ b/psychopy/app/psychopyApp.py
@@ -8,6 +8,8 @@
import sys
from psychopy.app._psychopyApp import PsychoPyApp, __version__
+# fix OS X locale-bug on startup: sets locale to LC_ALL (must be defined!)
+import psychopy.locale_setup # noqa
 
 # NB the PsychoPyApp classes moved to _psychopyApp.py as of version 1.78.00
# to allow for better upgrading possibilities from the mac app bundle. this
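(For context: the imported `psychopy.locale_setup` module is not shown in this record. A locale shim of this kind typically just forces a defined `LC_ALL` before the wx app starts; the sketch below is an assumption about its shape, not the actual module source.)

```python
# Hypothetical sketch only - not the real psychopy/locale_setup.py.
import locale
import os

if not os.environ.get('LC_ALL'):
    os.environ['LC_ALL'] = 'en_US.UTF-8'  # assumed default; the real module may differ
locale.setlocale(locale.LC_ALL, '')       # apply the environment locale to the process
```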
| {"golden_diff": "diff --git a/psychopy/app/psychopyApp.py b/psychopy/app/psychopyApp.py\n--- a/psychopy/app/psychopyApp.py\n+++ b/psychopy/app/psychopyApp.py\n@@ -8,6 +8,8 @@\n \n import sys\n from psychopy.app._psychopyApp import PsychoPyApp, __version__\n+# fix OS X locale-bug on startup: sets locale to LC_ALL (must be defined!)\n+import psychopy.locale_setup # noqa\n \n # NB the PsychoPyApp classes moved to _psychopyApp.py as of version 1.78.00\n # to allow for better upgrading possibilities from the mac app bundle. this\n", "issue": "psychopyApp won't start with Matplotlib 1.5 installed\nSee http://discourse.psychopy.org/t/mac-specific-help/1540/3\r\n\r\nWe need to figure out\r\n- whether this problem is Anaconda-specific (and would require fixing upstream)\r\n- whether this problem is Mac-specific\n", "before_files": [{"content": "#!/usr/bin/env python2\n\n# Part of the PsychoPy library\n# Copyright (C) 2015 Jonathan Peirce\n# Distributed under the terms of the GNU General Public License (GPL).\n\nfrom __future__ import absolute_import, print_function\n\nimport sys\nfrom psychopy.app._psychopyApp import PsychoPyApp, __version__\n\n# NB the PsychoPyApp classes moved to _psychopyApp.py as of version 1.78.00\n# to allow for better upgrading possibilities from the mac app bundle. this\n# file now used solely as a launcher for the app, not as the app itself.\n\nif __name__ == '__main__':\n if '-x' in sys.argv:\n # run a .py script from the command line using StandAlone python\n targetScript = sys.argv[sys.argv.index('-x') + 1]\n from psychopy import core\n import os\n core.shellCall([sys.executable, os.path.abspath(targetScript)])\n sys.exit()\n if '-v' in sys.argv or '--version' in sys.argv:\n info = 'PsychoPy2, version %s (c)Jonathan Peirce 2015, GNU GPL license'\n print(info % __version__)\n sys.exit()\n if '-h' in sys.argv or '--help' in sys.argv:\n print(\"\"\"Starts the PsychoPy2 application.\n\nUsage: python PsychoPy.py [options] [file]\n\nWithout options or files provided this starts PsychoPy using prefs to\ndecide on the view(s) to open. If optional [file] is provided action\ndepends on the type of the [file]:\n\n Python script 'file.py' -- opens coder\n\n Experiment design 'file.psyexp' -- opens builder\n\nOptions:\n -c, --coder, coder opens coder view only\n -b, --builder, builder opens builder view only\n -x script.py execute script.py using StandAlone python\n\n -v, --version prints version and exits\n -h, --help prints this help and exit\n\n --firstrun launches configuration wizard\n --no-splash suppresses splash screen\n\n\"\"\")\n sys.exit()\n\n else:\n showSplash = True\n if '--no-splash' in sys.argv:\n showSplash = False\n del sys.argv[sys.argv.index('--no-splash')]\n app = PsychoPyApp(0, showSplash=showSplash)\n app.MainLoop()\n", "path": "psychopy/app/psychopyApp.py"}]} | 1,267 | 149 |
gh_patches_debug_5346 | rasdani/github-patches | git_diff | borgbackup__borg-8135 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
upgrade cython and msgpack
is borg also affected?
https://github.com/msgpack/msgpack-python/pull/583/files
https://github.com/msgpack/msgpack-python/issues/579#issuecomment-1972893890
https://github.com/msgpack/msgpack-python/releases/tag/v1.0.8
https://github.com/cython/cython/issues/5724
</issue>
<code>
[start of src/borg/helpers/msgpack.py]
1 from .datastruct import StableDict
2 from ..constants import * # NOQA
3
4 # wrapping msgpack ---------------------------------------------------------------------------------------------------
5 #
6 # due to the breaking api changes in upstream msgpack (from 0.x to 1.0), we wrapped it the way we need it -
7 # to avoid having lots of clutter in the calling code. see tickets #968 and #3632.
8 # as borg 1.4 now requires msgpack > 1.0 anyway, the wrapper has reduced functionality, but was kept.
9 #
10 # Packing
11 # -------
12 # use_bin_type = False is needed to generate the old msgpack format (not msgpack 2.0 spec) as borg always did.
13 # unicode_errors = None is needed because usage of it is deprecated
14 #
15 # Unpacking
16 # ---------
17 # raw = True is needed to unpack the old msgpack format to bytes (not str, about the decoding see item.pyx).
18 # unicode_errors = None is needed because usage of it is deprecated
19
20 from msgpack import Packer as mp_Packer
21 from msgpack import packb as mp_packb
22 from msgpack import pack as mp_pack
23 from msgpack import Unpacker as mp_Unpacker
24 from msgpack import unpackb as mp_unpackb
25 from msgpack import unpack as mp_unpack
26 from msgpack import version as mp_version
27
28 from msgpack import ExtType
29 from msgpack import OutOfData
30
31
32 version = mp_version
33
34
35 class PackException(Exception):
36 """Exception while msgpack packing"""
37
38
39 class UnpackException(Exception):
40 """Exception while msgpack unpacking"""
41
42
43 class Packer(mp_Packer):
44 def __init__(self, *, default=None, unicode_errors=None,
45 use_single_float=False, autoreset=True, use_bin_type=False,
46 strict_types=False):
47 assert unicode_errors is None
48 super().__init__(default=default, unicode_errors=unicode_errors,
49 use_single_float=use_single_float, autoreset=autoreset, use_bin_type=use_bin_type,
50 strict_types=strict_types)
51
52 def pack(self, obj):
53 try:
54 return super().pack(obj)
55 except Exception as e:
56 raise PackException(e)
57
58
59 def packb(o, *, use_bin_type=False, unicode_errors=None, **kwargs):
60 assert unicode_errors is None
61 try:
62 return mp_packb(o, use_bin_type=use_bin_type, unicode_errors=unicode_errors, **kwargs)
63 except Exception as e:
64 raise PackException(e)
65
66
67 def pack(o, stream, *, use_bin_type=False, unicode_errors=None, **kwargs):
68 assert unicode_errors is None
69 try:
70 return mp_pack(o, stream, use_bin_type=use_bin_type, unicode_errors=unicode_errors, **kwargs)
71 except Exception as e:
72 raise PackException(e)
73
74
75 class Unpacker(mp_Unpacker):
76 def __init__(self, file_like=None, *, read_size=0, use_list=True, raw=True,
77 object_hook=None, object_pairs_hook=None, list_hook=None,
78 unicode_errors=None, max_buffer_size=0,
79 ext_hook=ExtType,
80 strict_map_key=False):
81 assert raw is True
82 assert unicode_errors is None
83 kw = dict(file_like=file_like, read_size=read_size, use_list=use_list, raw=raw,
84 object_hook=object_hook, object_pairs_hook=object_pairs_hook, list_hook=list_hook,
85 unicode_errors=unicode_errors, max_buffer_size=max_buffer_size,
86 ext_hook=ext_hook,
87 strict_map_key=strict_map_key)
88 super().__init__(**kw)
89
90 def unpack(self):
91 try:
92 return super().unpack()
93 except OutOfData:
94 raise
95 except Exception as e:
96 raise UnpackException(e)
97
98 def __next__(self):
99 try:
100 return super().__next__()
101 except StopIteration:
102 raise
103 except Exception as e:
104 raise UnpackException(e)
105
106 next = __next__
107
108
109 def unpackb(packed, *, raw=True, unicode_errors=None, strict_map_key=False, **kwargs):
110 assert unicode_errors is None
111 try:
112 kw = dict(raw=raw, unicode_errors=unicode_errors, strict_map_key=strict_map_key)
113 kw.update(kwargs)
114 return mp_unpackb(packed, **kw)
115 except Exception as e:
116 raise UnpackException(e)
117
118
119 def unpack(stream, *, raw=True, unicode_errors=None, strict_map_key=False, **kwargs):
120 assert unicode_errors is None
121 try:
122 kw = dict(raw=raw, unicode_errors=unicode_errors, strict_map_key=strict_map_key)
123 kw.update(kwargs)
124 return mp_unpack(stream, **kw)
125 except Exception as e:
126 raise UnpackException(e)
127
128
129 # msgpacking related utilities -----------------------------------------------
130
131 def is_slow_msgpack():
132 import msgpack
133 import msgpack.fallback
134 return msgpack.Packer is msgpack.fallback.Packer
135
136
137 def is_supported_msgpack():
138 # DO NOT CHANGE OR REMOVE! See also requirements and comments in pyproject.toml.
139 import msgpack
140 return (1, 0, 3) <= msgpack.version <= (1, 0, 7) and \
141 msgpack.version not in [] # < add bad releases here to deny list
142
143
144 def get_limited_unpacker(kind):
145 """return a limited Unpacker because we should not trust msgpack data received from remote"""
146 # Note: msgpack >= 0.6.1 auto-computes DoS-safe max values from len(data) for
147 # unpack(data) or from max_buffer_size for Unpacker(max_buffer_size=N).
148 args = dict(use_list=False, max_buffer_size=3 * max(BUFSIZE, MAX_OBJECT_SIZE)) # return tuples, not lists
149 if kind in ('server', 'client'):
150 pass # nothing special
151 elif kind in ('manifest', 'archive', 'key'):
152 args.update(dict(use_list=True, object_hook=StableDict)) # default value
153 else:
154 raise ValueError('kind must be "server", "client", "manifest", "archive" or "key"')
155 return Unpacker(**args)
156
157
158 def bigint_to_int(mtime):
159 """Convert bytearray to int
160 """
161 if isinstance(mtime, bytes):
162 return int.from_bytes(mtime, 'little', signed=True)
163 return mtime
164
165
166 def int_to_bigint(value):
167 """Convert integers larger than 64 bits to bytearray
168
169 Smaller integers are left alone
170 """
171 if value.bit_length() > 63:
172 return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
173 return value
174
[end of src/borg/helpers/msgpack.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/borg/helpers/msgpack.py b/src/borg/helpers/msgpack.py
--- a/src/borg/helpers/msgpack.py
+++ b/src/borg/helpers/msgpack.py
@@ -137,7 +137,7 @@
def is_supported_msgpack():
# DO NOT CHANGE OR REMOVE! See also requirements and comments in pyproject.toml.
import msgpack
- return (1, 0, 3) <= msgpack.version <= (1, 0, 7) and \
+ return (1, 0, 3) <= msgpack.version <= (1, 0, 8) and \
msgpack.version not in [] # < add bad releases here to deny list
| {"golden_diff": "diff --git a/src/borg/helpers/msgpack.py b/src/borg/helpers/msgpack.py\n--- a/src/borg/helpers/msgpack.py\n+++ b/src/borg/helpers/msgpack.py\n@@ -137,7 +137,7 @@\n def is_supported_msgpack():\n # DO NOT CHANGE OR REMOVE! See also requirements and comments in pyproject.toml.\n import msgpack\n- return (1, 0, 3) <= msgpack.version <= (1, 0, 7) and \\\n+ return (1, 0, 3) <= msgpack.version <= (1, 0, 8) and \\\n msgpack.version not in [] # < add bad releases here to deny list\n", "issue": "upgrade cython and msgpack\nis borg also affected?\r\n\r\nhttps://github.com/msgpack/msgpack-python/pull/583/files\r\n\r\nhttps://github.com/msgpack/msgpack-python/issues/579#issuecomment-1972893890\r\n\r\nhttps://github.com/msgpack/msgpack-python/releases/tag/v1.0.8\r\n\r\nhttps://github.com/cython/cython/issues/5724\n", "before_files": [{"content": "from .datastruct import StableDict\nfrom ..constants import * # NOQA\n\n# wrapping msgpack ---------------------------------------------------------------------------------------------------\n#\n# due to the breaking api changes in upstream msgpack (from 0.x to 1.0), we wrapped it the way we need it -\n# to avoid having lots of clutter in the calling code. see tickets #968 and #3632.\n# as borg 1.4 now requires msgpack > 1.0 anyway, the wrapper has reduced functionality, but was kept.\n#\n# Packing\n# -------\n# use_bin_type = False is needed to generate the old msgpack format (not msgpack 2.0 spec) as borg always did.\n# unicode_errors = None is needed because usage of it is deprecated\n#\n# Unpacking\n# ---------\n# raw = True is needed to unpack the old msgpack format to bytes (not str, about the decoding see item.pyx).\n# unicode_errors = None is needed because usage of it is deprecated\n\nfrom msgpack import Packer as mp_Packer\nfrom msgpack import packb as mp_packb\nfrom msgpack import pack as mp_pack\nfrom msgpack import Unpacker as mp_Unpacker\nfrom msgpack import unpackb as mp_unpackb\nfrom msgpack import unpack as mp_unpack\nfrom msgpack import version as mp_version\n\nfrom msgpack import ExtType\nfrom msgpack import OutOfData\n\n\nversion = mp_version\n\n\nclass PackException(Exception):\n \"\"\"Exception while msgpack packing\"\"\"\n\n\nclass UnpackException(Exception):\n \"\"\"Exception while msgpack unpacking\"\"\"\n\n\nclass Packer(mp_Packer):\n def __init__(self, *, default=None, unicode_errors=None,\n use_single_float=False, autoreset=True, use_bin_type=False,\n strict_types=False):\n assert unicode_errors is None\n super().__init__(default=default, unicode_errors=unicode_errors,\n use_single_float=use_single_float, autoreset=autoreset, use_bin_type=use_bin_type,\n strict_types=strict_types)\n\n def pack(self, obj):\n try:\n return super().pack(obj)\n except Exception as e:\n raise PackException(e)\n\n\ndef packb(o, *, use_bin_type=False, unicode_errors=None, **kwargs):\n assert unicode_errors is None\n try:\n return mp_packb(o, use_bin_type=use_bin_type, unicode_errors=unicode_errors, **kwargs)\n except Exception as e:\n raise PackException(e)\n\n\ndef pack(o, stream, *, use_bin_type=False, unicode_errors=None, **kwargs):\n assert unicode_errors is None\n try:\n return mp_pack(o, stream, use_bin_type=use_bin_type, unicode_errors=unicode_errors, **kwargs)\n except Exception as e:\n raise PackException(e)\n\n\nclass Unpacker(mp_Unpacker):\n def __init__(self, file_like=None, *, read_size=0, use_list=True, raw=True,\n object_hook=None, object_pairs_hook=None, list_hook=None,\n unicode_errors=None, max_buffer_size=0,\n 
ext_hook=ExtType,\n strict_map_key=False):\n assert raw is True\n assert unicode_errors is None\n kw = dict(file_like=file_like, read_size=read_size, use_list=use_list, raw=raw,\n object_hook=object_hook, object_pairs_hook=object_pairs_hook, list_hook=list_hook,\n unicode_errors=unicode_errors, max_buffer_size=max_buffer_size,\n ext_hook=ext_hook,\n strict_map_key=strict_map_key)\n super().__init__(**kw)\n\n def unpack(self):\n try:\n return super().unpack()\n except OutOfData:\n raise\n except Exception as e:\n raise UnpackException(e)\n\n def __next__(self):\n try:\n return super().__next__()\n except StopIteration:\n raise\n except Exception as e:\n raise UnpackException(e)\n\n next = __next__\n\n\ndef unpackb(packed, *, raw=True, unicode_errors=None, strict_map_key=False, **kwargs):\n assert unicode_errors is None\n try:\n kw = dict(raw=raw, unicode_errors=unicode_errors, strict_map_key=strict_map_key)\n kw.update(kwargs)\n return mp_unpackb(packed, **kw)\n except Exception as e:\n raise UnpackException(e)\n\n\ndef unpack(stream, *, raw=True, unicode_errors=None, strict_map_key=False, **kwargs):\n assert unicode_errors is None\n try:\n kw = dict(raw=raw, unicode_errors=unicode_errors, strict_map_key=strict_map_key)\n kw.update(kwargs)\n return mp_unpack(stream, **kw)\n except Exception as e:\n raise UnpackException(e)\n\n\n# msgpacking related utilities -----------------------------------------------\n\ndef is_slow_msgpack():\n import msgpack\n import msgpack.fallback\n return msgpack.Packer is msgpack.fallback.Packer\n\n\ndef is_supported_msgpack():\n # DO NOT CHANGE OR REMOVE! See also requirements and comments in pyproject.toml.\n import msgpack\n return (1, 0, 3) <= msgpack.version <= (1, 0, 7) and \\\n msgpack.version not in [] # < add bad releases here to deny list\n\n\ndef get_limited_unpacker(kind):\n \"\"\"return a limited Unpacker because we should not trust msgpack data received from remote\"\"\"\n # Note: msgpack >= 0.6.1 auto-computes DoS-safe max values from len(data) for\n # unpack(data) or from max_buffer_size for Unpacker(max_buffer_size=N).\n args = dict(use_list=False, max_buffer_size=3 * max(BUFSIZE, MAX_OBJECT_SIZE)) # return tuples, not lists\n if kind in ('server', 'client'):\n pass # nothing special\n elif kind in ('manifest', 'archive', 'key'):\n args.update(dict(use_list=True, object_hook=StableDict)) # default value\n else:\n raise ValueError('kind must be \"server\", \"client\", \"manifest\", \"archive\" or \"key\"')\n return Unpacker(**args)\n\n\ndef bigint_to_int(mtime):\n \"\"\"Convert bytearray to int\n \"\"\"\n if isinstance(mtime, bytes):\n return int.from_bytes(mtime, 'little', signed=True)\n return mtime\n\n\ndef int_to_bigint(value):\n \"\"\"Convert integers larger than 64 bits to bytearray\n\n Smaller integers are left alone\n \"\"\"\n if value.bit_length() > 63:\n return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)\n return value\n", "path": "src/borg/helpers/msgpack.py"}]} | 2,491 | 158 |
gh_patches_debug_2245 | rasdani/github-patches | git_diff | pypi__warehouse-3928 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Missing legacy redirection from pypi.python.org/pypi/
**Describe the bug**
Redirections from `https://pypi.python.org/pypi/` are not handled (only redirected to `https://pypi.org/pypi/` by varnish (fastly)).
As https://pypi.org/pypi/ does not exist, it creates some broken links.
**Expected behavior**
A 301 to `https://pypi.org/`, simply.
**To Reproduce**
```$ curl -sI https://pypi.python.org/pypi/
HTTP/2 301
server: Varnish
retry-after: 0
location: https://pypi.org/pypi/
[...redacted for readability...]
```
</issue>
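(The fix for this record is not shown above. Purely as an illustration of the general Pyramid mechanism involved — not necessarily how Warehouse implements it — a legacy path such as `/pypi/` can be pointed at the index with a route plus a permanent-redirect view. Every name below is invented for the sketch.)

```python
# Illustrative only - names and wiring are assumptions, not Warehouse's actual code.
from pyramid.httpexceptions import HTTPMovedPermanently


def pypi_legacy_redirect(request):
    # 301 to the site index, mirroring what the issue expects for /pypi/.
    return HTTPMovedPermanently(request.route_path("index"))


def includeme(config):
    config.add_route("legacy.pypi", "/pypi/")
    config.add_view(pypi_legacy_redirect, route_name="legacy.pypi")
```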
<code>
[start of warehouse/routes.py]
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13
14 def includeme(config):
15 # We need to get the value of the Warehouse and Forklift domains, we'll use
16 # these to segregate the Warehouse routes from the Forklift routes until
17 # Forklift is properly split out into it's own project.
18 warehouse = config.get_settings().get("warehouse.domain")
19 files_url = config.get_settings()["files.url"]
20
21 # Simple Route for health checks.
22 config.add_route("health", "/_health/")
23
24 # Internal route to make it easier to force a particular status for
25 # debugging HTTPException templates.
26 config.add_route("force-status", "/_force-status/{status:[45]\d\d}/")
27
28 # Basic global routes
29 config.add_route("index", "/", domain=warehouse)
30 config.add_route("robots.txt", "/robots.txt", domain=warehouse)
31 config.add_route("opensearch.xml", "/opensearch.xml", domain=warehouse)
32 config.add_route("index.sitemap.xml", "/sitemap.xml", domain=warehouse)
33 config.add_route(
34 "bucket.sitemap.xml",
35 "/{bucket}.sitemap.xml",
36 domain=warehouse,
37 )
38
39 # Some static, template driven pages
40 config.add_template_view("help", "/help/", "pages/help.html")
41 config.add_template_view("security", "/security/", "pages/security.html")
42 config.add_template_view(
43 "sponsors",
44 "/sponsors/",
45 # Use the full resource path here to make it able to be overridden by
46 # pypi-theme.
47 "warehouse:templates/pages/sponsors.html",
48 )
49
50 # Our legal policies
51 config.add_policy("terms-of-use", "terms.md")
52
53 # HTML Snippets for including into other pages.
54 config.add_route(
55 "includes.current-user-indicator",
56 "/_includes/current-user-indicator/",
57 domain=warehouse,
58 )
59 config.add_route(
60 "includes.flash-messages",
61 "/_includes/flash-messages/",
62 domain=warehouse,
63 )
64 config.add_route(
65 "includes.current-user-profile-callout",
66 "/_includes/current-user-profile-callout/{username}",
67 factory="warehouse.accounts.models:UserFactory",
68 traverse="/{username}",
69 domain=warehouse,
70 )
71 config.add_route(
72 "includes.edit-project-button",
73 "/_includes/edit-project-button/{project_name}",
74 factory="warehouse.packaging.models:ProjectFactory",
75 traverse="/{project_name}",
76 domain=warehouse,
77 )
78 config.add_route(
79 "includes.profile-actions",
80 "/_includes/profile-actions/{username}",
81 factory="warehouse.accounts.models:UserFactory",
82 traverse="/{username}",
83 domain=warehouse,
84 )
85
86 # Classifier Routes
87 config.add_route("classifiers", "/classifiers/", domain=warehouse)
88
89 # Search Routes
90 config.add_route("search", "/search/", domain=warehouse)
91
92 # Accounts
93 config.add_route(
94 "accounts.profile",
95 "/user/{username}/",
96 factory="warehouse.accounts.models:UserFactory",
97 traverse="/{username}",
98 domain=warehouse,
99 )
100 config.add_route("accounts.login", "/account/login/", domain=warehouse)
101 config.add_route("accounts.logout", "/account/logout/", domain=warehouse)
102 config.add_route(
103 "accounts.register",
104 "/account/register/",
105 domain=warehouse,
106 )
107 config.add_route(
108 "accounts.request-password-reset",
109 "/account/request-password-reset/",
110 domain=warehouse,
111 )
112 config.add_route(
113 "accounts.reset-password",
114 "/account/reset-password/",
115 domain=warehouse,
116 )
117 config.add_route(
118 "accounts.verify-email",
119 "/account/verify-email/",
120 domain=warehouse,
121 )
122
123 # Management (views for logged-in users)
124 config.add_route("manage.account", "/manage/account/", domain=warehouse)
125 config.add_route("manage.projects", "/manage/projects/", domain=warehouse)
126 config.add_route(
127 "manage.project.settings",
128 "/manage/project/{project_name}/settings/",
129 factory="warehouse.packaging.models:ProjectFactory",
130 traverse="/{project_name}",
131 domain=warehouse,
132 )
133 config.add_route(
134 "manage.project.delete_project",
135 "/manage/project/{project_name}/delete_project/",
136 factory="warehouse.packaging.models:ProjectFactory",
137 traverse="/{project_name}",
138 domain=warehouse,
139 )
140 config.add_route(
141 "manage.project.destroy_docs",
142 "/manage/project/{project_name}/delete_project_docs/",
143 factory="warehouse.packaging.models:ProjectFactory",
144 traverse="/{project_name}",
145 domain=warehouse,
146 )
147 config.add_route(
148 "manage.project.releases",
149 "/manage/project/{project_name}/releases/",
150 factory="warehouse.packaging.models:ProjectFactory",
151 traverse="/{project_name}",
152 domain=warehouse,
153 )
154 config.add_route(
155 "manage.project.release",
156 "/manage/project/{project_name}/release/{version}/",
157 factory="warehouse.packaging.models:ProjectFactory",
158 traverse="/{project_name}/{version}",
159 domain=warehouse,
160 )
161 config.add_route(
162 "manage.project.roles",
163 "/manage/project/{project_name}/collaboration/",
164 factory="warehouse.packaging.models:ProjectFactory",
165 traverse="/{project_name}",
166 domain=warehouse,
167 )
168 config.add_route(
169 "manage.project.change_role",
170 "/manage/project/{project_name}/collaboration/change/",
171 factory="warehouse.packaging.models:ProjectFactory",
172 traverse="/{project_name}",
173 domain=warehouse,
174 )
175 config.add_route(
176 "manage.project.delete_role",
177 "/manage/project/{project_name}/collaboration/delete/",
178 factory="warehouse.packaging.models:ProjectFactory",
179 traverse="/{project_name}",
180 domain=warehouse,
181 )
182 config.add_route(
183 "manage.project.documentation",
184 "/manage/project/{project_name}/documentation/",
185 factory="warehouse.packaging.models:ProjectFactory",
186 traverse="/{project_name}",
187 domain=warehouse,
188 )
189 config.add_route(
190 "manage.project.history",
191 "/manage/project/{project_name}/history/",
192 factory="warehouse.packaging.models:ProjectFactory",
193 traverse="/{project_name}",
194 domain=warehouse,
195 )
196
197 # Packaging
198 config.add_redirect('/p/{name}/', '/project/{name}/', domain=warehouse)
199 config.add_route(
200 "packaging.project",
201 "/project/{name}/",
202 factory="warehouse.packaging.models:ProjectFactory",
203 traverse="/{name}",
204 domain=warehouse,
205 )
206 config.add_route(
207 "packaging.release",
208 "/project/{name}/{version}/",
209 factory="warehouse.packaging.models:ProjectFactory",
210 traverse="/{name}/{version}",
211 domain=warehouse,
212 )
213 config.add_route("packaging.file", files_url)
214
215 # SES Webhooks
216 config.add_route("ses.hook", "/_/ses-hook/", domain=warehouse)
217
218 # RSS
219 config.add_route("rss.updates", "/rss/updates.xml", domain=warehouse)
220 config.add_route("rss.packages", "/rss/packages.xml", domain=warehouse)
221
222 # Legacy URLs
223 config.add_route("legacy.api.simple.index", "/simple/", domain=warehouse)
224 config.add_route(
225 "legacy.api.simple.detail",
226 "/simple/{name}/",
227 factory="warehouse.packaging.models:ProjectFactory",
228 traverse="/{name}/",
229 read_only=True,
230 domain=warehouse,
231 )
232 config.add_route(
233 "legacy.api.json.project",
234 "/pypi/{name}/json",
235 factory="warehouse.packaging.models:ProjectFactory",
236 traverse="/{name}",
237 read_only=True,
238 domain=warehouse,
239 )
240 config.add_route(
241 "legacy.api.json.release",
242 "/pypi/{name}/{version}/json",
243 factory="warehouse.packaging.models:ProjectFactory",
244 traverse="/{name}/{version}",
245 read_only=True,
246 domain=warehouse,
247 )
248
249 # Legacy Action URLs
250 # TODO: We should probably add Warehouse routes for these that just error
251 # and direct people to use upload.pypi.io
252 config.add_pypi_action_route(
253 "legacy.api.pypi.file_upload",
254 "file_upload",
255 domain=warehouse,
256 )
257 config.add_pypi_action_route(
258 "legacy.api.pypi.submit",
259 "submit",
260 domain=warehouse,
261 )
262 config.add_pypi_action_route(
263 "legacy.api.pypi.submit_pkg_info",
264 "submit_pkg_info",
265 domain=warehouse,
266 )
267 config.add_pypi_action_route(
268 "legacy.api.pypi.doc_upload",
269 "doc_upload",
270 domain=warehouse,
271 )
272 config.add_pypi_action_route(
273 "legacy.api.pypi.doap",
274 "doap",
275 domain=warehouse,
276 )
277 config.add_pypi_action_route(
278 "legacy.api.pypi.list_classifiers",
279 "list_classifiers",
280 domain=warehouse,
281 )
282 config.add_pypi_action_route(
283 'legacy.api.pypi.search',
284 'search',
285 domain=warehouse,
286 )
287 config.add_pypi_action_route(
288 'legacy.api.pypi.browse',
289 'browse',
290 domain=warehouse,
291 )
292 config.add_pypi_action_route(
293 'legacy.api.pypi.files',
294 'files',
295 domain=warehouse,
296 )
297 config.add_pypi_action_route(
298 'legacy.api.pypi.display',
299 'display',
300 domain=warehouse,
301 )
302
303 # Legacy XMLRPC
304 config.add_xmlrpc_endpoint(
305 "pypi",
306 pattern="/pypi",
307 header="Content-Type:text/xml",
308 domain=warehouse,
309 )
310 config.add_xmlrpc_endpoint(
311 "pypi_slash",
312 pattern="/pypi/",
313 header="Content-Type:text/xml",
314 domain=warehouse,
315 )
316 config.add_xmlrpc_endpoint(
317 "RPC2",
318 pattern="/RPC2",
319 header="Content-Type:text/xml",
320 domain=warehouse,
321 )
322
323 # Legacy Documentation
324 config.add_route("legacy.docs", config.registry.settings["docs.url"])
325
326 # Legacy Redirects
327 config.add_redirect("/pypi/{name}/", "/project/{name}/", domain=warehouse)
328 config.add_redirect(
329 "/pypi/{name}/{version}/",
330 "/project/{name}/{version}/",
331 domain=warehouse,
332 )
333 config.add_redirect("/packages/{path:.*}", files_url, domain=warehouse)
334
335 # Legacy Action Redirects
336 config.add_pypi_action_redirect(
337 "rss",
338 "/rss/updates.xml",
339 domain=warehouse,
340 )
341 config.add_pypi_action_redirect(
342 "packages_rss",
343 "/rss/packages.xml",
344 domain=warehouse,
345 )
346
[end of warehouse/routes.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/warehouse/routes.py b/warehouse/routes.py
--- a/warehouse/routes.py
+++ b/warehouse/routes.py
@@ -330,6 +330,7 @@
"/project/{name}/{version}/",
domain=warehouse,
)
+ config.add_redirect("/pypi/", "/", domain=warehouse)
config.add_redirect("/packages/{path:.*}", files_url, domain=warehouse)
# Legacy Action Redirects
| {"golden_diff": "diff --git a/warehouse/routes.py b/warehouse/routes.py\n--- a/warehouse/routes.py\n+++ b/warehouse/routes.py\n@@ -330,6 +330,7 @@\n \"/project/{name}/{version}/\",\n domain=warehouse,\n )\n+ config.add_redirect(\"/pypi/\", \"/\", domain=warehouse)\n config.add_redirect(\"/packages/{path:.*}\", files_url, domain=warehouse)\n \n # Legacy Action Redirects\n", "issue": "Missing legacy redirection from pypi.python.org/pypi/\n**Describe the bug**\r\nRedirections from `https://pypi.python.org/pypi/` are not handled (only redirected to `https://pypi.org/pypi/` by varnish (fastly)).\r\n\r\nAs https://pypi.org/pypi/ does not exists, it creates some broken links.\r\n\r\n**Expected behavior**\r\nA 301 to `https://pypi.org/`, simply.\r\n\r\n**To Reproduce**\r\n```$ curl -sI https://pypi.python.org/pypi/\r\nHTTP/2 301 \r\nserver: Varnish\r\nretry-after: 0\r\nlocation: https://pypi.org/pypi/\r\n[...redacted for readability...]\r\n```\r\n\r\n\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\ndef includeme(config):\n # We need to get the value of the Warehouse and Forklift domains, we'll use\n # these to segregate the Warehouse routes from the Forklift routes until\n # Forklift is properly split out into it's own project.\n warehouse = config.get_settings().get(\"warehouse.domain\")\n files_url = config.get_settings()[\"files.url\"]\n\n # Simple Route for health checks.\n config.add_route(\"health\", \"/_health/\")\n\n # Internal route to make it easier to force a particular status for\n # debugging HTTPException templates.\n config.add_route(\"force-status\", \"/_force-status/{status:[45]\\d\\d}/\")\n\n # Basic global routes\n config.add_route(\"index\", \"/\", domain=warehouse)\n config.add_route(\"robots.txt\", \"/robots.txt\", domain=warehouse)\n config.add_route(\"opensearch.xml\", \"/opensearch.xml\", domain=warehouse)\n config.add_route(\"index.sitemap.xml\", \"/sitemap.xml\", domain=warehouse)\n config.add_route(\n \"bucket.sitemap.xml\",\n \"/{bucket}.sitemap.xml\",\n domain=warehouse,\n )\n\n # Some static, template driven pages\n config.add_template_view(\"help\", \"/help/\", \"pages/help.html\")\n config.add_template_view(\"security\", \"/security/\", \"pages/security.html\")\n config.add_template_view(\n \"sponsors\",\n \"/sponsors/\",\n # Use the full resource path here to make it able to be overridden by\n # pypi-theme.\n \"warehouse:templates/pages/sponsors.html\",\n )\n\n # Our legal policies\n config.add_policy(\"terms-of-use\", \"terms.md\")\n\n # HTML Snippets for including into other pages.\n config.add_route(\n \"includes.current-user-indicator\",\n \"/_includes/current-user-indicator/\",\n domain=warehouse,\n )\n config.add_route(\n \"includes.flash-messages\",\n \"/_includes/flash-messages/\",\n domain=warehouse,\n )\n config.add_route(\n \"includes.current-user-profile-callout\",\n \"/_includes/current-user-profile-callout/{username}\",\n factory=\"warehouse.accounts.models:UserFactory\",\n traverse=\"/{username}\",\n 
domain=warehouse,\n )\n config.add_route(\n \"includes.edit-project-button\",\n \"/_includes/edit-project-button/{project_name}\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"includes.profile-actions\",\n \"/_includes/profile-actions/{username}\",\n factory=\"warehouse.accounts.models:UserFactory\",\n traverse=\"/{username}\",\n domain=warehouse,\n )\n\n # Classifier Routes\n config.add_route(\"classifiers\", \"/classifiers/\", domain=warehouse)\n\n # Search Routes\n config.add_route(\"search\", \"/search/\", domain=warehouse)\n\n # Accounts\n config.add_route(\n \"accounts.profile\",\n \"/user/{username}/\",\n factory=\"warehouse.accounts.models:UserFactory\",\n traverse=\"/{username}\",\n domain=warehouse,\n )\n config.add_route(\"accounts.login\", \"/account/login/\", domain=warehouse)\n config.add_route(\"accounts.logout\", \"/account/logout/\", domain=warehouse)\n config.add_route(\n \"accounts.register\",\n \"/account/register/\",\n domain=warehouse,\n )\n config.add_route(\n \"accounts.request-password-reset\",\n \"/account/request-password-reset/\",\n domain=warehouse,\n )\n config.add_route(\n \"accounts.reset-password\",\n \"/account/reset-password/\",\n domain=warehouse,\n )\n config.add_route(\n \"accounts.verify-email\",\n \"/account/verify-email/\",\n domain=warehouse,\n )\n\n # Management (views for logged-in users)\n config.add_route(\"manage.account\", \"/manage/account/\", domain=warehouse)\n config.add_route(\"manage.projects\", \"/manage/projects/\", domain=warehouse)\n config.add_route(\n \"manage.project.settings\",\n \"/manage/project/{project_name}/settings/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.delete_project\",\n \"/manage/project/{project_name}/delete_project/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.destroy_docs\",\n \"/manage/project/{project_name}/delete_project_docs/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.releases\",\n \"/manage/project/{project_name}/releases/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.release\",\n \"/manage/project/{project_name}/release/{version}/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}/{version}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.roles\",\n \"/manage/project/{project_name}/collaboration/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.change_role\",\n \"/manage/project/{project_name}/collaboration/change/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.delete_role\",\n \"/manage/project/{project_name}/collaboration/delete/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.documentation\",\n \"/manage/project/{project_name}/documentation/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n 
traverse=\"/{project_name}\",\n domain=warehouse,\n )\n config.add_route(\n \"manage.project.history\",\n \"/manage/project/{project_name}/history/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{project_name}\",\n domain=warehouse,\n )\n\n # Packaging\n config.add_redirect('/p/{name}/', '/project/{name}/', domain=warehouse)\n config.add_route(\n \"packaging.project\",\n \"/project/{name}/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{name}\",\n domain=warehouse,\n )\n config.add_route(\n \"packaging.release\",\n \"/project/{name}/{version}/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{name}/{version}\",\n domain=warehouse,\n )\n config.add_route(\"packaging.file\", files_url)\n\n # SES Webhooks\n config.add_route(\"ses.hook\", \"/_/ses-hook/\", domain=warehouse)\n\n # RSS\n config.add_route(\"rss.updates\", \"/rss/updates.xml\", domain=warehouse)\n config.add_route(\"rss.packages\", \"/rss/packages.xml\", domain=warehouse)\n\n # Legacy URLs\n config.add_route(\"legacy.api.simple.index\", \"/simple/\", domain=warehouse)\n config.add_route(\n \"legacy.api.simple.detail\",\n \"/simple/{name}/\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{name}/\",\n read_only=True,\n domain=warehouse,\n )\n config.add_route(\n \"legacy.api.json.project\",\n \"/pypi/{name}/json\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{name}\",\n read_only=True,\n domain=warehouse,\n )\n config.add_route(\n \"legacy.api.json.release\",\n \"/pypi/{name}/{version}/json\",\n factory=\"warehouse.packaging.models:ProjectFactory\",\n traverse=\"/{name}/{version}\",\n read_only=True,\n domain=warehouse,\n )\n\n # Legacy Action URLs\n # TODO: We should probably add Warehouse routes for these that just error\n # and direct people to use upload.pypi.io\n config.add_pypi_action_route(\n \"legacy.api.pypi.file_upload\",\n \"file_upload\",\n domain=warehouse,\n )\n config.add_pypi_action_route(\n \"legacy.api.pypi.submit\",\n \"submit\",\n domain=warehouse,\n )\n config.add_pypi_action_route(\n \"legacy.api.pypi.submit_pkg_info\",\n \"submit_pkg_info\",\n domain=warehouse,\n )\n config.add_pypi_action_route(\n \"legacy.api.pypi.doc_upload\",\n \"doc_upload\",\n domain=warehouse,\n )\n config.add_pypi_action_route(\n \"legacy.api.pypi.doap\",\n \"doap\",\n domain=warehouse,\n )\n config.add_pypi_action_route(\n \"legacy.api.pypi.list_classifiers\",\n \"list_classifiers\",\n domain=warehouse,\n )\n config.add_pypi_action_route(\n 'legacy.api.pypi.search',\n 'search',\n domain=warehouse,\n )\n config.add_pypi_action_route(\n 'legacy.api.pypi.browse',\n 'browse',\n domain=warehouse,\n )\n config.add_pypi_action_route(\n 'legacy.api.pypi.files',\n 'files',\n domain=warehouse,\n )\n config.add_pypi_action_route(\n 'legacy.api.pypi.display',\n 'display',\n domain=warehouse,\n )\n\n # Legacy XMLRPC\n config.add_xmlrpc_endpoint(\n \"pypi\",\n pattern=\"/pypi\",\n header=\"Content-Type:text/xml\",\n domain=warehouse,\n )\n config.add_xmlrpc_endpoint(\n \"pypi_slash\",\n pattern=\"/pypi/\",\n header=\"Content-Type:text/xml\",\n domain=warehouse,\n )\n config.add_xmlrpc_endpoint(\n \"RPC2\",\n pattern=\"/RPC2\",\n header=\"Content-Type:text/xml\",\n domain=warehouse,\n )\n\n # Legacy Documentation\n config.add_route(\"legacy.docs\", config.registry.settings[\"docs.url\"])\n\n # Legacy Redirects\n config.add_redirect(\"/pypi/{name}/\", \"/project/{name}/\", domain=warehouse)\n config.add_redirect(\n 
\"/pypi/{name}/{version}/\",\n \"/project/{name}/{version}/\",\n domain=warehouse,\n )\n config.add_redirect(\"/packages/{path:.*}\", files_url, domain=warehouse)\n\n # Legacy Action Redirects\n config.add_pypi_action_redirect(\n \"rss\",\n \"/rss/updates.xml\",\n domain=warehouse,\n )\n config.add_pypi_action_redirect(\n \"packages_rss\",\n \"/rss/packages.xml\",\n domain=warehouse,\n )\n", "path": "warehouse/routes.py"}]} | 3,998 | 98 |
gh_patches_debug_9887 | rasdani/github-patches | git_diff | mars-project__mars-151 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[BUG] mars-* scripts not marked as executable
**Describe the bug**
Scripts in the scripts/ dir are not marked as executable when packed into wheels, so when they are installed into venvs they cannot be executed.
**To Reproduce**
To help us reproducing this bug, please provide information below:
1. Your Python version
Python 3.7.1 installed with pyenv
2. The version of Mars you use
0.1.0b1
3. Versions of crucial packages, such as numpy, scipy and protobuf
not relevant
4. Full stack of the error.
NA
5. Minimized code to reproduce the error.
mars-scheduler ......
</issue>
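For background on the two ways setuptools can ship command-line tools: files listed in `scripts=` are copied as-is (so their permission bits and shebangs matter), while `console_scripts` entry points let pip generate the launchers itself. A minimal sketch of the entry-point form; the package and module names here are illustrative, not taken from the project:

```python
from setuptools import setup

setup(
    name="example-package",
    packages=["example_package"],
    # Each entry maps "command = module.path:callable"; pip creates an
    # executable wrapper for it, so no file needs the +x bit in the wheel.
    entry_points={
        "console_scripts": [
            "example-scheduler = example_package.scheduler:main",
        ]
    },
)
```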
<code>
[start of setup.py]
1 # Copyright 1999-2017 Alibaba Group Holding Ltd.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import os
16 import sys
17 from setuptools import setup, find_packages, Extension
18
19 import numpy as np
20 from Cython.Build import cythonize
21 from Cython.Distutils import build_ext
22
23 repo_root = os.path.dirname(os.path.abspath(__file__))
24
25 try:
26 execfile
27 except NameError:
28 def execfile(fname, globs, locs=None):
29 locs = locs or globs
30 exec(compile(open(fname).read(), fname, "exec"), globs, locs)
31
32 version_file_path = os.path.join(repo_root, 'mars', '_version.py')
33 version_ns = {'__file__': version_file_path}
34 execfile(version_file_path, version_ns)
35
36 requirements = []
37 with open(os.path.join(repo_root, 'requirements.txt'), 'r') as f:
38 requirements.extend(f.read().splitlines())
39
40
41 extra_requirements = []
42 with open(os.path.join(repo_root, 'requirements-extra.txt'), 'r') as f:
43 extra_requirements.extend(f.read().splitlines())
44
45
46 long_description = None
47 if os.path.exists(os.path.join(repo_root, 'README.rst')):
48 with open(os.path.join(repo_root, 'README.rst')) as f:
49 long_description = f.read()
50
51
52 if os.path.exists(os.path.join(repo_root, '.git')):
53 git_info = version_ns['get_git_info']()
54 if git_info:
55 with open(os.path.join(repo_root, 'mars', '.git-branch'), 'w') as git_file:
56 git_file.write('%s %s' % git_info)
57
58 cythonize_kw = dict(language_level=sys.version_info[0])
59 extension_kw = dict()
60 if 'CI_MODE' in os.environ:
61 extension_kw['define_macros'] = [('CYTHON_TRACE_NOGIL', '1'), ('CYTHON_TRACE', '1')]
62 cythonize_kw['compiler_directives'] = {'linetrace': True, 'binding': True}
63
64 if 'MSC' in sys.version:
65 extra_compile_args = ['/Ot', '/I' + os.path.join(repo_root, 'misc')]
66 extension_kw['extra_compile_args'] = extra_compile_args
67 else:
68 extra_compile_args = ['-O3']
69 extension_kw['extra_compile_args'] = extra_compile_args
70
71 extension_kw['include_dirs'] = [np.get_include()]
72 extensions = [
73 Extension('mars.graph', ['mars/graph.pyx'], **extension_kw),
74 Extension('mars.fuse', ['mars/fuse.pyx'], **extension_kw),
75 Extension('mars.utils_c', ['mars/utils_c.pyx'], **extension_kw),
76 Extension('mars.lib.gipc', ['mars/lib/gipc.pyx'], **extension_kw),
77 Extension('mars.actors.core', ['mars/actors/core.pyx'], **extension_kw),
78 Extension('mars.actors.distributor', ['mars/actors/distributor.pyx'], **extension_kw),
79 Extension('mars.actors.cluster', ['mars/actors/cluster.pyx'], **extension_kw),
80 Extension('mars.actors.pool.messages', ['mars/actors/pool/messages.pyx'], **extension_kw),
81 Extension('mars.actors.pool.utils', ['mars/actors/pool/utils.pyx'], **extension_kw),
82 Extension('mars.actors.pool.gevent_pool', ['mars/actors/pool/gevent_pool.pyx'], **extension_kw),
83 Extension('mars.serialize.core', ['mars/serialize/core.pyx'], **extension_kw),
84 Extension('mars.serialize.pbserializer', ['mars/serialize/pbserializer.pyx'], **extension_kw),
85 Extension('mars.serialize.jsonserializer', ['mars/serialize/jsonserializer.pyx'], **extension_kw),
86 ]
87
88
89 setup_options = dict(
90 name='pymars',
91 version=version_ns['__version__'],
92 description='MARS: a tensor-based unified framework for large-scale data computation.',
93 long_description=long_description,
94 author='Qin Xuye',
95 author_email='[email protected]',
96 maintainer='Qin Xuye',
97 maintainer_email='[email protected]',
98 url='http://github.com/mars-project/mars',
99 license='Apache License 2.0',
100 classifiers=[
101 'Operating System :: OS Independent',
102 'Programming Language :: Python',
103 'Programming Language :: Python :: 2',
104 'Programming Language :: Python :: 2.7',
105 'Programming Language :: Python :: 3',
106 'Programming Language :: Python :: 3.5',
107 'Programming Language :: Python :: 3.6',
108 'Programming Language :: Python :: 3.7',
109 'Programming Language :: Python :: Implementation :: CPython',
110 'Topic :: Software Development :: Libraries',
111 ],
112 packages=find_packages(exclude=('*.tests.*', '*.tests')),
113 include_package_data=True,
114 scripts=['scripts/mars-scheduler', 'scripts/mars-worker', 'scripts/mars-web'],
115 install_requires=requirements,
116 cmdclass={'build_ext': build_ext},
117 ext_modules=cythonize(extensions, **cythonize_kw),
118 extras_require={'distributed': extra_requirements}
119 )
120 setup(**setup_options)
121
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -111,7 +111,11 @@
],
packages=find_packages(exclude=('*.tests.*', '*.tests')),
include_package_data=True,
- scripts=['scripts/mars-scheduler', 'scripts/mars-worker', 'scripts/mars-web'],
+ entry_points={'console_scripts': [
+ 'mars-scheduler = mars.scheduler.__main__:main',
+ 'mars-worker = mars.worker.__main__:main',
+ 'mars-web = mars.web.__main__:main',
+ ]},
install_requires=requirements,
cmdclass={'build_ext': build_ext},
ext_modules=cythonize(extensions, **cythonize_kw),
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -111,7 +111,11 @@\n ],\n packages=find_packages(exclude=('*.tests.*', '*.tests')),\n include_package_data=True,\n- scripts=['scripts/mars-scheduler', 'scripts/mars-worker', 'scripts/mars-web'],\n+ entry_points={'console_scripts': [\n+ 'mars-scheduler = mars.scheduler.__main__:main',\n+ 'mars-worker = mars.worker.__main__:main',\n+ 'mars-web = mars.web.__main__:main',\n+ ]},\n install_requires=requirements,\n cmdclass={'build_ext': build_ext},\n ext_modules=cythonize(extensions, **cythonize_kw),\n", "issue": "[BUG] mars-* scripts not marked as executable\n**Describe the bug**\r\nScripts in scripts/ dir not marked as executable when packed into wheels, thus when installed in venvs, they cannot be installed.\r\n\r\n**To Reproduce**\r\nTo help us reproducing this bug, please provide information below:\r\n1. Your Python version\r\n Python 3.7.1 installed with pyenv\r\n2. The version of Mars you use\r\n 0.1.0b1\r\n3. Versions of crucial packages, such as numpy, scipy and protobuf\r\n not relevant\r\n4. Full stack of the error.\r\n NA\r\n5. Minimized code to reproduce the error.\r\n mars-scheduler ......\r\n\n", "before_files": [{"content": "# Copyright 1999-2017 Alibaba Group Holding Ltd.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport sys\nfrom setuptools import setup, find_packages, Extension\n\nimport numpy as np\nfrom Cython.Build import cythonize\nfrom Cython.Distutils import build_ext\n\nrepo_root = os.path.dirname(os.path.abspath(__file__))\n\ntry:\n execfile\nexcept NameError:\n def execfile(fname, globs, locs=None):\n locs = locs or globs\n exec(compile(open(fname).read(), fname, \"exec\"), globs, locs)\n\nversion_file_path = os.path.join(repo_root, 'mars', '_version.py')\nversion_ns = {'__file__': version_file_path}\nexecfile(version_file_path, version_ns)\n\nrequirements = []\nwith open(os.path.join(repo_root, 'requirements.txt'), 'r') as f:\n requirements.extend(f.read().splitlines())\n\n\nextra_requirements = []\nwith open(os.path.join(repo_root, 'requirements-extra.txt'), 'r') as f:\n extra_requirements.extend(f.read().splitlines())\n\n\nlong_description = None\nif os.path.exists(os.path.join(repo_root, 'README.rst')):\n with open(os.path.join(repo_root, 'README.rst')) as f:\n long_description = f.read()\n\n\nif os.path.exists(os.path.join(repo_root, '.git')):\n git_info = version_ns['get_git_info']()\n if git_info:\n with open(os.path.join(repo_root, 'mars', '.git-branch'), 'w') as git_file:\n git_file.write('%s %s' % git_info)\n\ncythonize_kw = dict(language_level=sys.version_info[0])\nextension_kw = dict()\nif 'CI_MODE' in os.environ:\n extension_kw['define_macros'] = [('CYTHON_TRACE_NOGIL', '1'), ('CYTHON_TRACE', '1')]\n cythonize_kw['compiler_directives'] = {'linetrace': True, 'binding': True}\n\nif 'MSC' in sys.version:\n extra_compile_args = ['/Ot', '/I' + os.path.join(repo_root, 'misc')]\n extension_kw['extra_compile_args'] = extra_compile_args\nelse:\n 
extra_compile_args = ['-O3']\n extension_kw['extra_compile_args'] = extra_compile_args\n\nextension_kw['include_dirs'] = [np.get_include()]\nextensions = [\n Extension('mars.graph', ['mars/graph.pyx'], **extension_kw),\n Extension('mars.fuse', ['mars/fuse.pyx'], **extension_kw),\n Extension('mars.utils_c', ['mars/utils_c.pyx'], **extension_kw),\n Extension('mars.lib.gipc', ['mars/lib/gipc.pyx'], **extension_kw),\n Extension('mars.actors.core', ['mars/actors/core.pyx'], **extension_kw),\n Extension('mars.actors.distributor', ['mars/actors/distributor.pyx'], **extension_kw),\n Extension('mars.actors.cluster', ['mars/actors/cluster.pyx'], **extension_kw),\n Extension('mars.actors.pool.messages', ['mars/actors/pool/messages.pyx'], **extension_kw),\n Extension('mars.actors.pool.utils', ['mars/actors/pool/utils.pyx'], **extension_kw),\n Extension('mars.actors.pool.gevent_pool', ['mars/actors/pool/gevent_pool.pyx'], **extension_kw),\n Extension('mars.serialize.core', ['mars/serialize/core.pyx'], **extension_kw),\n Extension('mars.serialize.pbserializer', ['mars/serialize/pbserializer.pyx'], **extension_kw),\n Extension('mars.serialize.jsonserializer', ['mars/serialize/jsonserializer.pyx'], **extension_kw),\n]\n\n\nsetup_options = dict(\n name='pymars',\n version=version_ns['__version__'],\n description='MARS: a tensor-based unified framework for large-scale data computation.',\n long_description=long_description,\n author='Qin Xuye',\n author_email='[email protected]',\n maintainer='Qin Xuye',\n maintainer_email='[email protected]',\n url='http://github.com/mars-project/mars',\n license='Apache License 2.0',\n classifiers=[\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Topic :: Software Development :: Libraries',\n ],\n packages=find_packages(exclude=('*.tests.*', '*.tests')),\n include_package_data=True,\n scripts=['scripts/mars-scheduler', 'scripts/mars-worker', 'scripts/mars-web'],\n install_requires=requirements,\n cmdclass={'build_ext': build_ext},\n ext_modules=cythonize(extensions, **cythonize_kw),\n extras_require={'distributed': extra_requirements}\n)\nsetup(**setup_options)\n", "path": "setup.py"}]} | 2,138 | 164 |
gh_patches_debug_36630 | rasdani/github-patches | git_diff | aws-cloudformation__cfn-lint-505 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Pip install fails for 0.10.0
*cfn-lint version: (`cfn-lint --version`)*
0.10.0
*Description of issue.*
My pipeline started failing when installing the latest version via pip.
example:
`pip install cfn-lint==0.10.0`
```
[...]
.txt --single-version-externally-managed --compile:
running install
running build
running build_py
creating build
creating build/lib.linux-x86_64-3.7
copying regex_3/regex.py -> build/lib.linux-x86_64-3.7
copying regex_3/_regex_core.py -> build/lib.linux-x86_64-3.7
copying regex_3/test_regex.py -> build/lib.linux-x86_64-3.7
running build_ext
building '_regex' extension
creating build/temp.linux-x86_64-3.7
creating build/temp.linux-x86_64-3.7/regex_3
gcc -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -DTHREAD_STACK_SIZE=0x100000 -fPIC -I/usr/local/include/python3.7m -c regex_3/_regex.c -o build/temp.linux-x86_64-3.7/regex_3/_regex.o
unable to execute 'gcc': No such file or directory
error: command 'gcc' failed with exit status 1
----------------------------------------
Command "/usr/local/bin/python -u -c "import setuptools, tokenize;__file__='/tmp/pip-install-r143_0dz/regex/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\r\n', '\n');f.close();exec(compile(code, __file__, 'exec'))" install --record /tmp/pip-record-467tj48_/install-record.txt --single-version-externally-managed --compile" failed with error code 1 in /tmp/pip-install-r143_0dz/regex/
```
Version 0.9.2 works well.
Replicate with:
`docker run --rm -it python:3.7.1-alpine3.8 /bin/sh -c 'pip install cfn-lint==0.10.0'`
</issue>
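For context on why the install reaches for gcc at all: the failing build is the third-party `regex` package, a C extension that was added because the stdlib `re` module cannot express the `\p{...}` Unicode property classes used by rule E3507. A small sketch of that limitation (behaviour shown is for recent Python 3 versions):

```python
# Stdlib "re" does not support Unicode property classes such as \p{L};
# that is what the separate "regex" dependency (and its C extension) provided.
import re

try:
    re.compile(r"\p{L}+")
    print("pattern accepted (older interpreters may only warn)")
except re.error as exc:
    print("stdlib re rejects \\p{...}:", exc)
```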
<code>
[start of setup.py]
1 """
2 Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3
4 Permission is hereby granted, free of charge, to any person obtaining a copy of this
5 software and associated documentation files (the "Software"), to deal in the Software
6 without restriction, including without limitation the rights to use, copy, modify,
7 merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
8 permit persons to whom the Software is furnished to do so.
9
10 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
11 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
12 PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
13 HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
14 OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
15 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 """
17 import codecs
18 import re
19 from setuptools import find_packages
20 from setuptools import setup
21
22
23 def get_version(filename):
24 with codecs.open(filename, 'r', 'utf-8') as fp:
25 contents = fp.read()
26 return re.search(r"__version__ = ['\"]([^'\"]+)['\"]", contents).group(1)
27
28
29 version = get_version('src/cfnlint/version.py')
30
31
32 with open('README.md') as f:
33 readme = f.read()
34
35 setup(
36 name='cfn-lint',
37 version=version,
38 description=('checks cloudformation for practices and behaviour \
39 that could potentially be improved'),
40 long_description=readme,
41 long_description_content_type="text/markdown",
42 keywords='aws, lint',
43 author='kddejong',
44 author_email='[email protected]',
45 url='https://github.com/awslabs/cfn-python-lint',
46 package_dir={'': 'src'},
47 package_data={'cfnlint': [
48 'data/CloudSpecs/*.json',
49 'data/AdditionalSpecs/*.json',
50 'data/Serverless/*.json',
51 'data/CfnLintCli/config/schema.json'
52 ]},
53 packages=find_packages('src'),
54 zip_safe=False,
55 install_requires=[
56 'pyyaml',
57 'six~=1.11',
58 'requests>=2.15.0',
59 'aws-sam-translator>=1.8.0',
60 'jsonpatch',
61 'jsonschema~=2.6',
62 'pathlib2>=2.3.0;python_version<"3.4"',
63 'regex>=2018.11.07'
64 ],
65 python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
66 entry_points={
67 'console_scripts': [
68 'cfn-lint = cfnlint.__main__:main'
69 ]
70 },
71 license='MIT no attribution',
72 test_suite="unittest",
73 classifiers=[
74 'Development Status :: 5 - Production/Stable',
75 'Intended Audience :: Developers',
76 'License :: OSI Approved :: MIT License',
77 'Natural Language :: English',
78 'Operating System :: OS Independent',
79 'Programming Language :: Python :: 2',
80 'Programming Language :: Python :: 2.7',
81 'Programming Language :: Python :: 3',
82 'Programming Language :: Python :: 3.4',
83 'Programming Language :: Python :: 3.5',
84 'Programming Language :: Python :: 3.6',
85 'Programming Language :: Python :: 3.7',
86 ],
87 )
88
[end of setup.py]
[start of src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py]
1 """
2 Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3
4 Permission is hereby granted, free of charge, to any person obtaining a copy of this
5 software and associated documentation files (the "Software"), to deal in the Software
6 without restriction, including without limitation the rights to use, copy, modify,
7 merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
8 permit persons to whom the Software is furnished to do so.
9
10 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
11 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
12 PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
13 HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
14 OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
15 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 """
17 import regex
18 from cfnlint import CloudFormationLintRule
19 from cfnlint import RuleMatch
20
21
22 class ManagedPolicyDescription(CloudFormationLintRule):
23 """Check if IAM Policy Description is syntax correct"""
24 id = 'E3507'
25 shortdesc = 'Check if IAM Managed Policy description follows supported regex'
26 description = 'IAM Managed Policy description much comply with the regex [\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*'
27 source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iam-managedpolicy.html'
28 tags = ['properties', 'iam']
29
30 def __init__(self):
31 """Init"""
32 super(ManagedPolicyDescription, self).__init__()
33 self.resource_property_types.append('AWS::IAM::ManagedPolicy')
34
35 def check_value(self, value, path):
36 """Check the value"""
37 regex_string = r'^[\p{L}\p{M}\p{Z}\p{S}\p{N}\p{P}]+$'
38 r = regex.compile(regex_string)
39 if not r.match(value):
40 message = 'ManagedPolicy Description needs to follow regex pattern "{0}"'
41 return [
42 RuleMatch(path[:], message.format(regex_string))
43 ]
44
45 return []
46
47 def match_resource_properties(self, properties, _, path, cfn):
48 """Check CloudFormation Properties"""
49 matches = []
50
51 matches.extend(
52 cfn.check_value(
53 obj=properties, key='Description',
54 path=path[:],
55 check_value=self.check_value
56 ))
57
58 return matches
59
[end of src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -59,8 +59,7 @@
'aws-sam-translator>=1.8.0',
'jsonpatch',
'jsonschema~=2.6',
- 'pathlib2>=2.3.0;python_version<"3.4"',
- 'regex>=2018.11.07'
+ 'pathlib2>=2.3.0;python_version<"3.4"'
],
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
entry_points={
diff --git a/src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py b/src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py
deleted file mode 100644
--- a/src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py
+++ /dev/null
@@ -1,58 +0,0 @@
-"""
- Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
-
- Permission is hereby granted, free of charge, to any person obtaining a copy of this
- software and associated documentation files (the "Software"), to deal in the Software
- without restriction, including without limitation the rights to use, copy, modify,
- merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
- permit persons to whom the Software is furnished to do so.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
- INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
- PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
- HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
- SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
-import regex
-from cfnlint import CloudFormationLintRule
-from cfnlint import RuleMatch
-
-
-class ManagedPolicyDescription(CloudFormationLintRule):
- """Check if IAM Policy Description is syntax correct"""
- id = 'E3507'
- shortdesc = 'Check if IAM Managed Policy description follows supported regex'
- description = 'IAM Managed Policy description much comply with the regex [\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*'
- source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iam-managedpolicy.html'
- tags = ['properties', 'iam']
-
- def __init__(self):
- """Init"""
- super(ManagedPolicyDescription, self).__init__()
- self.resource_property_types.append('AWS::IAM::ManagedPolicy')
-
- def check_value(self, value, path):
- """Check the value"""
- regex_string = r'^[\p{L}\p{M}\p{Z}\p{S}\p{N}\p{P}]+$'
- r = regex.compile(regex_string)
- if not r.match(value):
- message = 'ManagedPolicy Description needs to follow regex pattern "{0}"'
- return [
- RuleMatch(path[:], message.format(regex_string))
- ]
-
- return []
-
- def match_resource_properties(self, properties, _, path, cfn):
- """Check CloudFormation Properties"""
- matches = []
-
- matches.extend(
- cfn.check_value(
- obj=properties, key='Description',
- path=path[:],
- check_value=self.check_value
- ))
-
- return matches
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -59,8 +59,7 @@\n 'aws-sam-translator>=1.8.0',\n 'jsonpatch',\n 'jsonschema~=2.6',\n- 'pathlib2>=2.3.0;python_version<\"3.4\"',\n- 'regex>=2018.11.07'\n+ 'pathlib2>=2.3.0;python_version<\"3.4\"'\n ],\n python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',\n entry_points={\ndiff --git a/src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py b/src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py\ndeleted file mode 100644\n--- a/src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py\n+++ /dev/null\n@@ -1,58 +0,0 @@\n-\"\"\"\n- Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n-\n- Permission is hereby granted, free of charge, to any person obtaining a copy of this\n- software and associated documentation files (the \"Software\"), to deal in the Software\n- without restriction, including without limitation the rights to use, copy, modify,\n- merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n- permit persons to whom the Software is furnished to do so.\n-\n- THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n- INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n- PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n- HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n- SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n-\"\"\"\n-import regex\n-from cfnlint import CloudFormationLintRule\n-from cfnlint import RuleMatch\n-\n-\n-class ManagedPolicyDescription(CloudFormationLintRule):\n- \"\"\"Check if IAM Policy Description is syntax correct\"\"\"\n- id = 'E3507'\n- shortdesc = 'Check if IAM Managed Policy description follows supported regex'\n- description = 'IAM Managed Policy description much comply with the regex [\\\\p{L}\\\\p{M}\\\\p{Z}\\\\p{S}\\\\p{N}\\\\p{P}]*'\n- source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iam-managedpolicy.html'\n- tags = ['properties', 'iam']\n-\n- def __init__(self):\n- \"\"\"Init\"\"\"\n- super(ManagedPolicyDescription, self).__init__()\n- self.resource_property_types.append('AWS::IAM::ManagedPolicy')\n-\n- def check_value(self, value, path):\n- \"\"\"Check the value\"\"\"\n- regex_string = r'^[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]+$'\n- r = regex.compile(regex_string)\n- if not r.match(value):\n- message = 'ManagedPolicy Description needs to follow regex pattern \"{0}\"'\n- return [\n- RuleMatch(path[:], message.format(regex_string))\n- ]\n-\n- return []\n-\n- def match_resource_properties(self, properties, _, path, cfn):\n- \"\"\"Check CloudFormation Properties\"\"\"\n- matches = []\n-\n- matches.extend(\n- cfn.check_value(\n- obj=properties, key='Description',\n- path=path[:],\n- check_value=self.check_value\n- ))\n-\n- return matches\n", "issue": "Pip install fails for 0.10.0\n*cfn-lint version: (`cfn-lint --version`)*\r\n\r\n0.10.0\r\n\r\n*Description of issue.*\r\n\r\nMy pipeline started failing when installing the latest version via pip.\r\n\r\nexample:\r\n\r\n`pip install cfn-lint==0.10.0`\r\n\r\n```\r\n[...]\r\n.txt --single-version-externally-managed --compile:\r\n running install\r\n running build\r\n running build_py\r\n creating build\r\n creating build/lib.linux-x86_64-3.7\r\n copying regex_3/regex.py -> build/lib.linux-x86_64-3.7\r\n 
copying regex_3/_regex_core.py -> build/lib.linux-x86_64-3.7\r\n copying regex_3/test_regex.py -> build/lib.linux-x86_64-3.7\r\n running build_ext\r\n building '_regex' extension\r\n creating build/temp.linux-x86_64-3.7\r\n creating build/temp.linux-x86_64-3.7/regex_3\r\n gcc -Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -DTHREAD_STACK_SIZE=0x100000 -fPIC -I/usr/local/include/python3.7m -c regex_3/_regex.c -o build/temp.linux-x86_64-3.7/regex_3/_regex.o\r\n unable to execute 'gcc': No such file or directory\r\n error: command 'gcc' failed with exit status 1\r\n \r\n ----------------------------------------\r\nCommand \"/usr/local/bin/python -u -c \"import setuptools, tokenize;__file__='/tmp/pip-install-r143_0dz/regex/setup.py';f=getattr(tokenize, 'open', open)(__file__);code=f.read().replace('\\r\\n', '\\n');f.close();exec(compile(code, __file__, 'exec'))\" install --record /tmp/pip-record-467tj48_/install-record.txt --single-version-externally-managed --compile\" failed with error code 1 in /tmp/pip-install-r143_0dz/regex/\r\n```\r\n\r\nVersion 0.9.2 works well.\r\n\r\nReplicate with:\r\n\r\n`docker run --rm -it python:3.7.1-alpine3.8 /bin/sh -c 'pip install cfn-lint==0.10.0'`\n", "before_files": [{"content": "\"\"\"\n Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nimport codecs\nimport re\nfrom setuptools import find_packages\nfrom setuptools import setup\n\n\ndef get_version(filename):\n with codecs.open(filename, 'r', 'utf-8') as fp:\n contents = fp.read()\n return re.search(r\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", contents).group(1)\n\n\nversion = get_version('src/cfnlint/version.py')\n\n\nwith open('README.md') as f:\n readme = f.read()\n\nsetup(\n name='cfn-lint',\n version=version,\n description=('checks cloudformation for practices and behaviour \\\n that could potentially be improved'),\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n keywords='aws, lint',\n author='kddejong',\n author_email='[email protected]',\n url='https://github.com/awslabs/cfn-python-lint',\n package_dir={'': 'src'},\n package_data={'cfnlint': [\n 'data/CloudSpecs/*.json',\n 'data/AdditionalSpecs/*.json',\n 'data/Serverless/*.json',\n 'data/CfnLintCli/config/schema.json'\n ]},\n packages=find_packages('src'),\n zip_safe=False,\n install_requires=[\n 'pyyaml',\n 'six~=1.11',\n 'requests>=2.15.0',\n 'aws-sam-translator>=1.8.0',\n 'jsonpatch',\n 'jsonschema~=2.6',\n 'pathlib2>=2.3.0;python_version<\"3.4\"',\n 'regex>=2018.11.07'\n ],\n python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',\n entry_points={\n 'console_scripts': [\n 'cfn-lint = cfnlint.__main__:main'\n ]\n },\n license='MIT no attribution',\n test_suite=\"unittest\",\n classifiers=[\n 'Development Status :: 5 - Production/Stable',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Natural Language :: English',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n ],\n)\n", "path": "setup.py"}, {"content": "\"\"\"\n Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nimport regex\nfrom cfnlint import CloudFormationLintRule\nfrom cfnlint import RuleMatch\n\n\nclass ManagedPolicyDescription(CloudFormationLintRule):\n \"\"\"Check if IAM Policy Description is syntax correct\"\"\"\n id = 'E3507'\n shortdesc = 'Check if IAM Managed Policy description follows supported regex'\n description = 'IAM Managed Policy description much comply with the regex [\\\\p{L}\\\\p{M}\\\\p{Z}\\\\p{S}\\\\p{N}\\\\p{P}]*'\n source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-iam-managedpolicy.html'\n tags = ['properties', 'iam']\n\n def __init__(self):\n \"\"\"Init\"\"\"\n super(ManagedPolicyDescription, self).__init__()\n self.resource_property_types.append('AWS::IAM::ManagedPolicy')\n\n def check_value(self, value, path):\n \"\"\"Check the value\"\"\"\n regex_string = r'^[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]+$'\n r = regex.compile(regex_string)\n if not r.match(value):\n message = 'ManagedPolicy Description needs to follow regex pattern \"{0}\"'\n return [\n RuleMatch(path[:], message.format(regex_string))\n ]\n\n return []\n\n def match_resource_properties(self, properties, _, path, cfn):\n \"\"\"Check CloudFormation Properties\"\"\"\n matches = []\n\n matches.extend(\n cfn.check_value(\n obj=properties, key='Description',\n path=path[:],\n check_value=self.check_value\n ))\n\n return matches\n", "path": "src/cfnlint/rules/resources/iam/ManagedPolicyDescription.py"}]} | 2,712 | 828 |
gh_patches_debug_11133 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-contrib-2552 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`remove_url_credentials` drops brackets from IPv6 hostnames
**Describe your environment**
Python 3.11.9, `opentelemetry-instrumentation` is auto-injected via the OpenTelemetry operator, `opentelemetry_util_http` is `0.44b0`
**Steps to reproduce**
```python
from opentelemetry.util.http import remove_url_credentials
literal_ipv6_url = "https://[::1]/somepath?query=foo"
remove_url_credentials(literal_ipv6_url)
# 'https://::1/somepath?query=foo' -- should be 'https://[::1]/somepath?query=foo'
literal_ipv6_url_with_port = "https://[::1]:12345/somepath?query=foo"
remove_url_credentials(literal_ipv6_url_with_port)
# 'https://::1:12345/somepath?query=foo -- should be 'https://[::1]:12345/somepath?query=foo'
literal_ipv6_url_with_auth = "https://someuser:somepass@[::1]:12345/somepath?query=foo"
remove_url_credentials(literal_ipv6_url_with_auth)
# 'https://::1:12345/somepath?query=foo' -- should be https://[::1]:12345/somepath?query=foo
```
**What is the expected behavior?**
The ipv6 host should remain inside `[]`
**What is the actual behavior?**
`[]` are stripped from the host
**Additional context**
https://github.com/open-telemetry/opentelemetry-python-contrib/blob/main/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py#L169 is the causing line. The `hostname` result on `urlparse` does not contain the brackets
```python
from urllib.parse import urlparse
parsed = urlparse(literal_ipv6_url_with_auth)
parsed
# ParseResult(scheme='https', netloc='someuser:somepass@[::1]:12345', path='/somepath', params='', query='query=foo', fragment='')
parsed.hostname
# '::1'
```
</issue>
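
For context on the root cause described above: `urlparse()` keeps the brackets in `netloc` even though `hostname` strips them, so an approach that removes only the userinfo portion of the netloc preserves IPv6 hosts. The sketch below is illustrative only (the helper name is made up; it is not the library's implementation):

```python
from urllib.parse import urlparse, urlunparse

def strip_credentials(url: str) -> str:
    # Keep the netloc as parsed (brackets intact) and drop only "user:pass@".
    parsed = urlparse(url)
    if not (parsed.scheme and parsed.netloc):
        return url
    _, _, host_port = parsed.netloc.rpartition("@")
    return urlunparse(parsed._replace(netloc=host_port))

print(strip_credentials("https://someuser:somepass@[::1]:12345/somepath?query=foo"))
# https://[::1]:12345/somepath?query=foo
```

The accepted patch further down in this record takes the same netloc-splitting approach.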
<code>
[start of util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py]
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from __future__ import annotations
16
17 from os import environ
18 from re import IGNORECASE as RE_IGNORECASE
19 from re import compile as re_compile
20 from re import search
21 from typing import Callable, Iterable, Optional
22 from urllib.parse import urlparse, urlunparse
23
24 from opentelemetry.semconv.trace import SpanAttributes
25
26 OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS = (
27 "OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS"
28 )
29 OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST = (
30 "OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST"
31 )
32 OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE = (
33 "OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE"
34 )
35
36 OTEL_PYTHON_INSTRUMENTATION_HTTP_CAPTURE_ALL_METHODS = (
37 "OTEL_PYTHON_INSTRUMENTATION_HTTP_CAPTURE_ALL_METHODS"
38 )
39
40 # List of recommended metrics attributes
41 _duration_attrs = {
42 SpanAttributes.HTTP_METHOD,
43 SpanAttributes.HTTP_HOST,
44 SpanAttributes.HTTP_SCHEME,
45 SpanAttributes.HTTP_STATUS_CODE,
46 SpanAttributes.HTTP_FLAVOR,
47 SpanAttributes.HTTP_SERVER_NAME,
48 SpanAttributes.NET_HOST_NAME,
49 SpanAttributes.NET_HOST_PORT,
50 }
51
52 _active_requests_count_attrs = {
53 SpanAttributes.HTTP_METHOD,
54 SpanAttributes.HTTP_HOST,
55 SpanAttributes.HTTP_SCHEME,
56 SpanAttributes.HTTP_FLAVOR,
57 SpanAttributes.HTTP_SERVER_NAME,
58 }
59
60
61 class ExcludeList:
62 """Class to exclude certain paths (given as a list of regexes) from tracing requests"""
63
64 def __init__(self, excluded_urls: Iterable[str]):
65 self._excluded_urls = excluded_urls
66 if self._excluded_urls:
67 self._regex = re_compile("|".join(excluded_urls))
68
69 def url_disabled(self, url: str) -> bool:
70 return bool(self._excluded_urls and search(self._regex, url))
71
72
73 class SanitizeValue:
74 """Class to sanitize (remove sensitive data from) certain headers (given as a list of regexes)"""
75
76 def __init__(self, sanitized_fields: Iterable[str]):
77 self._sanitized_fields = sanitized_fields
78 if self._sanitized_fields:
79 self._regex = re_compile("|".join(sanitized_fields), RE_IGNORECASE)
80
81 def sanitize_header_value(self, header: str, value: str) -> str:
82 return (
83 "[REDACTED]"
84 if (self._sanitized_fields and search(self._regex, header))
85 else value
86 )
87
88 def sanitize_header_values(
89 self,
90 headers: dict[str, str],
91 header_regexes: list[str],
92 normalize_function: Callable[[str], str],
93 ) -> dict[str, str]:
94 values: dict[str, str] = {}
95
96 if header_regexes:
97 header_regexes_compiled = re_compile(
98 "|".join("^" + i + "$" for i in header_regexes),
99 RE_IGNORECASE,
100 )
101
102 for header_name in list(
103 filter(
104 header_regexes_compiled.match,
105 headers.keys(),
106 )
107 ):
108 header_values = headers.get(header_name)
109 if header_values:
110 key = normalize_function(header_name.lower())
111 values[key] = [
112 self.sanitize_header_value(
113 header=header_name, value=header_values
114 )
115 ]
116
117 return values
118
119
120 _root = r"OTEL_PYTHON_{}"
121
122
123 def get_traced_request_attrs(instrumentation):
124 traced_request_attrs = environ.get(
125 _root.format(f"{instrumentation}_TRACED_REQUEST_ATTRS"), []
126 )
127
128 if traced_request_attrs:
129 traced_request_attrs = [
130 traced_request_attr.strip()
131 for traced_request_attr in traced_request_attrs.split(",")
132 ]
133
134 return traced_request_attrs
135
136
137 def get_excluded_urls(instrumentation: str) -> ExcludeList:
138 # Get instrumentation-specific excluded URLs. If not set, retrieve them
139 # from generic variable.
140 excluded_urls = environ.get(
141 _root.format(f"{instrumentation}_EXCLUDED_URLS"),
142 environ.get(_root.format("EXCLUDED_URLS"), ""),
143 )
144
145 return parse_excluded_urls(excluded_urls)
146
147
148 def parse_excluded_urls(excluded_urls: str) -> ExcludeList:
149 """
150 Small helper to put an arbitrary url list inside an ExcludeList
151 """
152 if excluded_urls:
153 excluded_url_list = [
154 excluded_url.strip() for excluded_url in excluded_urls.split(",")
155 ]
156 else:
157 excluded_url_list = []
158
159 return ExcludeList(excluded_url_list)
160
161
162 def remove_url_credentials(url: str) -> str:
163 """Given a string url, remove the username and password only if it is a valid url"""
164
165 try:
166 parsed = urlparse(url)
167 if all([parsed.scheme, parsed.netloc]): # checks for valid url
168 parsed_url = urlparse(url)
169 netloc = (
170 (":".join(((parsed_url.hostname or ""), str(parsed_url.port))))
171 if parsed_url.port
172 else (parsed_url.hostname or "")
173 )
174 return urlunparse(
175 (
176 parsed_url.scheme,
177 netloc,
178 parsed_url.path,
179 parsed_url.params,
180 parsed_url.query,
181 parsed_url.fragment,
182 )
183 )
184 except ValueError: # an unparsable url was passed
185 pass
186 return url
187
188
189 def normalise_request_header_name(header: str) -> str:
190 key = header.lower().replace("-", "_")
191 return f"http.request.header.{key}"
192
193
194 def normalise_response_header_name(header: str) -> str:
195 key = header.lower().replace("-", "_")
196 return f"http.response.header.{key}"
197
198
199 def sanitize_method(method: Optional[str]) -> Optional[str]:
200 if method is None:
201 return None
202 method = method.upper()
203 if (
204 environ.get(OTEL_PYTHON_INSTRUMENTATION_HTTP_CAPTURE_ALL_METHODS)
205 or
206 # Based on https://www.rfc-editor.org/rfc/rfc7231#section-4.1 and https://www.rfc-editor.org/rfc/rfc5789#section-2.
207 method
208 in [
209 "GET",
210 "HEAD",
211 "POST",
212 "PUT",
213 "DELETE",
214 "CONNECT",
215 "OPTIONS",
216 "TRACE",
217 "PATCH",
218 ]
219 ):
220 return method
221 return "_OTHER"
222
223
224 def get_custom_headers(env_var: str) -> list[str]:
225 custom_headers = environ.get(env_var, None)
226 if custom_headers:
227 return [
228 custom_headers.strip()
229 for custom_headers in custom_headers.split(",")
230 ]
231 return []
232
233
234 def _parse_active_request_count_attrs(req_attrs):
235 active_requests_count_attrs = {
236 key: req_attrs[key]
237 for key in _active_requests_count_attrs.intersection(req_attrs.keys())
238 }
239 return active_requests_count_attrs
240
241
242 def _parse_duration_attrs(req_attrs):
243 duration_attrs = {
244 key: req_attrs[key]
245 for key in _duration_attrs.intersection(req_attrs.keys())
246 }
247 return duration_attrs
248
249
250 def _parse_url_query(url: str):
251 parsed_url = urlparse(url)
252 path = parsed_url.path
253 query_params = parsed_url.query
254 return path, query_params
255
[end of util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py b/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py
--- a/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py
+++ b/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py
@@ -166,11 +166,7 @@
parsed = urlparse(url)
if all([parsed.scheme, parsed.netloc]): # checks for valid url
parsed_url = urlparse(url)
- netloc = (
- (":".join(((parsed_url.hostname or ""), str(parsed_url.port))))
- if parsed_url.port
- else (parsed_url.hostname or "")
- )
+ _, _, netloc = parsed.netloc.rpartition("@")
return urlunparse(
(
parsed_url.scheme,
| {"golden_diff": "diff --git a/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py b/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py\n--- a/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py\n+++ b/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py\n@@ -166,11 +166,7 @@\n parsed = urlparse(url)\n if all([parsed.scheme, parsed.netloc]): # checks for valid url\n parsed_url = urlparse(url)\n- netloc = (\n- (\":\".join(((parsed_url.hostname or \"\"), str(parsed_url.port))))\n- if parsed_url.port\n- else (parsed_url.hostname or \"\")\n- )\n+ _, _, netloc = parsed.netloc.rpartition(\"@\")\n return urlunparse(\n (\n parsed_url.scheme,\n", "issue": "`remove_url_credentials` drops brackets from IPv6 hostnames\n**Describe your environment** \r\n\r\nPython 3.11.9, `opentelemetry-instrumentation` is auto-injected via the OpenTelemetry operator, `opentelemetry_util_http` is `0.44b0`\r\n\r\n**Steps to reproduce**\r\n```python\r\n\r\nfrom opentelemetry.util.http import remove_url_credentials\r\n\r\nliteral_ipv6_url = \"https://[::1]/somepath?query=foo\"\r\nremove_url_credentials(literal_ipv6_url)\r\n# 'https://::1/somepath?query=foo' -- should be 'https://[::1]/somepath?query=foo'\r\n\r\nliteral_ipv6_url_with_port = \"https://[::1]:12345/somepath?query=foo\"\r\nremove_url_credentials(literal_ipv6_url_with_port)\r\n# 'https://::1:12345/somepath?query=foo -- should be 'https://[::1]:12345/somepath?query=foo'\r\n\r\nliteral_ipv6_url_with_auth = \"https://someuser:somepass@[::1]:12345/somepath?query=foo\"\r\nremove_url_credentials(literal_ipv6_url_with_auth)\r\n# 'https://::1:12345/somepath?query=foo' -- should be https://[::1]:12345/somepath?query=foo\r\n\r\n```\r\n\r\n**What is the expected behavior?**\r\nThe ipv6 host should remain inside `[]` \r\n\r\n**What is the actual behavior?**\r\n`[]` are stripped from the host\r\n\r\n**Additional context**\r\nhttps://github.com/open-telemetry/opentelemetry-python-contrib/blob/main/util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py#L169 is the causing line. 
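Assuming a build of `opentelemetry-util-http` that already contains this fix, the examples from the issue round-trip as expected. This is an illustrative check, not a test taken from the repository:

```python
from opentelemetry.util.http import remove_url_credentials

assert remove_url_credentials("https://[::1]/somepath?query=foo") == "https://[::1]/somepath?query=foo"
assert remove_url_credentials("https://[::1]:12345/somepath?query=foo") == "https://[::1]:12345/somepath?query=foo"
assert remove_url_credentials(
    "https://someuser:somepass@[::1]:12345/somepath?query=foo"
) == "https://[::1]:12345/somepath?query=foo"
```
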
The `hostname` result on `urlparse` does not contain the brackets\r\n\r\n\r\n```python\r\nfrom urllib.parse import urlparse\r\n\r\nparsed = urlparse(literal_ipv6_url_with_auth)\r\n\r\nparsed\r\n# ParseResult(scheme='https', netloc='someuser:somepass@[::1]:12345', path='/somepath', params='', query='query=foo', fragment='')\r\n\r\nparsed.hostname\r\n# '::1'\r\n```\r\n\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import annotations\n\nfrom os import environ\nfrom re import IGNORECASE as RE_IGNORECASE\nfrom re import compile as re_compile\nfrom re import search\nfrom typing import Callable, Iterable, Optional\nfrom urllib.parse import urlparse, urlunparse\n\nfrom opentelemetry.semconv.trace import SpanAttributes\n\nOTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS = (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SANITIZE_FIELDS\"\n)\nOTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST = (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_REQUEST\"\n)\nOTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE = (\n \"OTEL_INSTRUMENTATION_HTTP_CAPTURE_HEADERS_SERVER_RESPONSE\"\n)\n\nOTEL_PYTHON_INSTRUMENTATION_HTTP_CAPTURE_ALL_METHODS = (\n \"OTEL_PYTHON_INSTRUMENTATION_HTTP_CAPTURE_ALL_METHODS\"\n)\n\n# List of recommended metrics attributes\n_duration_attrs = {\n SpanAttributes.HTTP_METHOD,\n SpanAttributes.HTTP_HOST,\n SpanAttributes.HTTP_SCHEME,\n SpanAttributes.HTTP_STATUS_CODE,\n SpanAttributes.HTTP_FLAVOR,\n SpanAttributes.HTTP_SERVER_NAME,\n SpanAttributes.NET_HOST_NAME,\n SpanAttributes.NET_HOST_PORT,\n}\n\n_active_requests_count_attrs = {\n SpanAttributes.HTTP_METHOD,\n SpanAttributes.HTTP_HOST,\n SpanAttributes.HTTP_SCHEME,\n SpanAttributes.HTTP_FLAVOR,\n SpanAttributes.HTTP_SERVER_NAME,\n}\n\n\nclass ExcludeList:\n \"\"\"Class to exclude certain paths (given as a list of regexes) from tracing requests\"\"\"\n\n def __init__(self, excluded_urls: Iterable[str]):\n self._excluded_urls = excluded_urls\n if self._excluded_urls:\n self._regex = re_compile(\"|\".join(excluded_urls))\n\n def url_disabled(self, url: str) -> bool:\n return bool(self._excluded_urls and search(self._regex, url))\n\n\nclass SanitizeValue:\n \"\"\"Class to sanitize (remove sensitive data from) certain headers (given as a list of regexes)\"\"\"\n\n def __init__(self, sanitized_fields: Iterable[str]):\n self._sanitized_fields = sanitized_fields\n if self._sanitized_fields:\n self._regex = re_compile(\"|\".join(sanitized_fields), RE_IGNORECASE)\n\n def sanitize_header_value(self, header: str, value: str) -> str:\n return (\n \"[REDACTED]\"\n if (self._sanitized_fields and search(self._regex, header))\n else value\n )\n\n def sanitize_header_values(\n self,\n headers: dict[str, str],\n header_regexes: list[str],\n normalize_function: Callable[[str], str],\n ) -> dict[str, str]:\n values: dict[str, str] = {}\n\n if header_regexes:\n header_regexes_compiled = re_compile(\n \"|\".join(\"^\" + i + \"$\" for i in 
header_regexes),\n RE_IGNORECASE,\n )\n\n for header_name in list(\n filter(\n header_regexes_compiled.match,\n headers.keys(),\n )\n ):\n header_values = headers.get(header_name)\n if header_values:\n key = normalize_function(header_name.lower())\n values[key] = [\n self.sanitize_header_value(\n header=header_name, value=header_values\n )\n ]\n\n return values\n\n\n_root = r\"OTEL_PYTHON_{}\"\n\n\ndef get_traced_request_attrs(instrumentation):\n traced_request_attrs = environ.get(\n _root.format(f\"{instrumentation}_TRACED_REQUEST_ATTRS\"), []\n )\n\n if traced_request_attrs:\n traced_request_attrs = [\n traced_request_attr.strip()\n for traced_request_attr in traced_request_attrs.split(\",\")\n ]\n\n return traced_request_attrs\n\n\ndef get_excluded_urls(instrumentation: str) -> ExcludeList:\n # Get instrumentation-specific excluded URLs. If not set, retrieve them\n # from generic variable.\n excluded_urls = environ.get(\n _root.format(f\"{instrumentation}_EXCLUDED_URLS\"),\n environ.get(_root.format(\"EXCLUDED_URLS\"), \"\"),\n )\n\n return parse_excluded_urls(excluded_urls)\n\n\ndef parse_excluded_urls(excluded_urls: str) -> ExcludeList:\n \"\"\"\n Small helper to put an arbitrary url list inside an ExcludeList\n \"\"\"\n if excluded_urls:\n excluded_url_list = [\n excluded_url.strip() for excluded_url in excluded_urls.split(\",\")\n ]\n else:\n excluded_url_list = []\n\n return ExcludeList(excluded_url_list)\n\n\ndef remove_url_credentials(url: str) -> str:\n \"\"\"Given a string url, remove the username and password only if it is a valid url\"\"\"\n\n try:\n parsed = urlparse(url)\n if all([parsed.scheme, parsed.netloc]): # checks for valid url\n parsed_url = urlparse(url)\n netloc = (\n (\":\".join(((parsed_url.hostname or \"\"), str(parsed_url.port))))\n if parsed_url.port\n else (parsed_url.hostname or \"\")\n )\n return urlunparse(\n (\n parsed_url.scheme,\n netloc,\n parsed_url.path,\n parsed_url.params,\n parsed_url.query,\n parsed_url.fragment,\n )\n )\n except ValueError: # an unparsable url was passed\n pass\n return url\n\n\ndef normalise_request_header_name(header: str) -> str:\n key = header.lower().replace(\"-\", \"_\")\n return f\"http.request.header.{key}\"\n\n\ndef normalise_response_header_name(header: str) -> str:\n key = header.lower().replace(\"-\", \"_\")\n return f\"http.response.header.{key}\"\n\n\ndef sanitize_method(method: Optional[str]) -> Optional[str]:\n if method is None:\n return None\n method = method.upper()\n if (\n environ.get(OTEL_PYTHON_INSTRUMENTATION_HTTP_CAPTURE_ALL_METHODS)\n or\n # Based on https://www.rfc-editor.org/rfc/rfc7231#section-4.1 and https://www.rfc-editor.org/rfc/rfc5789#section-2.\n method\n in [\n \"GET\",\n \"HEAD\",\n \"POST\",\n \"PUT\",\n \"DELETE\",\n \"CONNECT\",\n \"OPTIONS\",\n \"TRACE\",\n \"PATCH\",\n ]\n ):\n return method\n return \"_OTHER\"\n\n\ndef get_custom_headers(env_var: str) -> list[str]:\n custom_headers = environ.get(env_var, None)\n if custom_headers:\n return [\n custom_headers.strip()\n for custom_headers in custom_headers.split(\",\")\n ]\n return []\n\n\ndef _parse_active_request_count_attrs(req_attrs):\n active_requests_count_attrs = {\n key: req_attrs[key]\n for key in _active_requests_count_attrs.intersection(req_attrs.keys())\n }\n return active_requests_count_attrs\n\n\ndef _parse_duration_attrs(req_attrs):\n duration_attrs = {\n key: req_attrs[key]\n for key in _duration_attrs.intersection(req_attrs.keys())\n }\n return duration_attrs\n\n\ndef _parse_url_query(url: str):\n parsed_url = 
urlparse(url)\n path = parsed_url.path\n query_params = parsed_url.query\n return path, query_params\n", "path": "util/opentelemetry-util-http/src/opentelemetry/util/http/__init__.py"}]} | 3,356 | 191 |
gh_patches_debug_34875 | rasdani/github-patches | git_diff | mozmeao__snippets-service-741 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Special Link: about:accounts
AS Router Action:
- `SHOW_FIREFOX_ACCOUNTS`: opens the firefox accounts signup page (about:accounts?action=signup&entrypoint=snippets)
Currently blocked waiting on:
- documentation on AS Router. See also #521
- code changes in bug [1478569](https://bugzilla.mozilla.org/show_bug.cgi?id=1478569)
</issue>
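
To make the requested behaviour concrete: a snippet author would mark the link with a `special:accounts` href, and `fluent_link_extractor` should then emit the AS Router action instead of a plain URL. The sketch below assumes the patch in this record is applied and that `snippets.base.util` and its dependencies are importable; the snippet text is made up:

```python
from snippets.base.util import fluent_link_extractor

data = {"text": 'Sync your bookmarks, <a href="special:accounts">get a Firefox Account</a>.'}
result = fluent_link_extractor(data, ["text"])

# With the patch applied, the link metadata carries the action rather than a URL:
# result["links"]["link0"] == {"action": "SHOW_FIREFOX_ACCOUNTS"}
```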
<code>
[start of snippets/base/util.py]
1 import copy
2 import datetime
3 import re
4
5 from product_details import product_details
6 from product_details.version_compare import version_list
7
8 EPOCH = datetime.datetime.utcfromtimestamp(0)
9
10
11 def get_object_or_none(model_class, **filters):
12 """
13 Identical to Model.get, except instead of throwing exceptions, this returns
14 None.
15 """
16 try:
17 return model_class.objects.get(**filters)
18 except (model_class.DoesNotExist, model_class.MultipleObjectsReturned):
19 return None
20
21
22 def first(collection, callback):
23 """
24 Find the first item in collection that, when passed to callback, returns
25 True. Returns None if no such item is found.
26 """
27 return next((item for item in collection if callback(item)), None)
28
29
30 def create_locales():
31 from snippets.base.models import TargetedLocale
32
33 for code, name in product_details.languages.items():
34 locale = TargetedLocale.objects.get_or_create(code=code.lower())[0]
35 name = name['English']
36 if locale.name != name:
37 locale.name = name
38 locale.save()
39
40
41 def create_countries():
42 from snippets.base.models import TargetedCountry
43
44 for code, name in product_details.get_regions('en-US').items():
45 country = TargetedCountry.objects.get_or_create(code=code)[0]
46 if country.name != name:
47 country.name = name
48 country.save()
49
50
51 def current_firefox_major_version():
52 full_version = version_list(
53 product_details.firefox_history_major_releases)[0]
54
55 return full_version.split('.', 1)[0]
56
57
58 def fluent_link_extractor(data, variables):
59 """Replaces all <a> elements with fluent.js link elements sequentially
60 numbered.
61
62 Returns a tuple with the new text and a dict of all the links with url and
63 custom metric where available.
64
65 """
66 class Replacer:
67 link_counter = 0
68 links = {}
69
70 def __call__(self, matchobj):
71 keyname = 'link{0}'.format(self.link_counter)
72 replacement = '<{keyname}>{text}</{keyname}>'.format(
73 keyname=keyname,
74 text=matchobj.group('innerText'))
75 # Find the URL
76 url_match = re.search('href="(?P<url>.+?)"', matchobj.group('attrs'))
77 url = ''
78
79 if url_match:
80 url = url_match.group('url')
81 self.links[keyname] = {
82 'url': url,
83 }
84
85 # Find the optional data-metric attrib
86 metric_match = re.search('data-metric="(?P<metric>.+?)"', matchobj.group('attrs'))
87 if metric_match:
88 self.links[keyname]['metric'] = metric_match.group('metric')
89
90 self.link_counter += 1
91 return replacement
92
93 local_data = copy.deepcopy(data)
94 replacer = Replacer()
95 for variable in variables:
96 local_data[variable] = re.sub('(<a(?P<attrs> .*?)>)(?P<innerText>.+?)(</a>)',
97 replacer, local_data[variable])
98
99 local_data['links'] = replacer.links
100 return local_data
101
102
103 def to_unix_time_seconds(dt):
104 return int((dt - EPOCH).total_seconds())
105
[end of snippets/base/util.py]
[start of snippets/base/validators.py]
1 import re
2 import json
3 from io import StringIO
4
5 import xml.sax
6 from xml.sax import ContentHandler
7
8 from django.core.exceptions import ValidationError
9 from django.core.validators import BaseValidator
10 from django.utils.deconstruct import deconstructible
11
12 import bleach
13
14 ALLOWED_TAGS = ['a', 'i', 'b', 'u', 'strong', 'em', 'br']
15 ALLOWED_ATTRIBUTES = {'a': ['href', 'data-metric']}
16
17
18 @deconstructible
19 class MinValueValidator(BaseValidator):
20 message = 'Ensure this value is greater than or equal to %(limit_value)s.'
21 code = 'min_value'
22
23 def compare(self, a, b):
24 return int(a) < int(b)
25
26
27 def validate_xml_template(data):
28 parser = xml.sax.make_parser()
29 parser.setContentHandler(ContentHandler())
30 parser.setFeature(xml.sax.handler.feature_external_ges, 0)
31
32 xml_str = '<div>\n{0}</div>'.format(data)
33 try:
34 parser.parse(StringIO(xml_str))
35 except xml.sax.SAXParseException as e:
36 # getLineNumber() - 1 to get the correct line number because
37 # we're wrapping contents into a div.
38 error_msg = (
39 'XML Error: {message} in line {line} column {column}').format(
40 message=e.getMessage(), line=e.getLineNumber() - 1, column=e.getColumnNumber())
41 raise ValidationError(error_msg)
42 return data
43
44
45 def validate_xml_variables(data):
46 data_dict = json.loads(data)
47
48 # set up a safer XML parser that does not resolve external
49 # entities
50 parser = xml.sax.make_parser()
51 parser.setContentHandler(ContentHandler())
52 parser.setFeature(xml.sax.handler.feature_external_ges, 0)
53
54 for name, value in data_dict.items():
55 # Skip over values that aren't strings.
56 if not isinstance(value, str):
57 continue
58
59 xml_str = '<div>{0}</div>'.format(value)
60 try:
61 parser.parse(StringIO(xml_str))
62 except xml.sax.SAXParseException as e:
63 error_msg = (
64 'Data is not XML valid.\n'
65 'XML Error in value "{name}": {message} in column {column}'
66 .format(name=name, message=e.getMessage(),
67 column=e.getColumnNumber()))
68 raise ValidationError(error_msg)
69 return data
70
71
72 def validate_as_router_fluent_variables(data, variables):
73 data_dict = json.loads(data)
74
75 for variable in variables:
76 text = data_dict[variable]
77 bleached_text = bleach.clean(text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES)
78 # Bleach escapes '&' to '&'. We need to revert back to compare with
79 # text
80 bleached_text = bleached_text.replace('&', '&')
81 if text != bleached_text:
82 error_msg = ('Variable `{}` contains unsupported tags.'
83 'Only {} are supported'.format(variable, ', '.join(ALLOWED_TAGS)))
84 raise ValidationError(error_msg)
85 return data
86
87
88 def validate_regex(regex_str):
89 if regex_str.startswith('/'):
90 try:
91 re.compile(regex_str[1:-1])
92 except re.error as exp:
93 raise ValidationError(str(exp))
94 return regex_str
95
[end of snippets/base/validators.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/snippets/base/util.py b/snippets/base/util.py
--- a/snippets/base/util.py
+++ b/snippets/base/util.py
@@ -78,9 +78,20 @@
if url_match:
url = url_match.group('url')
- self.links[keyname] = {
- 'url': url,
- }
+
+ if url == 'special:appMenu':
+ self.links[keyname] = {
+ 'action': 'OPEN_APPLICATIONS_MENU',
+ 'args': 'appMenu',
+ }
+ elif url == 'special:accounts':
+ self.links[keyname] = {
+ 'action': 'SHOW_FIREFOX_ACCOUNTS',
+ }
+ else:
+ self.links[keyname] = {
+ 'url': url,
+ }
# Find the optional data-metric attrib
metric_match = re.search('data-metric="(?P<metric>.+?)"', matchobj.group('attrs'))
diff --git a/snippets/base/validators.py b/snippets/base/validators.py
--- a/snippets/base/validators.py
+++ b/snippets/base/validators.py
@@ -13,6 +13,7 @@
ALLOWED_TAGS = ['a', 'i', 'b', 'u', 'strong', 'em', 'br']
ALLOWED_ATTRIBUTES = {'a': ['href', 'data-metric']}
+ALLOWED_PROTOCOLS = ['https', 'special']
@deconstructible
@@ -74,13 +75,22 @@
for variable in variables:
text = data_dict[variable]
- bleached_text = bleach.clean(text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES)
+ bleached_text = bleach.clean(
+ text,
+ tags=ALLOWED_TAGS,
+ attributes=ALLOWED_ATTRIBUTES,
+ # Allow only secure protocols and custom special links.
+ protocols=ALLOWED_PROTOCOLS,
+ )
# Bleach escapes '&' to '&'. We need to revert back to compare with
# text
bleached_text = bleached_text.replace('&', '&')
+
if text != bleached_text:
- error_msg = ('Variable `{}` contains unsupported tags.'
- 'Only {} are supported'.format(variable, ', '.join(ALLOWED_TAGS)))
+ error_msg = (
+ 'Variable `{}` contains unsupported tags or insecure links.'
+ 'Only {} tags and https links are supported'
+ ).format(variable, ', '.join(ALLOWED_TAGS))
raise ValidationError(error_msg)
return data
| {"golden_diff": "diff --git a/snippets/base/util.py b/snippets/base/util.py\n--- a/snippets/base/util.py\n+++ b/snippets/base/util.py\n@@ -78,9 +78,20 @@\n \n if url_match:\n url = url_match.group('url')\n- self.links[keyname] = {\n- 'url': url,\n- }\n+\n+ if url == 'special:appMenu':\n+ self.links[keyname] = {\n+ 'action': 'OPEN_APPLICATIONS_MENU',\n+ 'args': 'appMenu',\n+ }\n+ elif url == 'special:accounts':\n+ self.links[keyname] = {\n+ 'action': 'SHOW_FIREFOX_ACCOUNTS',\n+ }\n+ else:\n+ self.links[keyname] = {\n+ 'url': url,\n+ }\n \n # Find the optional data-metric attrib\n metric_match = re.search('data-metric=\"(?P<metric>.+?)\"', matchobj.group('attrs'))\ndiff --git a/snippets/base/validators.py b/snippets/base/validators.py\n--- a/snippets/base/validators.py\n+++ b/snippets/base/validators.py\n@@ -13,6 +13,7 @@\n \n ALLOWED_TAGS = ['a', 'i', 'b', 'u', 'strong', 'em', 'br']\n ALLOWED_ATTRIBUTES = {'a': ['href', 'data-metric']}\n+ALLOWED_PROTOCOLS = ['https', 'special']\n \n \n @deconstructible\n@@ -74,13 +75,22 @@\n \n for variable in variables:\n text = data_dict[variable]\n- bleached_text = bleach.clean(text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES)\n+ bleached_text = bleach.clean(\n+ text,\n+ tags=ALLOWED_TAGS,\n+ attributes=ALLOWED_ATTRIBUTES,\n+ # Allow only secure protocols and custom special links.\n+ protocols=ALLOWED_PROTOCOLS,\n+ )\n # Bleach escapes '&' to '&'. We need to revert back to compare with\n # text\n bleached_text = bleached_text.replace('&', '&')\n+\n if text != bleached_text:\n- error_msg = ('Variable `{}` contains unsupported tags.'\n- 'Only {} are supported'.format(variable, ', '.join(ALLOWED_TAGS)))\n+ error_msg = (\n+ 'Variable `{}` contains unsupported tags or insecure links.'\n+ 'Only {} tags and https links are supported'\n+ ).format(variable, ', '.join(ALLOWED_TAGS))\n raise ValidationError(error_msg)\n return data\n", "issue": "Special Link: about:accounts\nAS Router Action:\r\n - `SHOW_FIREFOX_ACCOUNTS`: opens the firefox accounts signup page (about:accounts?action=signup&entrypoint=snippets)\r\n\r\nCurrently blocked waiting\r\n - documentation on AS Router. See also #521 \r\n - code changes in (bug [1478569](https://bugzilla.mozilla.org/show_bug.cgi?id=1478569))\n", "before_files": [{"content": "import copy\nimport datetime\nimport re\n\nfrom product_details import product_details\nfrom product_details.version_compare import version_list\n\nEPOCH = datetime.datetime.utcfromtimestamp(0)\n\n\ndef get_object_or_none(model_class, **filters):\n \"\"\"\n Identical to Model.get, except instead of throwing exceptions, this returns\n None.\n \"\"\"\n try:\n return model_class.objects.get(**filters)\n except (model_class.DoesNotExist, model_class.MultipleObjectsReturned):\n return None\n\n\ndef first(collection, callback):\n \"\"\"\n Find the first item in collection that, when passed to callback, returns\n True. 
Returns None if no such item is found.\n \"\"\"\n return next((item for item in collection if callback(item)), None)\n\n\ndef create_locales():\n from snippets.base.models import TargetedLocale\n\n for code, name in product_details.languages.items():\n locale = TargetedLocale.objects.get_or_create(code=code.lower())[0]\n name = name['English']\n if locale.name != name:\n locale.name = name\n locale.save()\n\n\ndef create_countries():\n from snippets.base.models import TargetedCountry\n\n for code, name in product_details.get_regions('en-US').items():\n country = TargetedCountry.objects.get_or_create(code=code)[0]\n if country.name != name:\n country.name = name\n country.save()\n\n\ndef current_firefox_major_version():\n full_version = version_list(\n product_details.firefox_history_major_releases)[0]\n\n return full_version.split('.', 1)[0]\n\n\ndef fluent_link_extractor(data, variables):\n \"\"\"Replaces all <a> elements with fluent.js link elements sequentially\n numbered.\n\n Returns a tuple with the new text and a dict of all the links with url and\n custom metric where available.\n\n \"\"\"\n class Replacer:\n link_counter = 0\n links = {}\n\n def __call__(self, matchobj):\n keyname = 'link{0}'.format(self.link_counter)\n replacement = '<{keyname}>{text}</{keyname}>'.format(\n keyname=keyname,\n text=matchobj.group('innerText'))\n # Find the URL\n url_match = re.search('href=\"(?P<url>.+?)\"', matchobj.group('attrs'))\n url = ''\n\n if url_match:\n url = url_match.group('url')\n self.links[keyname] = {\n 'url': url,\n }\n\n # Find the optional data-metric attrib\n metric_match = re.search('data-metric=\"(?P<metric>.+?)\"', matchobj.group('attrs'))\n if metric_match:\n self.links[keyname]['metric'] = metric_match.group('metric')\n\n self.link_counter += 1\n return replacement\n\n local_data = copy.deepcopy(data)\n replacer = Replacer()\n for variable in variables:\n local_data[variable] = re.sub('(<a(?P<attrs> .*?)>)(?P<innerText>.+?)(</a>)',\n replacer, local_data[variable])\n\n local_data['links'] = replacer.links\n return local_data\n\n\ndef to_unix_time_seconds(dt):\n return int((dt - EPOCH).total_seconds())\n", "path": "snippets/base/util.py"}, {"content": "import re\nimport json\nfrom io import StringIO\n\nimport xml.sax\nfrom xml.sax import ContentHandler\n\nfrom django.core.exceptions import ValidationError\nfrom django.core.validators import BaseValidator\nfrom django.utils.deconstruct import deconstructible\n\nimport bleach\n\nALLOWED_TAGS = ['a', 'i', 'b', 'u', 'strong', 'em', 'br']\nALLOWED_ATTRIBUTES = {'a': ['href', 'data-metric']}\n\n\n@deconstructible\nclass MinValueValidator(BaseValidator):\n message = 'Ensure this value is greater than or equal to %(limit_value)s.'\n code = 'min_value'\n\n def compare(self, a, b):\n return int(a) < int(b)\n\n\ndef validate_xml_template(data):\n parser = xml.sax.make_parser()\n parser.setContentHandler(ContentHandler())\n parser.setFeature(xml.sax.handler.feature_external_ges, 0)\n\n xml_str = '<div>\\n{0}</div>'.format(data)\n try:\n parser.parse(StringIO(xml_str))\n except xml.sax.SAXParseException as e:\n # getLineNumber() - 1 to get the correct line number because\n # we're wrapping contents into a div.\n error_msg = (\n 'XML Error: {message} in line {line} column {column}').format(\n message=e.getMessage(), line=e.getLineNumber() - 1, column=e.getColumnNumber())\n raise ValidationError(error_msg)\n return data\n\n\ndef validate_xml_variables(data):\n data_dict = json.loads(data)\n\n # set up a safer XML parser that does not 
resolve external\n # entities\n parser = xml.sax.make_parser()\n parser.setContentHandler(ContentHandler())\n parser.setFeature(xml.sax.handler.feature_external_ges, 0)\n\n for name, value in data_dict.items():\n # Skip over values that aren't strings.\n if not isinstance(value, str):\n continue\n\n xml_str = '<div>{0}</div>'.format(value)\n try:\n parser.parse(StringIO(xml_str))\n except xml.sax.SAXParseException as e:\n error_msg = (\n 'Data is not XML valid.\\n'\n 'XML Error in value \"{name}\": {message} in column {column}'\n .format(name=name, message=e.getMessage(),\n column=e.getColumnNumber()))\n raise ValidationError(error_msg)\n return data\n\n\ndef validate_as_router_fluent_variables(data, variables):\n data_dict = json.loads(data)\n\n for variable in variables:\n text = data_dict[variable]\n bleached_text = bleach.clean(text, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES)\n # Bleach escapes '&' to '&'. We need to revert back to compare with\n # text\n bleached_text = bleached_text.replace('&', '&')\n if text != bleached_text:\n error_msg = ('Variable `{}` contains unsupported tags.'\n 'Only {} are supported'.format(variable, ', '.join(ALLOWED_TAGS)))\n raise ValidationError(error_msg)\n return data\n\n\ndef validate_regex(regex_str):\n if regex_str.startswith('/'):\n try:\n re.compile(regex_str[1:-1])\n except re.error as exp:\n raise ValidationError(str(exp))\n return regex_str\n", "path": "snippets/base/validators.py"}]} | 2,421 | 562 |
gh_patches_debug_19111 | rasdani/github-patches | git_diff | scrapy__scrapy-3045 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Python3.3 support and requirements without it
Scrapy still supports py3.3 (at least according to its trove classifiers in setup.py and the CI conf)
but some of its dependencies dropped support some time ago.
https://github.com/pyca/service_identity/blob/master/CHANGELOG.rst#backward-incompatible-changes-1
https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst#20---2017-07-17
This caused some problems when testing scrapy daemon with py3.3,
which was resolved by installing the [enum-compat virtual package](https://pypi.python.org/pypi/enum-compat/0.0.2)
There are several options here.
scrapy1.5 can drop support for python3.3,
scrapy1.4 can restrict the max versions for those dependencies
and enum-compat can become a requirement,
although there may be more things broken.
I didn't figure out why the python3.3 build for scrapy doesn't fail
but here is a failed scrapyd build https://travis-ci.org/scrapy/scrapyd/jobs/299029712
</issue>
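
The second option mentioned above, capping dependency versions only where Python 3.3 is in play, is usually expressed with environment markers in `install_requires`; the same `setup.py` already gates marker support on setuptools >= 18.5. The pins below are purely hypothetical, and the exact last versions supporting 3.3 would have to be confirmed against each project's changelog:

```python
# Hypothetical, illustrative pins only:
install_requires = [
    'cryptography<2.0; python_version < "3.4"',
    'service_identity<17.0; python_version < "3.4"',
    'enum-compat; python_version < "3.4"',
]
```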
<code>
[start of setup.py]
1 from os.path import dirname, join
2 from pkg_resources import parse_version
3 from setuptools import setup, find_packages, __version__ as setuptools_version
4
5
6 with open(join(dirname(__file__), 'scrapy/VERSION'), 'rb') as f:
7 version = f.read().decode('ascii').strip()
8
9
10 def has_environment_marker_platform_impl_support():
11 """Code extracted from 'pytest/setup.py'
12 https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31
13
14 The first known release to support environment marker with range operators
15 it is 18.5, see:
16 https://setuptools.readthedocs.io/en/latest/history.html#id235
17 """
18 return parse_version(setuptools_version) >= parse_version('18.5')
19
20
21 extras_require = {}
22
23 if has_environment_marker_platform_impl_support():
24 extras_require[':platform_python_implementation == "PyPy"'] = [
25 'PyPyDispatcher>=2.1.0',
26 ]
27
28
29 setup(
30 name='Scrapy',
31 version=version,
32 url='https://scrapy.org',
33 description='A high-level Web Crawling and Web Scraping framework',
34 long_description=open('README.rst').read(),
35 author='Scrapy developers',
36 maintainer='Pablo Hoffman',
37 maintainer_email='[email protected]',
38 license='BSD',
39 packages=find_packages(exclude=('tests', 'tests.*')),
40 include_package_data=True,
41 zip_safe=False,
42 entry_points={
43 'console_scripts': ['scrapy = scrapy.cmdline:execute']
44 },
45 classifiers=[
46 'Framework :: Scrapy',
47 'Development Status :: 5 - Production/Stable',
48 'Environment :: Console',
49 'Intended Audience :: Developers',
50 'License :: OSI Approved :: BSD License',
51 'Operating System :: OS Independent',
52 'Programming Language :: Python',
53 'Programming Language :: Python :: 2',
54 'Programming Language :: Python :: 2.7',
55 'Programming Language :: Python :: 3',
56 'Programming Language :: Python :: 3.3',
57 'Programming Language :: Python :: 3.4',
58 'Programming Language :: Python :: 3.5',
59 'Programming Language :: Python :: 3.6',
60 'Topic :: Internet :: WWW/HTTP',
61 'Topic :: Software Development :: Libraries :: Application Frameworks',
62 'Topic :: Software Development :: Libraries :: Python Modules',
63 ],
64 install_requires=[
65 'Twisted>=13.1.0',
66 'w3lib>=1.17.0',
67 'queuelib',
68 'lxml',
69 'pyOpenSSL',
70 'cssselect>=0.9',
71 'six>=1.5.2',
72 'parsel>=1.1',
73 'PyDispatcher>=2.0.5',
74 'service_identity',
75 ],
76 extras_require=extras_require,
77 )
78
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -53,7 +53,6 @@
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
@@ -61,6 +60,7 @@
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
install_requires=[
'Twisted>=13.1.0',
'w3lib>=1.17.0',
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -53,7 +53,6 @@\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n- 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n@@ -61,6 +60,7 @@\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n+ python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',\n install_requires=[\n 'Twisted>=13.1.0',\n 'w3lib>=1.17.0',\n", "issue": "Python3.3 support and requirements without it\nScrapy still supports py3.3 (at least according to its trove classifiers in setup.py and the CI conf)\r\nbut some of its dependencies dropped support some time ago.\r\nhttps://github.com/pyca/service_identity/blob/master/CHANGELOG.rst#backward-incompatible-changes-1\r\nhttps://github.com/pyca/cryptography/blob/master/CHANGELOG.rst#20---2017-07-17\r\n\r\nThis caused some problems when testing scrapy daemon with py3.3,\r\nwhich was resolved by installing the [enum-compat virtual package](https://pypi.python.org/pypi/enum-compat/0.0.2)\r\n\r\nThere are several options here.\r\nscrapy1.5 can drop support for python3.3,\r\nscrapy1.4 can restrict the max versions for those dependencies\r\nand enum-compat can become a requirement,\r\nalthough there may be more things broken.\r\n\r\nI didn't figure out why the python3.3 build for scrapy doesn't fail\r\nbut here is a failed scrapyd build https://travis-ci.org/scrapy/scrapyd/jobs/299029712\n", "before_files": [{"content": "from os.path import dirname, join\nfrom pkg_resources import parse_version\nfrom setuptools import setup, find_packages, __version__ as setuptools_version\n\n\nwith open(join(dirname(__file__), 'scrapy/VERSION'), 'rb') as f:\n version = f.read().decode('ascii').strip()\n\n\ndef has_environment_marker_platform_impl_support():\n \"\"\"Code extracted from 'pytest/setup.py'\n https://github.com/pytest-dev/pytest/blob/7538680c/setup.py#L31\n\n The first known release to support environment marker with range operators\n it is 18.5, see:\n https://setuptools.readthedocs.io/en/latest/history.html#id235\n \"\"\"\n return parse_version(setuptools_version) >= parse_version('18.5')\n\n\nextras_require = {}\n\nif has_environment_marker_platform_impl_support():\n extras_require[':platform_python_implementation == \"PyPy\"'] = [\n 'PyPyDispatcher>=2.1.0',\n ]\n\n\nsetup(\n name='Scrapy',\n version=version,\n url='https://scrapy.org',\n description='A high-level Web Crawling and Web Scraping framework',\n long_description=open('README.rst').read(),\n author='Scrapy developers',\n maintainer='Pablo Hoffman',\n maintainer_email='[email protected]',\n license='BSD',\n packages=find_packages(exclude=('tests', 'tests.*')),\n include_package_data=True,\n zip_safe=False,\n entry_points={\n 'console_scripts': ['scrapy = scrapy.cmdline:execute']\n },\n classifiers=[\n 'Framework :: Scrapy',\n 'Development Status :: 5 - Production/Stable',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python 
:: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Topic :: Internet :: WWW/HTTP',\n 'Topic :: Software Development :: Libraries :: Application Frameworks',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n ],\n install_requires=[\n 'Twisted>=13.1.0',\n 'w3lib>=1.17.0',\n 'queuelib',\n 'lxml',\n 'pyOpenSSL',\n 'cssselect>=0.9',\n 'six>=1.5.2',\n 'parsel>=1.1',\n 'PyDispatcher>=2.0.5',\n 'service_identity',\n ],\n extras_require=extras_require,\n)\n", "path": "setup.py"}]} | 1,549 | 212 |
gh_patches_debug_2376 | rasdani/github-patches | git_diff | xonsh__xonsh-1890 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
No output from scp command
While running scp in xonsh, the progress meter does not show up:
https://asciinema.org/a/322p80uvb0pjyaic2e51iqmhq
I'm using version 3f45378
</issue>
<code>
[start of xonsh/commands_cache.py]
1 # -*- coding: utf-8 -*-
2 """Module for caching command & alias names as well as for predicting whether
3 a command will be able to be run in the background.
4
5 A background predictor is a function that accepect a single argument list
6 and returns whethere or not the process can be run in the background (returns
7 True) or must be run the foreground (returns False).
8 """
9 import os
10 import builtins
11 import argparse
12 import collections
13 import collections.abc as cabc
14
15 from xonsh.platform import ON_WINDOWS, pathbasename
16 from xonsh.tools import executables_in
17 from xonsh.lazyasd import lazyobject
18
19
20 class CommandsCache(cabc.Mapping):
21 """A lazy cache representing the commands available on the file system.
22 The keys are the command names and the values a tuple of (loc, has_alias)
23 where loc is either a str pointing to the executable on the file system or
24 None (if no executable exists) and has_alias is a boolean flag for whether
25 the command has an alias.
26 """
27
28 def __init__(self):
29 self._cmds_cache = {}
30 self._path_checksum = None
31 self._alias_checksum = None
32 self._path_mtime = -1
33 self.threadable_predictors = default_threadable_predictors()
34
35 def __contains__(self, key):
36 _ = self.all_commands
37 return self.lazyin(key)
38
39 def __iter__(self):
40 for cmd, (path, is_alias) in self.all_commands.items():
41 if ON_WINDOWS and path is not None:
42 # All comand keys are stored in uppercase on Windows.
43 # This ensures the original command name is returned.
44 cmd = pathbasename(path)
45 yield cmd
46
47 def __len__(self):
48 return len(self.all_commands)
49
50 def __getitem__(self, key):
51 _ = self.all_commands
52 return self.lazyget(key)
53
54 def is_empty(self):
55 """Returns whether the cache is populated or not."""
56 return len(self._cmds_cache) == 0
57
58 @staticmethod
59 def get_possible_names(name):
60 """Generates the possible `PATHEXT` extension variants of a given executable
61 name on Windows as a list, conserving the ordering in `PATHEXT`.
62 Returns a list as `name` being the only item in it on other platforms."""
63 if ON_WINDOWS:
64 pathext = builtins.__xonsh_env__.get('PATHEXT')
65 name = name.upper()
66 return [
67 name + ext
68 for ext in ([''] + pathext)
69 ]
70 else:
71 return [name]
72
73 @property
74 def all_commands(self):
75 paths = builtins.__xonsh_env__.get('PATH', [])
76 pathset = frozenset(x for x in paths if os.path.isdir(x))
77 # did PATH change?
78 path_hash = hash(pathset)
79 cache_valid = path_hash == self._path_checksum
80 self._path_checksum = path_hash
81 # did aliases change?
82 alss = getattr(builtins, 'aliases', set())
83 al_hash = hash(frozenset(alss))
84 cache_valid = cache_valid and al_hash == self._alias_checksum
85 self._alias_checksum = al_hash
86 # did the contents of any directory in PATH change?
87 max_mtime = 0
88 for path in pathset:
89 mtime = os.stat(path).st_mtime
90 if mtime > max_mtime:
91 max_mtime = mtime
92 cache_valid = cache_valid and (max_mtime <= self._path_mtime)
93 self._path_mtime = max_mtime
94 if cache_valid:
95 return self._cmds_cache
96 allcmds = {}
97 for path in reversed(paths):
98 # iterate backwards so that entries at the front of PATH overwrite
99 # entries at the back.
100 for cmd in executables_in(path):
101 key = cmd.upper() if ON_WINDOWS else cmd
102 allcmds[key] = (os.path.join(path, cmd), cmd in alss)
103 for cmd in alss:
104 if cmd not in allcmds:
105 key = cmd.upper() if ON_WINDOWS else cmd
106 allcmds[key] = (cmd, True)
107 self._cmds_cache = allcmds
108 return allcmds
109
110 def cached_name(self, name):
111 """Returns the name that would appear in the cache, if it was exists."""
112 if name is None:
113 return None
114 cached = pathbasename(name)
115 if ON_WINDOWS:
116 keys = self.get_possible_names(cached)
117 cached = next((k for k in keys if k in self._cmds_cache), None)
118 return cached
119
120 def lazyin(self, key):
121 """Checks if the value is in the current cache without the potential to
122 update the cache. It just says whether the value is known *now*. This
123 may not reflect precisely what is on the $PATH.
124 """
125 return self.cached_name(key) in self._cmds_cache
126
127 def lazyiter(self):
128 """Returns an iterator over the current cache contents without the
129 potential to update the cache. This may not reflect what is on the
130 $PATH.
131 """
132 return iter(self._cmds_cache)
133
134 def lazylen(self):
135 """Returns the length of the current cache contents without the
136 potential to update the cache. This may not reflect precisely
137 what is on the $PATH.
138 """
139 return len(self._cmds_cache)
140
141 def lazyget(self, key, default=None):
142 """A lazy value getter."""
143 return self._cmds_cache.get(self.cached_name(key), default)
144
145 def locate_binary(self, name):
146 """Locates an executable on the file system using the cache."""
147 # make sure the cache is up to date by accessing the property
148 _ = self.all_commands
149 return self.lazy_locate_binary(name)
150
151 def lazy_locate_binary(self, name):
152 """Locates an executable in the cache, without checking its validity."""
153 possibilities = self.get_possible_names(name)
154 if ON_WINDOWS:
155 # Windows users expect to be able to execute files in the same
156 # directory without `./`
157 local_bin = next((fn for fn in possibilities if os.path.isfile(fn)),
158 None)
159 if local_bin:
160 return os.path.abspath(local_bin)
161 cached = next((cmd for cmd in possibilities if cmd in self._cmds_cache),
162 None)
163 if cached:
164 (path, is_alias) = self._cmds_cache[cached]
165 return path if not is_alias else None
166 elif os.path.isfile(name) and name != pathbasename(name):
167 return name
168
169 def predict_threadable(self, cmd):
170 """Predicts whether a command list is able to be run on a background
171 thread, rather than the main thread.
172 """
173 name = self.cached_name(cmd[0])
174 if ON_WINDOWS:
175 # On all names (keys) are stored in upper case so instead
176 # we get the original cmd or alias name
177 path, _ = self.lazyget(name, (None, None))
178 if path is None:
179 return True
180 else:
181 name = pathbasename(path)
182 predictor = self.threadable_predictors[name]
183 return predictor(cmd[1:])
184
185 #
186 # Background Predictors
187 #
188
189
190 def predict_true(args):
191 """Always say the process is threadable."""
192 return True
193
194
195 def predict_false(args):
196 """Never say the process is threadable."""
197 return False
198
199
200 @lazyobject
201 def SHELL_PREDICTOR_PARSER():
202 p = argparse.ArgumentParser('shell', add_help=False)
203 p.add_argument('-c', nargs='?', default=None)
204 p.add_argument('filename', nargs='?', default=None)
205 return p
206
207
208 def predict_shell(args):
209 """Precict the backgroundability of the normal shell interface, which
210 comes down to whether it is being run in subproc mode.
211 """
212 ns, _ = SHELL_PREDICTOR_PARSER.parse_known_args(args)
213 if ns.c is None and ns.filename is None:
214 pred = False
215 else:
216 pred = True
217 return pred
218
219
220 @lazyobject
221 def HELP_VER_PREDICTOR_PARSER():
222 p = argparse.ArgumentParser('cmd', add_help=False)
223 p.add_argument('-h', '--help', dest='help',
224 action='store_true', default=None)
225 p.add_argument('-v', '-V', '--version', dest='version',
226 action='store_true', default=None)
227 return p
228
229
230 def predict_help_ver(args):
231 """Precict the backgroundability of commands that have help & version
232 switches: -h, --help, -v, -V, --version. If either of these options is
233 present, the command is assumed to print to stdout normally and is therefore
234 threadable. Otherwise, the command is assumed to not be threadable.
235 This is useful for commands, like top, that normally enter alternate mode
236 but may not in certain circumstances.
237 """
238 ns, _ = HELP_VER_PREDICTOR_PARSER.parse_known_args(args)
239 pred = ns.help is not None or ns.version is not None
240 return pred
241
242
243 def default_threadable_predictors():
244 """Generates a new defaultdict for known threadable predictors.
245 The default is to predict true.
246 """
247 # alphabetical, for what it is worth.
248 predictors = {
249 'bash': predict_shell,
250 'csh': predict_shell,
251 'clear': predict_false,
252 'clear.exe': predict_false,
253 'cls': predict_false,
254 'cmd': predict_shell,
255 'fish': predict_shell,
256 'htop': predict_help_ver,
257 'ksh': predict_shell,
258 'less': predict_help_ver,
259 'man': predict_help_ver,
260 'more': predict_help_ver,
261 'sh': predict_shell,
262 'ssh': predict_false,
263 'startx': predict_false,
264 'sudo': predict_help_ver,
265 'tcsh': predict_shell,
266 'top': predict_help_ver,
267 'vi': predict_false,
268 'vim': predict_false,
269 'vimpager': predict_help_ver,
270 'xo': predict_help_ver,
271 'xonsh': predict_shell,
272 'zsh': predict_shell,
273 }
274 return collections.defaultdict(lambda: predict_true, predictors)
275
[end of xonsh/commands_cache.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/xonsh/commands_cache.py b/xonsh/commands_cache.py
--- a/xonsh/commands_cache.py
+++ b/xonsh/commands_cache.py
@@ -258,6 +258,7 @@
'less': predict_help_ver,
'man': predict_help_ver,
'more': predict_help_ver,
+ 'scp': predict_false,
'sh': predict_shell,
'ssh': predict_false,
'startx': predict_false,
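
For context, `default_threadable_predictors()` (shown above) returns a `defaultdict` whose fallback is `predict_true`, so `scp` previously fell through to the "threadable" default, which is presumably why its progress output never reached the terminal; the patch maps it to `predict_false` alongside `ssh`. A quick sanity check of the mapping (illustrative snippet, not part of the patch):

```python
from xonsh.commands_cache import default_threadable_predictors

preds = default_threadable_predictors()
print(preds['scp']([]))    # False after the patch: scp runs unthreaded, in the foreground
print(preds['rsync']([]))  # True: commands without an entry still fall back to predict_true
```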
| {"golden_diff": "diff --git a/xonsh/commands_cache.py b/xonsh/commands_cache.py\n--- a/xonsh/commands_cache.py\n+++ b/xonsh/commands_cache.py\n@@ -258,6 +258,7 @@\n 'less': predict_help_ver,\n 'man': predict_help_ver,\n 'more': predict_help_ver,\n+ 'scp': predict_false,\n 'sh': predict_shell,\n 'ssh': predict_false,\n 'startx': predict_false,\n", "issue": "No output from scp command\nWhile running scp in xonsh, the progress does not showed up:\n\nhttps://asciinema.org/a/322p80uvb0pjyaic2e51iqmhq\n\nI'm using version 3f45378\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"Module for caching command & alias names as well as for predicting whether\na command will be able to be run in the background.\n\nA background predictor is a function that accepect a single argument list\nand returns whethere or not the process can be run in the background (returns\nTrue) or must be run the foreground (returns False).\n\"\"\"\nimport os\nimport builtins\nimport argparse\nimport collections\nimport collections.abc as cabc\n\nfrom xonsh.platform import ON_WINDOWS, pathbasename\nfrom xonsh.tools import executables_in\nfrom xonsh.lazyasd import lazyobject\n\n\nclass CommandsCache(cabc.Mapping):\n \"\"\"A lazy cache representing the commands available on the file system.\n The keys are the command names and the values a tuple of (loc, has_alias)\n where loc is either a str pointing to the executable on the file system or\n None (if no executable exists) and has_alias is a boolean flag for whether\n the command has an alias.\n \"\"\"\n\n def __init__(self):\n self._cmds_cache = {}\n self._path_checksum = None\n self._alias_checksum = None\n self._path_mtime = -1\n self.threadable_predictors = default_threadable_predictors()\n\n def __contains__(self, key):\n _ = self.all_commands\n return self.lazyin(key)\n\n def __iter__(self):\n for cmd, (path, is_alias) in self.all_commands.items():\n if ON_WINDOWS and path is not None:\n # All comand keys are stored in uppercase on Windows.\n # This ensures the original command name is returned.\n cmd = pathbasename(path)\n yield cmd\n\n def __len__(self):\n return len(self.all_commands)\n\n def __getitem__(self, key):\n _ = self.all_commands\n return self.lazyget(key)\n\n def is_empty(self):\n \"\"\"Returns whether the cache is populated or not.\"\"\"\n return len(self._cmds_cache) == 0\n\n @staticmethod\n def get_possible_names(name):\n \"\"\"Generates the possible `PATHEXT` extension variants of a given executable\n name on Windows as a list, conserving the ordering in `PATHEXT`.\n Returns a list as `name` being the only item in it on other platforms.\"\"\"\n if ON_WINDOWS:\n pathext = builtins.__xonsh_env__.get('PATHEXT')\n name = name.upper()\n return [\n name + ext\n for ext in ([''] + pathext)\n ]\n else:\n return [name]\n\n @property\n def all_commands(self):\n paths = builtins.__xonsh_env__.get('PATH', [])\n pathset = frozenset(x for x in paths if os.path.isdir(x))\n # did PATH change?\n path_hash = hash(pathset)\n cache_valid = path_hash == self._path_checksum\n self._path_checksum = path_hash\n # did aliases change?\n alss = getattr(builtins, 'aliases', set())\n al_hash = hash(frozenset(alss))\n cache_valid = cache_valid and al_hash == self._alias_checksum\n self._alias_checksum = al_hash\n # did the contents of any directory in PATH change?\n max_mtime = 0\n for path in pathset:\n mtime = os.stat(path).st_mtime\n if mtime > max_mtime:\n max_mtime = mtime\n cache_valid = cache_valid and (max_mtime <= self._path_mtime)\n self._path_mtime = 
max_mtime\n if cache_valid:\n return self._cmds_cache\n allcmds = {}\n for path in reversed(paths):\n # iterate backwards so that entries at the front of PATH overwrite\n # entries at the back.\n for cmd in executables_in(path):\n key = cmd.upper() if ON_WINDOWS else cmd\n allcmds[key] = (os.path.join(path, cmd), cmd in alss)\n for cmd in alss:\n if cmd not in allcmds:\n key = cmd.upper() if ON_WINDOWS else cmd\n allcmds[key] = (cmd, True)\n self._cmds_cache = allcmds\n return allcmds\n\n def cached_name(self, name):\n \"\"\"Returns the name that would appear in the cache, if it was exists.\"\"\"\n if name is None:\n return None\n cached = pathbasename(name)\n if ON_WINDOWS:\n keys = self.get_possible_names(cached)\n cached = next((k for k in keys if k in self._cmds_cache), None)\n return cached\n\n def lazyin(self, key):\n \"\"\"Checks if the value is in the current cache without the potential to\n update the cache. It just says whether the value is known *now*. This\n may not reflect precisely what is on the $PATH.\n \"\"\"\n return self.cached_name(key) in self._cmds_cache\n\n def lazyiter(self):\n \"\"\"Returns an iterator over the current cache contents without the\n potential to update the cache. This may not reflect what is on the\n $PATH.\n \"\"\"\n return iter(self._cmds_cache)\n\n def lazylen(self):\n \"\"\"Returns the length of the current cache contents without the\n potential to update the cache. This may not reflect precisely\n what is on the $PATH.\n \"\"\"\n return len(self._cmds_cache)\n\n def lazyget(self, key, default=None):\n \"\"\"A lazy value getter.\"\"\"\n return self._cmds_cache.get(self.cached_name(key), default)\n\n def locate_binary(self, name):\n \"\"\"Locates an executable on the file system using the cache.\"\"\"\n # make sure the cache is up to date by accessing the property\n _ = self.all_commands\n return self.lazy_locate_binary(name)\n\n def lazy_locate_binary(self, name):\n \"\"\"Locates an executable in the cache, without checking its validity.\"\"\"\n possibilities = self.get_possible_names(name)\n if ON_WINDOWS:\n # Windows users expect to be able to execute files in the same\n # directory without `./`\n local_bin = next((fn for fn in possibilities if os.path.isfile(fn)),\n None)\n if local_bin:\n return os.path.abspath(local_bin)\n cached = next((cmd for cmd in possibilities if cmd in self._cmds_cache),\n None)\n if cached:\n (path, is_alias) = self._cmds_cache[cached]\n return path if not is_alias else None\n elif os.path.isfile(name) and name != pathbasename(name):\n return name\n\n def predict_threadable(self, cmd):\n \"\"\"Predicts whether a command list is able to be run on a background\n thread, rather than the main thread.\n \"\"\"\n name = self.cached_name(cmd[0])\n if ON_WINDOWS:\n # On all names (keys) are stored in upper case so instead\n # we get the original cmd or alias name\n path, _ = self.lazyget(name, (None, None))\n if path is None:\n return True\n else:\n name = pathbasename(path)\n predictor = self.threadable_predictors[name]\n return predictor(cmd[1:])\n\n#\n# Background Predictors\n#\n\n\ndef predict_true(args):\n \"\"\"Always say the process is threadable.\"\"\"\n return True\n\n\ndef predict_false(args):\n \"\"\"Never say the process is threadable.\"\"\"\n return False\n\n\n@lazyobject\ndef SHELL_PREDICTOR_PARSER():\n p = argparse.ArgumentParser('shell', add_help=False)\n p.add_argument('-c', nargs='?', default=None)\n p.add_argument('filename', nargs='?', default=None)\n return p\n\n\ndef predict_shell(args):\n \"\"\"Precict 
the backgroundability of the normal shell interface, which\n comes down to whether it is being run in subproc mode.\n \"\"\"\n ns, _ = SHELL_PREDICTOR_PARSER.parse_known_args(args)\n if ns.c is None and ns.filename is None:\n pred = False\n else:\n pred = True\n return pred\n\n\n@lazyobject\ndef HELP_VER_PREDICTOR_PARSER():\n p = argparse.ArgumentParser('cmd', add_help=False)\n p.add_argument('-h', '--help', dest='help',\n action='store_true', default=None)\n p.add_argument('-v', '-V', '--version', dest='version',\n action='store_true', default=None)\n return p\n\n\ndef predict_help_ver(args):\n \"\"\"Precict the backgroundability of commands that have help & version\n switches: -h, --help, -v, -V, --version. If either of these options is\n present, the command is assumed to print to stdout normally and is therefore\n threadable. Otherwise, the command is assumed to not be threadable.\n This is useful for commands, like top, that normally enter alternate mode\n but may not in certain circumstances.\n \"\"\"\n ns, _ = HELP_VER_PREDICTOR_PARSER.parse_known_args(args)\n pred = ns.help is not None or ns.version is not None\n return pred\n\n\ndef default_threadable_predictors():\n \"\"\"Generates a new defaultdict for known threadable predictors.\n The default is to predict true.\n \"\"\"\n # alphabetical, for what it is worth.\n predictors = {\n 'bash': predict_shell,\n 'csh': predict_shell,\n 'clear': predict_false,\n 'clear.exe': predict_false,\n 'cls': predict_false,\n 'cmd': predict_shell,\n 'fish': predict_shell,\n 'htop': predict_help_ver,\n 'ksh': predict_shell,\n 'less': predict_help_ver,\n 'man': predict_help_ver,\n 'more': predict_help_ver,\n 'sh': predict_shell,\n 'ssh': predict_false,\n 'startx': predict_false,\n 'sudo': predict_help_ver,\n 'tcsh': predict_shell,\n 'top': predict_help_ver,\n 'vi': predict_false,\n 'vim': predict_false,\n 'vimpager': predict_help_ver,\n 'xo': predict_help_ver,\n 'xonsh': predict_shell,\n 'zsh': predict_shell,\n }\n return collections.defaultdict(lambda: predict_true, predictors)\n", "path": "xonsh/commands_cache.py"}]} | 3,520 | 108 |
gh_patches_debug_4481 | rasdani/github-patches | git_diff | nipy__nipype-2129 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ANTs interfaces crash when checking version
### Summary
ANTs interfaces crash if the version does not contain commit hash. E.g.:
```
antsRegistration --version
ANTs Version: 2.2.0
```
### Actual behavior
```
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/nipype/pipeline/plugins/multiproc.py", line 52, in run_node
result['result'] = node.run(updatehash=updatehash)
File "/usr/local/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py", line 372, in run
self._run_interface()
File "/usr/local/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py", line 482, in _run_interface
self._result = self._run_command(execute)
File "/usr/local/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py", line 613, in _run_command
result = self._interface.run()
File "/usr/local/lib/python3.6/site-packages/nipype/interfaces/base.py", line 1081, in run
version=self.version)
File "/usr/local/lib/python3.6/site-packages/nipype/interfaces/ants/base.py", line 127, in version
return Info().version
File "/usr/local/lib/python3.6/site-packages/nipype/interfaces/ants/base.py", line 56, in version
v_string, githash = self._version.split('-')
ValueError: not enough values to unpack (expected 2, got 1)
```
### Expected behavior
`githash` should not be mandatory
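
A minimal sketch of the tolerant parsing this implies, reusing the names from the traceback above (`gabc123` is a made-up example hash):

```python
for reported in ('2.2.0', '2.2.0-gabc123'):
    # old code: v_string, githash = reported.split('-')  -> ValueError on '2.2.0'
    v_string = reported.split('-')[0]   # keep the version part, ignore a githash if present
    print(v_string)                     # '2.2.0' in both cases
```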
### How to replicate the behavior
Shadow the original `antsRegistration` with a mock that just prints the above command.
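For example, a throwaway script saved as `antsRegistration` inside `$ANTSPATH` (the directory that `Info.version` consults) and marked executable is enough to trigger the crash; the contents below are an assumption for illustration:

```python
#!/usr/bin/env python
# Mock antsRegistration that reports a version with no "-githash" suffix.
print("ANTs Version: 2.2.0")
```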
### Script/Workflow details
https://github.com/poldracklab/mriqc/issues/600
### Platform details:
Unavailable. Should not be relevant.
### Execution environment
Should not be relevant.
</issue>
<code>
[start of nipype/interfaces/ants/base.py]
1 # -*- coding: utf-8 -*-
2 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
3 # vi: set ft=python sts=4 ts=4 sw=4 et:
4 """The ants module provides basic functions for interfacing with ANTS tools."""
5 from __future__ import print_function, division, unicode_literals, absolute_import
6 from builtins import str
7
8 import os
9 import subprocess
10
11 # Local imports
12 from ... import logging, LooseVersion
13 from ..base import CommandLine, CommandLineInputSpec, traits, isdefined
14 logger = logging.getLogger('interface')
15
16 # -Using -1 gives primary responsibilty to ITKv4 to do the correct
17 # thread limitings.
18 # -Using 1 takes a very conservative approach to avoid overloading
19 # the computer (when running MultiProc) by forcing everything to
20 # single threaded. This can be a severe penalty for registration
21 # performance.
22 LOCAL_DEFAULT_NUMBER_OF_THREADS = 1
23 # -Using NSLOTS has the same behavior as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS
24 # as long as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS is not set. Otherwise
25 # ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS takes precidence.
26 # This behavior states that you the user explicitly specifies
27 # num_threads, then respect that no matter what SGE tries to limit.
28 PREFERED_ITKv4_THREAD_LIMIT_VARIABLE = 'NSLOTS'
29 ALT_ITKv4_THREAD_LIMIT_VARIABLE = 'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS'
30
31
32 class Info(object):
33 _version = None
34
35 @property
36 def version(self):
37 if self._version is None:
38 try:
39 basedir = os.environ['ANTSPATH']
40 except KeyError:
41 return None
42
43 cmd = os.path.join(basedir, 'antsRegistration')
44 try:
45 res = subprocess.check_output([cmd, '--version']).decode('utf-8')
46 except OSError:
47 return None
48
49 for line in res.splitlines():
50 if line.startswith('ANTs Version: '):
51 self._version = line.split()[2]
52 break
53 else:
54 return None
55
56 v_string, githash = self._version.split('-')
57
58 # 2.2.0-equivalent version string
59 if 'post' in v_string and LooseVersion(v_string) >= LooseVersion('2.1.0.post789'):
60 return '2.2.0'
61 else:
62 return '.'.join(v_string.split('.')[:3])
63
64
65 class ANTSCommandInputSpec(CommandLineInputSpec):
66 """Base Input Specification for all ANTS Commands
67 """
68
69 num_threads = traits.Int(LOCAL_DEFAULT_NUMBER_OF_THREADS, usedefault=True,
70 nohash=True, desc="Number of ITK threads to use")
71
72
73 class ANTSCommand(CommandLine):
74 """Base class for ANTS interfaces
75 """
76
77 input_spec = ANTSCommandInputSpec
78 _num_threads = LOCAL_DEFAULT_NUMBER_OF_THREADS
79
80 def __init__(self, **inputs):
81 super(ANTSCommand, self).__init__(**inputs)
82 self.inputs.on_trait_change(self._num_threads_update, 'num_threads')
83
84 if not isdefined(self.inputs.num_threads):
85 self.inputs.num_threads = self._num_threads
86 else:
87 self._num_threads_update()
88
89 def _num_threads_update(self):
90 self._num_threads = self.inputs.num_threads
91 # ONLY SET THE ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS if requested
92 # by the end user. The default setting did not allow for
93 # overwriting the default values.
94 # In ITKv4 (the version used for all ANTS programs), ITK respects
95 # the SGE controlled $NSLOTS environmental variable.
96 # If user specifies -1, then that indicates that the system
97 # default behavior should be the one specified by ITKv4 rules
98 # (i.e. respect SGE $NSLOTS or environmental variables of threads, or
99 # user environmental settings)
100 if (self.inputs.num_threads == -1):
101 if (ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ):
102 del self.inputs.environ[ALT_ITKv4_THREAD_LIMIT_VARIABLE]
103 if (PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ):
104 del self.inputs.environ[PREFERED_ITKv4_THREAD_LIMIT_VARIABLE]
105 else:
106 self.inputs.environ.update({PREFERED_ITKv4_THREAD_LIMIT_VARIABLE:
107 '%s' % self.inputs.num_threads})
108
109 @staticmethod
110 def _format_xarray(val):
111 """ Convenience method for converting input arrays [1,2,3] to commandline format '1x2x3' """
112 return 'x'.join([str(x) for x in val])
113
114 @classmethod
115 def set_default_num_threads(cls, num_threads):
116 """Set the default number of threads for ITK calls
117
118 This method is used to set the default number of ITK threads for all
119 the ANTS interfaces. However, setting this will not update the output
120 type for any existing instances. For these, assign the
121 <instance>.inputs.num_threads
122 """
123 cls._num_threads = num_threads
124
125 @property
126 def version(self):
127 return Info().version
128
[end of nipype/interfaces/ants/base.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py
--- a/nipype/interfaces/ants/base.py
+++ b/nipype/interfaces/ants/base.py
@@ -53,7 +53,8 @@
else:
return None
- v_string, githash = self._version.split('-')
+ # -githash may or may not be appended
+ v_string = self._version.split('-')[0]
# 2.2.0-equivalent version string
if 'post' in v_string and LooseVersion(v_string) >= LooseVersion('2.1.0.post789'):
| {"golden_diff": "diff --git a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py\n--- a/nipype/interfaces/ants/base.py\n+++ b/nipype/interfaces/ants/base.py\n@@ -53,7 +53,8 @@\n else:\n return None\n \n- v_string, githash = self._version.split('-')\n+ # -githash may or may not be appended\n+ v_string = self._version.split('-')[0]\n \n # 2.2.0-equivalent version string\n if 'post' in v_string and LooseVersion(v_string) >= LooseVersion('2.1.0.post789'):\n", "issue": "ANTs interfaces crash when checking version\n### Summary\r\nANTs interfaces crash if the version does not contain commit hash. E.g.:\r\n```\r\nantsRegistration --version\r\nANTs Version: 2.2.0\r\n```\r\n\r\n### Actual behavior\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/nipype/pipeline/plugins/multiproc.py\", line 52, in run_node\r\n result['result'] = node.run(updatehash=updatehash)\r\n File \"/usr/local/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 372, in run\r\n self._run_interface()\r\n File \"/usr/local/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 482, in _run_interface\r\n self._result = self._run_command(execute)\r\n File \"/usr/local/lib/python3.6/site-packages/nipype/pipeline/engine/nodes.py\", line 613, in _run_command\r\n result = self._interface.run()\r\n File \"/usr/local/lib/python3.6/site-packages/nipype/interfaces/base.py\", line 1081, in run\r\n version=self.version)\r\n File \"/usr/local/lib/python3.6/site-packages/nipype/interfaces/ants/base.py\", line 127, in version\r\n return Info().version\r\n File \"/usr/local/lib/python3.6/site-packages/nipype/interfaces/ants/base.py\", line 56, in version\r\n v_string, githash = self._version.split('-')\r\nValueError: not enough values to unpack (expected 2, got 1)\r\n```\r\n\r\n### Expected behavior\r\n`githash` should not be mandatory\r\n\r\n### How to replicate the behavior\r\nShadow the original `antsRegistration` with a mock that just prints the above command.\r\n\r\n### Script/Workflow details\r\nhttps://github.com/poldracklab/mriqc/issues/600\r\n\r\n### Platform details:\r\nUnavailable. Should not be relevant.\r\n\r\n### Execution environment\r\nShould not be relevant.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-\n# vi: set ft=python sts=4 ts=4 sw=4 et:\n\"\"\"The ants module provides basic functions for interfacing with ANTS tools.\"\"\"\nfrom __future__ import print_function, division, unicode_literals, absolute_import\nfrom builtins import str\n\nimport os\nimport subprocess\n\n# Local imports\nfrom ... import logging, LooseVersion\nfrom ..base import CommandLine, CommandLineInputSpec, traits, isdefined\nlogger = logging.getLogger('interface')\n\n# -Using -1 gives primary responsibilty to ITKv4 to do the correct\n# thread limitings.\n# -Using 1 takes a very conservative approach to avoid overloading\n# the computer (when running MultiProc) by forcing everything to\n# single threaded. This can be a severe penalty for registration\n# performance.\nLOCAL_DEFAULT_NUMBER_OF_THREADS = 1\n# -Using NSLOTS has the same behavior as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS\n# as long as ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS is not set. 
Otherwise\n# ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS takes precidence.\n# This behavior states that you the user explicitly specifies\n# num_threads, then respect that no matter what SGE tries to limit.\nPREFERED_ITKv4_THREAD_LIMIT_VARIABLE = 'NSLOTS'\nALT_ITKv4_THREAD_LIMIT_VARIABLE = 'ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS'\n\n\nclass Info(object):\n _version = None\n\n @property\n def version(self):\n if self._version is None:\n try:\n basedir = os.environ['ANTSPATH']\n except KeyError:\n return None\n\n cmd = os.path.join(basedir, 'antsRegistration')\n try:\n res = subprocess.check_output([cmd, '--version']).decode('utf-8')\n except OSError:\n return None\n\n for line in res.splitlines():\n if line.startswith('ANTs Version: '):\n self._version = line.split()[2]\n break\n else:\n return None\n\n v_string, githash = self._version.split('-')\n\n # 2.2.0-equivalent version string\n if 'post' in v_string and LooseVersion(v_string) >= LooseVersion('2.1.0.post789'):\n return '2.2.0'\n else:\n return '.'.join(v_string.split('.')[:3])\n\n\nclass ANTSCommandInputSpec(CommandLineInputSpec):\n \"\"\"Base Input Specification for all ANTS Commands\n \"\"\"\n\n num_threads = traits.Int(LOCAL_DEFAULT_NUMBER_OF_THREADS, usedefault=True,\n nohash=True, desc=\"Number of ITK threads to use\")\n\n\nclass ANTSCommand(CommandLine):\n \"\"\"Base class for ANTS interfaces\n \"\"\"\n\n input_spec = ANTSCommandInputSpec\n _num_threads = LOCAL_DEFAULT_NUMBER_OF_THREADS\n\n def __init__(self, **inputs):\n super(ANTSCommand, self).__init__(**inputs)\n self.inputs.on_trait_change(self._num_threads_update, 'num_threads')\n\n if not isdefined(self.inputs.num_threads):\n self.inputs.num_threads = self._num_threads\n else:\n self._num_threads_update()\n\n def _num_threads_update(self):\n self._num_threads = self.inputs.num_threads\n # ONLY SET THE ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS if requested\n # by the end user. The default setting did not allow for\n # overwriting the default values.\n # In ITKv4 (the version used for all ANTS programs), ITK respects\n # the SGE controlled $NSLOTS environmental variable.\n # If user specifies -1, then that indicates that the system\n # default behavior should be the one specified by ITKv4 rules\n # (i.e. respect SGE $NSLOTS or environmental variables of threads, or\n # user environmental settings)\n if (self.inputs.num_threads == -1):\n if (ALT_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ):\n del self.inputs.environ[ALT_ITKv4_THREAD_LIMIT_VARIABLE]\n if (PREFERED_ITKv4_THREAD_LIMIT_VARIABLE in self.inputs.environ):\n del self.inputs.environ[PREFERED_ITKv4_THREAD_LIMIT_VARIABLE]\n else:\n self.inputs.environ.update({PREFERED_ITKv4_THREAD_LIMIT_VARIABLE:\n '%s' % self.inputs.num_threads})\n\n @staticmethod\n def _format_xarray(val):\n \"\"\" Convenience method for converting input arrays [1,2,3] to commandline format '1x2x3' \"\"\"\n return 'x'.join([str(x) for x in val])\n\n @classmethod\n def set_default_num_threads(cls, num_threads):\n \"\"\"Set the default number of threads for ITK calls\n\n This method is used to set the default number of ITK threads for all\n the ANTS interfaces. However, setting this will not update the output\n type for any existing instances. For these, assign the\n <instance>.inputs.num_threads\n \"\"\"\n cls._num_threads = num_threads\n\n @property\n def version(self):\n return Info().version\n", "path": "nipype/interfaces/ants/base.py"}]} | 2,393 | 147 |
gh_patches_debug_1326 | rasdani/github-patches | git_diff | iterative__dvc-1757 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
typo in docs
super minor typo:
$dvc repro --help
-c CWD, --cwd CWD Directory within your repo to **reroduce** from.
dvc --version
0.30.1
</issue>
<code>
[start of dvc/command/repro.py]
1 from __future__ import unicode_literals
2
3 import os
4
5 import dvc.logger as logger
6 from dvc.command.base import CmdBase
7 from dvc.command.status import CmdDataStatus
8 from dvc.exceptions import DvcException
9
10
11 class CmdRepro(CmdBase):
12 def run(self):
13 recursive = not self.args.single_item
14 saved_dir = os.path.realpath(os.curdir)
15 if self.args.cwd:
16 os.chdir(self.args.cwd)
17
18 # Dirty hack so the for loop below can at least enter once
19 if self.args.all_pipelines:
20 self.args.targets = [None]
21 elif not self.args.targets:
22 self.args.targets = self.default_targets
23
24 ret = 0
25 for target in self.args.targets:
26 try:
27 stages = self.repo.reproduce(
28 target,
29 recursive=recursive,
30 force=self.args.force,
31 dry=self.args.dry,
32 interactive=self.args.interactive,
33 pipeline=self.args.pipeline,
34 all_pipelines=self.args.all_pipelines,
35 ignore_build_cache=self.args.ignore_build_cache,
36 no_commit=self.args.no_commit,
37 )
38
39 if len(stages) == 0:
40 logger.info(CmdDataStatus.UP_TO_DATE_MSG)
41
42 if self.args.metrics:
43 self.repo.metrics.show()
44 except DvcException:
45 logger.error()
46 ret = 1
47 break
48
49 os.chdir(saved_dir)
50 return ret
51
52
53 def add_parser(subparsers, parent_parser):
54 REPRO_HELP = "Reproduce DVC file. Default file name - 'Dvcfile'."
55 repro_parser = subparsers.add_parser(
56 "repro",
57 parents=[parent_parser],
58 description=REPRO_HELP,
59 help=REPRO_HELP,
60 )
61 repro_parser.add_argument(
62 "targets", nargs="*", help="DVC file to reproduce."
63 )
64 repro_parser.add_argument(
65 "-f",
66 "--force",
67 action="store_true",
68 default=False,
69 help="Reproduce even if dependencies were not changed.",
70 )
71 repro_parser.add_argument(
72 "-s",
73 "--single-item",
74 action="store_true",
75 default=False,
76 help="Reproduce only single data item without recursive dependencies "
77 "check.",
78 )
79 repro_parser.add_argument(
80 "-c",
81 "--cwd",
82 default=os.path.curdir,
83 help="Directory within your repo to reroduce from.",
84 )
85 repro_parser.add_argument(
86 "-m",
87 "--metrics",
88 action="store_true",
89 default=False,
90 help="Show metrics after reproduction.",
91 )
92 repro_parser.add_argument(
93 "--dry",
94 action="store_true",
95 default=False,
96 help="Only print the commands that would be executed without "
97 "actually executing.",
98 )
99 repro_parser.add_argument(
100 "-i",
101 "--interactive",
102 action="store_true",
103 default=False,
104 help="Ask for confirmation before reproducing each stage.",
105 )
106 repro_parser.add_argument(
107 "-p",
108 "--pipeline",
109 action="store_true",
110 default=False,
111 help="Reproduce the whole pipeline that the specified stage file "
112 "belongs to.",
113 )
114 repro_parser.add_argument(
115 "-P",
116 "--all-pipelines",
117 action="store_true",
118 default=False,
119 help="Reproduce all pipelines in the repo.",
120 )
121 repro_parser.add_argument(
122 "--ignore-build-cache",
123 action="store_true",
124 default=False,
125 help="Reproduce all descendants of a changed stage even if their "
126 "direct dependencies didn't change.",
127 )
128 repro_parser.add_argument(
129 "--no-commit",
130 action="store_true",
131 default=False,
132 help="Don't put files/directories into cache.",
133 )
134 repro_parser.set_defaults(func=CmdRepro)
135
[end of dvc/command/repro.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/dvc/command/repro.py b/dvc/command/repro.py
--- a/dvc/command/repro.py
+++ b/dvc/command/repro.py
@@ -80,7 +80,7 @@
"-c",
"--cwd",
default=os.path.curdir,
- help="Directory within your repo to reroduce from.",
+ help="Directory within your repo to reproduce from.",
)
repro_parser.add_argument(
"-m",
| {"golden_diff": "diff --git a/dvc/command/repro.py b/dvc/command/repro.py\n--- a/dvc/command/repro.py\n+++ b/dvc/command/repro.py\n@@ -80,7 +80,7 @@\n \"-c\",\n \"--cwd\",\n default=os.path.curdir,\n- help=\"Directory within your repo to reroduce from.\",\n+ help=\"Directory within your repo to reproduce from.\",\n )\n repro_parser.add_argument(\n \"-m\",\n", "issue": "typo in docs\nsuper minor typo:\r\n\r\n$dvc repro --help\r\n -c CWD, --cwd CWD Directory within your repo to **reroduce** from.\r\n\r\ndvc --version\r\n0.30.1\r\n\r\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport os\n\nimport dvc.logger as logger\nfrom dvc.command.base import CmdBase\nfrom dvc.command.status import CmdDataStatus\nfrom dvc.exceptions import DvcException\n\n\nclass CmdRepro(CmdBase):\n def run(self):\n recursive = not self.args.single_item\n saved_dir = os.path.realpath(os.curdir)\n if self.args.cwd:\n os.chdir(self.args.cwd)\n\n # Dirty hack so the for loop below can at least enter once\n if self.args.all_pipelines:\n self.args.targets = [None]\n elif not self.args.targets:\n self.args.targets = self.default_targets\n\n ret = 0\n for target in self.args.targets:\n try:\n stages = self.repo.reproduce(\n target,\n recursive=recursive,\n force=self.args.force,\n dry=self.args.dry,\n interactive=self.args.interactive,\n pipeline=self.args.pipeline,\n all_pipelines=self.args.all_pipelines,\n ignore_build_cache=self.args.ignore_build_cache,\n no_commit=self.args.no_commit,\n )\n\n if len(stages) == 0:\n logger.info(CmdDataStatus.UP_TO_DATE_MSG)\n\n if self.args.metrics:\n self.repo.metrics.show()\n except DvcException:\n logger.error()\n ret = 1\n break\n\n os.chdir(saved_dir)\n return ret\n\n\ndef add_parser(subparsers, parent_parser):\n REPRO_HELP = \"Reproduce DVC file. 
Default file name - 'Dvcfile'.\"\n repro_parser = subparsers.add_parser(\n \"repro\",\n parents=[parent_parser],\n description=REPRO_HELP,\n help=REPRO_HELP,\n )\n repro_parser.add_argument(\n \"targets\", nargs=\"*\", help=\"DVC file to reproduce.\"\n )\n repro_parser.add_argument(\n \"-f\",\n \"--force\",\n action=\"store_true\",\n default=False,\n help=\"Reproduce even if dependencies were not changed.\",\n )\n repro_parser.add_argument(\n \"-s\",\n \"--single-item\",\n action=\"store_true\",\n default=False,\n help=\"Reproduce only single data item without recursive dependencies \"\n \"check.\",\n )\n repro_parser.add_argument(\n \"-c\",\n \"--cwd\",\n default=os.path.curdir,\n help=\"Directory within your repo to reroduce from.\",\n )\n repro_parser.add_argument(\n \"-m\",\n \"--metrics\",\n action=\"store_true\",\n default=False,\n help=\"Show metrics after reproduction.\",\n )\n repro_parser.add_argument(\n \"--dry\",\n action=\"store_true\",\n default=False,\n help=\"Only print the commands that would be executed without \"\n \"actually executing.\",\n )\n repro_parser.add_argument(\n \"-i\",\n \"--interactive\",\n action=\"store_true\",\n default=False,\n help=\"Ask for confirmation before reproducing each stage.\",\n )\n repro_parser.add_argument(\n \"-p\",\n \"--pipeline\",\n action=\"store_true\",\n default=False,\n help=\"Reproduce the whole pipeline that the specified stage file \"\n \"belongs to.\",\n )\n repro_parser.add_argument(\n \"-P\",\n \"--all-pipelines\",\n action=\"store_true\",\n default=False,\n help=\"Reproduce all pipelines in the repo.\",\n )\n repro_parser.add_argument(\n \"--ignore-build-cache\",\n action=\"store_true\",\n default=False,\n help=\"Reproduce all descendants of a changed stage even if their \"\n \"direct dependencies didn't change.\",\n )\n repro_parser.add_argument(\n \"--no-commit\",\n action=\"store_true\",\n default=False,\n help=\"Don't put files/directories into cache.\",\n )\n repro_parser.set_defaults(func=CmdRepro)\n", "path": "dvc/command/repro.py"}]} | 1,685 | 101 |
gh_patches_debug_20886 | rasdani/github-patches | git_diff | ansible__ansible-modules-extras-715 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
bower module throws an error if the bower_components dir already exists
I run the following task in my playbook:
``` yaml
- name: install bower vendor dependencies
bower: path="/path/to/project" state=latest
```
If the `bower_components` dir does not exist in `/path/to/project`, it downloads all dependencies and works fine, but if I run this task again, I get the following error:
```
< TASK: install bower vendor dependencies >
<transformation.server.ndv.net.ua> ESTABLISH CONNECTION FOR USER: srvadmin
<transformation.server.ndv.net.ua> REMOTE_MODULE bower path="/var/www/sites/transformation/web" state=latest
<transformation.server.ndv.net.ua> EXEC ssh -C -tt -vvv -o ControlMaster=auto -o ControlPersist=60s -o ControlPath="/home/victor/.ansible/cp/ansible-ssh-%h-%p-%r" -o Port=22 -o KbdInteractiveAuthentication=no -o PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o User=srvadmin -o ConnectTimeout=10 transformation.server.ndv.net.ua /bin/sh -c 'mkdir -p $HOME/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340 && chmod a+rx $HOME/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340 && echo $HOME/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340'
<transformation.server.ndv.net.ua> PUT /tmp/tmpjZzhjm TO /home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower
<transformation.server.ndv.net.ua> EXEC ssh -C -tt -vvv -o ControlMaster=auto -o ControlPersist=60s -o ControlPath="/home/victor/.ansible/cp/ansible-ssh-%h-%p-%r" -o Port=22 -o KbdInteractiveAuthentication=no -o PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o User=srvadmin -o ConnectTimeout=10 transformation.server.ndv.net.ua /bin/sh -c 'sudo -k && sudo -H -S -p "[sudo via ansible, key=vilcuzdrghmtlvfndvzibzqgxnyjsrmj] password: " -u root /bin/sh -c '"'"'echo BECOME-SUCCESS-vilcuzdrghmtlvfndvzibzqgxnyjsrmj; LANG=C LC_CTYPE=C /usr/bin/python /home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower; rm -rf /home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/ >/dev/null 2>&1'"'"''
failed: [localhost] => {"failed": true, "parsed": false}
BECOME-SUCCESS-vilcuzdrghmtlvfndvzibzqgxnyjsrmj
Traceback (most recent call last):
File "/home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower", line 1786, in <module>
main()
File "/home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower", line 173, in main
installed, missing, outdated = bower.list()
File "/home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower", line 121, in list
elif data['dependencies'][dep]['pkgMeta']['version'] != data['dependencies'][dep]['update']['latest']:
KeyError: 'version'
debug3: mux_client_read_packet: read header failed: Broken pipe
debug2: Received exit status from master 0
Shared connection to transformation.server.ndv.net.ua closed.
```
It looks like a bug.
P.S. I run the playbook with the `-vvvv` flag. The remote machine is Ubuntu 12.04; bower is installed and works correctly if I run it manually with `bower install` or `bower update`.
My workaround is to manually remove the `bower_components` folder first if it exists and then execute the `install bower vendor dependencies` task.
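
On the module side, the traceback points at `Bower.list()` assuming that every dependency returned by `bower list --json` carries both `pkgMeta['version']` and an `update` entry. A self-contained sketch of a defensive version of that branch (key names come from the traceback; the fake payload and `.get()` defaults are assumptions):

```python
# Fake `bower list --json` payload for a package whose pkgMeta lacks 'version'.
data = {'dependencies': {'bootstrap': {'pkgMeta': {}, 'update': {'latest': '3.1.1'}}}}

installed, missing, outdated = [], [], []
for dep, dep_data in data['dependencies'].items():
    if dep_data.get('missing', False):
        missing.append(dep)
    elif ('version' in dep_data['pkgMeta'] and 'update' in dep_data and
            dep_data['pkgMeta']['version'] != dep_data['update']['latest']):
        outdated.append(dep)
    elif dep_data.get('incompatible', False):
        outdated.append(dep)
    else:
        installed.append(dep)

print(installed, missing, outdated)   # no KeyError for the incomplete entry
```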
</issue>
<code>
[start of packaging/language/bower.py]
1 #!/usr/bin/python
2 # -*- coding: utf-8 -*-
3
4 # (c) 2014, Michael Warkentin <[email protected]>
5 #
6 # This file is part of Ansible
7 #
8 # Ansible is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # Ansible is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the GNU General Public License
19 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.
20
21 DOCUMENTATION = '''
22 ---
23 module: bower
24 short_description: Manage bower packages with bower
25 description:
26 - Manage bower packages with bower
27 version_added: 1.9
28 author: "Michael Warkentin (@mwarkentin)"
29 options:
30 name:
31 description:
32 - The name of a bower package to install
33 required: false
34 offline:
35 description:
36 - Install packages from local cache, if the packages were installed before
37 required: false
38 default: no
39 choices: [ "yes", "no" ]
40 path:
41 description:
42 - The base path where to install the bower packages
43 required: true
44 state:
45 description:
46 - The state of the bower package
47 required: false
48 default: present
49 choices: [ "present", "absent", "latest" ]
50 version:
51 description:
52 - The version to be installed
53 required: false
54 '''
55
56 EXAMPLES = '''
57 description: Install "bootstrap" bower package.
58 - bower: name=bootstrap
59
60 description: Install "bootstrap" bower package on version 3.1.1.
61 - bower: name=bootstrap version=3.1.1
62
63 description: Remove the "bootstrap" bower package.
64 - bower: name=bootstrap state=absent
65
66 description: Install packages based on bower.json.
67 - bower: path=/app/location
68
69 description: Update packages based on bower.json to their latest version.
70 - bower: path=/app/location state=latest
71 '''
72
73
74 class Bower(object):
75 def __init__(self, module, **kwargs):
76 self.module = module
77 self.name = kwargs['name']
78 self.offline = kwargs['offline']
79 self.path = kwargs['path']
80 self.version = kwargs['version']
81
82 if kwargs['version']:
83 self.name_version = self.name + '#' + self.version
84 else:
85 self.name_version = self.name
86
87 def _exec(self, args, run_in_check_mode=False, check_rc=True):
88 if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
89 cmd = ["bower"] + args + ['--config.interactive=false', '--allow-root']
90
91 if self.name:
92 cmd.append(self.name_version)
93
94 if self.offline:
95 cmd.append('--offline')
96
97 # If path is specified, cd into that path and run the command.
98 cwd = None
99 if self.path:
100 if not os.path.exists(self.path):
101 os.makedirs(self.path)
102 if not os.path.isdir(self.path):
103 self.module.fail_json(msg="path %s is not a directory" % self.path)
104 cwd = self.path
105
106 rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
107 return out
108 return ''
109
110 def list(self):
111 cmd = ['list', '--json']
112
113 installed = list()
114 missing = list()
115 outdated = list()
116 data = json.loads(self._exec(cmd, True, False))
117 if 'dependencies' in data:
118 for dep in data['dependencies']:
119 if 'missing' in data['dependencies'][dep] and data['dependencies'][dep]['missing']:
120 missing.append(dep)
121 elif data['dependencies'][dep]['pkgMeta']['version'] != data['dependencies'][dep]['update']['latest']:
122 outdated.append(dep)
123 elif 'incompatible' in data['dependencies'][dep] and data['dependencies'][dep]['incompatible']:
124 outdated.append(dep)
125 else:
126 installed.append(dep)
127 # Named dependency not installed
128 else:
129 missing.append(self.name)
130
131 return installed, missing, outdated
132
133 def install(self):
134 return self._exec(['install'])
135
136 def update(self):
137 return self._exec(['update'])
138
139 def uninstall(self):
140 return self._exec(['uninstall'])
141
142
143 def main():
144 arg_spec = dict(
145 name=dict(default=None),
146 offline=dict(default='no', type='bool'),
147 path=dict(required=True),
148 state=dict(default='present', choices=['present', 'absent', 'latest', ]),
149 version=dict(default=None),
150 )
151 module = AnsibleModule(
152 argument_spec=arg_spec
153 )
154
155 name = module.params['name']
156 offline = module.params['offline']
157 path = os.path.expanduser(module.params['path'])
158 state = module.params['state']
159 version = module.params['version']
160
161 if state == 'absent' and not name:
162 module.fail_json(msg='uninstalling a package is only available for named packages')
163
164 bower = Bower(module, name=name, offline=offline, path=path, version=version)
165
166 changed = False
167 if state == 'present':
168 installed, missing, outdated = bower.list()
169 if len(missing):
170 changed = True
171 bower.install()
172 elif state == 'latest':
173 installed, missing, outdated = bower.list()
174 if len(missing) or len(outdated):
175 changed = True
176 bower.update()
177 else: # Absent
178 installed, missing, outdated = bower.list()
179 if name in installed:
180 changed = True
181 bower.uninstall()
182
183 module.exit_json(changed=changed)
184
185 # Import module snippets
186 from ansible.module_utils.basic import *
187 main()
188
[end of packaging/language/bower.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/packaging/language/bower.py b/packaging/language/bower.py
--- a/packaging/language/bower.py
+++ b/packaging/language/bower.py
@@ -116,11 +116,15 @@
data = json.loads(self._exec(cmd, True, False))
if 'dependencies' in data:
for dep in data['dependencies']:
- if 'missing' in data['dependencies'][dep] and data['dependencies'][dep]['missing']:
+ dep_data = data['dependencies'][dep]
+ if dep_data.get('missing', False):
missing.append(dep)
- elif data['dependencies'][dep]['pkgMeta']['version'] != data['dependencies'][dep]['update']['latest']:
+ elif \
+ 'version' in dep_data['pkgMeta'] and \
+ 'update' in dep_data and \
+ dep_data['pkgMeta']['version'] != dep_data['update']['latest']:
outdated.append(dep)
- elif 'incompatible' in data['dependencies'][dep] and data['dependencies'][dep]['incompatible']:
+ elif dep_data.get('incompatible', False):
outdated.append(dep)
else:
installed.append(dep)
| {"golden_diff": "diff --git a/packaging/language/bower.py b/packaging/language/bower.py\n--- a/packaging/language/bower.py\n+++ b/packaging/language/bower.py\n@@ -116,11 +116,15 @@\n data = json.loads(self._exec(cmd, True, False))\n if 'dependencies' in data:\n for dep in data['dependencies']:\n- if 'missing' in data['dependencies'][dep] and data['dependencies'][dep]['missing']:\n+ dep_data = data['dependencies'][dep]\n+ if dep_data.get('missing', False):\n missing.append(dep)\n- elif data['dependencies'][dep]['pkgMeta']['version'] != data['dependencies'][dep]['update']['latest']:\n+ elif \\\n+ 'version' in dep_data['pkgMeta'] and \\\n+ 'update' in dep_data and \\\n+ dep_data['pkgMeta']['version'] != dep_data['update']['latest']:\n outdated.append(dep)\n- elif 'incompatible' in data['dependencies'][dep] and data['dependencies'][dep]['incompatible']:\n+ elif dep_data.get('incompatible', False):\n outdated.append(dep)\n else:\n installed.append(dep)\n", "issue": "bower_module thrown an error if bower_components dir already exists\nI run next task in my playbook:\n\n``` yaml\n - name: install bower vendor dependencies\n bower: path=\"/path/to/project\" state=latest\n```\n\nAnd if `bower_components` dir not exists in `/path/to/project`, it download all dependencies and works fine, but if I run this task again, I get a next error:\n\n```\n< TASK: install bower vendor dependencies >\n<transformation.server.ndv.net.ua> ESTABLISH CONNECTION FOR USER: srvadmin\n<transformation.server.ndv.net.ua> REMOTE_MODULE bower path=\"/var/www/sites/transformation/web\" state=latest\n<transformation.server.ndv.net.ua> EXEC ssh -C -tt -vvv -o ControlMaster=auto -o ControlPersist=60s -o ControlPath=\"/home/victor/.ansible/cp/ansible-ssh-%h-%p-%r\" -o Port=22 -o KbdInteractiveAuthentication=no -o PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o User=srvadmin -o ConnectTimeout=10 transformation.server.ndv.net.ua /bin/sh -c 'mkdir -p $HOME/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340 && chmod a+rx $HOME/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340 && echo $HOME/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340'\n<transformation.server.ndv.net.ua> PUT /tmp/tmpjZzhjm TO /home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower\n<transformation.server.ndv.net.ua> EXEC ssh -C -tt -vvv -o ControlMaster=auto -o ControlPersist=60s -o ControlPath=\"/home/victor/.ansible/cp/ansible-ssh-%h-%p-%r\" -o Port=22 -o KbdInteractiveAuthentication=no -o PreferredAuthentications=gssapi-with-mic,gssapi-keyex,hostbased,publickey -o PasswordAuthentication=no -o User=srvadmin -o ConnectTimeout=10 transformation.server.ndv.net.ua /bin/sh -c 'sudo -k && sudo -H -S -p \"[sudo via ansible, key=vilcuzdrghmtlvfndvzibzqgxnyjsrmj] password: \" -u root /bin/sh -c '\"'\"'echo BECOME-SUCCESS-vilcuzdrghmtlvfndvzibzqgxnyjsrmj; LANG=C LC_CTYPE=C /usr/bin/python /home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower; rm -rf /home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/ >/dev/null 2>&1'\"'\"''\n\nfailed: [localhost] => {\"failed\": true, \"parsed\": false}\n\nBECOME-SUCCESS-vilcuzdrghmtlvfndvzibzqgxnyjsrmj\nTraceback (most recent call last):\n File \"/home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower\", line 1786, in <module>\n main()\n File \"/home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower\", line 173, in main\n installed, missing, outdated = 
bower.list()\n File \"/home/srvadmin/.ansible/tmp/ansible-tmp-1431673951.26-168306551298340/bower\", line 121, in list\n elif data['dependencies'][dep]['pkgMeta']['version'] != data['dependencies'][dep]['update']['latest']:\nKeyError: 'version'\ndebug3: mux_client_read_packet: read header failed: Broken pipe\ndebug2: Received exit status from master 0\nShared connection to transformation.server.ndv.net.ua closed.\n```\n\nIt looks like a bug.\n\nP.S. I run playbook with `-vvvv` key. Remote machine is Ubuntu 12.04, bower is installed and work correctly if I run it manually with `bower install` or `bower update`\n\nMy workaround is to remove manually `bower_components` folder first if exists and then execute `install bower vendor dependencies` task.\n\n", "before_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# (c) 2014, Michael Warkentin <[email protected]>\n#\n# This file is part of Ansible\n#\n# Ansible is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# Ansible is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with Ansible. If not, see <http://www.gnu.org/licenses/>.\n\nDOCUMENTATION = '''\n---\nmodule: bower\nshort_description: Manage bower packages with bower\ndescription:\n - Manage bower packages with bower\nversion_added: 1.9\nauthor: \"Michael Warkentin (@mwarkentin)\"\noptions:\n name:\n description:\n - The name of a bower package to install\n required: false\n offline:\n description:\n - Install packages from local cache, if the packages were installed before\n required: false\n default: no\n choices: [ \"yes\", \"no\" ]\n path:\n description:\n - The base path where to install the bower packages\n required: true\n state:\n description:\n - The state of the bower package\n required: false\n default: present\n choices: [ \"present\", \"absent\", \"latest\" ]\n version:\n description:\n - The version to be installed\n required: false\n'''\n\nEXAMPLES = '''\ndescription: Install \"bootstrap\" bower package.\n- bower: name=bootstrap\n\ndescription: Install \"bootstrap\" bower package on version 3.1.1.\n- bower: name=bootstrap version=3.1.1\n\ndescription: Remove the \"bootstrap\" bower package.\n- bower: name=bootstrap state=absent\n\ndescription: Install packages based on bower.json.\n- bower: path=/app/location\n\ndescription: Update packages based on bower.json to their latest version.\n- bower: path=/app/location state=latest\n'''\n\n\nclass Bower(object):\n def __init__(self, module, **kwargs):\n self.module = module\n self.name = kwargs['name']\n self.offline = kwargs['offline']\n self.path = kwargs['path']\n self.version = kwargs['version']\n\n if kwargs['version']:\n self.name_version = self.name + '#' + self.version\n else:\n self.name_version = self.name\n\n def _exec(self, args, run_in_check_mode=False, check_rc=True):\n if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):\n cmd = [\"bower\"] + args + ['--config.interactive=false', '--allow-root']\n\n if self.name:\n cmd.append(self.name_version)\n\n if self.offline:\n cmd.append('--offline')\n\n # If path is specified, cd into that 
path and run the command.\n cwd = None\n if self.path:\n if not os.path.exists(self.path):\n os.makedirs(self.path)\n if not os.path.isdir(self.path):\n self.module.fail_json(msg=\"path %s is not a directory\" % self.path)\n cwd = self.path\n\n rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)\n return out\n return ''\n\n def list(self):\n cmd = ['list', '--json']\n\n installed = list()\n missing = list()\n outdated = list()\n data = json.loads(self._exec(cmd, True, False))\n if 'dependencies' in data:\n for dep in data['dependencies']:\n if 'missing' in data['dependencies'][dep] and data['dependencies'][dep]['missing']:\n missing.append(dep)\n elif data['dependencies'][dep]['pkgMeta']['version'] != data['dependencies'][dep]['update']['latest']:\n outdated.append(dep)\n elif 'incompatible' in data['dependencies'][dep] and data['dependencies'][dep]['incompatible']:\n outdated.append(dep)\n else:\n installed.append(dep)\n # Named dependency not installed\n else:\n missing.append(self.name)\n\n return installed, missing, outdated\n\n def install(self):\n return self._exec(['install'])\n\n def update(self):\n return self._exec(['update'])\n\n def uninstall(self):\n return self._exec(['uninstall'])\n\n\ndef main():\n arg_spec = dict(\n name=dict(default=None),\n offline=dict(default='no', type='bool'),\n path=dict(required=True),\n state=dict(default='present', choices=['present', 'absent', 'latest', ]),\n version=dict(default=None),\n )\n module = AnsibleModule(\n argument_spec=arg_spec\n )\n\n name = module.params['name']\n offline = module.params['offline']\n path = os.path.expanduser(module.params['path'])\n state = module.params['state']\n version = module.params['version']\n\n if state == 'absent' and not name:\n module.fail_json(msg='uninstalling a package is only available for named packages')\n\n bower = Bower(module, name=name, offline=offline, path=path, version=version)\n\n changed = False\n if state == 'present':\n installed, missing, outdated = bower.list()\n if len(missing):\n changed = True\n bower.install()\n elif state == 'latest':\n installed, missing, outdated = bower.list()\n if len(missing) or len(outdated):\n changed = True\n bower.update()\n else: # Absent\n installed, missing, outdated = bower.list()\n if name in installed:\n changed = True\n bower.uninstall()\n\n module.exit_json(changed=changed)\n\n# Import module snippets\nfrom ansible.module_utils.basic import *\nmain()\n", "path": "packaging/language/bower.py"}]} | 3,472 | 256 |
gh_patches_debug_10641 | rasdani/github-patches | git_diff | holoviz__holoviews-749 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Always get warnings.warn("All-NaN slice encountered", RuntimeWarning) for Overlay
I always get this warning when creating an `hv.Overlay`:
``` python
x = np.linspace(0, 1)
hv.Path((x,x)) + hv.Path((x,x))
```
```
/home/bnijholt/anaconda3/lib/python3.5/site-packages/numpy/lib/nanfunctions.py:326: RuntimeWarning: All-NaN slice encountered
warnings.warn("All-NaN slice encountered", RuntimeWarning)
```
if run with:
```
import warnings
warnings.simplefilter('error')
```
it generates this traceback:
```
RuntimeWarningTraceback (most recent call last)
/home/bnijholt/anaconda3/lib/python3.5/site-packages/IPython/core/formatters.py in __call__(self, obj)
337 pass
338 else:
--> 339 return printer(obj)
340 # Finally look for special method names
341 method = _safe_get_formatter_method(obj, self.print_method)
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in pprint_display(obj)
214 if not ip.display_formatter.formatters['text/plain'].pprint:
215 return None
--> 216 return display(obj, raw=True)
217
218
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in display(obj, raw, **kwargs)
198 html = element_display(obj)
199 elif isinstance(obj, (Layout, NdLayout, AdjointLayout)):
--> 200 html = layout_display(obj)
201 elif isinstance(obj, (HoloMap, DynamicMap)):
202 html = map_display(obj)
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in wrapped(element)
101 html = fn(element,
102 max_frames=OutputMagic.options['max_frames'],
--> 103 max_branches = OutputMagic.options['max_branches'])
104
105 # Only want to add to the archive for one display hook...
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in layout_display(layout, max_frames, max_branches)
172 return '<tt>'+ sanitize_HTML(layout) + '</tt>'
173
--> 174 return render(layout)
175
176
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in render(obj, **kwargs)
49
50 backend = Store.current_backend
---> 51 return Store.renderers[backend].html(obj, **kwargs)
52
53
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/renderer.py in html(self, obj, fmt, css)
247 Renders plot or data structure and wraps the output in HTML.
248 """
--> 249 plot, fmt = self._validate(obj, fmt)
250 figdata, _ = self(plot, fmt)
251 if css is None: css = self.css
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/renderer.py in _validate(self, obj, fmt)
187 if isinstance(obj, tuple(self.widgets.values())):
188 return obj, 'html'
--> 189 plot = self.get_plot(obj)
190
191 fig_formats = self.mode_formats['fig'][self.mode]
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/renderer.py in get_plot(self_or_cls, obj)
173 obj = Layout.from_values(obj) if isinstance(obj, AdjointLayout) else obj
174 plot_opts = self_or_cls.plot_options(obj, self_or_cls.size)
--> 175 plot = self_or_cls.plotting_class(obj)(obj, **plot_opts)
176 plot.update(0)
177 else:
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/mpl/plot.py in __init__(self, layout, **params)
715 def __init__(self, layout, **params):
716 super(LayoutPlot, self).__init__(layout=layout, **params)
--> 717 self.subplots, self.subaxes, self.layout = self._compute_gridspec(layout)
718
719
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/mpl/plot.py in _compute_gridspec(self, layout)
804 for ratios in col[1] for r in ratios])/4
805
--> 806 wr_unnormalized = compute_ratios(col_widthratios, False)
807 hr_list = compute_ratios(row_heightratios)
808 wr_list = compute_ratios(col_widthratios)
/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/mpl/util.py in compute_ratios(ratios, normalized)
48 unpacked = normalize_ratios(unpacked)
49 sorted_ratios = sorted(unpacked.items())
---> 50 return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)
/home/bnijholt/anaconda3/lib/python3.5/site-packages/numpy/lib/nanfunctions.py in nanmax(a, axis, out, keepdims)
324 res = np.fmax.reduce(a, axis=axis, out=out, keepdims=keepdims)
325 if np.isnan(res).any():
--> 326 warnings.warn("All-NaN slice encountered", RuntimeWarning)
327 else:
328 # Slow, but safe for subclasses of ndarray
RuntimeWarning: All-NaN slice encountered
```
</issue>
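For context, the warning in the traceback comes from calling `np.nanmax` over a column that is entirely NaN. A self-contained snippet (the array contents here are made up) reproduces it and shows one way to silence just that warning:
```python
import warnings

import numpy as np

# The second column is all NaN, which is what triggers the RuntimeWarning
ratios = np.vstack([[1.0, np.nan], [2.0, np.nan]])

with warnings.catch_warnings():
    # Ignore only the "All-NaN slice encountered" warning raised by nanmax
    warnings.filterwarnings("ignore", r"All-NaN (slice|axis) encountered")
    print(np.nanmax(ratios, axis=0))  # [ 2. nan]
```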
<code>
[start of holoviews/plotting/mpl/util.py]
1 import re
2 import inspect
3
4 import numpy as np
5 from matplotlib import ticker
6
7 from ...core.util import basestring
8
9
10 def wrap_formatter(formatter):
11 """
12 Wraps formatting function or string in
13 appropriate matplotlib formatter type.
14 """
15 if isinstance(formatter, ticker.Formatter):
16 return formatter
17 elif callable(formatter):
18 args = [arg for arg in inspect.getargspec(formatter).args
19 if arg != 'self']
20 wrapped = formatter
21 if len(args) == 1:
22 def wrapped(val, pos=None):
23 return formatter(val)
24 return ticker.FuncFormatter(wrapped)
25 elif isinstance(formatter, basestring):
26 if re.findall(r"\{(\w+)\}", formatter):
27 return ticker.StrMethodFormatter(formatter)
28 else:
29 return ticker.FormatStrFormatter(formatter)
30
31 def unpack_adjoints(ratios):
32 new_ratios = {}
33 offset = 0
34 for k, (num, ratios) in sorted(ratios.items()):
35 unpacked = [[] for _ in range(num)]
36 for r in ratios:
37 nr = len(r)
38 for i in range(num):
39 unpacked[i].append(r[i] if i < nr else np.nan)
40 for i, r in enumerate(unpacked):
41 new_ratios[k+i+offset] = r
42 offset += num-1
43 return new_ratios
44
45 def normalize_ratios(ratios):
46 normalized = {}
47 for i, v in enumerate(zip(*ratios.values())):
48 arr = np.array(v)
49 normalized[i] = arr/float(np.nanmax(arr))
50 return normalized
51
52 def compute_ratios(ratios, normalized=True):
53 unpacked = unpack_adjoints(ratios)
54 if normalized:
55 unpacked = normalize_ratios(unpacked)
56 sorted_ratios = sorted(unpacked.items())
57 return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)
58
[end of holoviews/plotting/mpl/util.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/holoviews/plotting/mpl/util.py b/holoviews/plotting/mpl/util.py
--- a/holoviews/plotting/mpl/util.py
+++ b/holoviews/plotting/mpl/util.py
@@ -1,5 +1,6 @@
import re
import inspect
+import warnings
import numpy as np
from matplotlib import ticker
@@ -54,4 +55,6 @@
if normalized:
unpacked = normalize_ratios(unpacked)
sorted_ratios = sorted(unpacked.items())
- return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'All-NaN (slice|axis) encountered')
+ return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)
| {"golden_diff": "diff --git a/holoviews/plotting/mpl/util.py b/holoviews/plotting/mpl/util.py\n--- a/holoviews/plotting/mpl/util.py\n+++ b/holoviews/plotting/mpl/util.py\n@@ -1,5 +1,6 @@\n import re\n import inspect\n+import warnings\n \n import numpy as np\n from matplotlib import ticker\n@@ -54,4 +55,6 @@\n if normalized:\n unpacked = normalize_ratios(unpacked)\n sorted_ratios = sorted(unpacked.items())\n- return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)\n+ with warnings.catch_warnings():\n+ warnings.filterwarnings('ignore', r'All-NaN (slice|axis) encountered')\n+ return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)\n", "issue": "Always get warnings.warn(\"All-NaN slice encountered\", RuntimeWarning) for Overlay\nI always get this error when creating an `hv.Overlay`:\n\n``` python\nx = np.linspace(0, 1)\nhv.Path((x,x)) + hv.Path((x,x))\n```\n\n```\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/numpy/lib/nanfunctions.py:326: RuntimeWarning: All-NaN slice encountered\n warnings.warn(\"All-NaN slice encountered\", RuntimeWarning)\n```\n\nif run with:\n\n```\nimport warnings\nwarnings.simplefilter('error')\n```\n\nit generates this traceback:\n\n```\nRuntimeWarningTraceback (most recent call last)\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/IPython/core/formatters.py in __call__(self, obj)\n 337 pass\n 338 else:\n--> 339 return printer(obj)\n 340 # Finally look for special method names\n 341 method = _safe_get_formatter_method(obj, self.print_method)\n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in pprint_display(obj)\n 214 if not ip.display_formatter.formatters['text/plain'].pprint:\n 215 return None\n--> 216 return display(obj, raw=True)\n 217 \n 218 \n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in display(obj, raw, **kwargs)\n 198 html = element_display(obj)\n 199 elif isinstance(obj, (Layout, NdLayout, AdjointLayout)):\n--> 200 html = layout_display(obj)\n 201 elif isinstance(obj, (HoloMap, DynamicMap)):\n 202 html = map_display(obj)\n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in wrapped(element)\n 101 html = fn(element,\n 102 max_frames=OutputMagic.options['max_frames'],\n--> 103 max_branches = OutputMagic.options['max_branches'])\n 104 \n 105 # Only want to add to the archive for one display hook...\n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in layout_display(layout, max_frames, max_branches)\n 172 return '<tt>'+ sanitize_HTML(layout) + '</tt>'\n 173 \n--> 174 return render(layout)\n 175 \n 176 \n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/ipython/display_hooks.py in render(obj, **kwargs)\n 49 \n 50 backend = Store.current_backend\n---> 51 return Store.renderers[backend].html(obj, **kwargs)\n 52 \n 53 \n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/renderer.py in html(self, obj, fmt, css)\n 247 Renders plot or data structure and wraps the output in HTML.\n 248 \"\"\"\n--> 249 plot, fmt = self._validate(obj, fmt)\n 250 figdata, _ = self(plot, fmt)\n 251 if css is None: css = self.css\n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/renderer.py in _validate(self, obj, fmt)\n 187 if isinstance(obj, tuple(self.widgets.values())):\n 188 return obj, 'html'\n--> 189 plot = self.get_plot(obj)\n 190 \n 191 fig_formats = 
self.mode_formats['fig'][self.mode]\n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/renderer.py in get_plot(self_or_cls, obj)\n 173 obj = Layout.from_values(obj) if isinstance(obj, AdjointLayout) else obj\n 174 plot_opts = self_or_cls.plot_options(obj, self_or_cls.size)\n--> 175 plot = self_or_cls.plotting_class(obj)(obj, **plot_opts)\n 176 plot.update(0)\n 177 else:\n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/mpl/plot.py in __init__(self, layout, **params)\n 715 def __init__(self, layout, **params):\n 716 super(LayoutPlot, self).__init__(layout=layout, **params)\n--> 717 self.subplots, self.subaxes, self.layout = self._compute_gridspec(layout)\n 718 \n 719 \n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/mpl/plot.py in _compute_gridspec(self, layout)\n 804 for ratios in col[1] for r in ratios])/4\n 805 \n--> 806 wr_unnormalized = compute_ratios(col_widthratios, False)\n 807 hr_list = compute_ratios(row_heightratios)\n 808 wr_list = compute_ratios(col_widthratios)\n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/holoviews/plotting/mpl/util.py in compute_ratios(ratios, normalized)\n 48 unpacked = normalize_ratios(unpacked)\n 49 sorted_ratios = sorted(unpacked.items())\n---> 50 return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)\n\n/home/bnijholt/anaconda3/lib/python3.5/site-packages/numpy/lib/nanfunctions.py in nanmax(a, axis, out, keepdims)\n 324 res = np.fmax.reduce(a, axis=axis, out=out, keepdims=keepdims)\n 325 if np.isnan(res).any():\n--> 326 warnings.warn(\"All-NaN slice encountered\", RuntimeWarning)\n 327 else:\n 328 # Slow, but safe for subclasses of ndarray\n\nRuntimeWarning: All-NaN slice encountered\n```\n\n", "before_files": [{"content": "import re\nimport inspect\n\nimport numpy as np\nfrom matplotlib import ticker\n\nfrom ...core.util import basestring\n\n\ndef wrap_formatter(formatter):\n \"\"\"\n Wraps formatting function or string in\n appropriate matplotlib formatter type.\n \"\"\"\n if isinstance(formatter, ticker.Formatter):\n return formatter\n elif callable(formatter):\n args = [arg for arg in inspect.getargspec(formatter).args\n if arg != 'self']\n wrapped = formatter\n if len(args) == 1:\n def wrapped(val, pos=None):\n return formatter(val)\n return ticker.FuncFormatter(wrapped)\n elif isinstance(formatter, basestring):\n if re.findall(r\"\\{(\\w+)\\}\", formatter):\n return ticker.StrMethodFormatter(formatter)\n else:\n return ticker.FormatStrFormatter(formatter)\n\ndef unpack_adjoints(ratios):\n new_ratios = {}\n offset = 0\n for k, (num, ratios) in sorted(ratios.items()):\n unpacked = [[] for _ in range(num)]\n for r in ratios:\n nr = len(r)\n for i in range(num):\n unpacked[i].append(r[i] if i < nr else np.nan)\n for i, r in enumerate(unpacked):\n new_ratios[k+i+offset] = r\n offset += num-1\n return new_ratios\n\ndef normalize_ratios(ratios):\n normalized = {}\n for i, v in enumerate(zip(*ratios.values())):\n arr = np.array(v)\n normalized[i] = arr/float(np.nanmax(arr))\n return normalized\n\ndef compute_ratios(ratios, normalized=True):\n unpacked = unpack_adjoints(ratios)\n if normalized:\n unpacked = normalize_ratios(unpacked)\n sorted_ratios = sorted(unpacked.items())\n return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)\n", "path": "holoviews/plotting/mpl/util.py"}]} | 2,556 | 197 |
gh_patches_debug_21443 | rasdani/github-patches | git_diff | avocado-framework__avocado-5762 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Improve Avocado jobs command
**Describe the bug**
A clear and concise description of what the bug is.
1. `avocado jobs list` returns an unsorted list of job results. One improvement is to sort the list by `mtime`. This helps find the recent jobs.
2. `avocado jobs show` shows a weird end time because we don't have the `end` attribute in the `results.json` any more.
I have a fix for the first issue, but I am looking for input on the second one. One feasible fix is to replace the end time with the start time.
**Steps to reproduce**
Steps to reproduce the behavior with a reproducible whenever possible.
```
# run a few tests
avocado run /bin/true
avocado run /bin/true
avocado run /bin/true
# 1. list
avocado jobs list
# 2. show
avocado jobs show
```
**Expected behavior**
1.
```
b36e709ed01af2567e4df81842b678fb9dca51eb 2023-09-08 03:04:52.701753 1 (1/0/0/0)
11861a8dd228a86e7c0f2b3583fe6ca7011af99c 2023-09-08 03:00:01.887965 1 (1/0/0/0)
6a29261e8d23e963270c42c9147781fed3ca6118 2023-09-08 02:55:27.117886 1 (1/0/0/0)
```
2.
```
JOB ID : latest
Test ID End Time Run Time Status
1-/bin/true <correct time> 0.012107 PASS
```
**Current behavior**
1.
```
b36e709ed01af2567e4df81842b678fb9dca51eb 2023-09-08 03:04:52.701753 1 (1/0/0/0)
6a29261e8d23e963270c42c9147781fed3ca6118 2023-09-08 02:55:27.117886 1 (1/0/0/0)
11861a8dd228a86e7c0f2b3583fe6ca7011af99c 2023-09-08 03:00:01.887965 1 (1/0/0/0)
```
2.
```
JOB ID : latest
Test ID End Time Run Time Status
1-/bin/true 1970/01/01 01:51:03 01:51:03 0.012107 PASS
```
**System information (please complete the following information):**
- OS: `Ubuntu 18.04.6 LTS`
- Avocado version: `Avocado 102.0`
- Avocado installation method: reproduce on github & pip installation
</issue>
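As a rough illustration of the first point, the per-job `results.json` files can be ordered by modification time before listing; the directory path below is only a placeholder:
```python
import os
from glob import glob

jobs_dir = "/tmp/avocado/job-results"  # placeholder, not the real logs dir lookup

# Newest job first, keyed on the mtime of each results.json
for result in sorted(
    glob(os.path.join(jobs_dir, "*/results.json")),
    key=os.path.getmtime,
    reverse=True,
):
    print(result)
```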
<code>
[start of avocado/plugins/jobs.py]
1 # This program is free software; you can redistribute it and/or modify
2 # it under the terms of the GNU General Public License as published by
3 # the Free Software Foundation; either version 2 of the License, or
4 # (at your option) any later version.
5 #
6 # This program is distributed in the hope that it will be useful,
7 # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
9 #
10 # See LICENSE for more details.
11 #
12 # Copyright: Red Hat Inc. 2020
13 # Authors: Beraldo Leal <[email protected]>
14
15 """
16 Jobs subcommand
17 """
18 import json
19 import os
20 from datetime import datetime
21 from glob import glob
22
23 from avocado.core import exit_codes, output
24 from avocado.core.data_dir import get_job_results_dir, get_logs_dir
25 from avocado.core.output import LOG_UI
26 from avocado.core.plugin_interfaces import CLICmd
27 from avocado.core.settings import settings
28 from avocado.utils import astring
29
30
31 class Jobs(CLICmd):
32 """
33 Implements the avocado 'jobs' subcommand
34 """
35
36 name = "jobs"
37 description = "Manage Avocado jobs"
38
39 @staticmethod
40 def _get_data_from_file(filename):
41 if not filename or not os.path.isfile(filename):
42 raise FileNotFoundError(f"File not found {filename}")
43
44 with open(filename, "r", encoding="utf-8") as fp:
45 return json.load(fp)
46
47 @staticmethod
48 def _print_job_details(details):
49 for key, value in details.items():
50 LOG_UI.info("%-12s: %s", key, value)
51
52 @staticmethod
53 def _print_job_tests(tests):
54 test_matrix = []
55 date_fmt = "%Y/%m/%d %H:%M:%S"
56 for test in tests:
57 status = test.get("status")
58 decorator = output.TEST_STATUS_DECORATOR_MAPPING.get(status)
59 end = datetime.fromtimestamp(test.get("end"))
60 test_matrix.append(
61 (
62 test.get("id"),
63 end.strftime(date_fmt),
64 f"{float(test.get('time')):5f}",
65 decorator(status, ""),
66 )
67 )
68 header = (
69 output.TERM_SUPPORT.header_str("Test ID"),
70 output.TERM_SUPPORT.header_str("End Time"),
71 output.TERM_SUPPORT.header_str("Run Time"),
72 output.TERM_SUPPORT.header_str("Status"),
73 )
74 for line in astring.iter_tabular_output(test_matrix, header=header, strip=True):
75 LOG_UI.debug(line)
76
77 def configure(self, parser):
78 """
79 Add the subparser for the assets action.
80
81 :param parser: The Avocado command line application parser
82 :type parser: :class:`avocado.core.parser.ArgumentParser`
83 """
84 parser = super().configure(parser)
85
86 subcommands = parser.add_subparsers(
87 dest="jobs_subcommand", metavar="sub-command"
88 )
89 subcommands.required = True
90
91 help_msg = "List all known jobs by Avocado"
92 subcommands.add_parser("list", help=help_msg)
93
94 help_msg = (
95 "Show details about a specific job. When passing a Job "
96 'ID, you can use any Job Reference (job_id, "latest", '
97 "or job results path)."
98 )
99 show_parser = subcommands.add_parser("show", help=help_msg)
100 settings.register_option(
101 section="jobs.show",
102 key="job_id",
103 help_msg="JOB id",
104 metavar="JOBID",
105 default="latest",
106 nargs="?",
107 positional_arg=True,
108 parser=show_parser,
109 )
110
111 @staticmethod
112 def handle_list_command(jobs_results):
113 """Called when 'avocado jobs list' command is executed."""
114
115 for filename in jobs_results.values():
116 with open(filename, "r", encoding="utf-8") as fp:
117 job = json.load(fp)
118 LOG_UI.info(
119 "%-40s %-26s %3s (%s/%s/%s/%s)",
120 job["job_id"],
121 job["start"],
122 job["total"],
123 job["pass"],
124 job["skip"],
125 job["errors"],
126 job["failures"],
127 )
128
129 return exit_codes.AVOCADO_ALL_OK
130
131 def handle_show_command(self, config):
132 """Called when 'avocado jobs show' command is executed."""
133
134 job_id = config.get("jobs.show.job_id")
135 results_dir = get_job_results_dir(job_id)
136 if results_dir is None:
137 LOG_UI.error("Error: Job %s not found", job_id)
138 return exit_codes.AVOCADO_GENERIC_CRASH
139
140 results_file = os.path.join(results_dir, "results.json")
141 try:
142 results_data = self._get_data_from_file(results_file)
143 except FileNotFoundError as ex:
144 # Results data are important and should exit if not found
145 LOG_UI.error(ex)
146 return exit_codes.AVOCADO_GENERIC_CRASH
147
148 data = {
149 "JOB ID": job_id,
150 "JOB LOG": results_data.get("debuglog"),
151 }
152
153 # We could improve this soon with more data and colors
154 self._print_job_details(data)
155 LOG_UI.info("")
156 self._print_job_tests(results_data.get("tests"))
157 results = (
158 "PASS %d | ERROR %d | FAIL %d | SKIP %d |"
159 "WARN %d | INTERRUPT %s | CANCEL %s"
160 )
161 results %= (
162 results_data.get("pass", 0),
163 results_data.get("error", 0),
164 results_data.get("failures", 0),
165 results_data.get("skip", 0),
166 results_data.get("warn", 0),
167 results_data.get("interrupt", 0),
168 results_data.get("cancel", 0),
169 )
170 self._print_job_details({"RESULTS": results})
171 return exit_codes.AVOCADO_ALL_OK
172
173 def run(self, config):
174 results = {}
175
176 jobs_dir = get_logs_dir()
177 for result in glob(os.path.join(jobs_dir, "*/results.json")):
178 with open(result, "r", encoding="utf-8") as fp:
179 job = json.load(fp)
180 results[job["job_id"]] = result
181
182 subcommand = config.get("jobs_subcommand")
183 if subcommand == "list":
184 return self.handle_list_command(results)
185 elif subcommand == "show":
186 return self.handle_show_command(config)
187 return exit_codes.AVOCADO_ALL_OK
188
[end of avocado/plugins/jobs.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/avocado/plugins/jobs.py b/avocado/plugins/jobs.py
--- a/avocado/plugins/jobs.py
+++ b/avocado/plugins/jobs.py
@@ -56,7 +56,8 @@
for test in tests:
status = test.get("status")
decorator = output.TEST_STATUS_DECORATOR_MAPPING.get(status)
- end = datetime.fromtimestamp(test.get("end"))
+ # Retrieve "end" for backward compatibility
+ end = datetime.fromtimestamp(test.get("actual_end", test.get("end")))
test_matrix.append(
(
test.get("id"),
@@ -174,7 +175,11 @@
results = {}
jobs_dir = get_logs_dir()
- for result in glob(os.path.join(jobs_dir, "*/results.json")):
+ for result in sorted(
+ glob(os.path.join(jobs_dir, "*/results.json")),
+ key=os.path.getmtime,
+ reverse=True,
+ ):
with open(result, "r", encoding="utf-8") as fp:
job = json.load(fp)
results[job["job_id"]] = result
| {"golden_diff": "diff --git a/avocado/plugins/jobs.py b/avocado/plugins/jobs.py\n--- a/avocado/plugins/jobs.py\n+++ b/avocado/plugins/jobs.py\n@@ -56,7 +56,8 @@\n for test in tests:\n status = test.get(\"status\")\n decorator = output.TEST_STATUS_DECORATOR_MAPPING.get(status)\n- end = datetime.fromtimestamp(test.get(\"end\"))\n+ # Retrieve \"end\" for backward compatibility\n+ end = datetime.fromtimestamp(test.get(\"actual_end\", test.get(\"end\")))\n test_matrix.append(\n (\n test.get(\"id\"),\n@@ -174,7 +175,11 @@\n results = {}\n \n jobs_dir = get_logs_dir()\n- for result in glob(os.path.join(jobs_dir, \"*/results.json\")):\n+ for result in sorted(\n+ glob(os.path.join(jobs_dir, \"*/results.json\")),\n+ key=os.path.getmtime,\n+ reverse=True,\n+ ):\n with open(result, \"r\", encoding=\"utf-8\") as fp:\n job = json.load(fp)\n results[job[\"job_id\"]] = result\n", "issue": "Improve Avocado jobs command\n**Describe the bug**\r\nA clear and concise description of what the bug is.\r\n\r\n1. `avocado jobs list` returns unsorted list of job results. One improvement is to sort the list by `mtime`. This help find the recent jobs. \r\n2. `avocado jobs show` shows weird end time because we don't have the `end` attribute in the `results.json` any more. \r\n\r\nI have a fix on the first issue, but looking for inputs for the second one. One feasible fix is to replace the end time with the start time. \r\n\r\n**Steps to reproduce**\r\nSteps to reproduce the behavior with a reproducible whenever possible.\r\n\r\n```\r\n# run a few tests\r\navocado run /bin/true\r\navocado run /bin/true\r\navocado run /bin/true\r\n# 1. list\r\navocado jobs list\r\n# 2. show\r\navocado jobs show\r\n```\r\n\r\n**Expected behavior**\r\n\r\n1. \r\n```\r\nb36e709ed01af2567e4df81842b678fb9dca51eb 2023-09-08 03:04:52.701753 1 (1/0/0/0)\r\n11861a8dd228a86e7c0f2b3583fe6ca7011af99c 2023-09-08 03:00:01.887965 1 (1/0/0/0)\r\n6a29261e8d23e963270c42c9147781fed3ca6118 2023-09-08 02:55:27.117886 1 (1/0/0/0)\r\n```\r\n\r\n2. \r\n\r\n```\r\nJOB ID : latest\r\n\r\nTest ID End Time Run Time Status\r\n1-/bin/true <correct time> 0.012107 PASS\r\n```\r\n\r\n**Current behavior**\r\n\r\n1. \r\n```\r\nb36e709ed01af2567e4df81842b678fb9dca51eb 2023-09-08 03:04:52.701753 1 (1/0/0/0)\r\n6a29261e8d23e963270c42c9147781fed3ca6118 2023-09-08 02:55:27.117886 1 (1/0/0/0)\r\n11861a8dd228a86e7c0f2b3583fe6ca7011af99c 2023-09-08 03:00:01.887965 1 (1/0/0/0)\r\n```\r\n\r\n2.\r\n```\r\nJOB ID : latest\r\n\r\nTest ID End Time Run Time Status\r\n1-/bin/true 1970/01/01 01:51:03 01:51:03 0.012107 PASS\r\n```\r\n\r\n**System information (please complete the following information):**\r\n - OS: `Ubuntu 18.04.6 LTS`\r\n - Avocado version: `Avocado 102.0`\r\n - Avocado installation method: reproduce on github & pip installation\r\n\r\n\n", "before_files": [{"content": "# This program is free software; you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation; either version 2 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n#\n# See LICENSE for more details.\n#\n# Copyright: Red Hat Inc. 
2020\n# Authors: Beraldo Leal <[email protected]>\n\n\"\"\"\nJobs subcommand\n\"\"\"\nimport json\nimport os\nfrom datetime import datetime\nfrom glob import glob\n\nfrom avocado.core import exit_codes, output\nfrom avocado.core.data_dir import get_job_results_dir, get_logs_dir\nfrom avocado.core.output import LOG_UI\nfrom avocado.core.plugin_interfaces import CLICmd\nfrom avocado.core.settings import settings\nfrom avocado.utils import astring\n\n\nclass Jobs(CLICmd):\n \"\"\"\n Implements the avocado 'jobs' subcommand\n \"\"\"\n\n name = \"jobs\"\n description = \"Manage Avocado jobs\"\n\n @staticmethod\n def _get_data_from_file(filename):\n if not filename or not os.path.isfile(filename):\n raise FileNotFoundError(f\"File not found {filename}\")\n\n with open(filename, \"r\", encoding=\"utf-8\") as fp:\n return json.load(fp)\n\n @staticmethod\n def _print_job_details(details):\n for key, value in details.items():\n LOG_UI.info(\"%-12s: %s\", key, value)\n\n @staticmethod\n def _print_job_tests(tests):\n test_matrix = []\n date_fmt = \"%Y/%m/%d %H:%M:%S\"\n for test in tests:\n status = test.get(\"status\")\n decorator = output.TEST_STATUS_DECORATOR_MAPPING.get(status)\n end = datetime.fromtimestamp(test.get(\"end\"))\n test_matrix.append(\n (\n test.get(\"id\"),\n end.strftime(date_fmt),\n f\"{float(test.get('time')):5f}\",\n decorator(status, \"\"),\n )\n )\n header = (\n output.TERM_SUPPORT.header_str(\"Test ID\"),\n output.TERM_SUPPORT.header_str(\"End Time\"),\n output.TERM_SUPPORT.header_str(\"Run Time\"),\n output.TERM_SUPPORT.header_str(\"Status\"),\n )\n for line in astring.iter_tabular_output(test_matrix, header=header, strip=True):\n LOG_UI.debug(line)\n\n def configure(self, parser):\n \"\"\"\n Add the subparser for the assets action.\n\n :param parser: The Avocado command line application parser\n :type parser: :class:`avocado.core.parser.ArgumentParser`\n \"\"\"\n parser = super().configure(parser)\n\n subcommands = parser.add_subparsers(\n dest=\"jobs_subcommand\", metavar=\"sub-command\"\n )\n subcommands.required = True\n\n help_msg = \"List all known jobs by Avocado\"\n subcommands.add_parser(\"list\", help=help_msg)\n\n help_msg = (\n \"Show details about a specific job. 
When passing a Job \"\n 'ID, you can use any Job Reference (job_id, \"latest\", '\n \"or job results path).\"\n )\n show_parser = subcommands.add_parser(\"show\", help=help_msg)\n settings.register_option(\n section=\"jobs.show\",\n key=\"job_id\",\n help_msg=\"JOB id\",\n metavar=\"JOBID\",\n default=\"latest\",\n nargs=\"?\",\n positional_arg=True,\n parser=show_parser,\n )\n\n @staticmethod\n def handle_list_command(jobs_results):\n \"\"\"Called when 'avocado jobs list' command is executed.\"\"\"\n\n for filename in jobs_results.values():\n with open(filename, \"r\", encoding=\"utf-8\") as fp:\n job = json.load(fp)\n LOG_UI.info(\n \"%-40s %-26s %3s (%s/%s/%s/%s)\",\n job[\"job_id\"],\n job[\"start\"],\n job[\"total\"],\n job[\"pass\"],\n job[\"skip\"],\n job[\"errors\"],\n job[\"failures\"],\n )\n\n return exit_codes.AVOCADO_ALL_OK\n\n def handle_show_command(self, config):\n \"\"\"Called when 'avocado jobs show' command is executed.\"\"\"\n\n job_id = config.get(\"jobs.show.job_id\")\n results_dir = get_job_results_dir(job_id)\n if results_dir is None:\n LOG_UI.error(\"Error: Job %s not found\", job_id)\n return exit_codes.AVOCADO_GENERIC_CRASH\n\n results_file = os.path.join(results_dir, \"results.json\")\n try:\n results_data = self._get_data_from_file(results_file)\n except FileNotFoundError as ex:\n # Results data are important and should exit if not found\n LOG_UI.error(ex)\n return exit_codes.AVOCADO_GENERIC_CRASH\n\n data = {\n \"JOB ID\": job_id,\n \"JOB LOG\": results_data.get(\"debuglog\"),\n }\n\n # We could improve this soon with more data and colors\n self._print_job_details(data)\n LOG_UI.info(\"\")\n self._print_job_tests(results_data.get(\"tests\"))\n results = (\n \"PASS %d | ERROR %d | FAIL %d | SKIP %d |\"\n \"WARN %d | INTERRUPT %s | CANCEL %s\"\n )\n results %= (\n results_data.get(\"pass\", 0),\n results_data.get(\"error\", 0),\n results_data.get(\"failures\", 0),\n results_data.get(\"skip\", 0),\n results_data.get(\"warn\", 0),\n results_data.get(\"interrupt\", 0),\n results_data.get(\"cancel\", 0),\n )\n self._print_job_details({\"RESULTS\": results})\n return exit_codes.AVOCADO_ALL_OK\n\n def run(self, config):\n results = {}\n\n jobs_dir = get_logs_dir()\n for result in glob(os.path.join(jobs_dir, \"*/results.json\")):\n with open(result, \"r\", encoding=\"utf-8\") as fp:\n job = json.load(fp)\n results[job[\"job_id\"]] = result\n\n subcommand = config.get(\"jobs_subcommand\")\n if subcommand == \"list\":\n return self.handle_list_command(results)\n elif subcommand == \"show\":\n return self.handle_show_command(config)\n return exit_codes.AVOCADO_ALL_OK\n", "path": "avocado/plugins/jobs.py"}]} | 3,220 | 253 |
gh_patches_debug_10319 | rasdani/github-patches | git_diff | voxel51__fiftyone-102 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Need custom FloatField that supports more numeric types
In this code (taken from https://github.com/voxel51/fiftyone/blob/develop/examples/model_inference/README.md), I had to add `float(confidence)`; otherwise I got an error about `confidence`, which was a numpy float32 or something similar, not being a supported value for a `mongoengine.fields.FloatField`. 
```py
for imgs, sample_ids in data_loader:
predictions, confidences = predict(model, imgs)
# Add predictions to your FiftyOne dataset
for sample_id, prediction, confidence in zip(
sample_ids, predictions, confidences
):
sample = dataset[sample_id]
sample[model_name] = fo.Classification(label=labels_map[prediction])
sample["confidence"] = float(confidence) # float() is required here, but shouldn't need to be...
sample.save()
```
Kind of hard to believe that MongoEngine doesn't handle casting a `np.float32` into a float, but, alas, it seems like our wrapper around `mongoengine.fields.FloatField` will need to override the `validate()` function below to cast non-int types with `float()` as well...
https://github.com/MongoEngine/mongoengine/blob/4275c2d7b791f5910308a4815a1ba39324dee373/mongoengine/fields.py#L377-L411
</issue>
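A minimal sketch of that idea, assuming `mongoengine` is installed and without claiming to match whatever FiftyOne ends up shipping, is a subclass whose `validate()` coerces through `float()` before range-checking:
```python
import mongoengine


class CoercingFloatField(mongoengine.FloatField):
    """FloatField that accepts anything float() can convert, e.g. numpy.float32."""

    def validate(self, value):
        try:
            value = float(value)
        except OverflowError:
            self.error("The value is too large to be converted to float")
        except (TypeError, ValueError):
            self.error("%s could not be converted to float" % value)

        if self.min_value is not None and value < self.min_value:
            self.error("Float value is too small")

        if self.max_value is not None and value > self.max_value:
            self.error("Float value is too large")
```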
<code>
[start of fiftyone/core/fields.py]
1 """
2 Fields of dataset sample schemas.
3
4 | Copyright 2017-2020, Voxel51, Inc.
5 | `voxel51.com <https://voxel51.com/>`_
6 |
7 """
8 # pragma pylint: disable=redefined-builtin
9 # pragma pylint: disable=unused-wildcard-import
10 # pragma pylint: disable=wildcard-import
11 from __future__ import absolute_import
12 from __future__ import division
13 from __future__ import print_function
14 from __future__ import unicode_literals
15 from builtins import *
16
17 # pragma pylint: enable=redefined-builtin
18 # pragma pylint: enable=unused-wildcard-import
19 # pragma pylint: enable=wildcard-import
20
21 import mongoengine.fields
22
23
24 class Field(mongoengine.fields.BaseField):
25 pass
26
27
28 class BooleanField(mongoengine.BooleanField, Field):
29 pass
30
31
32 class IntField(mongoengine.IntField, Field):
33 pass
34
35
36 class FloatField(mongoengine.FloatField, Field):
37 pass
38
39
40 class StringField(mongoengine.StringField, Field):
41 pass
42
43
44 class ListField(mongoengine.ListField, Field):
45 pass
46
47
48 class DictField(mongoengine.DictField, Field):
49 pass
50
51
52 class EmbeddedDocumentField(mongoengine.EmbeddedDocumentField, Field):
53 pass
54
[end of fiftyone/core/fields.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/fiftyone/core/fields.py b/fiftyone/core/fields.py
--- a/fiftyone/core/fields.py
+++ b/fiftyone/core/fields.py
@@ -34,7 +34,19 @@
class FloatField(mongoengine.FloatField, Field):
- pass
+ def validate(self, value):
+ try:
+ value = float(value)
+ except OverflowError:
+ self.error("The value is too large to be converted to float")
+ except (TypeError, ValueError):
+ self.error("%s could not be converted to float" % value)
+
+ if self.min_value is not None and value < self.min_value:
+ self.error("Float value is too small")
+
+ if self.max_value is not None and value > self.max_value:
+ self.error("Float value is too large")
class StringField(mongoengine.StringField, Field):
| {"golden_diff": "diff --git a/fiftyone/core/fields.py b/fiftyone/core/fields.py\n--- a/fiftyone/core/fields.py\n+++ b/fiftyone/core/fields.py\n@@ -34,7 +34,19 @@\n \n \n class FloatField(mongoengine.FloatField, Field):\n- pass\n+ def validate(self, value):\n+ try:\n+ value = float(value)\n+ except OverflowError:\n+ self.error(\"The value is too large to be converted to float\")\n+ except (TypeError, ValueError):\n+ self.error(\"%s could not be converted to float\" % value)\n+\n+ if self.min_value is not None and value < self.min_value:\n+ self.error(\"Float value is too small\")\n+\n+ if self.max_value is not None and value > self.max_value:\n+ self.error(\"Float value is too large\")\n \n \n class StringField(mongoengine.StringField, Field):\n", "issue": "Need custom FloatField that supports more numeric types\nIn this code (taken from https://github.com/voxel51/fiftyone/blob/develop/examples/model_inference/README.md), I had to add `float(confidence)` otherwise I got an error about `confidence`, which was a numpy float32 or something similar, not being a supported value for a `mongoengine.fields.FloatField`. \r\n\r\n```py\r\nfor imgs, sample_ids in data_loader:\r\n predictions, confidences = predict(model, imgs)\r\n\r\n # Add predictions to your FiftyOne dataset\r\n for sample_id, prediction, confidence in zip(\r\n sample_ids, predictions, confidences\r\n ):\r\n sample = dataset[sample_id]\r\n sample[model_name] = fo.Classification(label=labels_map[prediction])\r\n sample[\"confidence\"] = float(confidence) # float() is required here, but shouldn't need to be...\r\n sample.save()\r\n```\r\n\r\nKind of hard to believe that MongoEngine doesn't handle casting a `np.float32` into a float, but, alas, it seems like our wrapper around `mongoengine.fields.FloatField` will need to override the `validate()` function below to cast non-int types with `float()` as well...\r\n\r\nhttps://github.com/MongoEngine/mongoengine/blob/4275c2d7b791f5910308a4815a1ba39324dee373/mongoengine/fields.py#L377-L411\r\n\n", "before_files": [{"content": "\"\"\"\nFields of dataset sample schemas.\n\n| Copyright 2017-2020, Voxel51, Inc.\n| `voxel51.com <https://voxel51.com/>`_\n|\n\"\"\"\n# pragma pylint: disable=redefined-builtin\n# pragma pylint: disable=unused-wildcard-import\n# pragma pylint: disable=wildcard-import\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\nfrom builtins import *\n\n# pragma pylint: enable=redefined-builtin\n# pragma pylint: enable=unused-wildcard-import\n# pragma pylint: enable=wildcard-import\n\nimport mongoengine.fields\n\n\nclass Field(mongoengine.fields.BaseField):\n pass\n\n\nclass BooleanField(mongoengine.BooleanField, Field):\n pass\n\n\nclass IntField(mongoengine.IntField, Field):\n pass\n\n\nclass FloatField(mongoengine.FloatField, Field):\n pass\n\n\nclass StringField(mongoengine.StringField, Field):\n pass\n\n\nclass ListField(mongoengine.ListField, Field):\n pass\n\n\nclass DictField(mongoengine.DictField, Field):\n pass\n\n\nclass EmbeddedDocumentField(mongoengine.EmbeddedDocumentField, Field):\n pass\n", "path": "fiftyone/core/fields.py"}]} | 1,239 | 200 |
gh_patches_debug_12098 | rasdani/github-patches | git_diff | ansible__awx-13080 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[api] Confusing error when trying to sync project set to scm_type Manual
### Please confirm the following
- [X] I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).
- [X] I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.
- [X] I understand that AWX is open source software provided for free and that I might not receive a timely response.
### Bug Summary
Creating a project and accidentally setting it to be `scm_type: manual` (the default in the awx collection), and then trying to do a project update on it, leads to a very confusing error:
```
fatal: [localhost]: FAILED! => {"changed": false, "msg": "Cannot make a request with the POST method to this endpoint /api/v2/projects/8/update/"}
```
Although it is "technically" correct, it would be nice if we could show something nicer in this scenario (e.g. "Unable to trigger a project update because the project scm type (Manual) does not support it.")
### AWX version
devel
### Select the relevant components
- [ ] UI
- [X] API
- [ ] Docs
- [ ] Collection
- [ ] CLI
- [ ] Other
### Installation method
docker development environment
### Modifications
no
### Ansible version
_No response_
### Operating system
_No response_
### Web browser
_No response_
### Steps to reproduce
- Create a project, set its type to Manual
- Use the API (or `awx.awx.project_update`) to try to update it
### Expected results
An error explaining the problem
### Actual results
An error telling me that I can't POST to an endpoint that I expect to be able to POST to.
### Additional information
_No response_
</issue>
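To make the expected behaviour concrete, a hypothetical helper (the function name and wording are invented here) could translate the update endpoint's status code into a friendlier message before failing the task:
```python
def explain_update_failure(status_code, scm_type):
    """Map an HTTP status from /api/v2/projects/<id>/update/ to a readable error."""
    if status_code == 405:
        return (
            "Unable to trigger a project update because the project "
            "scm type (%s) does not support it." % scm_type
        )
    if status_code != 202:
        return "Failed to update project, see response for details"
    return ""


print(explain_update_failure(405, "Manual"))
```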
<code>
[start of awx_collection/plugins/modules/project_update.py]
1 #!/usr/bin/python
2 # coding: utf-8 -*-
3
4 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
5
6 from __future__ import absolute_import, division, print_function
7
8 __metaclass__ = type
9
10 ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'}
11
12 DOCUMENTATION = '''
13 ---
14 module: project_update
15 author: "Sean Sullivan (@sean-m-sullivan)"
16 short_description: Update a Project in Automation Platform Controller
17 description:
18 - Update a Automation Platform Controller Project. See
19 U(https://www.ansible.com/tower) for an overview.
20 options:
21 name:
22 description:
23 - The name or id of the project to update.
24 required: True
25 type: str
26 aliases:
27 - project
28 organization:
29 description:
30 - Organization the project exists in.
31 - Used to help lookup the object, cannot be modified using this module.
32 - If not provided, will lookup by name only, which does not work with duplicates.
33 type: str
34 wait:
35 description:
36 - Wait for the project to update.
37 - If scm revision has not changed module will return not changed.
38 default: True
39 type: bool
40 interval:
41 description:
42 - The interval to request an update from the controller.
43 required: False
44 default: 2
45 type: float
46 timeout:
47 description:
48 - If waiting for the project to update this will abort after this
49 amount of seconds
50 type: int
51 extends_documentation_fragment: awx.awx.auth
52 '''
53
54 RETURN = '''
55 id:
56 description: project id of the updated project
57 returned: success
58 type: int
59 sample: 86
60 status:
61 description: status of the updated project
62 returned: success
63 type: str
64 sample: pending
65 '''
66
67
68 EXAMPLES = '''
69 - name: Launch a project with a timeout of 10 seconds
70 project_update:
71 project: "Networking Project"
72 timeout: 10
73
74 - name: Launch a Project with extra_vars without waiting
75 project_update:
76 project: "Networking Project"
77 wait: False
78 '''
79
80 from ..module_utils.controller_api import ControllerAPIModule
81
82
83 def main():
84 # Any additional arguments that are not fields of the item can be added here
85 argument_spec = dict(
86 name=dict(required=True, aliases=['project']),
87 organization=dict(),
88 wait=dict(default=True, type='bool'),
89 interval=dict(default=2.0, type='float'),
90 timeout=dict(default=None, type='int'),
91 )
92
93 # Create a module for ourselves
94 module = ControllerAPIModule(argument_spec=argument_spec)
95
96 # Extract our parameters
97 name = module.params.get('name')
98 organization = module.params.get('organization')
99 wait = module.params.get('wait')
100 interval = module.params.get('interval')
101 timeout = module.params.get('timeout')
102
103 # Attempt to look up project based on the provided name or id
104 lookup_data = {}
105 if organization:
106 lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)
107 project = module.get_one('projects', name_or_id=name, data=lookup_data)
108 if project is None:
109 module.fail_json(msg="Unable to find project")
110
111 if wait:
112 scm_revision_original = project['scm_revision']
113
114 # Update the project
115 result = module.post_endpoint(project['related']['update'])
116
117 if result['status_code'] != 202:
118 module.fail_json(msg="Failed to update project, see response for details", response=result)
119
120 module.json_output['changed'] = True
121 module.json_output['id'] = result['json']['id']
122 module.json_output['status'] = result['json']['status']
123
124 if not wait:
125 module.exit_json(**module.json_output)
126
127 # Invoke wait function
128 result = module.wait_on_url(
129 url=result['json']['url'], object_name=module.get_item_name(project), object_type='Project Update', timeout=timeout, interval=interval
130 )
131 scm_revision_new = result['json']['scm_revision']
132 if scm_revision_new == scm_revision_original:
133 module.json_output['changed'] = False
134
135 module.exit_json(**module.json_output)
136
137
138 if __name__ == '__main__':
139 main()
140
[end of awx_collection/plugins/modules/project_update.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/awx_collection/plugins/modules/project_update.py b/awx_collection/plugins/modules/project_update.py
--- a/awx_collection/plugins/modules/project_update.py
+++ b/awx_collection/plugins/modules/project_update.py
@@ -114,7 +114,12 @@
# Update the project
result = module.post_endpoint(project['related']['update'])
- if result['status_code'] != 202:
+ if result['status_code'] == 405:
+ module.fail_json(
+ msg="Unable to trigger a project update because the project scm_type ({0}) does not support it.".format(project['scm_type']),
+ response=result
+ )
+ elif result['status_code'] != 202:
module.fail_json(msg="Failed to update project, see response for details", response=result)
module.json_output['changed'] = True
| {"golden_diff": "diff --git a/awx_collection/plugins/modules/project_update.py b/awx_collection/plugins/modules/project_update.py\n--- a/awx_collection/plugins/modules/project_update.py\n+++ b/awx_collection/plugins/modules/project_update.py\n@@ -114,7 +114,12 @@\n # Update the project\n result = module.post_endpoint(project['related']['update'])\n \n- if result['status_code'] != 202:\n+ if result['status_code'] == 405:\n+ module.fail_json(\n+ msg=\"Unable to trigger a project update because the project scm_type ({0}) does not support it.\".format(project['scm_type']),\n+ response=result\n+ )\n+ elif result['status_code'] != 202:\n module.fail_json(msg=\"Failed to update project, see response for details\", response=result)\n \n module.json_output['changed'] = True\n", "issue": "[api] Confusing error when trying to sync project set to scm_type Manual\n### Please confirm the following\n\n- [X] I agree to follow this project's [code of conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html).\n- [X] I have checked the [current issues](https://github.com/ansible/awx/issues) for duplicates.\n- [X] I understand that AWX is open source software provided for free and that I might not receive a timely response.\n\n### Bug Summary\n\nCreating a project and accidentally setting it to be `scm_type: manual` (the default in the awx collection), and then trying to do a project update on it, leads to a very confusing error:\r\n\r\n```\r\nfatal: [localhost]: FAILED! => {\"changed\": false, \"msg\": \"Cannot make a request with the POST method to this endpoint /api/v2/projects/8/update/\"}\r\n```\r\n\r\nAlthough it is \"technically\" correct, it would be nice if we could show something nicer in this scenario (e.g. \"Unable to trigger a project update because the project scm type (Manual) does not support it.\")\n\n### AWX version\n\ndevel\n\n### Select the relevant components\n\n- [ ] UI\n- [X] API\n- [ ] Docs\n- [ ] Collection\n- [ ] CLI\n- [ ] Other\n\n### Installation method\n\ndocker development environment\n\n### Modifications\n\nno\n\n### Ansible version\n\n_No response_\n\n### Operating system\n\n_No response_\n\n### Web browser\n\n_No response_\n\n### Steps to reproduce\n\n- Create a project, set its type to Manual\r\n- Use the API (or `awx.awx.project_update`) to try to update it\n\n### Expected results\n\nAn error explaining the problem\n\n### Actual results\n\nAn error telling me that I can't POST to an endpoint that I expect to be able to POST to.\n\n### Additional information\n\n_No response_\n", "before_files": [{"content": "#!/usr/bin/python\n# coding: utf-8 -*-\n\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nfrom __future__ import absolute_import, division, print_function\n\n__metaclass__ = type\n\nANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'}\n\nDOCUMENTATION = '''\n---\nmodule: project_update\nauthor: \"Sean Sullivan (@sean-m-sullivan)\"\nshort_description: Update a Project in Automation Platform Controller\ndescription:\n - Update a Automation Platform Controller Project. 
See\n U(https://www.ansible.com/tower) for an overview.\noptions:\n name:\n description:\n - The name or id of the project to update.\n required: True\n type: str\n aliases:\n - project\n organization:\n description:\n - Organization the project exists in.\n - Used to help lookup the object, cannot be modified using this module.\n - If not provided, will lookup by name only, which does not work with duplicates.\n type: str\n wait:\n description:\n - Wait for the project to update.\n - If scm revision has not changed module will return not changed.\n default: True\n type: bool\n interval:\n description:\n - The interval to request an update from the controller.\n required: False\n default: 2\n type: float\n timeout:\n description:\n - If waiting for the project to update this will abort after this\n amount of seconds\n type: int\nextends_documentation_fragment: awx.awx.auth\n'''\n\nRETURN = '''\nid:\n description: project id of the updated project\n returned: success\n type: int\n sample: 86\nstatus:\n description: status of the updated project\n returned: success\n type: str\n sample: pending\n'''\n\n\nEXAMPLES = '''\n- name: Launch a project with a timeout of 10 seconds\n project_update:\n project: \"Networking Project\"\n timeout: 10\n\n- name: Launch a Project with extra_vars without waiting\n project_update:\n project: \"Networking Project\"\n wait: False\n'''\n\nfrom ..module_utils.controller_api import ControllerAPIModule\n\n\ndef main():\n # Any additional arguments that are not fields of the item can be added here\n argument_spec = dict(\n name=dict(required=True, aliases=['project']),\n organization=dict(),\n wait=dict(default=True, type='bool'),\n interval=dict(default=2.0, type='float'),\n timeout=dict(default=None, type='int'),\n )\n\n # Create a module for ourselves\n module = ControllerAPIModule(argument_spec=argument_spec)\n\n # Extract our parameters\n name = module.params.get('name')\n organization = module.params.get('organization')\n wait = module.params.get('wait')\n interval = module.params.get('interval')\n timeout = module.params.get('timeout')\n\n # Attempt to look up project based on the provided name or id\n lookup_data = {}\n if organization:\n lookup_data['organization'] = module.resolve_name_to_id('organizations', organization)\n project = module.get_one('projects', name_or_id=name, data=lookup_data)\n if project is None:\n module.fail_json(msg=\"Unable to find project\")\n\n if wait:\n scm_revision_original = project['scm_revision']\n\n # Update the project\n result = module.post_endpoint(project['related']['update'])\n\n if result['status_code'] != 202:\n module.fail_json(msg=\"Failed to update project, see response for details\", response=result)\n\n module.json_output['changed'] = True\n module.json_output['id'] = result['json']['id']\n module.json_output['status'] = result['json']['status']\n\n if not wait:\n module.exit_json(**module.json_output)\n\n # Invoke wait function\n result = module.wait_on_url(\n url=result['json']['url'], object_name=module.get_item_name(project), object_type='Project Update', timeout=timeout, interval=interval\n )\n scm_revision_new = result['json']['scm_revision']\n if scm_revision_new == scm_revision_original:\n module.json_output['changed'] = False\n\n module.exit_json(**module.json_output)\n\n\nif __name__ == '__main__':\n main()\n", "path": "awx_collection/plugins/modules/project_update.py"}]} | 2,212 | 198 |
gh_patches_debug_23192 | rasdani/github-patches | git_diff | readthedocs__readthedocs.org-5344 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Be more defensive in how we insert our mkdocs media
This logic can be executed multiple times on [some builds](https://readthedocs.org/projects/phpspreadsheet/builds/8340376/) (see the `mkdocs.yml`):
https://github.com/rtfd/readthedocs.org/blob/ed8dd29a68d009fc08929fabf7155883482619c1/readthedocs/doc_builder/backends/mkdocs.py#L143-L152
If we have an issue resetting the branch/tag of the docs, we will end up inserting our media files twice, which causes them to be executed twice.
## Solution
This code should check if one of the files we insert is already in the list:
```
if 'readthedocs-data.js' not in user_config.get('extra_javascript'):
```
</issue>
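The same guard can be written as a small idempotent helper; the static URL below is just a placeholder:
```python
def extend_unique(values, new_items):
    """Append each item only if it is not already in the list."""
    for item in new_items:
        if item not in values:
            values.append(item)
    return values


user_config = {"extra_javascript": ["readthedocs-data.js"]}  # already patched once
extend_unique(
    user_config.setdefault("extra_javascript", []),
    ["readthedocs-data.js", "https://example.org/static/core/js/readthedocs-doc-embed.js"],
)
print(user_config["extra_javascript"])  # "readthedocs-data.js" appears only once
```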
<code>
[start of readthedocs/doc_builder/backends/mkdocs.py]
1 # -*- coding: utf-8 -*-
2
3 """
4 MkDocs_ backend for building docs.
5
6 .. _MkDocs: http://www.mkdocs.org/
7 """
8 import json
9 import logging
10 import os
11
12 import yaml
13 from django.conf import settings
14 from django.template import loader as template_loader
15
16 from readthedocs.doc_builder.base import BaseBuilder
17 from readthedocs.doc_builder.exceptions import MkDocsYAMLParseError
18 from readthedocs.projects.models import Feature
19
20
21 log = logging.getLogger(__name__)
22
23
24 def get_absolute_static_url():
25 """
26 Get the fully qualified static URL from settings.
27
28 Mkdocs needs a full domain because it tries to link to local files.
29 """
30 static_url = settings.STATIC_URL
31
32 if not static_url.startswith('http'):
33 domain = getattr(settings, 'PRODUCTION_DOMAIN')
34 static_url = 'http://{}{}'.format(domain, static_url)
35
36 return static_url
37
38
39 class BaseMkdocs(BaseBuilder):
40
41 """Mkdocs builder."""
42
43 # The default theme for mkdocs is the 'mkdocs' theme
44 DEFAULT_THEME_NAME = 'mkdocs'
45
46 def __init__(self, *args, **kwargs):
47 super().__init__(*args, **kwargs)
48 self.old_artifact_path = os.path.join(
49 self.version.project.checkout_path(self.version.slug),
50 self.build_dir,
51 )
52 self.root_path = self.version.project.checkout_path(self.version.slug)
53 self.yaml_file = self.get_yaml_config()
54
55 # README: historically, the default theme was ``readthedocs`` but in
56 # https://github.com/rtfd/readthedocs.org/pull/4556 we change it to
57 # ``mkdocs`` to maintain the same behavior in Read the Docs than
58 # building locally. Although, we can't apply this into the Corporate
59 # site. To keep the same default theme there, we created a Feature flag
60 # for these project that were building with MkDocs in the Corporate
61 # site.
62 if self.project.has_feature(Feature.MKDOCS_THEME_RTD):
63 self.DEFAULT_THEME_NAME = 'readthedocs'
64 log.warning(
65 'Project using readthedocs theme as default for MkDocs: slug=%s',
66 self.project.slug,
67 )
68 else:
69 self.DEFAULT_THEME_NAME = 'mkdocs'
70
71 def get_yaml_config(self):
72 """Find the ``mkdocs.yml`` file in the project root."""
73 mkdoc_path = self.config.mkdocs.configuration
74 if not mkdoc_path:
75 mkdoc_path = os.path.join(
76 self.project.checkout_path(self.version.slug),
77 'mkdocs.yml',
78 )
79 if not os.path.exists(mkdoc_path):
80 return None
81 return mkdoc_path
82
83 def load_yaml_config(self):
84 """
85 Load a YAML config.
86
87 :raises: ``MkDocsYAMLParseError`` if failed due to syntax errors.
88 """
89 try:
90 return yaml.safe_load(open(self.yaml_file, 'r'),)
91 except IOError:
92 return {
93 'site_name': self.version.project.name,
94 }
95 except yaml.YAMLError as exc:
96 note = ''
97 if hasattr(exc, 'problem_mark'):
98 mark = exc.problem_mark
99 note = ' (line %d, column %d)' % (
100 mark.line + 1,
101 mark.column + 1,
102 )
103 raise MkDocsYAMLParseError(
104 'Your mkdocs.yml could not be loaded, '
105 'possibly due to a syntax error{note}'.format(note=note),
106 )
107
108 def append_conf(self, **__):
109 """
110 Set mkdocs config values.
111
112 :raises: ``MkDocsYAMLParseError`` if failed due to known type errors
113 (i.e. expecting a list and a string is found).
114 """
115 if not self.yaml_file:
116 self.yaml_file = os.path.join(self.root_path, 'mkdocs.yml')
117
118 user_config = self.load_yaml_config()
119
120 # Handle custom docs dirs
121 user_docs_dir = user_config.get('docs_dir')
122 if not isinstance(user_docs_dir, (type(None), str)):
123 raise MkDocsYAMLParseError(
124 MkDocsYAMLParseError.INVALID_DOCS_DIR_CONFIG,
125 )
126
127 docs_dir = self.docs_dir(docs_dir=user_docs_dir)
128 self.create_index(extension='md')
129 user_config['docs_dir'] = docs_dir
130
131 # Set mkdocs config values
132 static_url = get_absolute_static_url()
133
134 for config in ('extra_css', 'extra_javascript'):
135 user_value = user_config.get(config, [])
136 if not isinstance(user_value, list):
137 raise MkDocsYAMLParseError(
138 MkDocsYAMLParseError.INVALID_EXTRA_CONFIG.format(
139 config=config,
140 ),
141 )
142
143 user_config.setdefault('extra_javascript', []).extend([
144 'readthedocs-data.js',
145 '%score/js/readthedocs-doc-embed.js' % static_url,
146 '%sjavascript/readthedocs-analytics.js' % static_url,
147 ])
148 user_config.setdefault('extra_css', []).extend([
149 '%scss/badge_only.css' % static_url,
150 '%scss/readthedocs-doc-embed.css' % static_url,
151 ])
152
153 # The docs path is relative to the location
154 # of the mkdocs configuration file.
155 docs_path = os.path.join(
156 os.path.dirname(self.yaml_file),
157 docs_dir,
158 )
159
160 # RTD javascript writing
161 rtd_data = self.generate_rtd_data(
162 docs_dir=os.path.relpath(docs_path, self.root_path),
163 mkdocs_config=user_config,
164 )
165 with open(os.path.join(docs_path, 'readthedocs-data.js'), 'w') as f:
166 f.write(rtd_data)
167
168 # Use Read the Docs' analytics setup rather than mkdocs'
169 # This supports using RTD's privacy improvements around analytics
170 user_config['google_analytics'] = None
171
172 # README: make MkDocs to use ``readthedocs`` theme as default if the
173 # user didn't specify a specific theme manually
174 if self.project.has_feature(Feature.MKDOCS_THEME_RTD):
175 if 'theme' not in user_config:
176 # mkdocs<0.17 syntax
177 user_config['theme'] = self.DEFAULT_THEME_NAME
178
179 # Write the modified mkdocs configuration
180 yaml.safe_dump(
181 user_config,
182 open(self.yaml_file, 'w'),
183 )
184
185 # Write the mkdocs.yml to the build logs
186 self.run(
187 'cat',
188 os.path.relpath(self.yaml_file, self.root_path),
189 cwd=self.root_path,
190 )
191
192 def generate_rtd_data(self, docs_dir, mkdocs_config):
193 """Generate template properties and render readthedocs-data.js."""
194 # Use the analytics code from mkdocs.yml
195 # if it isn't set already by Read the Docs,
196 analytics_code = self.version.project.analytics_code
197 if not analytics_code and mkdocs_config.get('google_analytics'):
198 # http://www.mkdocs.org/user-guide/configuration/#google_analytics
199 analytics_code = mkdocs_config['google_analytics'][0]
200
201 # Will be available in the JavaScript as READTHEDOCS_DATA.
202 readthedocs_data = {
203 'project': self.version.project.slug,
204 'version': self.version.slug,
205 'language': self.version.project.language,
206 'programming_language': self.version.project.programming_language,
207 'page': None,
208 'theme': self.get_theme_name(mkdocs_config),
209 'builder': 'mkdocs',
210 'docroot': docs_dir,
211 'source_suffix': '.md',
212 'api_host': getattr(
213 settings,
214 'PUBLIC_API_URL',
215 'https://readthedocs.org',
216 ),
217 'ad_free': not self.project.show_advertising,
218 'commit': self.version.project.vcs_repo(self.version.slug).commit,
219 'global_analytics_code': getattr(
220 settings,
221 'GLOBAL_ANALYTICS_CODE',
222 'UA-17997319-1',
223 ),
224 'user_analytics_code': analytics_code,
225 }
226 data_json = json.dumps(readthedocs_data, indent=4)
227 data_ctx = {
228 'data_json': data_json,
229 'current_version': readthedocs_data['version'],
230 'slug': readthedocs_data['project'],
231 'html_theme': readthedocs_data['theme'],
232 'pagename': None,
233 }
234 tmpl = template_loader.get_template('doc_builder/data.js.tmpl')
235 return tmpl.render(data_ctx)
236
237 def build(self):
238 checkout_path = self.project.checkout_path(self.version.slug)
239 build_command = [
240 'python',
241 self.python_env.venv_bin(filename='mkdocs'),
242 self.builder,
243 '--clean',
244 '--site-dir',
245 self.build_dir,
246 '--config-file',
247 self.yaml_file,
248 ]
249 if self.config.mkdocs.fail_on_warning:
250 build_command.append('--strict')
251 cmd_ret = self.run(
252 *build_command, cwd=checkout_path,
253 bin_path=self.python_env.venv_bin()
254 )
255 return cmd_ret.successful
256
257 def get_theme_name(self, mkdocs_config):
258 """
259 Get the theme configuration in the mkdocs_config.
260
261 In v0.17.0, the theme configuration switched
262 from two separate configs (both optional) to a nested directive.
263
264 :see: http://www.mkdocs.org/about/release-notes/#theme-customization-1164
265 :returns: the name of the theme RTD will use
266 """
267 theme_setting = mkdocs_config.get('theme')
268 if isinstance(theme_setting, dict):
269 # Full nested theme config (the new configuration)
270 return theme_setting.get('name') or self.DEFAULT_THEME_NAME
271
272 if theme_setting:
273 # A string which is the name of the theme
274 return theme_setting
275
276 theme_dir = mkdocs_config.get('theme_dir')
277 if theme_dir:
278 # Use the name of the directory in this project's custom theme directory
279 return theme_dir.rstrip('/').split('/')[-1]
280
281 return self.DEFAULT_THEME_NAME
282
283
284 class MkdocsHTML(BaseMkdocs):
285 type = 'mkdocs'
286 builder = 'build'
287 build_dir = '_build/html'
288
289
290 class MkdocsJSON(BaseMkdocs):
291 type = 'mkdocs_json'
292 builder = 'json'
293 build_dir = '_build/json'
294
[end of readthedocs/doc_builder/backends/mkdocs.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/readthedocs/doc_builder/backends/mkdocs.py b/readthedocs/doc_builder/backends/mkdocs.py
--- a/readthedocs/doc_builder/backends/mkdocs.py
+++ b/readthedocs/doc_builder/backends/mkdocs.py
@@ -140,15 +140,25 @@
),
)
- user_config.setdefault('extra_javascript', []).extend([
+ extra_javascript_list = [
'readthedocs-data.js',
'%score/js/readthedocs-doc-embed.js' % static_url,
'%sjavascript/readthedocs-analytics.js' % static_url,
- ])
- user_config.setdefault('extra_css', []).extend([
+ ]
+ extra_css_list = [
'%scss/badge_only.css' % static_url,
'%scss/readthedocs-doc-embed.css' % static_url,
- ])
+ ]
+
+ # Only add static file if the files are not already in the list
+ user_config.setdefault('extra_javascript', []).extend(
+ [js for js in extra_javascript_list if js not in user_config.get(
+ 'extra_javascript')]
+ )
+ user_config.setdefault('extra_css', []).extend(
+ [css for css in extra_css_list if css not in user_config.get(
+ 'extra_css')]
+ )
# The docs path is relative to the location
# of the mkdocs configuration file.
| {"golden_diff": "diff --git a/readthedocs/doc_builder/backends/mkdocs.py b/readthedocs/doc_builder/backends/mkdocs.py\n--- a/readthedocs/doc_builder/backends/mkdocs.py\n+++ b/readthedocs/doc_builder/backends/mkdocs.py\n@@ -140,15 +140,25 @@\n ),\n )\n \n- user_config.setdefault('extra_javascript', []).extend([\n+ extra_javascript_list = [\n 'readthedocs-data.js',\n '%score/js/readthedocs-doc-embed.js' % static_url,\n '%sjavascript/readthedocs-analytics.js' % static_url,\n- ])\n- user_config.setdefault('extra_css', []).extend([\n+ ]\n+ extra_css_list = [\n '%scss/badge_only.css' % static_url,\n '%scss/readthedocs-doc-embed.css' % static_url,\n- ])\n+ ]\n+\n+ # Only add static file if the files are not already in the list\n+ user_config.setdefault('extra_javascript', []).extend(\n+ [js for js in extra_javascript_list if js not in user_config.get(\n+ 'extra_javascript')]\n+ )\n+ user_config.setdefault('extra_css', []).extend(\n+ [css for css in extra_css_list if css not in user_config.get(\n+ 'extra_css')]\n+ )\n \n # The docs path is relative to the location\n # of the mkdocs configuration file.\n", "issue": "Be more defensive in how we insert our mkdocs media\nThis logic can be executed multiple times on [some builds](https://readthedocs.org/projects/phpspreadsheet/builds/8340376/) (see the `mkdocs.yml`):\r\n\r\nhttps://github.com/rtfd/readthedocs.org/blob/ed8dd29a68d009fc08929fabf7155883482619c1/readthedocs/doc_builder/backends/mkdocs.py#L143-L152\r\n\r\nIf we have an issue reseting the branch/tag of the docs, we will end up inserting our media files twice, which causes them to be executed twice.\r\n\r\n## Solution\r\n\r\nThis code should check if one of the files we insert is already in the list:\r\n\r\n```\r\nif 'readthedocs-data.js' not in user_config.get('extra_javascript'):\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"\nMkDocs_ backend for building docs.\n\n.. _MkDocs: http://www.mkdocs.org/\n\"\"\"\nimport json\nimport logging\nimport os\n\nimport yaml\nfrom django.conf import settings\nfrom django.template import loader as template_loader\n\nfrom readthedocs.doc_builder.base import BaseBuilder\nfrom readthedocs.doc_builder.exceptions import MkDocsYAMLParseError\nfrom readthedocs.projects.models import Feature\n\n\nlog = logging.getLogger(__name__)\n\n\ndef get_absolute_static_url():\n \"\"\"\n Get the fully qualified static URL from settings.\n\n Mkdocs needs a full domain because it tries to link to local files.\n \"\"\"\n static_url = settings.STATIC_URL\n\n if not static_url.startswith('http'):\n domain = getattr(settings, 'PRODUCTION_DOMAIN')\n static_url = 'http://{}{}'.format(domain, static_url)\n\n return static_url\n\n\nclass BaseMkdocs(BaseBuilder):\n\n \"\"\"Mkdocs builder.\"\"\"\n\n # The default theme for mkdocs is the 'mkdocs' theme\n DEFAULT_THEME_NAME = 'mkdocs'\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.old_artifact_path = os.path.join(\n self.version.project.checkout_path(self.version.slug),\n self.build_dir,\n )\n self.root_path = self.version.project.checkout_path(self.version.slug)\n self.yaml_file = self.get_yaml_config()\n\n # README: historically, the default theme was ``readthedocs`` but in\n # https://github.com/rtfd/readthedocs.org/pull/4556 we change it to\n # ``mkdocs`` to maintain the same behavior in Read the Docs than\n # building locally. Although, we can't apply this into the Corporate\n # site. 
To keep the same default theme there, we created a Feature flag\n # for these project that were building with MkDocs in the Corporate\n # site.\n if self.project.has_feature(Feature.MKDOCS_THEME_RTD):\n self.DEFAULT_THEME_NAME = 'readthedocs'\n log.warning(\n 'Project using readthedocs theme as default for MkDocs: slug=%s',\n self.project.slug,\n )\n else:\n self.DEFAULT_THEME_NAME = 'mkdocs'\n\n def get_yaml_config(self):\n \"\"\"Find the ``mkdocs.yml`` file in the project root.\"\"\"\n mkdoc_path = self.config.mkdocs.configuration\n if not mkdoc_path:\n mkdoc_path = os.path.join(\n self.project.checkout_path(self.version.slug),\n 'mkdocs.yml',\n )\n if not os.path.exists(mkdoc_path):\n return None\n return mkdoc_path\n\n def load_yaml_config(self):\n \"\"\"\n Load a YAML config.\n\n :raises: ``MkDocsYAMLParseError`` if failed due to syntax errors.\n \"\"\"\n try:\n return yaml.safe_load(open(self.yaml_file, 'r'),)\n except IOError:\n return {\n 'site_name': self.version.project.name,\n }\n except yaml.YAMLError as exc:\n note = ''\n if hasattr(exc, 'problem_mark'):\n mark = exc.problem_mark\n note = ' (line %d, column %d)' % (\n mark.line + 1,\n mark.column + 1,\n )\n raise MkDocsYAMLParseError(\n 'Your mkdocs.yml could not be loaded, '\n 'possibly due to a syntax error{note}'.format(note=note),\n )\n\n def append_conf(self, **__):\n \"\"\"\n Set mkdocs config values.\n\n :raises: ``MkDocsYAMLParseError`` if failed due to known type errors\n (i.e. expecting a list and a string is found).\n \"\"\"\n if not self.yaml_file:\n self.yaml_file = os.path.join(self.root_path, 'mkdocs.yml')\n\n user_config = self.load_yaml_config()\n\n # Handle custom docs dirs\n user_docs_dir = user_config.get('docs_dir')\n if not isinstance(user_docs_dir, (type(None), str)):\n raise MkDocsYAMLParseError(\n MkDocsYAMLParseError.INVALID_DOCS_DIR_CONFIG,\n )\n\n docs_dir = self.docs_dir(docs_dir=user_docs_dir)\n self.create_index(extension='md')\n user_config['docs_dir'] = docs_dir\n\n # Set mkdocs config values\n static_url = get_absolute_static_url()\n\n for config in ('extra_css', 'extra_javascript'):\n user_value = user_config.get(config, [])\n if not isinstance(user_value, list):\n raise MkDocsYAMLParseError(\n MkDocsYAMLParseError.INVALID_EXTRA_CONFIG.format(\n config=config,\n ),\n )\n\n user_config.setdefault('extra_javascript', []).extend([\n 'readthedocs-data.js',\n '%score/js/readthedocs-doc-embed.js' % static_url,\n '%sjavascript/readthedocs-analytics.js' % static_url,\n ])\n user_config.setdefault('extra_css', []).extend([\n '%scss/badge_only.css' % static_url,\n '%scss/readthedocs-doc-embed.css' % static_url,\n ])\n\n # The docs path is relative to the location\n # of the mkdocs configuration file.\n docs_path = os.path.join(\n os.path.dirname(self.yaml_file),\n docs_dir,\n )\n\n # RTD javascript writing\n rtd_data = self.generate_rtd_data(\n docs_dir=os.path.relpath(docs_path, self.root_path),\n mkdocs_config=user_config,\n )\n with open(os.path.join(docs_path, 'readthedocs-data.js'), 'w') as f:\n f.write(rtd_data)\n\n # Use Read the Docs' analytics setup rather than mkdocs'\n # This supports using RTD's privacy improvements around analytics\n user_config['google_analytics'] = None\n\n # README: make MkDocs to use ``readthedocs`` theme as default if the\n # user didn't specify a specific theme manually\n if self.project.has_feature(Feature.MKDOCS_THEME_RTD):\n if 'theme' not in user_config:\n # mkdocs<0.17 syntax\n user_config['theme'] = self.DEFAULT_THEME_NAME\n\n # Write the modified mkdocs 
configuration\n yaml.safe_dump(\n user_config,\n open(self.yaml_file, 'w'),\n )\n\n # Write the mkdocs.yml to the build logs\n self.run(\n 'cat',\n os.path.relpath(self.yaml_file, self.root_path),\n cwd=self.root_path,\n )\n\n def generate_rtd_data(self, docs_dir, mkdocs_config):\n \"\"\"Generate template properties and render readthedocs-data.js.\"\"\"\n # Use the analytics code from mkdocs.yml\n # if it isn't set already by Read the Docs,\n analytics_code = self.version.project.analytics_code\n if not analytics_code and mkdocs_config.get('google_analytics'):\n # http://www.mkdocs.org/user-guide/configuration/#google_analytics\n analytics_code = mkdocs_config['google_analytics'][0]\n\n # Will be available in the JavaScript as READTHEDOCS_DATA.\n readthedocs_data = {\n 'project': self.version.project.slug,\n 'version': self.version.slug,\n 'language': self.version.project.language,\n 'programming_language': self.version.project.programming_language,\n 'page': None,\n 'theme': self.get_theme_name(mkdocs_config),\n 'builder': 'mkdocs',\n 'docroot': docs_dir,\n 'source_suffix': '.md',\n 'api_host': getattr(\n settings,\n 'PUBLIC_API_URL',\n 'https://readthedocs.org',\n ),\n 'ad_free': not self.project.show_advertising,\n 'commit': self.version.project.vcs_repo(self.version.slug).commit,\n 'global_analytics_code': getattr(\n settings,\n 'GLOBAL_ANALYTICS_CODE',\n 'UA-17997319-1',\n ),\n 'user_analytics_code': analytics_code,\n }\n data_json = json.dumps(readthedocs_data, indent=4)\n data_ctx = {\n 'data_json': data_json,\n 'current_version': readthedocs_data['version'],\n 'slug': readthedocs_data['project'],\n 'html_theme': readthedocs_data['theme'],\n 'pagename': None,\n }\n tmpl = template_loader.get_template('doc_builder/data.js.tmpl')\n return tmpl.render(data_ctx)\n\n def build(self):\n checkout_path = self.project.checkout_path(self.version.slug)\n build_command = [\n 'python',\n self.python_env.venv_bin(filename='mkdocs'),\n self.builder,\n '--clean',\n '--site-dir',\n self.build_dir,\n '--config-file',\n self.yaml_file,\n ]\n if self.config.mkdocs.fail_on_warning:\n build_command.append('--strict')\n cmd_ret = self.run(\n *build_command, cwd=checkout_path,\n bin_path=self.python_env.venv_bin()\n )\n return cmd_ret.successful\n\n def get_theme_name(self, mkdocs_config):\n \"\"\"\n Get the theme configuration in the mkdocs_config.\n\n In v0.17.0, the theme configuration switched\n from two separate configs (both optional) to a nested directive.\n\n :see: http://www.mkdocs.org/about/release-notes/#theme-customization-1164\n :returns: the name of the theme RTD will use\n \"\"\"\n theme_setting = mkdocs_config.get('theme')\n if isinstance(theme_setting, dict):\n # Full nested theme config (the new configuration)\n return theme_setting.get('name') or self.DEFAULT_THEME_NAME\n\n if theme_setting:\n # A string which is the name of the theme\n return theme_setting\n\n theme_dir = mkdocs_config.get('theme_dir')\n if theme_dir:\n # Use the name of the directory in this project's custom theme directory\n return theme_dir.rstrip('/').split('/')[-1]\n\n return self.DEFAULT_THEME_NAME\n\n\nclass MkdocsHTML(BaseMkdocs):\n type = 'mkdocs'\n builder = 'build'\n build_dir = '_build/html'\n\n\nclass MkdocsJSON(BaseMkdocs):\n type = 'mkdocs_json'\n builder = 'json'\n build_dir = '_build/json'\n", "path": "readthedocs/doc_builder/backends/mkdocs.py"}]} | 3,767 | 314 |
gh_patches_debug_12410 | rasdani/github-patches | git_diff | sagemath__sage-36176 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
toml package is obsolete
### Steps To Reproduce
_No response_
### Expected Behavior
toml is not installed
### Actual Behavior
toml is installed
### Additional Information
I think our `toml` package is obsolete. The only other package listing it as a requirement is tox, but tox only needs it with ancient pythons: https://github.com/tox-dev/tox/blob/3.27.1/setup.cfg#L45
I think our dependency can be replaced with tomli, at which point toml can go.
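
For reference, a minimal sketch of what the reading side looks like with tomli in place of toml — this is only an illustration of the API difference, not a claim about where Sage actually reads TOML, and the file name is made up:

```python
# Hypothetical example: tomli only parses TOML (it has no dump/write API)
# and, unlike toml.load(), expects a file opened in binary mode.
import tomli

with open("pyproject.toml", "rb") as f:  # illustrative path
    config = tomli.load(f)

print(config.get("build-system", {}).get("requires", []))
```

On Python 3.11+ the standard-library `tomllib` module exposes the same `load`/`loads` interface, so even the tomli dependency eventually becomes unnecessary.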
### Environment
```markdown
Gentoo / git develop
```
### Checklist
- [X] I have searched the existing issues for a bug report that matches the one I want to file, without success.
- [X] I have read the documentation and troubleshoot guide
</issue>
<code>
[start of build/sage_bootstrap/creator.py]
1 # -*- coding: utf-8 -*-
2 """
3 Package Creator
4 """
5
6 # ****************************************************************************
7 # Copyright (C) 2016 Volker Braun <[email protected]>
8 #
9 # This program is free software: you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation, either version 2 of the License, or
12 # (at your option) any later version.
13 # https://www.gnu.org/licenses/
14 # ****************************************************************************
15
16 import os
17
18 import logging
19 log = logging.getLogger()
20
21 from sage_bootstrap.env import SAGE_ROOT
22
23
24 class PackageCreator(object):
25
26 def __init__(self, package_name):
27 self.package_name = package_name
28 self.path = os.path.join(SAGE_ROOT, 'build', 'pkgs', package_name)
29 try:
30 os.mkdir(self.path)
31 except OSError:
32 pass
33
34 def set_version(self, version):
35 """
36 Write the version to ``package-version.txt``
37 """
38 with open(os.path.join(self.path, 'package-version.txt'), 'w+') as f:
39 f.write(version)
40 f.write('\n')
41
42 def set_type(self, pkg_type):
43 """
44 Write the package type to ``type``
45 """
46 with open(os.path.join(self.path, 'type'), 'w+') as f:
47 f.write(pkg_type)
48 f.write('\n')
49
50 def set_tarball(self, tarball, upstream_url):
51 """
52 Write the tarball name pattern to ``checksums.ini``
53 """
54 with open(os.path.join(self.path, 'checksums.ini'), 'w+') as f:
55 f.write('tarball={0}'.format(tarball))
56 f.write('\n')
57 if upstream_url:
58 f.write('upstream_url={0}'.format(upstream_url))
59 f.write('\n')
60
61 def set_description(self, description, license, upstream_contact):
62 """
63 Write the ``SPKG.rst`` file
64 """
65 with open(os.path.join(self.path, 'SPKG.rst'), 'w+') as f:
66 def heading(title, char='-'):
67 return '{0}\n{1}\n\n'.format(title, char * len(title))
68 if description:
69 title = '{0}: {1}'.format(self.package_name, description)
70 else:
71 title = self.package_name
72 f.write(heading(title, '='))
73 f.write(heading('Description'))
74 if description:
75 f.write('{0}\n\n'.format(description))
76 f.write(heading('License'))
77 if license:
78 f.write('{0}\n\n'.format(license))
79 f.write(heading('Upstream Contact'))
80 if upstream_contact:
81 f.write('{0}\n\n'.format(upstream_contact))
82
83 def set_python_data_and_scripts(self, pypi_package_name=None, source='normal'):
84 """
85 Write the file ``dependencies`` and other files for Python packages.
86
87 If ``source`` is ``"normal"``, write the files ``spkg-install.in`` and
88 ``install-requires.txt``.
89
90 If ``source`` is ``"wheel"``, write the file ``install-requires.txt``.
91
92 If ``source`` is ``"pip"``, write the file ``requirements.txt``.
93 """
94 if pypi_package_name is None:
95 pypi_package_name = self.package_name
96 with open(os.path.join(self.path, 'dependencies'), 'w+') as f:
97 f.write('$(PYTHON) | $(PYTHON_TOOLCHAIN)\n\n')
98 f.write('----------\nAll lines of this file are ignored except the first.\n')
99 if source == 'normal':
100 with open(os.path.join(self.path, 'spkg-install.in'), 'w+') as f:
101 f.write('cd src\nsdh_pip_install .\n')
102 with open(os.path.join(self.path, 'install-requires.txt'), 'w+') as f:
103 f.write('{0}\n'.format(pypi_package_name))
104 elif source == 'wheel':
105 with open(os.path.join(self.path, 'install-requires.txt'), 'w+') as f:
106 f.write('{0}\n'.format(pypi_package_name))
107 elif source == 'pip':
108 with open(os.path.join(self.path, 'requirements.txt'), 'w+') as f:
109 f.write('{0}\n'.format(pypi_package_name))
110 elif source == 'script':
111 pass
112 else:
113 raise ValueError('package source must be one of normal, script, pip, or wheel')
114
[end of build/sage_bootstrap/creator.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/build/sage_bootstrap/creator.py b/build/sage_bootstrap/creator.py
--- a/build/sage_bootstrap/creator.py
+++ b/build/sage_bootstrap/creator.py
@@ -94,7 +94,7 @@
if pypi_package_name is None:
pypi_package_name = self.package_name
with open(os.path.join(self.path, 'dependencies'), 'w+') as f:
- f.write('$(PYTHON) | $(PYTHON_TOOLCHAIN)\n\n')
+ f.write(' | $(PYTHON_TOOLCHAIN) $(PYTHON)\n\n')
f.write('----------\nAll lines of this file are ignored except the first.\n')
if source == 'normal':
with open(os.path.join(self.path, 'spkg-install.in'), 'w+') as f:
| {"golden_diff": "diff --git a/build/sage_bootstrap/creator.py b/build/sage_bootstrap/creator.py\n--- a/build/sage_bootstrap/creator.py\n+++ b/build/sage_bootstrap/creator.py\n@@ -94,7 +94,7 @@\n if pypi_package_name is None:\n pypi_package_name = self.package_name\n with open(os.path.join(self.path, 'dependencies'), 'w+') as f:\n- f.write('$(PYTHON) | $(PYTHON_TOOLCHAIN)\\n\\n')\n+ f.write(' | $(PYTHON_TOOLCHAIN) $(PYTHON)\\n\\n')\n f.write('----------\\nAll lines of this file are ignored except the first.\\n')\n if source == 'normal':\n with open(os.path.join(self.path, 'spkg-install.in'), 'w+') as f:\n", "issue": "toml package is obsolete\n### Steps To Reproduce\n\n_No response_\n\n### Expected Behavior\n\ntoml is not installed\n\n### Actual Behavior\n\ntoml is installed\n\n### Additional Information\n\nI think our `toml` package is obsolete. The only other package listing it as a requirement is tox, but tox only needs it with ancient pythons: https://github.com/tox-dev/tox/blob/3.27.1/setup.cfg#L45\r\n\r\nI think our dependency can be replaced with tomli at which point toml can go.\r\n\n\n### Environment\n\n```markdown\nGentoo / git develop\n```\n\n\n### Checklist\n\n- [X] I have searched the existing issues for a bug report that matches the one I want to file, without success.\n- [X] I have read the documentation and troubleshoot guide\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nPackage Creator\n\"\"\"\n\n# ****************************************************************************\n# Copyright (C) 2016 Volker Braun <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 2 of the License, or\n# (at your option) any later version.\n# https://www.gnu.org/licenses/\n# ****************************************************************************\n\nimport os\n\nimport logging\nlog = logging.getLogger()\n\nfrom sage_bootstrap.env import SAGE_ROOT\n\n\nclass PackageCreator(object):\n\n def __init__(self, package_name):\n self.package_name = package_name\n self.path = os.path.join(SAGE_ROOT, 'build', 'pkgs', package_name)\n try:\n os.mkdir(self.path)\n except OSError:\n pass\n\n def set_version(self, version):\n \"\"\"\n Write the version to ``package-version.txt``\n \"\"\"\n with open(os.path.join(self.path, 'package-version.txt'), 'w+') as f:\n f.write(version)\n f.write('\\n')\n\n def set_type(self, pkg_type):\n \"\"\"\n Write the package type to ``type``\n \"\"\"\n with open(os.path.join(self.path, 'type'), 'w+') as f:\n f.write(pkg_type)\n f.write('\\n')\n\n def set_tarball(self, tarball, upstream_url):\n \"\"\"\n Write the tarball name pattern to ``checksums.ini``\n \"\"\"\n with open(os.path.join(self.path, 'checksums.ini'), 'w+') as f:\n f.write('tarball={0}'.format(tarball))\n f.write('\\n')\n if upstream_url:\n f.write('upstream_url={0}'.format(upstream_url))\n f.write('\\n')\n\n def set_description(self, description, license, upstream_contact):\n \"\"\"\n Write the ``SPKG.rst`` file\n \"\"\"\n with open(os.path.join(self.path, 'SPKG.rst'), 'w+') as f:\n def heading(title, char='-'):\n return '{0}\\n{1}\\n\\n'.format(title, char * len(title))\n if description:\n title = '{0}: {1}'.format(self.package_name, description)\n else:\n title = self.package_name\n f.write(heading(title, '='))\n f.write(heading('Description'))\n if description:\n f.write('{0}\\n\\n'.format(description))\n 
f.write(heading('License'))\n if license:\n f.write('{0}\\n\\n'.format(license))\n f.write(heading('Upstream Contact'))\n if upstream_contact:\n f.write('{0}\\n\\n'.format(upstream_contact))\n\n def set_python_data_and_scripts(self, pypi_package_name=None, source='normal'):\n \"\"\"\n Write the file ``dependencies`` and other files for Python packages.\n\n If ``source`` is ``\"normal\"``, write the files ``spkg-install.in`` and\n ``install-requires.txt``.\n\n If ``source`` is ``\"wheel\"``, write the file ``install-requires.txt``.\n\n If ``source`` is ``\"pip\"``, write the file ``requirements.txt``.\n \"\"\"\n if pypi_package_name is None:\n pypi_package_name = self.package_name\n with open(os.path.join(self.path, 'dependencies'), 'w+') as f:\n f.write('$(PYTHON) | $(PYTHON_TOOLCHAIN)\\n\\n')\n f.write('----------\\nAll lines of this file are ignored except the first.\\n')\n if source == 'normal':\n with open(os.path.join(self.path, 'spkg-install.in'), 'w+') as f:\n f.write('cd src\\nsdh_pip_install .\\n')\n with open(os.path.join(self.path, 'install-requires.txt'), 'w+') as f:\n f.write('{0}\\n'.format(pypi_package_name))\n elif source == 'wheel':\n with open(os.path.join(self.path, 'install-requires.txt'), 'w+') as f:\n f.write('{0}\\n'.format(pypi_package_name))\n elif source == 'pip':\n with open(os.path.join(self.path, 'requirements.txt'), 'w+') as f:\n f.write('{0}\\n'.format(pypi_package_name))\n elif source == 'script':\n pass\n else:\n raise ValueError('package source must be one of normal, script, pip, or wheel')\n", "path": "build/sage_bootstrap/creator.py"}]} | 1,917 | 171 |
gh_patches_debug_14444 | rasdani/github-patches | git_diff | mlflow__mlflow-11722 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[BUG]mlflow.log_input will export dataset to json and import dataset again
### Issues Policy acknowledgement
- [X] I have read and agree to submit bug reports in accordance with the [issues policy](https://www.github.com/mlflow/mlflow/blob/master/ISSUE_POLICY.md)
### Where did you encounter this bug?
Local machine
### Willingness to contribute
No. I cannot contribute a bug fix at this time.
### MLflow version
- Client: 2.11.3
- Tracking server: 2.11.3
### System information
- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**: Ubuntu 18.04 (WSL2)
- **Python version**: 3.10
### Describe the problem
Running `mlflow.log_input(dataset, context='training')` will export the dataset to JSON and then import it again.
`mlflow.log_input(dataset, context='training')` will call
`dataset_input = DatasetInput(dataset=dataset._to_mlflow_entity(), tags=tags_to_log)` will call
```
def _to_mlflow_entity(self) -> DatasetEntity:
"""
Returns:
A `mlflow.entities.Dataset` instance representing the dataset.
"""
dataset_json = json.loads(self.to_json())
return DatasetEntity(
name=dataset_json["name"],
digest=dataset_json["digest"],
source_type=dataset_json["source_type"],
source=dataset_json["source"],
schema=dataset_json.get("schema"),
profile=dataset_json.get("profile"),
)
```
Here `dataset_json = json.loads(self.to_json())` serializes the dataset to JSON and immediately parses it back, so every `log_input` call pays for a full JSON round-trip.
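
A minimal sketch of one way to avoid that round-trip — assuming `Dataset.to_dict()` already returns the same fields that `to_json()` encodes — is to build the entity from the dict directly. This is a sketch of a drop-in replacement for the method in `mlflow/data/dataset.py` (where `DatasetEntity` is already imported), not necessarily the final fix:

```python
def _to_mlflow_entity(self) -> DatasetEntity:
    # Build the entity from the plain dict; no JSON encode/decode round-trip.
    dataset_dict = self.to_dict()
    return DatasetEntity(
        name=dataset_dict["name"],
        digest=dataset_dict["digest"],
        source_type=dataset_dict["source_type"],
        source=dataset_dict["source"],
        schema=dataset_dict.get("schema"),
        profile=dataset_dict.get("profile"),
    )
```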
Dataset definition:
```
class MyDataset(Dataset):
def __init__(self, data_dir, segment_len=128):
self.data_dir = data_dir
self.segment_len = segment_len
# Load the mapping from speaker name to their corresponding id.
mapping_path = Path(data_dir) / "mapping.json"
mapping = json.load(mapping_path.open())
self.speaker2id = mapping["speaker2id"]
# Load metadata of training data.
metadata_path = Path(data_dir) / "metadata.json"
metadata = json.load(open(metadata_path))["speakers"]
# Get the total number of speaker.
self.speaker_num = len(metadata.keys())
self.data = []
for speaker in metadata.keys():
for utterances in metadata[speaker]:
self.data.append([utterances["feature_path"], self.speaker2id[speaker]])
```
### Tracking information
<!-- PLEASE KEEP BACKTICKS AND CHECK PREVIEW -->
```shell
REPLACE_ME
```
### Code to reproduce issue
<!-- PLEASE KEEP BACKTICKS AND CHECK PREVIEW -->
```
class MyDataset(Dataset):
def __init__(self, data_dir, segment_len=128):
self.data_dir = data_dir
self.segment_len = segment_len
# Load the mapping from speaker name to their corresponding id.
mapping_path = Path(data_dir) / "mapping.json"
mapping = json.load(mapping_path.open())
self.speaker2id = mapping["speaker2id"]
# Load metadata of training data.
metadata_path = Path(data_dir) / "metadata.json"
metadata = json.load(open(metadata_path))["speakers"]
# Get the total number of speaker.
self.speaker_num = len(metadata.keys())
self.data = []
for speaker in metadata.keys():
for utterances in metadata[speaker]:
self.data.append([utterances["feature_path"], self.speaker2id[speaker]])
# Create a DataFrame from the data
df = pd.DataFrame(self.data, columns=['feature_path', 'speaker_id'])
# Create a Dataset object from the DataFrame
dataset = mlflow.data.from_pandas(df, source=data_dir)
# Log the Dataset object
mlflow.log_input(dataset, context='training')
dataset = MyDataset("./Dataset")
```
Data is [here](https://drive.google.com/drive/folders/1vI1kuLB-q1VilIftiwnPOCAeOOFfBZge?usp=sharing)
### Stack trace
<!-- PLEASE KEEP BACKTICKS AND CHECK PREVIEW -->
```
REPLACE_ME
```
### Other info / logs
<!-- PLEASE KEEP BACKTICKS AND CHECK PREVIEW -->
```
REPLACE_ME
```
### What component(s) does this bug affect?
- [X] `area/artifacts`: Artifact stores and artifact logging
- [ ] `area/build`: Build and test infrastructure for MLflow
- [ ] `area/deployments`: MLflow Deployments client APIs, server, and third-party Deployments integrations
- [ ] `area/docs`: MLflow documentation pages
- [ ] `area/examples`: Example code
- [ ] `area/model-registry`: Model Registry service, APIs, and the fluent client calls for Model Registry
- [ ] `area/models`: MLmodel format, model serialization/deserialization, flavors
- [ ] `area/recipes`: Recipes, Recipe APIs, Recipe configs, Recipe Templates
- [ ] `area/projects`: MLproject format, project running backends
- [ ] `area/scoring`: MLflow Model server, model deployment tools, Spark UDFs
- [ ] `area/server-infra`: MLflow Tracking server backend
- [ ] `area/tracking`: Tracking Service, tracking client APIs, autologging
### What interface(s) does this bug affect?
- [ ] `area/uiux`: Front-end, user experience, plotting, JavaScript, JavaScript dev server
- [ ] `area/docker`: Docker use across MLflow's components, such as MLflow Projects and MLflow Models
- [ ] `area/sqlalchemy`: Use of SQLAlchemy in the Tracking Service or Model Registry
- [ ] `area/windows`: Windows support
### What language(s) does this bug affect?
- [ ] `language/r`: R APIs and clients
- [ ] `language/java`: Java APIs and clients
- [ ] `language/new`: Proposals for new client languages
### What integration(s) does this bug affect?
- [ ] `integrations/azure`: Azure and Azure ML integrations
- [ ] `integrations/sagemaker`: SageMaker integrations
- [ ] `integrations/databricks`: Databricks integrations
</issue>
<code>
[start of mlflow/data/dataset.py]
1 import json
2 from abc import abstractmethod
3 from typing import Any, Dict, Optional
4
5 from mlflow.data.dataset_source import DatasetSource
6 from mlflow.entities import Dataset as DatasetEntity
7
8
9 class Dataset:
10 """
11 Represents a dataset for use with MLflow Tracking, including the name, digest (hash),
12 schema, and profile of the dataset as well as source information (e.g. the S3 bucket or
13 managed Delta table from which the dataset was derived). Most datasets expose features
14 and targets for training and evaluation as well.
15 """
16
17 def __init__(
18 self, source: DatasetSource, name: Optional[str] = None, digest: Optional[str] = None
19 ):
20 """
21 Base constructor for a dataset. All subclasses must call this constructor.
22 """
23 self._name = name
24 self._source = source
25 # Note: Subclasses should call super() once they've initialized all of
26 # the class attributes necessary for digest computation
27 self._digest = digest or self._compute_digest()
28
29 @abstractmethod
30 def _compute_digest(self) -> str:
31 """Computes a digest for the dataset. Called if the user doesn't supply
32 a digest when constructing the dataset.
33
34 Returns:
35 A string digest for the dataset. We recommend a maximum digest length
36 of 10 characters with an ideal length of 8 characters.
37
38 """
39
40 @abstractmethod
41 def to_dict(self) -> Dict[str, str]:
42 """Create config dictionary for the dataset.
43
44 Subclasses should override this method to provide additional fields in the config dict,
45 e.g., schema, profile, etc.
46
47 Returns a string dictionary containing the following fields: name, digest, source, source
48 type.
49 """
50 return {
51 "name": self.name,
52 "digest": self.digest,
53 "source": self.source.to_json(),
54 "source_type": self.source._get_source_type(),
55 }
56
57 def to_json(self) -> str:
58 """
59 Obtains a JSON string representation of the :py:class:`Dataset
60 <mlflow.data.dataset.Dataset>`.
61
62 Returns:
63 A JSON string representation of the :py:class:`Dataset <mlflow.data.dataset.Dataset>`.
64 """
65
66 return json.dumps(self.to_dict())
67
68 @property
69 def name(self) -> str:
70 """
71 The name of the dataset, e.g. ``"iris_data"``, ``"myschema.mycatalog.mytable@v1"``, etc.
72 """
73 if self._name is not None:
74 return self._name
75 else:
76 return "dataset"
77
78 @property
79 def digest(self) -> str:
80 """
81 A unique hash or fingerprint of the dataset, e.g. ``"498c7496"``.
82 """
83 return self._digest
84
85 @property
86 def source(self) -> DatasetSource:
87 """
88 Information about the dataset's source, represented as an instance of
89 :py:class:`DatasetSource <mlflow.data.dataset_source.DatasetSource>`. For example, this
90 may be the S3 location or the name of the managed Delta Table from which the dataset
91 was derived.
92 """
93 return self._source
94
95 @property
96 @abstractmethod
97 def profile(self) -> Optional[Any]:
98 """
99 Optional summary statistics for the dataset, such as the number of rows in a table, the
100 mean / median / std of each table column, etc.
101 """
102
103 @property
104 @abstractmethod
105 def schema(self) -> Optional[Any]:
106 """
107 Optional dataset schema, such as an instance of :py:class:`mlflow.types.Schema` representing
108 the features and targets of the dataset.
109 """
110
111 def _to_mlflow_entity(self) -> DatasetEntity:
112 """
113 Returns:
114 A `mlflow.entities.Dataset` instance representing the dataset.
115 """
116 dataset_json = json.loads(self.to_json())
117 return DatasetEntity(
118 name=dataset_json["name"],
119 digest=dataset_json["digest"],
120 source_type=dataset_json["source_type"],
121 source=dataset_json["source"],
122 schema=dataset_json.get("schema"),
123 profile=dataset_json.get("profile"),
124 )
125
[end of mlflow/data/dataset.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mlflow/data/dataset.py b/mlflow/data/dataset.py
--- a/mlflow/data/dataset.py
+++ b/mlflow/data/dataset.py
@@ -113,12 +113,12 @@
Returns:
A `mlflow.entities.Dataset` instance representing the dataset.
"""
- dataset_json = json.loads(self.to_json())
+ dataset_dict = self.to_dict()
return DatasetEntity(
- name=dataset_json["name"],
- digest=dataset_json["digest"],
- source_type=dataset_json["source_type"],
- source=dataset_json["source"],
- schema=dataset_json.get("schema"),
- profile=dataset_json.get("profile"),
+ name=dataset_dict["name"],
+ digest=dataset_dict["digest"],
+ source_type=dataset_dict["source_type"],
+ source=dataset_dict["source"],
+ schema=dataset_dict.get("schema"),
+ profile=dataset_dict.get("profile"),
)
| {"golden_diff": "diff --git a/mlflow/data/dataset.py b/mlflow/data/dataset.py\n--- a/mlflow/data/dataset.py\n+++ b/mlflow/data/dataset.py\n@@ -113,12 +113,12 @@\n Returns:\n A `mlflow.entities.Dataset` instance representing the dataset.\n \"\"\"\n- dataset_json = json.loads(self.to_json())\n+ dataset_dict = self.to_dict()\n return DatasetEntity(\n- name=dataset_json[\"name\"],\n- digest=dataset_json[\"digest\"],\n- source_type=dataset_json[\"source_type\"],\n- source=dataset_json[\"source\"],\n- schema=dataset_json.get(\"schema\"),\n- profile=dataset_json.get(\"profile\"),\n+ name=dataset_dict[\"name\"],\n+ digest=dataset_dict[\"digest\"],\n+ source_type=dataset_dict[\"source_type\"],\n+ source=dataset_dict[\"source\"],\n+ schema=dataset_dict.get(\"schema\"),\n+ profile=dataset_dict.get(\"profile\"),\n )\n", "issue": "[BUG]mlflow.log_input will export dataset to json and import dataset again\n### Issues Policy acknowledgement\n\n- [X] I have read and agree to submit bug reports in accordance with the [issues policy](https://www.github.com/mlflow/mlflow/blob/master/ISSUE_POLICY.md)\n\n### Where did you encounter this bug?\n\nLocal machine\n\n### Willingness to contribute\n\nNo. I cannot contribute a bug fix at this time.\n\n### MLflow version\n\n- Client: 2.11.3\r\n- Tracking server: 2.11.3\r\n\n\n### System information\n\n- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**: Ubuntu 18.04 (WSL2)\r\n- **Python version**: 3.10\r\n\n\n### Describe the problem\n\nrun `mlflow.log_input(dataset, context='training')` will export dataset to json and import dataset again\r\n\r\n`mlflow.log_input(dataset, context='training')` will call\r\n`dataset_input = DatasetInput(dataset=dataset._to_mlflow_entity(), tags=tags_to_log)` will cal\r\n```\r\ndef _to_mlflow_entity(self) -> DatasetEntity:\r\n \"\"\"\r\n Returns:\r\n A `mlflow.entities.Dataset` instance representing the dataset.\r\n \"\"\"\r\n dataset_json = json.loads(self.to_json())\r\n return DatasetEntity(\r\n name=dataset_json[\"name\"],\r\n digest=dataset_json[\"digest\"],\r\n source_type=dataset_json[\"source_type\"],\r\n source=dataset_json[\"source\"],\r\n schema=dataset_json.get(\"schema\"),\r\n profile=dataset_json.get(\"profile\"),\r\n )\r\n```\r\n`dataset_json = json.loads(self.to_json())` will export dataset to json and import dataset again\r\n\r\n\r\nDataset dinifination\r\n```\r\nclass MyDataset(Dataset):\r\n def __init__(self, data_dir, segment_len=128):\r\n self.data_dir = data_dir\r\n self.segment_len = segment_len\r\n\r\n # Load the mapping from speaker name to their corresponding id.\r\n mapping_path = Path(data_dir) / \"mapping.json\"\r\n mapping = json.load(mapping_path.open())\r\n self.speaker2id = mapping[\"speaker2id\"]\r\n\r\n # Load metadata of training data.\r\n metadata_path = Path(data_dir) / \"metadata.json\"\r\n metadata = json.load(open(metadata_path))[\"speakers\"]\r\n\r\n # Get the total number of speaker.\r\n self.speaker_num = len(metadata.keys())\r\n self.data = []\r\n for speaker in metadata.keys():\r\n for utterances in metadata[speaker]:\r\n self.data.append([utterances[\"feature_path\"], self.speaker2id[speaker]])\r\n```\r\n\n\n### Tracking information\n\n<!-- PLEASE KEEP BACKTICKS AND CHECK PREVIEW -->\r\n```shell\r\nREPLACE_ME\r\n```\r\n\n\n### Code to reproduce issue\n\n<!-- PLEASE KEEP BACKTICKS AND CHECK PREVIEW -->\r\n```\r\nclass MyDataset(Dataset):\r\n def __init__(self, data_dir, segment_len=128):\r\n self.data_dir = data_dir\r\n self.segment_len = segment_len\r\n\r\n # Load the mapping from 
speaker name to their corresponding id.\r\n mapping_path = Path(data_dir) / \"mapping.json\"\r\n mapping = json.load(mapping_path.open())\r\n self.speaker2id = mapping[\"speaker2id\"]\r\n\r\n # Load metadata of training data.\r\n metadata_path = Path(data_dir) / \"metadata.json\"\r\n metadata = json.load(open(metadata_path))[\"speakers\"]\r\n\r\n # Get the total number of speaker.\r\n self.speaker_num = len(metadata.keys())\r\n self.data = []\r\n for speaker in metadata.keys():\r\n for utterances in metadata[speaker]:\r\n self.data.append([utterances[\"feature_path\"], self.speaker2id[speaker]])\r\n\r\n # Create a DataFrame from the data\r\n df = pd.DataFrame(self.data, columns=['feature_path', 'speaker_id'])\r\n # Create a Dataset object from the DataFrame\r\n dataset = mlflow.data.from_pandas(df, source=data_dir)\r\n # Log the Dataset object\r\n mlflow.log_input(dataset, context='training')\r\n\r\ndataset = MyDataset(\"./Dataset\")\r\n```\r\n\r\nData is [here](https://drive.google.com/drive/folders/1vI1kuLB-q1VilIftiwnPOCAeOOFfBZge?usp=sharing)\r\n\n\n### Stack trace\n\n<!-- PLEASE KEEP BACKTICKS AND CHECK PREVIEW -->\r\n```\r\nREPLACE_ME\r\n```\r\n\n\n### Other info / logs\n\n<!-- PLEASE KEEP BACKTICKS AND CHECK PREVIEW -->\r\n```\r\nREPLACE_ME\r\n```\r\n\n\n### What component(s) does this bug affect?\n\n- [X] `area/artifacts`: Artifact stores and artifact logging\n- [ ] `area/build`: Build and test infrastructure for MLflow\n- [ ] `area/deployments`: MLflow Deployments client APIs, server, and third-party Deployments integrations\n- [ ] `area/docs`: MLflow documentation pages\n- [ ] `area/examples`: Example code\n- [ ] `area/model-registry`: Model Registry service, APIs, and the fluent client calls for Model Registry\n- [ ] `area/models`: MLmodel format, model serialization/deserialization, flavors\n- [ ] `area/recipes`: Recipes, Recipe APIs, Recipe configs, Recipe Templates\n- [ ] `area/projects`: MLproject format, project running backends\n- [ ] `area/scoring`: MLflow Model server, model deployment tools, Spark UDFs\n- [ ] `area/server-infra`: MLflow Tracking server backend\n- [ ] `area/tracking`: Tracking Service, tracking client APIs, autologging\n\n### What interface(s) does this bug affect?\n\n- [ ] `area/uiux`: Front-end, user experience, plotting, JavaScript, JavaScript dev server\n- [ ] `area/docker`: Docker use across MLflow's components, such as MLflow Projects and MLflow Models\n- [ ] `area/sqlalchemy`: Use of SQLAlchemy in the Tracking Service or Model Registry\n- [ ] `area/windows`: Windows support\n\n### What language(s) does this bug affect?\n\n- [ ] `language/r`: R APIs and clients\n- [ ] `language/java`: Java APIs and clients\n- [ ] `language/new`: Proposals for new client languages\n\n### What integration(s) does this bug affect?\n\n- [ ] `integrations/azure`: Azure and Azure ML integrations\n- [ ] `integrations/sagemaker`: SageMaker integrations\n- [ ] `integrations/databricks`: Databricks integrations\n", "before_files": [{"content": "import json\nfrom abc import abstractmethod\nfrom typing import Any, Dict, Optional\n\nfrom mlflow.data.dataset_source import DatasetSource\nfrom mlflow.entities import Dataset as DatasetEntity\n\n\nclass Dataset:\n \"\"\"\n Represents a dataset for use with MLflow Tracking, including the name, digest (hash),\n schema, and profile of the dataset as well as source information (e.g. the S3 bucket or\n managed Delta table from which the dataset was derived). 
Most datasets expose features\n and targets for training and evaluation as well.\n \"\"\"\n\n def __init__(\n self, source: DatasetSource, name: Optional[str] = None, digest: Optional[str] = None\n ):\n \"\"\"\n Base constructor for a dataset. All subclasses must call this constructor.\n \"\"\"\n self._name = name\n self._source = source\n # Note: Subclasses should call super() once they've initialized all of\n # the class attributes necessary for digest computation\n self._digest = digest or self._compute_digest()\n\n @abstractmethod\n def _compute_digest(self) -> str:\n \"\"\"Computes a digest for the dataset. Called if the user doesn't supply\n a digest when constructing the dataset.\n\n Returns:\n A string digest for the dataset. We recommend a maximum digest length\n of 10 characters with an ideal length of 8 characters.\n\n \"\"\"\n\n @abstractmethod\n def to_dict(self) -> Dict[str, str]:\n \"\"\"Create config dictionary for the dataset.\n\n Subclasses should override this method to provide additional fields in the config dict,\n e.g., schema, profile, etc.\n\n Returns a string dictionary containing the following fields: name, digest, source, source\n type.\n \"\"\"\n return {\n \"name\": self.name,\n \"digest\": self.digest,\n \"source\": self.source.to_json(),\n \"source_type\": self.source._get_source_type(),\n }\n\n def to_json(self) -> str:\n \"\"\"\n Obtains a JSON string representation of the :py:class:`Dataset\n <mlflow.data.dataset.Dataset>`.\n\n Returns:\n A JSON string representation of the :py:class:`Dataset <mlflow.data.dataset.Dataset>`.\n \"\"\"\n\n return json.dumps(self.to_dict())\n\n @property\n def name(self) -> str:\n \"\"\"\n The name of the dataset, e.g. ``\"iris_data\"``, ``\"myschema.mycatalog.mytable@v1\"``, etc.\n \"\"\"\n if self._name is not None:\n return self._name\n else:\n return \"dataset\"\n\n @property\n def digest(self) -> str:\n \"\"\"\n A unique hash or fingerprint of the dataset, e.g. ``\"498c7496\"``.\n \"\"\"\n return self._digest\n\n @property\n def source(self) -> DatasetSource:\n \"\"\"\n Information about the dataset's source, represented as an instance of\n :py:class:`DatasetSource <mlflow.data.dataset_source.DatasetSource>`. For example, this\n may be the S3 location or the name of the managed Delta Table from which the dataset\n was derived.\n \"\"\"\n return self._source\n\n @property\n @abstractmethod\n def profile(self) -> Optional[Any]:\n \"\"\"\n Optional summary statistics for the dataset, such as the number of rows in a table, the\n mean / median / std of each table column, etc.\n \"\"\"\n\n @property\n @abstractmethod\n def schema(self) -> Optional[Any]:\n \"\"\"\n Optional dataset schema, such as an instance of :py:class:`mlflow.types.Schema` representing\n the features and targets of the dataset.\n \"\"\"\n\n def _to_mlflow_entity(self) -> DatasetEntity:\n \"\"\"\n Returns:\n A `mlflow.entities.Dataset` instance representing the dataset.\n \"\"\"\n dataset_json = json.loads(self.to_json())\n return DatasetEntity(\n name=dataset_json[\"name\"],\n digest=dataset_json[\"digest\"],\n source_type=dataset_json[\"source_type\"],\n source=dataset_json[\"source\"],\n schema=dataset_json.get(\"schema\"),\n profile=dataset_json.get(\"profile\"),\n )\n", "path": "mlflow/data/dataset.py"}]} | 3,074 | 213 |
gh_patches_debug_24212 | rasdani/github-patches | git_diff | edgedb__edgedb-5454 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
InternalServerError on migration with new constraint
I figured this constraint probably wouldn't work but wanted to try it anyway. It gives me an ISE.
<!-- Please search existing issues to avoid creating duplicates. -->
<!--
For the EdgeDB Version: run `edgedb query 'select sys::get_version_as_str()'` from your project directory (or run `select sys::get_version_as_str();` in the EdgeDB interactive shell).
For the EdgeDB CLI Version: Run `edgedb --version` from anywhere
-->
- EdgeDB Version: 2.13+ad0eb0f
- EdgeDB CLI Version: 3.0.0-dev.946+93cec91
- OS Version: macOS 13.2.1
Steps to Reproduce:
1. Create a project with `edgedb project init`
2. Add the problematic schema to `dbschema/default.esdl`
3. Attempt to create migration with `edgedb migration create`
<!-- If the issue is about a query error, please also provide your schema -->
Schema:
```
module default {
type Cause {
required property name -> str;
required multi link charities -> Charity {
property weight -> float64;
constraint expression (sum(.charities@weight) <= 10);
}
}
type Charity {
required property name -> str;
}
}
```
</issue>
<code>
[start of edb/edgeql/utils.py]
1 #
2 # This source file is part of the EdgeDB open source project.
3 #
4 # Copyright 2015-present MagicStack Inc. and the EdgeDB authors.
5 #
6 # Licensed under the Apache License, Version 2.0 (the "License");
7 # you may not use this file except in compliance with the License.
8 # You may obtain a copy of the License at
9 #
10 # http://www.apache.org/licenses/LICENSE-2.0
11 #
12 # Unless required by applicable law or agreed to in writing, software
13 # distributed under the License is distributed on an "AS IS" BASIS,
14 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 # See the License for the specific language governing permissions and
16 # limitations under the License.
17 #
18
19
20 from __future__ import annotations
21
22 import copy
23 import itertools
24 from typing import *
25
26 from edb.common import ast
27 from edb.schema import schema as s_schema
28 from edb.schema import functions as s_func
29
30 from . import ast as qlast
31
32
33 FREE_SHAPE_EXPR = qlast.DetachedExpr(
34 expr=qlast.Path(
35 steps=[qlast.ObjectRef(module='std', name='FreeObject')],
36 ),
37 )
38
39
40 class ParameterInliner(ast.NodeTransformer):
41
42 def __init__(self, args_map: Mapping[str, qlast.Base]) -> None:
43 super().__init__()
44 self.args_map = args_map
45
46 def visit_Path(self, node: qlast.Path) -> qlast.Base:
47 if (len(node.steps) != 1 or
48 not isinstance(node.steps[0], qlast.ObjectRef)):
49 self.visit(node.steps[0])
50 return node
51
52 ref: qlast.ObjectRef = node.steps[0]
53 try:
54 arg = self.args_map[ref.name]
55 except KeyError:
56 return node
57
58 arg = copy.deepcopy(arg)
59 return arg
60
61
62 def inline_parameters(
63 ql_expr: qlast.Base,
64 args: Mapping[str, qlast.Base]
65 ) -> None:
66
67 inliner = ParameterInliner(args)
68 inliner.visit(ql_expr)
69
70
71 def index_parameters(
72 ql_args: List[qlast.Base],
73 *,
74 parameters: s_func.ParameterLikeList,
75 schema: s_schema.Schema
76 ) -> Dict[str, qlast.Base]:
77
78 result: Dict[str, qlast.Base] = {}
79 varargs: Optional[List[qlast.Expr]] = None
80 variadic = parameters.find_variadic(schema)
81 variadic_num = variadic.get_num(schema) if variadic else -1 # type: ignore
82
83 e: qlast.Expr
84 p: s_func.ParameterLike
85 for iter in itertools.zip_longest(
86 enumerate(ql_args), parameters.objects(schema), fillvalue=None
87 ):
88 (i, e), p = iter # type: ignore
89 if isinstance(e, qlast.SelectQuery):
90 e = e.result
91
92 if variadic and variadic_num == i:
93 assert varargs is None
94 varargs = []
95 result[p.get_parameter_name(schema)] = qlast.Array(
96 elements=varargs
97 )
98
99 if varargs is not None:
100 varargs.append(e)
101 else:
102 result[p.get_parameter_name(schema)] = e
103
104 return result
105
106
107 class AnchorInliner(ast.NodeTransformer):
108
109 def __init__(self, anchors: Mapping[str, qlast.Base]) -> None:
110 super().__init__()
111 self.anchors = anchors
112
113 def visit_Path(self, node: qlast.Path) -> qlast.Path:
114 if not node.steps:
115 return node
116
117 step0 = node.steps[0]
118
119 if isinstance(step0, qlast.Anchor):
120 node.steps[0] = self.anchors[step0.name] # type: ignore
121 elif isinstance(step0, qlast.ObjectRef) and step0.name in self.anchors:
122 node.steps[0] = self.anchors[step0.name] # type: ignore
123
124 return node
125
126
127 def inline_anchors(
128 ql_expr: qlast.Base,
129 anchors: Mapping[Any, qlast.Base]
130 ) -> None:
131
132 inliner = AnchorInliner(anchors)
133 inliner.visit(ql_expr)
134
135
136 def find_paths(ql: qlast.Base) -> List[qlast.Path]:
137 return ast.find_children(ql, qlast.Path)
138
139
140 def find_subject_ptrs(ast: qlast.Base) -> Set[str]:
141 ptrs = set()
142 for path in find_paths(ast):
143 if path.partial:
144 p = path.steps[0]
145 elif isinstance(path.steps[0], qlast.Subject) and len(path.steps) > 1:
146 p = path.steps[1]
147 else:
148 continue
149
150 if isinstance(p, qlast.Ptr):
151 ptrs.add(p.ptr.name)
152 return ptrs
153
154
155 def subject_paths_substitute(
156 ast: qlast.Base_T,
157 subject_ptrs: Dict[str, qlast.Expr],
158 ) -> qlast.Base_T:
159 ast = copy.deepcopy(ast)
160 for path in find_paths(ast):
161 if path.partial and isinstance(path.steps[0], qlast.Ptr):
162 path.steps[0] = subject_paths_substitute(
163 subject_ptrs[path.steps[0].ptr.name],
164 subject_ptrs,
165 )
166 elif (
167 isinstance(path.steps[0], qlast.Subject)
168 and len(path.steps)
169 and isinstance(path.steps[1], qlast.Ptr)
170 ):
171 path.steps[0:2] = [subject_paths_substitute(
172 subject_ptrs[path.steps[1].ptr.name],
173 subject_ptrs,
174 )]
175 return ast
176
177
178 def subject_substitute(
179 ast: qlast.Base_T, new_subject: qlast.Expr) -> qlast.Base_T:
180 ast = copy.deepcopy(ast)
181 for path in find_paths(ast):
182 if isinstance(path.steps[0], qlast.Subject):
183 path.steps[0] = new_subject
184 return ast
185
186
187 def contains_dml(ql_expr: qlast.Base) -> bool:
188 """Check whether a expression contains any DML in a subtree."""
189 # If this ends up being a perf problem, we can use a visitor
190 # directly and cache.
191 dml_types = (qlast.InsertQuery, qlast.UpdateQuery, qlast.DeleteQuery)
192 if isinstance(ql_expr, dml_types):
193 return True
194
195 res = ast.find_children(ql_expr, qlast.Query,
196 lambda x: isinstance(x, dml_types),
197 terminate_early=True)
198
199 return bool(res)
200
[end of edb/edgeql/utils.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/edb/edgeql/utils.py b/edb/edgeql/utils.py
--- a/edb/edgeql/utils.py
+++ b/edb/edgeql/utils.py
@@ -23,6 +23,7 @@
import itertools
from typing import *
+from edb import errors
from edb.common import ast
from edb.schema import schema as s_schema
from edb.schema import functions as s_func
@@ -80,10 +81,21 @@
variadic = parameters.find_variadic(schema)
variadic_num = variadic.get_num(schema) if variadic else -1 # type: ignore
+ params = parameters.objects(schema)
+
+ if not variadic and len(ql_args) > len(params):
+ # In error message we discount the implicit __subject__ param.
+ raise errors.SchemaDefinitionError(
+ f'Expected {len(params) - 1} arguments, but found '
+ f'{len(ql_args) - 1}',
+ context=ql_args[-1].context,
+ details='Did you mean to use ON (...) for specifying the subject?',
+ )
+
e: qlast.Expr
p: s_func.ParameterLike
for iter in itertools.zip_longest(
- enumerate(ql_args), parameters.objects(schema), fillvalue=None
+ enumerate(ql_args), params, fillvalue=None
):
(i, e), p = iter # type: ignore
if isinstance(e, qlast.SelectQuery):
| {"golden_diff": "diff --git a/edb/edgeql/utils.py b/edb/edgeql/utils.py\n--- a/edb/edgeql/utils.py\n+++ b/edb/edgeql/utils.py\n@@ -23,6 +23,7 @@\n import itertools\n from typing import *\n \n+from edb import errors\n from edb.common import ast\n from edb.schema import schema as s_schema\n from edb.schema import functions as s_func\n@@ -80,10 +81,21 @@\n variadic = parameters.find_variadic(schema)\n variadic_num = variadic.get_num(schema) if variadic else -1 # type: ignore\n \n+ params = parameters.objects(schema)\n+\n+ if not variadic and len(ql_args) > len(params):\n+ # In error message we discount the implicit __subject__ param.\n+ raise errors.SchemaDefinitionError(\n+ f'Expected {len(params) - 1} arguments, but found '\n+ f'{len(ql_args) - 1}',\n+ context=ql_args[-1].context,\n+ details='Did you mean to use ON (...) for specifying the subject?',\n+ )\n+\n e: qlast.Expr\n p: s_func.ParameterLike\n for iter in itertools.zip_longest(\n- enumerate(ql_args), parameters.objects(schema), fillvalue=None\n+ enumerate(ql_args), params, fillvalue=None\n ):\n (i, e), p = iter # type: ignore\n if isinstance(e, qlast.SelectQuery):\n", "issue": "InternalServerError on migration with new constraint\nI figured this constraint probably wouldn't work but wanted to try it anyway. It gives me an ISE.\r\n<!-- Please search existing issues to avoid creating duplicates. -->\r\n\r\n<!--\r\nFor the EdgeDB Version: run `edgedb query 'select sys::get_version_as_str()'` from your project directory (or run `select sys::get_version_as_str();` in the EdgeDB interactive shell).\r\nFor the EdgeDB CLI Version: Run `edgedb --version` from anywhere\r\n-->\r\n\r\n- EdgeDB Version: 2.13+ad0eb0f\r\n- EdgeDB CLI Version: 3.0.0-dev.946+93cec91\r\n- OS Version: macOS 13.2.1\r\n\r\nSteps to Reproduce:\r\n\r\n1. Create a project with `edgedb project init`\r\n2. Add the problematic schema to `dbschema/default.esdl`\r\n3. Attempt to create migration with `edgedb migration create`\r\n\r\n<!-- If the issue is about a query error, please also provide your schema -->\r\n\r\nSchema:\r\n\r\n```\r\nmodule default {\r\n type Cause {\r\n required property name -> str;\r\n required multi link charities -> Charity {\r\n property weight -> float64;\r\n constraint expression (sum(.charities@weight) <= 10);\r\n }\r\n }\r\n type Charity {\r\n required property name -> str;\r\n }\r\n}\r\n```\n", "before_files": [{"content": "#\n# This source file is part of the EdgeDB open source project.\n#\n# Copyright 2015-present MagicStack Inc. and the EdgeDB authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\nfrom __future__ import annotations\n\nimport copy\nimport itertools\nfrom typing import *\n\nfrom edb.common import ast\nfrom edb.schema import schema as s_schema\nfrom edb.schema import functions as s_func\n\nfrom . 
import ast as qlast\n\n\nFREE_SHAPE_EXPR = qlast.DetachedExpr(\n expr=qlast.Path(\n steps=[qlast.ObjectRef(module='std', name='FreeObject')],\n ),\n)\n\n\nclass ParameterInliner(ast.NodeTransformer):\n\n def __init__(self, args_map: Mapping[str, qlast.Base]) -> None:\n super().__init__()\n self.args_map = args_map\n\n def visit_Path(self, node: qlast.Path) -> qlast.Base:\n if (len(node.steps) != 1 or\n not isinstance(node.steps[0], qlast.ObjectRef)):\n self.visit(node.steps[0])\n return node\n\n ref: qlast.ObjectRef = node.steps[0]\n try:\n arg = self.args_map[ref.name]\n except KeyError:\n return node\n\n arg = copy.deepcopy(arg)\n return arg\n\n\ndef inline_parameters(\n ql_expr: qlast.Base,\n args: Mapping[str, qlast.Base]\n) -> None:\n\n inliner = ParameterInliner(args)\n inliner.visit(ql_expr)\n\n\ndef index_parameters(\n ql_args: List[qlast.Base],\n *,\n parameters: s_func.ParameterLikeList,\n schema: s_schema.Schema\n) -> Dict[str, qlast.Base]:\n\n result: Dict[str, qlast.Base] = {}\n varargs: Optional[List[qlast.Expr]] = None\n variadic = parameters.find_variadic(schema)\n variadic_num = variadic.get_num(schema) if variadic else -1 # type: ignore\n\n e: qlast.Expr\n p: s_func.ParameterLike\n for iter in itertools.zip_longest(\n enumerate(ql_args), parameters.objects(schema), fillvalue=None\n ):\n (i, e), p = iter # type: ignore\n if isinstance(e, qlast.SelectQuery):\n e = e.result\n\n if variadic and variadic_num == i:\n assert varargs is None\n varargs = []\n result[p.get_parameter_name(schema)] = qlast.Array(\n elements=varargs\n )\n\n if varargs is not None:\n varargs.append(e)\n else:\n result[p.get_parameter_name(schema)] = e\n\n return result\n\n\nclass AnchorInliner(ast.NodeTransformer):\n\n def __init__(self, anchors: Mapping[str, qlast.Base]) -> None:\n super().__init__()\n self.anchors = anchors\n\n def visit_Path(self, node: qlast.Path) -> qlast.Path:\n if not node.steps:\n return node\n\n step0 = node.steps[0]\n\n if isinstance(step0, qlast.Anchor):\n node.steps[0] = self.anchors[step0.name] # type: ignore\n elif isinstance(step0, qlast.ObjectRef) and step0.name in self.anchors:\n node.steps[0] = self.anchors[step0.name] # type: ignore\n\n return node\n\n\ndef inline_anchors(\n ql_expr: qlast.Base,\n anchors: Mapping[Any, qlast.Base]\n) -> None:\n\n inliner = AnchorInliner(anchors)\n inliner.visit(ql_expr)\n\n\ndef find_paths(ql: qlast.Base) -> List[qlast.Path]:\n return ast.find_children(ql, qlast.Path)\n\n\ndef find_subject_ptrs(ast: qlast.Base) -> Set[str]:\n ptrs = set()\n for path in find_paths(ast):\n if path.partial:\n p = path.steps[0]\n elif isinstance(path.steps[0], qlast.Subject) and len(path.steps) > 1:\n p = path.steps[1]\n else:\n continue\n\n if isinstance(p, qlast.Ptr):\n ptrs.add(p.ptr.name)\n return ptrs\n\n\ndef subject_paths_substitute(\n ast: qlast.Base_T,\n subject_ptrs: Dict[str, qlast.Expr],\n) -> qlast.Base_T:\n ast = copy.deepcopy(ast)\n for path in find_paths(ast):\n if path.partial and isinstance(path.steps[0], qlast.Ptr):\n path.steps[0] = subject_paths_substitute(\n subject_ptrs[path.steps[0].ptr.name],\n subject_ptrs,\n )\n elif (\n isinstance(path.steps[0], qlast.Subject)\n and len(path.steps)\n and isinstance(path.steps[1], qlast.Ptr)\n ):\n path.steps[0:2] = [subject_paths_substitute(\n subject_ptrs[path.steps[1].ptr.name],\n subject_ptrs,\n )]\n return ast\n\n\ndef subject_substitute(\n ast: qlast.Base_T, new_subject: qlast.Expr) -> qlast.Base_T:\n ast = copy.deepcopy(ast)\n for path in find_paths(ast):\n if isinstance(path.steps[0], 
qlast.Subject):\n path.steps[0] = new_subject\n return ast\n\n\ndef contains_dml(ql_expr: qlast.Base) -> bool:\n \"\"\"Check whether a expression contains any DML in a subtree.\"\"\"\n # If this ends up being a perf problem, we can use a visitor\n # directly and cache.\n dml_types = (qlast.InsertQuery, qlast.UpdateQuery, qlast.DeleteQuery)\n if isinstance(ql_expr, dml_types):\n return True\n\n res = ast.find_children(ql_expr, qlast.Query,\n lambda x: isinstance(x, dml_types),\n terminate_early=True)\n\n return bool(res)\n", "path": "edb/edgeql/utils.py"}]} | 2,723 | 331 |
gh_patches_debug_20288 | rasdani/github-patches | git_diff | lk-geimfari__mimesis-926 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add UUID objects support for uuid()
- [x] Add parameter `as_object`
</issue>
<code>
[start of mimesis/providers/cryptographic.py]
1 # -*- coding: utf-8 -*-
2
3 """Cryptographic data provider."""
4
5 import hashlib
6 import secrets
7 from typing import Optional, Union
8 from uuid import UUID, uuid4
9
10 from mimesis.enums import Algorithm
11 from mimesis.providers.base import BaseProvider
12 from mimesis.providers.text import Text
13
14 __all__ = ['Cryptographic']
15
16
17 class Cryptographic(BaseProvider):
18 """Class that provides cryptographic data."""
19
20 def __init__(self, *args, **kwargs) -> None:
21 """Initialize attributes.
22
23 :param seed: Seed.
24 """
25 super().__init__(*args, **kwargs)
26 self.__words = Text('en')._data.get('words', {})
27
28 class Meta:
29 """Class for metadata."""
30
31 name = 'cryptographic'
32
33 @staticmethod
34 def uuid(as_object: bool = False) -> Union[UUID, str]:
35 """Generate random UUID4.
36
37 This method returns string by default,
38 but you can make it return uuid.UUID object using
39 parameter **as_object**
40
41 :param as_object: Returns uuid.UUID.
42 :return: UUID.
43 """
44 _uuid = uuid4()
45
46 if not as_object:
47 return str(_uuid)
48
49 return _uuid
50
51 def hash(self, algorithm: Algorithm = None) -> str: # noqa: A003
52 """Generate random hash.
53
54 To change hashing algorithm, pass parameter ``algorithm``
55 with needed value of the enum object :class:`~mimesis.enums.Algorithm`
56
57 :param algorithm: Enum object :class:`~mimesis.enums.Algorithm`.
58 :return: Hash.
59 :raises NonEnumerableError: When algorithm is unsupported.
60 """
61 key = self._validate_enum(algorithm, Algorithm)
62
63 if hasattr(hashlib, key):
64 fn = getattr(hashlib, key)
65 return fn(self.uuid().encode()).hexdigest() # type: ignore
66
67 @staticmethod
68 def token_bytes(entropy: int = 32) -> bytes:
69 """Generate byte string containing ``entropy`` bytes.
70
71 The string has ``entropy`` random bytes, each byte
72 converted to two hex digits.
73
74 .. warning:: Seed is not applicable to this method,
75 because of its cryptographic-safe nature.
76
77 :param entropy: Number of bytes (default: 32).
78 :return: Random bytes.
79 """
80 return secrets.token_bytes(entropy)
81
82 @staticmethod
83 def token_hex(entropy: int = 32) -> str:
84 """Return a random text string, in hexadecimal.
85
86 The string has *entropy* random bytes, each byte converted to two
87 hex digits. If *entropy* is ``None`` or not supplied, a reasonable
88 default is used.
89
90 .. warning:: Seed is not applicable to this method,
91 because of its cryptographic-safe nature.
92
93 :param entropy: Number of bytes (default: 32).
94 :return: Token.
95 """
96 return secrets.token_hex(entropy)
97
98 @staticmethod
99 def token_urlsafe(entropy: int = 32):
100 """Return a random URL-safe text string, in Base64 encoding.
101
102 The string has *entropy* random bytes. If *entropy* is ``None``
103 or not supplied, a reasonable default is used.
104
105 .. warning:: Seed is not applicable to this method,
106 because of its cryptographic-safe nature.
107
108 :param entropy: Number of bytes (default: 32).
109 :return: URL-safe token.
110 """
111 return secrets.token_urlsafe(entropy)
112
113 def mnemonic_phrase(self, length: int = 12,
114 separator: Optional[str] = None) -> str:
115 """Generate pseudo mnemonic phrase.
116
117 Please, keep in mind that this method generates
118 crypto-insecure values.
119
120 :param separator: Separator of phrases (Default is " ").
121 :param length: Number of words.
122 :return: Mnemonic phrase.
123 """
124 if not separator:
125 separator = ' '
126
127 words = self.__words['normal']
128 words_generator = (self.random.choice(words) for _ in range(length))
129 return '{}'.format(separator).join(words_generator)
130
[end of mimesis/providers/cryptographic.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mimesis/providers/cryptographic.py b/mimesis/providers/cryptographic.py
--- a/mimesis/providers/cryptographic.py
+++ b/mimesis/providers/cryptographic.py
@@ -38,6 +38,9 @@
but you can make it return uuid.UUID object using
parameter **as_object**
+ .. warning:: Seed is not applicable to this method,
+ because of its cryptographic-safe nature.
+
:param as_object: Returns uuid.UUID.
:return: UUID.
"""
@@ -54,6 +57,9 @@
To change hashing algorithm, pass parameter ``algorithm``
with needed value of the enum object :class:`~mimesis.enums.Algorithm`
+ .. warning:: Seed is not applicable to this method,
+ because of its cryptographic-safe nature.
+
:param algorithm: Enum object :class:`~mimesis.enums.Algorithm`.
:return: Hash.
:raises NonEnumerableError: When algorithm is unsupported.
| {"golden_diff": "diff --git a/mimesis/providers/cryptographic.py b/mimesis/providers/cryptographic.py\n--- a/mimesis/providers/cryptographic.py\n+++ b/mimesis/providers/cryptographic.py\n@@ -38,6 +38,9 @@\n but you can make it return uuid.UUID object using\n parameter **as_object**\n \n+ .. warning:: Seed is not applicable to this method,\n+ because of its cryptographic-safe nature.\n+\n :param as_object: Returns uuid.UUID.\n :return: UUID.\n \"\"\"\n@@ -54,6 +57,9 @@\n To change hashing algorithm, pass parameter ``algorithm``\n with needed value of the enum object :class:`~mimesis.enums.Algorithm`\n \n+ .. warning:: Seed is not applicable to this method,\n+ because of its cryptographic-safe nature.\n+\n :param algorithm: Enum object :class:`~mimesis.enums.Algorithm`.\n :return: Hash.\n :raises NonEnumerableError: When algorithm is unsupported.\n", "issue": "Add UUID objects support for uuid()\n- [x] Add parameter `as_object` \n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n\"\"\"Cryptographic data provider.\"\"\"\n\nimport hashlib\nimport secrets\nfrom typing import Optional, Union\nfrom uuid import UUID, uuid4\n\nfrom mimesis.enums import Algorithm\nfrom mimesis.providers.base import BaseProvider\nfrom mimesis.providers.text import Text\n\n__all__ = ['Cryptographic']\n\n\nclass Cryptographic(BaseProvider):\n \"\"\"Class that provides cryptographic data.\"\"\"\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"Initialize attributes.\n\n :param seed: Seed.\n \"\"\"\n super().__init__(*args, **kwargs)\n self.__words = Text('en')._data.get('words', {})\n\n class Meta:\n \"\"\"Class for metadata.\"\"\"\n\n name = 'cryptographic'\n\n @staticmethod\n def uuid(as_object: bool = False) -> Union[UUID, str]:\n \"\"\"Generate random UUID4.\n\n This method returns string by default,\n but you can make it return uuid.UUID object using\n parameter **as_object**\n\n :param as_object: Returns uuid.UUID.\n :return: UUID.\n \"\"\"\n _uuid = uuid4()\n\n if not as_object:\n return str(_uuid)\n\n return _uuid\n\n def hash(self, algorithm: Algorithm = None) -> str: # noqa: A003\n \"\"\"Generate random hash.\n\n To change hashing algorithm, pass parameter ``algorithm``\n with needed value of the enum object :class:`~mimesis.enums.Algorithm`\n\n :param algorithm: Enum object :class:`~mimesis.enums.Algorithm`.\n :return: Hash.\n :raises NonEnumerableError: When algorithm is unsupported.\n \"\"\"\n key = self._validate_enum(algorithm, Algorithm)\n\n if hasattr(hashlib, key):\n fn = getattr(hashlib, key)\n return fn(self.uuid().encode()).hexdigest() # type: ignore\n\n @staticmethod\n def token_bytes(entropy: int = 32) -> bytes:\n \"\"\"Generate byte string containing ``entropy`` bytes.\n\n The string has ``entropy`` random bytes, each byte\n converted to two hex digits.\n\n .. warning:: Seed is not applicable to this method,\n because of its cryptographic-safe nature.\n\n :param entropy: Number of bytes (default: 32).\n :return: Random bytes.\n \"\"\"\n return secrets.token_bytes(entropy)\n\n @staticmethod\n def token_hex(entropy: int = 32) -> str:\n \"\"\"Return a random text string, in hexadecimal.\n\n The string has *entropy* random bytes, each byte converted to two\n hex digits. If *entropy* is ``None`` or not supplied, a reasonable\n default is used.\n\n .. 
warning:: Seed is not applicable to this method,\n because of its cryptographic-safe nature.\n\n :param entropy: Number of bytes (default: 32).\n :return: Token.\n \"\"\"\n return secrets.token_hex(entropy)\n\n @staticmethod\n def token_urlsafe(entropy: int = 32):\n \"\"\"Return a random URL-safe text string, in Base64 encoding.\n\n The string has *entropy* random bytes. If *entropy* is ``None``\n or not supplied, a reasonable default is used.\n\n .. warning:: Seed is not applicable to this method,\n because of its cryptographic-safe nature.\n\n :param entropy: Number of bytes (default: 32).\n :return: URL-safe token.\n \"\"\"\n return secrets.token_urlsafe(entropy)\n\n def mnemonic_phrase(self, length: int = 12,\n separator: Optional[str] = None) -> str:\n \"\"\"Generate pseudo mnemonic phrase.\n\n Please, keep in mind that this method generates\n crypto-insecure values.\n\n :param separator: Separator of phrases (Default is \" \").\n :param length: Number of words.\n :return: Mnemonic phrase.\n \"\"\"\n if not separator:\n separator = ' '\n\n words = self.__words['normal']\n words_generator = (self.random.choice(words) for _ in range(length))\n return '{}'.format(separator).join(words_generator)\n", "path": "mimesis/providers/cryptographic.py"}]} | 1,742 | 216 |
gh_patches_debug_21786 | rasdani/github-patches | git_diff | pre-commit__pre-commit-310 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Non-ascii prints in error handler without tty cause stacktrace
```
23:00:13 style runtests: commands[0] | pre-commit run --all-files
23:00:13 [INFO] Installing environment for [email protected]:mirrors/pre-commit/mirrors-jshint.
23:00:13 [INFO] Once installed this environment will be reused.
23:00:13 [INFO] This may take a few minutes...
23:01:33 Traceback (most recent call last):
23:01:33 File ".tox/style/bin/pre-commit", line 11, in <module>
23:01:33 sys.exit(main())
23:01:33 File ".../.tox/style/local/lib/python2.7/site-packages/pre_commit/main.py", line 157, in main
23:01:33 'Command {0} failed to exit with a returncode'.format(args.command)
23:01:33 File "/usr/lib64/python2.7/contextlib.py", line 35, in __exit__
23:01:33 self.gen.throw(type, value, traceback)
23:01:33 File ".../.tox/style/local/lib/python2.7/site-packages/pre_commit/error_handler.py", line 41, in error_handler
23:01:33 traceback.format_exc(),
23:01:33 File ".../.tox/style/local/lib/python2.7/site-packages/pre_commit/error_handler.py", line 21, in _log_and_exit
23:01:33 print_fn(error_msg)
23:01:33 UnicodeEncodeError: 'ascii' codec can't encode characters in position 735-737: ordinal not in range(128)
```
</issue>
<code>
[start of pre_commit/error_handler.py]
1 from __future__ import absolute_import
2 from __future__ import print_function
3 from __future__ import unicode_literals
4
5 import contextlib
6 import io
7 import os.path
8 import traceback
9
10 from pre_commit.errors import FatalError
11 from pre_commit.store import Store
12
13
14 # For testing purposes
15 class PreCommitSystemExit(SystemExit):
16 pass
17
18
19 def _log_and_exit(msg, exc, formatted, print_fn=print):
20 error_msg = '{0}: {1}: {2}'.format(msg, type(exc).__name__, exc)
21 print_fn(error_msg)
22 print_fn('Check the log at ~/.pre-commit/pre-commit.log')
23 store = Store()
24 store.require_created()
25 with io.open(os.path.join(store.directory, 'pre-commit.log'), 'w') as log:
26 log.write(error_msg + '\n')
27 log.write(formatted + '\n')
28 raise PreCommitSystemExit(1)
29
30
31 @contextlib.contextmanager
32 def error_handler():
33 try:
34 yield
35 except FatalError as e:
36 _log_and_exit('An error has occurred', e, traceback.format_exc())
37 except Exception as e:
38 _log_and_exit(
39 'An unexpected error has occurred',
40 e,
41 traceback.format_exc(),
42 )
43
[end of pre_commit/error_handler.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pre_commit/error_handler.py b/pre_commit/error_handler.py
--- a/pre_commit/error_handler.py
+++ b/pre_commit/error_handler.py
@@ -7,7 +7,9 @@
import os.path
import traceback
+from pre_commit import five
from pre_commit.errors import FatalError
+from pre_commit.output import sys_stdout_write_wrapper
from pre_commit.store import Store
@@ -16,15 +18,15 @@
pass
-def _log_and_exit(msg, exc, formatted, print_fn=print):
- error_msg = '{0}: {1}: {2}'.format(msg, type(exc).__name__, exc)
- print_fn(error_msg)
- print_fn('Check the log at ~/.pre-commit/pre-commit.log')
+def _log_and_exit(msg, exc, formatted, write_fn=sys_stdout_write_wrapper):
+ error_msg = '{0}: {1}: {2}\n'.format(msg, type(exc).__name__, exc)
+ write_fn(error_msg)
+ write_fn('Check the log at ~/.pre-commit/pre-commit.log\n')
store = Store()
store.require_created()
- with io.open(os.path.join(store.directory, 'pre-commit.log'), 'w') as log:
- log.write(error_msg + '\n')
- log.write(formatted + '\n')
+ with io.open(os.path.join(store.directory, 'pre-commit.log'), 'wb') as log:
+ log.write(five.to_bytes(error_msg))
+ log.write(five.to_bytes(formatted) + b'\n')
raise PreCommitSystemExit(1)
| {"golden_diff": "diff --git a/pre_commit/error_handler.py b/pre_commit/error_handler.py\n--- a/pre_commit/error_handler.py\n+++ b/pre_commit/error_handler.py\n@@ -7,7 +7,9 @@\n import os.path\n import traceback\n \n+from pre_commit import five\n from pre_commit.errors import FatalError\n+from pre_commit.output import sys_stdout_write_wrapper\n from pre_commit.store import Store\n \n \n@@ -16,15 +18,15 @@\n pass\n \n \n-def _log_and_exit(msg, exc, formatted, print_fn=print):\n- error_msg = '{0}: {1}: {2}'.format(msg, type(exc).__name__, exc)\n- print_fn(error_msg)\n- print_fn('Check the log at ~/.pre-commit/pre-commit.log')\n+def _log_and_exit(msg, exc, formatted, write_fn=sys_stdout_write_wrapper):\n+ error_msg = '{0}: {1}: {2}\\n'.format(msg, type(exc).__name__, exc)\n+ write_fn(error_msg)\n+ write_fn('Check the log at ~/.pre-commit/pre-commit.log\\n')\n store = Store()\n store.require_created()\n- with io.open(os.path.join(store.directory, 'pre-commit.log'), 'w') as log:\n- log.write(error_msg + '\\n')\n- log.write(formatted + '\\n')\n+ with io.open(os.path.join(store.directory, 'pre-commit.log'), 'wb') as log:\n+ log.write(five.to_bytes(error_msg))\n+ log.write(five.to_bytes(formatted) + b'\\n')\n raise PreCommitSystemExit(1)\n", "issue": "Non-ascii prints in error handler without tty cause stacktrace\n```\n23:00:13 style runtests: commands[0] | pre-commit run --all-files\n23:00:13 [INFO] Installing environment for [email protected]:mirrors/pre-commit/mirrors-jshint.\n23:00:13 [INFO] Once installed this environment will be reused.\n23:00:13 [INFO] This may take a few minutes...\n23:01:33 Traceback (most recent call last):\n23:01:33 File \".tox/style/bin/pre-commit\", line 11, in <module>\n23:01:33 sys.exit(main())\n23:01:33 File \".../.tox/style/local/lib/python2.7/site-packages/pre_commit/main.py\", line 157, in main\n23:01:33 'Command {0} failed to exit with a returncode'.format(args.command)\n23:01:33 File \"/usr/lib64/python2.7/contextlib.py\", line 35, in __exit__\n23:01:33 self.gen.throw(type, value, traceback)\n23:01:33 File \".../.tox/style/local/lib/python2.7/site-packages/pre_commit/error_handler.py\", line 41, in error_handler\n23:01:33 traceback.format_exc(),\n23:01:33 File \".../.tox/style/local/lib/python2.7/site-packages/pre_commit/error_handler.py\", line 21, in _log_and_exit\n23:01:33 print_fn(error_msg)\n23:01:33 UnicodeEncodeError: 'ascii' codec can't encode characters in position 735-737: ordinal not in range(128)\n```\n\n", "before_files": [{"content": "from __future__ import absolute_import\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport contextlib\nimport io\nimport os.path\nimport traceback\n\nfrom pre_commit.errors import FatalError\nfrom pre_commit.store import Store\n\n\n# For testing purposes\nclass PreCommitSystemExit(SystemExit):\n pass\n\n\ndef _log_and_exit(msg, exc, formatted, print_fn=print):\n error_msg = '{0}: {1}: {2}'.format(msg, type(exc).__name__, exc)\n print_fn(error_msg)\n print_fn('Check the log at ~/.pre-commit/pre-commit.log')\n store = Store()\n store.require_created()\n with io.open(os.path.join(store.directory, 'pre-commit.log'), 'w') as log:\n log.write(error_msg + '\\n')\n log.write(formatted + '\\n')\n raise PreCommitSystemExit(1)\n\n\[email protected]\ndef error_handler():\n try:\n yield\n except FatalError as e:\n _log_and_exit('An error has occurred', e, traceback.format_exc())\n except Exception as e:\n _log_and_exit(\n 'An unexpected error has occurred',\n e,\n traceback.format_exc(),\n 
)\n", "path": "pre_commit/error_handler.py"}]} | 1,301 | 343 |
gh_patches_debug_7181 | rasdani/github-patches | git_diff | vega__altair-3074 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Include a suggestion to update frontend (Jupyterlab, ...) in mimetype error
Follow-up that comes out of #2585. Raised by @joelostblom:
> do you think we need to communicate the minimum version of JuptyerLab that support Altair 5 somewhere? I am thinking ideally directly in the error message if possible, but otherwise at least in the docs and release notes, what do you all think?
</issue>
<code>
[start of altair/vegalite/v5/display.py]
1 import os
2
3 from ...utils.mimebundle import spec_to_mimebundle
4 from ..display import Displayable
5 from ..display import default_renderer_base
6 from ..display import json_renderer_base
7 from ..display import RendererRegistry
8 from ..display import HTMLRenderer
9
10 from .schema import SCHEMA_VERSION
11
12 VEGALITE_VERSION = SCHEMA_VERSION.lstrip("v")
13 VEGA_VERSION = "5"
14 VEGAEMBED_VERSION = "6"
15
16
17 # ==============================================================================
18 # VegaLite v5 renderer logic
19 # ==============================================================================
20
21
22 # The MIME type for Vega-Lite 5.x releases.
23 VEGALITE_MIME_TYPE = "application/vnd.vegalite.v5+json" # type: str
24
25 # The entry point group that can be used by other packages to declare other
26 # renderers that will be auto-detected. Explicit registration is also
27 # allowed by the PluginRegistery API.
28 ENTRY_POINT_GROUP = "altair.vegalite.v5.renderer" # type: str
29
30 # The display message when rendering fails
31 DEFAULT_DISPLAY = """\
32 <VegaLite 5 object>
33
34 If you see this message, it means the renderer has not been properly enabled
35 for the frontend that you are using. For more information, see
36 https://altair-viz.github.io/user_guide/display_frontends.html#troubleshooting
37 """
38
39 renderers = RendererRegistry(entry_point_group=ENTRY_POINT_GROUP)
40
41 here = os.path.dirname(os.path.realpath(__file__))
42
43
44 def mimetype_renderer(spec, **metadata):
45 return default_renderer_base(spec, VEGALITE_MIME_TYPE, DEFAULT_DISPLAY, **metadata)
46
47
48 def json_renderer(spec, **metadata):
49 return json_renderer_base(spec, DEFAULT_DISPLAY, **metadata)
50
51
52 def png_renderer(spec, **metadata):
53 return spec_to_mimebundle(
54 spec,
55 format="png",
56 mode="vega-lite",
57 vega_version=VEGA_VERSION,
58 vegaembed_version=VEGAEMBED_VERSION,
59 vegalite_version=VEGALITE_VERSION,
60 **metadata,
61 )
62
63
64 def svg_renderer(spec, **metadata):
65 return spec_to_mimebundle(
66 spec,
67 format="svg",
68 mode="vega-lite",
69 vega_version=VEGA_VERSION,
70 vegaembed_version=VEGAEMBED_VERSION,
71 vegalite_version=VEGALITE_VERSION,
72 **metadata,
73 )
74
75
76 html_renderer = HTMLRenderer(
77 mode="vega-lite",
78 template="universal",
79 vega_version=VEGA_VERSION,
80 vegaembed_version=VEGAEMBED_VERSION,
81 vegalite_version=VEGALITE_VERSION,
82 )
83
84 renderers.register("default", html_renderer)
85 renderers.register("html", html_renderer)
86 renderers.register("colab", html_renderer)
87 renderers.register("kaggle", html_renderer)
88 renderers.register("zeppelin", html_renderer)
89 renderers.register("mimetype", mimetype_renderer)
90 renderers.register("jupyterlab", mimetype_renderer)
91 renderers.register("nteract", mimetype_renderer)
92 renderers.register("json", json_renderer)
93 renderers.register("png", png_renderer)
94 renderers.register("svg", svg_renderer)
95 renderers.enable("default")
96
97
98 class VegaLite(Displayable):
99 """An IPython/Jupyter display class for rendering VegaLite 5."""
100
101 renderers = renderers
102 schema_path = (__name__, "schema/vega-lite-schema.json")
103
104
105 def vegalite(spec, validate=True):
106 """Render and optionally validate a VegaLite 5 spec.
107
108 This will use the currently enabled renderer to render the spec.
109
110 Parameters
111 ==========
112 spec: dict
113 A fully compliant VegaLite 5 spec, with the data portion fully processed.
114 validate: bool
115 Should the spec be validated against the VegaLite 5 schema?
116 """
117 from IPython.display import display
118
119 display(VegaLite(spec, validate=validate))
120
[end of altair/vegalite/v5/display.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/altair/vegalite/v5/display.py b/altair/vegalite/v5/display.py
--- a/altair/vegalite/v5/display.py
+++ b/altair/vegalite/v5/display.py
@@ -28,8 +28,8 @@
ENTRY_POINT_GROUP = "altair.vegalite.v5.renderer" # type: str
# The display message when rendering fails
-DEFAULT_DISPLAY = """\
-<VegaLite 5 object>
+DEFAULT_DISPLAY = f"""\
+<VegaLite {VEGALITE_VERSION.split('.')[0]} object>
If you see this message, it means the renderer has not been properly enabled
for the frontend that you are using. For more information, see
| {"golden_diff": "diff --git a/altair/vegalite/v5/display.py b/altair/vegalite/v5/display.py\n--- a/altair/vegalite/v5/display.py\n+++ b/altair/vegalite/v5/display.py\n@@ -28,8 +28,8 @@\n ENTRY_POINT_GROUP = \"altair.vegalite.v5.renderer\" # type: str\n \n # The display message when rendering fails\n-DEFAULT_DISPLAY = \"\"\"\\\n-<VegaLite 5 object>\n+DEFAULT_DISPLAY = f\"\"\"\\\n+<VegaLite {VEGALITE_VERSION.split('.')[0]} object>\n \n If you see this message, it means the renderer has not been properly enabled\n for the frontend that you are using. For more information, see\n", "issue": "Include a suggestion to update frontend (Jupyterlab, ...) in mimetype error\nFollow-up that comes out of #2585. Raised by @joelostblom:\r\n\r\n> do you think we need to communicate the minimum version of JuptyerLab that support Altair 5 somewhere? I am thinking ideally directly in the error message if possible, but otherwise at least in the docs and release notes, what do you all think?\n", "before_files": [{"content": "import os\n\nfrom ...utils.mimebundle import spec_to_mimebundle\nfrom ..display import Displayable\nfrom ..display import default_renderer_base\nfrom ..display import json_renderer_base\nfrom ..display import RendererRegistry\nfrom ..display import HTMLRenderer\n\nfrom .schema import SCHEMA_VERSION\n\nVEGALITE_VERSION = SCHEMA_VERSION.lstrip(\"v\")\nVEGA_VERSION = \"5\"\nVEGAEMBED_VERSION = \"6\"\n\n\n# ==============================================================================\n# VegaLite v5 renderer logic\n# ==============================================================================\n\n\n# The MIME type for Vega-Lite 5.x releases.\nVEGALITE_MIME_TYPE = \"application/vnd.vegalite.v5+json\" # type: str\n\n# The entry point group that can be used by other packages to declare other\n# renderers that will be auto-detected. Explicit registration is also\n# allowed by the PluginRegistery API.\nENTRY_POINT_GROUP = \"altair.vegalite.v5.renderer\" # type: str\n\n# The display message when rendering fails\nDEFAULT_DISPLAY = \"\"\"\\\n<VegaLite 5 object>\n\nIf you see this message, it means the renderer has not been properly enabled\nfor the frontend that you are using. 
For more information, see\nhttps://altair-viz.github.io/user_guide/display_frontends.html#troubleshooting\n\"\"\"\n\nrenderers = RendererRegistry(entry_point_group=ENTRY_POINT_GROUP)\n\nhere = os.path.dirname(os.path.realpath(__file__))\n\n\ndef mimetype_renderer(spec, **metadata):\n return default_renderer_base(spec, VEGALITE_MIME_TYPE, DEFAULT_DISPLAY, **metadata)\n\n\ndef json_renderer(spec, **metadata):\n return json_renderer_base(spec, DEFAULT_DISPLAY, **metadata)\n\n\ndef png_renderer(spec, **metadata):\n return spec_to_mimebundle(\n spec,\n format=\"png\",\n mode=\"vega-lite\",\n vega_version=VEGA_VERSION,\n vegaembed_version=VEGAEMBED_VERSION,\n vegalite_version=VEGALITE_VERSION,\n **metadata,\n )\n\n\ndef svg_renderer(spec, **metadata):\n return spec_to_mimebundle(\n spec,\n format=\"svg\",\n mode=\"vega-lite\",\n vega_version=VEGA_VERSION,\n vegaembed_version=VEGAEMBED_VERSION,\n vegalite_version=VEGALITE_VERSION,\n **metadata,\n )\n\n\nhtml_renderer = HTMLRenderer(\n mode=\"vega-lite\",\n template=\"universal\",\n vega_version=VEGA_VERSION,\n vegaembed_version=VEGAEMBED_VERSION,\n vegalite_version=VEGALITE_VERSION,\n)\n\nrenderers.register(\"default\", html_renderer)\nrenderers.register(\"html\", html_renderer)\nrenderers.register(\"colab\", html_renderer)\nrenderers.register(\"kaggle\", html_renderer)\nrenderers.register(\"zeppelin\", html_renderer)\nrenderers.register(\"mimetype\", mimetype_renderer)\nrenderers.register(\"jupyterlab\", mimetype_renderer)\nrenderers.register(\"nteract\", mimetype_renderer)\nrenderers.register(\"json\", json_renderer)\nrenderers.register(\"png\", png_renderer)\nrenderers.register(\"svg\", svg_renderer)\nrenderers.enable(\"default\")\n\n\nclass VegaLite(Displayable):\n \"\"\"An IPython/Jupyter display class for rendering VegaLite 5.\"\"\"\n\n renderers = renderers\n schema_path = (__name__, \"schema/vega-lite-schema.json\")\n\n\ndef vegalite(spec, validate=True):\n \"\"\"Render and optionally validate a VegaLite 5 spec.\n\n This will use the currently enabled renderer to render the spec.\n\n Parameters\n ==========\n spec: dict\n A fully compliant VegaLite 5 spec, with the data portion fully processed.\n validate: bool\n Should the spec be validated against the VegaLite 5 schema?\n \"\"\"\n from IPython.display import display\n\n display(VegaLite(spec, validate=validate))\n", "path": "altair/vegalite/v5/display.py"}]} | 1,719 | 167 |
gh_patches_debug_7111 | rasdani/github-patches | git_diff | pwndbg__pwndbg-1757 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
"'function' object has no attribute '_reset'" in `pwndbg/pwndbg/color/syntax_highlight.py`
<!--
Before reporting a new issue, make sure that we do not have any duplicates already open.
If there is one it might be good to take part in the discussion there.
Please make sure you have checked that the issue persists on LATEST pwndbg version.
Below is a template for BUG REPORTS.
Don't include it if this is a FEATURE REQUEST.
-->
### Description
using latest dev branch, with `set syntax-highlight-style solarized-light` param, above error will be thrown
```bash
$ gdb ~/matrix-matrix-multiply/build/src/dgemm -ex 'start' -ex ''
pwndbg: loaded 141 pwndbg commands and 42 shell commands. Type pwndbg [--shell | --all] [filter] for a list.
pwndbg: created $rebase, $ida GDB functions (can be used with print/break)
Traceback (most recent call last):
File "/home/czg/pwndbg/pwndbg/gdblib/config.py", line 93, in __get_set_string_gdb_gte_9
trigger()
File "/home/czg/pwndbg/pwndbg/color/syntax_highlight.py", line 37, in check_style
get_highlight_source._reset()
^^^^^^^^^^^^^^^^^^^^^^^^^^^
AttributeError: 'function' object has no attribute '_reset'
/home/czg/.gdbinit:19: Error in sourced command file:
Error occurred in Python: 'function' object has no attribute '_reset'
```
when I read `$ git log -p pwndbg/commands/context.py` I found nothing about `_reset`, but the error only exists after I upgrade python from `3.10` to `3.11.3` and pwndbg recently.
<!--
Briefly describe the problem you are having in a few paragraphs.
-->
<!--
### Steps to reproduce
-->
<!--
What do we have to do to reproduce the problem?
If this is connected to particular C/asm code,
please provide the smallest C code that reproduces the issue.
-->
<!--
### My setup
-->
<!--
Show us your gdb/python/pwndbg/OS/IDA Pro version (depending on your case).
NOTE: We are currently supporting only Ubuntu installations.
It is known that pwndbg is not fully working e.g. on Arch Linux (the heap stuff is not working there).
If you would like to change this situation - help us improving pwndbg and supporting other distros!
This can be displayed in pwndbg through `version` command.
If it is somehow unavailable, use:
* `show version` - for gdb
* `py import sys; print(sys.version)` - for python
* pwndbg version/git commit id
-->
</issue>
<code>
[start of pwndbg/color/syntax_highlight.py]
1 import os.path
2 import re
3 from typing import Any
4 from typing import Dict
5
6 import pygments
7 import pygments.formatters
8 import pygments.lexers
9
10 import pwndbg.gdblib.config
11 from pwndbg.color import disable_colors
12 from pwndbg.color import message
13 from pwndbg.color import theme
14 from pwndbg.color.lexer import PwntoolsLexer
15
16 pwndbg.gdblib.config.add_param("syntax-highlight", True, "Source code / assembly syntax highlight")
17 style = theme.add_param(
18 "syntax-highlight-style",
19 "monokai",
20 "Source code / assembly syntax highlight stylename of pygments module",
21 )
22
23 formatter = pygments.formatters.Terminal256Formatter(style=str(style))
24 pwntools_lexer = PwntoolsLexer()
25 lexer_cache: Dict[str, Any] = {}
26
27
28 @pwndbg.gdblib.config.trigger(style)
29 def check_style() -> None:
30 global formatter
31 try:
32 formatter = pygments.formatters.Terminal256Formatter(style=str(style))
33
34 # Reset the highlighted source cache
35 from pwndbg.commands.context import get_highlight_source
36
37 get_highlight_source._reset()
38 except pygments.util.ClassNotFound:
39 print(
40 message.warn(f"The pygment formatter style '{style}' is not found, restore to default")
41 )
42 style.revert_default()
43
44
45 def syntax_highlight(code, filename=".asm"):
46 # No syntax highlight if pygment is not installed
47 if disable_colors:
48 return code
49
50 filename = os.path.basename(filename)
51
52 lexer = lexer_cache.get(filename, None)
53
54 # If source code is asm, use our customized lexer.
55 # Note: We can not register our Lexer to pygments and use their APIs,
56 # since the pygment only search the lexers installed via setuptools.
57 if not lexer:
58 for glob_pat in PwntoolsLexer.filenames:
59 pat = "^" + glob_pat.replace(".", r"\.").replace("*", r".*") + "$"
60 if re.match(pat, filename):
61 lexer = pwntools_lexer
62 break
63
64 if not lexer:
65 try:
66 lexer = pygments.lexers.guess_lexer_for_filename(filename, code, stripnl=False)
67 except pygments.util.ClassNotFound:
68 # no lexer for this file or invalid style
69 pass
70
71 if lexer:
72 lexer_cache[filename] = lexer
73
74 code = pygments.highlight(code, lexer, formatter).rstrip()
75
76 return code
77
[end of pwndbg/color/syntax_highlight.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pwndbg/color/syntax_highlight.py b/pwndbg/color/syntax_highlight.py
--- a/pwndbg/color/syntax_highlight.py
+++ b/pwndbg/color/syntax_highlight.py
@@ -34,7 +34,7 @@
# Reset the highlighted source cache
from pwndbg.commands.context import get_highlight_source
- get_highlight_source._reset()
+ get_highlight_source.cache.clear()
except pygments.util.ClassNotFound:
print(
message.warn(f"The pygment formatter style '{style}' is not found, restore to default")
| {"golden_diff": "diff --git a/pwndbg/color/syntax_highlight.py b/pwndbg/color/syntax_highlight.py\n--- a/pwndbg/color/syntax_highlight.py\n+++ b/pwndbg/color/syntax_highlight.py\n@@ -34,7 +34,7 @@\n # Reset the highlighted source cache\n from pwndbg.commands.context import get_highlight_source\n \n- get_highlight_source._reset()\n+ get_highlight_source.cache.clear()\n except pygments.util.ClassNotFound:\n print(\n message.warn(f\"The pygment formatter style '{style}' is not found, restore to default\")\n", "issue": "\"'function' object has no attribute '_reset'\" in `pwndbg/pwndbg/color/syntax_highlight.py`\n<!--\r\nBefore reporting a new issue, make sure that we do not have any duplicates already open.\r\nIf there is one it might be good to take part in the discussion there.\r\n\r\nPlease make sure you have checked that the issue persists on LATEST pwndbg version.\r\n\r\nBelow is a template for BUG REPORTS.\r\nDon't include it if this is a FEATURE REQUEST.\r\n-->\r\n\r\n\r\n### Description\r\n\r\nusing latest dev branch, with `set syntax-highlight-style solarized-light` param, above error will be thrown\r\n\r\n```bash\r\n$ gdb ~/matrix-matrix-multiply/build/src/dgemm -ex 'start' -ex ''\r\npwndbg: loaded 141 pwndbg commands and 42 shell commands. Type pwndbg [--shell | --all] [filter] for a list.\r\npwndbg: created $rebase, $ida GDB functions (can be used with print/break)\r\nTraceback (most recent call last):\r\n File \"/home/czg/pwndbg/pwndbg/gdblib/config.py\", line 93, in __get_set_string_gdb_gte_9\r\n trigger()\r\n File \"/home/czg/pwndbg/pwndbg/color/syntax_highlight.py\", line 37, in check_style\r\n get_highlight_source._reset()\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\nAttributeError: 'function' object has no attribute '_reset'\r\n/home/czg/.gdbinit:19: Error in sourced command file:\r\nError occurred in Python: 'function' object has no attribute '_reset'\r\n```\r\n\r\nwhen I read `$ git log -p pwndbg/commands/context.py` I found nothing about `_reset`, but the error only exists after I upgrade python from `3.10` to `3.11.3` and pwndbg recently.\r\n\r\n<!--\r\nBriefly describe the problem you are having in a few paragraphs.\r\n-->\r\n\r\n<!--\r\n### Steps to reproduce\r\n-->\r\n\r\n<!--\r\nWhat do we have to do to reproduce the problem?\r\nIf this is connected to particular C/asm code, \r\nplease provide the smallest C code that reproduces the issue.\r\n-->\r\n\r\n<!--\r\n### My setup\r\n-->\r\n\r\n<!--\r\nShow us your gdb/python/pwndbg/OS/IDA Pro version (depending on your case).\r\n\r\nNOTE: We are currently supporting only Ubuntu installations.\r\nIt is known that pwndbg is not fully working e.g. 
on Arch Linux (the heap stuff is not working there).\r\nIf you would like to change this situation - help us improving pwndbg and supporting other distros!\r\n\r\nThis can be displayed in pwndbg through `version` command.\r\n\r\nIf it is somehow unavailable, use:\r\n* `show version` - for gdb\r\n* `py import sys; print(sys.version)` - for python\r\n* pwndbg version/git commit id\r\n-->\r\n\n", "before_files": [{"content": "import os.path\nimport re\nfrom typing import Any\nfrom typing import Dict\n\nimport pygments\nimport pygments.formatters\nimport pygments.lexers\n\nimport pwndbg.gdblib.config\nfrom pwndbg.color import disable_colors\nfrom pwndbg.color import message\nfrom pwndbg.color import theme\nfrom pwndbg.color.lexer import PwntoolsLexer\n\npwndbg.gdblib.config.add_param(\"syntax-highlight\", True, \"Source code / assembly syntax highlight\")\nstyle = theme.add_param(\n \"syntax-highlight-style\",\n \"monokai\",\n \"Source code / assembly syntax highlight stylename of pygments module\",\n)\n\nformatter = pygments.formatters.Terminal256Formatter(style=str(style))\npwntools_lexer = PwntoolsLexer()\nlexer_cache: Dict[str, Any] = {}\n\n\[email protected](style)\ndef check_style() -> None:\n global formatter\n try:\n formatter = pygments.formatters.Terminal256Formatter(style=str(style))\n\n # Reset the highlighted source cache\n from pwndbg.commands.context import get_highlight_source\n\n get_highlight_source._reset()\n except pygments.util.ClassNotFound:\n print(\n message.warn(f\"The pygment formatter style '{style}' is not found, restore to default\")\n )\n style.revert_default()\n\n\ndef syntax_highlight(code, filename=\".asm\"):\n # No syntax highlight if pygment is not installed\n if disable_colors:\n return code\n\n filename = os.path.basename(filename)\n\n lexer = lexer_cache.get(filename, None)\n\n # If source code is asm, use our customized lexer.\n # Note: We can not register our Lexer to pygments and use their APIs,\n # since the pygment only search the lexers installed via setuptools.\n if not lexer:\n for glob_pat in PwntoolsLexer.filenames:\n pat = \"^\" + glob_pat.replace(\".\", r\"\\.\").replace(\"*\", r\".*\") + \"$\"\n if re.match(pat, filename):\n lexer = pwntools_lexer\n break\n\n if not lexer:\n try:\n lexer = pygments.lexers.guess_lexer_for_filename(filename, code, stripnl=False)\n except pygments.util.ClassNotFound:\n # no lexer for this file or invalid style\n pass\n\n if lexer:\n lexer_cache[filename] = lexer\n\n code = pygments.highlight(code, lexer, formatter).rstrip()\n\n return code\n", "path": "pwndbg/color/syntax_highlight.py"}]} | 1,839 | 123 |
gh_patches_debug_4793 | rasdani/github-patches | git_diff | iterative__dvc-6688 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
http: allow reading proxies from the current environment
This was something enabled as default by the requests, but not on aiohttp. We have to explicitly enable it to keep the current behavior. https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support. Discord context: https://discord.com/channels/485586884165107732/563406153334128681/891230518992052274
</issue>
<code>
[start of dvc/fs/http.py]
1 import threading
2
3 from funcy import cached_property, memoize, wrap_with
4
5 from dvc import prompt
6 from dvc.path_info import HTTPURLInfo
7 from dvc.scheme import Schemes
8
9 from .fsspec_wrapper import FSSpecWrapper, NoDirectoriesMixin
10
11
12 @wrap_with(threading.Lock())
13 @memoize
14 def ask_password(host, user):
15 return prompt.password(
16 "Enter a password for "
17 "host '{host}' user '{user}'".format(host=host, user=user)
18 )
19
20
21 def make_context(ssl_verify):
22 if isinstance(ssl_verify, bool) or ssl_verify is None:
23 return ssl_verify
24
25 # If this is a path, then we will create an
26 # SSL context for it, and load the given certificate.
27 import ssl
28
29 context = ssl.create_default_context()
30 context.load_verify_locations(ssl_verify)
31 return context
32
33
34 # pylint: disable=abstract-method
35 class HTTPFileSystem(NoDirectoriesMixin, FSSpecWrapper):
36 scheme = Schemes.HTTP
37 PATH_CLS = HTTPURLInfo
38 PARAM_CHECKSUM = "checksum"
39 REQUIRES = {"aiohttp": "aiohttp", "aiohttp-retry": "aiohttp_retry"}
40 CAN_TRAVERSE = False
41
42 SESSION_RETRIES = 5
43 SESSION_BACKOFF_FACTOR = 0.1
44 REQUEST_TIMEOUT = 60
45
46 def _prepare_credentials(self, **config):
47 import aiohttp
48 from fsspec.asyn import fsspec_loop
49
50 from dvc.config import ConfigError
51
52 credentials = {}
53 client_kwargs = credentials.setdefault("client_kwargs", {})
54
55 if config.get("auth"):
56 user = config.get("user")
57 password = config.get("password")
58 custom_auth_header = config.get("custom_auth_header")
59
60 if password is None and config.get("ask_password"):
61 password = ask_password(config.get("url"), user or "custom")
62
63 auth_method = config["auth"]
64 if auth_method == "basic":
65 if user is None or password is None:
66 raise ConfigError(
67 "HTTP 'basic' authentication require both "
68 "'user' and 'password'"
69 )
70
71 client_kwargs["auth"] = aiohttp.BasicAuth(user, password)
72 elif auth_method == "custom":
73 if custom_auth_header is None or password is None:
74 raise ConfigError(
75 "HTTP 'custom' authentication require both "
76 "'custom_auth_header' and 'password'"
77 )
78 credentials["headers"] = {custom_auth_header: password}
79 else:
80 raise NotImplementedError(
81 f"Auth method {auth_method!r} is not supported."
82 )
83
84 if "ssl_verify" in config:
85 with fsspec_loop():
86 client_kwargs["connector"] = aiohttp.TCPConnector(
87 ssl=make_context(config["ssl_verify"])
88 )
89
90 credentials["get_client"] = self.get_client
91 self.upload_method = config.get("method", "POST")
92 return credentials
93
94 async def get_client(self, **kwargs):
95 import aiohttp
96 from aiohttp_retry import ExponentialRetry, RetryClient
97
98 kwargs["retry_options"] = ExponentialRetry(
99 attempts=self.SESSION_RETRIES,
100 factor=self.SESSION_BACKOFF_FACTOR,
101 max_timeout=self.REQUEST_TIMEOUT,
102 )
103
104 # The default timeout for the aiohttp is 300 seconds
105 # which is too low for DVC's interactions (especially
106 # on the read) when dealing with large data blobs. We
107 # unlimit the total time to read, and only limit the
108 # time that is spent when connecting to the remote server.
109 kwargs["timeout"] = aiohttp.ClientTimeout(
110 total=None,
111 connect=self.REQUEST_TIMEOUT,
112 sock_connect=self.REQUEST_TIMEOUT,
113 sock_read=None,
114 )
115
116 return RetryClient(**kwargs)
117
118 @cached_property
119 def fs(self):
120 from fsspec.implementations.http import (
121 HTTPFileSystem as _HTTPFileSystem,
122 )
123
124 return _HTTPFileSystem(**self.fs_args)
125
126 def _entry_hook(self, entry):
127 entry["checksum"] = entry.get("ETag") or entry.get("Content-MD5")
128 return entry
129
[end of dvc/fs/http.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/dvc/fs/http.py b/dvc/fs/http.py
--- a/dvc/fs/http.py
+++ b/dvc/fs/http.py
@@ -87,6 +87,9 @@
ssl=make_context(config["ssl_verify"])
)
+ # Allow reading proxy configurations from the environment.
+ client_kwargs["trust_env"] = True
+
credentials["get_client"] = self.get_client
self.upload_method = config.get("method", "POST")
return credentials
| {"golden_diff": "diff --git a/dvc/fs/http.py b/dvc/fs/http.py\n--- a/dvc/fs/http.py\n+++ b/dvc/fs/http.py\n@@ -87,6 +87,9 @@\n ssl=make_context(config[\"ssl_verify\"])\n )\n \n+ # Allow reading proxy configurations from the environment.\n+ client_kwargs[\"trust_env\"] = True\n+\n credentials[\"get_client\"] = self.get_client\n self.upload_method = config.get(\"method\", \"POST\")\n return credentials\n", "issue": "http: allow reading proxies from the current environment\nThis was something enabled as default by the requests, but not on aiohttp. We have to explicitly enable it to keep the current behavior. https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support. Discord context: https://discord.com/channels/485586884165107732/563406153334128681/891230518992052274\n", "before_files": [{"content": "import threading\n\nfrom funcy import cached_property, memoize, wrap_with\n\nfrom dvc import prompt\nfrom dvc.path_info import HTTPURLInfo\nfrom dvc.scheme import Schemes\n\nfrom .fsspec_wrapper import FSSpecWrapper, NoDirectoriesMixin\n\n\n@wrap_with(threading.Lock())\n@memoize\ndef ask_password(host, user):\n return prompt.password(\n \"Enter a password for \"\n \"host '{host}' user '{user}'\".format(host=host, user=user)\n )\n\n\ndef make_context(ssl_verify):\n if isinstance(ssl_verify, bool) or ssl_verify is None:\n return ssl_verify\n\n # If this is a path, then we will create an\n # SSL context for it, and load the given certificate.\n import ssl\n\n context = ssl.create_default_context()\n context.load_verify_locations(ssl_verify)\n return context\n\n\n# pylint: disable=abstract-method\nclass HTTPFileSystem(NoDirectoriesMixin, FSSpecWrapper):\n scheme = Schemes.HTTP\n PATH_CLS = HTTPURLInfo\n PARAM_CHECKSUM = \"checksum\"\n REQUIRES = {\"aiohttp\": \"aiohttp\", \"aiohttp-retry\": \"aiohttp_retry\"}\n CAN_TRAVERSE = False\n\n SESSION_RETRIES = 5\n SESSION_BACKOFF_FACTOR = 0.1\n REQUEST_TIMEOUT = 60\n\n def _prepare_credentials(self, **config):\n import aiohttp\n from fsspec.asyn import fsspec_loop\n\n from dvc.config import ConfigError\n\n credentials = {}\n client_kwargs = credentials.setdefault(\"client_kwargs\", {})\n\n if config.get(\"auth\"):\n user = config.get(\"user\")\n password = config.get(\"password\")\n custom_auth_header = config.get(\"custom_auth_header\")\n\n if password is None and config.get(\"ask_password\"):\n password = ask_password(config.get(\"url\"), user or \"custom\")\n\n auth_method = config[\"auth\"]\n if auth_method == \"basic\":\n if user is None or password is None:\n raise ConfigError(\n \"HTTP 'basic' authentication require both \"\n \"'user' and 'password'\"\n )\n\n client_kwargs[\"auth\"] = aiohttp.BasicAuth(user, password)\n elif auth_method == \"custom\":\n if custom_auth_header is None or password is None:\n raise ConfigError(\n \"HTTP 'custom' authentication require both \"\n \"'custom_auth_header' and 'password'\"\n )\n credentials[\"headers\"] = {custom_auth_header: password}\n else:\n raise NotImplementedError(\n f\"Auth method {auth_method!r} is not supported.\"\n )\n\n if \"ssl_verify\" in config:\n with fsspec_loop():\n client_kwargs[\"connector\"] = aiohttp.TCPConnector(\n ssl=make_context(config[\"ssl_verify\"])\n )\n\n credentials[\"get_client\"] = self.get_client\n self.upload_method = config.get(\"method\", \"POST\")\n return credentials\n\n async def get_client(self, **kwargs):\n import aiohttp\n from aiohttp_retry import ExponentialRetry, RetryClient\n\n kwargs[\"retry_options\"] = ExponentialRetry(\n 
attempts=self.SESSION_RETRIES,\n factor=self.SESSION_BACKOFF_FACTOR,\n max_timeout=self.REQUEST_TIMEOUT,\n )\n\n # The default timeout for the aiohttp is 300 seconds\n # which is too low for DVC's interactions (especially\n # on the read) when dealing with large data blobs. We\n # unlimit the total time to read, and only limit the\n # time that is spent when connecting to the remote server.\n kwargs[\"timeout\"] = aiohttp.ClientTimeout(\n total=None,\n connect=self.REQUEST_TIMEOUT,\n sock_connect=self.REQUEST_TIMEOUT,\n sock_read=None,\n )\n\n return RetryClient(**kwargs)\n\n @cached_property\n def fs(self):\n from fsspec.implementations.http import (\n HTTPFileSystem as _HTTPFileSystem,\n )\n\n return _HTTPFileSystem(**self.fs_args)\n\n def _entry_hook(self, entry):\n entry[\"checksum\"] = entry.get(\"ETag\") or entry.get(\"Content-MD5\")\n return entry\n", "path": "dvc/fs/http.py"}]} | 1,844 | 108 |
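The dvc fix above amounts to letting aiohttp pick proxy settings up from the environment, the way `requests` does by default. A minimal standalone sketch of that behaviour (the URL and proxy value are placeholders, not taken from the original):

    import asyncio
    import aiohttp

    async def fetch_status(url: str) -> int:
        # trust_env=True makes aiohttp honour HTTP_PROXY / HTTPS_PROXY / NO_PROXY
        # (and .netrc), which is the setting the patch forwards via client_kwargs.
        async with aiohttp.ClientSession(trust_env=True) as session:
            async with session.get(url) as resp:
                return resp.status

    # Run with e.g. HTTPS_PROXY=http://proxy.example:3128 exported in the shell.
    print(asyncio.run(fetch_status("https://example.com")))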
gh_patches_debug_4485 | rasdani/github-patches | git_diff | goauthentik__authentik-8146 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
2023.10.6 - "Please select a username" after Azure AD login
**Describe your question/**
Is it now an expected behavior in version 2023.10.6 to ask every user for username input after logging in with Azure AD?

In previous versions it simply authenticated without any prompt, using the email address from Azure AD as the username.
Now it expects the user to input a username (which leads to duplicated accounts, because users with the email address as their username already exist), and if you enter an already existing email address as the username it shows an error:

I think it can be related to this fix:
https://github.com/goauthentik/authentik/pull/7970
Is it possible somehow to set this username automatically, or revert back to using email address so old user accounts will work again?
**Version and Deployment (please complete the following information):**
- authentik version: 2023.10.6
- Deployment: helm
</issue>
<code>
[start of authentik/sources/oauth/types/azure_ad.py]
1 """AzureAD OAuth2 Views"""
2 from typing import Any
3
4 from structlog.stdlib import get_logger
5
6 from authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient
7 from authentik.sources.oauth.types.oidc import OpenIDConnectOAuth2Callback
8 from authentik.sources.oauth.types.registry import SourceType, registry
9 from authentik.sources.oauth.views.redirect import OAuthRedirect
10
11 LOGGER = get_logger()
12
13
14 class AzureADOAuthRedirect(OAuthRedirect):
15 """Azure AD OAuth2 Redirect"""
16
17 def get_additional_parameters(self, source): # pragma: no cover
18 return {
19 "scope": ["openid", "https://graph.microsoft.com/User.Read"],
20 }
21
22
23 class AzureADOAuthCallback(OpenIDConnectOAuth2Callback):
24 """AzureAD OAuth2 Callback"""
25
26 client_class = UserprofileHeaderAuthClient
27
28 def get_user_enroll_context(
29 self,
30 info: dict[str, Any],
31 ) -> dict[str, Any]:
32 mail = info.get("mail", None) or info.get("otherMails", [None])[0]
33 return {
34 "username": info.get("userPrincipalName"),
35 "email": mail,
36 "name": info.get("displayName"),
37 }
38
39
40 @registry.register()
41 class AzureADType(SourceType):
42 """Azure AD Type definition"""
43
44 callback_view = AzureADOAuthCallback
45 redirect_view = AzureADOAuthRedirect
46 verbose_name = "Azure AD"
47 name = "azuread"
48
49 urls_customizable = True
50
51 authorization_url = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize"
52 access_token_url = "https://login.microsoftonline.com/common/oauth2/v2.0/token" # nosec
53 profile_url = "https://graph.microsoft.com/v1.0/me"
54 oidc_well_known_url = (
55 "https://login.microsoftonline.com/common/.well-known/openid-configuration"
56 )
57 oidc_jwks_url = "https://login.microsoftonline.com/common/discovery/keys"
58
[end of authentik/sources/oauth/types/azure_ad.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/authentik/sources/oauth/types/azure_ad.py b/authentik/sources/oauth/types/azure_ad.py
--- a/authentik/sources/oauth/types/azure_ad.py
+++ b/authentik/sources/oauth/types/azure_ad.py
@@ -25,6 +25,11 @@
client_class = UserprofileHeaderAuthClient
+ def get_user_id(self, info: dict[str, str]) -> str:
+ # Default try to get `id` for the Graph API endpoint
+ # fallback to OpenID logic in case the profile URL was changed
+ return info.get("id", super().get_user_id(info))
+
def get_user_enroll_context(
self,
info: dict[str, Any],
| {"golden_diff": "diff --git a/authentik/sources/oauth/types/azure_ad.py b/authentik/sources/oauth/types/azure_ad.py\n--- a/authentik/sources/oauth/types/azure_ad.py\n+++ b/authentik/sources/oauth/types/azure_ad.py\n@@ -25,6 +25,11 @@\n \n client_class = UserprofileHeaderAuthClient\n \n+ def get_user_id(self, info: dict[str, str]) -> str:\n+ # Default try to get `id` for the Graph API endpoint\n+ # fallback to OpenID logic in case the profile URL was changed\n+ return info.get(\"id\", super().get_user_id(info))\n+\n def get_user_enroll_context(\n self,\n info: dict[str, Any],\n", "issue": "2023.10.6 - \"Please select a username\" after Azure AD login\n**Describe your question/**\r\n\r\nIs it now a expected behavior in 2023.10.6 version to ask every user for username input after logging in with azure ad?\r\n\r\n\r\nIn previous versions it was simply authenticating without any prompt, using email address from Azure AD as username.\r\n\r\nNow it expects user to input username (and it leads to duplicated accounts, because users with mail as username already exist), and if you enter already existing mail as username it shows error:\r\n\r\n\r\nI think it can be related to this fix:\r\nhttps://github.com/goauthentik/authentik/pull/7970\r\n\r\nIs it possible somehow to set this username automatically, or revert back to using email address so old user accounts will work again?\r\n\r\n**Version and Deployment (please complete the following information):**\r\n\r\n- authentik version: 2023.10.6\r\n- Deployment: helm\r\n\r\n\n", "before_files": [{"content": "\"\"\"AzureAD OAuth2 Views\"\"\"\nfrom typing import Any\n\nfrom structlog.stdlib import get_logger\n\nfrom authentik.sources.oauth.clients.oauth2 import UserprofileHeaderAuthClient\nfrom authentik.sources.oauth.types.oidc import OpenIDConnectOAuth2Callback\nfrom authentik.sources.oauth.types.registry import SourceType, registry\nfrom authentik.sources.oauth.views.redirect import OAuthRedirect\n\nLOGGER = get_logger()\n\n\nclass AzureADOAuthRedirect(OAuthRedirect):\n \"\"\"Azure AD OAuth2 Redirect\"\"\"\n\n def get_additional_parameters(self, source): # pragma: no cover\n return {\n \"scope\": [\"openid\", \"https://graph.microsoft.com/User.Read\"],\n }\n\n\nclass AzureADOAuthCallback(OpenIDConnectOAuth2Callback):\n \"\"\"AzureAD OAuth2 Callback\"\"\"\n\n client_class = UserprofileHeaderAuthClient\n\n def get_user_enroll_context(\n self,\n info: dict[str, Any],\n ) -> dict[str, Any]:\n mail = info.get(\"mail\", None) or info.get(\"otherMails\", [None])[0]\n return {\n \"username\": info.get(\"userPrincipalName\"),\n \"email\": mail,\n \"name\": info.get(\"displayName\"),\n }\n\n\[email protected]()\nclass AzureADType(SourceType):\n \"\"\"Azure AD Type definition\"\"\"\n\n callback_view = AzureADOAuthCallback\n redirect_view = AzureADOAuthRedirect\n verbose_name = \"Azure AD\"\n name = \"azuread\"\n\n urls_customizable = True\n\n authorization_url = \"https://login.microsoftonline.com/common/oauth2/v2.0/authorize\"\n access_token_url = \"https://login.microsoftonline.com/common/oauth2/v2.0/token\" # nosec\n profile_url = \"https://graph.microsoft.com/v1.0/me\"\n oidc_well_known_url = (\n \"https://login.microsoftonline.com/common/.well-known/openid-configuration\"\n )\n oidc_jwks_url = \"https://login.microsoftonline.com/common/discovery/keys\"\n", "path": "authentik/sources/oauth/types/azure_ad.py"}]} | 1,412 | 161 |
gh_patches_debug_4482 | rasdani/github-patches | git_diff | aio-libs-abandoned__aioredis-py-313 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Missing PyPI classifier for Python 3.6 support
Currently, the PyPI page shows the following as supported:
> Programming Language :: Python
> Programming Language :: Python :: 3
> Programming Language :: Python :: 3.3
> Programming Language :: Python :: 3.4
> Programming Language :: Python :: 3.5
However, Python 3.6 is part of the automated tests, and the README states it is supported,
so I'm presuming this is just an omission.
</issue>
<code>
[start of setup.py]
1 import re
2 import os.path
3 import sys
4 import platform
5 from setuptools import setup, find_packages
6
7
8 install_requires = ['async-timeout']
9 if platform.python_implementation() == 'CPython':
10 install_requires.append('hiredis')
11
12 PY_VER = sys.version_info
13
14 if PY_VER >= (3, 4):
15 pass
16 elif PY_VER >= (3, 3):
17 install_requires.append('asyncio')
18 else:
19 raise RuntimeError("aioredis doesn't support Python version prior 3.3")
20
21
22 def read(*parts):
23 with open(os.path.join(*parts), 'rt') as f:
24 return f.read().strip()
25
26
27 def read_version():
28 regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'")
29 init_py = os.path.join(os.path.dirname(__file__),
30 'aioredis', '__init__.py')
31 with open(init_py) as f:
32 for line in f:
33 match = regexp.match(line)
34 if match is not None:
35 return match.group(1)
36 else:
37 raise RuntimeError('Cannot find version in aioredis/__init__.py')
38
39
40 classifiers = [
41 'License :: OSI Approved :: MIT License',
42 'Development Status :: 4 - Beta',
43 'Programming Language :: Python',
44 'Programming Language :: Python :: 3',
45 'Programming Language :: Python :: 3.3',
46 'Programming Language :: Python :: 3.4',
47 'Programming Language :: Python :: 3.5',
48 'Operating System :: POSIX',
49 'Environment :: Web Environment',
50 'Intended Audience :: Developers',
51 'Topic :: Software Development',
52 'Topic :: Software Development :: Libraries',
53 'Framework :: AsyncIO',
54 ]
55
56 setup(name='aioredis',
57 version=read_version(),
58 description=("asyncio (PEP 3156) Redis support"),
59 long_description="\n\n".join((read('README.rst'), read('CHANGES.txt'))),
60 classifiers=classifiers,
61 platforms=["POSIX"],
62 author="Alexey Popravka",
63 author_email="[email protected]",
64 url="https://github.com/aio-libs/aioredis",
65 license="MIT",
66 packages=find_packages(exclude=["tests"]),
67 install_requires=install_requires,
68 include_package_data=True,
69 )
70
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -45,6 +45,7 @@
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
'Operating System :: POSIX',
'Environment :: Web Environment',
'Intended Audience :: Developers',
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -45,6 +45,7 @@\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n+ 'Programming Language :: Python :: 3.6',\n 'Operating System :: POSIX',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n", "issue": "Missing PyPI classifier for Python 3.6 support\nCurrently, the PyPI page shows the following as supported:\r\n\r\n> Programming Language :: Python\r\n> Programming Language :: Python :: 3\r\n> Programming Language :: Python :: 3.3\r\n> Programming Language :: Python :: 3.4\r\n> Programming Language :: Python :: 3.5\r\n\r\nHowever, Python 3.6 is part of the automated tests, and the README states it is supported,\r\nso I'm presuming this is just an omission.\n", "before_files": [{"content": "import re\nimport os.path\nimport sys\nimport platform\nfrom setuptools import setup, find_packages\n\n\ninstall_requires = ['async-timeout']\nif platform.python_implementation() == 'CPython':\n install_requires.append('hiredis')\n\nPY_VER = sys.version_info\n\nif PY_VER >= (3, 4):\n pass\nelif PY_VER >= (3, 3):\n install_requires.append('asyncio')\nelse:\n raise RuntimeError(\"aioredis doesn't support Python version prior 3.3\")\n\n\ndef read(*parts):\n with open(os.path.join(*parts), 'rt') as f:\n return f.read().strip()\n\n\ndef read_version():\n regexp = re.compile(r\"^__version__\\W*=\\W*'([\\d.abrc]+)'\")\n init_py = os.path.join(os.path.dirname(__file__),\n 'aioredis', '__init__.py')\n with open(init_py) as f:\n for line in f:\n match = regexp.match(line)\n if match is not None:\n return match.group(1)\n else:\n raise RuntimeError('Cannot find version in aioredis/__init__.py')\n\n\nclassifiers = [\n 'License :: OSI Approved :: MIT License',\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Operating System :: POSIX',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries',\n 'Framework :: AsyncIO',\n]\n\nsetup(name='aioredis',\n version=read_version(),\n description=(\"asyncio (PEP 3156) Redis support\"),\n long_description=\"\\n\\n\".join((read('README.rst'), read('CHANGES.txt'))),\n classifiers=classifiers,\n platforms=[\"POSIX\"],\n author=\"Alexey Popravka\",\n author_email=\"[email protected]\",\n url=\"https://github.com/aio-libs/aioredis\",\n license=\"MIT\",\n packages=find_packages(exclude=[\"tests\"]),\n install_requires=install_requires,\n include_package_data=True,\n )\n", "path": "setup.py"}]} | 1,274 | 103 |
gh_patches_debug_39664 | rasdani/github-patches | git_diff | Cloud-CV__EvalAI-1106 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Improve the check for if the user is a host of a challenge
Currently, at https://github.com/Cloud-CV/EvalAI/blob/master/apps/hosts/utils.py#L12 we only check if the user has created the challenge and based on that we allow the challenge host to see all the submissions (for example, see https://github.com/Cloud-CV/EvalAI/blob/master/apps/challenges/views.py#L703). We would want to change it so that anyone who is a part of the challenge host team is allowed to access information such as 'View all submissions' etc.
</issue>
<code>
[start of apps/participants/urls.py]
1 from django.conf.urls import url
2
3 from . import views
4
5 urlpatterns = [
6 url(r'participant_team/(?P<pk>[0-9]+)/invite$', views.invite_participant_to_team,
7 name='invite_participant_to_team'),
8 url(r'remove_self_from_participant_team/(?P<participant_team_pk>[0-9]+)$',
9 views.remove_self_from_participant_team,
10 name='remove_self_from_participant_team'),
11 url(r'participant_team/(?P<participant_team_pk>[0-9]+)/participant/(?P<participant_pk>[0-9]+)$',
12 views.delete_participant_from_team, name='delete_participant_from_team'),
13 url(r'participant_teams/challenges/user',
14 views.get_teams_and_corresponding_challenges_for_a_participant,
15 name='get_teams_and_corresponding_challenges_for_a_participant'),
16 url(r'participant_team$', views.participant_team_list,
17 name='get_participant_team_list'),
18 url(r'participant_team/(?P<pk>[0-9]+)$',
19 views.participant_team_detail, name='get_participant_team_details'),
20 ]
21
[end of apps/participants/urls.py]
[start of apps/participants/views.py]
1 from django.contrib.auth.models import User
2
3 from rest_framework import permissions, status
4 from rest_framework.decorators import (api_view,
5 authentication_classes,
6 permission_classes,
7 throttle_classes,)
8 from rest_framework.response import Response
9 from rest_framework_expiring_authtoken.authentication import (
10 ExpiringTokenAuthentication,)
11 from rest_framework.throttling import UserRateThrottle
12
13 from accounts.permissions import HasVerifiedEmail
14 from base.utils import paginated_queryset
15 from challenges.models import Challenge
16
17 from .models import (Participant, ParticipantTeam)
18 from .serializers import (InviteParticipantToTeamSerializer,
19 ParticipantTeamSerializer,
20 ChallengeParticipantTeam,
21 ChallengeParticipantTeamList,
22 ChallengeParticipantTeamListSerializer,
23 ParticipantTeamDetailSerializer,)
24 from .utils import (get_list_of_challenges_for_participant_team,
25 get_list_of_challenges_participated_by_a_user,)
26
27
28 @throttle_classes([UserRateThrottle])
29 @api_view(['GET', 'POST'])
30 @permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
31 @authentication_classes((ExpiringTokenAuthentication,))
32 def participant_team_list(request):
33
34 if request.method == 'GET':
35 participant_teams_id = Participant.objects.filter(user_id=request.user).values_list('team_id', flat=True)
36 participant_teams = ParticipantTeam.objects.filter(
37 id__in=participant_teams_id)
38 paginator, result_page = paginated_queryset(participant_teams, request)
39 serializer = ParticipantTeamDetailSerializer(result_page, many=True)
40 response_data = serializer.data
41 return paginator.get_paginated_response(response_data)
42
43 elif request.method == 'POST':
44 serializer = ParticipantTeamSerializer(data=request.data,
45 context={'request': request})
46 if serializer.is_valid():
47 serializer.save()
48 response_data = serializer.data
49 participant_team = serializer.instance
50 participant = Participant(user=request.user,
51 status=Participant.SELF,
52 team=participant_team)
53 participant.save()
54 return Response(response_data, status=status.HTTP_201_CREATED)
55 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
56
57
58 @throttle_classes([UserRateThrottle])
59 @api_view(['GET', 'PUT', 'PATCH', 'DELETE'])
60 @permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
61 @authentication_classes((ExpiringTokenAuthentication,))
62 def participant_team_detail(request, pk):
63
64 try:
65 participant_team = ParticipantTeam.objects.get(pk=pk)
66 except ParticipantTeam.DoesNotExist:
67 response_data = {'error': 'ParticipantTeam does not exist'}
68 return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
69
70 if request.method == 'GET':
71 serializer = ParticipantTeamDetailSerializer(participant_team)
72 response_data = serializer.data
73 return Response(response_data, status=status.HTTP_200_OK)
74
75 elif request.method in ['PUT', 'PATCH']:
76
77 if request.method == 'PATCH':
78 serializer = ParticipantTeamSerializer(participant_team, data=request.data,
79 context={
80 'request': request},
81 partial=True)
82 else:
83 serializer = ParticipantTeamSerializer(participant_team, data=request.data,
84 context={'request': request})
85 if serializer.is_valid():
86 serializer.save()
87 response_data = serializer.data
88 return Response(response_data, status=status.HTTP_200_OK)
89 else:
90 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
91
92 elif request.method == 'DELETE':
93 participant_team.delete()
94 return Response(status=status.HTTP_204_NO_CONTENT)
95
96
97 @throttle_classes([UserRateThrottle])
98 @api_view(['POST'])
99 @permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
100 @authentication_classes((ExpiringTokenAuthentication,))
101 def invite_participant_to_team(request, pk):
102
103 try:
104 participant_team = ParticipantTeam.objects.get(pk=pk)
105 except ParticipantTeam.DoesNotExist:
106 response_data = {'error': 'ParticipantTeam does not exist'}
107 return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
108
109 email = request.data.get('email')
110 try:
111 user = User.objects.get(email=email)
112 except User.DoesNotExist:
113 response_data = {'error': 'User does not exist with this email address!'}
114 return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
115
116 invited_user_participated_challenges = get_list_of_challenges_participated_by_a_user(
117 user).values_list("id", flat=True)
118 team_participated_challenges = get_list_of_challenges_for_participant_team(
119 [participant_team]).values_list("id", flat=True)
120
121 if set(invited_user_participated_challenges) & set(team_participated_challenges):
122 """
123 Condition to check if the user has already participated in challenges where
124 the inviting participant has participated. If this is the case,
125 then the user cannot be invited since he cannot participate in a challenge
126 via two teams.
127 """
128 response_data = {'error': 'Sorry, cannot invite user to the team!'}
129 return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
130
131 serializer = InviteParticipantToTeamSerializer(data=request.data,
132 context={'participant_team': participant_team,
133 'request': request})
134 if serializer.is_valid():
135 serializer.save()
136 response_data = {
137 'message': 'User has been successfully added to the team!'}
138 return Response(response_data, status=status.HTTP_202_ACCEPTED)
139 return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
140
141
142 @throttle_classes([UserRateThrottle])
143 @api_view(['DELETE'])
144 @permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
145 @authentication_classes((ExpiringTokenAuthentication,))
146 def delete_participant_from_team(request, participant_team_pk, participant_pk):
147 """
148 Deletes a participant from a Participant Team
149 """
150 try:
151 participant_team = ParticipantTeam.objects.get(pk=participant_team_pk)
152 except ParticipantTeam.DoesNotExist:
153 response_data = {'error': 'ParticipantTeam does not exist'}
154 return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
155
156 try:
157 participant = Participant.objects.get(pk=participant_pk)
158 except Participant.DoesNotExist:
159 response_data = {'error': 'Participant does not exist'}
160 return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
161
162 if participant_team.created_by == request.user:
163
164 if participant.user == request.user: # when the user tries to remove himself
165 response_data = {
166 'error': 'You are not allowed to remove yourself since you are admin. Please delete the team if you want to do so!'} # noqa: ignore=E501
167 return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
168 else:
169 participant.delete()
170 return Response(status=status.HTTP_204_NO_CONTENT)
171 else:
172 response_data = {
173 'error': 'Sorry, you do not have permissions to remove this participant'}
174 return Response(response_data, status=status.HTTP_401_UNAUTHORIZED)
175
176
177 @throttle_classes([UserRateThrottle])
178 @api_view(['GET', ])
179 @permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
180 @authentication_classes((ExpiringTokenAuthentication,))
181 def get_teams_and_corresponding_challenges_for_a_participant(request):
182 """
183 Returns list of teams and corresponding challenges for a participant
184 """
185 # first get list of all the participants and teams related to the user
186 participant_objs = Participant.objects.filter(user=request.user).prefetch_related('team')
187
188 challenge_participated_teams = []
189 for participant_obj in participant_objs:
190 participant_team = participant_obj.team
191
192 challenges = Challenge.objects.filter(
193 participant_teams=participant_team)
194
195 if challenges.count():
196 for challenge in challenges:
197 challenge_participated_teams.append(ChallengeParticipantTeam(
198 challenge, participant_team))
199 else:
200 challenge = None
201 challenge_participated_teams.append(ChallengeParticipantTeam(
202 challenge, participant_team))
203 serializer = ChallengeParticipantTeamListSerializer(ChallengeParticipantTeamList(challenge_participated_teams))
204 return Response(serializer.data, status=status.HTTP_200_OK)
205
206
207 @throttle_classes([UserRateThrottle])
208 @api_view(['DELETE', ])
209 @permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
210 @authentication_classes((ExpiringTokenAuthentication,))
211 def remove_self_from_participant_team(request, participant_team_pk):
212 """
213 A user can remove himself from the participant team.
214 """
215 try:
216 participant_team = ParticipantTeam.objects.get(pk=participant_team_pk)
217 except ParticipantTeam.DoesNotExist:
218 response_data = {'error': 'ParticipantTeam does not exist!'}
219 return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)
220
221 try:
222 participant = Participant.objects.get(user=request.user, team__pk=participant_team_pk)
223 except:
224 response_data = {'error': 'Sorry, you do not belong to this team!'}
225 return Response(response_data, status=status.HTTP_401_UNAUTHORIZED)
226
227 if get_list_of_challenges_for_participant_team([participant_team]).exists():
228 response_data = {'error': 'Sorry, you cannot delete this team since it has taken part in challenge(s)!'}
229 return Response(response_data, status=status.HTTP_403_FORBIDDEN)
230 else:
231 participant.delete()
232 return Response(status=status.HTTP_204_NO_CONTENT)
233
[end of apps/participants/views.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/apps/participants/urls.py b/apps/participants/urls.py
--- a/apps/participants/urls.py
+++ b/apps/participants/urls.py
@@ -10,7 +10,7 @@
name='remove_self_from_participant_team'),
url(r'participant_team/(?P<participant_team_pk>[0-9]+)/participant/(?P<participant_pk>[0-9]+)$',
views.delete_participant_from_team, name='delete_participant_from_team'),
- url(r'participant_teams/challenges/user',
+ url(r'participant_teams/challenges/(?P<challenge_pk>[0-9]+)/user$',
views.get_teams_and_corresponding_challenges_for_a_participant,
name='get_teams_and_corresponding_challenges_for_a_participant'),
url(r'participant_team$', views.participant_team_list,
diff --git a/apps/participants/views.py b/apps/participants/views.py
--- a/apps/participants/views.py
+++ b/apps/participants/views.py
@@ -13,6 +13,7 @@
from accounts.permissions import HasVerifiedEmail
from base.utils import paginated_queryset
from challenges.models import Challenge
+from hosts.utils import is_user_a_host_of_challenge
from .models import (Participant, ParticipantTeam)
from .serializers import (InviteParticipantToTeamSerializer,
@@ -178,13 +179,15 @@
@api_view(['GET', ])
@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))
@authentication_classes((ExpiringTokenAuthentication,))
-def get_teams_and_corresponding_challenges_for_a_participant(request):
+def get_teams_and_corresponding_challenges_for_a_participant(request, challenge_pk):
"""
Returns list of teams and corresponding challenges for a participant
"""
# first get list of all the participants and teams related to the user
participant_objs = Participant.objects.filter(user=request.user).prefetch_related('team')
+ is_challenge_host = is_user_a_host_of_challenge(user=request.user, challenge_pk=challenge_pk)
+
challenge_participated_teams = []
for participant_obj in participant_objs:
participant_team = participant_obj.team
@@ -201,7 +204,9 @@
challenge_participated_teams.append(ChallengeParticipantTeam(
challenge, participant_team))
serializer = ChallengeParticipantTeamListSerializer(ChallengeParticipantTeamList(challenge_participated_teams))
- return Response(serializer.data, status=status.HTTP_200_OK)
+ response_data = serializer.data
+ response_data['is_challenge_host'] = is_challenge_host
+ return Response(response_data, status=status.HTTP_200_OK)
@throttle_classes([UserRateThrottle])
| {"golden_diff": "diff --git a/apps/participants/urls.py b/apps/participants/urls.py\n--- a/apps/participants/urls.py\n+++ b/apps/participants/urls.py\n@@ -10,7 +10,7 @@\n name='remove_self_from_participant_team'),\n url(r'participant_team/(?P<participant_team_pk>[0-9]+)/participant/(?P<participant_pk>[0-9]+)$',\n views.delete_participant_from_team, name='delete_participant_from_team'),\n- url(r'participant_teams/challenges/user',\n+ url(r'participant_teams/challenges/(?P<challenge_pk>[0-9]+)/user$',\n views.get_teams_and_corresponding_challenges_for_a_participant,\n name='get_teams_and_corresponding_challenges_for_a_participant'),\n url(r'participant_team$', views.participant_team_list,\ndiff --git a/apps/participants/views.py b/apps/participants/views.py\n--- a/apps/participants/views.py\n+++ b/apps/participants/views.py\n@@ -13,6 +13,7 @@\n from accounts.permissions import HasVerifiedEmail\n from base.utils import paginated_queryset\n from challenges.models import Challenge\n+from hosts.utils import is_user_a_host_of_challenge\n \n from .models import (Participant, ParticipantTeam)\n from .serializers import (InviteParticipantToTeamSerializer,\n@@ -178,13 +179,15 @@\n @api_view(['GET', ])\n @permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))\n @authentication_classes((ExpiringTokenAuthentication,))\n-def get_teams_and_corresponding_challenges_for_a_participant(request):\n+def get_teams_and_corresponding_challenges_for_a_participant(request, challenge_pk):\n \"\"\"\n Returns list of teams and corresponding challenges for a participant\n \"\"\"\n # first get list of all the participants and teams related to the user\n participant_objs = Participant.objects.filter(user=request.user).prefetch_related('team')\n \n+ is_challenge_host = is_user_a_host_of_challenge(user=request.user, challenge_pk=challenge_pk)\n+\n challenge_participated_teams = []\n for participant_obj in participant_objs:\n participant_team = participant_obj.team\n@@ -201,7 +204,9 @@\n challenge_participated_teams.append(ChallengeParticipantTeam(\n challenge, participant_team))\n serializer = ChallengeParticipantTeamListSerializer(ChallengeParticipantTeamList(challenge_participated_teams))\n- return Response(serializer.data, status=status.HTTP_200_OK)\n+ response_data = serializer.data\n+ response_data['is_challenge_host'] = is_challenge_host\n+ return Response(response_data, status=status.HTTP_200_OK)\n \n \n @throttle_classes([UserRateThrottle])\n", "issue": "Improve the check for if the user is a host of a challenge\nCurrently, at https://github.com/Cloud-CV/EvalAI/blob/master/apps/hosts/utils.py#L12 we only check if the user has created the challegne and based on that we allow the challenge host to see all the submissions (for example, see this https://github.com/Cloud-CV/EvalAI/blob/master/apps/challenges/views.py#L703). We would want to change it so that anyone who is a part of the challenge host team is allowed to access information such as 'View all submissions' etc. \n", "before_files": [{"content": "from django.conf.urls import url\n\nfrom . 
import views\n\nurlpatterns = [\n url(r'participant_team/(?P<pk>[0-9]+)/invite$', views.invite_participant_to_team,\n name='invite_participant_to_team'),\n url(r'remove_self_from_participant_team/(?P<participant_team_pk>[0-9]+)$',\n views.remove_self_from_participant_team,\n name='remove_self_from_participant_team'),\n url(r'participant_team/(?P<participant_team_pk>[0-9]+)/participant/(?P<participant_pk>[0-9]+)$',\n views.delete_participant_from_team, name='delete_participant_from_team'),\n url(r'participant_teams/challenges/user',\n views.get_teams_and_corresponding_challenges_for_a_participant,\n name='get_teams_and_corresponding_challenges_for_a_participant'),\n url(r'participant_team$', views.participant_team_list,\n name='get_participant_team_list'),\n url(r'participant_team/(?P<pk>[0-9]+)$',\n views.participant_team_detail, name='get_participant_team_details'),\n]\n", "path": "apps/participants/urls.py"}, {"content": "from django.contrib.auth.models import User\n\nfrom rest_framework import permissions, status\nfrom rest_framework.decorators import (api_view,\n authentication_classes,\n permission_classes,\n throttle_classes,)\nfrom rest_framework.response import Response\nfrom rest_framework_expiring_authtoken.authentication import (\n ExpiringTokenAuthentication,)\nfrom rest_framework.throttling import UserRateThrottle\n\nfrom accounts.permissions import HasVerifiedEmail\nfrom base.utils import paginated_queryset\nfrom challenges.models import Challenge\n\nfrom .models import (Participant, ParticipantTeam)\nfrom .serializers import (InviteParticipantToTeamSerializer,\n ParticipantTeamSerializer,\n ChallengeParticipantTeam,\n ChallengeParticipantTeamList,\n ChallengeParticipantTeamListSerializer,\n ParticipantTeamDetailSerializer,)\nfrom .utils import (get_list_of_challenges_for_participant_team,\n get_list_of_challenges_participated_by_a_user,)\n\n\n@throttle_classes([UserRateThrottle])\n@api_view(['GET', 'POST'])\n@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))\n@authentication_classes((ExpiringTokenAuthentication,))\ndef participant_team_list(request):\n\n if request.method == 'GET':\n participant_teams_id = Participant.objects.filter(user_id=request.user).values_list('team_id', flat=True)\n participant_teams = ParticipantTeam.objects.filter(\n id__in=participant_teams_id)\n paginator, result_page = paginated_queryset(participant_teams, request)\n serializer = ParticipantTeamDetailSerializer(result_page, many=True)\n response_data = serializer.data\n return paginator.get_paginated_response(response_data)\n\n elif request.method == 'POST':\n serializer = ParticipantTeamSerializer(data=request.data,\n context={'request': request})\n if serializer.is_valid():\n serializer.save()\n response_data = serializer.data\n participant_team = serializer.instance\n participant = Participant(user=request.user,\n status=Participant.SELF,\n team=participant_team)\n participant.save()\n return Response(response_data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\n@throttle_classes([UserRateThrottle])\n@api_view(['GET', 'PUT', 'PATCH', 'DELETE'])\n@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))\n@authentication_classes((ExpiringTokenAuthentication,))\ndef participant_team_detail(request, pk):\n\n try:\n participant_team = ParticipantTeam.objects.get(pk=pk)\n except ParticipantTeam.DoesNotExist:\n response_data = {'error': 'ParticipantTeam does not exist'}\n return Response(response_data, 
status=status.HTTP_406_NOT_ACCEPTABLE)\n\n if request.method == 'GET':\n serializer = ParticipantTeamDetailSerializer(participant_team)\n response_data = serializer.data\n return Response(response_data, status=status.HTTP_200_OK)\n\n elif request.method in ['PUT', 'PATCH']:\n\n if request.method == 'PATCH':\n serializer = ParticipantTeamSerializer(participant_team, data=request.data,\n context={\n 'request': request},\n partial=True)\n else:\n serializer = ParticipantTeamSerializer(participant_team, data=request.data,\n context={'request': request})\n if serializer.is_valid():\n serializer.save()\n response_data = serializer.data\n return Response(response_data, status=status.HTTP_200_OK)\n else:\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n elif request.method == 'DELETE':\n participant_team.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n\n@throttle_classes([UserRateThrottle])\n@api_view(['POST'])\n@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))\n@authentication_classes((ExpiringTokenAuthentication,))\ndef invite_participant_to_team(request, pk):\n\n try:\n participant_team = ParticipantTeam.objects.get(pk=pk)\n except ParticipantTeam.DoesNotExist:\n response_data = {'error': 'ParticipantTeam does not exist'}\n return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)\n\n email = request.data.get('email')\n try:\n user = User.objects.get(email=email)\n except User.DoesNotExist:\n response_data = {'error': 'User does not exist with this email address!'}\n return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)\n\n invited_user_participated_challenges = get_list_of_challenges_participated_by_a_user(\n user).values_list(\"id\", flat=True)\n team_participated_challenges = get_list_of_challenges_for_participant_team(\n [participant_team]).values_list(\"id\", flat=True)\n\n if set(invited_user_participated_challenges) & set(team_participated_challenges):\n \"\"\"\n Condition to check if the user has already participated in challenges where\n the inviting participant has participated. 
If this is the case,\n then the user cannot be invited since he cannot participate in a challenge\n via two teams.\n \"\"\"\n response_data = {'error': 'Sorry, cannot invite user to the team!'}\n return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)\n\n serializer = InviteParticipantToTeamSerializer(data=request.data,\n context={'participant_team': participant_team,\n 'request': request})\n if serializer.is_valid():\n serializer.save()\n response_data = {\n 'message': 'User has been successfully added to the team!'}\n return Response(response_data, status=status.HTTP_202_ACCEPTED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\n@throttle_classes([UserRateThrottle])\n@api_view(['DELETE'])\n@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))\n@authentication_classes((ExpiringTokenAuthentication,))\ndef delete_participant_from_team(request, participant_team_pk, participant_pk):\n \"\"\"\n Deletes a participant from a Participant Team\n \"\"\"\n try:\n participant_team = ParticipantTeam.objects.get(pk=participant_team_pk)\n except ParticipantTeam.DoesNotExist:\n response_data = {'error': 'ParticipantTeam does not exist'}\n return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)\n\n try:\n participant = Participant.objects.get(pk=participant_pk)\n except Participant.DoesNotExist:\n response_data = {'error': 'Participant does not exist'}\n return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)\n\n if participant_team.created_by == request.user:\n\n if participant.user == request.user: # when the user tries to remove himself\n response_data = {\n 'error': 'You are not allowed to remove yourself since you are admin. Please delete the team if you want to do so!'} # noqa: ignore=E501\n return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)\n else:\n participant.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n else:\n response_data = {\n 'error': 'Sorry, you do not have permissions to remove this participant'}\n return Response(response_data, status=status.HTTP_401_UNAUTHORIZED)\n\n\n@throttle_classes([UserRateThrottle])\n@api_view(['GET', ])\n@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))\n@authentication_classes((ExpiringTokenAuthentication,))\ndef get_teams_and_corresponding_challenges_for_a_participant(request):\n \"\"\"\n Returns list of teams and corresponding challenges for a participant\n \"\"\"\n # first get list of all the participants and teams related to the user\n participant_objs = Participant.objects.filter(user=request.user).prefetch_related('team')\n\n challenge_participated_teams = []\n for participant_obj in participant_objs:\n participant_team = participant_obj.team\n\n challenges = Challenge.objects.filter(\n participant_teams=participant_team)\n\n if challenges.count():\n for challenge in challenges:\n challenge_participated_teams.append(ChallengeParticipantTeam(\n challenge, participant_team))\n else:\n challenge = None\n challenge_participated_teams.append(ChallengeParticipantTeam(\n challenge, participant_team))\n serializer = ChallengeParticipantTeamListSerializer(ChallengeParticipantTeamList(challenge_participated_teams))\n return Response(serializer.data, status=status.HTTP_200_OK)\n\n\n@throttle_classes([UserRateThrottle])\n@api_view(['DELETE', ])\n@permission_classes((permissions.IsAuthenticated, HasVerifiedEmail))\n@authentication_classes((ExpiringTokenAuthentication,))\ndef remove_self_from_participant_team(request, 
participant_team_pk):\n \"\"\"\n A user can remove himself from the participant team.\n \"\"\"\n try:\n participant_team = ParticipantTeam.objects.get(pk=participant_team_pk)\n except ParticipantTeam.DoesNotExist:\n response_data = {'error': 'ParticipantTeam does not exist!'}\n return Response(response_data, status=status.HTTP_406_NOT_ACCEPTABLE)\n\n try:\n participant = Participant.objects.get(user=request.user, team__pk=participant_team_pk)\n except:\n response_data = {'error': 'Sorry, you do not belong to this team!'}\n return Response(response_data, status=status.HTTP_401_UNAUTHORIZED)\n\n if get_list_of_challenges_for_participant_team([participant_team]).exists():\n response_data = {'error': 'Sorry, you cannot delete this team since it has taken part in challenge(s)!'}\n return Response(response_data, status=status.HTTP_403_FORBIDDEN)\n else:\n participant.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n", "path": "apps/participants/views.py"}]} | 3,515 | 573 |
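For the EvalAI record above, the issue hinges on `is_user_a_host_of_challenge` (imported in the golden diff from `hosts.utils`) testing host-team membership rather than only challenge authorship. A rough sketch of that idea, with `ChallengeHost` and `team_name` used as hypothetical model and field names for illustration:

    from challenges.models import Challenge
    from hosts.models import ChallengeHost  # assumed module layout

    def is_user_a_host_of_challenge(user, challenge_pk):
        # Count membership in the host team that owns the challenge,
        # not just being the single user who created it.
        try:
            challenge = Challenge.objects.get(pk=challenge_pk)
        except Challenge.DoesNotExist:
            return False
        return ChallengeHost.objects.filter(
            user=user, team_name=challenge.creator
        ).exists()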
gh_patches_debug_23807 | rasdani/github-patches | git_diff | digitalfabrik__integreat-cms-860 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Implement WordPress API endpoint URL patterns
### Motivation
With Django we decided to introduce a new, clean API URL pattern. However, the apps do not use this API yet. We already have exports of the content, and the app team could start end-to-end testing as soon as the app can interface with the CMS.
### Proposed Solution
We need to implement the URL pattern defined in https://github.com/Integreat/cms/wiki/REST-APIv3-Documentation
The WordPress URL pattern should be available in addition to the current (clean) URLs, so that the app team can switch over.
Please test the integration with the web app by [changing the back end URL](https://github.com/Integreat/cms/wiki/REST-APIv3-Documentation#test-api-with-webapp) to the Django system. If you need help finding issues or interpreting error messages, feel free to contact the app team for help.
### Alternatives
None, needs to be done.
</issue>
<code>
[start of src/backend/urls.py]
1 """
2 Django URL dispatcher.
3 Delegates the following namespaces:
4
5 * ``/api/`` to :mod:`api.urls`
6
7 * ``/admin/`` to :meth:`django.contrib.admin.ModelAdmin.get_urls`
8
9 * ``/i18n/`` to :mod:`django.conf.urls.i18n`
10
11 * ``/sitemap.xml`` and ``/<region_slug>/<language_slug>/sitemap.xml`` to :mod:`sitemap.urls`
12
13 * ``/`` to :mod:`cms.urls`
14
15 Additionally, the error handlers in :mod:`cms.views.error_handler` are referenced here (see :doc:`ref/urls`).
16
17 For more information on this file, see :doc:`topics/http/urls`.
18 """
19 from django.conf.urls import include, url
20 from django.conf import settings
21 from django.contrib import admin
22
23
24 urlpatterns = [
25 url(r"^api/", include("api.urls")),
26 url(r"^i18n/", include("django.conf.urls.i18n")),
27 ]
28
29 # The admin/endpoint is only activated if the system is in debug mode.
30 if settings.DEBUG:
31 urlpatterns.append(url(r"^admin/", admin.site.urls))
32
33 # Unfortunatly we need to do this in such way, as the admin endpoint needs to be added before the endpoints of the other apps.
34 urlpatterns += [
35 url(r"^", include("sitemap.urls")),
36 url(r"^", include("cms.urls")),
37 ]
38
39 handler400 = "cms.views.error_handler.handler400"
40 handler403 = "cms.views.error_handler.handler403"
41 handler404 = "cms.views.error_handler.handler404"
42 handler500 = "cms.views.error_handler.handler500"
43
[end of src/backend/urls.py]
[start of src/api/urls.py]
1 """
2 Expansion of API-Endpoints for the CMS
3 """
4 from django.conf.urls import include, url
5
6 from .v3.events import events
7 from .v3.feedback import (
8 page_feedback,
9 search_result_feedback,
10 region_feedback,
11 offer_feedback,
12 offer_list_feedback,
13 event_list_feedback,
14 event_feedback,
15 poi_feedback,
16 map_feedback,
17 imprint_page_feedback,
18 legacy_feedback_endpoint,
19 )
20 from .v3.imprint import imprint
21 from .v3.languages import languages
22 from .v3.locations import locations
23 from .v3.pages import pages
24 from .v3.pdf_export import pdf_export
25 from .v3.push_notifications import sent_push_notifications
26 from .v3.regions import regions, liveregions, hiddenregions, pushnew
27 from .v3.offers import offers
28 from .v3.single_page import single_page
29
30 urlpatterns = [
31 url(r"^regions/?$", regions, name="api_regions"),
32 url(r"^regions/live/?$", liveregions, name="api_regions_live"),
33 url(r"^regions/hidden/?$", hiddenregions, name="api_regions_hidden"),
34 url(r"^regions/pushnew/?$", pushnew, name="api_pushnew"),
35 url(
36 r"^(?P<region_slug>[-\w]+)/",
37 include(
38 [
39 url(r"^languages/?$", languages, name="api_languages"),
40 url(r"^(?:offers|extras)/?$", offers, name="api_offers"),
41 url(
42 r"^(?P<language_slug>[-\w]+)/",
43 include(
44 [
45 url(r"^pages/?$", pages, name="api_pages"),
46 url(r"^locations/?$", locations, name="api_locations"),
47 url(r"^events/?$", events, name="api_events"),
48 url(
49 r"^page/?$",
50 single_page,
51 name="api_single_page",
52 ),
53 url(
54 r"^pdf/?$",
55 pdf_export,
56 name="api_pdf_export",
57 ),
58 url(
59 r"^sent_push_notifications/?$",
60 sent_push_notifications,
61 name="api_sent_push_notifications",
62 ),
63 url(
64 r"^(?:imprint|disclaimer)/?$",
65 imprint,
66 name="api_imprint",
67 ),
68 url(r"^(?:offers|extras)/?$", offers, name="api_offers"),
69 url(
70 r"^feedback/",
71 include(
72 [
73 url(
74 r"^$",
75 legacy_feedback_endpoint.legacy_feedback_endpoint,
76 name="api_legacy_feedback_endpoint",
77 ),
78 url(
79 r"^categories/?$",
80 region_feedback.region_feedback,
81 name="api_region_feedback",
82 ),
83 url(
84 r"^page/?$",
85 page_feedback.page_feedback,
86 name="api_page_feedback",
87 ),
88 url(
89 r"^poi/?$",
90 poi_feedback.poi_feedback,
91 name="api_poi_feedback",
92 ),
93 url(
94 r"^event/?$",
95 event_feedback.event_feedback,
96 name="api_event_feedback",
97 ),
98 url(
99 r"^events/?$",
100 event_list_feedback.event_list_feedback,
101 name="api_event_list_feedback",
102 ),
103 url(
104 r"^imprint-page/?$",
105 imprint_page_feedback.imprint_page_feedback,
106 name="api_imprint_page_feedbacks",
107 ),
108 url(
109 r"^map/?$",
110 map_feedback.map_feedback,
111 name="api_map_feedback",
112 ),
113 url(
114 r"^search/?$",
115 search_result_feedback.search_result_feedback,
116 name="api_search_result_feedback",
117 ),
118 url(
119 r"^(?:extras|offers)/?$",
120 offer_list_feedback.offer_list_feedback,
121 name="api_offer_list_feedback",
122 ),
123 url(
124 r"^(?:extra|offer)/?$",
125 offer_feedback.offer_feedback,
126 name="api_offer_feedback",
127 ),
128 ]
129 ),
130 ),
131 ]
132 ),
133 ),
134 ]
135 ),
136 ),
137 ]
138
[end of src/api/urls.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/api/urls.py b/src/api/urls.py
--- a/src/api/urls.py
+++ b/src/api/urls.py
@@ -27,11 +27,7 @@
from .v3.offers import offers
from .v3.single_page import single_page
-urlpatterns = [
- url(r"^regions/?$", regions, name="api_regions"),
- url(r"^regions/live/?$", liveregions, name="api_regions_live"),
- url(r"^regions/hidden/?$", hiddenregions, name="api_regions_hidden"),
- url(r"^regions/pushnew/?$", pushnew, name="api_pushnew"),
+api_urlpatterns = [
url(
r"^(?P<region_slug>[-\w]+)/",
include(
@@ -135,3 +131,18 @@
),
),
]
+
+region_api_urlpatterns = [
+ url(r"^$", regions, name="api_regions"),
+ url(r"^live/?$", liveregions, name="api_regions_live"),
+ url(r"^hidden/?$", hiddenregions, name="api_regions_hidden"),
+ url(r"^pushnew/?$", pushnew, name="api_pushnew"),
+]
+
+
+urlpatterns = [
+ url(r"^api/", include(api_urlpatterns)),
+ url(r"^api/regions/", include(region_api_urlpatterns)),
+ url(r"^wp-json/extensions/v3/", include(api_urlpatterns)),
+ url(r"^wp-json/extensions/v3/sites/", include(region_api_urlpatterns)),
+]
diff --git a/src/backend/urls.py b/src/backend/urls.py
--- a/src/backend/urls.py
+++ b/src/backend/urls.py
@@ -22,7 +22,7 @@
urlpatterns = [
- url(r"^api/", include("api.urls")),
+ url(r"^", include("api.urls")),
url(r"^i18n/", include("django.conf.urls.i18n")),
]
| {"golden_diff": "diff --git a/src/api/urls.py b/src/api/urls.py\n--- a/src/api/urls.py\n+++ b/src/api/urls.py\n@@ -27,11 +27,7 @@\n from .v3.offers import offers\n from .v3.single_page import single_page\n \n-urlpatterns = [\n- url(r\"^regions/?$\", regions, name=\"api_regions\"),\n- url(r\"^regions/live/?$\", liveregions, name=\"api_regions_live\"),\n- url(r\"^regions/hidden/?$\", hiddenregions, name=\"api_regions_hidden\"),\n- url(r\"^regions/pushnew/?$\", pushnew, name=\"api_pushnew\"),\n+api_urlpatterns = [\n url(\n r\"^(?P<region_slug>[-\\w]+)/\",\n include(\n@@ -135,3 +131,18 @@\n ),\n ),\n ]\n+\n+region_api_urlpatterns = [\n+ url(r\"^$\", regions, name=\"api_regions\"),\n+ url(r\"^live/?$\", liveregions, name=\"api_regions_live\"),\n+ url(r\"^hidden/?$\", hiddenregions, name=\"api_regions_hidden\"),\n+ url(r\"^pushnew/?$\", pushnew, name=\"api_pushnew\"),\n+]\n+\n+\n+urlpatterns = [\n+ url(r\"^api/\", include(api_urlpatterns)),\n+ url(r\"^api/regions/\", include(region_api_urlpatterns)),\n+ url(r\"^wp-json/extensions/v3/\", include(api_urlpatterns)),\n+ url(r\"^wp-json/extensions/v3/sites/\", include(region_api_urlpatterns)),\n+]\ndiff --git a/src/backend/urls.py b/src/backend/urls.py\n--- a/src/backend/urls.py\n+++ b/src/backend/urls.py\n@@ -22,7 +22,7 @@\n \n \n urlpatterns = [\n- url(r\"^api/\", include(\"api.urls\")),\n+ url(r\"^\", include(\"api.urls\")),\n url(r\"^i18n/\", include(\"django.conf.urls.i18n\")),\n ]\n", "issue": "Implement WordPress API endpoint URL patterns\n### Motivation\r\nWith Django we decided to introduce a new, clean API URL pattern. However, the apps do not use this API yet. We already have exports of the content and the app team could start end to end testing, as soon as the app can interface with the CMS.\r\n\r\n\r\n### Proposed Solution\r\nWe need to implement the URL pattern defined in https://github.com/Integreat/cms/wiki/REST-APIv3-Documentation\r\nThe WordPress URL pattern should be available in addition to the current (clean) URLs, so that the app team can switch over.\r\n\r\nPlease test the integration with the web app by [changing the back end URL](https://github.com/Integreat/cms/wiki/REST-APIv3-Documentation#test-api-with-webapp) to the Django system. 
If you need help finding issues or interpreting error messages, feel free to contact the app team for help.\r\n\r\n### Alternatives\r\nNone, needs to be done.\r\n\n", "before_files": [{"content": "\"\"\"\nDjango URL dispatcher.\nDelegates the following namespaces:\n\n* ``/api/`` to :mod:`api.urls`\n\n* ``/admin/`` to :meth:`django.contrib.admin.ModelAdmin.get_urls`\n\n* ``/i18n/`` to :mod:`django.conf.urls.i18n`\n\n* ``/sitemap.xml`` and ``/<region_slug>/<language_slug>/sitemap.xml`` to :mod:`sitemap.urls`\n\n* ``/`` to :mod:`cms.urls`\n\nAdditionally, the error handlers in :mod:`cms.views.error_handler` are referenced here (see :doc:`ref/urls`).\n\nFor more information on this file, see :doc:`topics/http/urls`.\n\"\"\"\nfrom django.conf.urls import include, url\nfrom django.conf import settings\nfrom django.contrib import admin\n\n\nurlpatterns = [\n url(r\"^api/\", include(\"api.urls\")),\n url(r\"^i18n/\", include(\"django.conf.urls.i18n\")),\n]\n\n# The admin/endpoint is only activated if the system is in debug mode.\nif settings.DEBUG:\n urlpatterns.append(url(r\"^admin/\", admin.site.urls))\n\n# Unfortunatly we need to do this in such way, as the admin endpoint needs to be added before the endpoints of the other apps.\nurlpatterns += [\n url(r\"^\", include(\"sitemap.urls\")),\n url(r\"^\", include(\"cms.urls\")),\n]\n\nhandler400 = \"cms.views.error_handler.handler400\"\nhandler403 = \"cms.views.error_handler.handler403\"\nhandler404 = \"cms.views.error_handler.handler404\"\nhandler500 = \"cms.views.error_handler.handler500\"\n", "path": "src/backend/urls.py"}, {"content": "\"\"\"\nExpansion of API-Endpoints for the CMS\n\"\"\"\nfrom django.conf.urls import include, url\n\nfrom .v3.events import events\nfrom .v3.feedback import (\n page_feedback,\n search_result_feedback,\n region_feedback,\n offer_feedback,\n offer_list_feedback,\n event_list_feedback,\n event_feedback,\n poi_feedback,\n map_feedback,\n imprint_page_feedback,\n legacy_feedback_endpoint,\n)\nfrom .v3.imprint import imprint\nfrom .v3.languages import languages\nfrom .v3.locations import locations\nfrom .v3.pages import pages\nfrom .v3.pdf_export import pdf_export\nfrom .v3.push_notifications import sent_push_notifications\nfrom .v3.regions import regions, liveregions, hiddenregions, pushnew\nfrom .v3.offers import offers\nfrom .v3.single_page import single_page\n\nurlpatterns = [\n url(r\"^regions/?$\", regions, name=\"api_regions\"),\n url(r\"^regions/live/?$\", liveregions, name=\"api_regions_live\"),\n url(r\"^regions/hidden/?$\", hiddenregions, name=\"api_regions_hidden\"),\n url(r\"^regions/pushnew/?$\", pushnew, name=\"api_pushnew\"),\n url(\n r\"^(?P<region_slug>[-\\w]+)/\",\n include(\n [\n url(r\"^languages/?$\", languages, name=\"api_languages\"),\n url(r\"^(?:offers|extras)/?$\", offers, name=\"api_offers\"),\n url(\n r\"^(?P<language_slug>[-\\w]+)/\",\n include(\n [\n url(r\"^pages/?$\", pages, name=\"api_pages\"),\n url(r\"^locations/?$\", locations, name=\"api_locations\"),\n url(r\"^events/?$\", events, name=\"api_events\"),\n url(\n r\"^page/?$\",\n single_page,\n name=\"api_single_page\",\n ),\n url(\n r\"^pdf/?$\",\n pdf_export,\n name=\"api_pdf_export\",\n ),\n url(\n r\"^sent_push_notifications/?$\",\n sent_push_notifications,\n name=\"api_sent_push_notifications\",\n ),\n url(\n r\"^(?:imprint|disclaimer)/?$\",\n imprint,\n name=\"api_imprint\",\n ),\n url(r\"^(?:offers|extras)/?$\", offers, name=\"api_offers\"),\n url(\n r\"^feedback/\",\n include(\n [\n url(\n r\"^$\",\n 
legacy_feedback_endpoint.legacy_feedback_endpoint,\n name=\"api_legacy_feedback_endpoint\",\n ),\n url(\n r\"^categories/?$\",\n region_feedback.region_feedback,\n name=\"api_region_feedback\",\n ),\n url(\n r\"^page/?$\",\n page_feedback.page_feedback,\n name=\"api_page_feedback\",\n ),\n url(\n r\"^poi/?$\",\n poi_feedback.poi_feedback,\n name=\"api_poi_feedback\",\n ),\n url(\n r\"^event/?$\",\n event_feedback.event_feedback,\n name=\"api_event_feedback\",\n ),\n url(\n r\"^events/?$\",\n event_list_feedback.event_list_feedback,\n name=\"api_event_list_feedback\",\n ),\n url(\n r\"^imprint-page/?$\",\n imprint_page_feedback.imprint_page_feedback,\n name=\"api_imprint_page_feedbacks\",\n ),\n url(\n r\"^map/?$\",\n map_feedback.map_feedback,\n name=\"api_map_feedback\",\n ),\n url(\n r\"^search/?$\",\n search_result_feedback.search_result_feedback,\n name=\"api_search_result_feedback\",\n ),\n url(\n r\"^(?:extras|offers)/?$\",\n offer_list_feedback.offer_list_feedback,\n name=\"api_offer_list_feedback\",\n ),\n url(\n r\"^(?:extra|offer)/?$\",\n offer_feedback.offer_feedback,\n name=\"api_offer_feedback\",\n ),\n ]\n ),\n ),\n ]\n ),\n ),\n ]\n ),\n ),\n]\n", "path": "src/api/urls.py"}]} | 2,322 | 424 |
gh_patches_debug_20523 | rasdani/github-patches | git_diff | DataDog__dd-trace-py-507 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
"cannot decode v0.3 services payload" error when tracing aiopg
I have a Python application that uses aiopg and is monitored by Datadog. On startup it sends a bad frame to `trace-agent`, and after that everything works fine.
Versions:
- datadog-agent: 6.2.1
- ddtrace-py: 0.12.1
- aiopg: 0.14.0
Here is a minimalist test case:
```python
import asyncio
import aiopg
from ddtrace import patch
patch(aiopg=True)
async def main():
async with aiopg.connect(host=None) as db:
pass
while True:
await asyncio.sleep(0.1)
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
```
This logs the following error:
```
failed_to_send services to Agent: HTTP error status 400, reason Bad Request, message Content-Type: text/plain; charset=utf-8
X-Content-Type-Options: nosniff
Date: Mon, 18 Jun 2018 15:25:18 GMT
Content-Length: 59
```
And then `trace-agent` reports:
```
trace-agent[4437]: 2018-06-18 15:31:16 ERROR (receiver.go:275) - cannot decode v0.3 services payload: msgp: attempted to decode type "nil" with method for "str"
```
I believe this is related to https://github.com/DataDog/datadog-trace-agent/issues/350.
</issue>
<code>
[start of ddtrace/contrib/aiopg/connection.py]
1 import asyncio
2 import wrapt
3
4 from aiopg.utils import _ContextManager
5
6 from .. import dbapi
7 from ...ext import sql
8 from ...pin import Pin
9
10
11 class AIOTracedCursor(wrapt.ObjectProxy):
12 """ TracedCursor wraps a psql cursor and traces it's queries. """
13
14 def __init__(self, cursor, pin):
15 super(AIOTracedCursor, self).__init__(cursor)
16 pin.onto(self)
17 name = pin.app or 'sql'
18 self._datadog_name = '%s.query' % name
19
20 @asyncio.coroutine
21 def _trace_method(self, method, resource, extra_tags, *args, **kwargs):
22 pin = Pin.get_from(self)
23 if not pin or not pin.enabled():
24 result = yield from method(*args, **kwargs) # noqa: E999
25 return result
26 service = pin.service
27
28 with pin.tracer.trace(self._datadog_name, service=service,
29 resource=resource) as s:
30 s.span_type = sql.TYPE
31 s.set_tag(sql.QUERY, resource)
32 s.set_tags(pin.tags)
33 s.set_tags(extra_tags)
34
35 try:
36 result = yield from method(*args, **kwargs)
37 return result
38 finally:
39 s.set_metric("db.rowcount", self.rowcount)
40
41 @asyncio.coroutine
42 def executemany(self, query, *args, **kwargs):
43 # FIXME[matt] properly handle kwargs here. arg names can be different
44 # with different libs.
45 result = yield from self._trace_method(
46 self.__wrapped__.executemany, query, {'sql.executemany': 'true'},
47 query, *args, **kwargs) # noqa: E999
48 return result
49
50 @asyncio.coroutine
51 def execute(self, query, *args, **kwargs):
52 result = yield from self._trace_method(
53 self.__wrapped__.execute, query, {}, query, *args, **kwargs)
54 return result
55
56 @asyncio.coroutine
57 def callproc(self, proc, args):
58 result = yield from self._trace_method(
59 self.__wrapped__.callproc, proc, {}, proc, args) # noqa: E999
60 return result
61
62
63 class AIOTracedConnection(wrapt.ObjectProxy):
64 """ TracedConnection wraps a Connection with tracing code. """
65
66 def __init__(self, conn):
67 super(AIOTracedConnection, self).__init__(conn)
68 name = dbapi._get_vendor(conn)
69 Pin(service=name, app=name).onto(self)
70
71 def cursor(self, *args, **kwargs):
72 # unfortunately we also need to patch this method as otherwise "self"
73 # ends up being the aiopg connection object
74 coro = self._cursor(*args, **kwargs)
75 return _ContextManager(coro)
76
77 @asyncio.coroutine
78 def _cursor(self, *args, **kwargs):
79 cursor = yield from self.__wrapped__._cursor(*args, **kwargs) # noqa: E999
80 pin = Pin.get_from(self)
81 if not pin:
82 return cursor
83 return AIOTracedCursor(cursor, pin)
84
[end of ddtrace/contrib/aiopg/connection.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/ddtrace/contrib/aiopg/connection.py b/ddtrace/contrib/aiopg/connection.py
--- a/ddtrace/contrib/aiopg/connection.py
+++ b/ddtrace/contrib/aiopg/connection.py
@@ -4,8 +4,8 @@
from aiopg.utils import _ContextManager
from .. import dbapi
-from ...ext import sql
from ...pin import Pin
+from ...ext import sql, AppTypes
class AIOTracedCursor(wrapt.ObjectProxy):
@@ -63,10 +63,11 @@
class AIOTracedConnection(wrapt.ObjectProxy):
""" TracedConnection wraps a Connection with tracing code. """
- def __init__(self, conn):
+ def __init__(self, conn, pin=None):
super(AIOTracedConnection, self).__init__(conn)
name = dbapi._get_vendor(conn)
- Pin(service=name, app=name).onto(self)
+ db_pin = pin or Pin(service=name, app=name, app_type=AppTypes.db)
+ db_pin.onto(self)
def cursor(self, *args, **kwargs):
# unfortunately we also need to patch this method as otherwise "self"
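
Registering the pin with `app_type=AppTypes.db` fills in the field the agent's v0.3 services decoder was choking on; the patched constructor additionally accepts an optional `Pin`, so callers can supply their own service metadata. A rough sketch of how that hook could be used - the service name and the wrapper function are illustrative, not part of the patch:

```python
from ddtrace import Pin
from ddtrace.ext import AppTypes
from ddtrace.contrib.aiopg.connection import AIOTracedConnection


def trace_connection(raw_conn):
    """Wrap an existing aiopg connection with a custom service pin (names are placeholders)."""
    custom_pin = Pin(service="orders-postgres", app="postgres", app_type=AppTypes.db)
    return AIOTracedConnection(raw_conn, pin=custom_pin)
```
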
| {"golden_diff": "diff --git a/ddtrace/contrib/aiopg/connection.py b/ddtrace/contrib/aiopg/connection.py\n--- a/ddtrace/contrib/aiopg/connection.py\n+++ b/ddtrace/contrib/aiopg/connection.py\n@@ -4,8 +4,8 @@\n from aiopg.utils import _ContextManager\n \n from .. import dbapi\n-from ...ext import sql\n from ...pin import Pin\n+from ...ext import sql, AppTypes\n \n \n class AIOTracedCursor(wrapt.ObjectProxy):\n@@ -63,10 +63,11 @@\n class AIOTracedConnection(wrapt.ObjectProxy):\n \"\"\" TracedConnection wraps a Connection with tracing code. \"\"\"\n \n- def __init__(self, conn):\n+ def __init__(self, conn, pin=None):\n super(AIOTracedConnection, self).__init__(conn)\n name = dbapi._get_vendor(conn)\n- Pin(service=name, app=name).onto(self)\n+ db_pin = pin or Pin(service=name, app=name, app_type=AppTypes.db)\n+ db_pin.onto(self)\n \n def cursor(self, *args, **kwargs):\n # unfortunately we also need to patch this method as otherwise \"self\"\n", "issue": "\"cannot decode v0.3 services payload\" error when tracing aiopg\nI have a Python application using aiopg and monitored by Datadog. When starting, it sends a bad frame to `trace-agent` and then everything goes fine.\r\n\r\nVersions:\r\n- datadog-agent: 6.2.1\r\n- ddtrace-py: 0.12.1\r\n- aiopg: 0.14.0\r\n\r\nHere is a minimalist test case:\r\n\r\n```python\r\nimport asyncio\r\nimport aiopg\r\n\r\nfrom ddtrace import patch\r\n\r\npatch(aiopg=True)\r\n\r\n\r\nasync def main():\r\n async with aiopg.connect(host=None) as db:\r\n pass\r\n\r\n while True:\r\n await asyncio.sleep(0.1)\r\n\r\n\r\nloop = asyncio.get_event_loop()\r\nloop.run_until_complete(main())\r\n```\r\n\r\nThis logs the following error:\r\n```\r\nfailed_to_send services to Agent: HTTP error status 400, reason Bad Request, message Content-Type: text/plain; charset=utf-8\r\nX-Content-Type-Options: nosniff\r\nDate: Mon, 18 Jun 2018 15:25:18 GMT\r\nContent-Length: 59\r\n```\r\n\r\nAnd then `trace-agent` reports:\r\n```\r\ntrace-agent[4437]: 2018-06-18 15:31:16 ERROR (receiver.go:275) - cannot decode v0.3 services payload: msgp: attempted to decode type \"nil\" with method for \"str\"\r\n```\r\n\r\nI believe this is related to https://github.com/DataDog/datadog-trace-agent/issues/350.\n", "before_files": [{"content": "import asyncio\nimport wrapt\n\nfrom aiopg.utils import _ContextManager\n\nfrom .. import dbapi\nfrom ...ext import sql\nfrom ...pin import Pin\n\n\nclass AIOTracedCursor(wrapt.ObjectProxy):\n \"\"\" TracedCursor wraps a psql cursor and traces it's queries. \"\"\"\n\n def __init__(self, cursor, pin):\n super(AIOTracedCursor, self).__init__(cursor)\n pin.onto(self)\n name = pin.app or 'sql'\n self._datadog_name = '%s.query' % name\n\n @asyncio.coroutine\n def _trace_method(self, method, resource, extra_tags, *args, **kwargs):\n pin = Pin.get_from(self)\n if not pin or not pin.enabled():\n result = yield from method(*args, **kwargs) # noqa: E999\n return result\n service = pin.service\n\n with pin.tracer.trace(self._datadog_name, service=service,\n resource=resource) as s:\n s.span_type = sql.TYPE\n s.set_tag(sql.QUERY, resource)\n s.set_tags(pin.tags)\n s.set_tags(extra_tags)\n\n try:\n result = yield from method(*args, **kwargs)\n return result\n finally:\n s.set_metric(\"db.rowcount\", self.rowcount)\n\n @asyncio.coroutine\n def executemany(self, query, *args, **kwargs):\n # FIXME[matt] properly handle kwargs here. 
arg names can be different\n # with different libs.\n result = yield from self._trace_method(\n self.__wrapped__.executemany, query, {'sql.executemany': 'true'},\n query, *args, **kwargs) # noqa: E999\n return result\n\n @asyncio.coroutine\n def execute(self, query, *args, **kwargs):\n result = yield from self._trace_method(\n self.__wrapped__.execute, query, {}, query, *args, **kwargs)\n return result\n\n @asyncio.coroutine\n def callproc(self, proc, args):\n result = yield from self._trace_method(\n self.__wrapped__.callproc, proc, {}, proc, args) # noqa: E999\n return result\n\n\nclass AIOTracedConnection(wrapt.ObjectProxy):\n \"\"\" TracedConnection wraps a Connection with tracing code. \"\"\"\n\n def __init__(self, conn):\n super(AIOTracedConnection, self).__init__(conn)\n name = dbapi._get_vendor(conn)\n Pin(service=name, app=name).onto(self)\n\n def cursor(self, *args, **kwargs):\n # unfortunately we also need to patch this method as otherwise \"self\"\n # ends up being the aiopg connection object\n coro = self._cursor(*args, **kwargs)\n return _ContextManager(coro)\n\n @asyncio.coroutine\n def _cursor(self, *args, **kwargs):\n cursor = yield from self.__wrapped__._cursor(*args, **kwargs) # noqa: E999\n pin = Pin.get_from(self)\n if not pin:\n return cursor\n return AIOTracedCursor(cursor, pin)\n", "path": "ddtrace/contrib/aiopg/connection.py"}]} | 1,767 | 268 |
gh_patches_debug_31165 | rasdani/github-patches | git_diff | napari__napari-2837 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Using arrow keys to switch selected layer skips one layer
## 🐛 Bug
If a layer is selected and you press the up/down arrow keys, instead of switching to the next/previous layer, it will skip over one layer.
## To Reproduce
Open napari, create a bunch of layers, select one of them and press `up` or `down`.
## Expected behavior
The adjacent layer should be selected.
## Environment
```yaml
napari: 0.4.9rc0
Platform: Linux-5.12.8-arch1-1-x86_64-with-glibc2.33
System: Arch Linux
Python: 3.9.5 (default, May 24 2021, 12:50:35) [GCC 11.1.0]
Qt: 5.15.2
PyQt5: 5.15.4
NumPy: 1.20.3
SciPy: 1.6.3
Dask: 2020.12.0
VisPy: 0.6.6
OpenGL:
- GL version: 4.6.0 NVIDIA 465.31
- MAX_TEXTURE_SIZE: 16384
Screens:
- screen 1: resolution 1920x1080, scale 1.0
- screen 2: resolution 1920x1200, scale 1.0
Plugins:
- animation: 0.0.2
- console: 0.0.3
- mrcfile-reader: 0.1.2
- napari-em-reader: 0.1.0
- napari-properties-plotter: 0.1.dev2+g2734460.d20210520
- ndtiffs: 0.1.1
- scikit-image: 0.4.8
- svg: 0.1.5
```
</issue>
<code>
[start of napari/components/_viewer_key_bindings.py]
1 from ..utils.action_manager import action_manager
2 from ..utils.settings import SETTINGS
3 from ..utils.theme import available_themes
4 from ..utils.translations import trans
5 from .viewer_model import ViewerModel
6
7
8 def register_viewer_action(description):
9 """
10 Convenient decorator to register an action with the current ViewerModel
11
12 It will use the function name as the action name. We force the description
13 to be given instead of function docstring for translation purpose.
14 """
15
16 def _inner(func):
17 name = 'napari:' + func.__name__
18 action_manager.register_action(
19 name=name,
20 command=func,
21 description=description,
22 keymapprovider=ViewerModel,
23 )
24 return func
25
26 return _inner
27
28
29 @register_viewer_action(trans._("Reset scroll."))
30 def reset_scroll_progress(viewer):
31
32 # on key press
33 viewer.dims._scroll_progress = 0
34 yield
35
36 # on key release
37 viewer.dims._scroll_progress = 0
38
39
40 reset_scroll_progress.__doc__ = trans._("Reset dims scroll progress")
41
42
43 @register_viewer_action(trans._("Toggle ndisplay."))
44 def toggle_ndisplay(viewer):
45 if viewer.dims.ndisplay == 3:
46 viewer.dims.ndisplay = 2
47 else:
48 viewer.dims.ndisplay = 3
49
50
51 # Making this an action makes vispy really unhappy during the tests
52 # on mac only with:
53 # ```
54 # RuntimeError: wrapped C/C++ object of type CanvasBackendDesktop has been deleted
55 # ```
56 @register_viewer_action(trans._("Toggle theme."))
57 def toggle_theme(viewer):
58 """Toggle theme for viewer"""
59 themes = available_themes()
60 current_theme = SETTINGS.appearance.theme
61 idx = themes.index(current_theme)
62 idx += 1
63 if idx == len(themes):
64 idx = 0
65
66 SETTINGS.appearance.theme = themes[idx]
67
68
69 @register_viewer_action(trans._("Reset view to original state."))
70 def reset_view(viewer):
71 viewer.reset_view()
72
73
74 @register_viewer_action(trans._("Increment dimensions slider to the left."))
75 def increment_dims_left(viewer):
76 viewer.dims._increment_dims_left()
77
78
79 @register_viewer_action(trans._("Increment dimensions slider to the right."))
80 def increment_dims_right(viewer):
81 viewer.dims._increment_dims_right()
82
83
84 @register_viewer_action(trans._("Move focus of dimensions slider up."))
85 def focus_axes_up(viewer):
86 viewer.dims._focus_up()
87
88
89 @register_viewer_action(trans._("Move focus of dimensions slider down."))
90 def focus_axes_down(viewer):
91 viewer.dims._focus_down()
92
93
94 @register_viewer_action(
95 trans._("Change order of the visible axes, e.g. [0, 1, 2] -> [2, 0, 1]."),
96 )
97 def roll_axes(viewer):
98 viewer.dims._roll()
99
100
101 @register_viewer_action(
102 trans._(
103 "Transpose order of the last two visible axes, e.g. [0, 1] -> [1, 0]."
104 ),
105 )
106 def transpose_axes(viewer):
107 viewer.dims._transpose()
108
109
110 @register_viewer_action(trans._("Remove selected layers."))
111 def remove_selected(viewer):
112 viewer.layers.remove_selected()
113
114
115 @register_viewer_action(trans._("Selected all layers."))
116 def select_all(viewer):
117 viewer.layers.select_all()
118
119
120 @register_viewer_action(trans._("Remove all layers."))
121 def remove_all_layers(viewer):
122 viewer.layers.clear()
123
124
125 @register_viewer_action(trans._("Select layer above."))
126 def select_layer_above(viewer):
127 viewer.layers.select_next()
128
129
130 @register_viewer_action(trans._("Select layer below."))
131 def select_layer_below(viewer):
132 viewer.layers.select_previous()
133
134
135 @register_viewer_action(trans._("Also select layer above."))
136 def also_select_layer_above(viewer):
137 viewer.layers.select_next(shift=True)
138
139
140 @register_viewer_action(trans._("Also select layer below."))
141 def also_select_layer_below(viewer):
142 viewer.layers.select_previous(shift=True)
143
144
145 @register_viewer_action(trans._("Toggle grid mode."))
146 def toggle_grid(viewer):
147 viewer.grid.enabled = not viewer.grid.enabled
148
149
150 @register_viewer_action(trans._("Toggle visibility of selected layers"))
151 def toggle_selected_visibility(viewer):
152 viewer.layers.toggle_selected_visibility()
153
[end of napari/components/_viewer_key_bindings.py]
[start of napari/utils/shortcuts.py]
1 default_shortcuts = {
2 'napari:toggle_console_visibility': ['Control-Shift-C'],
3 'napari:reset_scroll_progress': ['Control'],
4 'napari:toggle_ndisplay': ['Control-Y'],
5 'napari:toggle_theme': ['Control-Shift-T'],
6 'napari:reset_view': ['Control-R'],
7 'napari:increment_dims_left': ['Left'],
8 'napari:increment_dims_right': ['Right'],
9 'napari:focus_axes_up': ['Alt-Up'],
10 'napari:focus_axes_down': ['Alt-Down'],
11 'napari:roll_axes': ['Control-E'],
12 'napari:transpose_axes': ['Control-T'],
13 'napari:remove_selected': ['Control-Backspace', 'Control-Delete'],
14 'napari:select_all': ['Control-A'],
15 'napari:remove_all_layers': [
16 'Control-Shift-Backspace',
17 'Control-Shift-Delete',
18 ],
19 'napari:select_layer_above': ['Up'],
20 'napari:select_layer_below': ['Down'],
21 'napari:also_select_layer_above': ['Shift-Up'],
22 'napari:also_select_layer_below': ['Shift-Down'],
23 'napari:toggle_grid': ['Control-G'],
24 'napari:toggle_selected_visibility': ['V'],
25 }
26
[end of napari/utils/shortcuts.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/napari/components/_viewer_key_bindings.py b/napari/components/_viewer_key_bindings.py
--- a/napari/components/_viewer_key_bindings.py
+++ b/napari/components/_viewer_key_bindings.py
@@ -107,41 +107,6 @@
viewer.dims._transpose()
-@register_viewer_action(trans._("Remove selected layers."))
-def remove_selected(viewer):
- viewer.layers.remove_selected()
-
-
-@register_viewer_action(trans._("Selected all layers."))
-def select_all(viewer):
- viewer.layers.select_all()
-
-
-@register_viewer_action(trans._("Remove all layers."))
-def remove_all_layers(viewer):
- viewer.layers.clear()
-
-
-@register_viewer_action(trans._("Select layer above."))
-def select_layer_above(viewer):
- viewer.layers.select_next()
-
-
-@register_viewer_action(trans._("Select layer below."))
-def select_layer_below(viewer):
- viewer.layers.select_previous()
-
-
-@register_viewer_action(trans._("Also select layer above."))
-def also_select_layer_above(viewer):
- viewer.layers.select_next(shift=True)
-
-
-@register_viewer_action(trans._("Also select layer below."))
-def also_select_layer_below(viewer):
- viewer.layers.select_previous(shift=True)
-
-
@register_viewer_action(trans._("Toggle grid mode."))
def toggle_grid(viewer):
viewer.grid.enabled = not viewer.grid.enabled
diff --git a/napari/utils/shortcuts.py b/napari/utils/shortcuts.py
--- a/napari/utils/shortcuts.py
+++ b/napari/utils/shortcuts.py
@@ -10,16 +10,6 @@
'napari:focus_axes_down': ['Alt-Down'],
'napari:roll_axes': ['Control-E'],
'napari:transpose_axes': ['Control-T'],
- 'napari:remove_selected': ['Control-Backspace', 'Control-Delete'],
- 'napari:select_all': ['Control-A'],
- 'napari:remove_all_layers': [
- 'Control-Shift-Backspace',
- 'Control-Shift-Delete',
- ],
- 'napari:select_layer_above': ['Up'],
- 'napari:select_layer_below': ['Down'],
- 'napari:also_select_layer_above': ['Shift-Up'],
- 'napari:also_select_layer_below': ['Shift-Down'],
'napari:toggle_grid': ['Control-G'],
'napari:toggle_selected_visibility': ['V'],
}
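
Dropping these entries leaves a single binding per shortcut - the same layer-selection keys appear to be handled by the layer list itself, so registering them on the viewer as well made each press fire twice and skip a layer. If a viewer-level binding is still wanted it can be added per instance; a sketch using napari's `bind_key` API (the key combination here is arbitrary):

```python
import napari

viewer = napari.Viewer()


@viewer.bind_key("Shift-Alt-Up", overwrite=True)
def select_layer_above(viewer):
    # Mirror the removed default action: move the layer selection up by one.
    viewer.layers.select_next()
```
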
| {"golden_diff": "diff --git a/napari/components/_viewer_key_bindings.py b/napari/components/_viewer_key_bindings.py\n--- a/napari/components/_viewer_key_bindings.py\n+++ b/napari/components/_viewer_key_bindings.py\n@@ -107,41 +107,6 @@\n viewer.dims._transpose()\n \n \n-@register_viewer_action(trans._(\"Remove selected layers.\"))\n-def remove_selected(viewer):\n- viewer.layers.remove_selected()\n-\n-\n-@register_viewer_action(trans._(\"Selected all layers.\"))\n-def select_all(viewer):\n- viewer.layers.select_all()\n-\n-\n-@register_viewer_action(trans._(\"Remove all layers.\"))\n-def remove_all_layers(viewer):\n- viewer.layers.clear()\n-\n-\n-@register_viewer_action(trans._(\"Select layer above.\"))\n-def select_layer_above(viewer):\n- viewer.layers.select_next()\n-\n-\n-@register_viewer_action(trans._(\"Select layer below.\"))\n-def select_layer_below(viewer):\n- viewer.layers.select_previous()\n-\n-\n-@register_viewer_action(trans._(\"Also select layer above.\"))\n-def also_select_layer_above(viewer):\n- viewer.layers.select_next(shift=True)\n-\n-\n-@register_viewer_action(trans._(\"Also select layer below.\"))\n-def also_select_layer_below(viewer):\n- viewer.layers.select_previous(shift=True)\n-\n-\n @register_viewer_action(trans._(\"Toggle grid mode.\"))\n def toggle_grid(viewer):\n viewer.grid.enabled = not viewer.grid.enabled\ndiff --git a/napari/utils/shortcuts.py b/napari/utils/shortcuts.py\n--- a/napari/utils/shortcuts.py\n+++ b/napari/utils/shortcuts.py\n@@ -10,16 +10,6 @@\n 'napari:focus_axes_down': ['Alt-Down'],\n 'napari:roll_axes': ['Control-E'],\n 'napari:transpose_axes': ['Control-T'],\n- 'napari:remove_selected': ['Control-Backspace', 'Control-Delete'],\n- 'napari:select_all': ['Control-A'],\n- 'napari:remove_all_layers': [\n- 'Control-Shift-Backspace',\n- 'Control-Shift-Delete',\n- ],\n- 'napari:select_layer_above': ['Up'],\n- 'napari:select_layer_below': ['Down'],\n- 'napari:also_select_layer_above': ['Shift-Up'],\n- 'napari:also_select_layer_below': ['Shift-Down'],\n 'napari:toggle_grid': ['Control-G'],\n 'napari:toggle_selected_visibility': ['V'],\n }\n", "issue": "Using arrow keys to switch selected layer skips one layer\n## \ud83d\udc1b Bug\r\n\r\nIf a layer is selected and you press the up/down arrow keys, instead of switching to the next/previous layer, it will skip over one layer.\r\n\r\n## To Reproduce\r\n\r\nOpen napari, create create a bunch of layers, select one of them and press `up` or `down`.\r\n\r\n## Expected behavior\r\n\r\nThe adjacent layer should be selected.\r\n\r\n## Environment\r\n```yaml\r\nnapari: 0.4.9rc0\r\nPlatform: Linux-5.12.8-arch1-1-x86_64-with-glibc2.33\r\nSystem: Arch Linux\r\nPython: 3.9.5 (default, May 24 2021, 12:50:35) [GCC 11.1.0]\r\nQt: 5.15.2\r\nPyQt5: 5.15.4\r\nNumPy: 1.20.3\r\nSciPy: 1.6.3\r\nDask: 2020.12.0\r\nVisPy: 0.6.6\r\n\r\nOpenGL:\r\n- GL version: 4.6.0 NVIDIA 465.31\r\n- MAX_TEXTURE_SIZE: 16384\r\n\r\nScreens:\r\n- screen 1: resolution 1920x1080, scale 1.0\r\n- screen 2: resolution 1920x1200, scale 1.0\r\n\r\nPlugins:\r\n- animation: 0.0.2\r\n- console: 0.0.3\r\n- mrcfile-reader: 0.1.2\r\n- napari-em-reader: 0.1.0\r\n- napari-properties-plotter: 0.1.dev2+g2734460.d20210520\r\n- ndtiffs: 0.1.1\r\n- scikit-image: 0.4.8\r\n- svg: 0.1.5\r\n```\n", "before_files": [{"content": "from ..utils.action_manager import action_manager\nfrom ..utils.settings import SETTINGS\nfrom ..utils.theme import available_themes\nfrom ..utils.translations import trans\nfrom .viewer_model import ViewerModel\n\n\ndef 
register_viewer_action(description):\n \"\"\"\n Convenient decorator to register an action with the current ViewerModel\n\n It will use the function name as the action name. We force the description\n to be given instead of function docstring for translation purpose.\n \"\"\"\n\n def _inner(func):\n name = 'napari:' + func.__name__\n action_manager.register_action(\n name=name,\n command=func,\n description=description,\n keymapprovider=ViewerModel,\n )\n return func\n\n return _inner\n\n\n@register_viewer_action(trans._(\"Reset scroll.\"))\ndef reset_scroll_progress(viewer):\n\n # on key press\n viewer.dims._scroll_progress = 0\n yield\n\n # on key release\n viewer.dims._scroll_progress = 0\n\n\nreset_scroll_progress.__doc__ = trans._(\"Reset dims scroll progress\")\n\n\n@register_viewer_action(trans._(\"Toggle ndisplay.\"))\ndef toggle_ndisplay(viewer):\n if viewer.dims.ndisplay == 3:\n viewer.dims.ndisplay = 2\n else:\n viewer.dims.ndisplay = 3\n\n\n# Making this an action makes vispy really unhappy during the tests\n# on mac only with:\n# ```\n# RuntimeError: wrapped C/C++ object of type CanvasBackendDesktop has been deleted\n# ```\n@register_viewer_action(trans._(\"Toggle theme.\"))\ndef toggle_theme(viewer):\n \"\"\"Toggle theme for viewer\"\"\"\n themes = available_themes()\n current_theme = SETTINGS.appearance.theme\n idx = themes.index(current_theme)\n idx += 1\n if idx == len(themes):\n idx = 0\n\n SETTINGS.appearance.theme = themes[idx]\n\n\n@register_viewer_action(trans._(\"Reset view to original state.\"))\ndef reset_view(viewer):\n viewer.reset_view()\n\n\n@register_viewer_action(trans._(\"Increment dimensions slider to the left.\"))\ndef increment_dims_left(viewer):\n viewer.dims._increment_dims_left()\n\n\n@register_viewer_action(trans._(\"Increment dimensions slider to the right.\"))\ndef increment_dims_right(viewer):\n viewer.dims._increment_dims_right()\n\n\n@register_viewer_action(trans._(\"Move focus of dimensions slider up.\"))\ndef focus_axes_up(viewer):\n viewer.dims._focus_up()\n\n\n@register_viewer_action(trans._(\"Move focus of dimensions slider down.\"))\ndef focus_axes_down(viewer):\n viewer.dims._focus_down()\n\n\n@register_viewer_action(\n trans._(\"Change order of the visible axes, e.g. [0, 1, 2] -> [2, 0, 1].\"),\n)\ndef roll_axes(viewer):\n viewer.dims._roll()\n\n\n@register_viewer_action(\n trans._(\n \"Transpose order of the last two visible axes, e.g. 
[0, 1] -> [1, 0].\"\n ),\n)\ndef transpose_axes(viewer):\n viewer.dims._transpose()\n\n\n@register_viewer_action(trans._(\"Remove selected layers.\"))\ndef remove_selected(viewer):\n viewer.layers.remove_selected()\n\n\n@register_viewer_action(trans._(\"Selected all layers.\"))\ndef select_all(viewer):\n viewer.layers.select_all()\n\n\n@register_viewer_action(trans._(\"Remove all layers.\"))\ndef remove_all_layers(viewer):\n viewer.layers.clear()\n\n\n@register_viewer_action(trans._(\"Select layer above.\"))\ndef select_layer_above(viewer):\n viewer.layers.select_next()\n\n\n@register_viewer_action(trans._(\"Select layer below.\"))\ndef select_layer_below(viewer):\n viewer.layers.select_previous()\n\n\n@register_viewer_action(trans._(\"Also select layer above.\"))\ndef also_select_layer_above(viewer):\n viewer.layers.select_next(shift=True)\n\n\n@register_viewer_action(trans._(\"Also select layer below.\"))\ndef also_select_layer_below(viewer):\n viewer.layers.select_previous(shift=True)\n\n\n@register_viewer_action(trans._(\"Toggle grid mode.\"))\ndef toggle_grid(viewer):\n viewer.grid.enabled = not viewer.grid.enabled\n\n\n@register_viewer_action(trans._(\"Toggle visibility of selected layers\"))\ndef toggle_selected_visibility(viewer):\n viewer.layers.toggle_selected_visibility()\n", "path": "napari/components/_viewer_key_bindings.py"}, {"content": "default_shortcuts = {\n 'napari:toggle_console_visibility': ['Control-Shift-C'],\n 'napari:reset_scroll_progress': ['Control'],\n 'napari:toggle_ndisplay': ['Control-Y'],\n 'napari:toggle_theme': ['Control-Shift-T'],\n 'napari:reset_view': ['Control-R'],\n 'napari:increment_dims_left': ['Left'],\n 'napari:increment_dims_right': ['Right'],\n 'napari:focus_axes_up': ['Alt-Up'],\n 'napari:focus_axes_down': ['Alt-Down'],\n 'napari:roll_axes': ['Control-E'],\n 'napari:transpose_axes': ['Control-T'],\n 'napari:remove_selected': ['Control-Backspace', 'Control-Delete'],\n 'napari:select_all': ['Control-A'],\n 'napari:remove_all_layers': [\n 'Control-Shift-Backspace',\n 'Control-Shift-Delete',\n ],\n 'napari:select_layer_above': ['Up'],\n 'napari:select_layer_below': ['Down'],\n 'napari:also_select_layer_above': ['Shift-Up'],\n 'napari:also_select_layer_below': ['Shift-Down'],\n 'napari:toggle_grid': ['Control-G'],\n 'napari:toggle_selected_visibility': ['V'],\n}\n", "path": "napari/utils/shortcuts.py"}]} | 2,614 | 549 |
gh_patches_debug_8570 | rasdani/github-patches | git_diff | bookwyrm-social__bookwyrm-882 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Book context missing in boosts from remote posts
**Describe the bug**
In some cases, a post federates and the associated book is not shown.
**Screenshots**
Screenshot from a review where the associated book is not shown. It's a remote post and the booster is on another instance as well.

Screenshot from the original instance with the book

Screenshots from the federated timeline, where the remote review appears once with the book and once without.

**Additional context**
I will post more matching examples. In other cases it works properly - it may be that it works when the original poster is already followed from my instance, so the post is already known, but there are not enough examples yet to tell.
</issue>
<code>
[start of bookwyrm/activitypub/base_activity.py]
1 """ basics for an activitypub serializer """
2 from dataclasses import dataclass, fields, MISSING
3 from json import JSONEncoder
4
5 from django.apps import apps
6 from django.db import IntegrityError, transaction
7
8 from bookwyrm.connectors import ConnectorException, get_data
9 from bookwyrm.tasks import app
10
11
12 class ActivitySerializerError(ValueError):
13 """ routine problems serializing activitypub json """
14
15
16 class ActivityEncoder(JSONEncoder):
17 """ used to convert an Activity object into json """
18
19 def default(self, o):
20 return o.__dict__
21
22
23 @dataclass
24 class Link:
25 """ for tagging a book in a status """
26
27 href: str
28 name: str
29 type: str = "Link"
30
31
32 @dataclass
33 class Mention(Link):
34 """ a subtype of Link for mentioning an actor """
35
36 type: str = "Mention"
37
38
39 @dataclass
40 class Signature:
41 """ public key block """
42
43 creator: str
44 created: str
45 signatureValue: str
46 type: str = "RsaSignature2017"
47
48
49 def naive_parse(activity_objects, activity_json, serializer=None):
50 """ this navigates circular import issues """
51 if not serializer:
52 if activity_json.get("publicKeyPem"):
53 # ugh
54 activity_json["type"] = "PublicKey"
55 try:
56 activity_type = activity_json["type"]
57 serializer = activity_objects[activity_type]
58 except KeyError as e:
59 raise ActivitySerializerError(e)
60
61 return serializer(activity_objects=activity_objects, **activity_json)
62
63
64 @dataclass(init=False)
65 class ActivityObject:
66 """ actor activitypub json """
67
68 id: str
69 type: str
70
71 def __init__(self, activity_objects=None, **kwargs):
72 """this lets you pass in an object with fields that aren't in the
73 dataclass, which it ignores. Any field in the dataclass is required or
74 has a default value"""
75 for field in fields(self):
76 try:
77 value = kwargs[field.name]
78 if value in (None, MISSING, {}):
79 raise KeyError()
80 try:
81 is_subclass = issubclass(field.type, ActivityObject)
82 except TypeError:
83 is_subclass = False
84 # serialize a model obj
85 if hasattr(value, "to_activity"):
86 value = value.to_activity()
87 # parse a dict into the appropriate activity
88 elif is_subclass and isinstance(value, dict):
89 if activity_objects:
90 value = naive_parse(activity_objects, value)
91 else:
92 value = naive_parse(
93 activity_objects, value, serializer=field.type
94 )
95
96 except KeyError:
97 if field.default == MISSING and field.default_factory == MISSING:
98 raise ActivitySerializerError(
99 "Missing required field: %s" % field.name
100 )
101 value = field.default
102 setattr(self, field.name, value)
103
104 def to_model(self, model=None, instance=None, allow_create=True, save=True):
105 """ convert from an activity to a model instance """
106 model = model or get_model_from_type(self.type)
107
108 # only reject statuses if we're potentially creating them
109 if (
110 allow_create
111 and hasattr(model, "ignore_activity")
112 and model.ignore_activity(self)
113 ):
114 raise ActivitySerializerError()
115
116 # check for an existing instance
117 instance = instance or model.find_existing(self.serialize())
118
119 if not instance and not allow_create:
120 # so that we don't create when we want to delete or update
121 return None
122 instance = instance or model()
123
124 for field in instance.simple_fields:
125 try:
126 field.set_field_from_activity(instance, self)
127 except AttributeError as e:
128 raise ActivitySerializerError(e)
129
130 # image fields have to be set after other fields because they can save
131 # too early and jank up users
132 for field in instance.image_fields:
133 field.set_field_from_activity(instance, self, save=save)
134
135 if not save:
136 return instance
137
138 with transaction.atomic():
139 # we can't set many to many and reverse fields on an unsaved object
140 try:
141 try:
142 instance.save(broadcast=False)
143 except TypeError:
144 instance.save()
145 except IntegrityError as e:
146 raise ActivitySerializerError(e)
147
148 # add many to many fields, which have to be set post-save
149 for field in instance.many_to_many_fields:
150 # mention books/users, for example
151 field.set_field_from_activity(instance, self)
152
153 # reversed relationships in the models
154 for (
155 model_field_name,
156 activity_field_name,
157 ) in instance.deserialize_reverse_fields:
158 # attachments on Status, for example
159 values = getattr(self, activity_field_name)
160 if values is None or values is MISSING:
161 continue
162
163 model_field = getattr(model, model_field_name)
164 # creating a Work, model_field is 'editions'
165 # creating a User, model field is 'key_pair'
166 related_model = model_field.field.model
167 related_field_name = model_field.field.name
168
169 for item in values:
170 set_related_field.delay(
171 related_model.__name__,
172 instance.__class__.__name__,
173 related_field_name,
174 instance.remote_id,
175 item,
176 )
177 return instance
178
179 def serialize(self):
180 """ convert to dictionary with context attr """
181 data = self.__dict__.copy()
182 # recursively serialize
183 for (k, v) in data.items():
184 try:
185 if issubclass(type(v), ActivityObject):
186 data[k] = v.serialize()
187 except TypeError:
188 pass
189 data = {k: v for (k, v) in data.items() if v is not None}
190 data["@context"] = "https://www.w3.org/ns/activitystreams"
191 return data
192
193
194 @app.task
195 @transaction.atomic
196 def set_related_field(
197 model_name, origin_model_name, related_field_name, related_remote_id, data
198 ):
199 """ load reverse related fields (editions, attachments) without blocking """
200 model = apps.get_model("bookwyrm.%s" % model_name, require_ready=True)
201 origin_model = apps.get_model("bookwyrm.%s" % origin_model_name, require_ready=True)
202
203 with transaction.atomic():
204 if isinstance(data, str):
205 existing = model.find_existing_by_remote_id(data)
206 if existing:
207 data = existing.to_activity()
208 else:
209 data = get_data(data)
210 activity = model.activity_serializer(**data)
211
212 # this must exist because it's the object that triggered this function
213 instance = origin_model.find_existing_by_remote_id(related_remote_id)
214 if not instance:
215 raise ValueError("Invalid related remote id: %s" % related_remote_id)
216
217 # set the origin's remote id on the activity so it will be there when
218 # the model instance is created
219 # edition.parentWork = instance, for example
220 model_field = getattr(model, related_field_name)
221 if hasattr(model_field, "activitypub_field"):
222 setattr(
223 activity, getattr(model_field, "activitypub_field"), instance.remote_id
224 )
225 item = activity.to_model()
226
227 # if the related field isn't serialized (attachments on Status), then
228 # we have to set it post-creation
229 if not hasattr(model_field, "activitypub_field"):
230 setattr(item, related_field_name, instance)
231 item.save()
232
233
234 def get_model_from_type(activity_type):
235 """ given the activity, what type of model """
236 models = apps.get_models()
237 model = [
238 m
239 for m in models
240 if hasattr(m, "activity_serializer")
241 and hasattr(m.activity_serializer, "type")
242 and m.activity_serializer.type == activity_type
243 ]
244 if not model:
245 raise ActivitySerializerError(
246 'No model found for activity type "%s"' % activity_type
247 )
248 return model[0]
249
250
251 def resolve_remote_id(
252 remote_id, model=None, refresh=False, save=True, get_activity=False
253 ):
254 """ take a remote_id and return an instance, creating if necessary """
255 if model: # a bonus check we can do if we already know the model
256 result = model.find_existing_by_remote_id(remote_id)
257 if result and not refresh:
258 return result if not get_activity else result.to_activity_dataclass()
259
260 # load the data and create the object
261 try:
262 data = get_data(remote_id)
263 except ConnectorException:
264 raise ActivitySerializerError(
265 "Could not connect to host for remote_id in: %s" % (remote_id)
266 )
267 # determine the model implicitly, if not provided
268 if not model:
269 model = get_model_from_type(data.get("type"))
270
271 # check for existing items with shared unique identifiers
272 result = model.find_existing(data)
273 if result and not refresh:
274 return result if not get_activity else result.to_activity_dataclass()
275
276 item = model.activity_serializer(**data)
277 if get_activity:
278 return item
279
280 # if we're refreshing, "result" will be set and we'll update it
281 return item.to_model(model=model, instance=result, save=save)
282
[end of bookwyrm/activitypub/base_activity.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bookwyrm/activitypub/base_activity.py b/bookwyrm/activitypub/base_activity.py
--- a/bookwyrm/activitypub/base_activity.py
+++ b/bookwyrm/activitypub/base_activity.py
@@ -265,7 +265,8 @@
"Could not connect to host for remote_id in: %s" % (remote_id)
)
# determine the model implicitly, if not provided
- if not model:
+ # or if it's a model with subclasses like Status, check again
+ if not model or hasattr(model.objects, "select_subclasses"):
model = get_model_from_type(data.get("type"))
# check for existing items with shared unique identifiers
| {"golden_diff": "diff --git a/bookwyrm/activitypub/base_activity.py b/bookwyrm/activitypub/base_activity.py\n--- a/bookwyrm/activitypub/base_activity.py\n+++ b/bookwyrm/activitypub/base_activity.py\n@@ -265,7 +265,8 @@\n \"Could not connect to host for remote_id in: %s\" % (remote_id)\n )\n # determine the model implicitly, if not provided\n- if not model:\n+ # or if it's a model with subclasses like Status, check again\n+ if not model or hasattr(model.objects, \"select_subclasses\"):\n model = get_model_from_type(data.get(\"type\"))\n \n # check for existing items with shared unique identifiers\n", "issue": "Book context missing in boosts from remote posts\n**Describe the bug**\r\nIn some cases, a post federates and the associated book is not shown.\r\n\r\n**Screenshots**\r\nScreenshot from a review where the associated book is not shown. It's a remote post and the booster is on another instance as well.\r\n\r\n\r\nScreenshot from the original instance with the book\r\n\r\n\r\nScreenshots from the federated tl where the remote review is there once with book, once without.\r\n\r\n\r\n**Additional context**\r\nWill post more matching posts. In other cases, it works properly - it may be that it works if the OP is followed from my instance so the post is already known but not enough examples yet.\n", "before_files": [{"content": "\"\"\" basics for an activitypub serializer \"\"\"\nfrom dataclasses import dataclass, fields, MISSING\nfrom json import JSONEncoder\n\nfrom django.apps import apps\nfrom django.db import IntegrityError, transaction\n\nfrom bookwyrm.connectors import ConnectorException, get_data\nfrom bookwyrm.tasks import app\n\n\nclass ActivitySerializerError(ValueError):\n \"\"\" routine problems serializing activitypub json \"\"\"\n\n\nclass ActivityEncoder(JSONEncoder):\n \"\"\" used to convert an Activity object into json \"\"\"\n\n def default(self, o):\n return o.__dict__\n\n\n@dataclass\nclass Link:\n \"\"\" for tagging a book in a status \"\"\"\n\n href: str\n name: str\n type: str = \"Link\"\n\n\n@dataclass\nclass Mention(Link):\n \"\"\" a subtype of Link for mentioning an actor \"\"\"\n\n type: str = \"Mention\"\n\n\n@dataclass\nclass Signature:\n \"\"\" public key block \"\"\"\n\n creator: str\n created: str\n signatureValue: str\n type: str = \"RsaSignature2017\"\n\n\ndef naive_parse(activity_objects, activity_json, serializer=None):\n \"\"\" this navigates circular import issues \"\"\"\n if not serializer:\n if activity_json.get(\"publicKeyPem\"):\n # ugh\n activity_json[\"type\"] = \"PublicKey\"\n try:\n activity_type = activity_json[\"type\"]\n serializer = activity_objects[activity_type]\n except KeyError as e:\n raise ActivitySerializerError(e)\n\n return serializer(activity_objects=activity_objects, **activity_json)\n\n\n@dataclass(init=False)\nclass ActivityObject:\n \"\"\" actor activitypub json \"\"\"\n\n id: str\n type: str\n\n def __init__(self, activity_objects=None, **kwargs):\n \"\"\"this lets you pass in an object with fields that aren't in the\n dataclass, which it ignores. 
Any field in the dataclass is required or\n has a default value\"\"\"\n for field in fields(self):\n try:\n value = kwargs[field.name]\n if value in (None, MISSING, {}):\n raise KeyError()\n try:\n is_subclass = issubclass(field.type, ActivityObject)\n except TypeError:\n is_subclass = False\n # serialize a model obj\n if hasattr(value, \"to_activity\"):\n value = value.to_activity()\n # parse a dict into the appropriate activity\n elif is_subclass and isinstance(value, dict):\n if activity_objects:\n value = naive_parse(activity_objects, value)\n else:\n value = naive_parse(\n activity_objects, value, serializer=field.type\n )\n\n except KeyError:\n if field.default == MISSING and field.default_factory == MISSING:\n raise ActivitySerializerError(\n \"Missing required field: %s\" % field.name\n )\n value = field.default\n setattr(self, field.name, value)\n\n def to_model(self, model=None, instance=None, allow_create=True, save=True):\n \"\"\" convert from an activity to a model instance \"\"\"\n model = model or get_model_from_type(self.type)\n\n # only reject statuses if we're potentially creating them\n if (\n allow_create\n and hasattr(model, \"ignore_activity\")\n and model.ignore_activity(self)\n ):\n raise ActivitySerializerError()\n\n # check for an existing instance\n instance = instance or model.find_existing(self.serialize())\n\n if not instance and not allow_create:\n # so that we don't create when we want to delete or update\n return None\n instance = instance or model()\n\n for field in instance.simple_fields:\n try:\n field.set_field_from_activity(instance, self)\n except AttributeError as e:\n raise ActivitySerializerError(e)\n\n # image fields have to be set after other fields because they can save\n # too early and jank up users\n for field in instance.image_fields:\n field.set_field_from_activity(instance, self, save=save)\n\n if not save:\n return instance\n\n with transaction.atomic():\n # we can't set many to many and reverse fields on an unsaved object\n try:\n try:\n instance.save(broadcast=False)\n except TypeError:\n instance.save()\n except IntegrityError as e:\n raise ActivitySerializerError(e)\n\n # add many to many fields, which have to be set post-save\n for field in instance.many_to_many_fields:\n # mention books/users, for example\n field.set_field_from_activity(instance, self)\n\n # reversed relationships in the models\n for (\n model_field_name,\n activity_field_name,\n ) in instance.deserialize_reverse_fields:\n # attachments on Status, for example\n values = getattr(self, activity_field_name)\n if values is None or values is MISSING:\n continue\n\n model_field = getattr(model, model_field_name)\n # creating a Work, model_field is 'editions'\n # creating a User, model field is 'key_pair'\n related_model = model_field.field.model\n related_field_name = model_field.field.name\n\n for item in values:\n set_related_field.delay(\n related_model.__name__,\n instance.__class__.__name__,\n related_field_name,\n instance.remote_id,\n item,\n )\n return instance\n\n def serialize(self):\n \"\"\" convert to dictionary with context attr \"\"\"\n data = self.__dict__.copy()\n # recursively serialize\n for (k, v) in data.items():\n try:\n if issubclass(type(v), ActivityObject):\n data[k] = v.serialize()\n except TypeError:\n pass\n data = {k: v for (k, v) in data.items() if v is not None}\n data[\"@context\"] = \"https://www.w3.org/ns/activitystreams\"\n return data\n\n\[email protected]\[email protected]\ndef set_related_field(\n model_name, origin_model_name, 
related_field_name, related_remote_id, data\n):\n \"\"\" load reverse related fields (editions, attachments) without blocking \"\"\"\n model = apps.get_model(\"bookwyrm.%s\" % model_name, require_ready=True)\n origin_model = apps.get_model(\"bookwyrm.%s\" % origin_model_name, require_ready=True)\n\n with transaction.atomic():\n if isinstance(data, str):\n existing = model.find_existing_by_remote_id(data)\n if existing:\n data = existing.to_activity()\n else:\n data = get_data(data)\n activity = model.activity_serializer(**data)\n\n # this must exist because it's the object that triggered this function\n instance = origin_model.find_existing_by_remote_id(related_remote_id)\n if not instance:\n raise ValueError(\"Invalid related remote id: %s\" % related_remote_id)\n\n # set the origin's remote id on the activity so it will be there when\n # the model instance is created\n # edition.parentWork = instance, for example\n model_field = getattr(model, related_field_name)\n if hasattr(model_field, \"activitypub_field\"):\n setattr(\n activity, getattr(model_field, \"activitypub_field\"), instance.remote_id\n )\n item = activity.to_model()\n\n # if the related field isn't serialized (attachments on Status), then\n # we have to set it post-creation\n if not hasattr(model_field, \"activitypub_field\"):\n setattr(item, related_field_name, instance)\n item.save()\n\n\ndef get_model_from_type(activity_type):\n \"\"\" given the activity, what type of model \"\"\"\n models = apps.get_models()\n model = [\n m\n for m in models\n if hasattr(m, \"activity_serializer\")\n and hasattr(m.activity_serializer, \"type\")\n and m.activity_serializer.type == activity_type\n ]\n if not model:\n raise ActivitySerializerError(\n 'No model found for activity type \"%s\"' % activity_type\n )\n return model[0]\n\n\ndef resolve_remote_id(\n remote_id, model=None, refresh=False, save=True, get_activity=False\n):\n \"\"\" take a remote_id and return an instance, creating if necessary \"\"\"\n if model: # a bonus check we can do if we already know the model\n result = model.find_existing_by_remote_id(remote_id)\n if result and not refresh:\n return result if not get_activity else result.to_activity_dataclass()\n\n # load the data and create the object\n try:\n data = get_data(remote_id)\n except ConnectorException:\n raise ActivitySerializerError(\n \"Could not connect to host for remote_id in: %s\" % (remote_id)\n )\n # determine the model implicitly, if not provided\n if not model:\n model = get_model_from_type(data.get(\"type\"))\n\n # check for existing items with shared unique identifiers\n result = model.find_existing(data)\n if result and not refresh:\n return result if not get_activity else result.to_activity_dataclass()\n\n item = model.activity_serializer(**data)\n if get_activity:\n return item\n\n # if we're refreshing, \"result\" will be set and we'll update it\n return item.to_model(model=model, instance=result, save=save)\n", "path": "bookwyrm/activitypub/base_activity.py"}]} | 3,547 | 153 |
gh_patches_debug_13412 | rasdani/github-patches | git_diff | pre-commit__pre-commit-785 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Documentation regarding adding new Python-based hooks needs improvement
Apparently we need some kind of how-to or mini-tutorial on how to add a new hook to pre-commit, as the basic documentation does not help someone without previous pre-commit knowledge.
I wanted to add support for `bashate`, a shell-script linter written in Python, available on PyPI, which installs a command with the same name that can be used just like other linters.
Initially I went to https://pre-commit.com/#new-hooks which didn't give me enough info. So I looked for other linters written in Python and found yamllint, which pointed me to https://github.com/adrienverge/yamllint/blob/master/.pre-commit-hooks.yaml
So the idea was to add the hook definition directly to the linter package. In this case I had to fork bashate in order to test the new hook. So I ended up creating https://github.com/ssbarnea/bashate/blob/master/.pre-commit-hooks.yaml -- following the same model used in yamllint.
Now I wanted to add and test the hook on one of the repos I maintain, so I created https://github.com/pycontribs/jira/blob/feature/pre-commit/.pre-commit-config.yaml#L25
When I tried to run it using `pre-commit run bashate --all`, it failed with this error:
```
Bashate..................................................................An unexpected error has occurred: OSError: [Errno 2] No such file or directory
Check the log at /Users/ssbarnea/.cache/pre-commit/pre-commit.log
An unexpected error has occurred: OSError: [Errno 2] No such file or directory
Traceback (most recent call last):
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/error_handler.py", line 47, in error_handler
yield
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/main.py", line 258, in main
return run(runner, args)
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/commands/run.py", line 270, in run
return _run_hooks(runner.config, repo_hooks, args, environ)
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/commands/run.py", line 199, in _run_hooks
retval |= _run_single_hook(filenames, hook, repo, args, skips, cols)
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/commands/run.py", line 110, in _run_single_hook
hook, tuple(filenames) if hook['pass_filenames'] else (),
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/repository.py", line 207, in run_hook
return languages[language_name].run_hook(prefix, hook, file_args)
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/languages/script.py", line 16, in run_hook
return xargs(cmd, file_args)
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/xargs.py", line 63, in xargs
*run_cmd, encoding=None, retcode=None
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/util.py", line 167, in cmd_output
proc = subprocess.Popen(cmd, **popen_kwargs)
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/subprocess.py", line 390, in __init__
errread, errwrite)
File "/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/subprocess.py", line 1025, in _execute_child
raise child_exception
OSError: [Errno 2] No such file or directory
```
At the moment I have no idea what I did wrong, probably something simple. Still, we need to make it easier to integrate new linters into pre-commit.
</issue>
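The traceback in the report bottoms out in `subprocess.Popen`, so the failure mode is easy to reproduce in isolation. This is only an illustrative sketch — the missing-hook path below is invented, not taken from the repository or the reporter's setup:

```python
import subprocess

# Popen raises a bare OSError (FileNotFoundError on Python 3) when the hook's
# entry does not resolve to an executable file, matching the traceback above.
try:
    subprocess.Popen(["./no-such-hook-entry"])
except OSError as exc:
    print("Popen failed:", exc)
```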
<code>
[start of pre_commit/parse_shebang.py]
1 from __future__ import absolute_import
2 from __future__ import unicode_literals
3
4 import os.path
5
6 from identify.identify import parse_shebang_from_file
7
8
9 class ExecutableNotFoundError(OSError):
10 def to_output(self):
11 return (1, self.args[0].encode('UTF-8'), b'')
12
13
14 def parse_filename(filename):
15 if not os.path.exists(filename):
16 return ()
17 else:
18 return parse_shebang_from_file(filename)
19
20
21 def find_executable(exe, _environ=None):
22 exe = os.path.normpath(exe)
23 if os.sep in exe:
24 return exe
25
26 environ = _environ if _environ is not None else os.environ
27
28 if 'PATHEXT' in environ:
29 possible_exe_names = tuple(
30 exe + ext.lower() for ext in environ['PATHEXT'].split(os.pathsep)
31 ) + (exe,)
32
33 else:
34 possible_exe_names = (exe,)
35
36 for path in environ.get('PATH', '').split(os.pathsep):
37 for possible_exe_name in possible_exe_names:
38 joined = os.path.join(path, possible_exe_name)
39 if os.path.isfile(joined) and os.access(joined, os.X_OK):
40 return joined
41 else:
42 return None
43
44
45 def normexe(orig_exe):
46 if os.sep not in orig_exe:
47 exe = find_executable(orig_exe)
48 if exe is None:
49 raise ExecutableNotFoundError(
50 'Executable `{}` not found'.format(orig_exe),
51 )
52 return exe
53 else:
54 return orig_exe
55
56
57 def normalize_cmd(cmd):
58 """Fixes for the following issues on windows
59 - https://bugs.python.org/issue8557
60 - windows does not parse shebangs
61
62 This function also makes deep-path shebangs work just fine
63 """
64 # Use PATH to determine the executable
65 exe = normexe(cmd[0])
66
67 # Figure out the shebang from the resulting command
68 cmd = parse_filename(exe) + (exe,) + cmd[1:]
69
70 # This could have given us back another bare executable
71 exe = normexe(cmd[0])
72
73 return (exe,) + cmd[1:]
74
[end of pre_commit/parse_shebang.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pre_commit/parse_shebang.py b/pre_commit/parse_shebang.py
--- a/pre_commit/parse_shebang.py
+++ b/pre_commit/parse_shebang.py
@@ -42,16 +42,21 @@
return None
-def normexe(orig_exe):
- if os.sep not in orig_exe:
- exe = find_executable(orig_exe)
+def normexe(orig):
+ def _error(msg):
+ raise ExecutableNotFoundError('Executable `{}` {}'.format(orig, msg))
+
+ if os.sep not in orig and (not os.altsep or os.altsep not in orig):
+ exe = find_executable(orig)
if exe is None:
- raise ExecutableNotFoundError(
- 'Executable `{}` not found'.format(orig_exe),
- )
+ _error('not found')
return exe
+ elif not os.access(orig, os.X_OK):
+ _error('not found')
+ elif os.path.isdir(orig):
+ _error('is a directory')
else:
- return orig_exe
+ return orig
def normalize_cmd(cmd):
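For illustration only, the two guards this patch adds around `normexe` can be exercised on their own; the paths below are placeholders and the helper loosely mirrors (rather than reuses) the patched code:

```python
import os

# An entry that is a directory, or that is not an executable file, now yields
# a clear "not found" / "is a directory" message instead of a raw OSError
# from Popen further down.
def classify(path):
    if os.path.isdir(path):
        return "is a directory"
    if not os.access(path, os.X_OK):
        return "not found"
    return "ok"

print(classify("."))                  # is a directory
print(classify("no-such-file-here"))  # not found
```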
| {"golden_diff": "diff --git a/pre_commit/parse_shebang.py b/pre_commit/parse_shebang.py\n--- a/pre_commit/parse_shebang.py\n+++ b/pre_commit/parse_shebang.py\n@@ -42,16 +42,21 @@\n return None\n \n \n-def normexe(orig_exe):\n- if os.sep not in orig_exe:\n- exe = find_executable(orig_exe)\n+def normexe(orig):\n+ def _error(msg):\n+ raise ExecutableNotFoundError('Executable `{}` {}'.format(orig, msg))\n+\n+ if os.sep not in orig and (not os.altsep or os.altsep not in orig):\n+ exe = find_executable(orig)\n if exe is None:\n- raise ExecutableNotFoundError(\n- 'Executable `{}` not found'.format(orig_exe),\n- )\n+ _error('not found')\n return exe\n+ elif not os.access(orig, os.X_OK):\n+ _error('not found')\n+ elif os.path.isdir(orig):\n+ _error('is a directory')\n else:\n- return orig_exe\n+ return orig\n \n \n def normalize_cmd(cmd):\n", "issue": "documention regarding adding new python based hooks needs improvement\nApparently we need some kind of how-to or mini tutorial on how to add a new hook to pre-commit as the basic documentation does not help someone without previous pre-commit knowledge.\r\n\r\nI wanted to add support for `bashate`, a shell script linter written in python, available on pypi and that installs a shell script with the same name that can be used just like other linters.\r\n\r\nInitially I went to https://pre-commit.com/#new-hooks which didn't give me enough info. So, i looked for other linters based on python and I found yamllint, which pointed me to https://github.com/adrienverge/yamllint/blob/master/.pre-commit-hooks.yaml\r\n\r\nSo the idea was to add the hook definition directly to the linter package. In this case I had to fork bashate in order to test the new, hook. So I ended up creating https://github.com/ssbarnea/bashate/blob/master/.pre-commit-hooks.yaml -- folowing the same model used in yamllint.\r\n\r\nNow, I wanted to add and test the hook on one of the repos I maintain so I did create https://github.com/pycontribs/jira/blob/feature/pre-commit/.pre-commit-config.yaml#L25\r\n\r\nWhen I tried to run it using `pre-commit run bashate --all`, it failed with this error:\r\n```\r\nBashate..................................................................An unexpected error has occurred: OSError: [Errno 2] No such file or directory\r\nCheck the log at /Users/ssbarnea/.cache/pre-commit/pre-commit.log\r\n\r\nAn unexpected error has occurred: OSError: [Errno 2] No such file or directory\r\nTraceback (most recent call last):\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/error_handler.py\", line 47, in error_handler\r\n yield\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/main.py\", line 258, in main\r\n return run(runner, args)\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 270, in run\r\n return _run_hooks(runner.config, repo_hooks, args, environ)\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 199, in _run_hooks\r\n retval |= _run_single_hook(filenames, hook, repo, args, skips, cols)\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 110, in _run_single_hook\r\n hook, tuple(filenames) if hook['pass_filenames'] else (),\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/repository.py\", line 207, in run_hook\r\n return languages[language_name].run_hook(prefix, 
hook, file_args)\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/languages/script.py\", line 16, in run_hook\r\n return xargs(cmd, file_args)\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/xargs.py\", line 63, in xargs\r\n *run_cmd, encoding=None, retcode=None\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/site-packages/pre_commit/util.py\", line 167, in cmd_output\r\n proc = subprocess.Popen(cmd, **popen_kwargs)\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/subprocess.py\", line 390, in __init__\r\n errread, errwrite)\r\n File \"/Users/ssbarnea/.pyenv/versions/2.7.14/lib/python2.7/subprocess.py\", line 1025, in _execute_child\r\n raise child_exception\r\nOSError: [Errno 2] No such file or directory\r\n```\r\n\r\nAt the moment I have no idea what I did wrong, probably something simple. Still, we need to make it easier to integrate new linters into pre-commit.\n", "before_files": [{"content": "from __future__ import absolute_import\nfrom __future__ import unicode_literals\n\nimport os.path\n\nfrom identify.identify import parse_shebang_from_file\n\n\nclass ExecutableNotFoundError(OSError):\n def to_output(self):\n return (1, self.args[0].encode('UTF-8'), b'')\n\n\ndef parse_filename(filename):\n if not os.path.exists(filename):\n return ()\n else:\n return parse_shebang_from_file(filename)\n\n\ndef find_executable(exe, _environ=None):\n exe = os.path.normpath(exe)\n if os.sep in exe:\n return exe\n\n environ = _environ if _environ is not None else os.environ\n\n if 'PATHEXT' in environ:\n possible_exe_names = tuple(\n exe + ext.lower() for ext in environ['PATHEXT'].split(os.pathsep)\n ) + (exe,)\n\n else:\n possible_exe_names = (exe,)\n\n for path in environ.get('PATH', '').split(os.pathsep):\n for possible_exe_name in possible_exe_names:\n joined = os.path.join(path, possible_exe_name)\n if os.path.isfile(joined) and os.access(joined, os.X_OK):\n return joined\n else:\n return None\n\n\ndef normexe(orig_exe):\n if os.sep not in orig_exe:\n exe = find_executable(orig_exe)\n if exe is None:\n raise ExecutableNotFoundError(\n 'Executable `{}` not found'.format(orig_exe),\n )\n return exe\n else:\n return orig_exe\n\n\ndef normalize_cmd(cmd):\n \"\"\"Fixes for the following issues on windows\n - https://bugs.python.org/issue8557\n - windows does not parse shebangs\n\n This function also makes deep-path shebangs work just fine\n \"\"\"\n # Use PATH to determine the executable\n exe = normexe(cmd[0])\n\n # Figure out the shebang from the resulting command\n cmd = parse_filename(exe) + (exe,) + cmd[1:]\n\n # This could have given us back another bare executable\n exe = normexe(cmd[0])\n\n return (exe,) + cmd[1:]\n", "path": "pre_commit/parse_shebang.py"}]} | 2,147 | 248 |
gh_patches_debug_63906 | rasdani/github-patches | git_diff | Lightning-AI__torchmetrics-810 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
pip install failure for Python 3.10
## 🐛 Bug
pip fails to install required dependencies
### To Reproduce
Steps to reproduce the behavior:
```shell
% python --version
Python 3.10.2
% pip freeze
% pip install torchmetrics
Collecting torchmetrics
Using cached torchmetrics-0.7.0-py3-none-any.whl (396 kB)
Using cached torchmetrics-0.6.2-py3-none-any.whl (332 kB)
Using cached torchmetrics-0.6.1-py3-none-any.whl (332 kB)
Using cached torchmetrics-0.6.0-py3-none-any.whl (329 kB)
Using cached torchmetrics-0.5.1-py3-none-any.whl (282 kB)
Using cached torchmetrics-0.5.0-py3-none-any.whl (272 kB)
Using cached torchmetrics-0.4.1-py3-none-any.whl (234 kB)
Using cached torchmetrics-0.3.2-py3-none-any.whl (274 kB)
Using cached torchmetrics-0.3.1-py3-none-any.whl (271 kB)
Using cached torchmetrics-0.3.0-py3-none-any.whl (270 kB)
Using cached torchmetrics-0.2.0-py3-none-any.whl (176 kB)
ERROR: Cannot install torchmetrics==0.2.0, torchmetrics==0.3.0, torchmetrics==0.3.1, torchmetrics==0.3.2, torchmetrics==0.4.1, torchmetrics==0.5.0, torchmetrics==0.5.1, torchmetrics==0.6.0, torchmetrics==0.6.1, torchmetrics==0.6.2 and torchmetrics==0.7.0 because these package versions have conflicting dependencies.
The conflict is caused by:
torchmetrics 0.7.0 depends on torch>=1.3.1
torchmetrics 0.6.2 depends on torch>=1.3.1
torchmetrics 0.6.1 depends on torch>=1.3.1
torchmetrics 0.6.0 depends on torch>=1.3.1
torchmetrics 0.5.1 depends on torch>=1.3.1
torchmetrics 0.5.0 depends on torch>=1.3.1
torchmetrics 0.4.1 depends on torch>=1.3.1
torchmetrics 0.3.2 depends on torch>=1.3.1
torchmetrics 0.3.1 depends on torch>=1.3.1
torchmetrics 0.3.0 depends on torch>=1.3.1
torchmetrics 0.2.0 depends on torch>=1.3.1
To fix this you could try to:
1. loosen the range of package versions you've specified
2. remove package versions to allow pip attempt to solve the dependency conflict
ERROR: ResolutionImpossible: for help visit https://pip.pypa.io/en/latest/user_guide/#fixing-conflicting-dependencies
```
### Expected behavior
`pip` instals all dependencies itself and then installs `torchmetrics`.
### Environment
- PyTorch Version (e.g., 1.0): No
- OS (e.g., Linux): Ubuntu 21.10
- How you installed PyTorch (`conda`, `pip`, source): pip
- Build command you used (if compiling from source): pip install torchmetrics
- Python version: 3.10.2
</issue>
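A plausible reading of the resolver output (an assumption, not something pip states explicitly) is that torch itself had no CPython 3.10 wheels at the time, so every torchmetrics release fails the same `torch>=1.3.1` requirement. A quick local check along those lines:

```python
import importlib.util
import sys

# Hypothetical diagnostic: report the interpreter version and whether any
# torch distribution is available to satisfy torch>=1.3.1 on it.
print("python", sys.version.split()[0])
print("torch importable:", importlib.util.find_spec("torch") is not None)
```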
<code>
[start of setup.py]
1 #!/usr/bin/env python
2 import glob
3 import os
4 from functools import partial
5 from importlib.util import module_from_spec, spec_from_file_location
6 from typing import Tuple
7
8 from setuptools import find_packages, setup
9
10 _PATH_ROOT = os.path.realpath(os.path.dirname(__file__))
11 _PATH_REQUIRE = os.path.join(_PATH_ROOT, "requirements")
12
13
14 def _load_py_module(fname, pkg="torchmetrics"):
15 spec = spec_from_file_location(os.path.join(pkg, fname), os.path.join(_PATH_ROOT, pkg, fname))
16 py = module_from_spec(spec)
17 spec.loader.exec_module(py)
18 return py
19
20
21 about = _load_py_module("__about__.py")
22 setup_tools = _load_py_module("setup_tools.py")
23 long_description = setup_tools._load_readme_description(
24 _PATH_ROOT,
25 homepage=about.__homepage__,
26 version=f"v{about.__version__}",
27 )
28
29
30 BASE_REQUIREMENTS = setup_tools._load_requirements(path_dir=_PATH_ROOT, file_name="requirements.txt")
31
32
33 def _prepare_extras(skip_files: Tuple[str] = ("devel.txt")):
34 # find all extra requirements
35 _load_req = partial(setup_tools._load_requirements, path_dir=_PATH_REQUIRE)
36 found_req_files = sorted(os.path.basename(p) for p in glob.glob(os.path.join(_PATH_REQUIRE, "*.txt")))
37 # filter unwanted files
38 found_req_files = [n for n in found_req_files if n not in skip_files]
39 found_req_names = [os.path.splitext(req)[0] for req in found_req_files]
40 # define basic and extra extras
41 extras_req = {
42 name: _load_req(file_name=fname) for name, fname in zip(found_req_names, found_req_files) if "_test" not in name
43 }
44 for name, fname in zip(found_req_names, found_req_files):
45 if "_test" in name:
46 extras_req["test"] += _load_req(file_name=fname)
47 # filter the uniques
48 extras_req = {n: list(set(req)) for n, req in extras_req.items()}
49 # create an 'all' keyword that install all possible denpendencies
50 extras_req["all"] = [pkg for reqs in extras_req.values() for pkg in reqs]
51 return extras_req
52
53
54 # https://packaging.python.org/discussions/install-requires-vs-requirements /
55 # keep the meta-data here for simplicity in reading this file... it's not obvious
56 # what happens and to non-engineers they won't know to look in init ...
57 # the goal of the project is simplicity for researchers, don't want to add too much
58 # engineer specific practices
59 setup(
60 name="torchmetrics",
61 version=about.__version__,
62 description=about.__docs__,
63 author=about.__author__,
64 author_email=about.__author_email__,
65 url=about.__homepage__,
66 download_url=os.path.join(about.__homepage__, "archive", "master.zip"),
67 license=about.__license__,
68 packages=find_packages(exclude=["tests", "tests.*", "docs"]),
69 long_description=long_description,
70 long_description_content_type="text/markdown",
71 include_package_data=True,
72 zip_safe=False,
73 keywords=["deep learning", "machine learning", "pytorch", "metrics", "AI"],
74 python_requires=">=3.6",
75 setup_requires=[],
76 install_requires=BASE_REQUIREMENTS,
77 extras_require=_prepare_extras(),
78 project_urls={
79 "Bug Tracker": os.path.join(about.__homepage__, "issues"),
80 "Documentation": "https://torchmetrics.rtfd.io/en/latest/",
81 "Source Code": about.__homepage__,
82 },
83 classifiers=[
84 "Environment :: Console",
85 "Natural Language :: English",
86 # How mature is this project? Common values are
87 # 3 - Alpha, 4 - Beta, 5 - Production/Stable
88 "Development Status :: 5 - Production/Stable",
89 # Indicate who your project is intended for
90 "Intended Audience :: Developers",
91 "Topic :: Scientific/Engineering :: Artificial Intelligence",
92 "Topic :: Scientific/Engineering :: Image Recognition",
93 "Topic :: Scientific/Engineering :: Information Analysis",
94 # Pick your license as you wish
95 "License :: OSI Approved :: Apache Software License",
96 "Operating System :: OS Independent",
97 # Specify the Python versions you support here. In particular, ensure
98 # that you indicate whether you support Python 2, Python 3 or both.
99 "Programming Language :: Python :: 3",
100 "Programming Language :: Python :: 3.6",
101 "Programming Language :: Python :: 3.7",
102 "Programming Language :: Python :: 3.8",
103 "Programming Language :: Python :: 3.9",
104 "Programming Language :: Python :: 3.10",
105 ],
106 )
107
[end of setup.py]
</code>
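As an aside on the listing, `_prepare_extras` is easier to follow with concrete values. The sketch below is self-contained and uses invented requirement files instead of reading the `requirements/` directory:

```python
# Mimic of _prepare_extras(): per-file requirements become extras, *_test
# files are folded into the "test" extra, and "all" aggregates everything.
found = {
    "image.txt": ["scipy"],
    "image_test.txt": ["scikit-image"],
    "test.txt": ["pytest"],
}
names = [fname[: -len(".txt")] for fname in found]
extras = {n: list(found[n + ".txt"]) for n in names if "_test" not in n}
for n in names:
    if "_test" in n:
        extras["test"] += found[n + ".txt"]
extras = {n: sorted(set(reqs)) for n, reqs in extras.items()}
extras["all"] = [pkg for reqs in extras.values() for pkg in reqs]
print(extras)
```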
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -101,6 +101,5 @@
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
],
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -101,6 +101,5 @@\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n- \"Programming Language :: Python :: 3.10\",\n ],\n )\n", "issue": "pip install failure for Python 3.10\n## \ud83d\udc1b Bug\r\n\r\npip fails to install required dependencies\r\n\r\n### To Reproduce\r\n\r\nSteps to reproduce the behavior:\r\n\r\n```shell\r\n % python --version\r\nPython 3.10.2\r\n % pip freeze\r\n % pip install torchmetrics\r\nCollecting torchmetrics\r\n Using cached torchmetrics-0.7.0-py3-none-any.whl (396 kB)\r\n Using cached torchmetrics-0.6.2-py3-none-any.whl (332 kB)\r\n Using cached torchmetrics-0.6.1-py3-none-any.whl (332 kB)\r\n Using cached torchmetrics-0.6.0-py3-none-any.whl (329 kB)\r\n Using cached torchmetrics-0.5.1-py3-none-any.whl (282 kB)\r\n Using cached torchmetrics-0.5.0-py3-none-any.whl (272 kB)\r\n Using cached torchmetrics-0.4.1-py3-none-any.whl (234 kB)\r\n Using cached torchmetrics-0.3.2-py3-none-any.whl (274 kB)\r\n Using cached torchmetrics-0.3.1-py3-none-any.whl (271 kB)\r\n Using cached torchmetrics-0.3.0-py3-none-any.whl (270 kB)\r\n Using cached torchmetrics-0.2.0-py3-none-any.whl (176 kB)\r\nERROR: Cannot install torchmetrics==0.2.0, torchmetrics==0.3.0, torchmetrics==0.3.1, torchmetrics==0.3.2, torchmetrics==0.4.1, torchmetrics==0.5.0, torchmetrics==0.5.1, torchmetrics==0.6.0, torchmetrics==0.6.1, torchmetrics==0.6.2 and torchmetrics==0.7.0 because these package versions have conflicting dependencies.\r\n\r\nThe conflict is caused by:\r\n torchmetrics 0.7.0 depends on torch>=1.3.1\r\n torchmetrics 0.6.2 depends on torch>=1.3.1\r\n torchmetrics 0.6.1 depends on torch>=1.3.1\r\n torchmetrics 0.6.0 depends on torch>=1.3.1\r\n torchmetrics 0.5.1 depends on torch>=1.3.1\r\n torchmetrics 0.5.0 depends on torch>=1.3.1\r\n torchmetrics 0.4.1 depends on torch>=1.3.1\r\n torchmetrics 0.3.2 depends on torch>=1.3.1\r\n torchmetrics 0.3.1 depends on torch>=1.3.1\r\n torchmetrics 0.3.0 depends on torch>=1.3.1\r\n torchmetrics 0.2.0 depends on torch>=1.3.1\r\n\r\nTo fix this you could try to:\r\n1. loosen the range of package versions you've specified\r\n2. 
remove package versions to allow pip attempt to solve the dependency conflict\r\n\r\nERROR: ResolutionImpossible: for help visit https://pip.pypa.io/en/latest/user_guide/#fixing-conflicting-dependencies\r\n```\r\n\r\n### Expected behavior\r\n\r\n`pip` instals all dependencies itself and then installs `torchmetrics`.\r\n\r\n### Environment\r\n\r\n- PyTorch Version (e.g., 1.0): No\r\n- OS (e.g., Linux): Ubuntu 21.10\r\n- How you installed PyTorch (`conda`, `pip`, source): pip\r\n- Build command you used (if compiling from source): pip install torchmetrics\r\n- Python version: 3.10.2\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\nimport glob\nimport os\nfrom functools import partial\nfrom importlib.util import module_from_spec, spec_from_file_location\nfrom typing import Tuple\n\nfrom setuptools import find_packages, setup\n\n_PATH_ROOT = os.path.realpath(os.path.dirname(__file__))\n_PATH_REQUIRE = os.path.join(_PATH_ROOT, \"requirements\")\n\n\ndef _load_py_module(fname, pkg=\"torchmetrics\"):\n spec = spec_from_file_location(os.path.join(pkg, fname), os.path.join(_PATH_ROOT, pkg, fname))\n py = module_from_spec(spec)\n spec.loader.exec_module(py)\n return py\n\n\nabout = _load_py_module(\"__about__.py\")\nsetup_tools = _load_py_module(\"setup_tools.py\")\nlong_description = setup_tools._load_readme_description(\n _PATH_ROOT,\n homepage=about.__homepage__,\n version=f\"v{about.__version__}\",\n)\n\n\nBASE_REQUIREMENTS = setup_tools._load_requirements(path_dir=_PATH_ROOT, file_name=\"requirements.txt\")\n\n\ndef _prepare_extras(skip_files: Tuple[str] = (\"devel.txt\")):\n # find all extra requirements\n _load_req = partial(setup_tools._load_requirements, path_dir=_PATH_REQUIRE)\n found_req_files = sorted(os.path.basename(p) for p in glob.glob(os.path.join(_PATH_REQUIRE, \"*.txt\")))\n # filter unwanted files\n found_req_files = [n for n in found_req_files if n not in skip_files]\n found_req_names = [os.path.splitext(req)[0] for req in found_req_files]\n # define basic and extra extras\n extras_req = {\n name: _load_req(file_name=fname) for name, fname in zip(found_req_names, found_req_files) if \"_test\" not in name\n }\n for name, fname in zip(found_req_names, found_req_files):\n if \"_test\" in name:\n extras_req[\"test\"] += _load_req(file_name=fname)\n # filter the uniques\n extras_req = {n: list(set(req)) for n, req in extras_req.items()}\n # create an 'all' keyword that install all possible denpendencies\n extras_req[\"all\"] = [pkg for reqs in extras_req.values() for pkg in reqs]\n return extras_req\n\n\n# https://packaging.python.org/discussions/install-requires-vs-requirements /\n# keep the meta-data here for simplicity in reading this file... 
it's not obvious\n# what happens and to non-engineers they won't know to look in init ...\n# the goal of the project is simplicity for researchers, don't want to add too much\n# engineer specific practices\nsetup(\n name=\"torchmetrics\",\n version=about.__version__,\n description=about.__docs__,\n author=about.__author__,\n author_email=about.__author_email__,\n url=about.__homepage__,\n download_url=os.path.join(about.__homepage__, \"archive\", \"master.zip\"),\n license=about.__license__,\n packages=find_packages(exclude=[\"tests\", \"tests.*\", \"docs\"]),\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n include_package_data=True,\n zip_safe=False,\n keywords=[\"deep learning\", \"machine learning\", \"pytorch\", \"metrics\", \"AI\"],\n python_requires=\">=3.6\",\n setup_requires=[],\n install_requires=BASE_REQUIREMENTS,\n extras_require=_prepare_extras(),\n project_urls={\n \"Bug Tracker\": os.path.join(about.__homepage__, \"issues\"),\n \"Documentation\": \"https://torchmetrics.rtfd.io/en/latest/\",\n \"Source Code\": about.__homepage__,\n },\n classifiers=[\n \"Environment :: Console\",\n \"Natural Language :: English\",\n # How mature is this project? Common values are\n # 3 - Alpha, 4 - Beta, 5 - Production/Stable\n \"Development Status :: 5 - Production/Stable\",\n # Indicate who your project is intended for\n \"Intended Audience :: Developers\",\n \"Topic :: Scientific/Engineering :: Artificial Intelligence\",\n \"Topic :: Scientific/Engineering :: Image Recognition\",\n \"Topic :: Scientific/Engineering :: Information Analysis\",\n # Pick your license as you wish\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n # Specify the Python versions you support here. In particular, ensure\n # that you indicate whether you support Python 2, Python 3 or both.\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n ],\n)\n", "path": "setup.py"}]} | 2,587 | 87 |
gh_patches_debug_12558 | rasdani/github-patches | git_diff | hydroshare__hydroshare-1690 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
"Web App" needs a space (not "WebApp")
On the open web app button there needs to be a space:

</issue>
<code>
[start of hs_tools_resource/migrations/0010_auto_20161203_1913.py]
1 # -*- coding: utf-8 -*-
2 from __future__ import unicode_literals
3
4 from django.db import migrations, models
5
6
7 class Migration(migrations.Migration):
8
9 dependencies = [
10 ('hs_tools_resource', '0009_auto_20160929_1543'),
11 ]
12
13 operations = [
14 migrations.RemoveField(
15 model_name='toolicon',
16 name='url',
17 ),
18 migrations.AddField(
19 model_name='toolicon',
20 name='value',
21 field=models.CharField(default=b'', max_length=1024, blank=True),
22 ),
23 migrations.AlterField(
24 model_name='apphomepageurl',
25 name='value',
26 field=models.CharField(default=b'', max_length=1024, blank=True),
27 ),
28 migrations.AlterField(
29 model_name='requesturlbase',
30 name='value',
31 field=models.CharField(default=b'', max_length=1024, blank=True),
32 ),
33 migrations.AlterField(
34 model_name='supportedrestypes',
35 name='supported_res_types',
36 field=models.ManyToManyField(to='hs_tools_resource.SupportedResTypeChoices', blank=True),
37 ),
38 migrations.AlterField(
39 model_name='supportedsharingstatus',
40 name='sharing_status',
41 field=models.ManyToManyField(to='hs_tools_resource.SupportedSharingStatusChoices', blank=True),
42 ),
43 ]
44
[end of hs_tools_resource/migrations/0010_auto_20161203_1913.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/hs_tools_resource/migrations/0010_auto_20161203_1913.py b/hs_tools_resource/migrations/0010_auto_20161203_1913.py
--- a/hs_tools_resource/migrations/0010_auto_20161203_1913.py
+++ b/hs_tools_resource/migrations/0010_auto_20161203_1913.py
@@ -11,14 +11,15 @@
]
operations = [
- migrations.RemoveField(
+ migrations.AlterField(
model_name='toolicon',
name='url',
+ field=models.CharField(default=b'', max_length=1024, blank=True),
),
- migrations.AddField(
+ migrations.RenameField(
model_name='toolicon',
- name='value',
- field=models.CharField(default=b'', max_length=1024, blank=True),
+ old_name='url',
+ new_name='value'
),
migrations.AlterField(
model_name='apphomepageurl',
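A brief, hypothetical illustration of why the accepted change matters (the model and field names below are examples, not the hs_tools_resource models): `RemoveField` followed by `AddField` drops the column and recreates it empty, whereas `AlterField` plus `RenameField` keeps existing row values.

```python
from django.db import migrations, models

# Data-preserving rename pattern for some example CharField; assumes Django
# is installed but does not touch any real app or database.
operations = [
    migrations.AlterField(
        model_name="example",
        name="old_name",
        field=models.CharField(default="", max_length=1024, blank=True),
    ),
    migrations.RenameField(
        model_name="example", old_name="old_name", new_name="new_name"
    ),
]
print(operations)
```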
| {"golden_diff": "diff --git a/hs_tools_resource/migrations/0010_auto_20161203_1913.py b/hs_tools_resource/migrations/0010_auto_20161203_1913.py\n--- a/hs_tools_resource/migrations/0010_auto_20161203_1913.py\n+++ b/hs_tools_resource/migrations/0010_auto_20161203_1913.py\n@@ -11,14 +11,15 @@\n ]\n \n operations = [\n- migrations.RemoveField(\n+ migrations.AlterField(\n model_name='toolicon',\n name='url',\n+ field=models.CharField(default=b'', max_length=1024, blank=True),\n ),\n- migrations.AddField(\n+ migrations.RenameField(\n model_name='toolicon',\n- name='value',\n- field=models.CharField(default=b'', max_length=1024, blank=True),\n+ old_name='url',\n+ new_name='value'\n ),\n migrations.AlterField(\n model_name='apphomepageurl',\n", "issue": "\"Web App\" needs a space (not \"WebApp\")\nOn the open web app button there needs to be a space:\r\n\r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('hs_tools_resource', '0009_auto_20160929_1543'),\n ]\n\n operations = [\n migrations.RemoveField(\n model_name='toolicon',\n name='url',\n ),\n migrations.AddField(\n model_name='toolicon',\n name='value',\n field=models.CharField(default=b'', max_length=1024, blank=True),\n ),\n migrations.AlterField(\n model_name='apphomepageurl',\n name='value',\n field=models.CharField(default=b'', max_length=1024, blank=True),\n ),\n migrations.AlterField(\n model_name='requesturlbase',\n name='value',\n field=models.CharField(default=b'', max_length=1024, blank=True),\n ),\n migrations.AlterField(\n model_name='supportedrestypes',\n name='supported_res_types',\n field=models.ManyToManyField(to='hs_tools_resource.SupportedResTypeChoices', blank=True),\n ),\n migrations.AlterField(\n model_name='supportedsharingstatus',\n name='sharing_status',\n field=models.ManyToManyField(to='hs_tools_resource.SupportedSharingStatusChoices', blank=True),\n ),\n ]\n", "path": "hs_tools_resource/migrations/0010_auto_20161203_1913.py"}]} | 1,030 | 259 |
gh_patches_debug_15450 | rasdani/github-patches | git_diff | aws-cloudformation__cfn-lint-758 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
E0001 Error for SAR Apps
*cfn-lint version: `cfn-lint 0.15.0`*
*Description of issue*
While running `cfn-lint` on a nested CloudFormation stack containing `AWS::Serverless::Application` resources, it errors with:
```yaml
Resources:
AppName:
Type: AWS::Serverless::Application
Properties:
Location:
ApplicationId: arn:aws:serverlessrepo:us-west-2:<my-account-id>:applications/<app-name>
SemanticVersion: 1.0.0
Parameters:
Debug: 'True'
MemorySizeMB: 128
TimeoutSec: 300
```
```bash
$ cfn-lint template.yml
E0001 Resource with id [AppName] is invalid. Application with id arn:aws:serverlessrepo:us-west-2:<my-account-Id-hosting-the-app>:applications/<app-name> could not be found.
template.yml:1:1
```
Supporting evidence:
1. The Application definitely exists, since the template runs with no issues.
2. I have admin permissions with the current user to make, update, view, delete the app.
</issue>
<code>
[start of src/cfnlint/transform.py]
1 """
2 Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3
4 Permission is hereby granted, free of charge, to any person obtaining a copy of this
5 software and associated documentation files (the "Software"), to deal in the Software
6 without restriction, including without limitation the rights to use, copy, modify,
7 merge, publish, distribute, sublicense, and/or sell copies of the Software, and to
8 permit persons to whom the Software is furnished to do so.
9
10 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
11 INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
12 PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
13 HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
14 OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
15 SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
16 """
17 import os
18 import logging
19 import six
20 import samtranslator
21 from samtranslator.parser import parser
22 from samtranslator.translator.translator import Translator
23 from samtranslator.public.exceptions import InvalidDocumentException
24
25 import cfnlint.helpers
26 LOGGER = logging.getLogger('cfnlint')
27
28 class Transform(object):
29 """
30 Application Serverless Module tranform Wrappor. Based on code from AWS SAM CLI:
31 https://github.com/awslabs/aws-sam-cli/blob/develop/samcli/commands/validate/lib/sam_template_validator.py
32 """
33
34 def __init__(self, filename, template, region):
35 """
36 Initialize Transform class
37 """
38 self._filename = filename
39 self._template = template
40 self._region = region
41
42 self._managed_policy_map = self.load_managed_policies()
43 self._sam_parser = parser.Parser()
44
45 def template(self):
46 """Get the template"""
47 return self._template
48
49 def load_managed_policies(self):
50 """
51 Load the ManagedPolicies locally, based on the AWS-CLI:
52 https://github.com/awslabs/aws-sam-cli/blob/develop/samcli/lib/samlib/default_managed_policies.json
53 """
54 return cfnlint.helpers.load_resources('data/Serverless/ManagedPolicies.json')
55
56 def _replace_local_codeuri(self):
57 """
58 Replaces the CodeUri in AWS::Serverless::Function and DefinitionUri in AWS::Serverless::Api to a fake
59 S3 Uri. This is to support running the SAM Translator with valid values for these fields. If this in not done,
60 the template is invalid in the eyes of SAM Translator (the translator does not support local paths)
61 """
62
63 all_resources = self._template.get('Resources', {})
64
65 for _, resource in all_resources.items():
66
67 resource_type = resource.get('Type')
68 resource_dict = resource.get('Properties')
69
70 if resource_type == 'AWS::Serverless::Function':
71
72 Transform._update_to_s3_uri('CodeUri', resource_dict)
73 if resource_type in ['AWS::Serverless::LayerVersion']:
74 if resource_dict.get('ContentUri'):
75 Transform._update_to_s3_uri('ContentUri', resource_dict)
76 if resource_type == 'AWS::Serverless::Api':
77 if 'DefinitionBody' not in resource_dict:
78 Transform._update_to_s3_uri('DefinitionUri', resource_dict)
79
80 def transform_template(self):
81 """
82 Transform the Template using the Serverless Application Model.
83 """
84 matches = []
85
86 try:
87 # Output the SAM Translator version in debug mode
88 LOGGER.debug('SAM Translator: %s', samtranslator.__version__)
89
90 sam_translator = Translator(managed_policy_map=self._managed_policy_map,
91 sam_parser=self._sam_parser)
92
93 self._replace_local_codeuri()
94
95 # Tell SAM to use the region we're linting in, this has to be controlled using the default AWS mechanisms, see also:
96 # https://github.com/awslabs/serverless-application-model/blob/master/samtranslator/translator/arn_generator.py
97 LOGGER.debug('Setting AWS_DEFAULT_REGION to %s', self._region)
98 os.environ['AWS_DEFAULT_REGION'] = self._region
99
100 self._template = cfnlint.helpers.convert_dict(
101 sam_translator.translate(sam_template=self._template, parameter_values={}))
102
103 LOGGER.debug('Transformed template: %s', self._template)
104 except InvalidDocumentException as e:
105 message = 'Error transforming template: {0}'
106 for cause in e.causes:
107 matches.append(cfnlint.Match(
108 1, 1,
109 1, 1,
110 self._filename, cfnlint.TransformError(), message.format(cause.message)))
111 except Exception as e: # pylint: disable=W0703
112 LOGGER.debug('Error transforming template: %s', str(e))
113 LOGGER.debug('Stack trace: %s', e, exc_info=True)
114 message = 'Error transforming template: {0}'
115 matches.append(cfnlint.Match(
116 1, 1,
117 1, 1,
118 self._filename, cfnlint.TransformError(), message.format(str(e))))
119
120 return matches
121
122 @staticmethod
123 def is_s3_uri(uri):
124 """
125 Checks the uri and determines if it is a valid S3 Uri
126 Parameters
127 ----------
128 uri str, required
129 Uri to check
130 Returns
131 -------
132 bool
133 Returns True if the uri given is an S3 uri, otherwise False
134 """
135 return isinstance(uri, six.string_types) and uri.startswith('s3://')
136
137 @staticmethod
138 def _update_to_s3_uri(property_key, resource_property_dict, s3_uri_value='s3://bucket/value'):
139 """
140 Updates the 'property_key' in the 'resource_property_dict' to the value of 's3_uri_value'
141 Note: The function will mutate the resource_property_dict that is pass in
142 Parameters
143 ----------
144 property_key str, required
145 Key in the resource_property_dict
146 resource_property_dict dict, required
147 Property dictionary of a Resource in the template to replace
148 s3_uri_value str, optional
149 Value to update the value of the property_key to
150 """
151 uri_property = resource_property_dict.get(property_key, '.')
152
153 # ignore if dict or already an S3 Uri
154 if isinstance(uri_property, dict) or Transform.is_s3_uri(uri_property):
155 return
156
157 resource_property_dict[property_key] = s3_uri_value
158
[end of src/cfnlint/transform.py]
</code>
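To make the helpers at the bottom of the listing concrete, here is a small standalone rendition (Python 3 only; the property values are invented):

```python
# Standalone rendition of is_s3_uri()/_update_to_s3_uri(): local paths get
# swapped for a placeholder S3 URI; dict values and real S3 URIs are left alone.
def update_to_s3_uri(key, props, s3_uri_value="s3://bucket/value"):
    uri = props.get(key, ".")
    if isinstance(uri, dict) or (isinstance(uri, str) and uri.startswith("s3://")):
        return
    props[key] = s3_uri_value

props = {"CodeUri": "./handler", "DefinitionUri": {"Bucket": "b", "Key": "k"}}
update_to_s3_uri("CodeUri", props)
update_to_s3_uri("DefinitionUri", props)
print(props)
```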
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/cfnlint/transform.py b/src/cfnlint/transform.py
--- a/src/cfnlint/transform.py
+++ b/src/cfnlint/transform.py
@@ -73,6 +73,10 @@
if resource_type in ['AWS::Serverless::LayerVersion']:
if resource_dict.get('ContentUri'):
Transform._update_to_s3_uri('ContentUri', resource_dict)
+ if resource_type == 'AWS::Serverless::Application':
+ if resource_dict.get('Location'):
+ resource_dict['Location'] = ''
+ Transform._update_to_s3_uri('Location', resource_dict)
if resource_type == 'AWS::Serverless::Api':
if 'DefinitionBody' not in resource_dict:
Transform._update_to_s3_uri('DefinitionUri', resource_dict)
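A short usage sketch of the added branch, using the resource shape from the issue (the account id and application name are placeholders): the SAR `Location` is blanked and then stubbed to an S3 URI, so the SAM translator never tries to look up the real ApplicationId.

```python
# Effect of the new branch on a Serverless::Application resource's properties.
resource_dict = {
    "Location": {
        "ApplicationId": "arn:aws:serverlessrepo:us-west-2:123456789012:applications/example-app",
        "SemanticVersion": "1.0.0",
    },
}
if resource_dict.get("Location"):
    resource_dict["Location"] = ""
    resource_dict["Location"] = "s3://bucket/value"  # what _update_to_s3_uri then does
print(resource_dict["Location"])
```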
| {"golden_diff": "diff --git a/src/cfnlint/transform.py b/src/cfnlint/transform.py\n--- a/src/cfnlint/transform.py\n+++ b/src/cfnlint/transform.py\n@@ -73,6 +73,10 @@\n if resource_type in ['AWS::Serverless::LayerVersion']:\n if resource_dict.get('ContentUri'):\n Transform._update_to_s3_uri('ContentUri', resource_dict)\n+ if resource_type == 'AWS::Serverless::Application':\n+ if resource_dict.get('Location'):\n+ resource_dict['Location'] = ''\n+ Transform._update_to_s3_uri('Location', resource_dict)\n if resource_type == 'AWS::Serverless::Api':\n if 'DefinitionBody' not in resource_dict:\n Transform._update_to_s3_uri('DefinitionUri', resource_dict)\n", "issue": "E0001 Error for SAR Apps\n*cfn-lint version: `cfn-lint 0.15.0`*\r\n\r\n*Description of issue*\r\n\r\nWhile running `cfn-lint` on a nested CloudFormation stack containing `AWS::Serverless::Application` resources, it errors with:\r\n\r\n```yaml\r\nResources:\r\n AppName:\r\n Type: AWS::Serverless::Application\r\n Properties:\r\n Location:\r\n ApplicationId: arn:aws:serverlessrepo:us-west-2:<my-account-id>:applications/<app-name>\r\n SemanticVersion: 1.0.0\r\n Parameters:\r\n Debug: 'True'\r\n MemorySizeMB: 128\r\n TimeoutSec: 300\r\n```\r\n\r\n```bash\r\n$ cfn-lint template.yml\r\n\r\nE0001 Resource with id [AppName] is invalid. Application with id arn:aws:serverlessrepo:us-west-2:<my-account-Id-hosting-the-app>:applications/<app-name> could not be found.\r\ntemplate.yml:1:1\r\n```\r\n\r\nSupporting evidence:\r\n\r\n1. The Application definitely exists, since the template runs with no issues.\r\n2. I have admin permissions with the current user to make, update, view, delete the app.\r\n\r\n\n", "before_files": [{"content": "\"\"\"\n Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n\n Permission is hereby granted, free of charge, to any person obtaining a copy of this\n software and associated documentation files (the \"Software\"), to deal in the Software\n without restriction, including without limitation the rights to use, copy, modify,\n merge, publish, distribute, sublicense, and/or sell copies of the Software, and to\n permit persons to whom the Software is furnished to do so.\n\n THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,\n INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\"\"\"\nimport os\nimport logging\nimport six\nimport samtranslator\nfrom samtranslator.parser import parser\nfrom samtranslator.translator.translator import Translator\nfrom samtranslator.public.exceptions import InvalidDocumentException\n\nimport cfnlint.helpers\nLOGGER = logging.getLogger('cfnlint')\n\nclass Transform(object):\n \"\"\"\n Application Serverless Module tranform Wrappor. 
Based on code from AWS SAM CLI:\n https://github.com/awslabs/aws-sam-cli/blob/develop/samcli/commands/validate/lib/sam_template_validator.py\n \"\"\"\n\n def __init__(self, filename, template, region):\n \"\"\"\n Initialize Transform class\n \"\"\"\n self._filename = filename\n self._template = template\n self._region = region\n\n self._managed_policy_map = self.load_managed_policies()\n self._sam_parser = parser.Parser()\n\n def template(self):\n \"\"\"Get the template\"\"\"\n return self._template\n\n def load_managed_policies(self):\n \"\"\"\n Load the ManagedPolicies locally, based on the AWS-CLI:\n https://github.com/awslabs/aws-sam-cli/blob/develop/samcli/lib/samlib/default_managed_policies.json\n \"\"\"\n return cfnlint.helpers.load_resources('data/Serverless/ManagedPolicies.json')\n\n def _replace_local_codeuri(self):\n \"\"\"\n Replaces the CodeUri in AWS::Serverless::Function and DefinitionUri in AWS::Serverless::Api to a fake\n S3 Uri. This is to support running the SAM Translator with valid values for these fields. If this in not done,\n the template is invalid in the eyes of SAM Translator (the translator does not support local paths)\n \"\"\"\n\n all_resources = self._template.get('Resources', {})\n\n for _, resource in all_resources.items():\n\n resource_type = resource.get('Type')\n resource_dict = resource.get('Properties')\n\n if resource_type == 'AWS::Serverless::Function':\n\n Transform._update_to_s3_uri('CodeUri', resource_dict)\n if resource_type in ['AWS::Serverless::LayerVersion']:\n if resource_dict.get('ContentUri'):\n Transform._update_to_s3_uri('ContentUri', resource_dict)\n if resource_type == 'AWS::Serverless::Api':\n if 'DefinitionBody' not in resource_dict:\n Transform._update_to_s3_uri('DefinitionUri', resource_dict)\n\n def transform_template(self):\n \"\"\"\n Transform the Template using the Serverless Application Model.\n \"\"\"\n matches = []\n\n try:\n # Output the SAM Translator version in debug mode\n LOGGER.debug('SAM Translator: %s', samtranslator.__version__)\n\n sam_translator = Translator(managed_policy_map=self._managed_policy_map,\n sam_parser=self._sam_parser)\n\n self._replace_local_codeuri()\n\n # Tell SAM to use the region we're linting in, this has to be controlled using the default AWS mechanisms, see also:\n # https://github.com/awslabs/serverless-application-model/blob/master/samtranslator/translator/arn_generator.py\n LOGGER.debug('Setting AWS_DEFAULT_REGION to %s', self._region)\n os.environ['AWS_DEFAULT_REGION'] = self._region\n\n self._template = cfnlint.helpers.convert_dict(\n sam_translator.translate(sam_template=self._template, parameter_values={}))\n\n LOGGER.debug('Transformed template: %s', self._template)\n except InvalidDocumentException as e:\n message = 'Error transforming template: {0}'\n for cause in e.causes:\n matches.append(cfnlint.Match(\n 1, 1,\n 1, 1,\n self._filename, cfnlint.TransformError(), message.format(cause.message)))\n except Exception as e: # pylint: disable=W0703\n LOGGER.debug('Error transforming template: %s', str(e))\n LOGGER.debug('Stack trace: %s', e, exc_info=True)\n message = 'Error transforming template: {0}'\n matches.append(cfnlint.Match(\n 1, 1,\n 1, 1,\n self._filename, cfnlint.TransformError(), message.format(str(e))))\n\n return matches\n\n @staticmethod\n def is_s3_uri(uri):\n \"\"\"\n Checks the uri and determines if it is a valid S3 Uri\n Parameters\n ----------\n uri str, required\n Uri to check\n Returns\n -------\n bool\n Returns True if the uri given is an S3 uri, otherwise False\n 
\"\"\"\n return isinstance(uri, six.string_types) and uri.startswith('s3://')\n\n @staticmethod\n def _update_to_s3_uri(property_key, resource_property_dict, s3_uri_value='s3://bucket/value'):\n \"\"\"\n Updates the 'property_key' in the 'resource_property_dict' to the value of 's3_uri_value'\n Note: The function will mutate the resource_property_dict that is pass in\n Parameters\n ----------\n property_key str, required\n Key in the resource_property_dict\n resource_property_dict dict, required\n Property dictionary of a Resource in the template to replace\n s3_uri_value str, optional\n Value to update the value of the property_key to\n \"\"\"\n uri_property = resource_property_dict.get(property_key, '.')\n\n # ignore if dict or already an S3 Uri\n if isinstance(uri_property, dict) or Transform.is_s3_uri(uri_property):\n return\n\n resource_property_dict[property_key] = s3_uri_value\n", "path": "src/cfnlint/transform.py"}]} | 2,566 | 177 |
gh_patches_debug_21982 | rasdani/github-patches | git_diff | beetbox__beets-3655 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
work_date of parentwork with parentwork plugin
I currently have my beets library paths looking like this (I mostly have classical music, so I use the parentwork plugin): parent_composer_sort/work_date - parentwork/..., but if different subworks have different dates, this can store them in different directories. Is there a way to work around this, perhaps in the path config? Or would I need to write a plugin or edit the parentwork plugin?
</issue>
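One way to see the shape of a fix (the MusicBrainz payload below is a made-up stand-in, no network involved): the composer relation of the *parent* work carries a single `end` (composition) date, whereas each movement may report its own, which is what scatters subworks across directories.

```python
# Invented parent-work payload in the same shape the plugin receives from
# musicbrainzngs; the composer's 'end' date is the one date all subworks of
# this parent would share.
parent_work = {"work": {"artist-relation-list": [
    {"type": "composer",
     "artist": {"name": "Wolfgang Amadeus Mozart",
                "sort-name": "Mozart, Wolfgang Amadeus"},
     "end": "1788"},
]}}

date = None
for rel in parent_work["work"]["artist-relation-list"]:
    if rel["type"] == "composer" and "end" in rel:
        date = rel["end"]
print(date)  # 1788
```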
<code>
[start of beetsplug/parentwork.py]
1 # -*- coding: utf-8 -*-
2 # This file is part of beets.
3 # Copyright 2017, Dorian Soergel.
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
15
16 """Gets parent work, its disambiguation and id, composer, composer sort name
17 and work composition date
18 """
19
20 from __future__ import division, absolute_import, print_function
21
22 from beets import ui
23 from beets.plugins import BeetsPlugin
24
25 import musicbrainzngs
26
27
28 def direct_parent_id(mb_workid, work_date=None):
29 """Given a Musicbrainz work id, find the id one of the works the work is
30 part of and the first composition date it encounters.
31 """
32 work_info = musicbrainzngs.get_work_by_id(mb_workid,
33 includes=["work-rels",
34 "artist-rels"])
35 if 'artist-relation-list' in work_info['work'] and work_date is None:
36 for artist in work_info['work']['artist-relation-list']:
37 if artist['type'] == 'composer':
38 if 'end' in artist.keys():
39 work_date = artist['end']
40
41 if 'work-relation-list' in work_info['work']:
42 for direct_parent in work_info['work']['work-relation-list']:
43 if direct_parent['type'] == 'parts' \
44 and direct_parent.get('direction') == 'backward':
45 direct_id = direct_parent['work']['id']
46 return direct_id, work_date
47 return None, work_date
48
49
50 def work_parent_id(mb_workid):
51 """Find the parent work id and composition date of a work given its id.
52 """
53 work_date = None
54 while True:
55 new_mb_workid, work_date = direct_parent_id(mb_workid, work_date)
56 if not new_mb_workid:
57 return mb_workid, work_date
58 mb_workid = new_mb_workid
59 return mb_workid, work_date
60
61
62 def find_parentwork_info(mb_workid):
63 """Get the MusicBrainz information dict about a parent work, including
64 the artist relations, and the composition date for a work's parent work.
65 """
66 parent_id, work_date = work_parent_id(mb_workid)
67 work_info = musicbrainzngs.get_work_by_id(parent_id,
68 includes=["artist-rels"])
69 return work_info, work_date
70
71
72 class ParentWorkPlugin(BeetsPlugin):
73 def __init__(self):
74 super(ParentWorkPlugin, self).__init__()
75
76 self.config.add({
77 'auto': False,
78 'force': False,
79 })
80
81 if self.config['auto']:
82 self.import_stages = [self.imported]
83
84 def commands(self):
85
86 def func(lib, opts, args):
87 self.config.set_args(opts)
88 force_parent = self.config['force'].get(bool)
89 write = ui.should_write()
90
91 for item in lib.items(ui.decargs(args)):
92 changed = self.find_work(item, force_parent)
93 if changed:
94 item.store()
95 if write:
96 item.try_write()
97 command = ui.Subcommand(
98 'parentwork',
99 help=u'fetche parent works, composers and dates')
100
101 command.parser.add_option(
102 u'-f', u'--force', dest='force',
103 action='store_true', default=None,
104 help=u're-fetch when parent work is already present')
105
106 command.func = func
107 return [command]
108
109 def imported(self, session, task):
110 """Import hook for fetching parent works automatically.
111 """
112 force_parent = self.config['force'].get(bool)
113
114 for item in task.imported_items():
115 self.find_work(item, force_parent)
116 item.store()
117
118 def get_info(self, item, work_info):
119 """Given the parent work info dict, fetch parent_composer,
120 parent_composer_sort, parentwork, parentwork_disambig, mb_workid and
121 composer_ids.
122 """
123
124 parent_composer = []
125 parent_composer_sort = []
126 parentwork_info = {}
127
128 composer_exists = False
129 if 'artist-relation-list' in work_info['work']:
130 for artist in work_info['work']['artist-relation-list']:
131 if artist['type'] == 'composer':
132 parent_composer.append(artist['artist']['name'])
133 parent_composer_sort.append(artist['artist']['sort-name'])
134
135 parentwork_info['parent_composer'] = u', '.join(parent_composer)
136 parentwork_info['parent_composer_sort'] = u', '.join(
137 parent_composer_sort)
138
139 if not composer_exists:
140 self._log.debug(
141 'no composer for {}; add one at '
142 'https://musicbrainz.org/work/{}',
143 item, work_info['work']['id'],
144 )
145
146 parentwork_info['parentwork'] = work_info['work']['title']
147 parentwork_info['mb_parentworkid'] = work_info['work']['id']
148
149 if 'disambiguation' in work_info['work']:
150 parentwork_info['parentwork_disambig'] = work_info[
151 'work']['disambiguation']
152
153 else:
154 parentwork_info['parentwork_disambig'] = None
155
156 return parentwork_info
157
158 def find_work(self, item, force):
159 """Finds the parent work of a recording and populates the tags
160 accordingly.
161
162 The parent work is found recursively, by finding the direct parent
163 repeatedly until there are no more links in the chain. We return the
164 final, topmost work in the chain.
165
166 Namely, the tags parentwork, parentwork_disambig, mb_parentworkid,
167 parent_composer, parent_composer_sort and work_date are populated.
168 """
169
170 if not item.mb_workid:
171 self._log.info('No work for {}, \
172 add one at https://musicbrainz.org/recording/{}', item, item.mb_trackid)
173 return
174
175 hasparent = hasattr(item, 'parentwork')
176 work_changed = True
177 if hasattr(item, 'parentwork_workid_current'):
178 work_changed = item.parentwork_workid_current != item.mb_workid
179 if force or not hasparent or work_changed:
180 try:
181 work_info, work_date = find_parentwork_info(item.mb_workid)
182 except musicbrainzngs.musicbrainz.WebServiceError as e:
183 self._log.debug("error fetching work: {}", e)
184 return
185 parent_info = self.get_info(item, work_info)
186 parent_info['parentwork_workid_current'] = item.mb_workid
187 if 'parent_composer' in parent_info:
188 self._log.debug("Work fetched: {} - {}",
189 parent_info['parentwork'],
190 parent_info['parent_composer'])
191 else:
192 self._log.debug("Work fetched: {} - no parent composer",
193 parent_info['parentwork'])
194
195 elif hasparent:
196 self._log.debug("{}: Work present, skipping", item)
197 return
198
199 # apply all non-null values to the item
200 for key, value in parent_info.items():
201 if value:
202 item[key] = value
203
204 if work_date:
205 item['work_date'] = work_date
206 return ui.show_model_changes(
207 item, fields=['parentwork', 'parentwork_disambig',
208 'mb_parentworkid', 'parent_composer',
209 'parent_composer_sort', 'work_date',
210 'parentwork_workid_current'])
211
[end of beetsplug/parentwork.py]
</code>
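The recursion in `work_parent_id` above is easier to see without the MusicBrainz calls; the lookup table below is invented and stands in for the backward 'parts' relations:

```python
# Offline sketch of work_parent_id(): follow parent links until a work has no
# parent, then report that topmost work. Two movements of one symphony end up
# at the same parent, hence a single shared composition date.
parent_of = {
    "movement-1-id": "symphony-id",
    "movement-2-id": "symphony-id",
    "symphony-id": None,
}

def top_parent(work_id):
    while parent_of.get(work_id):
        work_id = parent_of[work_id]
    return work_id

print(top_parent("movement-1-id"))  # symphony-id
print(top_parent("movement-2-id"))  # symphony-id
```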
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/beetsplug/parentwork.py b/beetsplug/parentwork.py
--- a/beetsplug/parentwork.py
+++ b/beetsplug/parentwork.py
@@ -131,6 +131,8 @@
if artist['type'] == 'composer':
parent_composer.append(artist['artist']['name'])
parent_composer_sort.append(artist['artist']['sort-name'])
+ if 'end' in artist.keys():
+ parentwork_info["parentwork_date"] = artist['end']
parentwork_info['parent_composer'] = u', '.join(parent_composer)
parentwork_info['parent_composer_sort'] = u', '.join(
@@ -207,4 +209,4 @@
item, fields=['parentwork', 'parentwork_disambig',
'mb_parentworkid', 'parent_composer',
'parent_composer_sort', 'work_date',
- 'parentwork_workid_current'])
+ 'parentwork_workid_current', 'parentwork_date'])
| {"golden_diff": "diff --git a/beetsplug/parentwork.py b/beetsplug/parentwork.py\n--- a/beetsplug/parentwork.py\n+++ b/beetsplug/parentwork.py\n@@ -131,6 +131,8 @@\n if artist['type'] == 'composer':\n parent_composer.append(artist['artist']['name'])\n parent_composer_sort.append(artist['artist']['sort-name'])\n+ if 'end' in artist.keys():\n+ parentwork_info[\"parentwork_date\"] = artist['end']\n \n parentwork_info['parent_composer'] = u', '.join(parent_composer)\n parentwork_info['parent_composer_sort'] = u', '.join(\n@@ -207,4 +209,4 @@\n item, fields=['parentwork', 'parentwork_disambig',\n 'mb_parentworkid', 'parent_composer',\n 'parent_composer_sort', 'work_date',\n- 'parentwork_workid_current'])\n+ 'parentwork_workid_current', 'parentwork_date'])\n", "issue": "work_date of parentwork with parentwork plugin\nI currently have my beets library paths looking like this (I mostly have classical music, so I use the parentwork plugin): parent_composer_sort/work_date - parentwork/..., but if different subworks have different dates, this can store them in different directories. Is there a way to work around this, perhaps in the path config? Or would I need to write a plugin or edit the parentwork plugin?\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# This file is part of beets.\n# Copyright 2017, Dorian Soergel.\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be\n# included in all copies or substantial portions of the Software.\n\n\"\"\"Gets parent work, its disambiguation and id, composer, composer sort name\nand work composition date\n\"\"\"\n\nfrom __future__ import division, absolute_import, print_function\n\nfrom beets import ui\nfrom beets.plugins import BeetsPlugin\n\nimport musicbrainzngs\n\n\ndef direct_parent_id(mb_workid, work_date=None):\n \"\"\"Given a Musicbrainz work id, find the id one of the works the work is\n part of and the first composition date it encounters.\n \"\"\"\n work_info = musicbrainzngs.get_work_by_id(mb_workid,\n includes=[\"work-rels\",\n \"artist-rels\"])\n if 'artist-relation-list' in work_info['work'] and work_date is None:\n for artist in work_info['work']['artist-relation-list']:\n if artist['type'] == 'composer':\n if 'end' in artist.keys():\n work_date = artist['end']\n\n if 'work-relation-list' in work_info['work']:\n for direct_parent in work_info['work']['work-relation-list']:\n if direct_parent['type'] == 'parts' \\\n and direct_parent.get('direction') == 'backward':\n direct_id = direct_parent['work']['id']\n return direct_id, work_date\n return None, work_date\n\n\ndef work_parent_id(mb_workid):\n \"\"\"Find the parent work id and composition date of a work given its id.\n \"\"\"\n work_date = None\n while True:\n new_mb_workid, work_date = direct_parent_id(mb_workid, work_date)\n if not new_mb_workid:\n return mb_workid, work_date\n mb_workid = new_mb_workid\n return mb_workid, work_date\n\n\ndef find_parentwork_info(mb_workid):\n \"\"\"Get the MusicBrainz information dict about a parent work, including\n the artist relations, and the composition date for a work's 
parent work.\n \"\"\"\n parent_id, work_date = work_parent_id(mb_workid)\n work_info = musicbrainzngs.get_work_by_id(parent_id,\n includes=[\"artist-rels\"])\n return work_info, work_date\n\n\nclass ParentWorkPlugin(BeetsPlugin):\n def __init__(self):\n super(ParentWorkPlugin, self).__init__()\n\n self.config.add({\n 'auto': False,\n 'force': False,\n })\n\n if self.config['auto']:\n self.import_stages = [self.imported]\n\n def commands(self):\n\n def func(lib, opts, args):\n self.config.set_args(opts)\n force_parent = self.config['force'].get(bool)\n write = ui.should_write()\n\n for item in lib.items(ui.decargs(args)):\n changed = self.find_work(item, force_parent)\n if changed:\n item.store()\n if write:\n item.try_write()\n command = ui.Subcommand(\n 'parentwork',\n help=u'fetche parent works, composers and dates')\n\n command.parser.add_option(\n u'-f', u'--force', dest='force',\n action='store_true', default=None,\n help=u're-fetch when parent work is already present')\n\n command.func = func\n return [command]\n\n def imported(self, session, task):\n \"\"\"Import hook for fetching parent works automatically.\n \"\"\"\n force_parent = self.config['force'].get(bool)\n\n for item in task.imported_items():\n self.find_work(item, force_parent)\n item.store()\n\n def get_info(self, item, work_info):\n \"\"\"Given the parent work info dict, fetch parent_composer,\n parent_composer_sort, parentwork, parentwork_disambig, mb_workid and\n composer_ids.\n \"\"\"\n\n parent_composer = []\n parent_composer_sort = []\n parentwork_info = {}\n\n composer_exists = False\n if 'artist-relation-list' in work_info['work']:\n for artist in work_info['work']['artist-relation-list']:\n if artist['type'] == 'composer':\n parent_composer.append(artist['artist']['name'])\n parent_composer_sort.append(artist['artist']['sort-name'])\n\n parentwork_info['parent_composer'] = u', '.join(parent_composer)\n parentwork_info['parent_composer_sort'] = u', '.join(\n parent_composer_sort)\n\n if not composer_exists:\n self._log.debug(\n 'no composer for {}; add one at '\n 'https://musicbrainz.org/work/{}',\n item, work_info['work']['id'],\n )\n\n parentwork_info['parentwork'] = work_info['work']['title']\n parentwork_info['mb_parentworkid'] = work_info['work']['id']\n\n if 'disambiguation' in work_info['work']:\n parentwork_info['parentwork_disambig'] = work_info[\n 'work']['disambiguation']\n\n else:\n parentwork_info['parentwork_disambig'] = None\n\n return parentwork_info\n\n def find_work(self, item, force):\n \"\"\"Finds the parent work of a recording and populates the tags\n accordingly.\n\n The parent work is found recursively, by finding the direct parent\n repeatedly until there are no more links in the chain. 
We return the\n final, topmost work in the chain.\n\n Namely, the tags parentwork, parentwork_disambig, mb_parentworkid,\n parent_composer, parent_composer_sort and work_date are populated.\n \"\"\"\n\n if not item.mb_workid:\n self._log.info('No work for {}, \\\nadd one at https://musicbrainz.org/recording/{}', item, item.mb_trackid)\n return\n\n hasparent = hasattr(item, 'parentwork')\n work_changed = True\n if hasattr(item, 'parentwork_workid_current'):\n work_changed = item.parentwork_workid_current != item.mb_workid\n if force or not hasparent or work_changed:\n try:\n work_info, work_date = find_parentwork_info(item.mb_workid)\n except musicbrainzngs.musicbrainz.WebServiceError as e:\n self._log.debug(\"error fetching work: {}\", e)\n return\n parent_info = self.get_info(item, work_info)\n parent_info['parentwork_workid_current'] = item.mb_workid\n if 'parent_composer' in parent_info:\n self._log.debug(\"Work fetched: {} - {}\",\n parent_info['parentwork'],\n parent_info['parent_composer'])\n else:\n self._log.debug(\"Work fetched: {} - no parent composer\",\n parent_info['parentwork'])\n\n elif hasparent:\n self._log.debug(\"{}: Work present, skipping\", item)\n return\n\n # apply all non-null values to the item\n for key, value in parent_info.items():\n if value:\n item[key] = value\n\n if work_date:\n item['work_date'] = work_date\n return ui.show_model_changes(\n item, fields=['parentwork', 'parentwork_disambig',\n 'mb_parentworkid', 'parent_composer',\n 'parent_composer_sort', 'work_date',\n 'parentwork_workid_current'])\n", "path": "beetsplug/parentwork.py"}]} | 2,871 | 223 |
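The parentwork fix above keys the new parentwork_date tag off the composer's 'end' field in the MusicBrainz artist-relation list. A minimal, self-contained sketch of that extraction step, using a hand-built dict in place of a live musicbrainzngs response (the helper name extract_parentwork_date is illustrative and not part of the plugin):

def extract_parentwork_date(work_info):
    """Return the 'end' date of the first composer relation, or None."""
    relations = work_info.get("work", {}).get("artist-relation-list", [])
    for artist in relations:
        if artist.get("type") == "composer" and "end" in artist:
            return artist["end"]
    return None


if __name__ == "__main__":
    # Shape mirrors musicbrainzngs.get_work_by_id(..., includes=["artist-rels"]).
    fake_work = {
        "work": {
            "title": "Ein deutsches Requiem",
            "artist-relation-list": [
                {
                    "type": "composer",
                    "end": "1897-04-03",
                    "artist": {"name": "Johannes Brahms", "sort-name": "Brahms, Johannes"},
                },
            ],
        }
    }
    print(extract_parentwork_date(fake_work))  # prints: 1897-04-03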
gh_patches_debug_20328 | rasdani/github-patches | git_diff | talonhub__community-698 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Shouldn't call methods directly on an @action_class
You shouldn't call methods directly on an @action_class, for example:
https://github.com/knausj85/knausj_talon/blob/65ccd94b8fe97056a34b858b2f1f261cc5269c3f/mouse_grid/mouse_grid.py#L282
This will continue to work, but generate a warning in the next beta.
You should probably use `actions.self.grid_narrow(...)` instead.
</issue>
<code>
[start of mouse_grid/mouse_grid.py]
1 # courtesy of https://github.com/timo/
2 # see https://github.com/timo/talon_scripts
3 from talon import Module, Context, app, canvas, screen, settings, ui, ctrl, cron
4 from talon.skia import Shader, Color, Paint, Rect
5 from talon.types.point import Point2d
6 from talon_plugins import eye_mouse, eye_zoom_mouse
7 from typing import Union
8
9 import math, time
10
11 import typing
12
13 mod = Module()
14 narrow_expansion = mod.setting(
15 "grid_narrow_expansion",
16 type=int,
17 default=0,
18 desc="""After narrowing, grow the new region by this many pixels in every direction, to make things immediately on edges easier to hit, and when the grid is at its smallest, it allows you to still nudge it around""",
19 )
20
21 mod.tag("mouse_grid_showing", desc="Tag indicates whether the mouse grid is showing")
22 mod.tag("mouse_grid_enabled", desc="Deprecated: do not use. Activates legacy m grid command")
23 ctx = Context()
24
25
26 class MouseSnapNine:
27 def __init__(self):
28 self.screen = None
29 self.rect = None
30 self.history = []
31 self.img = None
32 self.mcanvas = None
33 self.active = False
34 self.count = 0
35 self.was_control_mouse_active = False
36 self.was_zoom_mouse_active = False
37
38 def setup(self, *, rect: Rect = None, screen_num: int = None):
39 screens = ui.screens()
40 # each if block here might set the rect to None to indicate failure
41 if rect is not None:
42 try:
43 screen = ui.screen_containing(*rect.center)
44 except Exception:
45 rect = None
46 if rect is None and screen_num is not None:
47 screen = screens[screen_num % len(screens)]
48 rect = screen.rect
49 if rect is None:
50 screen = screens[0]
51 rect = screen.rect
52 self.rect = rect.copy()
53 self.screen = screen
54 self.count = 0
55 self.img = None
56 if self.mcanvas is not None:
57 self.mcanvas.close()
58 self.mcanvas = canvas.Canvas.from_screen(screen)
59 if self.active:
60 self.mcanvas.register("draw", self.draw)
61 self.mcanvas.freeze()
62
63 def show(self):
64 if self.active:
65 return
66 # noinspection PyUnresolvedReferences
67 if eye_zoom_mouse.zoom_mouse.enabled:
68 self.was_zoom_mouse_active = True
69 eye_zoom_mouse.toggle_zoom_mouse(False)
70 if eye_mouse.control_mouse.enabled:
71 self.was_control_mouse_active = True
72 eye_mouse.control_mouse.toggle()
73 self.mcanvas.register("draw", self.draw)
74 self.mcanvas.freeze()
75 self.active = True
76 return
77
78 def close(self):
79 if not self.active:
80 return
81 self.mcanvas.unregister("draw", self.draw)
82 self.mcanvas.close()
83 self.mcanvas = None
84 self.img = None
85
86 self.active = False
87 if self.was_control_mouse_active and not eye_mouse.control_mouse.enabled:
88 eye_mouse.control_mouse.toggle()
89 if self.was_zoom_mouse_active and not eye_zoom_mouse.zoom_mouse.enabled:
90 eye_zoom_mouse.toggle_zoom_mouse(True)
91
92 self.was_zoom_mouse_active = False
93 self.was_control_mouse_active = False
94
95 def draw(self, canvas):
96 paint = canvas.paint
97
98 def draw_grid(offset_x, offset_y, width, height):
99 canvas.draw_line(
100 offset_x + width // 3,
101 offset_y,
102 offset_x + width // 3,
103 offset_y + height,
104 )
105 canvas.draw_line(
106 offset_x + 2 * width // 3,
107 offset_y,
108 offset_x + 2 * width // 3,
109 offset_y + height,
110 )
111
112 canvas.draw_line(
113 offset_x,
114 offset_y + height // 3,
115 offset_x + width,
116 offset_y + height // 3,
117 )
118 canvas.draw_line(
119 offset_x,
120 offset_y + 2 * height // 3,
121 offset_x + width,
122 offset_y + 2 * height // 3,
123 )
124
125 def draw_crosses(offset_x, offset_y, width, height):
126 for row in range(0, 2):
127 for col in range(0, 2):
128 cx = offset_x + width / 6 + (col + 0.5) * width / 3
129 cy = offset_y + height / 6 + (row + 0.5) * height / 3
130
131 canvas.draw_line(cx - 10, cy, cx + 10, cy)
132 canvas.draw_line(cx, cy - 10, cx, cy + 10)
133
134 grid_stroke = 1
135
136 def draw_text(offset_x, offset_y, width, height):
137 canvas.paint.text_align = canvas.paint.TextAlign.CENTER
138 for row in range(3):
139 for col in range(3):
140 text_string = ""
141 if settings["user.grids_put_one_bottom_left"]:
142 text_string = f"{(2 - row)*3+col+1}"
143 else:
144 text_string = f"{row*3+col+1}"
145 text_rect = canvas.paint.measure_text(text_string)[1]
146 background_rect = text_rect.copy()
147 background_rect.center = Point2d(
148 offset_x + width / 6 + col * width / 3,
149 offset_y + height / 6 + row * height / 3,
150 )
151 background_rect = background_rect.inset(-4)
152 paint.color = "9999995f"
153 paint.style = Paint.Style.FILL
154 canvas.draw_rect(background_rect)
155 paint.color = "00ff00ff"
156 canvas.draw_text(
157 text_string,
158 offset_x + width / 6 + col * width / 3,
159 offset_y + height / 6 + row * height / 3 + text_rect.height / 2,
160 )
161
162 if self.count < 2:
163 paint.color = "00ff007f"
164 for which in range(1, 10):
165 gap = 35 - self.count * 10
166 if not self.active:
167 gap = 45
168 draw_crosses(*self.calc_narrow(which, self.rect))
169
170 paint.stroke_width = grid_stroke
171 if self.active:
172 paint.color = "ff0000ff"
173 else:
174 paint.color = "000000ff"
175 if self.count >= 2:
176 aspect = self.rect.width / self.rect.height
177 if aspect >= 1:
178 w = self.screen.width / 3
179 h = w / aspect
180 else:
181 h = self.screen.height / 3
182 w = h * aspect
183 x = self.screen.x + (self.screen.width - w) / 2
184 y = self.screen.y + (self.screen.height - h) / 2
185 self.draw_zoom(canvas, x, y, w, h)
186 draw_grid(x, y, w, h)
187 draw_text(x, y, w, h)
188 else:
189 draw_grid(self.rect.x, self.rect.y, self.rect.width, self.rect.height)
190
191 paint.textsize += 12 - self.count * 3
192 draw_text(self.rect.x, self.rect.y, self.rect.width, self.rect.height)
193
194 def calc_narrow(self, which, rect):
195 rect = rect.copy()
196 bdr = narrow_expansion.get()
197 row = int(which - 1) // 3
198 col = int(which - 1) % 3
199 if settings["user.grids_put_one_bottom_left"]:
200 row = 2 - row
201 rect.x += int(col * rect.width // 3) - bdr
202 rect.y += int(row * rect.height // 3) - bdr
203 rect.width = (rect.width // 3) + bdr * 2
204 rect.height = (rect.height // 3) + bdr * 2
205 return rect
206
207 def narrow(self, which, move=True):
208 if which < 1 or which > 9:
209 return
210 self.save_state()
211 rect = self.calc_narrow(which, self.rect)
212 # check count so we don't bother zooming in _too_ far
213 if self.count < 5:
214 self.rect = rect.copy()
215 self.count += 1
216 if move:
217 ctrl.mouse_move(*rect.center)
218 if self.count >= 2:
219 self.update_screenshot()
220 else:
221 self.mcanvas.freeze()
222
223 def update_screenshot(self):
224 def finish_capture():
225 self.img = screen.capture_rect(self.rect)
226 self.mcanvas.freeze()
227
228 self.mcanvas.hide()
229 cron.after("16ms", finish_capture)
230
231 def draw_zoom(self, canvas, x, y, w, h):
232 if self.img:
233 src = Rect(0, 0, self.img.width, self.img.height)
234 dst = Rect(x, y, w, h)
235 canvas.draw_image_rect(self.img, src, dst)
236
237 def narrow_to_pos(self, x, y):
238 col_size = int(self.width // 3)
239 row_size = int(self.height // 3)
240 col = math.floor((x - self.rect.x) / col_size)
241 row = math.floor((y - self.rect.x) / row_size)
242 self.narrow(1 + col + 3 * row, move=False)
243
244 def save_state(self):
245 self.history.append((self.count, self.rect.copy()))
246
247 def go_back(self):
248 # FIXME: need window and screen tracking
249 self.count, self.rect = self.history.pop()
250 self.mcanvas.freeze()
251
252
253 mg = MouseSnapNine()
254
255
256 @mod.action_class
257 class GridActions:
258 def grid_activate():
259 """Show mouse grid"""
260 if not mg.mcanvas:
261 mg.setup()
262 mg.show()
263 ctx.tags = ["user.mouse_grid_showing"]
264
265 def grid_place_window():
266 """Places the grid on the currently active window"""
267 mg.setup(rect=ui.active_window().rect)
268
269 def grid_reset():
270 """Resets the grid to fill the whole screen again"""
271 if mg.active:
272 mg.setup()
273
274 def grid_select_screen(screen: int):
275 """Brings up mouse grid"""
276 mg.setup(screen_num=screen - 1)
277 mg.show()
278
279 def grid_narrow_list(digit_list: typing.List[str]):
280 """Choose fields multiple times in a row"""
281 for d in digit_list:
282 GridActions.grid_narrow(int(d))
283
284 def grid_narrow(digit: Union[int, str]):
285 """Choose a field of the grid and narrow the selection down"""
286 mg.narrow(int(digit))
287
288 def grid_go_back():
289 """Sets the grid state back to what it was before the last command"""
290 mg.go_back()
291
292 def grid_close():
293 """Close the active grid"""
294 ctx.tags = []
295 mg.close()
296
[end of mouse_grid/mouse_grid.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mouse_grid/mouse_grid.py b/mouse_grid/mouse_grid.py
--- a/mouse_grid/mouse_grid.py
+++ b/mouse_grid/mouse_grid.py
@@ -1,6 +1,6 @@
# courtesy of https://github.com/timo/
# see https://github.com/timo/talon_scripts
-from talon import Module, Context, app, canvas, screen, settings, ui, ctrl, cron
+from talon import Module, Context, actions, app, canvas, screen, settings, ui, ctrl, cron
from talon.skia import Shader, Color, Paint, Rect
from talon.types.point import Point2d
from talon_plugins import eye_mouse, eye_zoom_mouse
@@ -279,7 +279,7 @@
def grid_narrow_list(digit_list: typing.List[str]):
"""Choose fields multiple times in a row"""
for d in digit_list:
- GridActions.grid_narrow(int(d))
+ actions.self.grid_narrow(int(d))
def grid_narrow(digit: Union[int, str]):
"""Choose a field of the grid and narrow the selection down"""
| {"golden_diff": "diff --git a/mouse_grid/mouse_grid.py b/mouse_grid/mouse_grid.py\n--- a/mouse_grid/mouse_grid.py\n+++ b/mouse_grid/mouse_grid.py\n@@ -1,6 +1,6 @@\n # courtesy of https://github.com/timo/\n # see https://github.com/timo/talon_scripts\n-from talon import Module, Context, app, canvas, screen, settings, ui, ctrl, cron\n+from talon import Module, Context, actions, app, canvas, screen, settings, ui, ctrl, cron\n from talon.skia import Shader, Color, Paint, Rect\n from talon.types.point import Point2d\n from talon_plugins import eye_mouse, eye_zoom_mouse\n@@ -279,7 +279,7 @@\n def grid_narrow_list(digit_list: typing.List[str]):\n \"\"\"Choose fields multiple times in a row\"\"\"\n for d in digit_list:\n- GridActions.grid_narrow(int(d))\n+ actions.self.grid_narrow(int(d))\n \n def grid_narrow(digit: Union[int, str]):\n \"\"\"Choose a field of the grid and narrow the selection down\"\"\"\n", "issue": "Shouldn't call methods directly on an @action_class\nYou shouldn't call methods directly on an @action_class, for example:\r\n\r\nhttps://github.com/knausj85/knausj_talon/blob/65ccd94b8fe97056a34b858b2f1f261cc5269c3f/mouse_grid/mouse_grid.py#L282\r\n\r\nThis will continue to work, but generate a warning in the next beta.\r\n\r\nYou should probably use `actions.self.grid_narrow(...)` instead.\n", "before_files": [{"content": "# courtesy of https://github.com/timo/\n# see https://github.com/timo/talon_scripts\nfrom talon import Module, Context, app, canvas, screen, settings, ui, ctrl, cron\nfrom talon.skia import Shader, Color, Paint, Rect\nfrom talon.types.point import Point2d\nfrom talon_plugins import eye_mouse, eye_zoom_mouse\nfrom typing import Union\n\nimport math, time\n\nimport typing\n\nmod = Module()\nnarrow_expansion = mod.setting(\n \"grid_narrow_expansion\",\n type=int,\n default=0,\n desc=\"\"\"After narrowing, grow the new region by this many pixels in every direction, to make things immediately on edges easier to hit, and when the grid is at its smallest, it allows you to still nudge it around\"\"\",\n)\n\nmod.tag(\"mouse_grid_showing\", desc=\"Tag indicates whether the mouse grid is showing\")\nmod.tag(\"mouse_grid_enabled\", desc=\"Deprecated: do not use. 
Activates legacy m grid command\")\nctx = Context()\n\n\nclass MouseSnapNine:\n def __init__(self):\n self.screen = None\n self.rect = None\n self.history = []\n self.img = None\n self.mcanvas = None\n self.active = False\n self.count = 0\n self.was_control_mouse_active = False\n self.was_zoom_mouse_active = False\n\n def setup(self, *, rect: Rect = None, screen_num: int = None):\n screens = ui.screens()\n # each if block here might set the rect to None to indicate failure\n if rect is not None:\n try:\n screen = ui.screen_containing(*rect.center)\n except Exception:\n rect = None\n if rect is None and screen_num is not None:\n screen = screens[screen_num % len(screens)]\n rect = screen.rect\n if rect is None:\n screen = screens[0]\n rect = screen.rect\n self.rect = rect.copy()\n self.screen = screen\n self.count = 0\n self.img = None\n if self.mcanvas is not None:\n self.mcanvas.close()\n self.mcanvas = canvas.Canvas.from_screen(screen)\n if self.active:\n self.mcanvas.register(\"draw\", self.draw)\n self.mcanvas.freeze()\n\n def show(self):\n if self.active:\n return\n # noinspection PyUnresolvedReferences\n if eye_zoom_mouse.zoom_mouse.enabled:\n self.was_zoom_mouse_active = True\n eye_zoom_mouse.toggle_zoom_mouse(False)\n if eye_mouse.control_mouse.enabled:\n self.was_control_mouse_active = True\n eye_mouse.control_mouse.toggle()\n self.mcanvas.register(\"draw\", self.draw)\n self.mcanvas.freeze()\n self.active = True\n return\n\n def close(self):\n if not self.active:\n return\n self.mcanvas.unregister(\"draw\", self.draw)\n self.mcanvas.close()\n self.mcanvas = None\n self.img = None\n\n self.active = False\n if self.was_control_mouse_active and not eye_mouse.control_mouse.enabled:\n eye_mouse.control_mouse.toggle()\n if self.was_zoom_mouse_active and not eye_zoom_mouse.zoom_mouse.enabled:\n eye_zoom_mouse.toggle_zoom_mouse(True)\n\n self.was_zoom_mouse_active = False\n self.was_control_mouse_active = False\n\n def draw(self, canvas):\n paint = canvas.paint\n\n def draw_grid(offset_x, offset_y, width, height):\n canvas.draw_line(\n offset_x + width // 3,\n offset_y,\n offset_x + width // 3,\n offset_y + height,\n )\n canvas.draw_line(\n offset_x + 2 * width // 3,\n offset_y,\n offset_x + 2 * width // 3,\n offset_y + height,\n )\n\n canvas.draw_line(\n offset_x,\n offset_y + height // 3,\n offset_x + width,\n offset_y + height // 3,\n )\n canvas.draw_line(\n offset_x,\n offset_y + 2 * height // 3,\n offset_x + width,\n offset_y + 2 * height // 3,\n )\n\n def draw_crosses(offset_x, offset_y, width, height):\n for row in range(0, 2):\n for col in range(0, 2):\n cx = offset_x + width / 6 + (col + 0.5) * width / 3\n cy = offset_y + height / 6 + (row + 0.5) * height / 3\n\n canvas.draw_line(cx - 10, cy, cx + 10, cy)\n canvas.draw_line(cx, cy - 10, cx, cy + 10)\n\n grid_stroke = 1\n\n def draw_text(offset_x, offset_y, width, height):\n canvas.paint.text_align = canvas.paint.TextAlign.CENTER\n for row in range(3):\n for col in range(3):\n text_string = \"\"\n if settings[\"user.grids_put_one_bottom_left\"]:\n text_string = f\"{(2 - row)*3+col+1}\"\n else:\n text_string = f\"{row*3+col+1}\"\n text_rect = canvas.paint.measure_text(text_string)[1]\n background_rect = text_rect.copy()\n background_rect.center = Point2d(\n offset_x + width / 6 + col * width / 3,\n offset_y + height / 6 + row * height / 3,\n )\n background_rect = background_rect.inset(-4)\n paint.color = \"9999995f\"\n paint.style = Paint.Style.FILL\n canvas.draw_rect(background_rect)\n paint.color = \"00ff00ff\"\n 
canvas.draw_text(\n text_string,\n offset_x + width / 6 + col * width / 3,\n offset_y + height / 6 + row * height / 3 + text_rect.height / 2,\n )\n\n if self.count < 2:\n paint.color = \"00ff007f\"\n for which in range(1, 10):\n gap = 35 - self.count * 10\n if not self.active:\n gap = 45\n draw_crosses(*self.calc_narrow(which, self.rect))\n\n paint.stroke_width = grid_stroke\n if self.active:\n paint.color = \"ff0000ff\"\n else:\n paint.color = \"000000ff\"\n if self.count >= 2:\n aspect = self.rect.width / self.rect.height\n if aspect >= 1:\n w = self.screen.width / 3\n h = w / aspect\n else:\n h = self.screen.height / 3\n w = h * aspect\n x = self.screen.x + (self.screen.width - w) / 2\n y = self.screen.y + (self.screen.height - h) / 2\n self.draw_zoom(canvas, x, y, w, h)\n draw_grid(x, y, w, h)\n draw_text(x, y, w, h)\n else:\n draw_grid(self.rect.x, self.rect.y, self.rect.width, self.rect.height)\n\n paint.textsize += 12 - self.count * 3\n draw_text(self.rect.x, self.rect.y, self.rect.width, self.rect.height)\n\n def calc_narrow(self, which, rect):\n rect = rect.copy()\n bdr = narrow_expansion.get()\n row = int(which - 1) // 3\n col = int(which - 1) % 3\n if settings[\"user.grids_put_one_bottom_left\"]:\n row = 2 - row\n rect.x += int(col * rect.width // 3) - bdr\n rect.y += int(row * rect.height // 3) - bdr\n rect.width = (rect.width // 3) + bdr * 2\n rect.height = (rect.height // 3) + bdr * 2\n return rect\n\n def narrow(self, which, move=True):\n if which < 1 or which > 9:\n return\n self.save_state()\n rect = self.calc_narrow(which, self.rect)\n # check count so we don't bother zooming in _too_ far\n if self.count < 5:\n self.rect = rect.copy()\n self.count += 1\n if move:\n ctrl.mouse_move(*rect.center)\n if self.count >= 2:\n self.update_screenshot()\n else:\n self.mcanvas.freeze()\n\n def update_screenshot(self):\n def finish_capture():\n self.img = screen.capture_rect(self.rect)\n self.mcanvas.freeze()\n\n self.mcanvas.hide()\n cron.after(\"16ms\", finish_capture)\n\n def draw_zoom(self, canvas, x, y, w, h):\n if self.img:\n src = Rect(0, 0, self.img.width, self.img.height)\n dst = Rect(x, y, w, h)\n canvas.draw_image_rect(self.img, src, dst)\n\n def narrow_to_pos(self, x, y):\n col_size = int(self.width // 3)\n row_size = int(self.height // 3)\n col = math.floor((x - self.rect.x) / col_size)\n row = math.floor((y - self.rect.x) / row_size)\n self.narrow(1 + col + 3 * row, move=False)\n\n def save_state(self):\n self.history.append((self.count, self.rect.copy()))\n\n def go_back(self):\n # FIXME: need window and screen tracking\n self.count, self.rect = self.history.pop()\n self.mcanvas.freeze()\n\n\nmg = MouseSnapNine()\n\n\[email protected]_class\nclass GridActions:\n def grid_activate():\n \"\"\"Show mouse grid\"\"\"\n if not mg.mcanvas:\n mg.setup()\n mg.show()\n ctx.tags = [\"user.mouse_grid_showing\"]\n\n def grid_place_window():\n \"\"\"Places the grid on the currently active window\"\"\"\n mg.setup(rect=ui.active_window().rect)\n\n def grid_reset():\n \"\"\"Resets the grid to fill the whole screen again\"\"\"\n if mg.active:\n mg.setup()\n\n def grid_select_screen(screen: int):\n \"\"\"Brings up mouse grid\"\"\"\n mg.setup(screen_num=screen - 1)\n mg.show()\n\n def grid_narrow_list(digit_list: typing.List[str]):\n \"\"\"Choose fields multiple times in a row\"\"\"\n for d in digit_list:\n GridActions.grid_narrow(int(d))\n\n def grid_narrow(digit: Union[int, str]):\n \"\"\"Choose a field of the grid and narrow the selection down\"\"\"\n mg.narrow(int(digit))\n\n def 
grid_go_back():\n \"\"\"Sets the grid state back to what it was before the last command\"\"\"\n mg.go_back()\n\n def grid_close():\n \"\"\"Close the active grid\"\"\"\n ctx.tags = []\n mg.close()\n", "path": "mouse_grid/mouse_grid.py"}]} | 3,829 | 248 |
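The Talon fix above replaces a direct call on the decorated class with a dispatch through the actions registry. A condensed sketch of the corrected pattern is below; it needs the Talon runtime to execute and the stub body is illustrative, the point is only the actions.self dispatch:

from talon import Module, actions

mod = Module()


@mod.action_class
class Actions:
    def grid_narrow(digit: int):
        """Narrow the grid to a single field (stub body for illustration)."""
        print(f"narrow to {digit}")

    def grid_narrow_list(digit_list: list):
        """Narrow repeatedly, one digit at a time."""
        for d in digit_list:
            # Dispatch through the actions registry rather than calling
            # Actions.grid_narrow(...) on the decorated class directly.
            actions.self.grid_narrow(int(d))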
gh_patches_debug_14598 | rasdani/github-patches | git_diff | sunpy__sunpy-3076 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add list of constants to docs
Currently if one wants to know what constants `sunpy.sun` has built in you have to run `sunpy.sun.constants.print_all()`, but it would be nice to have a table that lists the constants here: https://docs.sunpy.org/en/latest/code_ref/sun.html#module-sunpy.sun.constants (like AstroPy: http://docs.astropy.org/en/stable/constants/index.html#reference-api)
</issue>
<code>
[start of sunpy/sun/constants.py]
1 """
2 This module provides fundamental solar physical constants.
3 """
4 from astropy.table import Table
5
6 from sunpy.sun import _constants as _con
7
8 __all__ = [
9 'get', 'find', 'print_all', 'spectral_classification', 'au', 'mass', 'equatorial_radius',
10 'volume', 'surface_area', 'average_density', 'equatorial_surface_gravity',
11 'effective_temperature', 'luminosity', 'mass_conversion_rate', 'escape_velocity', 'sfu',
12 'average_angular_size'
13 ]
14
15 constants = _con.physical_constants
16
17
18 def get(key):
19 """
20 Retrieve a constant by key. This is just a short cut into a dictionary.
21
22 Parameters
23 ----------
24 key : `str`
25 Key in dictionary in ``constants``.
26
27 Returns
28 -------
29 constant : `~astropy.units.Constant`
30
31 See Also
32 --------
33 `sunpy.sun.constants`
34 Contains the description of ``constants``, which, as a dictionary literal object, does not itself possess a docstring.
35
36 Examples
37 --------
38 >>> from sunpy.sun import constants
39 >>> constants.get('mass')
40 <<class 'astropy.constants.iau2015.IAU2015'> name='Solar mass' value=1.9884754153381438e+30 uncertainty=9.236140093538353e+25 unit='kg' reference='IAU 2015 Resolution B 3 + CODATA 2014'>
41 """
42 return constants[key]
43
44
45 def find(sub=None):
46 """
47 Return list of constants keys containing a given string.
48
49 Parameters
50 ----------
51 sub : `str`, optional
52 Sub-string to search keys for. By default set to `None` and returns all keys.
53
54 Returns
55 -------
56 `None`, `list`
57 The matching keys.
58
59 See Also
60 --------
61 `sunpy.sun.constants`
62 Contains the description of ``constants``, which, as a dictionary literal object, does not itself possess a docstring.
63 """
64 if sub is None:
65 result = list(constants.keys())
66 else:
67 result = [key for key in constants if sub.lower() in key.lower()]
68
69 result.sort()
70 return result
71
72
73 def print_all():
74 """
75 Provides a table of the complete list of constants.
76
77 Returns
78 -------
79 `astropy.table.Table`
80 """
81 data_rows = []
82 for key, this_constant in constants.items():
83 data_rows.append([
84 key, this_constant.name, this_constant.value, this_constant.uncertainty,
85 str(this_constant.unit), this_constant.reference
86 ])
87
88 t = Table(rows=data_rows, names=('key', 'name', 'value', 'uncertainty', 'unit', 'Reference'))
89 return t
90
91
92 # Spectral class is not included in physical constants since it is not a number
93 spectral_classification = 'G2V'
94 au = astronomical_unit = get('mean distance')
95 # The following variables from _gets are brought out by making them
96 # accessible through a call such as sun.volume
97 mass = get('mass')
98 equatorial_radius = radius = get('radius')
99 volume = get('volume')
100 surface_area = get('surface area')
101 average_density = density = get('average density')
102 equatorial_surface_gravity = surface_gravity = get('surface gravity')
103 effective_temperature = get('effective temperature')
104 luminosity = get('luminosity')
105 mass_conversion_rate = get('mass conversion rate')
106 escape_velocity = get('escape velocity')
107 sfu = get('solar flux unit')
108 # Observable parameters
109 average_angular_size = get('average angular size')
110
[end of sunpy/sun/constants.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sunpy/sun/constants.py b/sunpy/sun/constants.py
--- a/sunpy/sun/constants.py
+++ b/sunpy/sun/constants.py
@@ -89,6 +89,20 @@
return t
+# Add a list of constants to the docs
+_lines = [
+ 'The following constants are available:\n',
+ '====================== ============== ================ =================================',
+ ' Name Value Unit Description',
+ '====================== ============== ================ =================================',
+]
+for key, const in constants.items():
+ _lines.append('{0:^22} {1:^14.9g} {2:^16} {3}'.format(
+ key, const.value, const._unit_string, const.name))
+_lines.append(_lines[1])
+if __doc__ is not None:
+ __doc__ += '\n'.join(_lines)
+
# Spectral class is not included in physical constants since it is not a number
spectral_classification = 'G2V'
au = astronomical_unit = get('mean distance')
| {"golden_diff": "diff --git a/sunpy/sun/constants.py b/sunpy/sun/constants.py\n--- a/sunpy/sun/constants.py\n+++ b/sunpy/sun/constants.py\n@@ -89,6 +89,20 @@\n return t\n \n \n+# Add a list of constants to the docs\n+_lines = [\n+ 'The following constants are available:\\n',\n+ '====================== ============== ================ =================================',\n+ ' Name Value Unit Description',\n+ '====================== ============== ================ =================================',\n+]\n+for key, const in constants.items():\n+ _lines.append('{0:^22} {1:^14.9g} {2:^16} {3}'.format(\n+ key, const.value, const._unit_string, const.name))\n+_lines.append(_lines[1])\n+if __doc__ is not None:\n+ __doc__ += '\\n'.join(_lines)\n+\n # Spectral class is not included in physical constants since it is not a number\n spectral_classification = 'G2V'\n au = astronomical_unit = get('mean distance')\n", "issue": "Add list of constants to docs\nCurrently if one wants to know what constants `sunpy.sun` has built in you have to run `sunpy.sun.constants.print_all()`, but it would be nice to have a table that lists the constants here: https://docs.sunpy.org/en/latest/code_ref/sun.html#module-sunpy.sun.constants (like AstroPy: http://docs.astropy.org/en/stable/constants/index.html#reference-api)\n", "before_files": [{"content": "\"\"\"\nThis module provides fundamental solar physical constants.\n\"\"\"\nfrom astropy.table import Table\n\nfrom sunpy.sun import _constants as _con\n\n__all__ = [\n 'get', 'find', 'print_all', 'spectral_classification', 'au', 'mass', 'equatorial_radius',\n 'volume', 'surface_area', 'average_density', 'equatorial_surface_gravity',\n 'effective_temperature', 'luminosity', 'mass_conversion_rate', 'escape_velocity', 'sfu',\n 'average_angular_size'\n]\n\nconstants = _con.physical_constants\n\n\ndef get(key):\n \"\"\"\n Retrieve a constant by key. This is just a short cut into a dictionary.\n\n Parameters\n ----------\n key : `str`\n Key in dictionary in ``constants``.\n\n Returns\n -------\n constant : `~astropy.units.Constant`\n\n See Also\n --------\n `sunpy.sun.constants`\n Contains the description of ``constants``, which, as a dictionary literal object, does not itself possess a docstring.\n\n Examples\n --------\n >>> from sunpy.sun import constants\n >>> constants.get('mass')\n <<class 'astropy.constants.iau2015.IAU2015'> name='Solar mass' value=1.9884754153381438e+30 uncertainty=9.236140093538353e+25 unit='kg' reference='IAU 2015 Resolution B 3 + CODATA 2014'>\n \"\"\"\n return constants[key]\n\n\ndef find(sub=None):\n \"\"\"\n Return list of constants keys containing a given string.\n\n Parameters\n ----------\n sub : `str`, optional\n Sub-string to search keys for. 
By default set to `None` and returns all keys.\n\n Returns\n -------\n `None`, `list`\n The matching keys.\n\n See Also\n --------\n `sunpy.sun.constants`\n Contains the description of ``constants``, which, as a dictionary literal object, does not itself possess a docstring.\n \"\"\"\n if sub is None:\n result = list(constants.keys())\n else:\n result = [key for key in constants if sub.lower() in key.lower()]\n\n result.sort()\n return result\n\n\ndef print_all():\n \"\"\"\n Provides a table of the complete list of constants.\n\n Returns\n -------\n `astropy.table.Table`\n \"\"\"\n data_rows = []\n for key, this_constant in constants.items():\n data_rows.append([\n key, this_constant.name, this_constant.value, this_constant.uncertainty,\n str(this_constant.unit), this_constant.reference\n ])\n\n t = Table(rows=data_rows, names=('key', 'name', 'value', 'uncertainty', 'unit', 'Reference'))\n return t\n\n\n# Spectral class is not included in physical constants since it is not a number\nspectral_classification = 'G2V'\nau = astronomical_unit = get('mean distance')\n# The following variables from _gets are brought out by making them\n# accessible through a call such as sun.volume\nmass = get('mass')\nequatorial_radius = radius = get('radius')\nvolume = get('volume')\nsurface_area = get('surface area')\naverage_density = density = get('average density')\nequatorial_surface_gravity = surface_gravity = get('surface gravity')\neffective_temperature = get('effective temperature')\nluminosity = get('luminosity')\nmass_conversion_rate = get('mass conversion rate')\nescape_velocity = get('escape velocity')\nsfu = get('solar flux unit')\n# Observable parameters\naverage_angular_size = get('average angular size')\n", "path": "sunpy/sun/constants.py"}]} | 1,646 | 240 |
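The sunpy diff above builds the constants table by appending reST rows to the module docstring at import time. A stripped-down, runnable sketch of the same idea, with plain (value, unit, description) tuples standing in for astropy Constant objects and nominal IAU values used as examples:

"""Example module docstring."""

constants = {
    "mass": (1.9885e30, "kg", "Solar mass"),
    "radius": (6.957e8, "m", "Nominal solar equatorial radius"),
    "luminosity": (3.828e26, "W", "Nominal solar luminosity"),
}

_lines = [
    "The following constants are available:\n",
    "============ ============== ====== ===============================",
    "    Name         Value       Unit  Description",
    "============ ============== ====== ===============================",
]
for _key, (_value, _unit, _name) in constants.items():
    _lines.append("{0:^12} {1:^14.9g} {2:^6} {3}".format(_key, _value, _unit, _name))
_lines.append(_lines[1])

if __doc__ is not None:
    __doc__ += "\n" + "\n".join(_lines)

if __name__ == "__main__":
    print(__doc__)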
gh_patches_debug_72 | rasdani/github-patches | git_diff | Kinto__kinto-7 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
PostgreSQL by default ?
- put `cliquet[postgresql]` in requirements
- put storage_url in config (default postgres:postgres@localhost/postgres)
</issue>
<code>
[start of setup.py]
1 import os
2 from setuptools import setup, find_packages
3
4 here = os.path.abspath(os.path.dirname(__file__))
5
6 with open(os.path.join(here, 'README.rst')) as f:
7 README = f.read()
8
9 REQUIREMENTS = [
10 'colander',
11 'cornice',
12 'six',
13 'waitress',
14 'cliquet'
15 ]
16
17 ENTRY_POINTS = {
18 'paste.app_factory': [
19 'main = kinto:main',
20 ]}
21
22 setup(name='kinto',
23 version='0.1.dev0',
24 description='kinto',
25 long_description=README,
26 classifiers=[
27 "Programming Language :: Python",
28 "Topic :: Internet :: WWW/HTTP",
29 "Topic :: Internet :: WWW/HTTP :: WSGI :: Application"
30 ],
31 keywords="web services",
32 author='Mozilla Services',
33 author_email='[email protected]',
34 url='',
35 packages=find_packages(),
36 include_package_data=True,
37 zip_safe=False,
38 install_requires=REQUIREMENTS,
39 entry_points=ENTRY_POINTS)
40
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,7 @@
'cornice',
'six',
'waitress',
- 'cliquet'
+ 'cliquet[postgresql]'
]
ENTRY_POINTS = {
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -11,7 +11,7 @@\n 'cornice',\n 'six',\n 'waitress',\n- 'cliquet'\n+ 'cliquet[postgresql]'\n ]\n \n ENTRY_POINTS = {\n", "issue": "PostgreSQL by default ?\n- put `cliquet[postgresql]` in requirements\n- put storage_url in config (default postgres:postgres@localhost/postgres)\n\n", "before_files": [{"content": "import os\nfrom setuptools import setup, find_packages\n\nhere = os.path.abspath(os.path.dirname(__file__))\n\nwith open(os.path.join(here, 'README.rst')) as f:\n README = f.read()\n\nREQUIREMENTS = [\n 'colander',\n 'cornice',\n 'six',\n 'waitress',\n 'cliquet'\n]\n\nENTRY_POINTS = {\n 'paste.app_factory': [\n 'main = kinto:main',\n ]}\n\nsetup(name='kinto',\n version='0.1.dev0',\n description='kinto',\n long_description=README,\n classifiers=[\n \"Programming Language :: Python\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Topic :: Internet :: WWW/HTTP :: WSGI :: Application\"\n ],\n keywords=\"web services\",\n author='Mozilla Services',\n author_email='[email protected]',\n url='',\n packages=find_packages(),\n include_package_data=True,\n zip_safe=False,\n install_requires=REQUIREMENTS,\n entry_points=ENTRY_POINTS)\n", "path": "setup.py"}]} | 856 | 68 |
gh_patches_debug_42249 | rasdani/github-patches | git_diff | wagtail__wagtail-1110 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
image tag runs query on template load
There's a database query in the `__init__` method of `ImageNode`:
https://github.com/torchbox/wagtail/blob/master/wagtail/wagtailimages/templatetags/wagtailimages_tags.py#L42
This causes a query to be generated every time the template is loaded. The filter is not actually needed until the node renders though.
This can cause crashes if you are trying to load a template without a database (such as doing offline compression of assets).
</issue>
<code>
[start of wagtail/wagtailcore/management/commands/fixtree.py]
1 import operator
2 import functools
3 from optparse import make_option
4
5 from django.core.management.base import BaseCommand
6 from django.core.exceptions import ObjectDoesNotExist
7 from django.db import models
8 from django.db.models import Q
9 from django.utils import six
10
11 from wagtail.wagtailcore.models import Page
12
13
14 class Command(BaseCommand):
15 help = "Checks for data integrity errors on the page tree, and fixes them where possible."
16 base_options = (
17 make_option('--noinput', action='store_false', dest='interactive', default=True,
18 help='If provided, any fixes requiring user interaction will be skipped.'
19 ),
20 )
21 option_list = BaseCommand.option_list + base_options
22
23 def handle(self, **options):
24 any_problems_fixed = False
25
26 for page in Page.objects.all():
27 try:
28 page.specific
29 except ObjectDoesNotExist:
30 self.stdout.write("Page %d (%s) is missing a subclass record; deleting." % (page.id, page.title))
31 any_problems_fixed = True
32 page.delete()
33
34 (bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()
35
36 if bad_depth:
37 self.stdout.write("Incorrect depth value found for pages: %r" % bad_depth)
38 if bad_numchild:
39 self.stdout.write("Incorrect numchild value found for pages: %r" % bad_numchild)
40
41 if bad_depth or bad_numchild:
42 Page.fix_tree(destructive=False)
43 any_problems_fixed = True
44
45 if orphans:
46 # The 'orphans' list as returned by treebeard only includes pages that are
47 # missing an immediate parent; descendants of orphans are not included.
48 # Deleting only the *actual* orphans is a bit silly (since it'll just create
49 # more orphans), so generate a queryset that contains descendants as well.
50 orphan_paths = Page.objects.filter(id__in=orphans).values_list('path', flat=True)
51 filter_conditions = []
52 for path in orphan_paths:
53 filter_conditions.append(Q(path__startswith=path))
54
55 # combine filter_conditions into a single ORed condition
56 final_filter = functools.reduce(operator.or_, filter_conditions)
57
58 # build a queryset of all pages to be removed; this must be a vanilla Django
59 # queryset rather than a treebeard MP_NodeQuerySet, so that we bypass treebeard's
60 # custom delete() logic that would trip up on the very same corruption that we're
61 # trying to fix here.
62 pages_to_delete = models.query.QuerySet(Page).filter(final_filter)
63
64 self.stdout.write("Orphaned pages found:")
65 for page in pages_to_delete:
66 self.stdout.write("ID %d: %s" % (page.id, page.title))
67 self.stdout.write('')
68
69 if options.get('interactive', True):
70 yes_or_no = six.moves.input("Delete these pages? [y/N] ")
71 delete_orphans = yes_or_no.lower().startswith('y')
72 self.stdout.write('')
73 else:
74 # Running tests, check for the "delete_orphans" option
75 delete_orphans = options.get('delete_orphans', False)
76
77 if delete_orphans:
78 deletion_count = len(pages_to_delete)
79 pages_to_delete.delete()
80 self.stdout.write(
81 "%d orphaned page%s deleted." % (deletion_count, "s"[deletion_count==1:])
82 )
83 any_problems_fixed = True
84
85 if any_problems_fixed:
86 # re-run find_problems to see if any new ones have surfaced
87 (bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()
88
89 if any((bad_alpha, bad_path, orphans, bad_depth, bad_numchild)):
90 self.stdout.write("Remaining problems (cannot fix automatically):")
91 if bad_alpha:
92 self.stdout.write("Invalid characters found in path for pages: %r" % bad_alpha)
93 if bad_path:
94 self.stdout.write("Invalid path length found for pages: %r" % bad_path)
95 if orphans:
96 self.stdout.write("Orphaned pages found: %r" % orphans)
97 if bad_depth:
98 self.stdout.write("Incorrect depth value found for pages: %r" % bad_depth)
99 if bad_numchild:
100 self.stdout.write("Incorrect numchild value found for pages: %r" % bad_numchild)
101
102 elif any_problems_fixed:
103 self.stdout.write("All problems fixed.")
104 else:
105 self.stdout.write("No problems found.")
106
[end of wagtail/wagtailcore/management/commands/fixtree.py]
[start of wagtail/wagtailimages/templatetags/wagtailimages_tags.py]
1 from django import template
2
3 from wagtail.wagtailimages.models import Filter, SourceImageIOError
4
5 register = template.Library()
6
7 # Local cache of filters, avoid hitting the DB
8 filters = {}
9
10
11 @register.tag(name="image")
12 def image(parser, token):
13 bits = token.split_contents()[1:]
14 image_var = bits[0]
15 filter_spec = bits[1]
16 bits = bits[2:]
17
18 if len(bits) == 2 and bits[0] == 'as':
19 # token is of the form {% image self.photo max-320x200 as img %}
20 return ImageNode(image_var, filter_spec, output_var_name=bits[1])
21 else:
22 # token is of the form {% image self.photo max-320x200 %} - all additional tokens
23 # should be kwargs, which become attributes
24 attrs = {}
25 for bit in bits:
26 try:
27 name, value = bit.split('=')
28 except ValueError:
29 raise template.TemplateSyntaxError("'image' tag should be of the form {% image self.photo max-320x200 [ custom-attr=\"value\" ... ] %} or {% image self.photo max-320x200 as img %}")
30 attrs[name] = parser.compile_filter(value) # setup to resolve context variables as value
31
32 return ImageNode(image_var, filter_spec, attrs=attrs)
33
34
35 class ImageNode(template.Node):
36 def __init__(self, image_var_name, filter_spec, output_var_name=None, attrs={}):
37 self.image_var = template.Variable(image_var_name)
38 self.output_var_name = output_var_name
39 self.attrs = attrs
40
41 if filter_spec not in filters:
42 filters[filter_spec], _ = Filter.objects.get_or_create(spec=filter_spec)
43 self.filter = filters[filter_spec]
44
45 def render(self, context):
46 try:
47 image = self.image_var.resolve(context)
48 except template.VariableDoesNotExist:
49 return ''
50
51 if not image:
52 return ''
53
54 try:
55 rendition = image.get_rendition(self.filter)
56 except SourceImageIOError:
57 # It's fairly routine for people to pull down remote databases to their
58 # local dev versions without retrieving the corresponding image files.
59 # In such a case, we would get a SourceImageIOError at the point where we try to
60 # create the resized version of a non-existent image. Since this is a
61 # bit catastrophic for a missing image, we'll substitute a dummy
62 # Rendition object so that we just output a broken link instead.
63 Rendition = image.renditions.model # pick up any custom Image / Rendition classes that may be in use
64 rendition = Rendition(image=image, width=0, height=0)
65 rendition.file.name = 'not-found'
66
67 if self.output_var_name:
68 # return the rendition object in the given variable
69 context[self.output_var_name] = rendition
70 return ''
71 else:
72 # render the rendition's image tag now
73 resolved_attrs = {}
74 for key in self.attrs:
75 resolved_attrs[key] = self.attrs[key].resolve(context)
76 return rendition.img_tag(resolved_attrs)
77
[end of wagtail/wagtailimages/templatetags/wagtailimages_tags.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/wagtail/wagtailcore/management/commands/fixtree.py b/wagtail/wagtailcore/management/commands/fixtree.py
--- a/wagtail/wagtailcore/management/commands/fixtree.py
+++ b/wagtail/wagtailcore/management/commands/fixtree.py
@@ -20,6 +20,11 @@
)
option_list = BaseCommand.option_list + base_options
+ def numberlist_to_string(self, numberlist):
+ # Converts a list of numbers into a string
+ # Doesn't put "L" after longs
+ return '[' + ', '.join(map(str, numberlist)) + ']'
+
def handle(self, **options):
any_problems_fixed = False
@@ -34,9 +39,9 @@
(bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()
if bad_depth:
- self.stdout.write("Incorrect depth value found for pages: %r" % bad_depth)
+ self.stdout.write("Incorrect depth value found for pages: %s" % self.numberlist_to_string(bad_depth))
if bad_numchild:
- self.stdout.write("Incorrect numchild value found for pages: %r" % bad_numchild)
+ self.stdout.write("Incorrect numchild value found for pages: %s" % self.numberlist_to_string(bad_numchild))
if bad_depth or bad_numchild:
Page.fix_tree(destructive=False)
@@ -89,15 +94,15 @@
if any((bad_alpha, bad_path, orphans, bad_depth, bad_numchild)):
self.stdout.write("Remaining problems (cannot fix automatically):")
if bad_alpha:
- self.stdout.write("Invalid characters found in path for pages: %r" % bad_alpha)
+ self.stdout.write("Invalid characters found in path for pages: %s" % self.numberlist_to_string(bad_alpha))
if bad_path:
- self.stdout.write("Invalid path length found for pages: %r" % bad_path)
+ self.stdout.write("Invalid path length found for pages: %s" % self.numberlist_to_string(bad_path))
if orphans:
- self.stdout.write("Orphaned pages found: %r" % orphans)
+ self.stdout.write("Orphaned pages found: %s" % self.numberlist_to_string(orphans))
if bad_depth:
- self.stdout.write("Incorrect depth value found for pages: %r" % bad_depth)
+ self.stdout.write("Incorrect depth value found for pages: %s" % self.numberlist_to_string(bad_depth))
if bad_numchild:
- self.stdout.write("Incorrect numchild value found for pages: %r" % bad_numchild)
+ self.stdout.write("Incorrect numchild value found for pages: %s" % self.numberlist_to_string(bad_numchild))
elif any_problems_fixed:
self.stdout.write("All problems fixed.")
diff --git a/wagtail/wagtailimages/templatetags/wagtailimages_tags.py b/wagtail/wagtailimages/templatetags/wagtailimages_tags.py
--- a/wagtail/wagtailimages/templatetags/wagtailimages_tags.py
+++ b/wagtail/wagtailimages/templatetags/wagtailimages_tags.py
@@ -1,12 +1,10 @@
from django import template
+from django.utils.functional import cached_property
from wagtail.wagtailimages.models import Filter, SourceImageIOError
register = template.Library()
-# Local cache of filters, avoid hitting the DB
-filters = {}
-
@register.tag(name="image")
def image(parser, token):
@@ -37,10 +35,12 @@
self.image_var = template.Variable(image_var_name)
self.output_var_name = output_var_name
self.attrs = attrs
+ self.filter_spec = filter_spec
- if filter_spec not in filters:
- filters[filter_spec], _ = Filter.objects.get_or_create(spec=filter_spec)
- self.filter = filters[filter_spec]
+ @cached_property
+ def filter(self):
+ _filter, _ = Filter.objects.get_or_create(spec=self.filter_spec)
+ return _filter
def render(self, context):
try:
| {"golden_diff": "diff --git a/wagtail/wagtailcore/management/commands/fixtree.py b/wagtail/wagtailcore/management/commands/fixtree.py\n--- a/wagtail/wagtailcore/management/commands/fixtree.py\n+++ b/wagtail/wagtailcore/management/commands/fixtree.py\n@@ -20,6 +20,11 @@\n )\n option_list = BaseCommand.option_list + base_options\n \n+ def numberlist_to_string(self, numberlist):\n+ # Converts a list of numbers into a string\n+ # Doesn't put \"L\" after longs\n+ return '[' + ', '.join(map(str, numberlist)) + ']'\n+\n def handle(self, **options):\n any_problems_fixed = False\n \n@@ -34,9 +39,9 @@\n (bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()\n \n if bad_depth:\n- self.stdout.write(\"Incorrect depth value found for pages: %r\" % bad_depth)\n+ self.stdout.write(\"Incorrect depth value found for pages: %s\" % self.numberlist_to_string(bad_depth))\n if bad_numchild:\n- self.stdout.write(\"Incorrect numchild value found for pages: %r\" % bad_numchild)\n+ self.stdout.write(\"Incorrect numchild value found for pages: %s\" % self.numberlist_to_string(bad_numchild))\n \n if bad_depth or bad_numchild:\n Page.fix_tree(destructive=False)\n@@ -89,15 +94,15 @@\n if any((bad_alpha, bad_path, orphans, bad_depth, bad_numchild)):\n self.stdout.write(\"Remaining problems (cannot fix automatically):\")\n if bad_alpha:\n- self.stdout.write(\"Invalid characters found in path for pages: %r\" % bad_alpha)\n+ self.stdout.write(\"Invalid characters found in path for pages: %s\" % self.numberlist_to_string(bad_alpha))\n if bad_path:\n- self.stdout.write(\"Invalid path length found for pages: %r\" % bad_path)\n+ self.stdout.write(\"Invalid path length found for pages: %s\" % self.numberlist_to_string(bad_path))\n if orphans:\n- self.stdout.write(\"Orphaned pages found: %r\" % orphans)\n+ self.stdout.write(\"Orphaned pages found: %s\" % self.numberlist_to_string(orphans))\n if bad_depth:\n- self.stdout.write(\"Incorrect depth value found for pages: %r\" % bad_depth)\n+ self.stdout.write(\"Incorrect depth value found for pages: %s\" % self.numberlist_to_string(bad_depth))\n if bad_numchild:\n- self.stdout.write(\"Incorrect numchild value found for pages: %r\" % bad_numchild)\n+ self.stdout.write(\"Incorrect numchild value found for pages: %s\" % self.numberlist_to_string(bad_numchild))\n \n elif any_problems_fixed:\n self.stdout.write(\"All problems fixed.\")\ndiff --git a/wagtail/wagtailimages/templatetags/wagtailimages_tags.py b/wagtail/wagtailimages/templatetags/wagtailimages_tags.py\n--- a/wagtail/wagtailimages/templatetags/wagtailimages_tags.py\n+++ b/wagtail/wagtailimages/templatetags/wagtailimages_tags.py\n@@ -1,12 +1,10 @@\n from django import template\n+from django.utils.functional import cached_property\n \n from wagtail.wagtailimages.models import Filter, SourceImageIOError\n \n register = template.Library()\n \n-# Local cache of filters, avoid hitting the DB\n-filters = {}\n-\n \n @register.tag(name=\"image\")\n def image(parser, token):\n@@ -37,10 +35,12 @@\n self.image_var = template.Variable(image_var_name)\n self.output_var_name = output_var_name\n self.attrs = attrs\n+ self.filter_spec = filter_spec\n \n- if filter_spec not in filters:\n- filters[filter_spec], _ = Filter.objects.get_or_create(spec=filter_spec)\n- self.filter = filters[filter_spec]\n+ @cached_property\n+ def filter(self):\n+ _filter, _ = Filter.objects.get_or_create(spec=self.filter_spec)\n+ return _filter\n \n def render(self, context):\n try:\n", "issue": "image tag runs query on template load\nTheres a 
database query in the `__init__` method of `ImageNode`:\n\nhttps://github.com/torchbox/wagtail/blob/master/wagtail/wagtailimages/templatetags/wagtailimages_tags.py#L42\n\nThis causes a query to be generated every time the template is loaded. The filter is not actually needed until the node renders though.\n\nThis can cause crashes if you are trying to load a template without a database (such as doing offline compression of assets).\n\n", "before_files": [{"content": "import operator\nimport functools\nfrom optparse import make_option\n\nfrom django.core.management.base import BaseCommand\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom django.db import models\nfrom django.db.models import Q\nfrom django.utils import six\n\nfrom wagtail.wagtailcore.models import Page\n\n\nclass Command(BaseCommand):\n help = \"Checks for data integrity errors on the page tree, and fixes them where possible.\"\n base_options = (\n make_option('--noinput', action='store_false', dest='interactive', default=True,\n help='If provided, any fixes requiring user interaction will be skipped.'\n ),\n )\n option_list = BaseCommand.option_list + base_options\n\n def handle(self, **options):\n any_problems_fixed = False\n\n for page in Page.objects.all():\n try:\n page.specific\n except ObjectDoesNotExist:\n self.stdout.write(\"Page %d (%s) is missing a subclass record; deleting.\" % (page.id, page.title))\n any_problems_fixed = True\n page.delete()\n\n (bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()\n\n if bad_depth:\n self.stdout.write(\"Incorrect depth value found for pages: %r\" % bad_depth)\n if bad_numchild:\n self.stdout.write(\"Incorrect numchild value found for pages: %r\" % bad_numchild)\n\n if bad_depth or bad_numchild:\n Page.fix_tree(destructive=False)\n any_problems_fixed = True\n\n if orphans:\n # The 'orphans' list as returned by treebeard only includes pages that are\n # missing an immediate parent; descendants of orphans are not included.\n # Deleting only the *actual* orphans is a bit silly (since it'll just create\n # more orphans), so generate a queryset that contains descendants as well.\n orphan_paths = Page.objects.filter(id__in=orphans).values_list('path', flat=True)\n filter_conditions = []\n for path in orphan_paths:\n filter_conditions.append(Q(path__startswith=path))\n\n # combine filter_conditions into a single ORed condition\n final_filter = functools.reduce(operator.or_, filter_conditions)\n\n # build a queryset of all pages to be removed; this must be a vanilla Django\n # queryset rather than a treebeard MP_NodeQuerySet, so that we bypass treebeard's\n # custom delete() logic that would trip up on the very same corruption that we're\n # trying to fix here.\n pages_to_delete = models.query.QuerySet(Page).filter(final_filter)\n\n self.stdout.write(\"Orphaned pages found:\")\n for page in pages_to_delete:\n self.stdout.write(\"ID %d: %s\" % (page.id, page.title))\n self.stdout.write('')\n\n if options.get('interactive', True):\n yes_or_no = six.moves.input(\"Delete these pages? 
[y/N] \")\n delete_orphans = yes_or_no.lower().startswith('y')\n self.stdout.write('')\n else:\n # Running tests, check for the \"delete_orphans\" option\n delete_orphans = options.get('delete_orphans', False)\n\n if delete_orphans:\n deletion_count = len(pages_to_delete)\n pages_to_delete.delete()\n self.stdout.write(\n \"%d orphaned page%s deleted.\" % (deletion_count, \"s\"[deletion_count==1:])\n )\n any_problems_fixed = True\n\n if any_problems_fixed:\n # re-run find_problems to see if any new ones have surfaced\n (bad_alpha, bad_path, orphans, bad_depth, bad_numchild) = Page.find_problems()\n\n if any((bad_alpha, bad_path, orphans, bad_depth, bad_numchild)):\n self.stdout.write(\"Remaining problems (cannot fix automatically):\")\n if bad_alpha:\n self.stdout.write(\"Invalid characters found in path for pages: %r\" % bad_alpha)\n if bad_path:\n self.stdout.write(\"Invalid path length found for pages: %r\" % bad_path)\n if orphans:\n self.stdout.write(\"Orphaned pages found: %r\" % orphans)\n if bad_depth:\n self.stdout.write(\"Incorrect depth value found for pages: %r\" % bad_depth)\n if bad_numchild:\n self.stdout.write(\"Incorrect numchild value found for pages: %r\" % bad_numchild)\n\n elif any_problems_fixed:\n self.stdout.write(\"All problems fixed.\")\n else:\n self.stdout.write(\"No problems found.\")\n", "path": "wagtail/wagtailcore/management/commands/fixtree.py"}, {"content": "from django import template\n\nfrom wagtail.wagtailimages.models import Filter, SourceImageIOError\n\nregister = template.Library()\n\n# Local cache of filters, avoid hitting the DB\nfilters = {}\n\n\[email protected](name=\"image\")\ndef image(parser, token):\n bits = token.split_contents()[1:]\n image_var = bits[0]\n filter_spec = bits[1]\n bits = bits[2:]\n\n if len(bits) == 2 and bits[0] == 'as':\n # token is of the form {% image self.photo max-320x200 as img %}\n return ImageNode(image_var, filter_spec, output_var_name=bits[1])\n else:\n # token is of the form {% image self.photo max-320x200 %} - all additional tokens\n # should be kwargs, which become attributes\n attrs = {}\n for bit in bits:\n try:\n name, value = bit.split('=')\n except ValueError:\n raise template.TemplateSyntaxError(\"'image' tag should be of the form {% image self.photo max-320x200 [ custom-attr=\\\"value\\\" ... ] %} or {% image self.photo max-320x200 as img %}\")\n attrs[name] = parser.compile_filter(value) # setup to resolve context variables as value\n\n return ImageNode(image_var, filter_spec, attrs=attrs)\n\n\nclass ImageNode(template.Node):\n def __init__(self, image_var_name, filter_spec, output_var_name=None, attrs={}):\n self.image_var = template.Variable(image_var_name)\n self.output_var_name = output_var_name\n self.attrs = attrs\n\n if filter_spec not in filters:\n filters[filter_spec], _ = Filter.objects.get_or_create(spec=filter_spec)\n self.filter = filters[filter_spec]\n\n def render(self, context):\n try:\n image = self.image_var.resolve(context)\n except template.VariableDoesNotExist:\n return ''\n\n if not image:\n return ''\n\n try:\n rendition = image.get_rendition(self.filter)\n except SourceImageIOError:\n # It's fairly routine for people to pull down remote databases to their\n # local dev versions without retrieving the corresponding image files.\n # In such a case, we would get a SourceImageIOError at the point where we try to\n # create the resized version of a non-existent image. 
Since this is a\n # bit catastrophic for a missing image, we'll substitute a dummy\n # Rendition object so that we just output a broken link instead.\n Rendition = image.renditions.model # pick up any custom Image / Rendition classes that may be in use\n rendition = Rendition(image=image, width=0, height=0)\n rendition.file.name = 'not-found'\n\n if self.output_var_name:\n # return the rendition object in the given variable\n context[self.output_var_name] = rendition\n return ''\n else:\n # render the rendition's image tag now\n resolved_attrs = {}\n for key in self.attrs:\n resolved_attrs[key] = self.attrs[key].resolve(context)\n return rendition.img_tag(resolved_attrs)\n", "path": "wagtail/wagtailimages/templatetags/wagtailimages_tags.py"}]} | 2,731 | 953 |
gh_patches_debug_47989 | rasdani/github-patches | git_diff | mlflow__mlflow-10877 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[BUG] Git version not logged when executable in current path.
### Issues Policy acknowledgement
- [X] I have read and agree to submit bug reports in accordance with the [issues policy](https://www.github.com/mlflow/mlflow/blob/master/ISSUE_POLICY.md)
### Where did you encounter this bug?
Local machine
### Willingness to contribute
Yes. I can contribute a fix for this bug independently.
### MLflow version
- Client: 2.8.0
### System information
- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**: Ubuntu 20.04.2 LTS
- **Python version**: 3.11.6
### Describe the problem
The tag ``mlflow.source.git.commit`` is not populated if running a python file that's in the current path.
Imagine a file ``mypath/myfile.py`` with this content:
```python
from mlflow.tracking.context.git_context import _get_source_version
print(_get_source_version())
```
If you run from within ``mypath`` via ``python myfile.py`` this will not produce a version,
but if you run from the parent folder via ``python mypath/myfile.py`` it will produce the correct version.
That is caused by [`get_git_commit`](https://github.com/mlflow/mlflow/blob/73fbc7d857b3f04b823b3fcbc6723efc41070288/mlflow/utils/git_utils.py#L47C1-L49C39)
using the path of the ``argv[1]``, and then passing this to ``Repo``. If the path is the current directory, ``os.path.dirname(path)`` will be the empty string, which will make ``Repo`` raise
```
git.exc.GitCommandError: Cmd('git') failed due to: exit code(128)
cmdline: git check-ignore
stderr: 'fatal: empty string is not a valid pathspec. please use . instead if you meant to match all paths
```
which will be caught, and then an empty string will be returned.
A work-around could be as easy as replacing empty string by ``'.'``, or maybe better using ``os.path.dirname(os.path.abspath(path))``?
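A quick sketch of the failure mode and that second workaround (paths are placeholders; this assumes GitPython is installed and the script lives inside a git checkout):

```python
import os
from git import Repo  # GitPython

path = "myfile.py"                    # how the script path looks when launched from inside mypath
print(repr(os.path.dirname(path)))    # '' -> passing this on leads to the GitCommandError quoted above

safe_dir = os.path.dirname(os.path.abspath(path))  # resolve to an absolute path first
print(Repo(safe_dir, search_parent_directories=True).head.commit.hexsha)
```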
### What component(s) does this bug affect?
- [ ] `area/artifacts`: Artifact stores and artifact logging
- [ ] `area/build`: Build and test infrastructure for MLflow
- [ ] `area/deployments`: MLflow Deployments client APIs, server, and third-party Deployments integrations
- [ ] `area/docs`: MLflow documentation pages
- [ ] `area/examples`: Example code
- [ ] `area/model-registry`: Model Registry service, APIs, and the fluent client calls for Model Registry
- [ ] `area/models`: MLmodel format, model serialization/deserialization, flavors
- [ ] `area/recipes`: Recipes, Recipe APIs, Recipe configs, Recipe Templates
- [ ] `area/projects`: MLproject format, project running backends
- [ ] `area/scoring`: MLflow Model server, model deployment tools, Spark UDFs
- [ ] `area/server-infra`: MLflow Tracking server backend
- [X] `area/tracking`: Tracking Service, tracking client APIs, autologging
### What interface(s) does this bug affect?
- [ ] `area/uiux`: Front-end, user experience, plotting, JavaScript, JavaScript dev server
- [ ] `area/docker`: Docker use across MLflow's components, such as MLflow Projects and MLflow Models
- [ ] `area/sqlalchemy`: Use of SQLAlchemy in the Tracking Service or Model Registry
- [ ] `area/windows`: Windows support
### What language(s) does this bug affect?
- [ ] `language/r`: R APIs and clients
- [ ] `language/java`: Java APIs and clients
- [ ] `language/new`: Proposals for new client languages
### What integration(s) does this bug affect?
- [ ] `integrations/azure`: Azure and Azure ML integrations
- [ ] `integrations/sagemaker`: SageMaker integrations
- [ ] `integrations/databricks`: Databricks integrations
</issue>
<code>
[start of mlflow/utils/git_utils.py]
1 import logging
2 import os
3 from typing import Optional
4
5 _logger = logging.getLogger(__name__)
6
7
8 def get_git_repo_url(path: str) -> Optional[str]:
9 """
10 Obtains the url of the git repository associated with the specified path,
11 returning ``None`` if the path does not correspond to a git repository.
12 """
13 try:
14 from git import Repo
15 except ImportError as e:
16 _logger.warning(
17 "Failed to import Git (the Git executable is probably not on your PATH),"
18 " so Git SHA is not available. Error: %s",
19 e,
20 )
21 return None
22
23 try:
24 repo = Repo(path, search_parent_directories=True)
25 return next((remote.url for remote in repo.remotes), None)
26 except Exception:
27 return None
28
29
30 def get_git_commit(path: str) -> Optional[str]:
31 """
32 Obtains the hash of the latest commit on the current branch of the git repository associated
33 with the specified path, returning ``None`` if the path does not correspond to a git
34 repository.
35 """
36 try:
37 from git import Repo
38 except ImportError as e:
39 _logger.warning(
40 "Failed to import Git (the Git executable is probably not on your PATH),"
41 " so Git SHA is not available. Error: %s",
42 e,
43 )
44 return None
45 try:
46 if os.path.isfile(path):
47 path = os.path.dirname(path)
48 repo = Repo(path, search_parent_directories=True)
49 if path in repo.ignored(path):
50 return None
51 return repo.head.commit.hexsha
52 except Exception:
53 return None
54
55
56 def get_git_branch(path: str) -> Optional[str]:
57 """
58 Obtains the name of the current branch of the git repository associated with the specified
59 path, returning ``None`` if the path does not correspond to a git repository.
60 """
61 try:
62 from git import Repo
63 except ImportError as e:
64 _logger.warning(
65 "Failed to import Git (the Git executable is probably not on your PATH),"
66 " so Git SHA is not available. Error: %s",
67 e,
68 )
69 return None
70
71 try:
72 if os.path.isfile(path):
73 path = os.path.dirname(path)
74 repo = Repo(path, search_parent_directories=True)
75 return repo.active_branch.name
76 except Exception:
77 return None
78
[end of mlflow/utils/git_utils.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mlflow/utils/git_utils.py b/mlflow/utils/git_utils.py
--- a/mlflow/utils/git_utils.py
+++ b/mlflow/utils/git_utils.py
@@ -44,7 +44,7 @@
return None
try:
if os.path.isfile(path):
- path = os.path.dirname(path)
+ path = os.path.dirname(os.path.abspath(path))
repo = Repo(path, search_parent_directories=True)
if path in repo.ignored(path):
return None
| {"golden_diff": "diff --git a/mlflow/utils/git_utils.py b/mlflow/utils/git_utils.py\n--- a/mlflow/utils/git_utils.py\n+++ b/mlflow/utils/git_utils.py\n@@ -44,7 +44,7 @@\n return None\n try:\n if os.path.isfile(path):\n- path = os.path.dirname(path)\n+ path = os.path.dirname(os.path.abspath(path))\n repo = Repo(path, search_parent_directories=True)\n if path in repo.ignored(path):\n return None\n", "issue": "[BUG] Git version not logged when executable in current path.\n### Issues Policy acknowledgement\r\n\r\n- [X] I have read and agree to submit bug reports in accordance with the [issues policy](https://www.github.com/mlflow/mlflow/blob/master/ISSUE_POLICY.md)\r\n\r\n### Where did you encounter this bug?\r\n\r\nLocal machine\r\n\r\n### Willingness to contribute\r\n\r\nYes. I can contribute a fix for this bug independently.\r\n\r\n### MLflow version\r\n\r\n- Client: 2.8.0\r\n\r\n### System information\r\n\r\n- **OS Platform and Distribution (e.g., Linux Ubuntu 16.04)**: Ubuntu 20.04.2 LTS\r\n- **Python version**: 3.11.6\r\n\r\n\r\n### Describe the problem\r\n\r\nThe tag ``mlflow.source.git.commit`` is not populated if running a python file that's in the current path.\r\nImagine a file ``mypath/myfile.py`` with this content:\r\n```python\r\nfrom mlflow.tracking.context.git_context import _get_source_version\r\nprint(_get_source_version())\r\n```\r\nIf you run from within ``mypath`` via ``python myfile.py`` this will not produce a version,\r\nbut if you run from the parent folder via ``python mypath/myfile.py`` it will produce the correct version.\r\n\r\nThat is caused by [`get_git_commit`](https://github.com/mlflow/mlflow/blob/73fbc7d857b3f04b823b3fcbc6723efc41070288/mlflow/utils/git_utils.py#L47C1-L49C39)\r\nusing the path of the ``argv[1]``, and then passing this to ``Repo``. If the path is the current directory, ``os.path.dirname(path)`` will be the empty string, which will make ``Repo`` raise \r\n```\r\ngit.exc.GitCommandError: Cmd('git') failed due to: exit code(128)\r\n cmdline: git check-ignore\r\n stderr: 'fatal: empty string is not a valid pathspec. please use . 
instead if you meant to match all paths\r\n```\r\nwhich will be caught, and then an empty string will be returned.\r\n\r\nA work-around could be as easy as replacing empty string by ``'.'``, or maybe better using ``os.path.dirname(os.path.abspath(path))``?\r\n\r\n\r\n\r\n\r\n### What component(s) does this bug affect?\r\n\r\n- [ ] `area/artifacts`: Artifact stores and artifact logging\r\n- [ ] `area/build`: Build and test infrastructure for MLflow\r\n- [ ] `area/deployments`: MLflow Deployments client APIs, server, and third-party Deployments integrations\r\n- [ ] `area/docs`: MLflow documentation pages\r\n- [ ] `area/examples`: Example code\r\n- [ ] `area/model-registry`: Model Registry service, APIs, and the fluent client calls for Model Registry\r\n- [ ] `area/models`: MLmodel format, model serialization/deserialization, flavors\r\n- [ ] `area/recipes`: Recipes, Recipe APIs, Recipe configs, Recipe Templates\r\n- [ ] `area/projects`: MLproject format, project running backends\r\n- [ ] `area/scoring`: MLflow Model server, model deployment tools, Spark UDFs\r\n- [ ] `area/server-infra`: MLflow Tracking server backend\r\n- [X] `area/tracking`: Tracking Service, tracking client APIs, autologging\r\n\r\n### What interface(s) does this bug affect?\r\n\r\n- [ ] `area/uiux`: Front-end, user experience, plotting, JavaScript, JavaScript dev server\r\n- [ ] `area/docker`: Docker use across MLflow's components, such as MLflow Projects and MLflow Models\r\n- [ ] `area/sqlalchemy`: Use of SQLAlchemy in the Tracking Service or Model Registry\r\n- [ ] `area/windows`: Windows support\r\n\r\n### What language(s) does this bug affect?\r\n\r\n- [ ] `language/r`: R APIs and clients\r\n- [ ] `language/java`: Java APIs and clients\r\n- [ ] `language/new`: Proposals for new client languages\r\n\r\n### What integration(s) does this bug affect?\r\n\r\n- [ ] `integrations/azure`: Azure and Azure ML integrations\r\n- [ ] `integrations/sagemaker`: SageMaker integrations\r\n- [ ] `integrations/databricks`: Databricks integrations\n", "before_files": [{"content": "import logging\nimport os\nfrom typing import Optional\n\n_logger = logging.getLogger(__name__)\n\n\ndef get_git_repo_url(path: str) -> Optional[str]:\n \"\"\"\n Obtains the url of the git repository associated with the specified path,\n returning ``None`` if the path does not correspond to a git repository.\n \"\"\"\n try:\n from git import Repo\n except ImportError as e:\n _logger.warning(\n \"Failed to import Git (the Git executable is probably not on your PATH),\"\n \" so Git SHA is not available. Error: %s\",\n e,\n )\n return None\n\n try:\n repo = Repo(path, search_parent_directories=True)\n return next((remote.url for remote in repo.remotes), None)\n except Exception:\n return None\n\n\ndef get_git_commit(path: str) -> Optional[str]:\n \"\"\"\n Obtains the hash of the latest commit on the current branch of the git repository associated\n with the specified path, returning ``None`` if the path does not correspond to a git\n repository.\n \"\"\"\n try:\n from git import Repo\n except ImportError as e:\n _logger.warning(\n \"Failed to import Git (the Git executable is probably not on your PATH),\"\n \" so Git SHA is not available. 
Error: %s\",\n e,\n )\n return None\n try:\n if os.path.isfile(path):\n path = os.path.dirname(path)\n repo = Repo(path, search_parent_directories=True)\n if path in repo.ignored(path):\n return None\n return repo.head.commit.hexsha\n except Exception:\n return None\n\n\ndef get_git_branch(path: str) -> Optional[str]:\n \"\"\"\n Obtains the name of the current branch of the git repository associated with the specified\n path, returning ``None`` if the path does not correspond to a git repository.\n \"\"\"\n try:\n from git import Repo\n except ImportError as e:\n _logger.warning(\n \"Failed to import Git (the Git executable is probably not on your PATH),\"\n \" so Git SHA is not available. Error: %s\",\n e,\n )\n return None\n\n try:\n if os.path.isfile(path):\n path = os.path.dirname(path)\n repo = Repo(path, search_parent_directories=True)\n return repo.active_branch.name\n except Exception:\n return None\n", "path": "mlflow/utils/git_utils.py"}]} | 2,106 | 108 |
gh_patches_debug_33992 | rasdani/github-patches | git_diff | ray-project__ray-10122 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[rllib] Off Policy Estimation breaks with GPU on PyTorch (MARWIL) (Offline API)
### What is the problem?
When I use MARWIL with PyTorch and num_gpus: 1, I get an error when computing metrics. This happens because off-policy estimation uses the torch tensors on the GPU instead of numpy arrays. In particular, I use "input_evaluation": ["is", "wis"], and the error goes away when "input_evaluation": ["simulation"].
```
Traceback (most recent call last):
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\tune\trial_runner.py", line 497, in _process_trial
result = self.trial_executor.fetch_result(trial)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\tune\ray_trial_executor.py", line 434, in fetch_result
result = ray.get(trial_future[0], DEFAULT_GET_TIMEOUT)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\worker.py", line 1553, in get
raise value.as_instanceof_cause()
ray.exceptions.RayTaskError(AttributeError): ray::MARWIL.train() (pid=9136, ip=10.0.0.18)
File "python\ray\_raylet.pyx", line 474, in ray._raylet.execute_task
File "python\ray\_raylet.pyx", line 427, in ray._raylet.execute_task.function_executor
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\function_manager.py", line 567, in actor_method_executor
raise e
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\function_manager.py", line 559, in actor_method_executor
method_returns = method(actor, *args, **kwargs)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\rllib\agents\trainer.py", line 522, in train
raise e
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\rllib\agents\trainer.py", line 508, in train
result = Trainable.train(self)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\tune\trainable.py", line 337, in train
result = self.step()
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\rllib\agents\trainer_template.py", line 110, in step
res = next(self.train_exec_impl)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\util\iter.py", line 758, in __next__
return next(self.built_iterator)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\util\iter.py", line 793, in apply_foreach
result = fn(item)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\rllib\execution\metric_ops.py", line 87, in __call__
res = summarize_episodes(episodes, orig_episodes)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\ray\rllib\evaluation\metrics.py", line 173, in summarize_episodes
metrics[k] = np.mean(v_list)
File "<__array_function__ internals>", line 6, in mean
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\numpy\core\fromnumeric.py", line 3335, in mean
out=out, **kwargs)
File "C:\Users\Julius\Anaconda3\envs\ray\lib\site-packages\numpy\core\_methods.py", line 161, in _mean
ret = ret.dtype.type(ret / rcount)
AttributeError: 'torch.dtype' object has no attribute 'type'
```
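The failing `np.mean(v_list)` is being handed torch tensors (still on the GPU) rather than NumPy arrays. A minimal sketch of the kind of conversion that avoids this, assuming a CUDA device is available:

```python
import numpy as np
import torch

v_list = [torch.tensor([0.5], device="cuda")]  # placeholder for the per-episode estimates

# Converting to plain NumPy before aggregation keeps np.mean happy
v_list = [v.detach().cpu().numpy() if torch.is_tensor(v) else v for v in v_list]
print(np.mean(v_list))
```

RLlib's `convert_to_numpy` helper (`ray.rllib.utils.numpy`) does this recursively for nested structures, so routing the estimator outputs through it is one way to fix this.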
### Reproduction (REQUIRED)
Please provide a script that can be run to reproduce the issue. The script should have **no external library dependencies** (i.e., use fake or mock data / environments):
If we cannot run your script, we cannot fix your issue.
- [ ] I have verified my script runs in a clean environment and reproduces the issue.
- [ ] I have verified the issue also occurs with the [latest wheels](https://docs.ray.io/en/latest/installation.html).
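A hypothetical minimal setup along these lines should trigger it (not taken from the original report; the offline data path and environment are placeholders, and it assumes experiences were previously recorded with the `output` option):

```python
import ray
from ray import tune

ray.init()
tune.run(
    "MARWIL",
    stop={"training_iteration": 1},
    config={
        "env": "CartPole-v0",
        "framework": "torch",
        "num_gpus": 1,
        "input": "/tmp/cartpole-out",       # placeholder path to recorded JSON experiences
        "input_evaluation": ["is", "wis"],  # ["simulation"] avoids the crash
        "beta": 1.0,
    },
)
```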
</issue>
<code>
[start of rllib/offline/off_policy_estimator.py]
1 from collections import namedtuple
2 import logging
3
4 from ray.rllib.policy.sample_batch import MultiAgentBatch, SampleBatch
5 from ray.rllib.policy import Policy
6 from ray.rllib.utils.annotations import DeveloperAPI
7 from ray.rllib.offline.io_context import IOContext
8 from ray.rllib.utils.typing import TensorType, SampleBatchType
9 from typing import List
10
11 logger = logging.getLogger(__name__)
12
13 OffPolicyEstimate = namedtuple("OffPolicyEstimate",
14 ["estimator_name", "metrics"])
15
16
17 @DeveloperAPI
18 class OffPolicyEstimator:
19 """Interface for an off policy reward estimator."""
20
21 @DeveloperAPI
22 def __init__(self, policy: Policy, gamma: float):
23 """Creates an off-policy estimator.
24
25 Arguments:
26 policy (Policy): Policy to evaluate.
27 gamma (float): Discount of the MDP.
28 """
29 self.policy = policy
30 self.gamma = gamma
31 self.new_estimates = []
32
33 @classmethod
34 def create(cls, ioctx: IOContext) -> "OffPolicyEstimator":
35 """Create an off-policy estimator from a IOContext."""
36 gamma = ioctx.worker.policy_config["gamma"]
37 # Grab a reference to the current model
38 keys = list(ioctx.worker.policy_map.keys())
39 if len(keys) > 1:
40 raise NotImplementedError(
41 "Off-policy estimation is not implemented for multi-agent. "
42 "You can set `input_evaluation: []` to resolve this.")
43 policy = ioctx.worker.get_policy(keys[0])
44 return cls(policy, gamma)
45
46 @DeveloperAPI
47 def estimate(self, batch: SampleBatchType):
48 """Returns an estimate for the given batch of experiences.
49
50 The batch will only contain data from one episode, but it may only be
51 a fragment of an episode.
52 """
53 raise NotImplementedError
54
55 @DeveloperAPI
56 def action_prob(self, batch: SampleBatchType) -> TensorType:
57 """Returns the probs for the batch actions for the current policy."""
58
59 num_state_inputs = 0
60 for k in batch.keys():
61 if k.startswith("state_in_"):
62 num_state_inputs += 1
63 state_keys = ["state_in_{}".format(i) for i in range(num_state_inputs)]
64 log_likelihoods = self.policy.compute_log_likelihoods(
65 actions=batch[SampleBatch.ACTIONS],
66 obs_batch=batch[SampleBatch.CUR_OBS],
67 state_batches=[batch[k] for k in state_keys],
68 prev_action_batch=batch.data.get(SampleBatch.PREV_ACTIONS),
69 prev_reward_batch=batch.data.get(SampleBatch.PREV_REWARDS))
70 return log_likelihoods
71
72 @DeveloperAPI
73 def process(self, batch: SampleBatchType):
74 self.new_estimates.append(self.estimate(batch))
75
76 @DeveloperAPI
77 def check_can_estimate_for(self, batch: SampleBatchType):
78 """Returns whether we can support OPE for this batch."""
79
80 if isinstance(batch, MultiAgentBatch):
81 raise ValueError(
82 "IS-estimation is not implemented for multi-agent batches. "
83 "You can set `input_evaluation: []` to resolve this.")
84
85 if "action_prob" not in batch:
86 raise ValueError(
87 "Off-policy estimation is not possible unless the inputs "
88 "include action probabilities (i.e., the policy is stochastic "
89 "and emits the 'action_prob' key). For DQN this means using "
90 "`exploration_config: {type: 'SoftQ'}`. You can also set "
91 "`input_evaluation: []` to disable estimation.")
92
93 @DeveloperAPI
94 def get_metrics(self) -> List[OffPolicyEstimate]:
95 """Return a list of new episode metric estimates since the last call.
96
97 Returns:
98 list of OffPolicyEstimate objects.
99 """
100 out = self.new_estimates
101 self.new_estimates = []
102 return out
103
[end of rllib/offline/off_policy_estimator.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/rllib/offline/off_policy_estimator.py b/rllib/offline/off_policy_estimator.py
--- a/rllib/offline/off_policy_estimator.py
+++ b/rllib/offline/off_policy_estimator.py
@@ -1,10 +1,13 @@
from collections import namedtuple
import logging
+import numpy as np
+
from ray.rllib.policy.sample_batch import MultiAgentBatch, SampleBatch
from ray.rllib.policy import Policy
from ray.rllib.utils.annotations import DeveloperAPI
from ray.rllib.offline.io_context import IOContext
+from ray.rllib.utils.numpy import convert_to_numpy
from ray.rllib.utils.typing import TensorType, SampleBatchType
from typing import List
@@ -53,7 +56,7 @@
raise NotImplementedError
@DeveloperAPI
- def action_prob(self, batch: SampleBatchType) -> TensorType:
+ def action_prob(self, batch: SampleBatchType) -> np.ndarray:
"""Returns the probs for the batch actions for the current policy."""
num_state_inputs = 0
@@ -61,13 +64,13 @@
if k.startswith("state_in_"):
num_state_inputs += 1
state_keys = ["state_in_{}".format(i) for i in range(num_state_inputs)]
- log_likelihoods = self.policy.compute_log_likelihoods(
+ log_likelihoods: TensorType = self.policy.compute_log_likelihoods(
actions=batch[SampleBatch.ACTIONS],
obs_batch=batch[SampleBatch.CUR_OBS],
state_batches=[batch[k] for k in state_keys],
prev_action_batch=batch.data.get(SampleBatch.PREV_ACTIONS),
prev_reward_batch=batch.data.get(SampleBatch.PREV_REWARDS))
- return log_likelihoods
+ return convert_to_numpy(log_likelihoods)
@DeveloperAPI
def process(self, batch: SampleBatchType):
| {"golden_diff": "diff --git a/rllib/offline/off_policy_estimator.py b/rllib/offline/off_policy_estimator.py\n--- a/rllib/offline/off_policy_estimator.py\n+++ b/rllib/offline/off_policy_estimator.py\n@@ -1,10 +1,13 @@\n from collections import namedtuple\n import logging\n \n+import numpy as np\n+\n from ray.rllib.policy.sample_batch import MultiAgentBatch, SampleBatch\n from ray.rllib.policy import Policy\n from ray.rllib.utils.annotations import DeveloperAPI\n from ray.rllib.offline.io_context import IOContext\n+from ray.rllib.utils.numpy import convert_to_numpy\n from ray.rllib.utils.typing import TensorType, SampleBatchType\n from typing import List\n \n@@ -53,7 +56,7 @@\n raise NotImplementedError\n \n @DeveloperAPI\n- def action_prob(self, batch: SampleBatchType) -> TensorType:\n+ def action_prob(self, batch: SampleBatchType) -> np.ndarray:\n \"\"\"Returns the probs for the batch actions for the current policy.\"\"\"\n \n num_state_inputs = 0\n@@ -61,13 +64,13 @@\n if k.startswith(\"state_in_\"):\n num_state_inputs += 1\n state_keys = [\"state_in_{}\".format(i) for i in range(num_state_inputs)]\n- log_likelihoods = self.policy.compute_log_likelihoods(\n+ log_likelihoods: TensorType = self.policy.compute_log_likelihoods(\n actions=batch[SampleBatch.ACTIONS],\n obs_batch=batch[SampleBatch.CUR_OBS],\n state_batches=[batch[k] for k in state_keys],\n prev_action_batch=batch.data.get(SampleBatch.PREV_ACTIONS),\n prev_reward_batch=batch.data.get(SampleBatch.PREV_REWARDS))\n- return log_likelihoods\n+ return convert_to_numpy(log_likelihoods)\n \n @DeveloperAPI\n def process(self, batch: SampleBatchType):\n", "issue": "[rllib] Off Policy Estimation breaks with GPU on PyTorch (MARWIL) (Offline API)\n### What is the problem?\r\n\r\nWhen I use MARWIL with PyTorch and num_gpus: 1, I get an error when computing metrics. This happens because in off policy estimation it uses the torch tensors on gpu instead of numpy arrays. 
Particularly, I use \"input_evaluation\": [\"is\", \"wis\"] and the error goes away when \"input_evaluation\": [\"simulation\"]\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\tune\\trial_runner.py\", line 497, in _process_trial\r\n result = self.trial_executor.fetch_result(trial)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\tune\\ray_trial_executor.py\", line 434, in fetch_result\r\n result = ray.get(trial_future[0], DEFAULT_GET_TIMEOUT)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\worker.py\", line 1553, in get\r\n raise value.as_instanceof_cause()\r\nray.exceptions.RayTaskError(AttributeError): ray::MARWIL.train() (pid=9136, ip=10.0.0.18)\r\n File \"python\\ray\\_raylet.pyx\", line 474, in ray._raylet.execute_task\r\n File \"python\\ray\\_raylet.pyx\", line 427, in ray._raylet.execute_task.function_executor\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\function_manager.py\", line 567, in actor_method_executor\r\n raise e\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\function_manager.py\", line 559, in actor_method_executor\r\n method_returns = method(actor, *args, **kwargs)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\rllib\\agents\\trainer.py\", line 522, in train\r\n raise e\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\rllib\\agents\\trainer.py\", line 508, in train\r\n result = Trainable.train(self)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\tune\\trainable.py\", line 337, in train\r\n result = self.step()\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\rllib\\agents\\trainer_template.py\", line 110, in step\r\n res = next(self.train_exec_impl)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\util\\iter.py\", line 758, in __next__\r\n return next(self.built_iterator)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\util\\iter.py\", line 793, in apply_foreach\r\n result = fn(item)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\rllib\\execution\\metric_ops.py\", line 87, in __call__\r\n res = summarize_episodes(episodes, orig_episodes)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\ray\\rllib\\evaluation\\metrics.py\", line 173, in summarize_episodes\r\n metrics[k] = np.mean(v_list)\r\n File \"<__array_function__ internals>\", line 6, in mean\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\numpy\\core\\fromnumeric.py\", line 3335, in mean\r\n out=out, **kwargs)\r\n File \"C:\\Users\\Julius\\Anaconda3\\envs\\ray\\lib\\site-packages\\numpy\\core\\_methods.py\", line 161, in _mean\r\n ret = ret.dtype.type(ret / rcount)\r\nAttributeError: 'torch.dtype' object has no attribute 'type'\r\n``` \r\n\r\n### Reproduction (REQUIRED)\r\nPlease provide a script that can be run to reproduce the issue. 
The script should have **no external library dependencies** (i.e., use fake or mock data / environments):\r\n\r\nIf we cannot run your script, we cannot fix your issue.\r\n\r\n- [ ] I have verified my script runs in a clean environment and reproduces the issue.\r\n- [ ] I have verified the issue also occurs with the [latest wheels](https://docs.ray.io/en/latest/installation.html).\r\n\n", "before_files": [{"content": "from collections import namedtuple\nimport logging\n\nfrom ray.rllib.policy.sample_batch import MultiAgentBatch, SampleBatch\nfrom ray.rllib.policy import Policy\nfrom ray.rllib.utils.annotations import DeveloperAPI\nfrom ray.rllib.offline.io_context import IOContext\nfrom ray.rllib.utils.typing import TensorType, SampleBatchType\nfrom typing import List\n\nlogger = logging.getLogger(__name__)\n\nOffPolicyEstimate = namedtuple(\"OffPolicyEstimate\",\n [\"estimator_name\", \"metrics\"])\n\n\n@DeveloperAPI\nclass OffPolicyEstimator:\n \"\"\"Interface for an off policy reward estimator.\"\"\"\n\n @DeveloperAPI\n def __init__(self, policy: Policy, gamma: float):\n \"\"\"Creates an off-policy estimator.\n\n Arguments:\n policy (Policy): Policy to evaluate.\n gamma (float): Discount of the MDP.\n \"\"\"\n self.policy = policy\n self.gamma = gamma\n self.new_estimates = []\n\n @classmethod\n def create(cls, ioctx: IOContext) -> \"OffPolicyEstimator\":\n \"\"\"Create an off-policy estimator from a IOContext.\"\"\"\n gamma = ioctx.worker.policy_config[\"gamma\"]\n # Grab a reference to the current model\n keys = list(ioctx.worker.policy_map.keys())\n if len(keys) > 1:\n raise NotImplementedError(\n \"Off-policy estimation is not implemented for multi-agent. \"\n \"You can set `input_evaluation: []` to resolve this.\")\n policy = ioctx.worker.get_policy(keys[0])\n return cls(policy, gamma)\n\n @DeveloperAPI\n def estimate(self, batch: SampleBatchType):\n \"\"\"Returns an estimate for the given batch of experiences.\n\n The batch will only contain data from one episode, but it may only be\n a fragment of an episode.\n \"\"\"\n raise NotImplementedError\n\n @DeveloperAPI\n def action_prob(self, batch: SampleBatchType) -> TensorType:\n \"\"\"Returns the probs for the batch actions for the current policy.\"\"\"\n\n num_state_inputs = 0\n for k in batch.keys():\n if k.startswith(\"state_in_\"):\n num_state_inputs += 1\n state_keys = [\"state_in_{}\".format(i) for i in range(num_state_inputs)]\n log_likelihoods = self.policy.compute_log_likelihoods(\n actions=batch[SampleBatch.ACTIONS],\n obs_batch=batch[SampleBatch.CUR_OBS],\n state_batches=[batch[k] for k in state_keys],\n prev_action_batch=batch.data.get(SampleBatch.PREV_ACTIONS),\n prev_reward_batch=batch.data.get(SampleBatch.PREV_REWARDS))\n return log_likelihoods\n\n @DeveloperAPI\n def process(self, batch: SampleBatchType):\n self.new_estimates.append(self.estimate(batch))\n\n @DeveloperAPI\n def check_can_estimate_for(self, batch: SampleBatchType):\n \"\"\"Returns whether we can support OPE for this batch.\"\"\"\n\n if isinstance(batch, MultiAgentBatch):\n raise ValueError(\n \"IS-estimation is not implemented for multi-agent batches. \"\n \"You can set `input_evaluation: []` to resolve this.\")\n\n if \"action_prob\" not in batch:\n raise ValueError(\n \"Off-policy estimation is not possible unless the inputs \"\n \"include action probabilities (i.e., the policy is stochastic \"\n \"and emits the 'action_prob' key). For DQN this means using \"\n \"`exploration_config: {type: 'SoftQ'}`. 
You can also set \"\n \"`input_evaluation: []` to disable estimation.\")\n\n @DeveloperAPI\n def get_metrics(self) -> List[OffPolicyEstimate]:\n \"\"\"Return a list of new episode metric estimates since the last call.\n\n Returns:\n list of OffPolicyEstimate objects.\n \"\"\"\n out = self.new_estimates\n self.new_estimates = []\n return out\n", "path": "rllib/offline/off_policy_estimator.py"}]} | 2,658 | 402 |
gh_patches_debug_25562 | rasdani/github-patches | git_diff | pydantic__pydantic-107 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Missing HISTORY.rst
pydantic installs fine from pip, but when it is pulled in as a dependency from another project's setup.py it fails with a missing HISTORY.rst, because of the long_description in your setup.py. Basically, you need a MANIFEST.in that includes that file.
```
Processing pydantic-0.6.2.tar.gz
Writing /var/folders/4j/00jv8sg138n61hsj6ppf60pm0000gn/T/easy_install-o7rp3h7o/pydantic-0.6.2/setup.cfg
Running pydantic-0.6.2/setup.py -q bdist_egg --dist-dir /var/folders/4j/00jv8sg138n61hsj6ppf60pm0000gn/T/easy_install-o7rp3h7o/pydantic-0.6.2/egg-dist-tmp-7bd8a1a8
error: [Errno 2] No such file or directory: '/private/var/folders/4j/00jv8sg138n61hsj6ppf60pm0000gn/T/easy_install-o7rp3h7o/pydantic-0.6.2/HISTORY.rst'
```
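Two possible fixes, sketched here: add a MANIFEST.in containing `include HISTORY.rst` so the file ships in the sdist, or make the `long_description` read tolerant of a missing file, roughly:

```python
from pathlib import Path

THIS_DIR = Path(__file__).resolve().parent
description = 'Data validation and settings management using python 3.6 type hinting'
try:
    long_description = '\n\n'.join([
        THIS_DIR.joinpath('README.rst').read_text(),
        THIS_DIR.joinpath('HISTORY.rst').read_text(),
    ])
except FileNotFoundError:
    long_description = description
```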
</issue>
<code>
[start of setup.py]
1 from importlib.machinery import SourceFileLoader
2 from pathlib import Path
3 from setuptools import setup
4
5 THIS_DIR = Path(__file__).resolve().parent
6 long_description = (
7 THIS_DIR.joinpath('README.rst').read_text() +
8 '\n\n' +
9 THIS_DIR.joinpath('HISTORY.rst').read_text()
10 )
11
12 # avoid loading the package before requirements are installed:
13 version = SourceFileLoader('version', 'pydantic/version.py').load_module()
14
15 setup(
16 name='pydantic',
17 version=str(version.VERSION),
18 description='Data validation and settings management using python 3.6 type hinting',
19 long_description=long_description,
20 classifiers=[
21 'Development Status :: 4 - Beta',
22 'Environment :: Console',
23 'Programming Language :: Python',
24 'Programming Language :: Python :: 3',
25 'Programming Language :: Python :: 3 :: Only',
26 'Programming Language :: Python :: 3.6',
27 'Intended Audience :: Developers',
28 'Intended Audience :: Information Technology',
29 'Intended Audience :: System Administrators',
30 'License :: OSI Approved :: MIT License',
31 'Operating System :: Unix',
32 'Operating System :: POSIX :: Linux',
33 'Environment :: MacOS X',
34 'Topic :: Software Development :: Libraries :: Python Modules',
35 'Topic :: Internet',
36 ],
37 author='Samuel Colvin',
38 author_email='[email protected]',
39 url='https://github.com/samuelcolvin/pydantic',
40 license='MIT',
41 packages=['pydantic'],
42 python_requires='>=3.6',
43 zip_safe=True,
44 extras_require={
45 'msgpack': ['msgpack-python>=0.4.8'],
46 'ujson': ['ujson>=1.35'],
47 }
48 )
49
[end of setup.py]
[start of pydantic/version.py]
1 from distutils.version import StrictVersion
2
3 __all__ = ['VERSION']
4
5 VERSION = StrictVersion('0.6.2')
6
[end of pydantic/version.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pydantic/version.py b/pydantic/version.py
--- a/pydantic/version.py
+++ b/pydantic/version.py
@@ -2,4 +2,4 @@
__all__ = ['VERSION']
-VERSION = StrictVersion('0.6.2')
+VERSION = StrictVersion('0.6.3')
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -2,12 +2,15 @@
from pathlib import Path
from setuptools import setup
+description = 'Data validation and settings management using python 3.6 type hinting'
THIS_DIR = Path(__file__).resolve().parent
-long_description = (
- THIS_DIR.joinpath('README.rst').read_text() +
- '\n\n' +
- THIS_DIR.joinpath('HISTORY.rst').read_text()
-)
+try:
+ long_description = '\n\n'.join([
+ THIS_DIR.joinpath('README.rst').read_text(),
+ THIS_DIR.joinpath('HISTORY.rst').read_text()
+ ])
+except FileNotFoundError:
+ long_description = description + '.\n\nSee https://pydantic-docs.helpmanual.io/ for documentation.'
# avoid loading the package before requirements are installed:
version = SourceFileLoader('version', 'pydantic/version.py').load_module()
@@ -15,7 +18,7 @@
setup(
name='pydantic',
version=str(version.VERSION),
- description='Data validation and settings management using python 3.6 type hinting',
+ description=description,
long_description=long_description,
classifiers=[
'Development Status :: 4 - Beta',
| {"golden_diff": "diff --git a/pydantic/version.py b/pydantic/version.py\n--- a/pydantic/version.py\n+++ b/pydantic/version.py\n@@ -2,4 +2,4 @@\n \n __all__ = ['VERSION']\n \n-VERSION = StrictVersion('0.6.2')\n+VERSION = StrictVersion('0.6.3')\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -2,12 +2,15 @@\n from pathlib import Path\n from setuptools import setup\n \n+description = 'Data validation and settings management using python 3.6 type hinting'\n THIS_DIR = Path(__file__).resolve().parent\n-long_description = (\n- THIS_DIR.joinpath('README.rst').read_text() +\n- '\\n\\n' +\n- THIS_DIR.joinpath('HISTORY.rst').read_text()\n-)\n+try:\n+ long_description = '\\n\\n'.join([\n+ THIS_DIR.joinpath('README.rst').read_text(),\n+ THIS_DIR.joinpath('HISTORY.rst').read_text()\n+ ])\n+except FileNotFoundError:\n+ long_description = description + '.\\n\\nSee https://pydantic-docs.helpmanual.io/ for documentation.'\n \n # avoid loading the package before requirements are installed:\n version = SourceFileLoader('version', 'pydantic/version.py').load_module()\n@@ -15,7 +18,7 @@\n setup(\n name='pydantic',\n version=str(version.VERSION),\n- description='Data validation and settings management using python 3.6 type hinting',\n+ description=description,\n long_description=long_description,\n classifiers=[\n 'Development Status :: 4 - Beta',\n", "issue": "Missing HISTORY.rst\npydantic installs fine from pip but via a dependency in setup.py it fails with a missing HISTORY.rst due to your long_description in setup.py. Basically, you need a MANIFEST.in that includes that file.\r\n\r\n```\r\nProcessing pydantic-0.6.2.tar.gz\r\nWriting /var/folders/4j/00jv8sg138n61hsj6ppf60pm0000gn/T/easy_install-o7rp3h7o/pydantic-0.6.2/setup.cfg\r\nRunning pydantic-0.6.2/setup.py -q bdist_egg --dist-dir /var/folders/4j/00jv8sg138n61hsj6ppf60pm0000gn/T/easy_install-o7rp3h7o/pydantic-0.6.2/egg-dist-tmp-7bd8a1a8\r\nerror: [Errno 2] No such file or directory: '/private/var/folders/4j/00jv8sg138n61hsj6ppf60pm0000gn/T/easy_install-o7rp3h7o/pydantic-0.6.2/HISTORY.rst'\r\n```\n", "before_files": [{"content": "from importlib.machinery import SourceFileLoader\nfrom pathlib import Path\nfrom setuptools import setup\n\nTHIS_DIR = Path(__file__).resolve().parent\nlong_description = (\n THIS_DIR.joinpath('README.rst').read_text() +\n '\\n\\n' +\n THIS_DIR.joinpath('HISTORY.rst').read_text()\n)\n\n# avoid loading the package before requirements are installed:\nversion = SourceFileLoader('version', 'pydantic/version.py').load_module()\n\nsetup(\n name='pydantic',\n version=str(version.VERSION),\n description='Data validation and settings management using python 3.6 type hinting',\n long_description=long_description,\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Environment :: Console',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3 :: Only',\n 'Programming Language :: Python :: 3.6',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Information Technology',\n 'Intended Audience :: System Administrators',\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: Unix',\n 'Operating System :: POSIX :: Linux',\n 'Environment :: MacOS X',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Internet',\n ],\n author='Samuel Colvin',\n author_email='[email protected]',\n url='https://github.com/samuelcolvin/pydantic',\n license='MIT',\n packages=['pydantic'],\n python_requires='>=3.6',\n zip_safe=True,\n 
extras_require={\n 'msgpack': ['msgpack-python>=0.4.8'],\n 'ujson': ['ujson>=1.35'],\n }\n)\n", "path": "setup.py"}, {"content": "from distutils.version import StrictVersion\n\n__all__ = ['VERSION']\n\nVERSION = StrictVersion('0.6.2')\n", "path": "pydantic/version.py"}]} | 1,332 | 367 |
gh_patches_debug_4505 | rasdani/github-patches | git_diff | LMFDB__lmfdb-1309 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Can't add new accounts
We can't assign new accounts (for editing knowls) because that has to be done on beta, not www.
I think this just involves changing line 28 of lmfdb/users/main.py
from
base_url = "http://www.l-functions.org"
to
base_url = "http://beta.lmfdb.org"
But I didn't want to do that in case it has a bad side-effect.
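For context, `base_url` is what the registration-token links in `register()` are built from (`"%s%s" % (base_url, url_for(".register_token", token=_))`, line 212 of the file below), so the proposed change is just the one assignment:

```python
# lmfdb/users/main.py, line 28 -- change proposed above
base_url = "http://beta.lmfdb.org"  # was "http://www.l-functions.org"
```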
</issue>
<code>
[start of lmfdb/users/main.py]
1 # -*- encoding: utf-8 -*-
2 # this holds all the flask-login specific logic (+ url mapping an rendering templates)
3 # for the user management
4 # author: harald schilly <[email protected]>
5
6 import pymongo
7 ASC = pymongo.ASCENDING
8 import flask
9 from functools import wraps
10 from lmfdb.base import app, getDBConnection
11 from flask import render_template, request, abort, Blueprint, url_for, make_response
12 from flask.ext.login import login_required, login_user, current_user, logout_user
13
14 login_page = Blueprint("users", __name__, template_folder='templates')
15 import lmfdb.utils
16 logger = lmfdb.utils.make_logger(login_page)
17
18 import re
19 allowed_usernames = re.compile("^[a-zA-Z0-9._-]+$")
20
21 from flask.ext.login import LoginManager
22 login_manager = LoginManager()
23
24 import pwdmanager
25 from pwdmanager import LmfdbUser, LmfdbAnonymousUser
26
27 # TODO update this url, needed for the user login token
28 base_url = "http://www.l-functions.org"
29 # TODO: Not sure this should be changed from l-functions -> lmfdb, because
30 # I don't really understand how this is used. Paul
31
32
33 @login_manager.user_loader
34 def load_user(userid):
35 from pwdmanager import LmfdbUser
36 return LmfdbUser(userid)
37
38 login_manager.login_view = "users.info"
39
40 # this anonymous user has the is_admin() method
41 login_manager.anonymous_user = LmfdbAnonymousUser
42
43
44 def get_username(uid):
45 """returns the name of user @uid"""
46 return LmfdbUser(uid).name
47
48 # globally define user properties and username
49
50
51 @app.context_processor
52 def ctx_proc_userdata():
53 userdata = {}
54 userdata['userid'] = 'anon' if current_user.is_anonymous() else current_user._uid
55 userdata['username'] = 'Anonymous' if current_user.is_anonymous() else current_user.name
56 userdata['user_is_authenticated'] = current_user.is_authenticated()
57 userdata['user_is_admin'] = current_user.is_admin()
58 userdata['get_username'] = get_username # this is a function
59 return userdata
60
61 # blueprint specific definition of the body_class variable
62
63
64 @login_page.context_processor
65 def body_class():
66 return {'body_class': 'login'}
67
68 # the following doesn't work as it should, also depends on blinker python lib
69 # flask signal when a user logs in. we record the last logins in the user's data
70 # http://flask.pocoo.org/docs/signals/
71 # def log_login_callback(cur_app, user = None):
72 # cur_user = user or current_user
73 # logger.info(">> curr_app: %s user: %s" % (cur_app, cur_user))
74 #
75 # from flask.ext.login import user_logged_in, user_login_confirmed
76 # user_logged_in.connect(log_login_callback)
77 # user_login_confirmed.connect(log_login_callback)
78
79
80 def base_bread():
81 return [('Users', url_for(".list"))]
82
83
84 @login_page.route("/")
85 @login_required
86 def list():
87 import pwdmanager
88 users = pwdmanager.get_user_list()
89 # trying to be smart and sorting by last name
90 users = sorted(users, key=lambda x: x[1].split(" ")[-1].lower())
91 bread = base_bread()
92 return render_template("user-list.html", title="All Users",
93 users=users, bread=bread)
94
95
96 @login_page.route("/myself")
97 def info():
98 info = {}
99 info['login'] = url_for(".login")
100 info['logout'] = url_for(".logout")
101 info['user'] = current_user
102 info['next'] = request.referrer
103 return render_template("user-info.html",
104 info=info, title="Userinfo",
105 bread=base_bread() + [("Myself", url_for(".info"))])
106
107 # ./info again, but for POST!
108
109
110 @login_page.route("/info", methods=['POST'])
111 @login_required
112 def set_info():
113 for k, v in request.form.iteritems():
114 setattr(current_user, k, v)
115 current_user.save()
116 flask.flash("Thank you for updating your details!")
117 return flask.redirect(url_for(".info"))
118
119
120 @login_page.route("/profile/<userid>")
121 @login_required
122 def profile(userid):
123 # See issue #1169
124 #try:
125 # getDBConnection().knowledge.knowls.ensure_index('title')
126 #except pymongo.errors.OperationFailure:
127 # pass
128 user = LmfdbUser(userid)
129 bread = base_bread() + [(user.name, url_for('.profile', userid=user.get_id()))]
130 userknowls = getDBConnection(
131 ).knowledge.knowls.find({'authors': userid}, ['title']).sort([('title', ASC)])
132 userfiles = getDBConnection(
133 ).upload.fs.files.find({'metadata.uploader_id': userid, 'metadata.status': 'approved'})
134 userfilesmod = getDBConnection(
135 ).upload.fs.files.find({'metadata.uploader_id': userid, 'metadata.status': 'unmoderated'})
136 return render_template("user-detail.html", user=user,
137 title="%s" % user.name, bread=bread, userknowls=userknowls, userfiles=userfiles, userfilesmod=userfilesmod)
138
139
140 @login_page.route("/login", methods=["POST"])
141 def login(**kwargs):
142 bread = base_bread() + [('Login', url_for('.login'))]
143 # login and validate the user …
144 # remember = True sets a cookie to remmeber the user
145 name = request.form["name"]
146 password = request.form["password"]
147 next = request.form["next"]
148 remember = True if request.form["remember"] == "on" else False
149 user = LmfdbUser(name)
150 if user and user.authenticate(password):
151 login_user(user, remember=remember)
152 flask.flash("Hello %s, your login was successful!" % user.name)
153 logger.info("login: '%s' - '%s'" % (user.get_id(), user.name))
154 return flask.redirect(next or url_for(".info"))
155 flask.flash("Oops! Wrong username or password.", "error")
156 return flask.redirect(url_for(".info"))
157
158
159 def admin_required(fn):
160 """
161 wrap this around those entry points where you need to be an admin.
162 """
163 @wraps(fn)
164 @login_required
165 def decorated_view(*args, **kwargs):
166 logger.info("admin access attempt by %s" % current_user.get_id())
167 if not current_user.is_admin():
168 return flask.abort(403) # access denied
169 return fn(*args, **kwargs)
170 return decorated_view
171
172
173 def housekeeping(fn):
174 """
175 wrap this around maintenance calls, they are only accessible for
176 admins and for localhost
177 """
178 @wraps(fn)
179 def decorated_view(*args, **kwargs):
180 logger.info("housekeeping access attempt by %s" % request.remote_addr)
181 if request.remote_addr in ["127.0.0.1", "localhost"]:
182 return fn(*args, **kwargs)
183 return admin_required(fn)(*args, **kwargs)
184 return decorated_view
185
186
187 def get_user_token_coll():
188 return getDBConnection().userdb.tokens
189
190
191 @login_page.route("/register")
192 def register_new():
193 return ""
194 # q_admins = getDBConnection().userdb.users.find({'admin' : True})
195 # admins =', '.join((_['full_name'] or _['_id'] for _ in q_admins))
196 # return "You have to contact one of the Admins: %s" % admins
197
198
199 @login_page.route("/register/new")
200 @login_page.route("/register/new/<int:N>")
201 @admin_required
202 def register(N=10):
203 N = 100 if N > 100 else N
204 from datetime import datetime, timedelta
205 now = datetime.utcnow()
206 tdelta = timedelta(days=1)
207 exp = now + tdelta
208 import random
209 tokens = [str(random.randrange(1e20, 1e21)) for _ in range(N)]
210 for t in tokens:
211 get_user_token_coll().save({'_id': t, 'expire': exp})
212 urls = ["%s%s" % (base_url, url_for(".register_token", token=_)) for _ in tokens]
213 resp = make_response('\n'.join(urls))
214 resp.headers['Content-type'] = 'text/plain'
215 return resp
216
217
218 def delete_old_tokens():
219 from datetime import datetime, timedelta
220 now = datetime.utcnow()
221 tdelta = timedelta(days=8)
222 exp = now + tdelta
223 get_user_token_coll().remove({'expire': {'$gt': exp}})
224
225
226 @login_page.route("/register/<token>", methods=['GET', 'POST'])
227 def register_token(token):
228 delete_old_tokens()
229 token_exists = get_user_token_coll().find({'_id': token}).count() == 1
230 if not token_exists:
231 flask.abort(401)
232 bread = base_bread() + [('Register', url_for(".register_new"))]
233 if request.method == "GET":
234 return render_template("register.html", title="Register", bread=bread, next=request.referrer or "/", token=token)
235 elif request.method == 'POST':
236 name = request.form['name']
237 if not allowed_usernames.match(name):
238 flask.flash("""Oops, username '%s' is not allowed.
239 It must consist of lower/uppercase characters,
240 no spaces, numbers or '.', '_' and '-'.""" % name, "error")
241 return flask.redirect(url_for(".register_new"))
242
243 pw1 = request.form['password1']
244 pw2 = request.form['password2']
245 if pw1 != pw2:
246 flask.flash("Oops, passwords do not match!", "error")
247 return flask.redirect(url_for(".register_new"))
248
249 if len(pw1) <= 3:
250 flask.flash("Oops, password too short. Minimum 4 characters please!", "error")
251 return flask.redirect(url_for(".register_new"))
252
253 full_name = request.form['full_name']
254 email = request.form['email']
255 next = request.form["next"]
256
257 if pwdmanager.user_exists(name):
258 flask.flash("Sorry, user ID '%s' already exists!" % name, "error")
259 return flask.redirect(url_for(".register_new"))
260
261 newuser = pwdmanager.new_user(name, pw1)
262 newuser.full_name = full_name
263 newuser.email = email
264 newuser.save()
265 login_user(newuser, remember=True)
266 flask.flash("Hello %s! Congratulations, you are a new user!" % newuser.name)
267 get_user_token_coll().remove({'_id': token})
268 logger.debug("removed login token '%s'" % token)
269 logger.info("new user: '%s' - '%s'" % (newuser.get_id(), newuser.name))
270 return flask.redirect(next or url_for(".info"))
271
272
273 @login_page.route("/change_password", methods=['POST'])
274 @login_required
275 def change_password():
276 uid = current_user.get_id()
277 pw_old = request.form['oldpwd']
278 if not current_user.authenticate(pw_old):
279 flask.flash("Ooops, old password is wrong!", "error")
280 return flask.redirect(url_for(".info"))
281
282 pw1 = request.form['password1']
283 pw2 = request.form['password2']
284 if pw1 != pw2:
285 flask.flash("Oops, new passwords do not match!", "error")
286 return flask.redirect(url_for(".info"))
287
288 pwdmanager.change_password(uid, pw1)
289 flask.flash("Your password has been changed.")
290 return flask.redirect(url_for(".info"))
291
292
293 @login_page.route("/logout")
294 @login_required
295 def logout():
296 bread = base_bread() + [('Login', url_for('.logout'))]
297 logout_user()
298 flask.flash("You are logged out now. Have a nice day!")
299 return flask.redirect(request.args.get("next") or request.referrer or url_for('.info'))
300
301
302 @login_page.route("/admin")
303 @login_required
304 @admin_required
305 def admin():
306 return "success: only admins can read this!"
307
[end of lmfdb/users/main.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/lmfdb/users/main.py b/lmfdb/users/main.py
--- a/lmfdb/users/main.py
+++ b/lmfdb/users/main.py
@@ -24,11 +24,7 @@
import pwdmanager
from pwdmanager import LmfdbUser, LmfdbAnonymousUser
-# TODO update this url, needed for the user login token
-base_url = "http://www.l-functions.org"
-# TODO: Not sure this should be changed from l-functions -> lmfdb, because
-# I don't really understand how this is used. Paul
-
+base_url = "http://beta.lmfdb.org"
@login_manager.user_loader
def load_user(userid):
| {"golden_diff": "diff --git a/lmfdb/users/main.py b/lmfdb/users/main.py\n--- a/lmfdb/users/main.py\n+++ b/lmfdb/users/main.py\n@@ -24,11 +24,7 @@\n import pwdmanager\n from pwdmanager import LmfdbUser, LmfdbAnonymousUser\n \n-# TODO update this url, needed for the user login token\n-base_url = \"http://www.l-functions.org\"\n-# TODO: Not sure this should be changed from l-functions -> lmfdb, because\n-# I don't really understand how this is used. Paul\n-\n+base_url = \"http://beta.lmfdb.org\"\n \n @login_manager.user_loader\n def load_user(userid):\n", "issue": "Can't add new accounts\nWe can't assign new accounts (for editing knowls) because that has to be done on beta, not www.\n\nI think this just involves changing line 28 of lmfdb/users/main.py\nfrom\nbase_url = \"http://www.l-functions.org\"\nto\nbase_url = \"http://beta.lmfdb.org\"\n\nBut I didn't want to do that in case it has a bad side-effect.\n\n", "before_files": [{"content": "# -*- encoding: utf-8 -*-\n# this holds all the flask-login specific logic (+ url mapping an rendering templates)\n# for the user management\n# author: harald schilly <[email protected]>\n\nimport pymongo\nASC = pymongo.ASCENDING\nimport flask\nfrom functools import wraps\nfrom lmfdb.base import app, getDBConnection\nfrom flask import render_template, request, abort, Blueprint, url_for, make_response\nfrom flask.ext.login import login_required, login_user, current_user, logout_user\n\nlogin_page = Blueprint(\"users\", __name__, template_folder='templates')\nimport lmfdb.utils\nlogger = lmfdb.utils.make_logger(login_page)\n\nimport re\nallowed_usernames = re.compile(\"^[a-zA-Z0-9._-]+$\")\n\nfrom flask.ext.login import LoginManager\nlogin_manager = LoginManager()\n\nimport pwdmanager\nfrom pwdmanager import LmfdbUser, LmfdbAnonymousUser\n\n# TODO update this url, needed for the user login token\nbase_url = \"http://www.l-functions.org\"\n# TODO: Not sure this should be changed from l-functions -> lmfdb, because\n# I don't really understand how this is used. Paul\n\n\n@login_manager.user_loader\ndef load_user(userid):\n from pwdmanager import LmfdbUser\n return LmfdbUser(userid)\n\nlogin_manager.login_view = \"users.info\"\n\n# this anonymous user has the is_admin() method\nlogin_manager.anonymous_user = LmfdbAnonymousUser\n\n\ndef get_username(uid):\n \"\"\"returns the name of user @uid\"\"\"\n return LmfdbUser(uid).name\n\n# globally define user properties and username\n\n\[email protected]_processor\ndef ctx_proc_userdata():\n userdata = {}\n userdata['userid'] = 'anon' if current_user.is_anonymous() else current_user._uid\n userdata['username'] = 'Anonymous' if current_user.is_anonymous() else current_user.name\n userdata['user_is_authenticated'] = current_user.is_authenticated()\n userdata['user_is_admin'] = current_user.is_admin()\n userdata['get_username'] = get_username # this is a function\n return userdata\n\n# blueprint specific definition of the body_class variable\n\n\n@login_page.context_processor\ndef body_class():\n return {'body_class': 'login'}\n\n# the following doesn't work as it should, also depends on blinker python lib\n# flask signal when a user logs in. 
we record the last logins in the user's data\n# http://flask.pocoo.org/docs/signals/\n# def log_login_callback(cur_app, user = None):\n# cur_user = user or current_user\n# logger.info(\">> curr_app: %s user: %s\" % (cur_app, cur_user))\n#\n# from flask.ext.login import user_logged_in, user_login_confirmed\n# user_logged_in.connect(log_login_callback)\n# user_login_confirmed.connect(log_login_callback)\n\n\ndef base_bread():\n return [('Users', url_for(\".list\"))]\n\n\n@login_page.route(\"/\")\n@login_required\ndef list():\n import pwdmanager\n users = pwdmanager.get_user_list()\n # trying to be smart and sorting by last name\n users = sorted(users, key=lambda x: x[1].split(\" \")[-1].lower())\n bread = base_bread()\n return render_template(\"user-list.html\", title=\"All Users\",\n users=users, bread=bread)\n\n\n@login_page.route(\"/myself\")\ndef info():\n info = {}\n info['login'] = url_for(\".login\")\n info['logout'] = url_for(\".logout\")\n info['user'] = current_user\n info['next'] = request.referrer\n return render_template(\"user-info.html\",\n info=info, title=\"Userinfo\",\n bread=base_bread() + [(\"Myself\", url_for(\".info\"))])\n\n# ./info again, but for POST!\n\n\n@login_page.route(\"/info\", methods=['POST'])\n@login_required\ndef set_info():\n for k, v in request.form.iteritems():\n setattr(current_user, k, v)\n current_user.save()\n flask.flash(\"Thank you for updating your details!\")\n return flask.redirect(url_for(\".info\"))\n\n\n@login_page.route(\"/profile/<userid>\")\n@login_required\ndef profile(userid):\n # See issue #1169\n #try:\n # getDBConnection().knowledge.knowls.ensure_index('title')\n #except pymongo.errors.OperationFailure:\n # pass\n user = LmfdbUser(userid)\n bread = base_bread() + [(user.name, url_for('.profile', userid=user.get_id()))]\n userknowls = getDBConnection(\n ).knowledge.knowls.find({'authors': userid}, ['title']).sort([('title', ASC)])\n userfiles = getDBConnection(\n ).upload.fs.files.find({'metadata.uploader_id': userid, 'metadata.status': 'approved'})\n userfilesmod = getDBConnection(\n ).upload.fs.files.find({'metadata.uploader_id': userid, 'metadata.status': 'unmoderated'})\n return render_template(\"user-detail.html\", user=user,\n title=\"%s\" % user.name, bread=bread, userknowls=userknowls, userfiles=userfiles, userfilesmod=userfilesmod)\n\n\n@login_page.route(\"/login\", methods=[\"POST\"])\ndef login(**kwargs):\n bread = base_bread() + [('Login', url_for('.login'))]\n # login and validate the user \u2026\n # remember = True sets a cookie to remmeber the user\n name = request.form[\"name\"]\n password = request.form[\"password\"]\n next = request.form[\"next\"]\n remember = True if request.form[\"remember\"] == \"on\" else False\n user = LmfdbUser(name)\n if user and user.authenticate(password):\n login_user(user, remember=remember)\n flask.flash(\"Hello %s, your login was successful!\" % user.name)\n logger.info(\"login: '%s' - '%s'\" % (user.get_id(), user.name))\n return flask.redirect(next or url_for(\".info\"))\n flask.flash(\"Oops! 
Wrong username or password.\", \"error\")\n return flask.redirect(url_for(\".info\"))\n\n\ndef admin_required(fn):\n \"\"\"\n wrap this around those entry points where you need to be an admin.\n \"\"\"\n @wraps(fn)\n @login_required\n def decorated_view(*args, **kwargs):\n logger.info(\"admin access attempt by %s\" % current_user.get_id())\n if not current_user.is_admin():\n return flask.abort(403) # access denied\n return fn(*args, **kwargs)\n return decorated_view\n\n\ndef housekeeping(fn):\n \"\"\"\n wrap this around maintenance calls, they are only accessible for\n admins and for localhost\n \"\"\"\n @wraps(fn)\n def decorated_view(*args, **kwargs):\n logger.info(\"housekeeping access attempt by %s\" % request.remote_addr)\n if request.remote_addr in [\"127.0.0.1\", \"localhost\"]:\n return fn(*args, **kwargs)\n return admin_required(fn)(*args, **kwargs)\n return decorated_view\n\n\ndef get_user_token_coll():\n return getDBConnection().userdb.tokens\n\n\n@login_page.route(\"/register\")\ndef register_new():\n return \"\"\n # q_admins = getDBConnection().userdb.users.find({'admin' : True})\n # admins =', '.join((_['full_name'] or _['_id'] for _ in q_admins))\n # return \"You have to contact one of the Admins: %s\" % admins\n\n\n@login_page.route(\"/register/new\")\n@login_page.route(\"/register/new/<int:N>\")\n@admin_required\ndef register(N=10):\n N = 100 if N > 100 else N\n from datetime import datetime, timedelta\n now = datetime.utcnow()\n tdelta = timedelta(days=1)\n exp = now + tdelta\n import random\n tokens = [str(random.randrange(1e20, 1e21)) for _ in range(N)]\n for t in tokens:\n get_user_token_coll().save({'_id': t, 'expire': exp})\n urls = [\"%s%s\" % (base_url, url_for(\".register_token\", token=_)) for _ in tokens]\n resp = make_response('\\n'.join(urls))\n resp.headers['Content-type'] = 'text/plain'\n return resp\n\n\ndef delete_old_tokens():\n from datetime import datetime, timedelta\n now = datetime.utcnow()\n tdelta = timedelta(days=8)\n exp = now + tdelta\n get_user_token_coll().remove({'expire': {'$gt': exp}})\n\n\n@login_page.route(\"/register/<token>\", methods=['GET', 'POST'])\ndef register_token(token):\n delete_old_tokens()\n token_exists = get_user_token_coll().find({'_id': token}).count() == 1\n if not token_exists:\n flask.abort(401)\n bread = base_bread() + [('Register', url_for(\".register_new\"))]\n if request.method == \"GET\":\n return render_template(\"register.html\", title=\"Register\", bread=bread, next=request.referrer or \"/\", token=token)\n elif request.method == 'POST':\n name = request.form['name']\n if not allowed_usernames.match(name):\n flask.flash(\"\"\"Oops, usename '%s' is not allowed.\n It must consist of lower/uppercase characters,\n no spaces, numbers or '.', '_' and '-'.\"\"\" % name, \"error\")\n return flask.redirect(url_for(\".register_new\"))\n\n pw1 = request.form['password1']\n pw2 = request.form['password2']\n if pw1 != pw2:\n flask.flash(\"Oops, passwords do not match!\", \"error\")\n return flask.redirect(url_for(\".register_new\"))\n\n if len(pw1) <= 3:\n flask.flash(\"Oops, password too short. 
Minimum 4 characters please!\", \"error\")\n return flask.redirect(url_for(\".register_new\"))\n\n full_name = request.form['full_name']\n email = request.form['email']\n next = request.form[\"next\"]\n\n if pwdmanager.user_exists(name):\n flask.flash(\"Sorry, user ID '%s' already exists!\" % name, \"error\")\n return flask.redirect(url_for(\".register_new\"))\n\n newuser = pwdmanager.new_user(name, pw1)\n newuser.full_name = full_name\n newuser.email = email\n newuser.save()\n login_user(newuser, remember=True)\n flask.flash(\"Hello %s! Congratulations, you are a new user!\" % newuser.name)\n get_user_token_coll().remove({'_id': token})\n logger.debug(\"removed login token '%s'\" % token)\n logger.info(\"new user: '%s' - '%s'\" % (newuser.get_id(), newuser.name))\n return flask.redirect(next or url_for(\".info\"))\n\n\n@login_page.route(\"/change_password\", methods=['POST'])\n@login_required\ndef change_password():\n uid = current_user.get_id()\n pw_old = request.form['oldpwd']\n if not current_user.authenticate(pw_old):\n flask.flash(\"Ooops, old password is wrong!\", \"error\")\n return flask.redirect(url_for(\".info\"))\n\n pw1 = request.form['password1']\n pw2 = request.form['password2']\n if pw1 != pw2:\n flask.flash(\"Oops, new passwords do not match!\", \"error\")\n return flask.redirect(url_for(\".info\"))\n\n pwdmanager.change_password(uid, pw1)\n flask.flash(\"Your password has been changed.\")\n return flask.redirect(url_for(\".info\"))\n\n\n@login_page.route(\"/logout\")\n@login_required\ndef logout():\n bread = base_bread() + [('Login', url_for('.logout'))]\n logout_user()\n flask.flash(\"You are logged out now. Have a nice day!\")\n return flask.redirect(request.args.get(\"next\") or request.referrer or url_for('.info'))\n\n\n@login_page.route(\"/admin\")\n@login_required\n@admin_required\ndef admin():\n return \"success: only admins can read this!\"\n", "path": "lmfdb/users/main.py"}]} | 4,073 | 153 |
gh_patches_debug_17096 | rasdani/github-patches | git_diff | inventree__InvenTree-5045 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Make event test more specific
@matmair [this CI failure](https://github.com/inventree/InvenTree/actions/runs/5259899543/jobs/9506168595?pr=4732) is the last in the current batch of weird ones. I'm seeing it on multiple PRs, about 50% of the time.
Here's the failing line:
https://github.com/inventree/InvenTree/blob/c8365ccd0c9371ea4d127fe616e0029f35b3c19c/InvenTree/plugin/samples/event/test_event_sample.py#L27
Sometimes, `cm.warning.args[0]` returns an "unclosed file object", rather than a string. Any ideas?
_Originally posted by @SchrodingersGat in https://github.com/inventree/InvenTree/issues/4732#issuecomment-1590219025_
</issue>
<code>
[start of InvenTree/plugin/samples/event/event_sample.py]
1 """Sample plugin which responds to events."""
2
3 import warnings
4
5 from django.conf import settings
6
7 from plugin import InvenTreePlugin
8 from plugin.mixins import EventMixin
9
10
11 class EventPluginSample(EventMixin, InvenTreePlugin):
12 """A sample plugin which provides supports for triggered events."""
13
14 NAME = "EventPlugin"
15 SLUG = "sampleevent"
16 TITLE = "Triggered Events"
17
18 def process_event(self, event, *args, **kwargs):
19 """Custom event processing."""
20 print(f"Processing triggered event: '{event}'")
21 print("args:", str(args))
22 print("kwargs:", str(kwargs))
23
24 # Issue warning that we can test for
25 if settings.PLUGIN_TESTING:
26 warnings.warn(f'Event `{event}` triggered', stacklevel=2)
27
[end of InvenTree/plugin/samples/event/event_sample.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/InvenTree/plugin/samples/event/event_sample.py b/InvenTree/plugin/samples/event/event_sample.py
--- a/InvenTree/plugin/samples/event/event_sample.py
+++ b/InvenTree/plugin/samples/event/event_sample.py
@@ -1,12 +1,14 @@
"""Sample plugin which responds to events."""
-import warnings
+import logging
from django.conf import settings
from plugin import InvenTreePlugin
from plugin.mixins import EventMixin
+logger = logging.getLogger('inventree')
+
class EventPluginSample(EventMixin, InvenTreePlugin):
"""A sample plugin which provides supports for triggered events."""
@@ -23,4 +25,4 @@
# Issue warning that we can test for
if settings.PLUGIN_TESTING:
- warnings.warn(f'Event `{event}` triggered', stacklevel=2)
+ logger.debug(f'Event `{event}` triggered in sample plugin')
| {"golden_diff": "diff --git a/InvenTree/plugin/samples/event/event_sample.py b/InvenTree/plugin/samples/event/event_sample.py\n--- a/InvenTree/plugin/samples/event/event_sample.py\n+++ b/InvenTree/plugin/samples/event/event_sample.py\n@@ -1,12 +1,14 @@\n \"\"\"Sample plugin which responds to events.\"\"\"\n \n-import warnings\n+import logging\n \n from django.conf import settings\n \n from plugin import InvenTreePlugin\n from plugin.mixins import EventMixin\n \n+logger = logging.getLogger('inventree')\n+\n \n class EventPluginSample(EventMixin, InvenTreePlugin):\n \"\"\"A sample plugin which provides supports for triggered events.\"\"\"\n@@ -23,4 +25,4 @@\n \n # Issue warning that we can test for\n if settings.PLUGIN_TESTING:\n- warnings.warn(f'Event `{event}` triggered', stacklevel=2)\n+ logger.debug(f'Event `{event}` triggered in sample plugin')\n", "issue": "Make event test more specific\n @matmair [this CI failure](https://github.com/inventree/InvenTree/actions/runs/5259899543/jobs/9506168595?pr=4732) is the last in the current batch of weird ones. I'm seeing it on multiple PRs, about 50% of the time.\r\n\r\nHere's the failing line:\r\n\r\nhttps://github.com/inventree/InvenTree/blob/c8365ccd0c9371ea4d127fe616e0029f35b3c19c/InvenTree/plugin/samples/event/test_event_sample.py#L27\r\n\r\nSometimes, `cm.warning.args[0]` returns an \"unclosed file object\", rather than a string. Any ideas?\r\n\r\n_Originally posted by @SchrodingersGat in https://github.com/inventree/InvenTree/issues/4732#issuecomment-1590219025_\r\n \n", "before_files": [{"content": "\"\"\"Sample plugin which responds to events.\"\"\"\n\nimport warnings\n\nfrom django.conf import settings\n\nfrom plugin import InvenTreePlugin\nfrom plugin.mixins import EventMixin\n\n\nclass EventPluginSample(EventMixin, InvenTreePlugin):\n \"\"\"A sample plugin which provides supports for triggered events.\"\"\"\n\n NAME = \"EventPlugin\"\n SLUG = \"sampleevent\"\n TITLE = \"Triggered Events\"\n\n def process_event(self, event, *args, **kwargs):\n \"\"\"Custom event processing.\"\"\"\n print(f\"Processing triggered event: '{event}'\")\n print(\"args:\", str(args))\n print(\"kwargs:\", str(kwargs))\n\n # Issue warning that we can test for\n if settings.PLUGIN_TESTING:\n warnings.warn(f'Event `{event}` triggered', stacklevel=2)\n", "path": "InvenTree/plugin/samples/event/event_sample.py"}]} | 994 | 205 |
gh_patches_debug_26444 | rasdani/github-patches | git_diff | ansible-collections__community.vmware-1456 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
vsphere_copy: Remove deprecated parameters
##### SUMMARY
The parameters `host` and `login` are deprecated and should be removed in version 3.
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
vsphere_copy
##### ADDITIONAL INFORMATION
#1194
https://github.com/ansible-collections/community.vmware/blob/67b9506a306da2caec9a2eda60003fd54a9df71e/plugins/modules/vsphere_copy.py#L143-L146
</issue>
<code>
[start of plugins/modules/vsphere_copy.py]
1 #!/usr/bin/python
2 # -*- coding: utf-8 -*-
3
4 # Copyright: (c) 2015, Dag Wieers (@dagwieers) <[email protected]>
5 # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
6 # SPDX-License-Identifier: GPL-3.0-or-later
7
8 from __future__ import absolute_import, division, print_function
9 __metaclass__ = type
10
11
12 DOCUMENTATION = r'''
13 ---
14 module: vsphere_copy
15 short_description: Copy a file to a VMware datastore
16 description:
17 - Upload files to a VMware datastore through a vCenter REST API.
18 author:
19 - Dag Wieers (@dagwieers)
20 options:
21 hostname:
22 aliases: ['host']
23 username:
24 aliases: ['login']
25 src:
26 description:
27 - The file to push to vCenter.
28 required: true
29 type: str
30 aliases: [ name ]
31 datacenter:
32 description:
33 - The datacenter on the vCenter server that holds the datastore.
34 required: false
35 type: str
36 datastore:
37 description:
38 - The datastore to push files to.
39 required: true
40 type: str
41 path:
42 description:
43 - The file to push to the datastore.
44 required: true
45 type: str
46 aliases: [ dest ]
47 timeout:
48 description:
49 - The timeout in seconds for the upload to the datastore.
50 default: 10
51 type: int
52
53 notes:
54 - "This module ought to be run from a system that can access the vCenter or the ESXi directly and has the file to transfer.
55 It can be the normal remote target or you can change it either by using C(transport: local) or using C(delegate_to)."
56 extends_documentation_fragment:
57 - community.vmware.vmware.documentation
58
59 '''
60
61 EXAMPLES = r'''
62 - name: Copy file to datastore using delegate_to
63 community.vmware.vsphere_copy:
64 hostname: '{{ vcenter_hostname }}'
65 username: '{{ vcenter_username }}'
66 password: '{{ vcenter_password }}'
67 src: /some/local/file
68 datacenter: DC1 Someplace
69 datastore: datastore1
70 path: some/remote/file
71 delegate_to: localhost
72
73 - name: Copy file to datastore when datacenter is inside folder called devel
74 community.vmware.vsphere_copy:
75 hostname: '{{ vcenter_hostname }}'
76 username: '{{ vcenter_username }}'
77 password: '{{ vcenter_password }}'
78 src: /some/local/file
79 datacenter: devel/DC1
80 datastore: datastore1
81 path: some/remote/file
82 delegate_to: localhost
83
84 - name: Copy file to datastore using other_system
85 community.vmware.vsphere_copy:
86 hostname: '{{ vcenter_hostname }}'
87 username: '{{ vcenter_username }}'
88 password: '{{ vcenter_password }}'
89 src: /other/local/file
90 datacenter: DC2 Someplace
91 datastore: datastore2
92 path: other/remote/file
93 delegate_to: other_system
94 '''
95
96 import atexit
97 import errno
98 import mmap
99 import os
100 import socket
101 import traceback
102
103 from ansible.module_utils.basic import AnsibleModule
104 from ansible.module_utils.six.moves.urllib.parse import urlencode, quote
105 from ansible.module_utils._text import to_native
106 from ansible.module_utils.urls import open_url
107 from ansible_collections.community.vmware.plugins.module_utils.vmware import vmware_argument_spec
108
109
110 def vmware_path(datastore, datacenter, path):
111 ''' Constructs a URL path that vSphere accepts reliably '''
112 path = "/folder/%s" % quote(path.lstrip("/"))
113 # Due to a software bug in vSphere, it fails to handle ampersand in datacenter names
114 # The solution is to do what vSphere does (when browsing) and double-encode ampersands, maybe others ?
115 if not path.startswith("/"):
116 path = "/" + path
117 params = dict(dsName=datastore)
118 if datacenter:
119 datacenter = datacenter.replace('&', '%26')
120 params["dcPath"] = datacenter
121 params = urlencode(params)
122 return "%s?%s" % (path, params)
123
124
125 def main():
126 argument_spec = vmware_argument_spec()
127 argument_spec.update(dict(
128 hostname=dict(required=False, aliases=['host']),
129 username=dict(required=False, aliases=['login']),
130 src=dict(required=True, aliases=['name']),
131 datacenter=dict(required=False),
132 datastore=dict(required=True),
133 path=dict(required=True, aliases=['dest'], type='str'),
134 timeout=dict(default=10, type='int'))
135 )
136
137 module = AnsibleModule(
138 argument_spec=argument_spec,
139 # Implementing check-mode using HEAD is impossible, since size/date is not 100% reliable
140 supports_check_mode=False,
141 )
142
143 if module.params.get('host'):
144 module.deprecate("The 'host' option is being replaced by 'hostname'", version='3.0.0', collection_name='community.vmware')
145 if module.params.get('login'):
146 module.deprecate("The 'login' option is being replaced by 'username'", version='3.0.0', collection_name='community.vmware')
147
148 hostname = module.params['hostname']
149 username = module.params['username']
150 password = module.params.get('password')
151 src = module.params.get('src')
152 datacenter = module.params.get('datacenter')
153 datastore = module.params.get('datastore')
154 path = module.params.get('path')
155 validate_certs = module.params.get('validate_certs')
156 timeout = module.params.get('timeout')
157
158 try:
159 fd = open(src, "rb")
160 atexit.register(fd.close)
161 except Exception as e:
162 module.fail_json(msg="Failed to open src file %s" % to_native(e))
163
164 if os.stat(src).st_size == 0:
165 data = ''
166 else:
167 data = mmap.mmap(fd.fileno(), 0, access=mmap.ACCESS_READ)
168 atexit.register(data.close)
169
170 remote_path = vmware_path(datastore, datacenter, path)
171
172 if not all([hostname, username, password]):
173 module.fail_json(msg="One of following parameter is missing - hostname, username, password")
174 url = 'https://%s%s' % (hostname, remote_path)
175
176 headers = {
177 "Content-Type": "application/octet-stream",
178 "Content-Length": str(len(data)),
179 }
180
181 r = None
182 try:
183 r = open_url(url, data=data, headers=headers, method='PUT', timeout=timeout,
184 url_username=username, url_password=password, validate_certs=validate_certs,
185 force_basic_auth=True)
186 except socket.error as e:
187 if isinstance(e.args, tuple):
188 if len(e.args) > 0:
189 if e[0] == errno.ECONNRESET:
190 # vSphere resets connection if the file is in use and cannot be replaced
191 module.fail_json(msg='Failed to upload, image probably in use', status=None, errno=e[0], reason=to_native(e), url=url)
192 else:
193 module.fail_json(msg=to_native(e))
194 else:
195 module.fail_json(msg=str(e), status=None, errno=e[0], reason=str(e),
196 url=url, exception=traceback.format_exc())
197 except Exception as e:
198 error_code = -1
199 try:
200 if isinstance(e[0], int):
201 error_code = e[0]
202 except (KeyError, TypeError):
203 pass
204 module.fail_json(msg=to_native(e), status=None, errno=error_code,
205 reason=to_native(e), url=url, exception=traceback.format_exc())
206
207 if not r:
208 module.fail_json(msg="Failed to upload", url=url,
209 errno=None, status=None, reason=None)
210 status = r.getcode()
211 if 200 <= status < 300:
212 module.exit_json(changed=True, status=status, reason=r.msg, url=url)
213 else:
214 length = r.headers.get('content-length', None)
215 if r.headers.get('transfer-encoding', '').lower() == 'chunked':
216 chunked = 1
217 else:
218 chunked = 0
219
220 module.fail_json(msg='Failed to upload', errno=None, status=status, reason=r.msg, length=length, headers=dict(r.headers), chunked=chunked, url=url)
221
222
223 if __name__ == '__main__':
224 main()
225
[end of plugins/modules/vsphere_copy.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/plugins/modules/vsphere_copy.py b/plugins/modules/vsphere_copy.py
--- a/plugins/modules/vsphere_copy.py
+++ b/plugins/modules/vsphere_copy.py
@@ -18,10 +18,6 @@
author:
- Dag Wieers (@dagwieers)
options:
- hostname:
- aliases: ['host']
- username:
- aliases: ['login']
src:
description:
- The file to push to vCenter.
@@ -125,8 +121,6 @@
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(
- hostname=dict(required=False, aliases=['host']),
- username=dict(required=False, aliases=['login']),
src=dict(required=True, aliases=['name']),
datacenter=dict(required=False),
datastore=dict(required=True),
@@ -140,11 +134,6 @@
supports_check_mode=False,
)
- if module.params.get('host'):
- module.deprecate("The 'host' option is being replaced by 'hostname'", version='3.0.0', collection_name='community.vmware')
- if module.params.get('login'):
- module.deprecate("The 'login' option is being replaced by 'username'", version='3.0.0', collection_name='community.vmware')
-
hostname = module.params['hostname']
username = module.params['username']
password = module.params.get('password')
| {"golden_diff": "diff --git a/plugins/modules/vsphere_copy.py b/plugins/modules/vsphere_copy.py\n--- a/plugins/modules/vsphere_copy.py\n+++ b/plugins/modules/vsphere_copy.py\n@@ -18,10 +18,6 @@\n author:\n - Dag Wieers (@dagwieers)\n options:\n- hostname:\n- aliases: ['host']\n- username:\n- aliases: ['login']\n src:\n description:\n - The file to push to vCenter.\n@@ -125,8 +121,6 @@\n def main():\n argument_spec = vmware_argument_spec()\n argument_spec.update(dict(\n- hostname=dict(required=False, aliases=['host']),\n- username=dict(required=False, aliases=['login']),\n src=dict(required=True, aliases=['name']),\n datacenter=dict(required=False),\n datastore=dict(required=True),\n@@ -140,11 +134,6 @@\n supports_check_mode=False,\n )\n \n- if module.params.get('host'):\n- module.deprecate(\"The 'host' option is being replaced by 'hostname'\", version='3.0.0', collection_name='community.vmware')\n- if module.params.get('login'):\n- module.deprecate(\"The 'login' option is being replaced by 'username'\", version='3.0.0', collection_name='community.vmware')\n-\n hostname = module.params['hostname']\n username = module.params['username']\n password = module.params.get('password')\n", "issue": "vsphere_copy: Remove deprecated parameters\n##### SUMMARY\r\nThe parameters `host` and `login` are deprecated and should be removed in version 3.\r\n\r\n##### ISSUE TYPE\r\n- Feature Idea\r\n\r\n##### COMPONENT NAME\r\nvsphere_copy\r\n\r\n##### ADDITIONAL INFORMATION\r\n#1194\r\n\r\nhttps://github.com/ansible-collections/community.vmware/blob/67b9506a306da2caec9a2eda60003fd54a9df71e/plugins/modules/vsphere_copy.py#L143-L146\n", "before_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n# Copyright: (c) 2015, Dag Wieers (@dagwieers) <[email protected]>\n# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)\n# SPDX-License-Identifier: GPL-3.0-or-later\n\nfrom __future__ import absolute_import, division, print_function\n__metaclass__ = type\n\n\nDOCUMENTATION = r'''\n---\nmodule: vsphere_copy\nshort_description: Copy a file to a VMware datastore\ndescription:\n - Upload files to a VMware datastore through a vCenter REST API.\nauthor:\n- Dag Wieers (@dagwieers)\noptions:\n hostname:\n aliases: ['host']\n username:\n aliases: ['login']\n src:\n description:\n - The file to push to vCenter.\n required: true\n type: str\n aliases: [ name ]\n datacenter:\n description:\n - The datacenter on the vCenter server that holds the datastore.\n required: false\n type: str\n datastore:\n description:\n - The datastore to push files to.\n required: true\n type: str\n path:\n description:\n - The file to push to the datastore.\n required: true\n type: str\n aliases: [ dest ]\n timeout:\n description:\n - The timeout in seconds for the upload to the datastore.\n default: 10\n type: int\n\nnotes:\n - \"This module ought to be run from a system that can access the vCenter or the ESXi directly and has the file to transfer.\n It can be the normal remote target or you can change it either by using C(transport: local) or using C(delegate_to).\"\nextends_documentation_fragment:\n- community.vmware.vmware.documentation\n\n'''\n\nEXAMPLES = r'''\n- name: Copy file to datastore using delegate_to\n community.vmware.vsphere_copy:\n hostname: '{{ vcenter_hostname }}'\n username: '{{ vcenter_username }}'\n password: '{{ vcenter_password }}'\n src: /some/local/file\n datacenter: DC1 Someplace\n datastore: datastore1\n path: some/remote/file\n delegate_to: 
localhost\n\n- name: Copy file to datastore when datacenter is inside folder called devel\n community.vmware.vsphere_copy:\n hostname: '{{ vcenter_hostname }}'\n username: '{{ vcenter_username }}'\n password: '{{ vcenter_password }}'\n src: /some/local/file\n datacenter: devel/DC1\n datastore: datastore1\n path: some/remote/file\n delegate_to: localhost\n\n- name: Copy file to datastore using other_system\n community.vmware.vsphere_copy:\n hostname: '{{ vcenter_hostname }}'\n username: '{{ vcenter_username }}'\n password: '{{ vcenter_password }}'\n src: /other/local/file\n datacenter: DC2 Someplace\n datastore: datastore2\n path: other/remote/file\n delegate_to: other_system\n'''\n\nimport atexit\nimport errno\nimport mmap\nimport os\nimport socket\nimport traceback\n\nfrom ansible.module_utils.basic import AnsibleModule\nfrom ansible.module_utils.six.moves.urllib.parse import urlencode, quote\nfrom ansible.module_utils._text import to_native\nfrom ansible.module_utils.urls import open_url\nfrom ansible_collections.community.vmware.plugins.module_utils.vmware import vmware_argument_spec\n\n\ndef vmware_path(datastore, datacenter, path):\n ''' Constructs a URL path that vSphere accepts reliably '''\n path = \"/folder/%s\" % quote(path.lstrip(\"/\"))\n # Due to a software bug in vSphere, it fails to handle ampersand in datacenter names\n # The solution is to do what vSphere does (when browsing) and double-encode ampersands, maybe others ?\n if not path.startswith(\"/\"):\n path = \"/\" + path\n params = dict(dsName=datastore)\n if datacenter:\n datacenter = datacenter.replace('&', '%26')\n params[\"dcPath\"] = datacenter\n params = urlencode(params)\n return \"%s?%s\" % (path, params)\n\n\ndef main():\n argument_spec = vmware_argument_spec()\n argument_spec.update(dict(\n hostname=dict(required=False, aliases=['host']),\n username=dict(required=False, aliases=['login']),\n src=dict(required=True, aliases=['name']),\n datacenter=dict(required=False),\n datastore=dict(required=True),\n path=dict(required=True, aliases=['dest'], type='str'),\n timeout=dict(default=10, type='int'))\n )\n\n module = AnsibleModule(\n argument_spec=argument_spec,\n # Implementing check-mode using HEAD is impossible, since size/date is not 100% reliable\n supports_check_mode=False,\n )\n\n if module.params.get('host'):\n module.deprecate(\"The 'host' option is being replaced by 'hostname'\", version='3.0.0', collection_name='community.vmware')\n if module.params.get('login'):\n module.deprecate(\"The 'login' option is being replaced by 'username'\", version='3.0.0', collection_name='community.vmware')\n\n hostname = module.params['hostname']\n username = module.params['username']\n password = module.params.get('password')\n src = module.params.get('src')\n datacenter = module.params.get('datacenter')\n datastore = module.params.get('datastore')\n path = module.params.get('path')\n validate_certs = module.params.get('validate_certs')\n timeout = module.params.get('timeout')\n\n try:\n fd = open(src, \"rb\")\n atexit.register(fd.close)\n except Exception as e:\n module.fail_json(msg=\"Failed to open src file %s\" % to_native(e))\n\n if os.stat(src).st_size == 0:\n data = ''\n else:\n data = mmap.mmap(fd.fileno(), 0, access=mmap.ACCESS_READ)\n atexit.register(data.close)\n\n remote_path = vmware_path(datastore, datacenter, path)\n\n if not all([hostname, username, password]):\n module.fail_json(msg=\"One of following parameter is missing - hostname, username, password\")\n url = 'https://%s%s' % (hostname, 
remote_path)\n\n headers = {\n \"Content-Type\": \"application/octet-stream\",\n \"Content-Length\": str(len(data)),\n }\n\n r = None\n try:\n r = open_url(url, data=data, headers=headers, method='PUT', timeout=timeout,\n url_username=username, url_password=password, validate_certs=validate_certs,\n force_basic_auth=True)\n except socket.error as e:\n if isinstance(e.args, tuple):\n if len(e.args) > 0:\n if e[0] == errno.ECONNRESET:\n # vSphere resets connection if the file is in use and cannot be replaced\n module.fail_json(msg='Failed to upload, image probably in use', status=None, errno=e[0], reason=to_native(e), url=url)\n else:\n module.fail_json(msg=to_native(e))\n else:\n module.fail_json(msg=str(e), status=None, errno=e[0], reason=str(e),\n url=url, exception=traceback.format_exc())\n except Exception as e:\n error_code = -1\n try:\n if isinstance(e[0], int):\n error_code = e[0]\n except (KeyError, TypeError):\n pass\n module.fail_json(msg=to_native(e), status=None, errno=error_code,\n reason=to_native(e), url=url, exception=traceback.format_exc())\n\n if not r:\n module.fail_json(msg=\"Failed to upload\", url=url,\n errno=None, status=None, reason=None)\n status = r.getcode()\n if 200 <= status < 300:\n module.exit_json(changed=True, status=status, reason=r.msg, url=url)\n else:\n length = r.headers.get('content-length', None)\n if r.headers.get('transfer-encoding', '').lower() == 'chunked':\n chunked = 1\n else:\n chunked = 0\n\n module.fail_json(msg='Failed to upload', errno=None, status=status, reason=r.msg, length=length, headers=dict(r.headers), chunked=chunked, url=url)\n\n\nif __name__ == '__main__':\n main()\n", "path": "plugins/modules/vsphere_copy.py"}]} | 3,068 | 313 |
gh_patches_debug_2653 | rasdani/github-patches | git_diff | esphome__esphome-docs-1148 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Update docs for new fan speed
## Description:
**Related issue (if applicable):** fixes https://github.com/esphome/issues/issues/1278
**Pull request in [esphome](https://github.com/esphome/esphome) with YAML changes (if applicable):** esphome/esphome#https://github.com/esphome/esphome/pull/1391
## Checklist:
- [ ] Branch: `next` is for changes and new documentation that will go public with the next ESPHome release. Fixes, changes and adjustments for the current release should be created against `current`.
- [ ] Link added in `/index.rst` when creating new documents for new components or cookbook.
</issue>
<code>
[start of conf.py]
1 #!/usr/bin/env python3
2 # -*- coding: utf-8 -*-
3 #
4 # esphome documentation build configuration file, created by
5 # sphinx-quickstart on Mon Jan 22 21:44:07 2018.
6 #
7 # This file is execfile()d with the current directory set to its
8 # containing dir.
9 #
10 # Note that not all possible configuration values are present in this
11 # autogenerated file.
12 #
13 # All configuration values have a default; values that are commented out
14 # serve to show the default.
15
16 # If extensions (or modules to document with autodoc) are in another directory,
17 # add these directories to sys.path here. If the directory is relative to the
18 # documentation root, use os.path.abspath to make it absolute, like shown here.
19 #
20 # import os
21 # import sys
22 # sys.path.insert(0, os.path.abspath('.'))
23 import hashlib
24 import os
25 import sys
26
27
28 sys.path.append(os.path.abspath("."))
29
30 # -- General configuration ------------------------------------------------
31
32 # If your documentation needs a minimal Sphinx version, state it here.
33 #
34 # needs_sphinx = '1.0'
35
36 # Add any Sphinx extension module names here, as strings. They can be
37 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
38 # ones.
39 extensions = [
40 "github",
41 "seo",
42 "sitemap",
43 "schema_doc",
44 ]
45
46 # Add any paths that contain templates here, relative to this directory.
47 templates_path = ["_templates"]
48
49 # The suffix(es) of source filenames.
50 # You can specify multiple suffix as a list of string:
51 #
52 # source_suffix = ['.rst', '.md']
53 source_suffix = ".rst"
54
55 # The master toctree document.
56 master_doc = "index"
57
58 # General information about the project.
59 project = "ESPHome"
60 copyright = "2019, Otto Winter"
61 html_show_copyright = False
62 html_show_sphinx = False
63 author = "Otto Winter"
64
65 # The version info for the project you're documenting, acts as replacement for
66 # |version| and |release|, also used in various other places throughout the
67 # built documents.
68 #
69 # The short X.Y version.
70 version = "1.17"
71 # The full version, including alpha/beta/rc tags.
72 release = "1.17.1"
73
74 # The language for content autogenerated by Sphinx. Refer to documentation
75 # for a list of supported languages.
76 #
77 # This is also used if you do content translation via gettext catalogs.
78 # Usually you set "language" from the command line for these cases.
79 language = "en"
80
81 # List of patterns, relative to source directory, that match files and
82 # directories to ignore when looking for source files.
83 # This patterns also effect to html_static_path and html_extra_path
84 exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
85
86 # The reST default role (used for this markup: `text`) to use for all documents.
87 # default_role = 'cpp:any'
88
89 # The name of the Pygments (syntax highlighting) style to use.
90 pygments_style = "xcode"
91
92 highlight_language = "yaml"
93
94 primary_domain = None
95
96 # If true, `todo` and `todoList` produce output, else they produce nothing.
97 todo_include_todos = False
98
99
100 # -- Options for HTML output ----------------------------------------------
101
102 # The theme to use for HTML and HTML Help pages. See the documentation for
103 # a list of builtin themes.
104 #
105 html_theme = "alabaster"
106
107 # Theme options are theme-specific and customize the look and feel of a theme
108 # further. For a list of options available for each theme, see the
109 # documentation.
110 #
111 html_baseurl = os.getenv("BASE_URL", "https://esphome.io")
112 with open("_static/custom.css", "rb") as f:
113 custom_css_hash = hashlib.md5(f.read()).hexdigest()[:8]
114
115 html_theme_options = {
116 # 'logo': 'logo-full.png',
117 "logo_name": False,
118 "show_related": False,
119 "sidebar_collapse": True,
120 "fixed_sidebar": True,
121 "show_powered_by": False,
122 }
123
124 html_context = {
125 "custom_css_hash": custom_css_hash,
126 }
127
128 html_logo = "images/logo-text.svg"
129 html_copy_source = True
130 html_show_sourcelink = False
131 html_last_updated_fmt = None
132 html_use_smartypants = False
133 html_title = "ESPHome"
134
135 # Add any paths that contain custom static files (such as style sheets) here,
136 # relative to this directory. They are copied after the builtin static files,
137 # so a file named "default.css" will overwrite the builtin "default.css".
138 html_static_path = ["_static"]
139
140 # Custom sidebar templates, must be a dictionary that maps document names
141 # to template names.
142 #
143 # This is required for the alabaster theme
144 # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
145 html_sidebars = {
146 "**": [
147 # 'about.html',
148 "searchbox.html",
149 "localtoc.html",
150 ]
151 }
152
153
154 # -- Options for HTMLHelp output ------------------------------------------
155
156 # Output file base name for HTML help builder.
157 htmlhelp_basename = "esphomedoc"
158
159
160 # -- Options for LaTeX output ---------------------------------------------
161
162 latex_elements = {
163 # The paper size ('letterpaper' or 'a4paper').
164 #
165 # 'papersize': 'letterpaper',
166 # The font size ('10pt', '11pt' or '12pt').
167 #
168 # 'pointsize': '10pt',
169 # Additional stuff for the LaTeX preamble.
170 #
171 # 'preamble': '',
172 # Latex figure (float) alignment
173 #
174 # 'figure_align': 'htbp',
175 }
176
177 # Grouping the document tree into LaTeX files. List of tuples
178 # (source start file, target name, title,
179 # author, documentclass [howto, manual, or own class]).
180 latex_documents = [
181 (master_doc, "esphome.tex", "ESPHome Documentation", "Otto Winter", "manual"),
182 ]
183
184 latex_engine = "xelatex"
185
186
187 # -- Options for manual page output ---------------------------------------
188
189 # One entry per manual page. List of tuples
190 # (source start file, name, description, authors, manual section).
191 man_pages = [(master_doc, "esphome", "ESPHome Documentation", [author], 1)]
192
193
194 # -- Options for Texinfo output -------------------------------------------
195
196 # Grouping the document tree into Texinfo files. List of tuples
197 # (source start file, target name, title, author,
198 # dir menu entry, description, category)
199 texinfo_documents = [
200 (
201 master_doc,
202 "esphome",
203 "ESPHome Documentation",
204 author,
205 "esphome",
206 "One line description of project.",
207 "Miscellaneous",
208 ),
209 ]
210 linkcheck_ignore = [r"https://github.com/.*", r"https://discord.gg/.*"]
211
[end of conf.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/conf.py b/conf.py
--- a/conf.py
+++ b/conf.py
@@ -69,7 +69,7 @@
# The short X.Y version.
version = "1.17"
# The full version, including alpha/beta/rc tags.
-release = "1.17.1"
+release = "1.17.2"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
| {"golden_diff": "diff --git a/conf.py b/conf.py\n--- a/conf.py\n+++ b/conf.py\n@@ -69,7 +69,7 @@\n # The short X.Y version.\n version = \"1.17\"\n # The full version, including alpha/beta/rc tags.\n-release = \"1.17.1\"\n+release = \"1.17.2\"\n \n # The language for content autogenerated by Sphinx. Refer to documentation\n # for a list of supported languages.\n", "issue": "Update docs for new fan speed\n## Description:\r\n\r\n\r\n**Related issue (if applicable):** fixes https://github.com/esphome/issues/issues/1278\r\n\r\n**Pull request in [esphome](https://github.com/esphome/esphome) with YAML changes (if applicable):** esphome/esphome#https://github.com/esphome/esphome/pull/1391\r\n\r\n## Checklist:\r\n\r\n - [ ] Branch: `next` is for changes and new documentation that will go public with the next ESPHome release. Fixes, changes and adjustments for the current release should be created against `current`.\r\n - [ ] Link added in `/index.rst` when creating new documents for new components or cookbook.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n#\n# esphome documentation build configuration file, created by\n# sphinx-quickstart on Mon Jan 22 21:44:07 2018.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\n# import os\n# import sys\n# sys.path.insert(0, os.path.abspath('.'))\nimport hashlib\nimport os\nimport sys\n\n\nsys.path.append(os.path.abspath(\".\"))\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n \"github\",\n \"seo\",\n \"sitemap\",\n \"schema_doc\",\n]\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\n# source_suffix = ['.rst', '.md']\nsource_suffix = \".rst\"\n\n# The master toctree document.\nmaster_doc = \"index\"\n\n# General information about the project.\nproject = \"ESPHome\"\ncopyright = \"2019, Otto Winter\"\nhtml_show_copyright = False\nhtml_show_sphinx = False\nauthor = \"Otto Winter\"\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = \"1.17\"\n# The full version, including alpha/beta/rc tags.\nrelease = \"1.17.1\"\n\n# The language for content autogenerated by Sphinx. 
Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = \"en\"\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\nexclude_patterns = [\"_build\", \"Thumbs.db\", \".DS_Store\"]\n\n# The reST default role (used for this markup: `text`) to use for all documents.\n# default_role = 'cpp:any'\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = \"xcode\"\n\nhighlight_language = \"yaml\"\n\nprimary_domain = None\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = \"alabaster\"\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#\nhtml_baseurl = os.getenv(\"BASE_URL\", \"https://esphome.io\")\nwith open(\"_static/custom.css\", \"rb\") as f:\n custom_css_hash = hashlib.md5(f.read()).hexdigest()[:8]\n\nhtml_theme_options = {\n # 'logo': 'logo-full.png',\n \"logo_name\": False,\n \"show_related\": False,\n \"sidebar_collapse\": True,\n \"fixed_sidebar\": True,\n \"show_powered_by\": False,\n}\n\nhtml_context = {\n \"custom_css_hash\": custom_css_hash,\n}\n\nhtml_logo = \"images/logo-text.svg\"\nhtml_copy_source = True\nhtml_show_sourcelink = False\nhtml_last_updated_fmt = None\nhtml_use_smartypants = False\nhtml_title = \"ESPHome\"\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\n#\n# This is required for the alabaster theme\n# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars\nhtml_sidebars = {\n \"**\": [\n # 'about.html',\n \"searchbox.html\",\n \"localtoc.html\",\n ]\n}\n\n\n# -- Options for HTMLHelp output ------------------------------------------\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = \"esphomedoc\"\n\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #\n # 'papersize': 'letterpaper',\n # The font size ('10pt', '11pt' or '12pt').\n #\n # 'pointsize': '10pt',\n # Additional stuff for the LaTeX preamble.\n #\n # 'preamble': '',\n # Latex figure (float) alignment\n #\n # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (master_doc, \"esphome.tex\", \"ESPHome Documentation\", \"Otto Winter\", \"manual\"),\n]\n\nlatex_engine = \"xelatex\"\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. 
List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [(master_doc, \"esphome\", \"ESPHome Documentation\", [author], 1)]\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (\n master_doc,\n \"esphome\",\n \"ESPHome Documentation\",\n author,\n \"esphome\",\n \"One line description of project.\",\n \"Miscellaneous\",\n ),\n]\nlinkcheck_ignore = [r\"https://github.com/.*\", r\"https://discord.gg/.*\"]\n", "path": "conf.py"}]} | 2,719 | 102 |
gh_patches_debug_32092 | rasdani/github-patches | git_diff | bokeh__bokeh-10229 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Simplify/refactor filters in bokehjs
ref https://github.com/bokeh/bokeh/pull/10084#discussion_r432112063
</issue>
<code>
[start of bokeh/models/filters.py]
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
3 # All rights reserved.
4 #
5 # The full license is in the file LICENSE.txt, distributed with this software.
6 #-----------------------------------------------------------------------------
7
8 #-----------------------------------------------------------------------------
9 # Boilerplate
10 #-----------------------------------------------------------------------------
11 import logging # isort:skip
12 log = logging.getLogger(__name__)
13
14 #-----------------------------------------------------------------------------
15 # Imports
16 #-----------------------------------------------------------------------------
17
18 # Bokeh imports
19 from ..core.properties import AnyRef, Bool, Dict, Either, Int, Seq, String
20 from ..model import Model
21
22 #-----------------------------------------------------------------------------
23 # Globals and constants
24 #-----------------------------------------------------------------------------
25
26 __all__ = (
27 'BooleanFilter',
28 'CustomJSFilter',
29 'Filter',
30 'GroupFilter',
31 'IndexFilter',
32 )
33
34 #-----------------------------------------------------------------------------
35 # General API
36 #-----------------------------------------------------------------------------
37
38 class Filter(Model):
39 ''' A Filter model represents a filtering operation that returns a row-wise subset of
40 data when applied to a ``ColumnDataSource``.
41 '''
42
43 filter = Either(Seq(Int), Seq(Bool), help="""
44 A list that can be either integer indices or booleans representing a row-wise subset of data.
45 """)
46
47 def __init__(self, *args, **kw):
48 if len(args) == 1 and "filter" not in kw:
49 kw["filter"] = args[0]
50
51 super().__init__(**kw)
52
53 class IndexFilter(Filter):
54 ''' An ``IndexFilter`` filters data by returning the subset of data at a given set of indices.
55 '''
56
57 indices = Seq(Int, help="""
58 A list of integer indices representing the subset of data to select.
59 """)
60
61 def __init__(self, *args, **kw):
62 if len(args) == 1 and "indices" not in kw:
63 kw["indices"] = args[0]
64
65 super().__init__(**kw)
66
67 class BooleanFilter(Filter):
68 ''' A ``BooleanFilter`` filters data by returning the subset of data corresponding to indices
69 where the values of the booleans array is True.
70 '''
71
72 booleans = Seq(Bool, help="""
73 A list of booleans indicating which rows of data to select.
74 """)
75
76 def __init__(self, *args, **kw):
77 if len(args) == 1 and "booleans" not in kw:
78 kw["booleans"] = args[0]
79
80 super().__init__(**kw)
81
82 class GroupFilter(Filter):
83 ''' A ``GroupFilter`` represents the rows of a ``ColumnDataSource`` where the values of the categorical
84 column column_name match the group variable.
85 '''
86
87 column_name = String(help="""
88 The name of the column to perform the group filtering operation on.
89 """)
90
91 group = String(help="""
92 The value of the column indicating the rows of data to keep.
93 """)
94
95 def __init__(self, *args, **kw):
96 if len(args) == 2 and "column_name" not in kw and "group" not in kw:
97 kw["column_name"] = args[0]
98 kw["group"] = args[1]
99
100 super().__init__(**kw)
101
102 class CustomJSFilter(Filter):
103 ''' Filter data sources with a custom defined JavaScript function.
104
105 .. warning::
106 The explicit purpose of this Bokeh Model is to embed *raw JavaScript
107 code* for a browser to execute. If any part of the code is derived
108 from untrusted user inputs, then you must take appropriate care to
109 sanitize the user input prior to passing to Bokeh.
110
111 '''
112
113 args = Dict(String, AnyRef, help="""
114 A mapping of names to Python objects. In particular those can be bokeh's models.
115 These objects are made available to the callback's code snippet as the values of
116 named parameters to the callback.
117 """)
118
119 code = String(default="", help="""
120 A snippet of JavaScript code to filter data contained in a columnar data source.
121 The code is made into the body of a function, and all of of the named objects in
122 ``args`` are available as parameters that the code can use. The variable
123 ``source`` will contain the data source that is associated with the ``CDSView`` this
124 filter is added to.
125
126 The code should either return the indices of the subset or an array of booleans
127 to use to subset data source rows.
128
129 Example:
130
131 .. code-block
132
133 code = '''
134 const indices = []
135 for (var i = 0; i <= source.data['some_column'].length; i++) {
136 if (source.data['some_column'][i] == 'some_value') {
137 indices.push(i)
138 }
139 }
140 return indices
141 '''
142
143 """)
144
145 #-----------------------------------------------------------------------------
146 # Dev API
147 #-----------------------------------------------------------------------------
148
149 #-----------------------------------------------------------------------------
150 # Private API
151 #-----------------------------------------------------------------------------
152
153 #-----------------------------------------------------------------------------
154 # Code
155 #-----------------------------------------------------------------------------
156
[end of bokeh/models/filters.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bokeh/models/filters.py b/bokeh/models/filters.py
--- a/bokeh/models/filters.py
+++ b/bokeh/models/filters.py
@@ -16,7 +16,8 @@
#-----------------------------------------------------------------------------
# Bokeh imports
-from ..core.properties import AnyRef, Bool, Dict, Either, Int, Seq, String
+from ..core.has_props import abstract
+from ..core.properties import AnyRef, Bool, Dict, Int, Seq, String
from ..model import Model
#-----------------------------------------------------------------------------
@@ -35,21 +36,12 @@
# General API
#-----------------------------------------------------------------------------
+@abstract
class Filter(Model):
''' A Filter model represents a filtering operation that returns a row-wise subset of
data when applied to a ``ColumnDataSource``.
'''
- filter = Either(Seq(Int), Seq(Bool), help="""
- A list that can be either integer indices or booleans representing a row-wise subset of data.
- """)
-
- def __init__(self, *args, **kw):
- if len(args) == 1 and "filter" not in kw:
- kw["filter"] = args[0]
-
- super().__init__(**kw)
-
class IndexFilter(Filter):
''' An ``IndexFilter`` filters data by returning the subset of data at a given set of indices.
'''
@@ -132,7 +124,7 @@
code = '''
const indices = []
- for (var i = 0; i <= source.data['some_column'].length; i++) {
+ for (let i = 0; i <= source.data['some_column'].length; i++) {
if (source.data['some_column'][i] == 'some_value') {
indices.push(i)
}
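
Editorial note on the diff above: `Filter` becomes an abstract base class and loses the generic `filter` property, so row subsets are always expressed through the concrete subclasses. A minimal usage sketch — written for this write-up, not taken from the PR, and assuming the usual `ColumnDataSource`/`CDSView` wiring from `bokeh.models`:

```python
# Hypothetical usage after the refactor: only concrete filter subclasses carry data.
from bokeh.models import BooleanFilter, CDSView, ColumnDataSource, IndexFilter

source = ColumnDataSource(data=dict(x=[1, 2, 3, 4], y=[10, 20, 30, 40]))

# Keep rows 0 and 2, either by index or by an equivalent boolean mask.
by_index = CDSView(source=source, filters=[IndexFilter([0, 2])])
by_mask = CDSView(source=source, filters=[BooleanFilter([True, False, True, False])])
```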
| {"golden_diff": "diff --git a/bokeh/models/filters.py b/bokeh/models/filters.py\n--- a/bokeh/models/filters.py\n+++ b/bokeh/models/filters.py\n@@ -16,7 +16,8 @@\n #-----------------------------------------------------------------------------\n \n # Bokeh imports\n-from ..core.properties import AnyRef, Bool, Dict, Either, Int, Seq, String\n+from ..core.has_props import abstract\n+from ..core.properties import AnyRef, Bool, Dict, Int, Seq, String\n from ..model import Model\n \n #-----------------------------------------------------------------------------\n@@ -35,21 +36,12 @@\n # General API\n #-----------------------------------------------------------------------------\n \n+@abstract\n class Filter(Model):\n ''' A Filter model represents a filtering operation that returns a row-wise subset of\n data when applied to a ``ColumnDataSource``.\n '''\n \n- filter = Either(Seq(Int), Seq(Bool), help=\"\"\"\n- A list that can be either integer indices or booleans representing a row-wise subset of data.\n- \"\"\")\n-\n- def __init__(self, *args, **kw):\n- if len(args) == 1 and \"filter\" not in kw:\n- kw[\"filter\"] = args[0]\n-\n- super().__init__(**kw)\n-\n class IndexFilter(Filter):\n ''' An ``IndexFilter`` filters data by returning the subset of data at a given set of indices.\n '''\n@@ -132,7 +124,7 @@\n \n code = '''\n const indices = []\n- for (var i = 0; i <= source.data['some_column'].length; i++) {\n+ for (let i = 0; i <= source.data['some_column'].length; i++) {\n if (source.data['some_column'][i] == 'some_value') {\n indices.push(i)\n }\n", "issue": "Simplify/refactor filters in bokehjs\nref https://github.com/bokeh/bokeh/pull/10084#discussion_r432112063\n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.\n# All rights reserved.\n#\n# The full license is in the file LICENSE.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\n#-----------------------------------------------------------------------------\n# Boilerplate\n#-----------------------------------------------------------------------------\nimport logging # isort:skip\nlog = logging.getLogger(__name__)\n\n#-----------------------------------------------------------------------------\n# Imports\n#-----------------------------------------------------------------------------\n\n# Bokeh imports\nfrom ..core.properties import AnyRef, Bool, Dict, Either, Int, Seq, String\nfrom ..model import Model\n\n#-----------------------------------------------------------------------------\n# Globals and constants\n#-----------------------------------------------------------------------------\n\n__all__ = (\n 'BooleanFilter',\n 'CustomJSFilter',\n 'Filter',\n 'GroupFilter',\n 'IndexFilter',\n)\n\n#-----------------------------------------------------------------------------\n# General API\n#-----------------------------------------------------------------------------\n\nclass Filter(Model):\n ''' A Filter model represents a filtering operation that returns a row-wise subset of\n data when applied to a ``ColumnDataSource``.\n '''\n\n filter = Either(Seq(Int), Seq(Bool), help=\"\"\"\n A list that can be either integer indices or booleans representing a row-wise subset of data.\n \"\"\")\n\n def __init__(self, *args, **kw):\n if len(args) == 1 and \"filter\" not in kw:\n kw[\"filter\"] = args[0]\n\n super().__init__(**kw)\n\nclass IndexFilter(Filter):\n 
''' An ``IndexFilter`` filters data by returning the subset of data at a given set of indices.\n '''\n\n indices = Seq(Int, help=\"\"\"\n A list of integer indices representing the subset of data to select.\n \"\"\")\n\n def __init__(self, *args, **kw):\n if len(args) == 1 and \"indices\" not in kw:\n kw[\"indices\"] = args[0]\n\n super().__init__(**kw)\n\nclass BooleanFilter(Filter):\n ''' A ``BooleanFilter`` filters data by returning the subset of data corresponding to indices\n where the values of the booleans array is True.\n '''\n\n booleans = Seq(Bool, help=\"\"\"\n A list of booleans indicating which rows of data to select.\n \"\"\")\n\n def __init__(self, *args, **kw):\n if len(args) == 1 and \"booleans\" not in kw:\n kw[\"booleans\"] = args[0]\n\n super().__init__(**kw)\n\nclass GroupFilter(Filter):\n ''' A ``GroupFilter`` represents the rows of a ``ColumnDataSource`` where the values of the categorical\n column column_name match the group variable.\n '''\n\n column_name = String(help=\"\"\"\n The name of the column to perform the group filtering operation on.\n \"\"\")\n\n group = String(help=\"\"\"\n The value of the column indicating the rows of data to keep.\n \"\"\")\n\n def __init__(self, *args, **kw):\n if len(args) == 2 and \"column_name\" not in kw and \"group\" not in kw:\n kw[\"column_name\"] = args[0]\n kw[\"group\"] = args[1]\n\n super().__init__(**kw)\n\nclass CustomJSFilter(Filter):\n ''' Filter data sources with a custom defined JavaScript function.\n\n .. warning::\n The explicit purpose of this Bokeh Model is to embed *raw JavaScript\n code* for a browser to execute. If any part of the code is derived\n from untrusted user inputs, then you must take appropriate care to\n sanitize the user input prior to passing to Bokeh.\n\n '''\n\n args = Dict(String, AnyRef, help=\"\"\"\n A mapping of names to Python objects. In particular those can be bokeh's models.\n These objects are made available to the callback's code snippet as the values of\n named parameters to the callback.\n \"\"\")\n\n code = String(default=\"\", help=\"\"\"\n A snippet of JavaScript code to filter data contained in a columnar data source.\n The code is made into the body of a function, and all of of the named objects in\n ``args`` are available as parameters that the code can use. The variable\n ``source`` will contain the data source that is associated with the ``CDSView`` this\n filter is added to.\n\n The code should either return the indices of the subset or an array of booleans\n to use to subset data source rows.\n\n Example:\n\n .. code-block\n\n code = '''\n const indices = []\n for (var i = 0; i <= source.data['some_column'].length; i++) {\n if (source.data['some_column'][i] == 'some_value') {\n indices.push(i)\n }\n }\n return indices\n '''\n\n \"\"\")\n\n#-----------------------------------------------------------------------------\n# Dev API\n#-----------------------------------------------------------------------------\n\n#-----------------------------------------------------------------------------\n# Private API\n#-----------------------------------------------------------------------------\n\n#-----------------------------------------------------------------------------\n# Code\n#-----------------------------------------------------------------------------\n", "path": "bokeh/models/filters.py"}]} | 2,000 | 396 |
gh_patches_debug_15868 | rasdani/github-patches | git_diff | pyinstaller__pyinstaller-2479 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
PyInstaller should not check for modules that are specific to platforms other than the current one
**Python extensions for Windows**
We should not be checking for these Windows-only modules on OS X and Linux.
</issue>
<code>
[start of PyInstaller/hooks/pre_safe_import_module/hook-win32com.py]
1 #-----------------------------------------------------------------------------
2 # Copyright (c) 2005-2017, PyInstaller Development Team.
3 #
4 # Distributed under the terms of the GNU General Public License with exception
5 # for distributing bootloader.
6 #
7 # The full license is in the file COPYING.txt, distributed with this software.
8 #-----------------------------------------------------------------------------
9
10 """
11 PyWin32 package 'win32com' extends it's __path__ attribute with win32comext
12 directory and thus PyInstaller is not able to find modules in it. For example
13 module 'win32com.shell' is in reality 'win32comext.shell'.
14
15 >>> win32com.__path__
16 ['win32com', 'C:\\Python27\\Lib\\site-packages\\win32comext']
17
18 """
19
20
21 import os
22
23 from PyInstaller.utils.hooks import logger, get_module_file_attribute
24
25
26 def pre_safe_import_module(api):
27 win32com_dir = os.path.dirname(get_module_file_attribute('win32com'))
28 comext_dir = os.path.join(os.path.dirname(win32com_dir), 'win32comext')
29 logger.debug('win32com: extending __path__ with dir %r' % comext_dir)
30 # Append the __path__ where PyInstaller will look for 'win32com' modules.'
31 api.append_package_path(comext_dir)
32
[end of PyInstaller/hooks/pre_safe_import_module/hook-win32com.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py b/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py
--- a/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py
+++ b/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py
@@ -21,11 +21,13 @@
import os
from PyInstaller.utils.hooks import logger, get_module_file_attribute
+from PyInstaller.compat import is_win
def pre_safe_import_module(api):
- win32com_dir = os.path.dirname(get_module_file_attribute('win32com'))
- comext_dir = os.path.join(os.path.dirname(win32com_dir), 'win32comext')
- logger.debug('win32com: extending __path__ with dir %r' % comext_dir)
- # Append the __path__ where PyInstaller will look for 'win32com' modules.'
- api.append_package_path(comext_dir)
+ if is_win:
+ win32com_dir = os.path.dirname(get_module_file_attribute('win32com'))
+ comext_dir = os.path.join(os.path.dirname(win32com_dir), 'win32comext')
+ logger.debug('win32com: extending __path__ with dir %r' % comext_dir)
+ # Append the __path__ where PyInstaller will look for 'win32com' modules.'
+ api.append_package_path(comext_dir)
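
The same guard written with an early return, as an editorial sketch only — it assumes, as the diff above already shows, that `PyInstaller.compat` exposes an `is_win` boolean:

```python
# Equivalent shape of the fix: skip the Windows-only probing on every other platform.
import os

from PyInstaller.utils.hooks import logger, get_module_file_attribute
from PyInstaller.compat import is_win


def pre_safe_import_module(api):
    if not is_win:
        return  # win32com / win32comext exist only on Windows; nothing to extend
    win32com_dir = os.path.dirname(get_module_file_attribute('win32com'))
    comext_dir = os.path.join(os.path.dirname(win32com_dir), 'win32comext')
    logger.debug('win32com: extending __path__ with dir %r' % comext_dir)
    # Append the __path__ where PyInstaller will look for 'win32com' modules.
    api.append_package_path(comext_dir)
```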
| {"golden_diff": "diff --git a/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py b/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py\n--- a/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py\n+++ b/PyInstaller/hooks/pre_safe_import_module/hook-win32com.py\n@@ -21,11 +21,13 @@\n import os\n \n from PyInstaller.utils.hooks import logger, get_module_file_attribute\n+from PyInstaller.compat import is_win\n \n \n def pre_safe_import_module(api):\n- win32com_dir = os.path.dirname(get_module_file_attribute('win32com'))\n- comext_dir = os.path.join(os.path.dirname(win32com_dir), 'win32comext')\n- logger.debug('win32com: extending __path__ with dir %r' % comext_dir)\n- # Append the __path__ where PyInstaller will look for 'win32com' modules.'\n- api.append_package_path(comext_dir)\n+ if is_win:\n+ win32com_dir = os.path.dirname(get_module_file_attribute('win32com'))\n+ comext_dir = os.path.join(os.path.dirname(win32com_dir), 'win32comext')\n+ logger.debug('win32com: extending __path__ with dir %r' % comext_dir)\n+ # Append the __path__ where PyInstaller will look for 'win32com' modules.'\n+ api.append_package_path(comext_dir)\n", "issue": "PyInstaller should not check for modules specific for other than the current platform\n**Python extensions for Windows**\r\n\r\nWe should not be checking for these modules in os x and linux. \n", "before_files": [{"content": "#-----------------------------------------------------------------------------\n# Copyright (c) 2005-2017, PyInstaller Development Team.\n#\n# Distributed under the terms of the GNU General Public License with exception\n# for distributing bootloader.\n#\n# The full license is in the file COPYING.txt, distributed with this software.\n#-----------------------------------------------------------------------------\n\n\"\"\"\nPyWin32 package 'win32com' extends it's __path__ attribute with win32comext\ndirectory and thus PyInstaller is not able to find modules in it. For example\nmodule 'win32com.shell' is in reality 'win32comext.shell'.\n\n>>> win32com.__path__\n['win32com', 'C:\\\\Python27\\\\Lib\\\\site-packages\\\\win32comext']\n\n\"\"\"\n\n\nimport os\n\nfrom PyInstaller.utils.hooks import logger, get_module_file_attribute\n\n\ndef pre_safe_import_module(api):\n win32com_dir = os.path.dirname(get_module_file_attribute('win32com'))\n comext_dir = os.path.join(os.path.dirname(win32com_dir), 'win32comext')\n logger.debug('win32com: extending __path__ with dir %r' % comext_dir)\n # Append the __path__ where PyInstaller will look for 'win32com' modules.'\n api.append_package_path(comext_dir)\n", "path": "PyInstaller/hooks/pre_safe_import_module/hook-win32com.py"}]} | 933 | 330 |
gh_patches_debug_1377 | rasdani/github-patches | git_diff | pyro-ppl__pyro-1629 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[FR] Add tutorial on implementing new effects
Users at PROBPROG 2018 requested a tutorial on high-level Pyro architecture including advanced features like poutines. This issue proposes adding a tutorial on implementing a new effect handler.
Whereas #1553 should aim to explain Pyro's architecture in a simplified way, this tutorial should prepare developers to make changes to Pyro.
</issue>
<code>
[start of pyro/poutine/runtime.py]
1 import functools
2
3 from pyro.params.param_store import _MODULE_NAMESPACE_DIVIDER, ParamStoreDict # noqa: F401
4
5 # the global pyro stack
6 _PYRO_STACK = []
7
8 # the global ParamStore
9 _PYRO_PARAM_STORE = ParamStoreDict()
10
11
12 class _DimAllocator(object):
13 """
14 Dimension allocator for internal use by :class:`plate`.
15 There is a single global instance.
16
17 Note that dimensions are indexed from the right, e.g. -1, -2.
18 """
19 def __init__(self):
20 self._stack = [] # in reverse orientation of log_prob.shape
21
22 def allocate(self, name, dim):
23 """
24 Allocate a dimension to an :class:`plate` with given name.
25 Dim should be either None for automatic allocation or a negative
26 integer for manual allocation.
27 """
28 if name in self._stack:
29 raise ValueError('duplicate plate "{}"'.format(name))
30 if dim is None:
31 # Automatically designate the rightmost available dim for allocation.
32 dim = -1
33 while -dim <= len(self._stack) and self._stack[-1 - dim] is not None:
34 dim -= 1
35 elif dim >= 0:
36 raise ValueError('Expected dim < 0 to index from the right, actual {}'.format(dim))
37
38 # Allocate the requested dimension.
39 while dim < -len(self._stack):
40 self._stack.append(None)
41 if self._stack[-1 - dim] is not None:
42 raise ValueError('\n'.join([
43 'at plates "{}" and "{}", collide at dim={}'.format(name, self._stack[-1 - dim], dim),
44 '\nTry moving the dim of one plate to the left, e.g. dim={}'.format(dim - 1)]))
45 self._stack[-1 - dim] = name
46 return dim
47
48 def free(self, name, dim):
49 """
50 Free a dimension.
51 """
52 free_idx = -1 - dim # stack index to free
53 assert self._stack[free_idx] == name
54 self._stack[free_idx] = None
55 while self._stack and self._stack[-1] is None:
56 self._stack.pop()
57
58
59 # Handles placement of plate dimensions
60 _DIM_ALLOCATOR = _DimAllocator()
61
62
63 class _EnumAllocator(object):
64 """
65 Dimension allocator for internal use by :func:`~pyro.poutine.markov`.
66 There is a single global instance.
67
68 Note that dimensions are indexed from the right, e.g. -1, -2.
69 Note that ids are simply nonnegative integers here.
70 """
71 def set_first_available_dim(self, first_available_dim):
72 """
73 Set the first available dim, which should be to the left of all
74 :class:`plate` dimensions, e.g. ``-1 - max_plate_nesting``. This should
75 be called once per program. In SVI this should be called only once per
76 (guide,model) pair.
77 """
78 assert first_available_dim < 0, first_available_dim
79 self.next_available_dim = first_available_dim
80 self.next_available_id = 0
81 self.dim_to_id = {} # only the global ids
82
83 def allocate(self, scope_dims=None):
84 """
85 Allocate a new recyclable dim and a unique id.
86
87 If ``scope_dims`` is None, this allocates a global enumeration dim
88 that will never be recycled. If ``scope_dims`` is specified, this
89 allocates a local enumeration dim that can be reused by at any other
90 local site whose scope excludes this site.
91
92 :param set scope_dims: An optional set of (negative integer)
93 local enumeration dims to avoid when allocating this dim.
94 :return: A pair ``(dim, id)``, where ``dim`` is a negative integer
95 and ``id`` is a nonnegative integer.
96 :rtype: tuple
97 """
98 id_ = self.next_available_id
99 self.next_available_id += 1
100
101 dim = self.next_available_dim
102 if dim == -float('inf'):
103 raise ValueError("max_plate_nesting must be set to a finite value for parallel enumeration")
104 if scope_dims is None:
105 # allocate a new global dimension
106 self.next_available_dim -= 1
107 self.dim_to_id[dim] = id_
108 else:
109 # allocate a new local dimension
110 while dim in scope_dims:
111 dim -= 1
112
113 return dim, id_
114
115
116 # Handles placement of enumeration dimensions
117 _ENUM_ALLOCATOR = _EnumAllocator()
118
119
120 class NonlocalExit(Exception):
121 """
122 Exception for exiting nonlocally from poutine execution.
123
124 Used by poutine.EscapeMessenger to return site information.
125 """
126 def __init__(self, site, *args, **kwargs):
127 """
128 :param site: message at a pyro site
129
130 constructor. Just stores the input site.
131 """
132 super(NonlocalExit, self).__init__(*args, **kwargs)
133 self.site = site
134
135 def reset_stack(self):
136 """
137 Reset the state of the frames remaining in the stack.
138 Necessary for multiple re-executions in poutine.queue.
139 """
140 for frame in reversed(_PYRO_STACK):
141 frame._reset()
142 if type(frame).__name__ == "BlockMessenger" and frame.hide_fn(self.site):
143 break
144
145
146 def default_process_message(msg):
147 """
148 Default method for processing messages in inference.
149 :param msg: a message to be processed
150 :returns: None
151 """
152 if msg["done"] or msg["is_observed"]:
153 msg["done"] = True
154 return msg
155
156 msg["value"] = msg["fn"](*msg["args"], **msg["kwargs"])
157
158 # after fn has been called, update msg to prevent it from being called again.
159 msg["done"] = True
160
161
162 def apply_stack(initial_msg):
163 """
164 Execute the effect stack at a single site according to the following scheme:
165
166 1. For each ``Messenger`` in the stack from bottom to top,
167 execute ``Messenger._process_message`` with the message;
168 if the message field "stop" is True, stop;
169 otherwise, continue
170 2. Apply default behavior (``default_process_message``) to finish remaining site execution
171 3. For each ``Messenger`` in the stack from top to bottom,
172 execute ``_postprocess_message`` to update the message and internal messenger state with the site results
173 4. If the message field "continuation" is not ``None``, call it with the message
174
175 :param dict initial_msg: the starting version of the trace site
176 :returns: ``None``
177 """
178 stack = _PYRO_STACK
179 # TODO check at runtime if stack is valid
180
181 # msg is used to pass information up and down the stack
182 msg = initial_msg
183
184 pointer = 0
185 # go until time to stop?
186 for frame in reversed(stack):
187
188 pointer = pointer + 1
189
190 frame._process_message(msg)
191
192 if msg["stop"]:
193 break
194
195 default_process_message(msg)
196
197 for frame in stack[-pointer:]: # reversed(stack[0:pointer])
198 frame._postprocess_message(msg)
199
200 cont = msg["continuation"]
201 if cont is not None:
202 cont(msg)
203
204 return None
205
206
207 def am_i_wrapped():
208 """
209 Checks whether the current computation is wrapped in a poutine.
210 :returns: bool
211 """
212 return len(_PYRO_STACK) > 0
213
214
215 def effectful(fn=None, type=None):
216 """
217 :param fn: function or callable that performs an effectful computation
218 :param str type: the type label of the operation, e.g. `"sample"`
219
220 Wrapper for calling :func:~`pyro.poutine.runtime.apply_stack` to apply any active effects.
221 """
222 if fn is None:
223 return functools.partial(effectful, type=type)
224
225 if getattr(fn, "_is_effectful", None):
226 return fn
227
228 assert type is not None, "must provide a type label for operation {}".format(fn)
229 assert type != "message", "cannot use 'message' as keyword"
230
231 def _fn(*args, **kwargs):
232
233 name = kwargs.pop("name", None)
234 infer = kwargs.pop("infer", {})
235
236 value = kwargs.pop("obs", None)
237 is_observed = value is not None
238
239 if not am_i_wrapped():
240 return fn(*args, **kwargs)
241 else:
242 msg = {
243 "type": type,
244 "name": name,
245 "fn": fn,
246 "is_observed": is_observed,
247 "args": args,
248 "kwargs": kwargs,
249 "value": value,
250 "scale": 1.0,
251 "mask": None,
252 "cond_indep_stack": (),
253 "done": False,
254 "stop": False,
255 "continuation": None,
256 "infer": infer,
257 }
258 # apply the stack and return its return value
259 apply_stack(msg)
260 return msg["value"]
261 _fn._is_effectful = True
262 return _fn
263
[end of pyro/poutine/runtime.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pyro/poutine/runtime.py b/pyro/poutine/runtime.py
--- a/pyro/poutine/runtime.py
+++ b/pyro/poutine/runtime.py
@@ -149,7 +149,7 @@
:param msg: a message to be processed
:returns: None
"""
- if msg["done"] or msg["is_observed"]:
+ if msg["done"] or msg["is_observed"] or msg["value"] is not None:
msg["done"] = True
return msg
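
Editorial aside: the new `msg["value"] is not None` guard matters mainly for custom effect handlers — the very topic the requested tutorial would cover. A minimal hypothetical handler (not from the Pyro source; it assumes the `Messenger` base class in `pyro.poutine.messenger`) that injects a fixed value at sample sites:

```python
from pyro.poutine.messenger import Messenger


class ConstantMessenger(Messenger):
    """Hypothetical effect handler: force unobserved sample sites to a fixed value."""

    def __init__(self, value):
        super().__init__()
        self.value = value

    def _process_message(self, msg):
        if msg["type"] == "sample" and not msg["is_observed"]:
            # With the patched default_process_message, a non-None value set here
            # is kept and msg["fn"] is not called again to overwrite it.
            msg["value"] = self.value
```

Without the guard, `default_process_message` would still call `msg["fn"]` and clobber the injected value.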
| {"golden_diff": "diff --git a/pyro/poutine/runtime.py b/pyro/poutine/runtime.py\n--- a/pyro/poutine/runtime.py\n+++ b/pyro/poutine/runtime.py\n@@ -149,7 +149,7 @@\n :param msg: a message to be processed\n :returns: None\n \"\"\"\n- if msg[\"done\"] or msg[\"is_observed\"]:\n+ if msg[\"done\"] or msg[\"is_observed\"] or msg[\"value\"] is not None:\n msg[\"done\"] = True\n return msg\n", "issue": "[FR] Add tutorial on implementing new effects\nUsers at PROBPROG 2018 requested a tutorial on high-level Pyro architecture including advanced features like poutines. This issue proposes adding a tutorial on implementing a new effect handler.\r\n\r\nWhereas #1553 should aim to explain Pyro's architecture in a simplified way, this tutorial should prepare developers to make changes to Pyro.\n", "before_files": [{"content": "import functools\n\nfrom pyro.params.param_store import _MODULE_NAMESPACE_DIVIDER, ParamStoreDict # noqa: F401\n\n# the global pyro stack\n_PYRO_STACK = []\n\n# the global ParamStore\n_PYRO_PARAM_STORE = ParamStoreDict()\n\n\nclass _DimAllocator(object):\n \"\"\"\n Dimension allocator for internal use by :class:`plate`.\n There is a single global instance.\n\n Note that dimensions are indexed from the right, e.g. -1, -2.\n \"\"\"\n def __init__(self):\n self._stack = [] # in reverse orientation of log_prob.shape\n\n def allocate(self, name, dim):\n \"\"\"\n Allocate a dimension to an :class:`plate` with given name.\n Dim should be either None for automatic allocation or a negative\n integer for manual allocation.\n \"\"\"\n if name in self._stack:\n raise ValueError('duplicate plate \"{}\"'.format(name))\n if dim is None:\n # Automatically designate the rightmost available dim for allocation.\n dim = -1\n while -dim <= len(self._stack) and self._stack[-1 - dim] is not None:\n dim -= 1\n elif dim >= 0:\n raise ValueError('Expected dim < 0 to index from the right, actual {}'.format(dim))\n\n # Allocate the requested dimension.\n while dim < -len(self._stack):\n self._stack.append(None)\n if self._stack[-1 - dim] is not None:\n raise ValueError('\\n'.join([\n 'at plates \"{}\" and \"{}\", collide at dim={}'.format(name, self._stack[-1 - dim], dim),\n '\\nTry moving the dim of one plate to the left, e.g. dim={}'.format(dim - 1)]))\n self._stack[-1 - dim] = name\n return dim\n\n def free(self, name, dim):\n \"\"\"\n Free a dimension.\n \"\"\"\n free_idx = -1 - dim # stack index to free\n assert self._stack[free_idx] == name\n self._stack[free_idx] = None\n while self._stack and self._stack[-1] is None:\n self._stack.pop()\n\n\n# Handles placement of plate dimensions\n_DIM_ALLOCATOR = _DimAllocator()\n\n\nclass _EnumAllocator(object):\n \"\"\"\n Dimension allocator for internal use by :func:`~pyro.poutine.markov`.\n There is a single global instance.\n\n Note that dimensions are indexed from the right, e.g. -1, -2.\n Note that ids are simply nonnegative integers here.\n \"\"\"\n def set_first_available_dim(self, first_available_dim):\n \"\"\"\n Set the first available dim, which should be to the left of all\n :class:`plate` dimensions, e.g. ``-1 - max_plate_nesting``. This should\n be called once per program. 
In SVI this should be called only once per\n (guide,model) pair.\n \"\"\"\n assert first_available_dim < 0, first_available_dim\n self.next_available_dim = first_available_dim\n self.next_available_id = 0\n self.dim_to_id = {} # only the global ids\n\n def allocate(self, scope_dims=None):\n \"\"\"\n Allocate a new recyclable dim and a unique id.\n\n If ``scope_dims`` is None, this allocates a global enumeration dim\n that will never be recycled. If ``scope_dims`` is specified, this\n allocates a local enumeration dim that can be reused by at any other\n local site whose scope excludes this site.\n\n :param set scope_dims: An optional set of (negative integer)\n local enumeration dims to avoid when allocating this dim.\n :return: A pair ``(dim, id)``, where ``dim`` is a negative integer\n and ``id`` is a nonnegative integer.\n :rtype: tuple\n \"\"\"\n id_ = self.next_available_id\n self.next_available_id += 1\n\n dim = self.next_available_dim\n if dim == -float('inf'):\n raise ValueError(\"max_plate_nesting must be set to a finite value for parallel enumeration\")\n if scope_dims is None:\n # allocate a new global dimension\n self.next_available_dim -= 1\n self.dim_to_id[dim] = id_\n else:\n # allocate a new local dimension\n while dim in scope_dims:\n dim -= 1\n\n return dim, id_\n\n\n# Handles placement of enumeration dimensions\n_ENUM_ALLOCATOR = _EnumAllocator()\n\n\nclass NonlocalExit(Exception):\n \"\"\"\n Exception for exiting nonlocally from poutine execution.\n\n Used by poutine.EscapeMessenger to return site information.\n \"\"\"\n def __init__(self, site, *args, **kwargs):\n \"\"\"\n :param site: message at a pyro site\n\n constructor. Just stores the input site.\n \"\"\"\n super(NonlocalExit, self).__init__(*args, **kwargs)\n self.site = site\n\n def reset_stack(self):\n \"\"\"\n Reset the state of the frames remaining in the stack.\n Necessary for multiple re-executions in poutine.queue.\n \"\"\"\n for frame in reversed(_PYRO_STACK):\n frame._reset()\n if type(frame).__name__ == \"BlockMessenger\" and frame.hide_fn(self.site):\n break\n\n\ndef default_process_message(msg):\n \"\"\"\n Default method for processing messages in inference.\n :param msg: a message to be processed\n :returns: None\n \"\"\"\n if msg[\"done\"] or msg[\"is_observed\"]:\n msg[\"done\"] = True\n return msg\n\n msg[\"value\"] = msg[\"fn\"](*msg[\"args\"], **msg[\"kwargs\"])\n\n # after fn has been called, update msg to prevent it from being called again.\n msg[\"done\"] = True\n\n\ndef apply_stack(initial_msg):\n \"\"\"\n Execute the effect stack at a single site according to the following scheme:\n\n 1. For each ``Messenger`` in the stack from bottom to top,\n execute ``Messenger._process_message`` with the message;\n if the message field \"stop\" is True, stop;\n otherwise, continue\n 2. Apply default behavior (``default_process_message``) to finish remaining site execution\n 3. For each ``Messenger`` in the stack from top to bottom,\n execute ``_postprocess_message`` to update the message and internal messenger state with the site results\n 4. 
If the message field \"continuation\" is not ``None``, call it with the message\n\n :param dict initial_msg: the starting version of the trace site\n :returns: ``None``\n \"\"\"\n stack = _PYRO_STACK\n # TODO check at runtime if stack is valid\n\n # msg is used to pass information up and down the stack\n msg = initial_msg\n\n pointer = 0\n # go until time to stop?\n for frame in reversed(stack):\n\n pointer = pointer + 1\n\n frame._process_message(msg)\n\n if msg[\"stop\"]:\n break\n\n default_process_message(msg)\n\n for frame in stack[-pointer:]: # reversed(stack[0:pointer])\n frame._postprocess_message(msg)\n\n cont = msg[\"continuation\"]\n if cont is not None:\n cont(msg)\n\n return None\n\n\ndef am_i_wrapped():\n \"\"\"\n Checks whether the current computation is wrapped in a poutine.\n :returns: bool\n \"\"\"\n return len(_PYRO_STACK) > 0\n\n\ndef effectful(fn=None, type=None):\n \"\"\"\n :param fn: function or callable that performs an effectful computation\n :param str type: the type label of the operation, e.g. `\"sample\"`\n\n Wrapper for calling :func:~`pyro.poutine.runtime.apply_stack` to apply any active effects.\n \"\"\"\n if fn is None:\n return functools.partial(effectful, type=type)\n\n if getattr(fn, \"_is_effectful\", None):\n return fn\n\n assert type is not None, \"must provide a type label for operation {}\".format(fn)\n assert type != \"message\", \"cannot use 'message' as keyword\"\n\n def _fn(*args, **kwargs):\n\n name = kwargs.pop(\"name\", None)\n infer = kwargs.pop(\"infer\", {})\n\n value = kwargs.pop(\"obs\", None)\n is_observed = value is not None\n\n if not am_i_wrapped():\n return fn(*args, **kwargs)\n else:\n msg = {\n \"type\": type,\n \"name\": name,\n \"fn\": fn,\n \"is_observed\": is_observed,\n \"args\": args,\n \"kwargs\": kwargs,\n \"value\": value,\n \"scale\": 1.0,\n \"mask\": None,\n \"cond_indep_stack\": (),\n \"done\": False,\n \"stop\": False,\n \"continuation\": None,\n \"infer\": infer,\n }\n # apply the stack and return its return value\n apply_stack(msg)\n return msg[\"value\"]\n _fn._is_effectful = True\n return _fn\n", "path": "pyro/poutine/runtime.py"}]} | 3,287 | 118 |
gh_patches_debug_6606 | rasdani/github-patches | git_diff | ansible__ansible-modules-extras-2669 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
modprobe: Call 'modprobe -r' instead of 'rmmod' for absent?
##### ISSUE TYPE
- Feature Idea
##### COMPONENT NAME
modprobe
##### ANSIBLE VERSION
```
ansible 2.0.1.0
config file =
configured module search path = Default w/o overrides
```
##### CONFIGURATION
N/A
##### OS / ENVIRONMENT
N/A (linux target)
##### SUMMARY
The current implementation of the modprobe module uses the `rmmod` command to remove a kernel module.
https://github.com/ansible/ansible-modules-extras/blob/stable-2.1/system/modprobe.py#L114
Why don't we use `modprobe -r` instead of `rmmod` here?
`modprobe -r` would be better because:

1. It also unloads dependencies that are no longer used
2. It follows straightforwardly from the Ansible module's name (`modprobe`)
##### STEPS TO REPRODUCE
I was trying to unload the sb_edac module from my server (it conflicts with some of the server's hardware monitoring); sb_edac depends on edac_core, and edac_core was loaded only for sb_edac.
Before applying the playbook, on the target server:
```
server# lsmod | grep edac
sb_edac 28672 0
edac_core 53248 1 sb_edac
```
playbook (snippet)
```
- name: unload edac modules
modprobe:
name: sb_edac
state: absent
```
##### EXPECTED RESULTS
The edac_core module is unloaded as well, since it is no longer used.
##### ACTUAL RESULTS
After applying the playbook, on the target server:
```
server# lsmod | grep edac
edac_core 53248 0
```
</issue>
<code>
[start of system/modprobe.py]
1 #!/usr/bin/python
2 #coding: utf-8 -*-
3
4 # (c) 2013, David Stygstra <[email protected]>
5 #
6 # This file is part of Ansible
7 #
8 # This module is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This software is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the GNU General Public License
19 # along with this software. If not, see <http://www.gnu.org/licenses/>.
20
21
22 DOCUMENTATION = '''
23 ---
24 module: modprobe
25 short_description: Add or remove kernel modules
26 requirements: []
27 version_added: 1.4
28 author:
29 - "David Stygstra (@stygstra)"
30 - "Julien Dauphant"
31 - "Matt Jeffery"
32 description:
33 - Add or remove kernel modules.
34 options:
35 name:
36 required: true
37 description:
38 - Name of kernel module to manage.
39 state:
40 required: false
41 default: "present"
42 choices: [ present, absent ]
43 description:
44 - Whether the module should be present or absent.
45 params:
46 required: false
47 default: ""
48 version_added: "1.6"
49 description:
50 - Modules parameters.
51 '''
52
53 EXAMPLES = '''
54 # Add the 802.1q module
55 - modprobe: name=8021q state=present
56 # Add the dummy module
57 - modprobe: name=dummy state=present params="numdummies=2"
58 '''
59
60 from ansible.module_utils.basic import *
61 from ansible.module_utils.pycompat24 import get_exception
62 import shlex
63
64
65 def main():
66 module = AnsibleModule(
67 argument_spec={
68 'name': {'required': True},
69 'state': {'default': 'present', 'choices': ['present', 'absent']},
70 'params': {'default': ''},
71 },
72 supports_check_mode=True,
73 )
74 args = {
75 'changed': False,
76 'failed': False,
77 'name': module.params['name'],
78 'state': module.params['state'],
79 'params': module.params['params'],
80 }
81
82 # Check if module is present
83 try:
84 modules = open('/proc/modules')
85 present = False
86 module_name = args['name'].replace('-', '_') + ' '
87 for line in modules:
88 if line.startswith(module_name):
89 present = True
90 break
91 modules.close()
92 except IOError:
93 e = get_exception()
94 module.fail_json(msg=str(e), **args)
95
96 # Check only; don't modify
97 if module.check_mode:
98 if args['state'] == 'present' and not present:
99 changed = True
100 elif args['state'] == 'absent' and present:
101 changed = True
102 else:
103 changed = False
104 module.exit_json(changed=changed)
105
106 # Add/remove module as needed
107 if args['state'] == 'present':
108 if not present:
109 command = [module.get_bin_path('modprobe', True), args['name']]
110 command.extend(shlex.split(args['params']))
111 rc, _, err = module.run_command(command)
112 if rc != 0:
113 module.fail_json(msg=err, **args)
114 args['changed'] = True
115 elif args['state'] == 'absent':
116 if present:
117 rc, _, err = module.run_command([module.get_bin_path('rmmod', True), args['name']])
118 if rc != 0:
119 module.fail_json(msg=err, **args)
120 args['changed'] = True
121
122 module.exit_json(**args)
123
124 main()
125
[end of system/modprobe.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/system/modprobe.py b/system/modprobe.py
--- a/system/modprobe.py
+++ b/system/modprobe.py
@@ -114,7 +114,7 @@
args['changed'] = True
elif args['state'] == 'absent':
if present:
- rc, _, err = module.run_command([module.get_bin_path('rmmod', True), args['name']])
+ rc, _, err = module.run_command([module.get_bin_path('modprobe', True), '-r', args['name']])
if rc != 0:
module.fail_json(msg=err, **args)
args['changed'] = True
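
As an editorial illustration (not part of the module), the changed branch extracted into a stand-alone helper makes the behavioural difference explicit: `modprobe -r` also drops dependencies that become unused — e.g. edac_core once sb_edac is gone — whereas `rmmod` removes only the named module.

```python
def remove_module(module, name):
    """Unload a kernel module plus any dependencies that are no longer used."""
    modprobe = module.get_bin_path('modprobe', True)
    rc, _, err = module.run_command([modprobe, '-r', name])
    if rc != 0:
        module.fail_json(msg=err, name=name)
    return True  # state changed
```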
| {"golden_diff": "diff --git a/system/modprobe.py b/system/modprobe.py\n--- a/system/modprobe.py\n+++ b/system/modprobe.py\n@@ -114,7 +114,7 @@\n args['changed'] = True\n elif args['state'] == 'absent':\n if present:\n- rc, _, err = module.run_command([module.get_bin_path('rmmod', True), args['name']])\n+ rc, _, err = module.run_command([module.get_bin_path('modprobe', True), '-r', args['name']])\n if rc != 0:\n module.fail_json(msg=err, **args)\n args['changed'] = True\n", "issue": "modprobe: Call 'modprobe -r' instasd of 'rmmod' for absent?\n##### ISSUE TYPE\n- Feature Idea\n##### COMPONENT NAME\n\nmodprobe\n##### ANSIBLE VERSION\n\n```\nansible 2.0.1.0\n config file =\n configured module search path = Default w/o overrides\n```\n##### CONFIGURATION\n\nN/A\n##### OS / ENVIRONMENT\n\nN/A (linux target)\n##### SUMMARY\n\nCurrent implementation of modprobe module uses `rmmod` command to remove kernel module.\nhttps://github.com/ansible/ansible-modules-extras/blob/stable-2.1/system/modprobe.py#L114\n\nWhy don't we use `modprobe -r` instead of `rmmod` here?\n\n`modprobe -r` would be better because; \n1. It will also unload unused modules\n2. Straight forward from module name\n##### STEPS TO REPRODUCE\n\nI was trying to unload sb_edac module from my server (since it conflict with some hardware monitoring of server), the module depends on edac_core and edac_core was loaded only for sb_edac.\n\nBefore applying playbook, on the target server.\n\n```\nserver# lsmod | grep edac\nsb_edac 28672 0\nedac_core 53248 1 sb_edac\n```\n\nplaybook (snippet)\n\n```\n- name: unload edac modules\n modprobe:\n name: sb_edac\n state: absent\n```\n##### EXPECTED RESULTS\n\nedac_core module unloaded, since it no longer be used.\n##### ACTUAL RESULTS\n\nAfter applying playbook, on the target server.\n\n```\nserver# lsmod | grep edac\nedac_core 53248 0\n```\n\n", "before_files": [{"content": "#!/usr/bin/python\n#coding: utf-8 -*-\n\n# (c) 2013, David Stygstra <[email protected]>\n#\n# This file is part of Ansible\n#\n# This module is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This software is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this software. 
If not, see <http://www.gnu.org/licenses/>.\n\n\nDOCUMENTATION = '''\n---\nmodule: modprobe\nshort_description: Add or remove kernel modules\nrequirements: []\nversion_added: 1.4\nauthor:\n - \"David Stygstra (@stygstra)\" \n - \"Julien Dauphant\"\n - \"Matt Jeffery\"\ndescription:\n - Add or remove kernel modules.\noptions:\n name:\n required: true\n description:\n - Name of kernel module to manage.\n state:\n required: false\n default: \"present\"\n choices: [ present, absent ]\n description:\n - Whether the module should be present or absent.\n params:\n required: false\n default: \"\"\n version_added: \"1.6\"\n description:\n - Modules parameters.\n'''\n\nEXAMPLES = '''\n# Add the 802.1q module\n- modprobe: name=8021q state=present\n# Add the dummy module\n- modprobe: name=dummy state=present params=\"numdummies=2\"\n'''\n\nfrom ansible.module_utils.basic import *\nfrom ansible.module_utils.pycompat24 import get_exception\nimport shlex\n\n\ndef main():\n module = AnsibleModule(\n argument_spec={\n 'name': {'required': True},\n 'state': {'default': 'present', 'choices': ['present', 'absent']},\n 'params': {'default': ''},\n },\n supports_check_mode=True,\n )\n args = {\n 'changed': False,\n 'failed': False,\n 'name': module.params['name'],\n 'state': module.params['state'],\n 'params': module.params['params'],\n }\n\n # Check if module is present\n try:\n modules = open('/proc/modules')\n present = False\n module_name = args['name'].replace('-', '_') + ' '\n for line in modules:\n if line.startswith(module_name):\n present = True\n break\n modules.close()\n except IOError:\n e = get_exception()\n module.fail_json(msg=str(e), **args)\n\n # Check only; don't modify\n if module.check_mode:\n if args['state'] == 'present' and not present:\n changed = True\n elif args['state'] == 'absent' and present:\n changed = True\n else:\n changed = False\n module.exit_json(changed=changed)\n\n # Add/remove module as needed\n if args['state'] == 'present':\n if not present:\n command = [module.get_bin_path('modprobe', True), args['name']]\n command.extend(shlex.split(args['params']))\n rc, _, err = module.run_command(command)\n if rc != 0:\n module.fail_json(msg=err, **args)\n args['changed'] = True\n elif args['state'] == 'absent':\n if present:\n rc, _, err = module.run_command([module.get_bin_path('rmmod', True), args['name']])\n if rc != 0:\n module.fail_json(msg=err, **args)\n args['changed'] = True\n\n module.exit_json(**args)\n\nmain()\n", "path": "system/modprobe.py"}]} | 2,043 | 144 |
gh_patches_debug_6672 | rasdani/github-patches | git_diff | kivy__kivy-3619 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
sandbox decorator not working on py3
run:
```
python3 kivy/kivy/uix/sandbox.py
```
result:
```
Clock.tick()
File "/usr/lib/python3.4/site-packages/kivy/clock.py", line 483, in tick
self._process_events()
File "/usr/lib/python3.4/site-packages/kivy/clock.py", line 615, in _process_events
event.tick(self._last_tick, remove)
File "/usr/lib/python3.4/site-packages/kivy/clock.py", line 353, in tick
callback = self.get_callback()
File "/usr/lib/python3.4/site-packages/kivy/clock.py", line 320, in get_callback
return callback()
File "/usr/lib/python3.4/site-packages/kivy/weakmethod.py", line 47, in __call__
return getattr(self.proxy, self.method_name)
AttributeError: 'Sandbox' object has no attribute '_f2'
```
</issue>
<code>
[start of kivy/uix/sandbox.py]
1 '''
2 Sandbox
3 =======
4
5 .. versionadded:: 1.8.0
6
7 .. warning::
8
9 This is experimental and subject to change as long as this warning notice
10 is present.
11
12 This is a widget that runs itself and all of its children in a Sandbox. That
13 means if a child raises an Exception, it will be caught. The Sandbox
14 itself runs its own Clock, Cache, etc.
15
16 The SandBox widget is still experimental and required for the Kivy designer.
17 When the user designs their own widget, if they do something wrong (wrong size
18 value, invalid python code), it will be caught correctly without breaking
19 the whole application. Because it has been designed that way, we are still
20 enhancing this widget and the :mod:`kivy.context` module.
21 Don't use it unless you know what you are doing.
22
23 '''
24
25 __all__ = ('Sandbox', )
26
27 from kivy.context import Context
28 from kivy.base import ExceptionManagerBase
29 from kivy.clock import Clock
30 from kivy.uix.widget import Widget
31 from kivy.uix.floatlayout import FloatLayout
32 from kivy.uix.relativelayout import RelativeLayout
33 from kivy.lang import Builder
34
35
36 def sandbox(f):
37 def _f2(self, *args, **kwargs):
38 ret = None
39 with self:
40 ret = f(self, *args, **kwargs)
41 return ret
42 return _f2
43
44
45 class SandboxExceptionManager(ExceptionManagerBase):
46
47 def __init__(self, sandbox):
48 ExceptionManagerBase.__init__(self)
49 self.sandbox = sandbox
50
51 def handle_exception(self, e):
52 if not self.sandbox.on_exception(e):
53 return ExceptionManagerBase.RAISE
54 return ExceptionManagerBase.PASS
55
56
57 class SandboxContent(RelativeLayout):
58 pass
59
60
61 class Sandbox(FloatLayout):
62 '''Sandbox widget, used to trap all the exceptions raised by child
63 widgets.
64 '''
65
66 def __init__(self, **kwargs):
67 self._context = Context(init=True)
68 self._context['ExceptionManager'] = SandboxExceptionManager(self)
69 self._context.sandbox = self
70 self._context.push()
71 self.on_context_created()
72 self._container = None
73 super(Sandbox, self).__init__(**kwargs)
74 self._container = SandboxContent(size=self.size, pos=self.pos)
75 super(Sandbox, self).add_widget(self._container)
76 self._context.pop()
77
78 # force SandboxClock's scheduling
79 Clock.schedule_interval(self._clock_sandbox, 0)
80 Clock.schedule_once(self._clock_sandbox_draw, -1)
81 self.main_clock = object.__getattribute__(Clock, '_obj')
82
83 def __enter__(self):
84 self._context.push()
85
86 def __exit__(self, _type, value, traceback):
87 self._context.pop()
88 if _type is not None:
89 return self.on_exception(value, _traceback=traceback)
90
91 def on_context_created(self):
92 '''Override this method in order to load your kv file or do anything
93 else with the newly created context.
94 '''
95 pass
96
97 def on_exception(self, exception, _traceback=None):
98 '''Override this method in order to catch all the exceptions from
99 children.
100
101 If you return True, it will not reraise the exception.
102 If you return False, the exception will be raised to the parent.
103 '''
104 import traceback
105 traceback.print_tb(_traceback)
106 return True
107
108 on_touch_down = sandbox(Widget.on_touch_down)
109 on_touch_move = sandbox(Widget.on_touch_move)
110 on_touch_up = sandbox(Widget.on_touch_up)
111
112 @sandbox
113 def add_widget(self, *args, **kwargs):
114 self._container.add_widget(*args, **kwargs)
115
116 @sandbox
117 def remove_widget(self, *args, **kwargs):
118 self._container.remove_widget(*args, **kwargs)
119
120 @sandbox
121 def clear_widgets(self, *args, **kwargs):
122 self._container.clear_widgets()
123
124 @sandbox
125 def on_size(self, *args):
126 if self._container:
127 self._container.size = self.size
128
129 @sandbox
130 def on_pos(self, *args):
131 if self._container:
132 self._container.pos = self.pos
133
134 @sandbox
135 def _clock_sandbox(self, dt):
136 #import pdb; pdb.set_trace()
137 Clock.tick()
138 Builder.sync()
139
140 @sandbox
141 def _clock_sandbox_draw(self, dt):
142 Clock.tick_draw()
143 Builder.sync()
144 self.main_clock.schedule_once(self._call_draw, 0)
145
146 def _call_draw(self, dt):
147 self.main_clock.schedule_once(self._clock_sandbox_draw, -1)
148
149 if __name__ == '__main__':
150 from kivy.base import runTouchApp
151 from kivy.uix.button import Button
152
153 class TestButton(Button):
154
155 def on_touch_up(self, touch):
156 #raise Exception('fdfdfdfdfdfdfd')
157 return super(TestButton, self).on_touch_up(touch)
158
159 def on_touch_down(self, touch):
160 #raise Exception('')
161 return super(TestButton, self).on_touch_down(touch)
162
163 s = Sandbox()
164 with s:
165 Builder.load_string('''
166 <TestButton>:
167 canvas:
168 Color:
169 rgb: (.3, .2, 0) if self.state == 'normal' else (.7, .7, 0)
170 Rectangle:
171 pos: self.pos
172 size: self.size
173 Color:
174 rgb: 1, 1, 1
175 Rectangle:
176 size: self.texture_size
177 pos: self.center_x - self.texture_size[0] / 2.,\
178 self.center_y - self.texture_size[1] / 2.
179 texture: self.texture
180
181 # invalid... for testing.
182 #on_touch_up: root.d()
183 #on_touch_down: root.f()
184 on_release: root.args()
185 #on_press: root.args()
186 ''')
187 b = TestButton(text='Hello World')
188 s.add_widget(b)
189
190 # this exception is within the "with" block, but will be ignored by
191 # default because the sandbox on_exception will return True
192 raise Exception('hello')
193
194 runTouchApp(s)
195
[end of kivy/uix/sandbox.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kivy/uix/sandbox.py b/kivy/uix/sandbox.py
--- a/kivy/uix/sandbox.py
+++ b/kivy/uix/sandbox.py
@@ -24,6 +24,7 @@
__all__ = ('Sandbox', )
+from functools import wraps
from kivy.context import Context
from kivy.base import ExceptionManagerBase
from kivy.clock import Clock
@@ -34,6 +35,7 @@
def sandbox(f):
+ @wraps(f)
def _f2(self, *args, **kwargs):
ret = None
with self:
| {"golden_diff": "diff --git a/kivy/uix/sandbox.py b/kivy/uix/sandbox.py\n--- a/kivy/uix/sandbox.py\n+++ b/kivy/uix/sandbox.py\n@@ -24,6 +24,7 @@\n \n __all__ = ('Sandbox', )\n \n+from functools import wraps\n from kivy.context import Context\n from kivy.base import ExceptionManagerBase\n from kivy.clock import Clock\n@@ -34,6 +35,7 @@\n \n \n def sandbox(f):\n+ @wraps(f)\n def _f2(self, *args, **kwargs):\n ret = None\n with self:\n", "issue": "sandbox decorator not working on py3\nrun:\n\n```\npython3 kivy/kivy/uix/sandbox.py\n```\n\nresult: \n\n```\nClock.tick()\nFile \"/usr/lib/python3.4/site-packages/kivy/clock.py\", line 483, in tick\n self._process_events()\nFile \"/usr/lib/python3.4/site-packages/kivy/clock.py\", line 615, in _process_events\n event.tick(self._last_tick, remove)\nFile \"/usr/lib/python3.4/site-packages/kivy/clock.py\", line 353, in tick\n callback = self.get_callback()\nFile \"/usr/lib/python3.4/site-packages/kivy/clock.py\", line 320, in get_callback\n return callback()\nFile \"/usr/lib/python3.4/site-packages/kivy/weakmethod.py\", line 47, in __call__\n return getattr(self.proxy, self.method_name)\nAttributeError: 'Sandbox' object has no attribute '_f2'\n```\n\n", "before_files": [{"content": "'''\nSandbox\n=======\n\n.. versionadded:: 1.8.0\n\n.. warning::\n\n This is experimental and subject to change as long as this warning notice\n is present.\n\nThis is a widget that runs itself and all of its children in a Sandbox. That\nmeans if a child raises an Exception, it will be caught. The Sandbox\nitself runs its own Clock, Cache, etc.\n\nThe SandBox widget is still experimental and required for the Kivy designer.\nWhen the user designs their own widget, if they do something wrong (wrong size\nvalue, invalid python code), it will be caught correctly without breaking\nthe whole application. 
Because it has been designed that way, we are still\nenhancing this widget and the :mod:`kivy.context` module.\nDon't use it unless you know what you are doing.\n\n'''\n\n__all__ = ('Sandbox', )\n\nfrom kivy.context import Context\nfrom kivy.base import ExceptionManagerBase\nfrom kivy.clock import Clock\nfrom kivy.uix.widget import Widget\nfrom kivy.uix.floatlayout import FloatLayout\nfrom kivy.uix.relativelayout import RelativeLayout\nfrom kivy.lang import Builder\n\n\ndef sandbox(f):\n def _f2(self, *args, **kwargs):\n ret = None\n with self:\n ret = f(self, *args, **kwargs)\n return ret\n return _f2\n\n\nclass SandboxExceptionManager(ExceptionManagerBase):\n\n def __init__(self, sandbox):\n ExceptionManagerBase.__init__(self)\n self.sandbox = sandbox\n\n def handle_exception(self, e):\n if not self.sandbox.on_exception(e):\n return ExceptionManagerBase.RAISE\n return ExceptionManagerBase.PASS\n\n\nclass SandboxContent(RelativeLayout):\n pass\n\n\nclass Sandbox(FloatLayout):\n '''Sandbox widget, used to trap all the exceptions raised by child\n widgets.\n '''\n\n def __init__(self, **kwargs):\n self._context = Context(init=True)\n self._context['ExceptionManager'] = SandboxExceptionManager(self)\n self._context.sandbox = self\n self._context.push()\n self.on_context_created()\n self._container = None\n super(Sandbox, self).__init__(**kwargs)\n self._container = SandboxContent(size=self.size, pos=self.pos)\n super(Sandbox, self).add_widget(self._container)\n self._context.pop()\n\n # force SandboxClock's scheduling\n Clock.schedule_interval(self._clock_sandbox, 0)\n Clock.schedule_once(self._clock_sandbox_draw, -1)\n self.main_clock = object.__getattribute__(Clock, '_obj')\n\n def __enter__(self):\n self._context.push()\n\n def __exit__(self, _type, value, traceback):\n self._context.pop()\n if _type is not None:\n return self.on_exception(value, _traceback=traceback)\n\n def on_context_created(self):\n '''Override this method in order to load your kv file or do anything\n else with the newly created context.\n '''\n pass\n\n def on_exception(self, exception, _traceback=None):\n '''Override this method in order to catch all the exceptions from\n children.\n\n If you return True, it will not reraise the exception.\n If you return False, the exception will be raised to the parent.\n '''\n import traceback\n traceback.print_tb(_traceback)\n return True\n\n on_touch_down = sandbox(Widget.on_touch_down)\n on_touch_move = sandbox(Widget.on_touch_move)\n on_touch_up = sandbox(Widget.on_touch_up)\n\n @sandbox\n def add_widget(self, *args, **kwargs):\n self._container.add_widget(*args, **kwargs)\n\n @sandbox\n def remove_widget(self, *args, **kwargs):\n self._container.remove_widget(*args, **kwargs)\n\n @sandbox\n def clear_widgets(self, *args, **kwargs):\n self._container.clear_widgets()\n\n @sandbox\n def on_size(self, *args):\n if self._container:\n self._container.size = self.size\n\n @sandbox\n def on_pos(self, *args):\n if self._container:\n self._container.pos = self.pos\n\n @sandbox\n def _clock_sandbox(self, dt):\n #import pdb; pdb.set_trace()\n Clock.tick()\n Builder.sync()\n\n @sandbox\n def _clock_sandbox_draw(self, dt):\n Clock.tick_draw()\n Builder.sync()\n self.main_clock.schedule_once(self._call_draw, 0)\n\n def _call_draw(self, dt):\n self.main_clock.schedule_once(self._clock_sandbox_draw, -1)\n\nif __name__ == '__main__':\n from kivy.base import runTouchApp\n from kivy.uix.button import Button\n\n class TestButton(Button):\n\n def on_touch_up(self, touch):\n #raise 
Exception('fdfdfdfdfdfdfd')\n return super(TestButton, self).on_touch_up(touch)\n\n def on_touch_down(self, touch):\n #raise Exception('')\n return super(TestButton, self).on_touch_down(touch)\n\n s = Sandbox()\n with s:\n Builder.load_string('''\n<TestButton>:\n canvas:\n Color:\n rgb: (.3, .2, 0) if self.state == 'normal' else (.7, .7, 0)\n Rectangle:\n pos: self.pos\n size: self.size\n Color:\n rgb: 1, 1, 1\n Rectangle:\n size: self.texture_size\n pos: self.center_x - self.texture_size[0] / 2.,\\\n self.center_y - self.texture_size[1] / 2.\n texture: self.texture\n\n # invalid... for testing.\n #on_touch_up: root.d()\n #on_touch_down: root.f()\n on_release: root.args()\n #on_press: root.args()\n''')\n b = TestButton(text='Hello World')\n s.add_widget(b)\n\n # this exception is within the \"with\" block, but will be ignored by\n # default because the sandbox on_exception will return True\n raise Exception('hello')\n\n runTouchApp(s)\n", "path": "kivy/uix/sandbox.py"}]} | 2,585 | 138 |
gh_patches_debug_11188 | rasdani/github-patches | git_diff | keras-team__keras-core-339 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
BatchNorm layer appears broken in multi-backend Keras
classic MNIST training:
- [Keras multi-backend with BN on JAX](https://colab.research.google.com/drive/1ZZI6UK9umgCDSIRcPqVUl6YpLmRdwMF3?resourcekey=0-8YS9usjjxUuh0FUl_JW0Ow&usp=sharing). Final accuracy __19%__!!!
- [Keras multi-backend w/o BN on JAX](https://colab.research.google.com/drive/16Yj63zZHmH2oQNBtNYmP5W0KJJ_RnLQi?resourcekey=0-Ut-aMYTyo3i36iJeQB4LRQ&usp=sharing). Final accuracy: 99%
- [Keras multi-backend with BN on TF](https://colab.research.google.com/drive/1ipSnQuGe2w4KGphG6hMeWdzt3H8m36cx?resourcekey=0-ZH-RRNE3rfXqqurX8yE_YA&usp=sharing). Final accuracy: __14%__!!!
- [Keras multi-backend w/o BN on TF](https://colab.research.google.com/drive/1beSnSJDvBwqYXzaGpvdVH9bUgCM2Jtsi?resourcekey=0-jmZOshPvFOUqWOEGq-z2Dg&usp=sharing). Final accuracy: 99%
- [Keras baseline (regular tf.keras on TF, with BN)](https://colab.research.google.com/drive/1dI3RTfVvYdYER09Q6onYIdsh3vkRmEci?resourcekey=0-FgN08JmB4pWDpsL8aePs8A&usp=sharing). Final accuracy: 99%
The common factor triggering the issue seems to be using the BatchNorm layer with multi-backend Keras; the choice of backend does not seem to change the behavior.
</issue>
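A minimal sketch of the kind of model that shows the problem, assuming the `keras_core` package layout at the time of the report (random arrays stand in for MNIST here; the linked notebooks use the real dataset):

```
# Hedged reproduction sketch, not the notebook code from the links above.
import numpy as np
import keras_core as keras

x = np.random.rand(1024, 784).astype("float32")  # stand-in for flattened MNIST images
y = np.random.randint(0, 10, size=(1024,))       # stand-in for digit labels

model = keras.Sequential([
    keras.Input(shape=(784,)),
    keras.layers.Dense(128, activation="relu"),
    keras.layers.BatchNormalization(),  # removing this layer restores normal accuracy
    keras.layers.Dense(10, activation="softmax"),
])
model.compile(optimizer="adam",
              loss="sparse_categorical_crossentropy",
              metrics=["accuracy"])
model.fit(x, y, epochs=2, batch_size=128)
```

With the real MNIST data, accuracy collapses only when the `BatchNormalization` layer is present, which is consistent with the change shown in the patch below.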
<code>
[start of keras_core/layers/normalization/batch_normalization.py]
1 from keras_core import constraints
2 from keras_core import initializers
3 from keras_core import operations as ops
4 from keras_core import regularizers
5 from keras_core.api_export import keras_core_export
6 from keras_core.layers.input_spec import InputSpec
7 from keras_core.layers.layer import Layer
8
9
10 @keras_core_export("keras_core.layers.BatchNormalization")
11 class BatchNormalization(Layer):
12 """Layer that normalizes its inputs.
13
14 Batch normalization applies a transformation that maintains the mean output
15 close to 0 and the output standard deviation close to 1.
16
17 Importantly, batch normalization works differently during training and
18 during inference.
19
20 **During training** (i.e. when using `fit()` or when calling the layer/model
21 with the argument `training=True`), the layer normalizes its output using
22 the mean and standard deviation of the current batch of inputs. That is to
23 say, for each channel being normalized, the layer returns
24 `gamma * (batch - mean(batch)) / sqrt(var(batch) + epsilon) + beta`, where:
25
26 - `epsilon` is small constant (configurable as part of the constructor
27 arguments)
28 - `gamma` is a learned scaling factor (initialized as 1), which
29 can be disabled by passing `scale=False` to the constructor.
30 - `beta` is a learned offset factor (initialized as 0), which
31 can be disabled by passing `center=False` to the constructor.
32
33 **During inference** (i.e. when using `evaluate()` or `predict()` or when
34 calling the layer/model with the argument `training=False` (which is the
35 default), the layer normalizes its output using a moving average of the
36 mean and standard deviation of the batches it has seen during training. That
37 is to say, it returns
38 `gamma * (batch - self.moving_mean) / sqrt(self.moving_var+epsilon) + beta`.
39
40 `self.moving_mean` and `self.moving_var` are non-trainable variables that
41 are updated each time the layer in called in training mode, as such:
42
43 - `moving_mean = moving_mean * momentum + mean(batch) * (1 - momentum)`
44 - `moving_var = moving_var * momentum + var(batch) * (1 - momentum)`
45
46 As such, the layer will only normalize its inputs during inference
47 *after having been trained on data that has similar statistics as the
48 inference data*.
49
50 Args:
51 axis: Integer, the axis that should be normalized
52 (typically the features axis). For instance, after a `Conv2D` layer
53 with `data_format="channels_first"`, use `axis=1`.
54 momentum: Momentum for the moving average.
55 epsilon: Small float added to variance to avoid dividing by zero.
56 center: If `True`, add offset of `beta` to normalized tensor.
57 If `False`, `beta` is ignored.
58 scale: If `True`, multiply by `gamma`. If `False`, `gamma` is not used.
59 When the next layer is linear this can be disabled
60 since the scaling will be done by the next layer.
61 beta_initializer: Initializer for the beta weight.
62 gamma_initializer: Initializer for the gamma weight.
63 moving_mean_initializer: Initializer for the moving mean.
64 moving_variance_initializer: Initializer for the moving variance.
65 beta_regularizer: Optional regularizer for the beta weight.
66 gamma_regularizer: Optional regularizer for the gamma weight.
67 beta_constraint: Optional constraint for the beta weight.
68 gamma_constraint: Optional constraint for the gamma weight.
69 **kwargs: Base layer keyword arguments (e.g. `name` and `dtype`).
70
71 Call arguments:
72 inputs: Input tensor (of any rank).
73 training: Python boolean indicating whether the layer should behave in
74 training mode or in inference mode.
75 - `training=True`: The layer will normalize its inputs using
76 the mean and variance of the current batch of inputs.
77 - `training=False`: The layer will normalize its inputs using
78 the mean and variance of its moving statistics, learned during
79 training.
80
81 Reference:
82
83 - [Ioffe and Szegedy, 2015](https://arxiv.org/abs/1502.03167).
84
85 **About setting `layer.trainable = False` on a `BatchNormalization` layer:**
86
87 The meaning of setting `layer.trainable = False` is to freeze the layer,
88 i.e. its internal state will not change during training:
89 its trainable weights will not be updated
90 during `fit()` or `train_on_batch()`, and its state updates will not be run.
91
92 Usually, this does not necessarily mean that the layer is run in inference
93 mode (which is normally controlled by the `training` argument that can
94 be passed when calling a layer). "Frozen state" and "inference mode"
95 are two separate concepts.
96
97 However, in the case of the `BatchNormalization` layer, **setting
98 `trainable = False` on the layer means that the layer will be
99 subsequently run in inference mode** (meaning that it will use
100 the moving mean and the moving variance to normalize the current batch,
101 rather than using the mean and variance of the current batch).
102
103 Note that:
104
105 - Setting `trainable` on an model containing other layers will recursively
106 set the `trainable` value of all inner layers.
107 - If the value of the `trainable` attribute is changed after calling
108 `compile()` on a model, the new value doesn't take effect for this model
109 until `compile()` is called again.
110 """
111
112 def __init__(
113 self,
114 axis=-1,
115 momentum=0.99,
116 epsilon=1e-3,
117 center=True,
118 scale=True,
119 beta_initializer="zeros",
120 gamma_initializer="ones",
121 moving_mean_initializer="zeros",
122 moving_variance_initializer="ones",
123 beta_regularizer=None,
124 gamma_regularizer=None,
125 beta_constraint=None,
126 gamma_constraint=None,
127 **kwargs,
128 ):
129 super().__init__(**kwargs)
130 self.axis = int(axis)
131 self.momentum = float(momentum)
132 self.epsilon = float(epsilon)
133 self.center = center
134 self.scale = scale
135 self.beta_initializer = initializers.get(beta_initializer)
136 self.gamma_initializer = initializers.get(gamma_initializer)
137 self.moving_mean_initializer = initializers.get(moving_mean_initializer)
138 self.moving_variance_initializer = initializers.get(
139 moving_variance_initializer
140 )
141 self.beta_regularizer = regularizers.get(beta_regularizer)
142 self.gamma_regularizer = regularizers.get(gamma_regularizer)
143 self.beta_constraint = constraints.get(beta_constraint)
144 self.gamma_constraint = constraints.get(gamma_constraint)
145 self.supports_masking = True
146
147 def build(self, input_shape):
148 shape = (input_shape[self.axis],)
149 if self.scale:
150 self.gamma = self.add_weight(
151 shape=shape,
152 name="gamma",
153 initializer=self.gamma_initializer,
154 regularizer=self.gamma_regularizer,
155 constraint=self.gamma_constraint,
156 trainable=True,
157 )
158 if self.center:
159 self.beta = self.add_weight(
160 shape=shape,
161 name="beta",
162 initializer=self.beta_initializer,
163 regularizer=self.beta_regularizer,
164 constraint=self.beta_constraint,
165 trainable=True,
166 )
167 self.moving_mean = self.add_weight(
168 shape=shape,
169 name="moving_mean",
170 initializer=self.moving_mean_initializer,
171 trainable=False,
172 )
173 self.moving_variance = self.add_weight(
174 shape=shape,
175 name="moving_variance",
176 initializer=self.moving_variance_initializer,
177 trainable=False,
178 )
179 self.input_spec = InputSpec(
180 ndim=len(input_shape), axes={self.axis: input_shape[self.axis]}
181 )
182 reduction_axes = list(range(len(input_shape)))
183 del reduction_axes[self.axis]
184 self._reduction_axes = reduction_axes
185 self.built = True
186
187 def compute_output_shape(self, input_shape):
188 return input_shape
189
190 def call(self, inputs, training=None, mask=None):
191 broadcast_shape = [1] * len(inputs.shape)
192 broadcast_shape[self.axis] = inputs.shape[self.axis]
193 if training and self.trainable:
194 mean = ops.mean(inputs, axis=self._reduction_axes, keepdims=True)
195 variance = ops.mean(
196 ops.square(inputs), axis=self._reduction_axes, keepdims=True
197 ) - ops.square(mean)
198 outputs = (inputs - mean) / ops.sqrt(variance + self.epsilon)
199 mean = ops.squeeze(mean, self._reduction_axes)
200 variance = ops.squeeze(variance, self._reduction_axes)
201 self.moving_mean.assign(
202 self.moving_mean * self.momentum + mean * (1.0 - self.momentum)
203 )
204 else:
205 moving_mean = ops.reshape(self.moving_mean, broadcast_shape)
206 moving_variance = ops.reshape(self.moving_variance, broadcast_shape)
207 outputs = (inputs - moving_mean) / ops.sqrt(
208 moving_variance + self.epsilon
209 )
210 if self.scale:
211 gamma = ops.reshape(self.gamma, broadcast_shape)
212 outputs = outputs * gamma
213 if self.center:
214 beta = ops.reshape(self.beta, broadcast_shape)
215 outputs = outputs + beta
216 return outputs
217
218 def get_config(self):
219 base_config = super().get_config()
220 config = {
221 "axis": self.axis,
222 "momentum": self.momentum,
223 "epsilon": self.epsilon,
224 "center": self.center,
225 "scale": self.scale,
226 "beta_initializer": initializers.serialize(self.beta_initializer),
227 "gamma_initializer": initializers.serialize(self.gamma_initializer),
228 "moving_mean_initializer": initializers.serialize(
229 self.moving_mean_initializer
230 ),
231 "moving_variance_initializer": initializers.serialize(
232 self.moving_variance_initializer
233 ),
234 "beta_regularizer": regularizers.serialize(self.beta_regularizer),
235 "gamma_regularizer": regularizers.serialize(self.gamma_regularizer),
236 "beta_constraint": constraints.serialize(self.beta_constraint),
237 "gamma_constraint": constraints.serialize(self.gamma_constraint),
238 }
239 return {**base_config, **config}
240
[end of keras_core/layers/normalization/batch_normalization.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/keras_core/layers/normalization/batch_normalization.py b/keras_core/layers/normalization/batch_normalization.py
--- a/keras_core/layers/normalization/batch_normalization.py
+++ b/keras_core/layers/normalization/batch_normalization.py
@@ -201,6 +201,10 @@
self.moving_mean.assign(
self.moving_mean * self.momentum + mean * (1.0 - self.momentum)
)
+ self.moving_variance.assign(
+ self.moving_variance * self.momentum
+ + variance * (1.0 - self.momentum)
+ )
else:
moving_mean = ops.reshape(self.moving_mean, broadcast_shape)
moving_variance = ops.reshape(self.moving_variance, broadcast_shape)
| {"golden_diff": "diff --git a/keras_core/layers/normalization/batch_normalization.py b/keras_core/layers/normalization/batch_normalization.py\n--- a/keras_core/layers/normalization/batch_normalization.py\n+++ b/keras_core/layers/normalization/batch_normalization.py\n@@ -201,6 +201,10 @@\n self.moving_mean.assign(\n self.moving_mean * self.momentum + mean * (1.0 - self.momentum)\n )\n+ self.moving_variance.assign(\n+ self.moving_variance * self.momentum\n+ + variance * (1.0 - self.momentum)\n+ )\n else:\n moving_mean = ops.reshape(self.moving_mean, broadcast_shape)\n moving_variance = ops.reshape(self.moving_variance, broadcast_shape)\n", "issue": "BatchNorm layer appears broken in multi-backend Keras\nclassic MNIST training:\r\n- [Keras multi-backend with BN on JAX](https://colab.research.google.com/drive/1ZZI6UK9umgCDSIRcPqVUl6YpLmRdwMF3?resourcekey=0-8YS9usjjxUuh0FUl_JW0Ow&usp=sharing). Final accuracy __19%__!!!\r\n- [Keras multi-backend w/o BN on JAX](https://colab.research.google.com/drive/16Yj63zZHmH2oQNBtNYmP5W0KJJ_RnLQi?resourcekey=0-Ut-aMYTyo3i36iJeQB4LRQ&usp=sharing). Final accuracy: 99%\r\n- [Keras multi-backend with BN on TF](https://colab.research.google.com/drive/1ipSnQuGe2w4KGphG6hMeWdzt3H8m36cx?resourcekey=0-ZH-RRNE3rfXqqurX8yE_YA&usp=sharing). Final accuracy: __14%__!!!\r\n- [Keras multi-backend w/o BN on TF](https://colab.research.google.com/drive/1beSnSJDvBwqYXzaGpvdVH9bUgCM2Jtsi?resourcekey=0-jmZOshPvFOUqWOEGq-z2Dg&usp=sharing). Final accuracy: 99%\r\n- [Keras baseline (regular tf.keras on TF, with BN)](https://colab.research.google.com/drive/1dI3RTfVvYdYER09Q6onYIdsh3vkRmEci?resourcekey=0-FgN08JmB4pWDpsL8aePs8A&usp=sharing). Final accuracy: 99%\r\n\r\nThe common factors triggering the issue seem to be: using the BatchNorm layer, on Keras multi-backend. The choice of backend does not seem to change the behavior.\n", "before_files": [{"content": "from keras_core import constraints\nfrom keras_core import initializers\nfrom keras_core import operations as ops\nfrom keras_core import regularizers\nfrom keras_core.api_export import keras_core_export\nfrom keras_core.layers.input_spec import InputSpec\nfrom keras_core.layers.layer import Layer\n\n\n@keras_core_export(\"keras_core.layers.BatchNormalization\")\nclass BatchNormalization(Layer):\n \"\"\"Layer that normalizes its inputs.\n\n Batch normalization applies a transformation that maintains the mean output\n close to 0 and the output standard deviation close to 1.\n\n Importantly, batch normalization works differently during training and\n during inference.\n\n **During training** (i.e. when using `fit()` or when calling the layer/model\n with the argument `training=True`), the layer normalizes its output using\n the mean and standard deviation of the current batch of inputs. That is to\n say, for each channel being normalized, the layer returns\n `gamma * (batch - mean(batch)) / sqrt(var(batch) + epsilon) + beta`, where:\n\n - `epsilon` is small constant (configurable as part of the constructor\n arguments)\n - `gamma` is a learned scaling factor (initialized as 1), which\n can be disabled by passing `scale=False` to the constructor.\n - `beta` is a learned offset factor (initialized as 0), which\n can be disabled by passing `center=False` to the constructor.\n\n **During inference** (i.e. 
when using `evaluate()` or `predict()` or when\n calling the layer/model with the argument `training=False` (which is the\n default), the layer normalizes its output using a moving average of the\n mean and standard deviation of the batches it has seen during training. That\n is to say, it returns\n `gamma * (batch - self.moving_mean) / sqrt(self.moving_var+epsilon) + beta`.\n\n `self.moving_mean` and `self.moving_var` are non-trainable variables that\n are updated each time the layer in called in training mode, as such:\n\n - `moving_mean = moving_mean * momentum + mean(batch) * (1 - momentum)`\n - `moving_var = moving_var * momentum + var(batch) * (1 - momentum)`\n\n As such, the layer will only normalize its inputs during inference\n *after having been trained on data that has similar statistics as the\n inference data*.\n\n Args:\n axis: Integer, the axis that should be normalized\n (typically the features axis). For instance, after a `Conv2D` layer\n with `data_format=\"channels_first\"`, use `axis=1`.\n momentum: Momentum for the moving average.\n epsilon: Small float added to variance to avoid dividing by zero.\n center: If `True`, add offset of `beta` to normalized tensor.\n If `False`, `beta` is ignored.\n scale: If `True`, multiply by `gamma`. If `False`, `gamma` is not used.\n When the next layer is linear this can be disabled\n since the scaling will be done by the next layer.\n beta_initializer: Initializer for the beta weight.\n gamma_initializer: Initializer for the gamma weight.\n moving_mean_initializer: Initializer for the moving mean.\n moving_variance_initializer: Initializer for the moving variance.\n beta_regularizer: Optional regularizer for the beta weight.\n gamma_regularizer: Optional regularizer for the gamma weight.\n beta_constraint: Optional constraint for the beta weight.\n gamma_constraint: Optional constraint for the gamma weight.\n **kwargs: Base layer keyword arguments (e.g. `name` and `dtype`).\n\n Call arguments:\n inputs: Input tensor (of any rank).\n training: Python boolean indicating whether the layer should behave in\n training mode or in inference mode.\n - `training=True`: The layer will normalize its inputs using\n the mean and variance of the current batch of inputs.\n - `training=False`: The layer will normalize its inputs using\n the mean and variance of its moving statistics, learned during\n training.\n\n Reference:\n\n - [Ioffe and Szegedy, 2015](https://arxiv.org/abs/1502.03167).\n\n **About setting `layer.trainable = False` on a `BatchNormalization` layer:**\n\n The meaning of setting `layer.trainable = False` is to freeze the layer,\n i.e. its internal state will not change during training:\n its trainable weights will not be updated\n during `fit()` or `train_on_batch()`, and its state updates will not be run.\n\n Usually, this does not necessarily mean that the layer is run in inference\n mode (which is normally controlled by the `training` argument that can\n be passed when calling a layer). 
\"Frozen state\" and \"inference mode\"\n are two separate concepts.\n\n However, in the case of the `BatchNormalization` layer, **setting\n `trainable = False` on the layer means that the layer will be\n subsequently run in inference mode** (meaning that it will use\n the moving mean and the moving variance to normalize the current batch,\n rather than using the mean and variance of the current batch).\n\n Note that:\n\n - Setting `trainable` on an model containing other layers will recursively\n set the `trainable` value of all inner layers.\n - If the value of the `trainable` attribute is changed after calling\n `compile()` on a model, the new value doesn't take effect for this model\n until `compile()` is called again.\n \"\"\"\n\n def __init__(\n self,\n axis=-1,\n momentum=0.99,\n epsilon=1e-3,\n center=True,\n scale=True,\n beta_initializer=\"zeros\",\n gamma_initializer=\"ones\",\n moving_mean_initializer=\"zeros\",\n moving_variance_initializer=\"ones\",\n beta_regularizer=None,\n gamma_regularizer=None,\n beta_constraint=None,\n gamma_constraint=None,\n **kwargs,\n ):\n super().__init__(**kwargs)\n self.axis = int(axis)\n self.momentum = float(momentum)\n self.epsilon = float(epsilon)\n self.center = center\n self.scale = scale\n self.beta_initializer = initializers.get(beta_initializer)\n self.gamma_initializer = initializers.get(gamma_initializer)\n self.moving_mean_initializer = initializers.get(moving_mean_initializer)\n self.moving_variance_initializer = initializers.get(\n moving_variance_initializer\n )\n self.beta_regularizer = regularizers.get(beta_regularizer)\n self.gamma_regularizer = regularizers.get(gamma_regularizer)\n self.beta_constraint = constraints.get(beta_constraint)\n self.gamma_constraint = constraints.get(gamma_constraint)\n self.supports_masking = True\n\n def build(self, input_shape):\n shape = (input_shape[self.axis],)\n if self.scale:\n self.gamma = self.add_weight(\n shape=shape,\n name=\"gamma\",\n initializer=self.gamma_initializer,\n regularizer=self.gamma_regularizer,\n constraint=self.gamma_constraint,\n trainable=True,\n )\n if self.center:\n self.beta = self.add_weight(\n shape=shape,\n name=\"beta\",\n initializer=self.beta_initializer,\n regularizer=self.beta_regularizer,\n constraint=self.beta_constraint,\n trainable=True,\n )\n self.moving_mean = self.add_weight(\n shape=shape,\n name=\"moving_mean\",\n initializer=self.moving_mean_initializer,\n trainable=False,\n )\n self.moving_variance = self.add_weight(\n shape=shape,\n name=\"moving_variance\",\n initializer=self.moving_variance_initializer,\n trainable=False,\n )\n self.input_spec = InputSpec(\n ndim=len(input_shape), axes={self.axis: input_shape[self.axis]}\n )\n reduction_axes = list(range(len(input_shape)))\n del reduction_axes[self.axis]\n self._reduction_axes = reduction_axes\n self.built = True\n\n def compute_output_shape(self, input_shape):\n return input_shape\n\n def call(self, inputs, training=None, mask=None):\n broadcast_shape = [1] * len(inputs.shape)\n broadcast_shape[self.axis] = inputs.shape[self.axis]\n if training and self.trainable:\n mean = ops.mean(inputs, axis=self._reduction_axes, keepdims=True)\n variance = ops.mean(\n ops.square(inputs), axis=self._reduction_axes, keepdims=True\n ) - ops.square(mean)\n outputs = (inputs - mean) / ops.sqrt(variance + self.epsilon)\n mean = ops.squeeze(mean, self._reduction_axes)\n variance = ops.squeeze(variance, self._reduction_axes)\n self.moving_mean.assign(\n self.moving_mean * self.momentum + mean * (1.0 - 
self.momentum)\n )\n else:\n moving_mean = ops.reshape(self.moving_mean, broadcast_shape)\n moving_variance = ops.reshape(self.moving_variance, broadcast_shape)\n outputs = (inputs - moving_mean) / ops.sqrt(\n moving_variance + self.epsilon\n )\n if self.scale:\n gamma = ops.reshape(self.gamma, broadcast_shape)\n outputs = outputs * gamma\n if self.center:\n beta = ops.reshape(self.beta, broadcast_shape)\n outputs = outputs + beta\n return outputs\n\n def get_config(self):\n base_config = super().get_config()\n config = {\n \"axis\": self.axis,\n \"momentum\": self.momentum,\n \"epsilon\": self.epsilon,\n \"center\": self.center,\n \"scale\": self.scale,\n \"beta_initializer\": initializers.serialize(self.beta_initializer),\n \"gamma_initializer\": initializers.serialize(self.gamma_initializer),\n \"moving_mean_initializer\": initializers.serialize(\n self.moving_mean_initializer\n ),\n \"moving_variance_initializer\": initializers.serialize(\n self.moving_variance_initializer\n ),\n \"beta_regularizer\": regularizers.serialize(self.beta_regularizer),\n \"gamma_regularizer\": regularizers.serialize(self.gamma_regularizer),\n \"beta_constraint\": constraints.serialize(self.beta_constraint),\n \"gamma_constraint\": constraints.serialize(self.gamma_constraint),\n }\n return {**base_config, **config}\n", "path": "keras_core/layers/normalization/batch_normalization.py"}]} | 3,802 | 180 |
gh_patches_debug_40119 | rasdani/github-patches | git_diff | beeware__toga-1914 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Inconsistent behaviour with toga.Selection value attribute between Windows and Android
### Describe the bug
On Windows, when a selection widget is created with a list containing an empty string (e.g. toga.Selection(items=['1', 'a', '', '4'])), the widget returns '' for the "value" attribute.
On Android, the selection's "value" attribute is set to None if the selection item is an empty string.
I don't have MacOS, iOS, or Linux devices so I don't know what happens there.
### Steps to reproduce
0. Create Beeware Project
1. Go to the app.py file
2. Create a list "list_1" containing an empty string
3. Create the base app class with the startup function and setup main box + window
4. In the startup function, create a selection with the items being that of list_1. Store it as a class attribute for future access.
5. Define a class function that outputs the selection value via print or info_dialog.
6. Create Button to call the output function
7. Add Selector & Button to main box
8. Compile the code for both Windows and Android & launch them.
9. Select the empty item of the selector
10. Press the button
11. Watch as the devices disagree on the value.
### Expected behavior
I would expect the selection's value attribute to be an empty string, since that is what the item is, and an empty string is what the widget accepts when the value is written to it.
### Screenshots
The source code I used:
```
"""
Testing Crashes
"""
import toga
class TestLab(toga.App):
def startup(self):
main_box=toga.Box()
self.select=toga.Selection(items=['A','B','0','','1','+','-'])
main_box.add(self.select)
main_box.add(toga.Button('Get',on_press=self.valuedump))
self.main_window=toga.MainWindow(title=self.formal_name)
self.main_window.content=main_box
self.main_window.show()
async def valuedump(self,widget):
q=self.select.value
print('GET',q,type(q))
self.main_window.info_dialog('Selection Value',f'Value: "{q}"\n Type: {type(q)}')
def main():return TestLab()
```
On Windows:



On Android:



### Environment
Windows 8.1 / Android 11
- Python version: 3.10.0
- Briefcase: 3.11
- Toga: 0.3.0.dev8
### Logs
Windows:
[briefcase.2022_12_27-17_09_47.run.log](https://github.com/beeware/toga/files/10311153/briefcase.2022_12_27-17_09_47.run.log)
Android:
[briefcase.2022_12_27-17_17_14.run.log](https://github.com/beeware/toga/files/10311154/briefcase.2022_12_27-17_17_14.run.log)
### Additional context
_No response_
</issue>
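Until the backends agree, one user-side workaround is to coalesce the two representations; this is only a sketch and assumes empty strings are legitimate items:

```
# Sketch of a user-side workaround (not part of the report):
# treat None and '' as the same selection value.
value = self.select.value
if value is None:  # the Android backend reports None for an empty item
    value = ""
```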
<code>
[start of cocoa/src/toga_cocoa/widgets/selection.py]
1 from travertino.size import at_least
2
3 from toga_cocoa.libs import SEL, NSPopUpButton, objc_method, objc_property
4
5 from .base import Widget
6
7
8 class TogaPopupButton(NSPopUpButton):
9 interface = objc_property(object, weak=True)
10 impl = objc_property(object, weak=True)
11
12 @objc_method
13 def onSelect_(self, obj) -> None:
14 if self.interface.on_select:
15 self.interface.on_select(self.interface)
16
17
18 class Selection(Widget):
19 def create(self):
20 self.native = TogaPopupButton.alloc().init()
21 self.native.interface = self.interface
22 self.native.impl = self
23
24 self.native.target = self.native
25 self.native.action = SEL("onSelect:")
26
27 self.add_constraints()
28
29 def rehint(self):
30 content_size = self.native.intrinsicContentSize()
31 self.interface.intrinsic.height = content_size.height
32 self.interface.intrinsic.width = at_least(
33 max(self.interface._MIN_WIDTH, content_size.width)
34 )
35
36 def remove_all_items(self):
37 self.native.removeAllItems()
38
39 def add_item(self, item):
40 self.native.addItemWithTitle(item)
41
42 def select_item(self, item):
43 self.native.selectItemWithTitle(item)
44
45 def get_selected_item(self):
46 return str(self.native.titleOfSelectedItem)
47
48 def set_on_select(self, handler):
49 pass
50
[end of cocoa/src/toga_cocoa/widgets/selection.py]
[start of iOS/src/toga_iOS/widgets/selection.py]
1 from rubicon.objc import CGSize, objc_method, objc_property
2 from travertino.size import at_least
3
4 from toga_iOS.libs import UIColor, UIPickerView, UITextBorderStyle, UITextField
5 from toga_iOS.widgets.base import Widget
6
7
8 class TogaPickerView(UIPickerView):
9 interface = objc_property(object, weak=True)
10 impl = objc_property(object, weak=True)
11
12 @objc_method
13 def numberOfComponentsInPickerView_(self, pickerView) -> int:
14 return 1
15
16 @objc_method
17 def pickerView_numberOfRowsInComponent_(self, pickerView, component: int) -> int:
18 return len(self.interface.items)
19
20 @objc_method
21 def pickerView_titleForRow_forComponent_(
22 self, pickerView, row: int, component: int
23 ):
24 return str(self.interface.items[row])
25
26 @objc_method
27 def pickerView_didSelectRow_inComponent_(
28 self, pickerView, row: int, component: int
29 ):
30 self.native.text = self.interface.items[row]
31 if self.interface.on_select:
32 self.interface.on_select(self.interface)
33
34
35 class Selection(Widget):
36 def create(self):
37 self.native = UITextField.alloc().init()
38 self.native.interface = self.interface
39 self.native.impl = self
40 self.native.tintColor = UIColor.clearColor
41 self.native.borderStyle = UITextBorderStyle.RoundedRect
42
43 self.picker = TogaPickerView.alloc().init()
44 self.picker.interface = self.interface
45 self.picker.native = self.native
46 self.picker.delegate = self.picker
47 self.picker.dataSource = self.picker
48
49 self.native.inputView = self.picker
50 self.native.delegate = self.picker
51
52 self.add_constraints()
53
54 def rehint(self):
55 # Height of a text input is known.
56 fitting_size = self.native.systemLayoutSizeFittingSize(CGSize(0, 0))
57 self.interface.intrinsic.width = at_least(fitting_size.width)
58 self.interface.intrinsic.height = fitting_size.height
59
60 def remove_all_items(self):
61 # No special handling required
62 pass
63
64 def add_item(self, item):
65 if not self.native.text:
66 self.native.text = item
67
68 def select_item(self, item):
69 self.interface.factory.not_implemented("Selection.select_item()")
70
71 def get_selected_item(self):
72 return self.interface.items[self.picker.selectedRowInComponent(0)]
73
74 def set_on_select(self, handler):
75 # No special handling required
76 pass
77
[end of iOS/src/toga_iOS/widgets/selection.py]
[start of examples/selection/selection/app.py]
1 import toga
2 from toga.constants import COLUMN, ROW
3 from toga.style import Pack
4
5
6 class SelectionApp(toga.App):
7 CARBON = "Carbon"
8 YTTERBIUM = "Ytterbium"
9 THULIUM = "Thulium"
10 OPTIONS = [CARBON, YTTERBIUM, THULIUM]
11
12 def startup(self):
13 # Main window of the application with title and size
14 self.main_window = toga.MainWindow(title=self.name, size=(640, 400))
15
16 # set up common styles
17 label_style = Pack(flex=1, padding_right=24)
18 box_style = Pack(direction=ROW, padding=10)
19
20 # Add the content on the main window
21 self.selection = toga.Selection(items=self.OPTIONS)
22
23 self.main_window.content = toga.Box(
24 children=[
25 toga.Box(
26 style=box_style,
27 children=[
28 toga.Label("Select an element", style=label_style),
29 self.selection,
30 ],
31 ),
32 toga.Box(
33 style=box_style,
34 children=[
35 toga.Label(
36 "Selection value can be set by property setter",
37 style=label_style,
38 ),
39 toga.Button(text="Set Carbon", on_press=self.set_carbon),
40 toga.Button(text="Set Ytterbium", on_press=self.set_ytterbium),
41 toga.Button(text="Set THULIUM", on_press=self.set_thulium),
42 ],
43 ),
44 toga.Box(
45 style=box_style,
46 children=[
47 toga.Label(
48 "use the 'on_select' callback to respond to changes",
49 style=label_style,
50 ),
51 toga.Selection(
52 on_select=self.my_on_select,
53 items=["Dubnium", "Holmium", "Zirconium"],
54 ),
55 ],
56 ),
57 toga.Box(
58 style=box_style,
59 children=[
60 toga.Label(
61 "Long lists of items should scroll", style=label_style
62 ),
63 toga.Selection(items=dir(toga)),
64 ],
65 ),
66 toga.Box(
67 style=box_style,
68 children=[
69 toga.Label("use some style!", style=label_style),
70 toga.Selection(
71 style=Pack(width=200, padding=24),
72 items=["Curium", "Titanium", "Copernicium"],
73 ),
74 ],
75 ),
76 toga.Box(
77 style=box_style,
78 children=[
79 toga.Label(
80 "Selection widgets can be disabled", style=label_style
81 ),
82 toga.Selection(
83 items=[
84 "Helium",
85 "Neon",
86 "Argon",
87 "Krypton",
88 "Xenon",
89 "Radon",
90 "Oganesson",
91 ],
92 enabled=False,
93 ),
94 ],
95 ),
96 ],
97 style=Pack(direction=COLUMN, padding=24),
98 )
99
100 self.main_window.show()
101
102 def set_carbon(self, widget):
103 self.selection.value = self.CARBON
104
105 def set_ytterbium(self, widget):
106 self.selection.value = self.YTTERBIUM
107
108 def set_thulium(self, widget):
109 self.selection.value = self.THULIUM
110
111 def my_on_select(self, selection):
112 # get the current value of the slider with `selection.value`
113
114 print(f"The selection widget changed to {selection.value}")
115
116
117 def main():
118 # App name and namespace
119 return SelectionApp("Selection", "org.beeware.selection")
120
121
122 if __name__ == "__main__":
123 app = main()
124 app.main_loop()
125
[end of examples/selection/selection/app.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/cocoa/src/toga_cocoa/widgets/selection.py b/cocoa/src/toga_cocoa/widgets/selection.py
--- a/cocoa/src/toga_cocoa/widgets/selection.py
+++ b/cocoa/src/toga_cocoa/widgets/selection.py
@@ -43,7 +43,9 @@
self.native.selectItemWithTitle(item)
def get_selected_item(self):
- return str(self.native.titleOfSelectedItem)
+ selected = self.native.titleOfSelectedItem
+ if selected:
+ return str(selected)
def set_on_select(self, handler):
pass
diff --git a/examples/selection/selection/app.py b/examples/selection/selection/app.py
--- a/examples/selection/selection/app.py
+++ b/examples/selection/selection/app.py
@@ -19,6 +19,9 @@
# Add the content on the main window
self.selection = toga.Selection(items=self.OPTIONS)
+ self.empty_selection = toga.Selection()
+
+ self.report_label = toga.Label("", style=label_style)
self.main_window.content = toga.Box(
children=[
@@ -29,6 +32,22 @@
self.selection,
],
),
+ toga.Box(
+ style=box_style,
+ children=[
+ toga.Label("Empty selection", style=label_style),
+ self.empty_selection,
+ ],
+ ),
+ toga.Box(
+ style=box_style,
+ children=[
+ toga.Button(
+ "Report on selection", on_press=self.report_selection
+ ),
+ self.report_label,
+ ],
+ ),
toga.Box(
style=box_style,
children=[
@@ -66,7 +85,7 @@
toga.Box(
style=box_style,
children=[
- toga.Label("use some style!", style=label_style),
+ toga.Label("Use some style!", style=label_style),
toga.Selection(
style=Pack(width=200, padding=24),
items=["Curium", "Titanium", "Copernicium"],
@@ -113,6 +132,11 @@
print(f"The selection widget changed to {selection.value}")
+ def report_selection(self, widget):
+ self.report_label.text = (
+ f"Element: {self.selection.value!r}; Empty: {self.empty_selection.value!r}"
+ )
+
def main():
# App name and namespace
diff --git a/iOS/src/toga_iOS/widgets/selection.py b/iOS/src/toga_iOS/widgets/selection.py
--- a/iOS/src/toga_iOS/widgets/selection.py
+++ b/iOS/src/toga_iOS/widgets/selection.py
@@ -69,7 +69,10 @@
self.interface.factory.not_implemented("Selection.select_item()")
def get_selected_item(self):
- return self.interface.items[self.picker.selectedRowInComponent(0)]
+ try:
+ return self.interface.items[self.picker.selectedRowInComponent(0)]
+ except IndexError:
+ return None
def set_on_select(self, handler):
# No special handling required
| {"golden_diff": "diff --git a/cocoa/src/toga_cocoa/widgets/selection.py b/cocoa/src/toga_cocoa/widgets/selection.py\n--- a/cocoa/src/toga_cocoa/widgets/selection.py\n+++ b/cocoa/src/toga_cocoa/widgets/selection.py\n@@ -43,7 +43,9 @@\n self.native.selectItemWithTitle(item)\n \n def get_selected_item(self):\n- return str(self.native.titleOfSelectedItem)\n+ selected = self.native.titleOfSelectedItem\n+ if selected:\n+ return str(selected)\n \n def set_on_select(self, handler):\n pass\ndiff --git a/examples/selection/selection/app.py b/examples/selection/selection/app.py\n--- a/examples/selection/selection/app.py\n+++ b/examples/selection/selection/app.py\n@@ -19,6 +19,9 @@\n \n # Add the content on the main window\n self.selection = toga.Selection(items=self.OPTIONS)\n+ self.empty_selection = toga.Selection()\n+\n+ self.report_label = toga.Label(\"\", style=label_style)\n \n self.main_window.content = toga.Box(\n children=[\n@@ -29,6 +32,22 @@\n self.selection,\n ],\n ),\n+ toga.Box(\n+ style=box_style,\n+ children=[\n+ toga.Label(\"Empty selection\", style=label_style),\n+ self.empty_selection,\n+ ],\n+ ),\n+ toga.Box(\n+ style=box_style,\n+ children=[\n+ toga.Button(\n+ \"Report on selection\", on_press=self.report_selection\n+ ),\n+ self.report_label,\n+ ],\n+ ),\n toga.Box(\n style=box_style,\n children=[\n@@ -66,7 +85,7 @@\n toga.Box(\n style=box_style,\n children=[\n- toga.Label(\"use some style!\", style=label_style),\n+ toga.Label(\"Use some style!\", style=label_style),\n toga.Selection(\n style=Pack(width=200, padding=24),\n items=[\"Curium\", \"Titanium\", \"Copernicium\"],\n@@ -113,6 +132,11 @@\n \n print(f\"The selection widget changed to {selection.value}\")\n \n+ def report_selection(self, widget):\n+ self.report_label.text = (\n+ f\"Element: {self.selection.value!r}; Empty: {self.empty_selection.value!r}\"\n+ )\n+\n \n def main():\n # App name and namespace\ndiff --git a/iOS/src/toga_iOS/widgets/selection.py b/iOS/src/toga_iOS/widgets/selection.py\n--- a/iOS/src/toga_iOS/widgets/selection.py\n+++ b/iOS/src/toga_iOS/widgets/selection.py\n@@ -69,7 +69,10 @@\n self.interface.factory.not_implemented(\"Selection.select_item()\")\n \n def get_selected_item(self):\n- return self.interface.items[self.picker.selectedRowInComponent(0)]\n+ try:\n+ return self.interface.items[self.picker.selectedRowInComponent(0)]\n+ except IndexError:\n+ return None\n \n def set_on_select(self, handler):\n # No special handling required\n", "issue": "Inconsistent behaviour with toga.Selection value attribute between Windows and Android\n### Describe the bug\n\nOn Windows, when a selection widget is created with a list containing an empty string (eg: toga.Selection(items=['1','a','','4']) ), the widget returns '' for the \"value\" attribute.\r\n\r\nOn Android, the selection's \"value\" attribute is set to None if the selection item is an empty string.\r\n\r\nI don't have MacOS, iOS, or Linux devices so I don't know what happens there.\n\n### Steps to reproduce\n\n0. Create Beeware Project\r\n1. Go to the app.py file\r\n2. Create a list \"list_1\" containing an empty string\r\n3. Create the base app class with the startup function and setup main box + window\r\n4. In the startup function, create a selection with the items being that of list_1. Store it as a class attribute for future access.\r\n5. Define a class function that outputs the selection value via print or info_dialog.\r\n6. Create Button to call the output function\r\n7. Add Selector & Button to main box\r\n9. 
Compile the code for both Windows and Android & launch them.\r\n11. Select the empty item of the selector\r\n12. Press the button\r\n13. Watch as the devices disagree on the value.\n\n### Expected behavior\n\nI would expect the selector value attribute to be an empty string as that was what the item was, and that is what the selector widget understands when values are written to it.\n\n### Screenshots\n\nThe source code I used:\r\n```\r\n\"\"\"\r\nTesting Crashes\r\n\"\"\"\r\nimport toga\r\nclass TestLab(toga.App):\r\n def startup(self):\r\n main_box=toga.Box()\r\n self.select=toga.Selection(items=['A','B','0','','1','+','-'])\r\n main_box.add(self.select)\r\n main_box.add(toga.Button('Get',on_press=self.valuedump))\r\n self.main_window=toga.MainWindow(title=self.formal_name)\r\n self.main_window.content=main_box\r\n self.main_window.show()\r\n async def valuedump(self,widget):\r\n q=self.select.value\r\n print('GET',q,type(q))\r\n self.main_window.info_dialog('Selection Value',f'Value: \"{q}\"\\n Type: {type(q)}')\r\ndef main():return TestLab()\r\n```\r\n\r\nOn Windows:\r\n\r\n\r\n\r\n\r\n\r\nOn Android:\r\n\r\n\r\n\r\n\r\n\n\n### Environment\n\n\r\nWindows 8.1 / Android 11\r\n- Python version: 3.10.0\r\n - Briefcase: 3.11\r\n - Toga: 0.3.0.dev8\r\n\n\n### Logs\n\nWindows:\r\n[briefcase.2022_12_27-17_09_47.run.log](https://github.com/beeware/toga/files/10311153/briefcase.2022_12_27-17_09_47.run.log)\r\n\r\nAndroid:\r\n[briefcase.2022_12_27-17_17_14.run.log](https://github.com/beeware/toga/files/10311154/briefcase.2022_12_27-17_17_14.run.log)\r\n\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "from travertino.size import at_least\n\nfrom toga_cocoa.libs import SEL, NSPopUpButton, objc_method, objc_property\n\nfrom .base import Widget\n\n\nclass TogaPopupButton(NSPopUpButton):\n interface = objc_property(object, weak=True)\n impl = objc_property(object, weak=True)\n\n @objc_method\n def onSelect_(self, obj) -> None:\n if self.interface.on_select:\n self.interface.on_select(self.interface)\n\n\nclass Selection(Widget):\n def create(self):\n self.native = TogaPopupButton.alloc().init()\n self.native.interface = self.interface\n self.native.impl = self\n\n self.native.target = self.native\n self.native.action = SEL(\"onSelect:\")\n\n self.add_constraints()\n\n def rehint(self):\n content_size = self.native.intrinsicContentSize()\n self.interface.intrinsic.height = content_size.height\n self.interface.intrinsic.width = at_least(\n max(self.interface._MIN_WIDTH, content_size.width)\n )\n\n def remove_all_items(self):\n self.native.removeAllItems()\n\n def add_item(self, item):\n self.native.addItemWithTitle(item)\n\n def select_item(self, item):\n self.native.selectItemWithTitle(item)\n\n def get_selected_item(self):\n return str(self.native.titleOfSelectedItem)\n\n def set_on_select(self, handler):\n pass\n", "path": "cocoa/src/toga_cocoa/widgets/selection.py"}, {"content": "from rubicon.objc import CGSize, objc_method, objc_property\nfrom travertino.size import at_least\n\nfrom toga_iOS.libs import UIColor, UIPickerView, UITextBorderStyle, UITextField\nfrom toga_iOS.widgets.base import Widget\n\n\nclass TogaPickerView(UIPickerView):\n interface = objc_property(object, weak=True)\n impl = objc_property(object, weak=True)\n\n @objc_method\n def numberOfComponentsInPickerView_(self, pickerView) -> int:\n return 1\n\n @objc_method\n def pickerView_numberOfRowsInComponent_(self, pickerView, component: int) -> int:\n return len(self.interface.items)\n\n @objc_method\n 
def pickerView_titleForRow_forComponent_(\n self, pickerView, row: int, component: int\n ):\n return str(self.interface.items[row])\n\n @objc_method\n def pickerView_didSelectRow_inComponent_(\n self, pickerView, row: int, component: int\n ):\n self.native.text = self.interface.items[row]\n if self.interface.on_select:\n self.interface.on_select(self.interface)\n\n\nclass Selection(Widget):\n def create(self):\n self.native = UITextField.alloc().init()\n self.native.interface = self.interface\n self.native.impl = self\n self.native.tintColor = UIColor.clearColor\n self.native.borderStyle = UITextBorderStyle.RoundedRect\n\n self.picker = TogaPickerView.alloc().init()\n self.picker.interface = self.interface\n self.picker.native = self.native\n self.picker.delegate = self.picker\n self.picker.dataSource = self.picker\n\n self.native.inputView = self.picker\n self.native.delegate = self.picker\n\n self.add_constraints()\n\n def rehint(self):\n # Height of a text input is known.\n fitting_size = self.native.systemLayoutSizeFittingSize(CGSize(0, 0))\n self.interface.intrinsic.width = at_least(fitting_size.width)\n self.interface.intrinsic.height = fitting_size.height\n\n def remove_all_items(self):\n # No special handling required\n pass\n\n def add_item(self, item):\n if not self.native.text:\n self.native.text = item\n\n def select_item(self, item):\n self.interface.factory.not_implemented(\"Selection.select_item()\")\n\n def get_selected_item(self):\n return self.interface.items[self.picker.selectedRowInComponent(0)]\n\n def set_on_select(self, handler):\n # No special handling required\n pass\n", "path": "iOS/src/toga_iOS/widgets/selection.py"}, {"content": "import toga\nfrom toga.constants import COLUMN, ROW\nfrom toga.style import Pack\n\n\nclass SelectionApp(toga.App):\n CARBON = \"Carbon\"\n YTTERBIUM = \"Ytterbium\"\n THULIUM = \"Thulium\"\n OPTIONS = [CARBON, YTTERBIUM, THULIUM]\n\n def startup(self):\n # Main window of the application with title and size\n self.main_window = toga.MainWindow(title=self.name, size=(640, 400))\n\n # set up common styles\n label_style = Pack(flex=1, padding_right=24)\n box_style = Pack(direction=ROW, padding=10)\n\n # Add the content on the main window\n self.selection = toga.Selection(items=self.OPTIONS)\n\n self.main_window.content = toga.Box(\n children=[\n toga.Box(\n style=box_style,\n children=[\n toga.Label(\"Select an element\", style=label_style),\n self.selection,\n ],\n ),\n toga.Box(\n style=box_style,\n children=[\n toga.Label(\n \"Selection value can be set by property setter\",\n style=label_style,\n ),\n toga.Button(text=\"Set Carbon\", on_press=self.set_carbon),\n toga.Button(text=\"Set Ytterbium\", on_press=self.set_ytterbium),\n toga.Button(text=\"Set THULIUM\", on_press=self.set_thulium),\n ],\n ),\n toga.Box(\n style=box_style,\n children=[\n toga.Label(\n \"use the 'on_select' callback to respond to changes\",\n style=label_style,\n ),\n toga.Selection(\n on_select=self.my_on_select,\n items=[\"Dubnium\", \"Holmium\", \"Zirconium\"],\n ),\n ],\n ),\n toga.Box(\n style=box_style,\n children=[\n toga.Label(\n \"Long lists of items should scroll\", style=label_style\n ),\n toga.Selection(items=dir(toga)),\n ],\n ),\n toga.Box(\n style=box_style,\n children=[\n toga.Label(\"use some style!\", style=label_style),\n toga.Selection(\n style=Pack(width=200, padding=24),\n items=[\"Curium\", \"Titanium\", \"Copernicium\"],\n ),\n ],\n ),\n toga.Box(\n style=box_style,\n children=[\n toga.Label(\n \"Selection widgets can be disabled\", 
style=label_style\n ),\n toga.Selection(\n items=[\n \"Helium\",\n \"Neon\",\n \"Argon\",\n \"Krypton\",\n \"Xenon\",\n \"Radon\",\n \"Oganesson\",\n ],\n enabled=False,\n ),\n ],\n ),\n ],\n style=Pack(direction=COLUMN, padding=24),\n )\n\n self.main_window.show()\n\n def set_carbon(self, widget):\n self.selection.value = self.CARBON\n\n def set_ytterbium(self, widget):\n self.selection.value = self.YTTERBIUM\n\n def set_thulium(self, widget):\n self.selection.value = self.THULIUM\n\n def my_on_select(self, selection):\n # get the current value of the slider with `selection.value`\n\n print(f\"The selection widget changed to {selection.value}\")\n\n\ndef main():\n # App name and namespace\n return SelectionApp(\"Selection\", \"org.beeware.selection\")\n\n\nif __name__ == \"__main__\":\n app = main()\n app.main_loop()\n", "path": "examples/selection/selection/app.py"}]} | 3,842 | 696 |
gh_patches_debug_28590 | rasdani/github-patches | git_diff | biopython__biopython-1827 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ExPASy.sprot_search_ful and ExPASy.sprot_search_de do not work
Both just return pages stating "Please update your links/bookmarks", with the correct link only appearing at the end of the page.
The functions ultimately use http://www.expasy.ch/cgi-bin/sprot-search-ful or http://www.expasy.ch/cgi-bin/sprot-search-de, pages which no longer exist.
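For context (not part of the original report): with the search CGIs gone, a replacement would query UniProt directly. A minimal, hypothetical sketch — the REST endpoint, parameter names, and output formats below are assumptions, not taken from Biopython or this issue:

```python
# Hypothetical replacement: search UniProt's REST API instead of the retired
# ExPASy sprot-search CGIs. Endpoint and parameters are assumptions.
from urllib.parse import urlencode
from urllib.request import urlopen


def search_uniprot(text, fmt="list", size=25):
    params = urlencode({"query": text, "format": fmt, "size": size})
    url = "https://rest.uniprot.org/uniprotkb/search?%s" % params
    with urlopen(url) as handle:  # network access required
        return handle.read().decode()


# print(search_uniprot("selfish gene"))
```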
</issue>
<code>
[start of Bio/ExPASy/__init__.py]
1 # Copyright 2000 by Jeffrey Chang. All rights reserved.
2 # This code is part of the Biopython distribution and governed by its
3 # license. Please see the LICENSE file that should have been included
4 # as part of this package.
5
6 """Code to access resources at ExPASy over the WWW.
7
8 See https://www.expasy.org/
9
10
11 Functions:
12 - get_prodoc_entry Interface to the get-prodoc-entry CGI script.
13 - get_prosite_entry Interface to the get-prosite-entry CGI script.
14 - get_prosite_raw Interface to the get-prosite-raw CGI script.
15 - get_sprot_raw Interface to the get-sprot-raw CGI script.
16 - sprot_search_ful Interface to the sprot-search-ful CGI script.
17 - sprot_search_de Interface to the sprot-search-de CGI script.
18
19 """
20
21 # Importing these functions with leading underscore as not intended for reuse
22 from Bio._py3k import urlopen as _urlopen
23 from Bio._py3k import urlencode as _urlencode
24 from Bio._py3k import _binary_to_string_handle
25
26
27 def get_prodoc_entry(id,
28 cgi='https://prosite.expasy.org/cgi-bin/prosite/get-prodoc-entry'):
29 """Get a text handle to a PRODOC entry at ExPASy in HTML format.
30
31 >>> from Bio import ExPASy
32 >>> with ExPASy.get_prodoc_entry('PDOC00001') as in_handle:
33 ... html = in_handle.read()
34 ...
35 >>> with open("myprodocrecord.html", "w") as out_handle:
36 ... out_handle.write(html)
37 ...
38
39 For a non-existing key XXX, ExPASy returns an HTML-formatted page
40 containing this text: 'There is currently no PROSITE entry for'
41 """
42 return _binary_to_string_handle(_urlopen("%s?%s" % (cgi, id)))
43
44
45 def get_prosite_entry(id,
46 cgi='https://prosite.expasy.org/cgi-bin/prosite/get-prosite-entry'):
47 """Get a text handle to a PROSITE entry at ExPASy in HTML format.
48
49 >>> from Bio import ExPASy
50 >>> with ExPASy.get_prosite_entry('PS00001') as in_handle:
51 ... html = in_handle.read()
52 ...
53 >>> with open("myprositerecord.html", "w") as out_handle:
54 ... out_handle.write(html)
55 ...
56
57 For a non-existing key XXX, ExPASy returns an HTML-formatted page
58 containing this text: 'There is currently no PROSITE entry for'
59 """
60 return _binary_to_string_handle(_urlopen("%s?%s" % (cgi, id)))
61
62
63 def get_prosite_raw(id, cgi=None):
64 """Get a text handle to a raw PROSITE or PRODOC record at ExPASy.
65
66 The cgi argument is deprecated due to changes in the ExPASy
67 website.
68
69 For a non-existing key, ExPASy returns nothing.
70
71 >>> from Bio import ExPASy
72 >>> from Bio.ExPASy import Prosite
73 >>> with ExPASy.get_prosite_raw('PS00001') as handle:
74 ... record = Prosite.read(handle)
75 ...
76 >>> print(record.accession)
77 PS00001
78
79
80 For a non-existing key, ExPASy returns an error:
81
82 >>> handle = get_prosite_raw("does_not_exist")
83 Traceback (most recent call last):
84 ...
85 urllib.error.HTTPError: HTTP Error 404: Not Found
86
87 """
88 url = "https://prosite.expasy.org/%s.txt" % id
89 return _binary_to_string_handle(_urlopen(url))
90
91
92 def get_sprot_raw(id):
93 """Get a text handle to a raw SwissProt entry at ExPASy.
94
95 For an ID of XXX, fetches http://www.uniprot.org/uniprot/XXX.txt
96 (as per the https://www.expasy.org/expasy_urls.html documentation).
97
98 >>> from Bio import ExPASy
99 >>> from Bio import SwissProt
100 >>> with ExPASy.get_sprot_raw("O23729") as handle:
101 ... record = SwissProt.read(handle)
102 ...
103 >>> print(record.entry_name)
104 CHS3_BROFI
105
106 For a non-existing identifier, UniProt returns an error:
107
108 >>> ExPASy.get_sprot_raw("DOES_NOT_EXIST")
109 Traceback (most recent call last):
110 ...
111 urllib.error.HTTPError: HTTP Error 404: Not Found
112
113 """
114 url = "http://www.uniprot.org/uniprot/%s.txt" % id
115 return _binary_to_string_handle(_urlopen(url))
116
117
118 def sprot_search_ful(text, make_wild=None, swissprot=1, trembl=None,
119 cgi='https://www.expasy.org/cgi-bin/sprot-search-ful'):
120 """Search SwissProt by full text (BROKEN)."""
121 variables = {'SEARCH': text}
122 if make_wild:
123 variables['makeWild'] = 'on'
124 if swissprot:
125 variables['S'] = 'on'
126 if trembl:
127 variables['T'] = 'on'
128 options = _urlencode(variables)
129 fullcgi = "%s?%s" % (cgi, options)
130 handle = _binary_to_string_handle(_urlopen(fullcgi))
131 return handle
132
133
134 def sprot_search_de(text, swissprot=1, trembl=None,
135 cgi='https://www.expasy.org/cgi-bin/sprot-search-de'):
136 """Search SwissProt (BROKEN).
137
138 Search by name, description, gene name, species, or organelle.
139 """
140 variables = {'SEARCH': text}
141 if swissprot:
142 variables['S'] = 'on'
143 if trembl:
144 variables['T'] = 'on'
145 options = _urlencode(variables)
146 fullcgi = "%s?%s" % (cgi, options)
147 return _binary_to_string_handle(_urlopen(fullcgi))
148
[end of Bio/ExPASy/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/Bio/ExPASy/__init__.py b/Bio/ExPASy/__init__.py
--- a/Bio/ExPASy/__init__.py
+++ b/Bio/ExPASy/__init__.py
@@ -13,8 +13,6 @@
- get_prosite_entry Interface to the get-prosite-entry CGI script.
- get_prosite_raw Interface to the get-prosite-raw CGI script.
- get_sprot_raw Interface to the get-sprot-raw CGI script.
- - sprot_search_ful Interface to the sprot-search-ful CGI script.
- - sprot_search_de Interface to the sprot-search-de CGI script.
"""
@@ -113,35 +111,3 @@
"""
url = "http://www.uniprot.org/uniprot/%s.txt" % id
return _binary_to_string_handle(_urlopen(url))
-
-
-def sprot_search_ful(text, make_wild=None, swissprot=1, trembl=None,
- cgi='https://www.expasy.org/cgi-bin/sprot-search-ful'):
- """Search SwissProt by full text (BROKEN)."""
- variables = {'SEARCH': text}
- if make_wild:
- variables['makeWild'] = 'on'
- if swissprot:
- variables['S'] = 'on'
- if trembl:
- variables['T'] = 'on'
- options = _urlencode(variables)
- fullcgi = "%s?%s" % (cgi, options)
- handle = _binary_to_string_handle(_urlopen(fullcgi))
- return handle
-
-
-def sprot_search_de(text, swissprot=1, trembl=None,
- cgi='https://www.expasy.org/cgi-bin/sprot-search-de'):
- """Search SwissProt (BROKEN).
-
- Search by name, description, gene name, species, or organelle.
- """
- variables = {'SEARCH': text}
- if swissprot:
- variables['S'] = 'on'
- if trembl:
- variables['T'] = 'on'
- options = _urlencode(variables)
- fullcgi = "%s?%s" % (cgi, options)
- return _binary_to_string_handle(_urlopen(fullcgi))
| {"golden_diff": "diff --git a/Bio/ExPASy/__init__.py b/Bio/ExPASy/__init__.py\n--- a/Bio/ExPASy/__init__.py\n+++ b/Bio/ExPASy/__init__.py\n@@ -13,8 +13,6 @@\n - get_prosite_entry Interface to the get-prosite-entry CGI script.\n - get_prosite_raw Interface to the get-prosite-raw CGI script.\n - get_sprot_raw Interface to the get-sprot-raw CGI script.\n- - sprot_search_ful Interface to the sprot-search-ful CGI script.\n- - sprot_search_de Interface to the sprot-search-de CGI script.\n \n \"\"\"\n \n@@ -113,35 +111,3 @@\n \"\"\"\n url = \"http://www.uniprot.org/uniprot/%s.txt\" % id\n return _binary_to_string_handle(_urlopen(url))\n-\n-\n-def sprot_search_ful(text, make_wild=None, swissprot=1, trembl=None,\n- cgi='https://www.expasy.org/cgi-bin/sprot-search-ful'):\n- \"\"\"Search SwissProt by full text (BROKEN).\"\"\"\n- variables = {'SEARCH': text}\n- if make_wild:\n- variables['makeWild'] = 'on'\n- if swissprot:\n- variables['S'] = 'on'\n- if trembl:\n- variables['T'] = 'on'\n- options = _urlencode(variables)\n- fullcgi = \"%s?%s\" % (cgi, options)\n- handle = _binary_to_string_handle(_urlopen(fullcgi))\n- return handle\n-\n-\n-def sprot_search_de(text, swissprot=1, trembl=None,\n- cgi='https://www.expasy.org/cgi-bin/sprot-search-de'):\n- \"\"\"Search SwissProt (BROKEN).\n-\n- Search by name, description, gene name, species, or organelle.\n- \"\"\"\n- variables = {'SEARCH': text}\n- if swissprot:\n- variables['S'] = 'on'\n- if trembl:\n- variables['T'] = 'on'\n- options = _urlencode(variables)\n- fullcgi = \"%s?%s\" % (cgi, options)\n- return _binary_to_string_handle(_urlopen(fullcgi))\n", "issue": "ExPASy.sprot_search_ful and ExPASy.sprot_search_de do not work\nBoth will return just pages stating \"Please update your links/bookmarks\" and eventually correct link at the end of page.\nThe functions use http://www.expasy.ch/cgi-bin/sprot-search-ful or http://www.expasy.ch/cgi-bin/sprot-search-de eventually, which pages do not exist anymore.\n\n", "before_files": [{"content": "# Copyright 2000 by Jeffrey Chang. All rights reserved.\n# This code is part of the Biopython distribution and governed by its\n# license. Please see the LICENSE file that should have been included\n# as part of this package.\n\n\"\"\"Code to access resources at ExPASy over the WWW.\n\nSee https://www.expasy.org/\n\n\nFunctions:\n - get_prodoc_entry Interface to the get-prodoc-entry CGI script.\n - get_prosite_entry Interface to the get-prosite-entry CGI script.\n - get_prosite_raw Interface to the get-prosite-raw CGI script.\n - get_sprot_raw Interface to the get-sprot-raw CGI script.\n - sprot_search_ful Interface to the sprot-search-ful CGI script.\n - sprot_search_de Interface to the sprot-search-de CGI script.\n\n\"\"\"\n\n# Importing these functions with leading underscore as not intended for reuse\nfrom Bio._py3k import urlopen as _urlopen\nfrom Bio._py3k import urlencode as _urlencode\nfrom Bio._py3k import _binary_to_string_handle\n\n\ndef get_prodoc_entry(id,\n cgi='https://prosite.expasy.org/cgi-bin/prosite/get-prodoc-entry'):\n \"\"\"Get a text handle to a PRODOC entry at ExPASy in HTML format.\n\n >>> from Bio import ExPASy\n >>> with ExPASy.get_prodoc_entry('PDOC00001') as in_handle:\n ... html = in_handle.read()\n ...\n >>> with open(\"myprodocrecord.html\", \"w\") as out_handle:\n ... 
out_handle.write(html)\n ...\n\n For a non-existing key XXX, ExPASy returns an HTML-formatted page\n containing this text: 'There is currently no PROSITE entry for'\n \"\"\"\n return _binary_to_string_handle(_urlopen(\"%s?%s\" % (cgi, id)))\n\n\ndef get_prosite_entry(id,\n cgi='https://prosite.expasy.org/cgi-bin/prosite/get-prosite-entry'):\n \"\"\"Get a text handle to a PROSITE entry at ExPASy in HTML format.\n\n >>> from Bio import ExPASy\n >>> with ExPASy.get_prosite_entry('PS00001') as in_handle:\n ... html = in_handle.read()\n ...\n >>> with open(\"myprositerecord.html\", \"w\") as out_handle:\n ... out_handle.write(html)\n ...\n\n For a non-existing key XXX, ExPASy returns an HTML-formatted page\n containing this text: 'There is currently no PROSITE entry for'\n \"\"\"\n return _binary_to_string_handle(_urlopen(\"%s?%s\" % (cgi, id)))\n\n\ndef get_prosite_raw(id, cgi=None):\n \"\"\"Get a text handle to a raw PROSITE or PRODOC record at ExPASy.\n\n The cgi argument is deprecated due to changes in the ExPASy\n website.\n\n For a non-existing key, ExPASy returns nothing.\n\n >>> from Bio import ExPASy\n >>> from Bio.ExPASy import Prosite\n >>> with ExPASy.get_prosite_raw('PS00001') as handle:\n ... record = Prosite.read(handle)\n ...\n >>> print(record.accession)\n PS00001\n\n\n For a non-existing key, ExPASy returns an error:\n\n >>> handle = get_prosite_raw(\"does_not_exist\")\n Traceback (most recent call last):\n ...\n urllib.error.HTTPError: HTTP Error 404: Not Found\n\n \"\"\"\n url = \"https://prosite.expasy.org/%s.txt\" % id\n return _binary_to_string_handle(_urlopen(url))\n\n\ndef get_sprot_raw(id):\n \"\"\"Get a text handle to a raw SwissProt entry at ExPASy.\n\n For an ID of XXX, fetches http://www.uniprot.org/uniprot/XXX.txt\n (as per the https://www.expasy.org/expasy_urls.html documentation).\n\n >>> from Bio import ExPASy\n >>> from Bio import SwissProt\n >>> with ExPASy.get_sprot_raw(\"O23729\") as handle:\n ... record = SwissProt.read(handle)\n ...\n >>> print(record.entry_name)\n CHS3_BROFI\n\n For a non-existing identifier, UniProt returns an error:\n\n >>> ExPASy.get_sprot_raw(\"DOES_NOT_EXIST\")\n Traceback (most recent call last):\n ...\n urllib.error.HTTPError: HTTP Error 404: Not Found\n\n \"\"\"\n url = \"http://www.uniprot.org/uniprot/%s.txt\" % id\n return _binary_to_string_handle(_urlopen(url))\n\n\ndef sprot_search_ful(text, make_wild=None, swissprot=1, trembl=None,\n cgi='https://www.expasy.org/cgi-bin/sprot-search-ful'):\n \"\"\"Search SwissProt by full text (BROKEN).\"\"\"\n variables = {'SEARCH': text}\n if make_wild:\n variables['makeWild'] = 'on'\n if swissprot:\n variables['S'] = 'on'\n if trembl:\n variables['T'] = 'on'\n options = _urlencode(variables)\n fullcgi = \"%s?%s\" % (cgi, options)\n handle = _binary_to_string_handle(_urlopen(fullcgi))\n return handle\n\n\ndef sprot_search_de(text, swissprot=1, trembl=None,\n cgi='https://www.expasy.org/cgi-bin/sprot-search-de'):\n \"\"\"Search SwissProt (BROKEN).\n\n Search by name, description, gene name, species, or organelle.\n \"\"\"\n variables = {'SEARCH': text}\n if swissprot:\n variables['S'] = 'on'\n if trembl:\n variables['T'] = 'on'\n options = _urlencode(variables)\n fullcgi = \"%s?%s\" % (cgi, options)\n return _binary_to_string_handle(_urlopen(fullcgi))\n", "path": "Bio/ExPASy/__init__.py"}]} | 2,321 | 524 |
gh_patches_debug_2037 | rasdani/github-patches | git_diff | Lightning-AI__torchmetrics-2346 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
High memory usage of Perplexity metric
## 🐛 Bug
I ran out of memory (GPU) when computing the perplexity metric and would like to propose a small optimization to decrease its memory utilization.
### To Reproduce
For instance, when running the following code, PyTorch tries to allocate 1024 GB of GPU memory on my system.
```py
from torchmetrics.text import Perplexity
import torch
gen = torch.manual_seed(42)
preds = torch.rand(512, 1024, 12, generator=gen).cuda()
target = torch.randint(12, (512, 1024), generator=gen).cuda()
perp = Perplexity().cuda()
print(perp(preds, target))
```
### Memory Inefficiency
I think the inefficiency is in this line:
https://github.com/Lightning-AI/torchmetrics/blob/a68455afb9041d1d32c1d6546897fee416abdc41/src/torchmetrics/functional/text/perplexity.py#L94
`probs[:, target]` results in a large temporary tensor with `(512*1024)^2` elements. Afterwards only the diagonal values are used.
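For scale, that temporary's size can be checked in a couple of lines; assuming 4-byte float32 probabilities it comes out to exactly the 1024 GB mentioned above (a rough back-of-the-envelope sketch):

```python
# Size of the temporary created by probs[:, target] for the example above.
n = 512 * 1024               # flattened batch_size * seq_len
elements = n * n             # probs[:, target] has (512*1024)^2 entries
bytes_needed = elements * 4  # float32 -> 4 bytes per element
print(bytes_needed / 1024**3)  # 1024.0 (GiB)
```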
### Potential Solution
In contrast
```
probs = probs[torch.arange(target.numel()), target][mask]
```
would only require memory of the size of target.
Would you consider accepting a pull request with this optimization? Or was the previous implementation chosen for another reason?
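A quick sanity check that the proposed indexing matches the current diagonal-based result (a small sketch, not taken from the issue):

```python
import torch

probs = torch.rand(6, 5)            # stand-in for softmaxed predictions
target = torch.randint(5, (6,))     # flattened targets
mask = torch.ones_like(target, dtype=torch.bool)

old = probs[:, target].diagonal()[mask]                   # current behaviour
new = probs[torch.arange(target.numel()), target][mask]   # proposed change
assert torch.allclose(old, new)
```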
### Environment
- TorchMetrics v1.2.1 (installed with pip) and Master branch.
- Python 3.10.12
- Pytorch 2.2.0
- CUDA 12.1
</issue>
<code>
[start of src/torchmetrics/functional/text/perplexity.py]
1 # Copyright The Lightning team.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from typing import Optional, Tuple
16
17 import torch
18 from torch import Tensor
19
20
21 def _check_shape_and_type_consistency(preds: Tensor, target: Tensor) -> None:
22 """Check shape and type consistency of input vectors.
23
24 Args:
25 preds:
26 Logits or a unnormalized score assigned to each token in a sequence with shape [batch_size, seq_len,
27 vocab_size]. Scores will be normalized internally using softmax.
28 target:
29 Ground truth values with a shape [batch_size, seq_len].
30
31 Raises:
32 ValueError:
33 If ``preds`` tensor has no 3 dimensions.
34 ValueError:
35 If ``target`` tensor has no 2 dimensions.
36 ValueError:
37 If the first two dimensions of ``preds`` and ``target`` do not equal.
38 TypeError:
39 If ``preds`` dtype is not one of ``(torch.float16, torch.float32, torch.float64)``
40 TypeError:
41 If ``target`` is not of a type LongTensor (torch.int64)
42
43 """
44 if len(preds.shape) != 3:
45 raise ValueError(
46 "Input tensor `preds` is expected to have 3 dimensions, [batch_size, seq_len, vocab_size],"
47 f" but got {len(preds.shape)}."
48 )
49 if len(target.shape) != 2:
50 raise ValueError(
51 "Input tensor `target` is expected to have 2 dimensions, [batch_size, seq_len],"
52 f" but got {len(target.shape)}."
53 )
54 if preds.shape[:2] != target.shape:
55 raise ValueError(
56 "Input tensors `preds` and `target` are expected to have equaling first two dimensions,"
57 f" [batch_size, seq_len], but got {preds.shape[:2]} and {target.shape}."
58 )
59 if not preds.is_floating_point():
60 raise TypeError(f"Input tensor `preds` is expected to be of floating point type but got {preds.dtype}.")
61 if target.dtype != torch.int64:
62 raise TypeError(f"Input tensor `target` is expected to be of a type {torch.int64} but got {target.dtype}.")
63
64
65 def _perplexity_update(preds: Tensor, target: Tensor, ignore_index: Optional[int] = None) -> Tuple[Tensor, Tensor]:
66 """Compute intermediate statistics for Perplexity.
67
68 Args:
69 preds:
70 Logits or a unnormalized score assigned to each token in a sequence with shape [batch_size, seq_len,
71 vocab_size]. Scores will be normalized internally using softmax.
72 target:
73 Ground truth values with a shape [batch_size, seq_len].
74 ignore_index:
75 Integer specifying a target class to ignore. If given, this class index does not contribute
76 to the returned score.
77
78 Returns:
79 Log probabilities, summed over all samples
80 Number of samples
81
82 """
83 _check_shape_and_type_consistency(preds, target)
84
85 probs = torch.nn.functional.softmax(preds.reshape(-1, preds.shape[-1]), dim=1)
86 target = target.reshape(-1)
87
88 if ignore_index is not None:
89 mask = target.ne(ignore_index)
90 target = target.where(target != ignore_index, torch.tensor(0, device=target.device))
91 else:
92 mask = torch.ones_like(target, dtype=torch.bool)
93
94 probs = probs[:, target].diagonal()[mask]
95 total_log_probs = -probs.log().sum()
96 count = mask.sum()
97
98 return total_log_probs, count
99
100
101 def _perplexity_compute(total: Tensor, count: Tensor) -> Tensor:
102 """Compute the Perplexity.
103
104 Args:
105 total: Log probabilities, summed over all samples
106 count: Number of samples
107 Returns:
108 Perplexity
109
110 """
111 return torch.exp(total / count)
112
113
114 def perplexity(preds: Tensor, target: Tensor, ignore_index: Optional[int] = None) -> Tensor:
115 """Perplexity measures how well a language model predicts a text sample.
116
117 This metric is calculated as the average number of bits per word a model needs to represent the sample.
118
119 Args:
120 preds:
121 Logits or a unnormalized score assigned to each token in a sequence with shape [batch_size, seq_len,
122 vocab_size], which is the output of a language model. Scores will be normalized internally using softmax.
123 target:
124 Ground truth values with a shape [batch_size, seq_len].
125 ignore_index:
126 Integer specifying a target class to ignore. If given, this class index does not contribute
127 to the returned score.
128
129 Returns:
130 Perplexity value
131
132 Examples:
133 >>> import torch
134 >>> gen = torch.manual_seed(42)
135 >>> preds = torch.rand(2, 8, 5, generator=gen)
136 >>> target = torch.randint(5, (2, 8), generator=gen)
137 >>> target[0, 6:] = -100
138 >>> perplexity(preds, target, ignore_index=-100)
139 tensor(5.8540)
140
141 """
142 total, count = _perplexity_update(preds, target, ignore_index)
143 return _perplexity_compute(total, count)
144
[end of src/torchmetrics/functional/text/perplexity.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/torchmetrics/functional/text/perplexity.py b/src/torchmetrics/functional/text/perplexity.py
--- a/src/torchmetrics/functional/text/perplexity.py
+++ b/src/torchmetrics/functional/text/perplexity.py
@@ -91,7 +91,7 @@
else:
mask = torch.ones_like(target, dtype=torch.bool)
- probs = probs[:, target].diagonal()[mask]
+ probs = probs[torch.arange(target.numel()), target][mask]
total_log_probs = -probs.log().sum()
count = mask.sum()
| {"golden_diff": "diff --git a/src/torchmetrics/functional/text/perplexity.py b/src/torchmetrics/functional/text/perplexity.py\n--- a/src/torchmetrics/functional/text/perplexity.py\n+++ b/src/torchmetrics/functional/text/perplexity.py\n@@ -91,7 +91,7 @@\n else:\n mask = torch.ones_like(target, dtype=torch.bool)\n \n- probs = probs[:, target].diagonal()[mask]\n+ probs = probs[torch.arange(target.numel()), target][mask]\n total_log_probs = -probs.log().sum()\n count = mask.sum()\n", "issue": "High memory usage of Perplexity metric\n## \ud83d\udc1b Bug\r\n\r\nI ran out of memory (GPU) when computing the perplexity metric and would like to propose a small optimization to decrease its memory utilization.\r\n\r\n### To Reproduce\r\n\r\nFor instance, when running the following code PyTorch tries to allocate 1024 GB of GPU memory on my system.\r\n```py\r\nfrom torchmetrics.text import Perplexity\r\nimport torch\r\n\r\ngen = torch.manual_seed(42)\r\npreds = torch.rand(512, 1024, 12, generator=gen).cuda()\r\ntarget = torch.randint(12, (512, 1024), generator=gen).cuda()\r\n\r\nperp = Perplexity().cuda()\r\nprint(perp(preds, target))\r\n```\r\n\r\n### Memory Inefficiency\r\n\r\nI think the inefficiency is in this line:\r\n\r\nhttps://github.com/Lightning-AI/torchmetrics/blob/a68455afb9041d1d32c1d6546897fee416abdc41/src/torchmetrics/functional/text/perplexity.py#L94\r\n\r\n`probs[:, target]` results in a large temporary tensor with `(512*1024)^2` elements. Afterwards only the diagonal values are used. \r\n\r\n### Potential Solution\r\n\r\nIn contrast\r\n\r\n```\r\nprobs = probs[torch.arange(target.numel()), target][mask]\r\n```\r\n\r\nwould only require memory of the size of target. \r\n\r\nWould you consider accepting a pull request with this optimization? Or was the previous implementation chosen for another reason?\r\n\r\n### Environment\r\n\r\n- TorchMetrics v1.2.1 (installed with pip) and Master branch.\r\n- Python 3.10.12\r\n- Pytorch 2.2.0\r\n- CUDA 12.1\n", "before_files": [{"content": "# Copyright The Lightning team.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Optional, Tuple\n\nimport torch\nfrom torch import Tensor\n\n\ndef _check_shape_and_type_consistency(preds: Tensor, target: Tensor) -> None:\n \"\"\"Check shape and type consistency of input vectors.\n\n Args:\n preds:\n Logits or a unnormalized score assigned to each token in a sequence with shape [batch_size, seq_len,\n vocab_size]. 
Scores will be normalized internally using softmax.\n target:\n Ground truth values with a shape [batch_size, seq_len].\n\n Raises:\n ValueError:\n If ``preds`` tensor has no 3 dimensions.\n ValueError:\n If ``target`` tensor has no 2 dimensions.\n ValueError:\n If the first two dimensions of ``preds`` and ``target`` do not equal.\n TypeError:\n If ``preds`` dtype is not one of ``(torch.float16, torch.float32, torch.float64)``\n TypeError:\n If ``target`` is not of a type LongTensor (torch.int64)\n\n \"\"\"\n if len(preds.shape) != 3:\n raise ValueError(\n \"Input tensor `preds` is expected to have 3 dimensions, [batch_size, seq_len, vocab_size],\"\n f\" but got {len(preds.shape)}.\"\n )\n if len(target.shape) != 2:\n raise ValueError(\n \"Input tensor `target` is expected to have 2 dimensions, [batch_size, seq_len],\"\n f\" but got {len(target.shape)}.\"\n )\n if preds.shape[:2] != target.shape:\n raise ValueError(\n \"Input tensors `preds` and `target` are expected to have equaling first two dimensions,\"\n f\" [batch_size, seq_len], but got {preds.shape[:2]} and {target.shape}.\"\n )\n if not preds.is_floating_point():\n raise TypeError(f\"Input tensor `preds` is expected to be of floating point type but got {preds.dtype}.\")\n if target.dtype != torch.int64:\n raise TypeError(f\"Input tensor `target` is expected to be of a type {torch.int64} but got {target.dtype}.\")\n\n\ndef _perplexity_update(preds: Tensor, target: Tensor, ignore_index: Optional[int] = None) -> Tuple[Tensor, Tensor]:\n \"\"\"Compute intermediate statistics for Perplexity.\n\n Args:\n preds:\n Logits or a unnormalized score assigned to each token in a sequence with shape [batch_size, seq_len,\n vocab_size]. Scores will be normalized internally using softmax.\n target:\n Ground truth values with a shape [batch_size, seq_len].\n ignore_index:\n Integer specifying a target class to ignore. If given, this class index does not contribute\n to the returned score.\n\n Returns:\n Log probabilities, summed over all samples\n Number of samples\n\n \"\"\"\n _check_shape_and_type_consistency(preds, target)\n\n probs = torch.nn.functional.softmax(preds.reshape(-1, preds.shape[-1]), dim=1)\n target = target.reshape(-1)\n\n if ignore_index is not None:\n mask = target.ne(ignore_index)\n target = target.where(target != ignore_index, torch.tensor(0, device=target.device))\n else:\n mask = torch.ones_like(target, dtype=torch.bool)\n\n probs = probs[:, target].diagonal()[mask]\n total_log_probs = -probs.log().sum()\n count = mask.sum()\n\n return total_log_probs, count\n\n\ndef _perplexity_compute(total: Tensor, count: Tensor) -> Tensor:\n \"\"\"Compute the Perplexity.\n\n Args:\n total: Log probabilities, summed over all samples\n count: Number of samples\n Returns:\n Perplexity\n\n \"\"\"\n return torch.exp(total / count)\n\n\ndef perplexity(preds: Tensor, target: Tensor, ignore_index: Optional[int] = None) -> Tensor:\n \"\"\"Perplexity measures how well a language model predicts a text sample.\n\n This metric is calculated as the average number of bits per word a model needs to represent the sample.\n\n Args:\n preds:\n Logits or a unnormalized score assigned to each token in a sequence with shape [batch_size, seq_len,\n vocab_size], which is the output of a language model. Scores will be normalized internally using softmax.\n target:\n Ground truth values with a shape [batch_size, seq_len].\n ignore_index:\n Integer specifying a target class to ignore. 
If given, this class index does not contribute\n to the returned score.\n\n Returns:\n Perplexity value\n\n Examples:\n >>> import torch\n >>> gen = torch.manual_seed(42)\n >>> preds = torch.rand(2, 8, 5, generator=gen)\n >>> target = torch.randint(5, (2, 8), generator=gen)\n >>> target[0, 6:] = -100\n >>> perplexity(preds, target, ignore_index=-100)\n tensor(5.8540)\n\n \"\"\"\n total, count = _perplexity_update(preds, target, ignore_index)\n return _perplexity_compute(total, count)\n", "path": "src/torchmetrics/functional/text/perplexity.py"}]} | 2,518 | 131 |
gh_patches_debug_21870 | rasdani/github-patches | git_diff | scrapy__scrapy-4298 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
SCRAPY_CHECK is not set while running contract
### Description
Hi, it seems that #3739 is not doing what the [documentation describes](https://docs.scrapy.org/en/latest/topics/contracts.html#detecting-check-runs):
`os.environ.get('SCRAPY_CHECK')` is returning `None` in my contract check.
### Steps to Reproduce
1. Create a project from scratch
2. Add a random spider
3. Contract code is as follows:
```
def parse(self, response):
"""
@url http://www.amazon.com/s?field-keywords=selfish+gene
@returns requests 1 1
"""
print("test", os.environ.get('SCRAPY_CHECK'))
if os.environ.get('SCRAPY_CHECK'):
yield scrapy.Request(url="next_url")
```
**Expected behavior:** Request should be yielded as per the documentation
**Actual behavior:** Nothing happens
**Reproduces how often:** In my local project and with a fresh project
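The behaviour is consistent with how the `set_environ` helper used in `check.py` (shown below) scopes the variable: it only exists while the `with` block is active. Roughly, and only as a sketch of the idea rather than Scrapy's exact implementation:

```python
import os
from contextlib import contextmanager


@contextmanager
def set_environ(**kwargs):
    # Temporarily set environment variables, restoring the previous values on exit.
    old = {key: os.environ.get(key) for key in kwargs}
    os.environ.update(kwargs)
    try:
        yield
    finally:
        for key, value in old.items():
            if value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = value

# Any work scheduled outside the `with set_environ(SCRAPY_CHECK='true')` block
# (for example, starting the crawler afterwards) runs once the variable has
# already been removed, which would explain the `None` seen in the contract.
```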
### Versions
Windows
```
(globenv) C:\Users\johnl>scrapy version --verbose
Scrapy : 1.8.0
lxml : 4.4.1.0
libxml2 : 2.9.5
cssselect : 1.1.0
parsel : 1.5.2
w3lib : 1.21.0
Twisted : 19.10.0
Python : 3.7.4 (tags/v3.7.4:e09359112e, Jul 8 2019, 19:29:22) [MSC v.1916 32 bit (Intel)]
pyOpenSSL : 19.0.0 (OpenSSL 1.1.1c 28 May 2019)
cryptography : 2.7
Platform : Windows-10-10.0.18362-SP0
```
Linux
```
scrapy version --verbose
Scrapy : 1.8.0
lxml : 4.4.1.0
libxml2 : 2.9.9
cssselect : 1.1.0
parsel : 1.5.2
w3lib : 1.21.0
Twisted : 19.7.0
Python : 3.6.8 (default, Oct 7 2019, 12:59:55) - [GCC 8.3.0]
pyOpenSSL : 19.0.0 (OpenSSL 1.1.1d 10 Sep 2019)
cryptography : 2.8
Platform : Linux-4.4.0-18362-Microsoft-x86_64-with-Ubuntu-18.04-bionic
```
</issue>
<code>
[start of scrapy/commands/check.py]
1 import time
2 from collections import defaultdict
3 from unittest import TextTestRunner, TextTestResult as _TextTestResult
4
5 from scrapy.commands import ScrapyCommand
6 from scrapy.contracts import ContractsManager
7 from scrapy.utils.misc import load_object, set_environ
8 from scrapy.utils.conf import build_component_list
9
10
11 class TextTestResult(_TextTestResult):
12 def printSummary(self, start, stop):
13 write = self.stream.write
14 writeln = self.stream.writeln
15
16 run = self.testsRun
17 plural = "s" if run != 1 else ""
18
19 writeln(self.separator2)
20 writeln("Ran %d contract%s in %.3fs" % (run, plural, stop - start))
21 writeln()
22
23 infos = []
24 if not self.wasSuccessful():
25 write("FAILED")
26 failed, errored = map(len, (self.failures, self.errors))
27 if failed:
28 infos.append("failures=%d" % failed)
29 if errored:
30 infos.append("errors=%d" % errored)
31 else:
32 write("OK")
33
34 if infos:
35 writeln(" (%s)" % (", ".join(infos),))
36 else:
37 write("\n")
38
39
40 class Command(ScrapyCommand):
41 requires_project = True
42 default_settings = {'LOG_ENABLED': False}
43
44 def syntax(self):
45 return "[options] <spider>"
46
47 def short_desc(self):
48 return "Check spider contracts"
49
50 def add_options(self, parser):
51 ScrapyCommand.add_options(self, parser)
52 parser.add_option("-l", "--list", dest="list", action="store_true",
53 help="only list contracts, without checking them")
54 parser.add_option("-v", "--verbose", dest="verbose", default=False, action='store_true',
55 help="print contract tests for all spiders")
56
57 def run(self, args, opts):
58 # load contracts
59 contracts = build_component_list(self.settings.getwithbase('SPIDER_CONTRACTS'))
60 conman = ContractsManager(load_object(c) for c in contracts)
61 runner = TextTestRunner(verbosity=2 if opts.verbose else 1)
62 result = TextTestResult(runner.stream, runner.descriptions, runner.verbosity)
63
64 # contract requests
65 contract_reqs = defaultdict(list)
66
67 spider_loader = self.crawler_process.spider_loader
68
69 with set_environ(SCRAPY_CHECK='true'):
70 for spidername in args or spider_loader.list():
71 spidercls = spider_loader.load(spidername)
72 spidercls.start_requests = lambda s: conman.from_spider(s, result)
73
74 tested_methods = conman.tested_methods_from_spidercls(spidercls)
75 if opts.list:
76 for method in tested_methods:
77 contract_reqs[spidercls.name].append(method)
78 elif tested_methods:
79 self.crawler_process.crawl(spidercls)
80
81 # start checks
82 if opts.list:
83 for spider, methods in sorted(contract_reqs.items()):
84 if not methods and not opts.verbose:
85 continue
86 print(spider)
87 for method in sorted(methods):
88 print(' * %s' % method)
89 else:
90 start = time.time()
91 self.crawler_process.start()
92 stop = time.time()
93
94 result.printErrors()
95 result.printSummary(start, stop)
96 self.exitcode = int(not result.wasSuccessful())
97
[end of scrapy/commands/check.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/scrapy/commands/check.py b/scrapy/commands/check.py
--- a/scrapy/commands/check.py
+++ b/scrapy/commands/check.py
@@ -78,19 +78,19 @@
elif tested_methods:
self.crawler_process.crawl(spidercls)
- # start checks
- if opts.list:
- for spider, methods in sorted(contract_reqs.items()):
- if not methods and not opts.verbose:
- continue
- print(spider)
- for method in sorted(methods):
- print(' * %s' % method)
- else:
- start = time.time()
- self.crawler_process.start()
- stop = time.time()
-
- result.printErrors()
- result.printSummary(start, stop)
- self.exitcode = int(not result.wasSuccessful())
+ # start checks
+ if opts.list:
+ for spider, methods in sorted(contract_reqs.items()):
+ if not methods and not opts.verbose:
+ continue
+ print(spider)
+ for method in sorted(methods):
+ print(' * %s' % method)
+ else:
+ start = time.time()
+ self.crawler_process.start()
+ stop = time.time()
+
+ result.printErrors()
+ result.printSummary(start, stop)
+ self.exitcode = int(not result.wasSuccessful())
| {"golden_diff": "diff --git a/scrapy/commands/check.py b/scrapy/commands/check.py\n--- a/scrapy/commands/check.py\n+++ b/scrapy/commands/check.py\n@@ -78,19 +78,19 @@\n elif tested_methods:\n self.crawler_process.crawl(spidercls)\n \n- # start checks\n- if opts.list:\n- for spider, methods in sorted(contract_reqs.items()):\n- if not methods and not opts.verbose:\n- continue\n- print(spider)\n- for method in sorted(methods):\n- print(' * %s' % method)\n- else:\n- start = time.time()\n- self.crawler_process.start()\n- stop = time.time()\n-\n- result.printErrors()\n- result.printSummary(start, stop)\n- self.exitcode = int(not result.wasSuccessful())\n+ # start checks\n+ if opts.list:\n+ for spider, methods in sorted(contract_reqs.items()):\n+ if not methods and not opts.verbose:\n+ continue\n+ print(spider)\n+ for method in sorted(methods):\n+ print(' * %s' % method)\n+ else:\n+ start = time.time()\n+ self.crawler_process.start()\n+ stop = time.time()\n+\n+ result.printErrors()\n+ result.printSummary(start, stop)\n+ self.exitcode = int(not result.wasSuccessful())\n", "issue": "SCRAPY_CHECK is not set while running contract\n### Description\r\n\r\nHi, it seems that #3739 is not doing what the [documentation describe](https://docs.scrapy.org/en/latest/topics/contracts.html#detecting-check-runs):\r\n\r\n`os.environ.get('SCRAPY_CHECK')` is returning `None` in my contract check.\r\n\r\n### Steps to Reproduce\r\n\r\n1. Create a project from scratch\r\n2. Add a random spider\r\n3. Contract code is as follow\r\n```\r\n def parse(self, response):\r\n \"\"\"\r\n @url http://www.amazon.com/s?field-keywords=selfish+gene\r\n @returns requests 1 1\r\n \"\"\"\r\n print(\"test\", os.environ.get('SCRAPY_CHECK'))\r\n if os.environ.get('SCRAPY_CHECK'):\r\n yield scrapy.Request(url=\"next_url\")\r\n```\r\n\r\n**Expected behavior:** Request should be yielded as per the documentation\r\n\r\n**Actual behavior:** Nothing happen\r\n\r\n**Reproduces how often:** In my local project and with fresh project\r\n\r\n### Versions\r\n\r\nWindows\r\n```\r\n(globenv) C:\\Users\\johnl>scrapy version --verbose\r\nScrapy : 1.8.0\r\nlxml : 4.4.1.0\r\nlibxml2 : 2.9.5\r\ncssselect : 1.1.0\r\nparsel : 1.5.2\r\nw3lib : 1.21.0\r\nTwisted : 19.10.0\r\nPython : 3.7.4 (tags/v3.7.4:e09359112e, Jul 8 2019, 19:29:22) [MSC v.1916 32 bit (Intel)]\r\npyOpenSSL : 19.0.0 (OpenSSL 1.1.1c 28 May 2019)\r\ncryptography : 2.7\r\nPlatform : Windows-10-10.0.18362-SP0\r\n```\r\n\r\nLinux\r\n```\r\nscrapy version --verbose\r\nScrapy : 1.8.0\r\nlxml : 4.4.1.0\r\nlibxml2 : 2.9.9\r\ncssselect : 1.1.0\r\nparsel : 1.5.2\r\nw3lib : 1.21.0\r\nTwisted : 19.7.0\r\nPython : 3.6.8 (default, Oct 7 2019, 12:59:55) - [GCC 8.3.0]\r\npyOpenSSL : 19.0.0 (OpenSSL 1.1.1d 10 Sep 2019)\r\ncryptography : 2.8\r\nPlatform : Linux-4.4.0-18362-Microsoft-x86_64-with-Ubuntu-18.04-bionic\r\n```\r\n\n", "before_files": [{"content": "import time\nfrom collections import defaultdict\nfrom unittest import TextTestRunner, TextTestResult as _TextTestResult\n\nfrom scrapy.commands import ScrapyCommand\nfrom scrapy.contracts import ContractsManager\nfrom scrapy.utils.misc import load_object, set_environ\nfrom scrapy.utils.conf import build_component_list\n\n\nclass TextTestResult(_TextTestResult):\n def printSummary(self, start, stop):\n write = self.stream.write\n writeln = self.stream.writeln\n\n run = self.testsRun\n plural = \"s\" if run != 1 else \"\"\n\n writeln(self.separator2)\n writeln(\"Ran %d contract%s in %.3fs\" % (run, plural, stop - start))\n writeln()\n\n infos = []\n if 
not self.wasSuccessful():\n write(\"FAILED\")\n failed, errored = map(len, (self.failures, self.errors))\n if failed:\n infos.append(\"failures=%d\" % failed)\n if errored:\n infos.append(\"errors=%d\" % errored)\n else:\n write(\"OK\")\n\n if infos:\n writeln(\" (%s)\" % (\", \".join(infos),))\n else:\n write(\"\\n\")\n\n\nclass Command(ScrapyCommand):\n requires_project = True\n default_settings = {'LOG_ENABLED': False}\n\n def syntax(self):\n return \"[options] <spider>\"\n\n def short_desc(self):\n return \"Check spider contracts\"\n\n def add_options(self, parser):\n ScrapyCommand.add_options(self, parser)\n parser.add_option(\"-l\", \"--list\", dest=\"list\", action=\"store_true\",\n help=\"only list contracts, without checking them\")\n parser.add_option(\"-v\", \"--verbose\", dest=\"verbose\", default=False, action='store_true',\n help=\"print contract tests for all spiders\")\n\n def run(self, args, opts):\n # load contracts\n contracts = build_component_list(self.settings.getwithbase('SPIDER_CONTRACTS'))\n conman = ContractsManager(load_object(c) for c in contracts)\n runner = TextTestRunner(verbosity=2 if opts.verbose else 1)\n result = TextTestResult(runner.stream, runner.descriptions, runner.verbosity)\n\n # contract requests\n contract_reqs = defaultdict(list)\n\n spider_loader = self.crawler_process.spider_loader\n\n with set_environ(SCRAPY_CHECK='true'):\n for spidername in args or spider_loader.list():\n spidercls = spider_loader.load(spidername)\n spidercls.start_requests = lambda s: conman.from_spider(s, result)\n\n tested_methods = conman.tested_methods_from_spidercls(spidercls)\n if opts.list:\n for method in tested_methods:\n contract_reqs[spidercls.name].append(method)\n elif tested_methods:\n self.crawler_process.crawl(spidercls)\n\n # start checks\n if opts.list:\n for spider, methods in sorted(contract_reqs.items()):\n if not methods and not opts.verbose:\n continue\n print(spider)\n for method in sorted(methods):\n print(' * %s' % method)\n else:\n start = time.time()\n self.crawler_process.start()\n stop = time.time()\n\n result.printErrors()\n result.printSummary(start, stop)\n self.exitcode = int(not result.wasSuccessful())\n", "path": "scrapy/commands/check.py"}]} | 2,107 | 309 |
gh_patches_debug_27118 | rasdani/github-patches | git_diff | google__flax-1570 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
PReLU activation implementation
I wanted to gauge interest in adding a PReLU activation. I noticed that `flax.linen.activations` simply aliases the `jax.nn` activation functions, and `jax.nn` doesn't have a PReLU implementation either.
To add some background, PReLU is simply Leaky ReLU where the alpha (slope) parameter is trainable and not fixed. This makes it simple to implement as a Module if desired.
Here's an example implementation from another [project](https://github.com/isaaccorley/jax-enhance) of mine.
```python
from functools import partial
from typing import Any, Sequence
import jax.numpy as jnp
import flax.linen as nn
# This is nearly identical to jnp.ones however multiplies the output of jnp.ones by the constant value
def constant(key, shape: Sequence[int], value: Any, dtype: Any = jnp.float32) -> jnp.ndarray:
value = jnp.asarray(value, dtype)
return jnp.ones(shape, dtype) * value
class PReLU(nn.Module):
negative_slope_init: float = 0.01
dtype: Any = jnp.float32
@nn.compact
def __call__(self, x: jnp.ndarray) -> jnp.ndarray:
x = jnp.asarray(x, self.dtype)
negative_slope = self.param(
"negative_slope",
partial(constant, value=self.negative_slope_init, dtype=self.dtype),
(1,)
)
return jnp.where(x >= 0, x, negative_slope * x)
```
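For completeness, a module like the one above would be exercised through the usual Flax init/apply cycle; a small usage sketch, assuming the `PReLU` class defined above:

```python
import jax
import jax.numpy as jnp

x = jnp.linspace(-1.0, 1.0, 5)
model = PReLU(negative_slope_init=0.1)
params = model.init(jax.random.PRNGKey(0), x)  # creates the trainable slope parameter
y = model.apply(params, x)                     # slope-scaled for x < 0, identity otherwise
```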
</issue>
<code>
[start of flax/linen/__init__.py]
1 # Copyright 2021 The Flax Authors.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """The Flax Module system."""
16
17
18 # pylint: disable=g-multiple-import
19 # re-export commonly used modules and functions
20 from .activation import (celu, elu, gelu, glu, leaky_relu, log_sigmoid,
21 log_softmax, relu, sigmoid, soft_sign, softmax,
22 softplus, swish, silu, tanh)
23 from .attention import (MultiHeadDotProductAttention, SelfAttention,
24 dot_product_attention, make_attention_mask,
25 make_causal_mask, combine_masks)
26 from ..core import broadcast, DenyList, FrozenDict
27 from .linear import Conv, ConvTranspose, Dense, DenseGeneral, Embed
28 from .module import (Module, compact, nowrap, enable_named_call,
29 disable_named_call, override_named_call, Variable, init,
30 init_with_output, apply, merge_param)
31 from .normalization import BatchNorm, GroupNorm, LayerNorm
32 from .pooling import avg_pool, max_pool
33 from .recurrent import GRUCell, LSTMCell, ConvLSTM, OptimizedLSTMCell
34 from .stochastic import Dropout
35 from .transforms import jit, named_call, checkpoint, remat, remat_scan, scan, vmap, map_variables, vjp, jvp
36 from .initializers import zeros, ones
37
38 # pylint: enable=g-multiple-import
39
[end of flax/linen/__init__.py]
[start of flax/linen/activation.py]
1 # Copyright 2021 The Flax Authors.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Activation functions.
16 """
17
18 # pylint: disable=unused-import
19 # re-export activation functions from jax.nn
20 from jax.nn import celu
21 from jax.nn import elu
22 from jax.nn import gelu
23 from jax.nn import glu
24 from jax.nn import leaky_relu
25 from jax.nn import log_sigmoid
26 from jax.nn import log_softmax
27 from jax.nn import normalize
28 from jax.nn import relu
29 from jax.nn import sigmoid
30 from jax.nn import soft_sign
31 from jax.nn import softmax
32 from jax.nn import softplus
33 from jax.nn import swish
34 from jax.nn import silu
35 from jax.nn import selu
36 from jax.nn import hard_tanh
37 from jax.nn import relu6
38 from jax.nn import hard_sigmoid
39 from jax.nn import hard_swish
40
41 from jax.numpy import tanh
42 # pylint: enable=unused-import
43
[end of flax/linen/activation.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/flax/linen/__init__.py b/flax/linen/__init__.py
--- a/flax/linen/__init__.py
+++ b/flax/linen/__init__.py
@@ -19,7 +19,7 @@
# re-export commonly used modules and functions
from .activation import (celu, elu, gelu, glu, leaky_relu, log_sigmoid,
log_softmax, relu, sigmoid, soft_sign, softmax,
- softplus, swish, silu, tanh)
+ softplus, swish, silu, tanh, PReLU)
from .attention import (MultiHeadDotProductAttention, SelfAttention,
dot_product_attention, make_attention_mask,
make_causal_mask, combine_masks)
diff --git a/flax/linen/activation.py b/flax/linen/activation.py
--- a/flax/linen/activation.py
+++ b/flax/linen/activation.py
@@ -40,3 +40,35 @@
from jax.numpy import tanh
# pylint: enable=unused-import
+
+from typing import Any
+
+from flax.linen.module import Module, compact
+import jax.numpy as jnp
+
+
+Array = Any
+
+
+class PReLU(Module):
+ """Parametric Rectified Linear Unit (PReLU) activation function.
+
+ Attributes:
+ negative_slope_init: the value to initialize the negative slope.
+ """
+ negative_slope_init: float = 0.01
+ @compact
+ def __call__(self, inputs: Array) -> Array:
+ """Applies an activation to the inputs.
+
+ Args:
+ inputs: the nd-array to apply the activation function to.
+
+ Returns:
+ The transformed input.
+ """
+ negative_slope = self.param(
+ 'negative_slope',
+ lambda k: jnp.asarray(self.negative_slope_init, jnp.float32)
+ )
+ return jnp.where(inputs >= 0, inputs, jnp.asarray(negative_slope, inputs.dtype) * inputs)
| {"golden_diff": "diff --git a/flax/linen/__init__.py b/flax/linen/__init__.py\n--- a/flax/linen/__init__.py\n+++ b/flax/linen/__init__.py\n@@ -19,7 +19,7 @@\n # re-export commonly used modules and functions\n from .activation import (celu, elu, gelu, glu, leaky_relu, log_sigmoid,\n log_softmax, relu, sigmoid, soft_sign, softmax,\n- softplus, swish, silu, tanh)\n+ softplus, swish, silu, tanh, PReLU)\n from .attention import (MultiHeadDotProductAttention, SelfAttention,\n dot_product_attention, make_attention_mask,\n make_causal_mask, combine_masks)\ndiff --git a/flax/linen/activation.py b/flax/linen/activation.py\n--- a/flax/linen/activation.py\n+++ b/flax/linen/activation.py\n@@ -40,3 +40,35 @@\n \n from jax.numpy import tanh\n # pylint: enable=unused-import\n+\n+from typing import Any\n+\n+from flax.linen.module import Module, compact\n+import jax.numpy as jnp\n+\n+\n+Array = Any\n+\n+\n+class PReLU(Module):\n+ \"\"\"Parametric Rectified Linear Unit (PReLU) activation function.\n+\n+ Attributes:\n+ negative_slope_init: the value to initialize the negative slope.\n+ \"\"\"\n+ negative_slope_init: float = 0.01\n+ @compact\n+ def __call__(self, inputs: Array) -> Array:\n+ \"\"\"Applies an activation to the inputs.\n+\n+ Args:\n+ inputs: the nd-array to apply the activation function to.\n+\n+ Returns:\n+ The transformed input.\n+ \"\"\"\n+ negative_slope = self.param(\n+ 'negative_slope',\n+ lambda k: jnp.asarray(self.negative_slope_init, jnp.float32)\n+ )\n+ return jnp.where(inputs >= 0, inputs, jnp.asarray(negative_slope, inputs.dtype) * inputs)\n", "issue": "PReLU activation implementation\nI wanted to gauge interest on adding a PReLU activation. I noticed that `flax.linen.activations` are simply aliasing `jax.nn` activation functions which also doesn't have a PReLU implementation.\r\n\r\nTo add some background, PReLU is simply Leaky ReLU where the alpha (slope) parameter is trainable and not fixed. This makes it simple to implement as a Module if desired.\r\n\r\nHere's an example implementation from another [project](https://github.com/isaaccorley/jax-enhance) of mine. 
\r\n\r\n```python\r\nfrom functools import partial\r\nfrom typing import Any, Sequence\r\n\r\nimport jax.numpy as jnp\r\nimport flax.linen as nn\r\n\r\n\r\n# This is nearly identical to jnp.ones however multiplies the output of jnp.ones by the constant value\r\ndef constant(key, shape: Sequence[int], value: Any, dtype: Any = jnp.float32) -> jnp.ndarray:\r\n value = jnp.asarray(value, dtype)\r\n return jnp.ones(shape, dtype) * value\r\n\r\n\r\nclass PReLU(nn.Module):\r\n negative_slope_init: float = 0.01\r\n dtype: Any = jnp.float32\r\n\r\n @nn.compact\r\n def __call__(self, x: jnp.ndarray) -> jnp.ndarray:\r\n x = jnp.asarray(x, self.dtype)\r\n negative_slope = self.param(\r\n \"negative_slope\",\r\n partial(constant, value=self.negative_slope_init, dtype=self.dtype),\r\n (1,)\r\n )\r\n return jnp.where(x >= 0, x, negative_slope * x)\r\n```\n", "before_files": [{"content": "# Copyright 2021 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"The Flax Module system.\"\"\"\n\n\n# pylint: disable=g-multiple-import\n# re-export commonly used modules and functions\nfrom .activation import (celu, elu, gelu, glu, leaky_relu, log_sigmoid,\n log_softmax, relu, sigmoid, soft_sign, softmax,\n softplus, swish, silu, tanh)\nfrom .attention import (MultiHeadDotProductAttention, SelfAttention,\n dot_product_attention, make_attention_mask,\n make_causal_mask, combine_masks)\nfrom ..core import broadcast, DenyList, FrozenDict\nfrom .linear import Conv, ConvTranspose, Dense, DenseGeneral, Embed\nfrom .module import (Module, compact, nowrap, enable_named_call,\n disable_named_call, override_named_call, Variable, init,\n init_with_output, apply, merge_param)\nfrom .normalization import BatchNorm, GroupNorm, LayerNorm\nfrom .pooling import avg_pool, max_pool\nfrom .recurrent import GRUCell, LSTMCell, ConvLSTM, OptimizedLSTMCell\nfrom .stochastic import Dropout\nfrom .transforms import jit, named_call, checkpoint, remat, remat_scan, scan, vmap, map_variables, vjp, jvp\nfrom .initializers import zeros, ones\n\n# pylint: enable=g-multiple-import\n", "path": "flax/linen/__init__.py"}, {"content": "# Copyright 2021 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Activation functions.\n\"\"\"\n\n# pylint: disable=unused-import\n# re-export activation functions from jax.nn\nfrom jax.nn import celu\nfrom jax.nn import elu\nfrom jax.nn import gelu\nfrom jax.nn import glu\nfrom jax.nn import leaky_relu\nfrom jax.nn import log_sigmoid\nfrom jax.nn import log_softmax\nfrom 
jax.nn import normalize\nfrom jax.nn import relu\nfrom jax.nn import sigmoid\nfrom jax.nn import soft_sign\nfrom jax.nn import softmax\nfrom jax.nn import softplus\nfrom jax.nn import swish\nfrom jax.nn import silu\nfrom jax.nn import selu\nfrom jax.nn import hard_tanh\nfrom jax.nn import relu6\nfrom jax.nn import hard_sigmoid\nfrom jax.nn import hard_swish\n\nfrom jax.numpy import tanh\n# pylint: enable=unused-import\n", "path": "flax/linen/activation.py"}]} | 1,797 | 466 |
gh_patches_debug_33098 | rasdani/github-patches | git_diff | gammapy__gammapy-3206 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Label issue in background rate figures
**Gammapy version**
0.18.2
**Bug description**
The label of the background rate figure seems wrong.
See the documentation, cell 24, bottom left figure:
https://docs.gammapy.org/0.18.2/tutorials/cta.html?highlight=cta#IRFs
The labels are all identical: `offset = 0.2 deg`.
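Purely as an illustration of the expected behaviour (not the actual gammapy plotting code): identical legend entries typically appear when the label is not built from the per-curve loop variable. A minimal matplotlib sketch of what the panel should do:

```python
import numpy as np
import matplotlib.pyplot as plt

energy = np.logspace(-2, 2, 50)      # fake energy axis
offsets = [0.2, 1.0, 2.0, 3.0]       # deg

for offset in offsets:
    rate = energy ** (-2.7) * np.exp(-offset)               # fake background rate
    plt.plot(energy, rate, label=f"offset = {offset} deg")  # label built per curve

plt.loglog()
plt.legend()
```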
</issue>
<code>
[start of gammapy/irf/background.py]
1 # Licensed under a 3-clause BSD style license - see LICENSE.rst
2 import logging
3 import numpy as np
4 import astropy.units as u
5 from gammapy.maps import MapAxes, MapAxis
6 from gammapy.utils.integrate import trapz_loglog
7 from .core import IRF
8
9 __all__ = ["Background3D", "Background2D"]
10
11 log = logging.getLogger(__name__)
12
13
14 class BackgroundIRF(IRF):
15 """Background IRF base class"""
16 default_interp_kwargs = dict(
17 bounds_error=False, fill_value=None, values_scale="log"
18 )
19 """Default Interpolation kwargs to extrapolate."""
20
21 @classmethod
22 def from_table(cls, table, format="gadf-dl3"):
23 """Read from `~astropy.table.Table`.
24
25 Parameters
26 ----------
27 table : `~astropy.table.Table`
28 Table with background data
29 format : {"gadf-dl3"}
30 Format specification
31
32 Returns
33 -------
34 bkg : `Background2D` or `Background2D`
35 Background IRF class.
36 """
37 axes = MapAxes.from_table(table, format=format)[cls.required_axes]
38
39 # Spec says key should be "BKG", but there are files around
40 # (e.g. CTA 1DC) that use "BGD". For now we support both
41 if "BKG" in table.colnames:
42 bkg_name = "BKG"
43 elif "BGD" in table.colnames:
44 bkg_name = "BGD"
45 else:
46 raise ValueError('Invalid column names. Need "BKG" or "BGD".')
47
48 data = table[bkg_name].quantity[0].T
49
50 if data.unit == "" or isinstance(data.unit, u.UnrecognizedUnit):
51 data = u.Quantity(data.value, "s-1 MeV-1 sr-1", copy=False)
52 log.warning(
53 "Invalid unit found in background table! Assuming (s-1 MeV-1 sr-1)"
54 )
55
56 # TODO: The present HESS and CTA backgroundfits files
57 # have a reverse order (lon, lat, E) than recommened in GADF(E, lat, lon)
58 # For now, we suport both.
59
60 if axes.shape == axes.shape[::-1]:
61 log.error("Ambiguous axes order in Background fits files!")
62
63 if np.shape(data) != axes.shape:
64 log.debug("Transposing background table on read")
65 data = data.transpose()
66
67 return cls(
68 axes=axes,
69 data=data.value,
70 meta=table.meta,
71 unit=data.unit
72 )
73
74
75 class Background3D(BackgroundIRF):
76 """Background 3D.
77
78 Data format specification: :ref:`gadf:bkg_3d`
79
80 Parameters
81 ----------
82 axes : list of `MapAxis` or `MapAxes` object
83 Required data axes: ["energy", "fov_lon", "fov_lat"] in the given order.
84 data : `~np.ndarray`
85 Data array.
86 unit : str or `~astropy.units.Unit`
87 Data unit usuually ``s^-1 MeV^-1 sr^-1``
88 meta : dict
89 Meta data
90
91 Examples
92 --------
93 Here's an example you can use to learn about this class:
94
95 >>> from gammapy.irf import Background3D
96 >>> filename = '$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits'
97 >>> bkg_3d = Background3D.read(filename, hdu='BACKGROUND')
98 >>> print(bkg_3d)
99 Background3D
100 ------------
101 <BLANKLINE>
102 axes : ['energy', 'fov_lon', 'fov_lat']
103 shape : (21, 36, 36)
104 ndim : 3
105 unit : 1 / (MeV s sr)
106 dtype : >f4
107 <BLANKLINE>
108
109 """
110 tag = "bkg_3d"
111 required_axes = ["energy", "fov_lon", "fov_lat"]
112
113 def to_2d(self):
114 """Convert to `Background2D`.
115
116 This takes the values at Y = 0 and X >= 0.
117 """
118 # TODO: this is incorrect as it misses the Jacobian?
119 idx_lon = self.axes["fov_lon"].coord_to_idx(0 * u.deg)[0]
120 idx_lat = self.axes["fov_lat"].coord_to_idx(0 * u.deg)[0]
121 data = self.quantity[:, idx_lon:, idx_lat].copy()
122
123 offset = self.axes["fov_lon"].edges[idx_lon:]
124 offset_axis = MapAxis.from_edges(offset, name="offset")
125
126 return Background2D(
127 axes=[self.axes["energy"], offset_axis], data=data.value, unit=data.unit
128 )
129
130 def peek(self, figsize=(10, 8)):
131 return self.to_2d().peek(figsize)
132
133
134 class Background2D(BackgroundIRF):
135 """Background 2D.
136
137 Data format specification: :ref:`gadf:bkg_2d`
138
139 Parameters
140 ----------
141 axes : list of `MapAxis` or `MapAxes` object
142 Required data axes: ["energy", "offset"] in the given order.
143 data : `~np.ndarray`
144 Data array.
145 unit : str or `~astropy.units.Unit`
146 Data unit usually ``s^-1 MeV^-1 sr^-1``
147 meta : dict
148 Meta data
149 """
150
151 tag = "bkg_2d"
152 required_axes = ["energy", "offset"]
153 default_interp_kwargs = dict(bounds_error=False, fill_value=None)
154 """Default Interpolation kwargs."""
155
156 def plot(self, ax=None, add_cbar=True, **kwargs):
157 """Plot energy offset dependence of the background model.
158 """
159 import matplotlib.pyplot as plt
160 from matplotlib.colors import LogNorm
161
162 ax = plt.gca() if ax is None else ax
163
164 x = self.axes["energy"].edges.to_value("TeV")
165 y = self.axes["offset"].edges.to_value("deg")
166 z = self.quantity.T.value
167
168 kwargs.setdefault("cmap", "GnBu")
169 kwargs.setdefault("edgecolors", "face")
170
171 caxes = ax.pcolormesh(x, y, z, norm=LogNorm(), **kwargs)
172 ax.set_xscale("log")
173 ax.set_ylabel(f"Offset (deg)")
174 ax.set_xlabel(f"Energy (TeV)")
175
176 xmin, xmax = x.min(), x.max()
177 ax.set_xlim(xmin, xmax)
178
179 if add_cbar:
180 label = f"Background rate ({self.unit})"
181 ax.figure.colorbar(caxes, ax=ax, label=label)
182
183 def plot_offset_dependence(self, ax=None, energy=None, **kwargs):
184 """Plot background rate versus offset for a given energy.
185
186 Parameters
187 ----------
188 ax : `~matplotlib.axes.Axes`, optional
189 Axis
190 energy : `~astropy.units.Quantity`
191 Energy
192
193 Returns
194 -------
195 ax : `~matplotlib.axes.Axes`
196 Axis
197 """
198 import matplotlib.pyplot as plt
199
200 ax = plt.gca() if ax is None else ax
201
202 if energy is None:
203 energy_axis = self.axes["energy"]
204 e_min, e_max = np.log10(energy_axis.center.value[[0, -1]])
205 energy = np.logspace(e_min, e_max, 4) * energy_axis.unit
206
207 offset = self.axes["offset"].center
208
209 for ee in energy:
210 bkg = self.evaluate(offset=offset, energy=ee)
211 if np.isnan(bkg).all():
212 continue
213 label = f"energy = {ee:.1f}"
214 ax.plot(offset, bkg.value, label=label, **kwargs)
215
216 ax.set_xlabel(f"Offset ({self.axes['offset'].unit})")
217 ax.set_ylabel(f"Background rate ({self.unit})")
218 ax.set_yscale("log")
219 ax.legend(loc="upper right")
220 return ax
221
222 def plot_energy_dependence(self, ax=None, offset=None, **kwargs):
223 """Plot background rate versus energy for a given offset.
224
225 Parameters
226 ----------
227 ax : `~matplotlib.axes.Axes`, optional
228 Axis
229 offset : `~astropy.coordinates.Angle`
230 Offset
231 kwargs : dict
232 Forwarded tp plt.plot()
233
234 Returns
235 -------
236 ax : `~matplotlib.axes.Axes`
237 Axis
238 """
239 import matplotlib.pyplot as plt
240
241 ax = plt.gca() if ax is None else ax
242
243 if offset is None:
244 offset_axis = self.axes["offset"]
245 off_min, off_max = offset_axis.center.value[[0, -1]]
246 offset = np.linspace(off_min, off_max, 4) * offset_axis.unit
247
248 energy = self.axes["energy"].center
249
250 for off in offset:
251 bkg = self.evaluate(offset=off, energy=energy)
252 kwargs.setdefault("label", f"offset = {off:.1f}")
253 ax.plot(energy, bkg.value, **kwargs)
254
255 ax.set_xscale("log")
256 ax.set_yscale("log")
257 ax.set_xlabel(f"Energy [{energy.unit}]")
258 ax.set_ylabel(f"Background rate ({self.unit})")
259 ax.set_xlim(min(energy.value), max(energy.value))
260 ax.legend(loc="best")
261
262 return ax
263
264 def plot_spectrum(self, ax=None, **kwargs):
265 """Plot angle integrated background rate versus energy.
266
267 Parameters
268 ----------
269 ax : `~matplotlib.axes.Axes`, optional
270 Axis
271 kwargs : dict
272 Forwarded tp plt.plot()
273
274 Returns
275 -------
276 ax : `~matplotlib.axes.Axes`
277 Axis
278 """
279 import matplotlib.pyplot as plt
280
281 ax = plt.gca() if ax is None else ax
282 offset = self.axes["offset"].edges
283 energy = self.axes["energy"].center
284
285 bkg = []
286 for ee in energy:
287 data = self.evaluate(offset=offset, energy=ee)
288 val = np.nansum(trapz_loglog(data, offset, axis=0))
289 bkg.append(val.value)
290
291 ax.plot(energy, bkg, label="integrated spectrum", **kwargs)
292
293 unit = self.unit * offset.unit ** 2
294
295 ax.set_xscale("log")
296 ax.set_yscale("log")
297 ax.set_xlabel(f"Energy [{energy.unit}]")
298 ax.set_ylabel(f"Background rate ({unit})")
299 ax.set_xlim(min(energy.value), max(energy.value))
300 ax.legend(loc="best")
301 return ax
302
303 def peek(self, figsize=(10, 8)):
304 """Quick-look summary plots."""
305 import matplotlib.pyplot as plt
306
307 fig, axes = plt.subplots(nrows=2, ncols=2, figsize=figsize)
308 self.plot(ax=axes[1][1])
309 self.plot_offset_dependence(ax=axes[0][0])
310 self.plot_energy_dependence(ax=axes[1][0])
311 self.plot_spectrum(ax=axes[0][1])
312 plt.tight_layout()
313
[end of gammapy/irf/background.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/gammapy/irf/background.py b/gammapy/irf/background.py
--- a/gammapy/irf/background.py
+++ b/gammapy/irf/background.py
@@ -13,6 +13,7 @@
class BackgroundIRF(IRF):
"""Background IRF base class"""
+
default_interp_kwargs = dict(
bounds_error=False, fill_value=None, values_scale="log"
)
@@ -35,7 +36,7 @@
Background IRF class.
"""
axes = MapAxes.from_table(table, format=format)[cls.required_axes]
-
+
# Spec says key should be "BKG", but there are files around
# (e.g. CTA 1DC) that use "BGD". For now we support both
if "BKG" in table.colnames:
@@ -64,12 +65,7 @@
log.debug("Transposing background table on read")
data = data.transpose()
- return cls(
- axes=axes,
- data=data.value,
- meta=table.meta,
- unit=data.unit
- )
+ return cls(axes=axes, data=data.value, meta=table.meta, unit=data.unit)
class Background3D(BackgroundIRF):
@@ -107,6 +103,7 @@
<BLANKLINE>
"""
+
tag = "bkg_3d"
required_axes = ["energy", "fov_lon", "fov_lat"]
@@ -249,8 +246,8 @@
for off in offset:
bkg = self.evaluate(offset=off, energy=energy)
- kwargs.setdefault("label", f"offset = {off:.1f}")
- ax.plot(energy, bkg.value, **kwargs)
+ label = f"offset = {off:.2f}"
+ ax.plot(energy, bkg.value, label=label, **kwargs)
ax.set_xscale("log")
ax.set_yscale("log")
| {"golden_diff": "diff --git a/gammapy/irf/background.py b/gammapy/irf/background.py\n--- a/gammapy/irf/background.py\n+++ b/gammapy/irf/background.py\n@@ -13,6 +13,7 @@\n \n class BackgroundIRF(IRF):\n \"\"\"Background IRF base class\"\"\"\n+\n default_interp_kwargs = dict(\n bounds_error=False, fill_value=None, values_scale=\"log\"\n )\n@@ -35,7 +36,7 @@\n Background IRF class.\n \"\"\"\n axes = MapAxes.from_table(table, format=format)[cls.required_axes]\n- \n+\n # Spec says key should be \"BKG\", but there are files around\n # (e.g. CTA 1DC) that use \"BGD\". For now we support both\n if \"BKG\" in table.colnames:\n@@ -64,12 +65,7 @@\n log.debug(\"Transposing background table on read\")\n data = data.transpose()\n \n- return cls(\n- axes=axes,\n- data=data.value,\n- meta=table.meta,\n- unit=data.unit\n- )\n+ return cls(axes=axes, data=data.value, meta=table.meta, unit=data.unit)\n \n \n class Background3D(BackgroundIRF):\n@@ -107,6 +103,7 @@\n <BLANKLINE>\n \n \"\"\"\n+\n tag = \"bkg_3d\"\n required_axes = [\"energy\", \"fov_lon\", \"fov_lat\"]\n \n@@ -249,8 +246,8 @@\n \n for off in offset:\n bkg = self.evaluate(offset=off, energy=energy)\n- kwargs.setdefault(\"label\", f\"offset = {off:.1f}\")\n- ax.plot(energy, bkg.value, **kwargs)\n+ label = f\"offset = {off:.2f}\"\n+ ax.plot(energy, bkg.value, label=label, **kwargs)\n \n ax.set_xscale(\"log\")\n ax.set_yscale(\"log\")\n", "issue": "Label issue in background rate figures\n**Gammapy version**\r\n0.18.2\r\n\r\n**Bug description**\r\nThe label of the background rate figure seems wrong.\r\nSee the documentation, cell 24, bottom left figure:\r\nhttps://docs.gammapy.org/0.18.2/tutorials/cta.html?highlight=cta#IRFs\r\n\r\nThe labels are all identical `offset = 0.2 deg`.\r\n\r\n\n", "before_files": [{"content": "# Licensed under a 3-clause BSD style license - see LICENSE.rst\nimport logging\nimport numpy as np\nimport astropy.units as u\nfrom gammapy.maps import MapAxes, MapAxis\nfrom gammapy.utils.integrate import trapz_loglog\nfrom .core import IRF\n\n__all__ = [\"Background3D\", \"Background2D\"]\n\nlog = logging.getLogger(__name__)\n\n\nclass BackgroundIRF(IRF):\n \"\"\"Background IRF base class\"\"\"\n default_interp_kwargs = dict(\n bounds_error=False, fill_value=None, values_scale=\"log\"\n )\n \"\"\"Default Interpolation kwargs to extrapolate.\"\"\"\n\n @classmethod\n def from_table(cls, table, format=\"gadf-dl3\"):\n \"\"\"Read from `~astropy.table.Table`.\n\n Parameters\n ----------\n table : `~astropy.table.Table`\n Table with background data\n format : {\"gadf-dl3\"}\n Format specification\n\n Returns\n -------\n bkg : `Background2D` or `Background2D`\n Background IRF class.\n \"\"\"\n axes = MapAxes.from_table(table, format=format)[cls.required_axes]\n \n # Spec says key should be \"BKG\", but there are files around\n # (e.g. CTA 1DC) that use \"BGD\". For now we support both\n if \"BKG\" in table.colnames:\n bkg_name = \"BKG\"\n elif \"BGD\" in table.colnames:\n bkg_name = \"BGD\"\n else:\n raise ValueError('Invalid column names. Need \"BKG\" or \"BGD\".')\n\n data = table[bkg_name].quantity[0].T\n\n if data.unit == \"\" or isinstance(data.unit, u.UnrecognizedUnit):\n data = u.Quantity(data.value, \"s-1 MeV-1 sr-1\", copy=False)\n log.warning(\n \"Invalid unit found in background table! 
Assuming (s-1 MeV-1 sr-1)\"\n )\n\n # TODO: The present HESS and CTA backgroundfits files\n # have a reverse order (lon, lat, E) than recommened in GADF(E, lat, lon)\n # For now, we suport both.\n\n if axes.shape == axes.shape[::-1]:\n log.error(\"Ambiguous axes order in Background fits files!\")\n\n if np.shape(data) != axes.shape:\n log.debug(\"Transposing background table on read\")\n data = data.transpose()\n\n return cls(\n axes=axes,\n data=data.value,\n meta=table.meta,\n unit=data.unit\n )\n\n\nclass Background3D(BackgroundIRF):\n \"\"\"Background 3D.\n\n Data format specification: :ref:`gadf:bkg_3d`\n\n Parameters\n ----------\n axes : list of `MapAxis` or `MapAxes` object\n Required data axes: [\"energy\", \"fov_lon\", \"fov_lat\"] in the given order.\n data : `~np.ndarray`\n Data array.\n unit : str or `~astropy.units.Unit`\n Data unit usuually ``s^-1 MeV^-1 sr^-1``\n meta : dict\n Meta data\n\n Examples\n --------\n Here's an example you can use to learn about this class:\n\n >>> from gammapy.irf import Background3D\n >>> filename = '$GAMMAPY_DATA/cta-1dc/caldb/data/cta/1dc/bcf/South_z20_50h/irf_file.fits'\n >>> bkg_3d = Background3D.read(filename, hdu='BACKGROUND')\n >>> print(bkg_3d)\n Background3D\n ------------\n <BLANKLINE>\n axes : ['energy', 'fov_lon', 'fov_lat']\n shape : (21, 36, 36)\n ndim : 3\n unit : 1 / (MeV s sr)\n dtype : >f4\n <BLANKLINE>\n\n \"\"\"\n tag = \"bkg_3d\"\n required_axes = [\"energy\", \"fov_lon\", \"fov_lat\"]\n\n def to_2d(self):\n \"\"\"Convert to `Background2D`.\n\n This takes the values at Y = 0 and X >= 0.\n \"\"\"\n # TODO: this is incorrect as it misses the Jacobian?\n idx_lon = self.axes[\"fov_lon\"].coord_to_idx(0 * u.deg)[0]\n idx_lat = self.axes[\"fov_lat\"].coord_to_idx(0 * u.deg)[0]\n data = self.quantity[:, idx_lon:, idx_lat].copy()\n\n offset = self.axes[\"fov_lon\"].edges[idx_lon:]\n offset_axis = MapAxis.from_edges(offset, name=\"offset\")\n\n return Background2D(\n axes=[self.axes[\"energy\"], offset_axis], data=data.value, unit=data.unit\n )\n\n def peek(self, figsize=(10, 8)):\n return self.to_2d().peek(figsize)\n\n\nclass Background2D(BackgroundIRF):\n \"\"\"Background 2D.\n\n Data format specification: :ref:`gadf:bkg_2d`\n\n Parameters\n ----------\n axes : list of `MapAxis` or `MapAxes` object\n Required data axes: [\"energy\", \"offset\"] in the given order.\n data : `~np.ndarray`\n Data array.\n unit : str or `~astropy.units.Unit`\n Data unit usually ``s^-1 MeV^-1 sr^-1``\n meta : dict\n Meta data\n \"\"\"\n\n tag = \"bkg_2d\"\n required_axes = [\"energy\", \"offset\"]\n default_interp_kwargs = dict(bounds_error=False, fill_value=None)\n \"\"\"Default Interpolation kwargs.\"\"\"\n\n def plot(self, ax=None, add_cbar=True, **kwargs):\n \"\"\"Plot energy offset dependence of the background model.\n \"\"\"\n import matplotlib.pyplot as plt\n from matplotlib.colors import LogNorm\n\n ax = plt.gca() if ax is None else ax\n\n x = self.axes[\"energy\"].edges.to_value(\"TeV\")\n y = self.axes[\"offset\"].edges.to_value(\"deg\")\n z = self.quantity.T.value\n\n kwargs.setdefault(\"cmap\", \"GnBu\")\n kwargs.setdefault(\"edgecolors\", \"face\")\n\n caxes = ax.pcolormesh(x, y, z, norm=LogNorm(), **kwargs)\n ax.set_xscale(\"log\")\n ax.set_ylabel(f\"Offset (deg)\")\n ax.set_xlabel(f\"Energy (TeV)\")\n\n xmin, xmax = x.min(), x.max()\n ax.set_xlim(xmin, xmax)\n\n if add_cbar:\n label = f\"Background rate ({self.unit})\"\n ax.figure.colorbar(caxes, ax=ax, label=label)\n\n def plot_offset_dependence(self, ax=None, energy=None, **kwargs):\n 
\"\"\"Plot background rate versus offset for a given energy.\n\n Parameters\n ----------\n ax : `~matplotlib.axes.Axes`, optional\n Axis\n energy : `~astropy.units.Quantity`\n Energy\n\n Returns\n -------\n ax : `~matplotlib.axes.Axes`\n Axis\n \"\"\"\n import matplotlib.pyplot as plt\n\n ax = plt.gca() if ax is None else ax\n\n if energy is None:\n energy_axis = self.axes[\"energy\"]\n e_min, e_max = np.log10(energy_axis.center.value[[0, -1]])\n energy = np.logspace(e_min, e_max, 4) * energy_axis.unit\n\n offset = self.axes[\"offset\"].center\n\n for ee in energy:\n bkg = self.evaluate(offset=offset, energy=ee)\n if np.isnan(bkg).all():\n continue\n label = f\"energy = {ee:.1f}\"\n ax.plot(offset, bkg.value, label=label, **kwargs)\n\n ax.set_xlabel(f\"Offset ({self.axes['offset'].unit})\")\n ax.set_ylabel(f\"Background rate ({self.unit})\")\n ax.set_yscale(\"log\")\n ax.legend(loc=\"upper right\")\n return ax\n\n def plot_energy_dependence(self, ax=None, offset=None, **kwargs):\n \"\"\"Plot background rate versus energy for a given offset.\n\n Parameters\n ----------\n ax : `~matplotlib.axes.Axes`, optional\n Axis\n offset : `~astropy.coordinates.Angle`\n Offset\n kwargs : dict\n Forwarded tp plt.plot()\n\n Returns\n -------\n ax : `~matplotlib.axes.Axes`\n Axis\n \"\"\"\n import matplotlib.pyplot as plt\n\n ax = plt.gca() if ax is None else ax\n\n if offset is None:\n offset_axis = self.axes[\"offset\"]\n off_min, off_max = offset_axis.center.value[[0, -1]]\n offset = np.linspace(off_min, off_max, 4) * offset_axis.unit\n\n energy = self.axes[\"energy\"].center\n\n for off in offset:\n bkg = self.evaluate(offset=off, energy=energy)\n kwargs.setdefault(\"label\", f\"offset = {off:.1f}\")\n ax.plot(energy, bkg.value, **kwargs)\n\n ax.set_xscale(\"log\")\n ax.set_yscale(\"log\")\n ax.set_xlabel(f\"Energy [{energy.unit}]\")\n ax.set_ylabel(f\"Background rate ({self.unit})\")\n ax.set_xlim(min(energy.value), max(energy.value))\n ax.legend(loc=\"best\")\n\n return ax\n\n def plot_spectrum(self, ax=None, **kwargs):\n \"\"\"Plot angle integrated background rate versus energy.\n\n Parameters\n ----------\n ax : `~matplotlib.axes.Axes`, optional\n Axis\n kwargs : dict\n Forwarded tp plt.plot()\n\n Returns\n -------\n ax : `~matplotlib.axes.Axes`\n Axis\n \"\"\"\n import matplotlib.pyplot as plt\n\n ax = plt.gca() if ax is None else ax\n offset = self.axes[\"offset\"].edges\n energy = self.axes[\"energy\"].center\n\n bkg = []\n for ee in energy:\n data = self.evaluate(offset=offset, energy=ee)\n val = np.nansum(trapz_loglog(data, offset, axis=0))\n bkg.append(val.value)\n\n ax.plot(energy, bkg, label=\"integrated spectrum\", **kwargs)\n\n unit = self.unit * offset.unit ** 2\n\n ax.set_xscale(\"log\")\n ax.set_yscale(\"log\")\n ax.set_xlabel(f\"Energy [{energy.unit}]\")\n ax.set_ylabel(f\"Background rate ({unit})\")\n ax.set_xlim(min(energy.value), max(energy.value))\n ax.legend(loc=\"best\")\n return ax\n\n def peek(self, figsize=(10, 8)):\n \"\"\"Quick-look summary plots.\"\"\"\n import matplotlib.pyplot as plt\n\n fig, axes = plt.subplots(nrows=2, ncols=2, figsize=figsize)\n self.plot(ax=axes[1][1])\n self.plot_offset_dependence(ax=axes[0][0])\n self.plot_energy_dependence(ax=axes[1][0])\n self.plot_spectrum(ax=axes[0][1])\n plt.tight_layout()\n", "path": "gammapy/irf/background.py"}]} | 3,907 | 451 |
gh_patches_debug_12 | rasdani/github-patches | git_diff | OCHA-DAP__hdx-ckan-1737 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Shrink the map and related divs

</issue>
<code>
[start of ckanext-hdx_theme/ckanext/hdx_theme/version.py]
1 hdx_version = 'v0.4.8'
2
[end of ckanext-hdx_theme/ckanext/hdx_theme/version.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/ckanext-hdx_theme/ckanext/hdx_theme/version.py b/ckanext-hdx_theme/ckanext/hdx_theme/version.py
--- a/ckanext-hdx_theme/ckanext/hdx_theme/version.py
+++ b/ckanext-hdx_theme/ckanext/hdx_theme/version.py
@@ -1 +1 @@
-hdx_version = 'v0.4.8'
+hdx_version = 'v0.4.9'
| {"golden_diff": "diff --git a/ckanext-hdx_theme/ckanext/hdx_theme/version.py b/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n--- a/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n+++ b/ckanext-hdx_theme/ckanext/hdx_theme/version.py\n@@ -1 +1 @@\n-hdx_version = 'v0.4.8'\n+hdx_version = 'v0.4.9'\n", "issue": "Shrink the map and related divs\n\n\n", "before_files": [{"content": "hdx_version = 'v0.4.8'\n", "path": "ckanext-hdx_theme/ckanext/hdx_theme/version.py"}]} | 639 | 106 |
gh_patches_debug_13118 | rasdani/github-patches | git_diff | netbox-community__netbox-3858 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Custom link in group does not render Jinja2 code
<!--
NOTE: This form is only for reproducible bugs. If you need assistance with
NetBox installation, or if you have a general question, DO NOT open an
issue. Instead, post to our mailing list:
https://groups.google.com/forum/#!forum/netbox-discuss
Please describe the environment in which you are running NetBox. Be sure
that you are running an unmodified instance of the latest stable release
before submitting a bug report.
-->
### Environment
* Python version: 3.6.8
* NetBox version: 2.6.11
<!--
Describe in detail the exact steps that someone else can take to reproduce
this bug using the current stable release of NetBox (or the current beta
release where applicable). Begin with the creation of any necessary
database objects and call out every operation being performed explicitly.
If reporting a bug in the REST API, be sure to reconstruct the raw HTTP
request(s) being made: Don't rely on a wrapper like pynetbox.
-->
### Steps to Reproduce
1. Create a custom link and add it to a group. Make the name use Jinja style code - example {% if True %} Custom Link 1{% endif %}
2. Open a device and look at the custom link
<!-- What did you expect to happen? -->
### Expected Behavior
Expected the link to use the Jinja2 code and show "Custom Link 1" as name.
<!-- What happened instead? -->
### Observed Behavior
The Jinja code is shown as the name. So the name is "{% if True %} Custom Link 1{% endif %}" instead of "Custom Link 1". The link itself is also not rendering Jinja code.
This seems to come from #3461 and due to the `text_rendered` and `link_rendered` not being used in the format statement on https://github.com/netbox-community/netbox/blob/b5455ed882c47273064933f0120b0e368af93a1f/netbox/extras/templatetags/custom_links.py#L72
I did a quick check and changed
```try:
text_rendered = render_jinja2(cl.text, context)
if text_rendered:
link_target = ' target="_blank"' if cl.new_window else ''
links_rendered.append(
GROUP_LINK.format(cl.url, link_target, cl.text)
)
```
to
```try:
text_rendered = render_jinja2(cl.text, context)
if text_rendered:
link_target = ' target="_blank"' if cl.new_window else ''
link_rendered = render_jinja2(cl.url, context)
links_rendered.append(
GROUP_LINK.format(link_rendered, link_target, text_rendered)
)
```
This made it work as expected again.
</issue>
<code>
[start of netbox/extras/templatetags/custom_links.py]
1 from collections import OrderedDict
2
3 from django import template
4 from django.contrib.contenttypes.models import ContentType
5 from django.utils.safestring import mark_safe
6
7 from extras.models import CustomLink
8 from utilities.utils import render_jinja2
9
10
11 register = template.Library()
12
13 LINK_BUTTON = '<a href="{}"{} class="btn btn-sm btn-{}">{}</a>\n'
14 GROUP_BUTTON = '<div class="btn-group">\n' \
15 '<button type="button" class="btn btn-sm btn-{} dropdown-toggle" data-toggle="dropdown">\n' \
16 '{} <span class="caret"></span>\n' \
17 '</button>\n' \
18 '<ul class="dropdown-menu pull-right">\n' \
19 '{}</ul></div>'
20 GROUP_LINK = '<li><a href="{}"{}>{}</a></li>\n'
21
22
23 @register.simple_tag()
24 def custom_links(obj):
25 """
26 Render all applicable links for the given object.
27 """
28 content_type = ContentType.objects.get_for_model(obj)
29 custom_links = CustomLink.objects.filter(content_type=content_type)
30 if not custom_links:
31 return ''
32
33 context = {
34 'obj': obj,
35 }
36 template_code = ''
37 group_names = OrderedDict()
38
39 for cl in custom_links:
40
41 # Organize custom links by group
42 if cl.group_name and cl.group_name in group_names:
43 group_names[cl.group_name].append(cl)
44 elif cl.group_name:
45 group_names[cl.group_name] = [cl]
46
47 # Add non-grouped links
48 else:
49 try:
50 text_rendered = render_jinja2(cl.text, context)
51 if text_rendered:
52 link_rendered = render_jinja2(cl.url, context)
53 link_target = ' target="_blank"' if cl.new_window else ''
54 template_code += LINK_BUTTON.format(
55 link_rendered, link_target, cl.button_class, text_rendered
56 )
57 except Exception as e:
58 template_code += '<a class="btn btn-sm btn-default" disabled="disabled" title="{}">' \
59 '<i class="fa fa-warning"></i> {}</a>\n'.format(e, cl.name)
60
61 # Add grouped links to template
62 for group, links in group_names.items():
63
64 links_rendered = []
65
66 for cl in links:
67 try:
68 text_rendered = render_jinja2(cl.text, context)
69 if text_rendered:
70 link_target = ' target="_blank"' if cl.new_window else ''
71 links_rendered.append(
72 GROUP_LINK.format(cl.url, link_target, cl.text)
73 )
74 except Exception as e:
75 links_rendered.append(
76 '<li><a disabled="disabled" title="{}"><span class="text-muted">'
77 '<i class="fa fa-warning"></i> {}</span></a></li>'.format(e, cl.name)
78 )
79
80 if links_rendered:
81 template_code += GROUP_BUTTON.format(
82 links[0].button_class, group, ''.join(links_rendered)
83 )
84
85 return mark_safe(template_code)
86
[end of netbox/extras/templatetags/custom_links.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/netbox/extras/templatetags/custom_links.py b/netbox/extras/templatetags/custom_links.py
--- a/netbox/extras/templatetags/custom_links.py
+++ b/netbox/extras/templatetags/custom_links.py
@@ -68,8 +68,9 @@
text_rendered = render_jinja2(cl.text, context)
if text_rendered:
link_target = ' target="_blank"' if cl.new_window else ''
+ link_rendered = render_jinja2(cl.url, context)
links_rendered.append(
- GROUP_LINK.format(cl.url, link_target, cl.text)
+ GROUP_LINK.format(link_rendered, link_target, text_rendered)
)
except Exception as e:
links_rendered.append(
| {"golden_diff": "diff --git a/netbox/extras/templatetags/custom_links.py b/netbox/extras/templatetags/custom_links.py\n--- a/netbox/extras/templatetags/custom_links.py\n+++ b/netbox/extras/templatetags/custom_links.py\n@@ -68,8 +68,9 @@\n text_rendered = render_jinja2(cl.text, context)\n if text_rendered:\n link_target = ' target=\"_blank\"' if cl.new_window else ''\n+ link_rendered = render_jinja2(cl.url, context)\n links_rendered.append(\n- GROUP_LINK.format(cl.url, link_target, cl.text)\n+ GROUP_LINK.format(link_rendered, link_target, text_rendered)\n )\n except Exception as e:\n links_rendered.append(\n", "issue": "Custom link in group does not render Jinja2 code\n<!--\r\n NOTE: This form is only for reproducible bugs. If you need assistance with\r\n NetBox installation, or if you have a general question, DO NOT open an\r\n issue. Instead, post to our mailing list:\r\n\r\n https://groups.google.com/forum/#!forum/netbox-discuss\r\n\r\n Please describe the environment in which you are running NetBox. Be sure\r\n that you are running an unmodified instance of the latest stable release\r\n before submitting a bug report.\r\n-->\r\n### Environment\r\n* Python version: 3.6.8\r\n* NetBox version: 2.6.11\r\n\r\n<!--\r\n Describe in detail the exact steps that someone else can take to reproduce\r\n this bug using the current stable release of NetBox (or the current beta\r\n release where applicable). Begin with the creation of any necessary\r\n database objects and call out every operation being performed explicitly.\r\n If reporting a bug in the REST API, be sure to reconstruct the raw HTTP\r\n request(s) being made: Don't rely on a wrapper like pynetbox.\r\n-->\r\n### Steps to Reproduce\r\n1. Create a custom link and add it to a group. Make the name use Jinja style code - example {% if True %} Custom Link 1{% endif %}\r\n2. Open a device and look at the custom link\r\n\r\n<!-- What did you expect to happen? -->\r\n### Expected Behavior\r\nExpected the link to use the Jinja2 code and show \"Custom Link 1\" as name.\r\n\r\n<!-- What happened instead? -->\r\n### Observed Behavior\r\nThe Jinja code is shown as the name. So the name is \"{% if True %} Custom Link 1{% endif %}\" instead of \"Custom Link 1\". 
The link itself is also not rendering Jinja code.\r\n\r\nThis seems to come from #3461 and due to the `text_rendered` and `link_rendered` not being used in the format statement on https://github.com/netbox-community/netbox/blob/b5455ed882c47273064933f0120b0e368af93a1f/netbox/extras/templatetags/custom_links.py#L72\r\n\r\nI did a quick check and changed \r\n```try:\r\n text_rendered = render_jinja2(cl.text, context)\r\n if text_rendered:\r\n link_target = ' target=\"_blank\"' if cl.new_window else ''\r\n links_rendered.append(\r\n GROUP_LINK.format(cl.url, link_target, cl.text)\r\n )\r\n```\r\nto\r\n\r\n```try:\r\n text_rendered = render_jinja2(cl.text, context)\r\n if text_rendered:\r\n link_target = ' target=\"_blank\"' if cl.new_window else ''\r\n link_rendered = render_jinja2(cl.url, context)\r\n links_rendered.append(\r\n GROUP_LINK.format(link_rendered, link_target, text_rendered)\r\n )\r\n```\r\n\r\nThis made it work as expected again.\n", "before_files": [{"content": "from collections import OrderedDict\n\nfrom django import template\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.utils.safestring import mark_safe\n\nfrom extras.models import CustomLink\nfrom utilities.utils import render_jinja2\n\n\nregister = template.Library()\n\nLINK_BUTTON = '<a href=\"{}\"{} class=\"btn btn-sm btn-{}\">{}</a>\\n'\nGROUP_BUTTON = '<div class=\"btn-group\">\\n' \\\n '<button type=\"button\" class=\"btn btn-sm btn-{} dropdown-toggle\" data-toggle=\"dropdown\">\\n' \\\n '{} <span class=\"caret\"></span>\\n' \\\n '</button>\\n' \\\n '<ul class=\"dropdown-menu pull-right\">\\n' \\\n '{}</ul></div>'\nGROUP_LINK = '<li><a href=\"{}\"{}>{}</a></li>\\n'\n\n\[email protected]_tag()\ndef custom_links(obj):\n \"\"\"\n Render all applicable links for the given object.\n \"\"\"\n content_type = ContentType.objects.get_for_model(obj)\n custom_links = CustomLink.objects.filter(content_type=content_type)\n if not custom_links:\n return ''\n\n context = {\n 'obj': obj,\n }\n template_code = ''\n group_names = OrderedDict()\n\n for cl in custom_links:\n\n # Organize custom links by group\n if cl.group_name and cl.group_name in group_names:\n group_names[cl.group_name].append(cl)\n elif cl.group_name:\n group_names[cl.group_name] = [cl]\n\n # Add non-grouped links\n else:\n try:\n text_rendered = render_jinja2(cl.text, context)\n if text_rendered:\n link_rendered = render_jinja2(cl.url, context)\n link_target = ' target=\"_blank\"' if cl.new_window else ''\n template_code += LINK_BUTTON.format(\n link_rendered, link_target, cl.button_class, text_rendered\n )\n except Exception as e:\n template_code += '<a class=\"btn btn-sm btn-default\" disabled=\"disabled\" title=\"{}\">' \\\n '<i class=\"fa fa-warning\"></i> {}</a>\\n'.format(e, cl.name)\n\n # Add grouped links to template\n for group, links in group_names.items():\n\n links_rendered = []\n\n for cl in links:\n try:\n text_rendered = render_jinja2(cl.text, context)\n if text_rendered:\n link_target = ' target=\"_blank\"' if cl.new_window else ''\n links_rendered.append(\n GROUP_LINK.format(cl.url, link_target, cl.text)\n )\n except Exception as e:\n links_rendered.append(\n '<li><a disabled=\"disabled\" title=\"{}\"><span class=\"text-muted\">'\n '<i class=\"fa fa-warning\"></i> {}</span></a></li>'.format(e, cl.name)\n )\n\n if links_rendered:\n template_code += GROUP_BUTTON.format(\n links[0].button_class, group, ''.join(links_rendered)\n )\n\n return mark_safe(template_code)\n", "path": "netbox/extras/templatetags/custom_links.py"}]} 
| 2,007 | 172 |
gh_patches_debug_11386 | rasdani/github-patches | git_diff | hpcaitech__ColossalAI-5227 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[tensor] fix some unittests
[tensor] fix some unittests
[tensor] fix some unittests
</issue>
<code>
[start of applications/Chat/examples/community/peft/train_peft_prompts.py]
1 import argparse
2
3 import torch
4 import torch.distributed as dist
5 from coati.dataset import DataCollatorForSupervisedDataset
6 from coati.models.bloom import BLOOMRM, BLOOMCritic
7 from coati.models.gpt import GPTRM, GPTCritic
8 from coati.models.llama import LlamaCritic, LlamaRM
9 from coati.models.opt import OPTRM, OPTCritic
10 from coati.trainer import PPOTrainer
11 from coati.trainer.strategies import DDPStrategy, GeminiStrategy, LowLevelZeroStrategy
12 from easy_dataset import EasyPromptsDataset, EasySupervisedDataset
13 from easy_models import BLOOMActor
14 from torch.optim import Adam
15 from torch.utils.data import DataLoader
16 from torch.utils.data.distributed import DistributedSampler
17 from transformers import AutoTokenizer, BloomTokenizerFast, GPT2Tokenizer, LlamaTokenizer
18
19 from colossalai.nn.optimizer import HybridAdam
20
21
22 def main(args):
23 # configure strategy
24 if args.strategy == "ddp":
25 strategy = DDPStrategy()
26 elif args.strategy == "colossalai_gemini":
27 strategy = GeminiStrategy(placement_policy="static", offload_optim_frac=1.0, offload_param_frac=1.0, initial_scale=2**5)
28 elif args.strategy == "colossalai_zero2":
29 strategy = LowLevelZeroStrategy(stage=2, placement_policy="cpu")
30 else:
31 raise ValueError(f'Unsupported strategy "{args.strategy}"')
32
33 if args.rm_path is not None:
34 state_dict = torch.load(args.rm_path, map_location="cpu")
35
36 # configure model
37 if args.model == "bloom":
38 # initial_model = BLOOMActor(pretrained=args.pretrain)
39 print("Using peft lora to load Bloom model as initial_model")
40 initial_model = BLOOMActor(pretrained=args.pretrain, lora_path=args.sft_lora_path)
41 print("Using peft lora to load Bloom model as initial_model (Done)")
42 else:
43 raise ValueError(f'Unsupported actor model "{args.model}"')
44
45 if args.rm_model == None:
46 rm_model_name = args.model
47 else:
48 rm_model_name = args.rm_model
49
50 if rm_model_name == "gpt2":
51 reward_model = GPTRM(pretrained=args.rm_pretrain)
52 elif rm_model_name == "bloom":
53 print("load bloom reward model ", args.rm_pretrain)
54 reward_model = BLOOMRM(pretrained=args.rm_pretrain)
55 elif rm_model_name == "opt":
56 reward_model = OPTRM(pretrained=args.rm_pretrain)
57 elif rm_model_name == "llama":
58 reward_model = LlamaRM(pretrained=args.rm_pretrain)
59 else:
60 raise ValueError(f'Unsupported reward model "{rm_model_name}"')
61
62 if args.rm_path is not None:
63 print("Loading reward model from", args.rm_path)
64 reward_model.load_state_dict(state_dict)
65
66 if args.strategy != "colossalai_gemini":
67 initial_model.to(torch.float16).to(torch.cuda.current_device())
68 reward_model.to(torch.float16).to(torch.cuda.current_device())
69
70 with strategy.model_init_context():
71 if args.model == "bloom":
72 # actor = BLOOMActor(pretrained=args.pretrain, lora_rank=args.lora_rank)
73 print("Using peft lora to load Bloom model as Actor")
74 actor = BLOOMActor(pretrained=args.pretrain, lora_path=args.sft_lora_path)
75 print("Using peft lora to load Bloom model as Actor (Done)")
76 else:
77 raise ValueError(f'Unsupported actor model "{args.model}"')
78
79 if rm_model_name == "gpt2":
80 critic = GPTCritic(pretrained=args.rm_pretrain, lora_rank=args.lora_rank, use_action_mask=True)
81 elif rm_model_name == "bloom":
82 print("load bloom critic ", args.rm_pretrain, " lora_rank ", args.lora_rank, " use_action_mask ", True)
83 critic = BLOOMCritic(pretrained=args.rm_pretrain, lora_rank=args.lora_rank, use_action_mask=True)
84 print("load bloom critic (Done) ")
85 elif rm_model_name == "opt":
86 critic = OPTCritic(pretrained=args.rm_pretrain, lora_rank=args.lora_rank, use_action_mask=True)
87 elif rm_model_name == "llama":
88 critic = LlamaCritic(pretrained=args.rm_pretrain, lora_rank=args.lora_rank, use_action_mask=True)
89 else:
90 raise ValueError(f'Unsupported reward model "{rm_model_name}"')
91
92 if args.rm_path is not None:
93 print("Loading reward model from", args.rm_path)
94 critic.load_state_dict(state_dict)
95 del state_dict
96
97 if args.strategy != "colossalai_gemini":
98 critic.to(torch.float16).to(torch.cuda.current_device())
99 actor.to(torch.float16).to(torch.cuda.current_device())
100
101 # configure optimizer
102 if args.strategy.startswith("colossalai"):
103 actor_optim = HybridAdam(actor.parameters(), lr=1e-7)
104 critic_optim = HybridAdam(critic.parameters(), lr=1e-7)
105 else:
106 actor_optim = Adam(actor.parameters(), lr=1e-7)
107 critic_optim = Adam(critic.parameters(), lr=1e-7)
108
109 # configure tokenizer
110 if args.model == "gpt2":
111 tokenizer = GPT2Tokenizer.from_pretrained(args.rm_pretrain)
112 tokenizer.pad_token = tokenizer.eos_token
113 elif args.model == "bloom":
114 tokenizer = BloomTokenizerFast.from_pretrained(args.rm_pretrain)
115 tokenizer.pad_token = tokenizer.eos_token
116 elif args.model == "opt":
117 tokenizer = AutoTokenizer.from_pretrained(args.rm_pretrain)
118 tokenizer.pad_token = tokenizer.eos_token
119 elif args.model == "llama":
120 tokenizer = LlamaTokenizer.from_pretrained(args.pretrain)
121 tokenizer.eos_token = "</s>"
122 tokenizer.pad_token = tokenizer.unk_token
123 else:
124 raise ValueError(f'Unsupported model "{args.model}"')
125
126 data_collator = DataCollatorForSupervisedDataset(tokenizer=tokenizer)
127
128 prompt_dataset = EasyPromptsDataset(args.prompt_path, tokenizer)
129 if dist.is_initialized() and dist.get_world_size() > 1:
130 prompt_sampler = DistributedSampler(prompt_dataset, shuffle=True, seed=42, drop_last=True)
131 else:
132 prompt_sampler = None
133 prompt_dataloader = DataLoader(
134 prompt_dataset, shuffle=(prompt_sampler is None), sampler=prompt_sampler, batch_size=args.train_batch_size
135 )
136
137 pretrain_dataset = EasySupervisedDataset(args.pretrain_dataset, tokenizer)
138 if dist.is_initialized() and dist.get_world_size() > 1:
139 pretrain_sampler = DistributedSampler(pretrain_dataset, shuffle=True, seed=42, drop_last=True)
140 else:
141 pretrain_sampler = None
142 pretrain_dataloader = DataLoader(
143 pretrain_dataset,
144 shuffle=(pretrain_sampler is None),
145 sampler=pretrain_sampler,
146 batch_size=args.ptx_batch_size,
147 collate_fn=data_collator,
148 )
149
150 def tokenize_fn(texts):
151 # MUST padding to max length to ensure inputs of all ranks have the same length
152 # Different length may lead to hang when using gemini, as different generation steps
153 batch = tokenizer(texts, return_tensors="pt", max_length=96, padding="max_length", truncation=True)
154 return {k: v.to(torch.cuda.current_device()) for k, v in batch.items()}
155
156 (actor, actor_optim), (critic, critic_optim) = strategy.prepare((actor, actor_optim), (critic, critic_optim))
157
158 # configure trainer
159 trainer = PPOTrainer(
160 strategy,
161 actor,
162 critic,
163 reward_model,
164 initial_model,
165 actor_optim,
166 critic_optim,
167 kl_coef=args.kl_coef,
168 ptx_coef=args.ptx_coef,
169 train_batch_size=args.train_batch_size,
170 experience_batch_size=args.experience_batch_size,
171 tokenizer=tokenize_fn,
172 max_length=512,
173 do_sample=True,
174 temperature=1.0,
175 top_k=50,
176 pad_token_id=tokenizer.pad_token_id,
177 eos_token_id=tokenizer.eos_token_id,
178 )
179
180 trainer.fit(
181 prompt_dataloader=prompt_dataloader,
182 pretrain_dataloader=pretrain_dataloader,
183 num_episodes=args.num_episodes,
184 num_update_steps=args.num_update_steps,
185 num_collect_steps=args.num_collect_steps,
186 )
187
188 # save model checkpoint after fitting
189 trainer.save_model(args.save_path, only_rank0=True, tokenizer=tokenizer)
190 # save optimizer checkpoint on all ranks
191 if args.need_optim_ckpt:
192 strategy.save_optimizer(
193 actor_optim, "actor_optim_checkpoint_prompts_%d.pt" % (torch.cuda.current_device()), only_rank0=False
194 )
195
196
197 if __name__ == "__main__":
198 parser = argparse.ArgumentParser()
199 parser.add_argument("--prompt_path", type=str, default=None, help="path to the prompt dataset")
200 parser.add_argument("--pretrain_dataset", type=str, default=None, help="path to the pretrained dataset")
201 parser.add_argument(
202 "--strategy", choices=["ddp", "colossalai_gemini", "colossalai_zero2"], default="ddp", help="strategy to use"
203 )
204 parser.add_argument("--model", default="gpt2", choices=["gpt2", "bloom", "opt", "llama"])
205 parser.add_argument("--pretrain", type=str, default=None)
206 parser.add_argument("--sft_lora_path", type=str, default=None)
207 parser.add_argument("--rm_model", default=None, choices=["gpt2", "bloom", "opt", "llama"])
208 parser.add_argument("--rm_path", type=str, default=None)
209 parser.add_argument("--rm_pretrain", type=str, default=None)
210 parser.add_argument("--save_path", type=str, default="actor_checkpoint_prompts")
211 parser.add_argument("--need_optim_ckpt", type=bool, default=False)
212 parser.add_argument("--num_episodes", type=int, default=10)
213 parser.add_argument("--num_collect_steps", type=int, default=10)
214 parser.add_argument("--num_update_steps", type=int, default=5)
215 parser.add_argument("--train_batch_size", type=int, default=2)
216 parser.add_argument("--ptx_batch_size", type=int, default=1)
217 parser.add_argument("--experience_batch_size", type=int, default=8)
218 parser.add_argument("--lora_rank", type=int, default=0, help="low-rank adaptation matrices rank")
219 parser.add_argument("--kl_coef", type=float, default=0.1)
220 parser.add_argument("--ptx_coef", type=float, default=0.9)
221 args = parser.parse_args()
222 main(args)
223
[end of applications/Chat/examples/community/peft/train_peft_prompts.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/applications/Chat/examples/community/peft/train_peft_prompts.py b/applications/Chat/examples/community/peft/train_peft_prompts.py
--- a/applications/Chat/examples/community/peft/train_peft_prompts.py
+++ b/applications/Chat/examples/community/peft/train_peft_prompts.py
@@ -24,7 +24,9 @@
if args.strategy == "ddp":
strategy = DDPStrategy()
elif args.strategy == "colossalai_gemini":
- strategy = GeminiStrategy(placement_policy="static", offload_optim_frac=1.0, offload_param_frac=1.0, initial_scale=2**5)
+ strategy = GeminiStrategy(
+ placement_policy="static", offload_optim_frac=1.0, offload_param_frac=1.0, initial_scale=2**5
+ )
elif args.strategy == "colossalai_zero2":
strategy = LowLevelZeroStrategy(stage=2, placement_policy="cpu")
else:
| {"golden_diff": "diff --git a/applications/Chat/examples/community/peft/train_peft_prompts.py b/applications/Chat/examples/community/peft/train_peft_prompts.py\n--- a/applications/Chat/examples/community/peft/train_peft_prompts.py\n+++ b/applications/Chat/examples/community/peft/train_peft_prompts.py\n@@ -24,7 +24,9 @@\n if args.strategy == \"ddp\":\n strategy = DDPStrategy()\n elif args.strategy == \"colossalai_gemini\":\n- strategy = GeminiStrategy(placement_policy=\"static\", offload_optim_frac=1.0, offload_param_frac=1.0, initial_scale=2**5)\n+ strategy = GeminiStrategy(\n+ placement_policy=\"static\", offload_optim_frac=1.0, offload_param_frac=1.0, initial_scale=2**5\n+ )\n elif args.strategy == \"colossalai_zero2\":\n strategy = LowLevelZeroStrategy(stage=2, placement_policy=\"cpu\")\n else:\n", "issue": "[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n[tensor] fix some unittests\n\n", "before_files": [{"content": "import argparse\n\nimport torch\nimport torch.distributed as dist\nfrom coati.dataset import DataCollatorForSupervisedDataset\nfrom coati.models.bloom import BLOOMRM, BLOOMCritic\nfrom coati.models.gpt import GPTRM, GPTCritic\nfrom coati.models.llama import LlamaCritic, LlamaRM\nfrom coati.models.opt import OPTRM, OPTCritic\nfrom coati.trainer import PPOTrainer\nfrom coati.trainer.strategies import DDPStrategy, GeminiStrategy, LowLevelZeroStrategy\nfrom easy_dataset import EasyPromptsDataset, EasySupervisedDataset\nfrom easy_models import BLOOMActor\nfrom torch.optim import Adam\nfrom torch.utils.data import DataLoader\nfrom torch.utils.data.distributed import DistributedSampler\nfrom transformers import AutoTokenizer, BloomTokenizerFast, GPT2Tokenizer, LlamaTokenizer\n\nfrom colossalai.nn.optimizer import HybridAdam\n\n\ndef main(args):\n # configure strategy\n if args.strategy == \"ddp\":\n strategy = DDPStrategy()\n elif args.strategy == \"colossalai_gemini\":\n strategy = GeminiStrategy(placement_policy=\"static\", offload_optim_frac=1.0, offload_param_frac=1.0, initial_scale=2**5)\n elif args.strategy == \"colossalai_zero2\":\n strategy = LowLevelZeroStrategy(stage=2, placement_policy=\"cpu\")\n else:\n raise ValueError(f'Unsupported strategy \"{args.strategy}\"')\n\n if args.rm_path is not None:\n state_dict = torch.load(args.rm_path, map_location=\"cpu\")\n\n # configure model\n if args.model == \"bloom\":\n # initial_model = BLOOMActor(pretrained=args.pretrain)\n print(\"Using peft lora to load Bloom model as initial_model\")\n initial_model = BLOOMActor(pretrained=args.pretrain, lora_path=args.sft_lora_path)\n print(\"Using peft lora to load Bloom model as initial_model (Done)\")\n else:\n raise ValueError(f'Unsupported actor model \"{args.model}\"')\n\n if args.rm_model == None:\n rm_model_name = args.model\n else:\n rm_model_name = args.rm_model\n\n if rm_model_name == \"gpt2\":\n reward_model = GPTRM(pretrained=args.rm_pretrain)\n elif rm_model_name == \"bloom\":\n print(\"load bloom reward model \", args.rm_pretrain)\n reward_model = BLOOMRM(pretrained=args.rm_pretrain)\n elif rm_model_name == \"opt\":\n reward_model = OPTRM(pretrained=args.rm_pretrain)\n elif rm_model_name == \"llama\":\n reward_model = LlamaRM(pretrained=args.rm_pretrain)\n else:\n raise ValueError(f'Unsupported reward model \"{rm_model_name}\"')\n\n if args.rm_path is not None:\n print(\"Loading reward model from\", args.rm_path)\n reward_model.load_state_dict(state_dict)\n\n if args.strategy != \"colossalai_gemini\":\n 
initial_model.to(torch.float16).to(torch.cuda.current_device())\n reward_model.to(torch.float16).to(torch.cuda.current_device())\n\n with strategy.model_init_context():\n if args.model == \"bloom\":\n # actor = BLOOMActor(pretrained=args.pretrain, lora_rank=args.lora_rank)\n print(\"Using peft lora to load Bloom model as Actor\")\n actor = BLOOMActor(pretrained=args.pretrain, lora_path=args.sft_lora_path)\n print(\"Using peft lora to load Bloom model as Actor (Done)\")\n else:\n raise ValueError(f'Unsupported actor model \"{args.model}\"')\n\n if rm_model_name == \"gpt2\":\n critic = GPTCritic(pretrained=args.rm_pretrain, lora_rank=args.lora_rank, use_action_mask=True)\n elif rm_model_name == \"bloom\":\n print(\"load bloom critic \", args.rm_pretrain, \" lora_rank \", args.lora_rank, \" use_action_mask \", True)\n critic = BLOOMCritic(pretrained=args.rm_pretrain, lora_rank=args.lora_rank, use_action_mask=True)\n print(\"load bloom critic (Done) \")\n elif rm_model_name == \"opt\":\n critic = OPTCritic(pretrained=args.rm_pretrain, lora_rank=args.lora_rank, use_action_mask=True)\n elif rm_model_name == \"llama\":\n critic = LlamaCritic(pretrained=args.rm_pretrain, lora_rank=args.lora_rank, use_action_mask=True)\n else:\n raise ValueError(f'Unsupported reward model \"{rm_model_name}\"')\n\n if args.rm_path is not None:\n print(\"Loading reward model from\", args.rm_path)\n critic.load_state_dict(state_dict)\n del state_dict\n\n if args.strategy != \"colossalai_gemini\":\n critic.to(torch.float16).to(torch.cuda.current_device())\n actor.to(torch.float16).to(torch.cuda.current_device())\n\n # configure optimizer\n if args.strategy.startswith(\"colossalai\"):\n actor_optim = HybridAdam(actor.parameters(), lr=1e-7)\n critic_optim = HybridAdam(critic.parameters(), lr=1e-7)\n else:\n actor_optim = Adam(actor.parameters(), lr=1e-7)\n critic_optim = Adam(critic.parameters(), lr=1e-7)\n\n # configure tokenizer\n if args.model == \"gpt2\":\n tokenizer = GPT2Tokenizer.from_pretrained(args.rm_pretrain)\n tokenizer.pad_token = tokenizer.eos_token\n elif args.model == \"bloom\":\n tokenizer = BloomTokenizerFast.from_pretrained(args.rm_pretrain)\n tokenizer.pad_token = tokenizer.eos_token\n elif args.model == \"opt\":\n tokenizer = AutoTokenizer.from_pretrained(args.rm_pretrain)\n tokenizer.pad_token = tokenizer.eos_token\n elif args.model == \"llama\":\n tokenizer = LlamaTokenizer.from_pretrained(args.pretrain)\n tokenizer.eos_token = \"</s>\"\n tokenizer.pad_token = tokenizer.unk_token\n else:\n raise ValueError(f'Unsupported model \"{args.model}\"')\n\n data_collator = DataCollatorForSupervisedDataset(tokenizer=tokenizer)\n\n prompt_dataset = EasyPromptsDataset(args.prompt_path, tokenizer)\n if dist.is_initialized() and dist.get_world_size() > 1:\n prompt_sampler = DistributedSampler(prompt_dataset, shuffle=True, seed=42, drop_last=True)\n else:\n prompt_sampler = None\n prompt_dataloader = DataLoader(\n prompt_dataset, shuffle=(prompt_sampler is None), sampler=prompt_sampler, batch_size=args.train_batch_size\n )\n\n pretrain_dataset = EasySupervisedDataset(args.pretrain_dataset, tokenizer)\n if dist.is_initialized() and dist.get_world_size() > 1:\n pretrain_sampler = DistributedSampler(pretrain_dataset, shuffle=True, seed=42, drop_last=True)\n else:\n pretrain_sampler = None\n pretrain_dataloader = DataLoader(\n pretrain_dataset,\n shuffle=(pretrain_sampler is None),\n sampler=pretrain_sampler,\n batch_size=args.ptx_batch_size,\n collate_fn=data_collator,\n )\n\n def tokenize_fn(texts):\n # MUST 
padding to max length to ensure inputs of all ranks have the same length\n # Different length may lead to hang when using gemini, as different generation steps\n batch = tokenizer(texts, return_tensors=\"pt\", max_length=96, padding=\"max_length\", truncation=True)\n return {k: v.to(torch.cuda.current_device()) for k, v in batch.items()}\n\n (actor, actor_optim), (critic, critic_optim) = strategy.prepare((actor, actor_optim), (critic, critic_optim))\n\n # configure trainer\n trainer = PPOTrainer(\n strategy,\n actor,\n critic,\n reward_model,\n initial_model,\n actor_optim,\n critic_optim,\n kl_coef=args.kl_coef,\n ptx_coef=args.ptx_coef,\n train_batch_size=args.train_batch_size,\n experience_batch_size=args.experience_batch_size,\n tokenizer=tokenize_fn,\n max_length=512,\n do_sample=True,\n temperature=1.0,\n top_k=50,\n pad_token_id=tokenizer.pad_token_id,\n eos_token_id=tokenizer.eos_token_id,\n )\n\n trainer.fit(\n prompt_dataloader=prompt_dataloader,\n pretrain_dataloader=pretrain_dataloader,\n num_episodes=args.num_episodes,\n num_update_steps=args.num_update_steps,\n num_collect_steps=args.num_collect_steps,\n )\n\n # save model checkpoint after fitting\n trainer.save_model(args.save_path, only_rank0=True, tokenizer=tokenizer)\n # save optimizer checkpoint on all ranks\n if args.need_optim_ckpt:\n strategy.save_optimizer(\n actor_optim, \"actor_optim_checkpoint_prompts_%d.pt\" % (torch.cuda.current_device()), only_rank0=False\n )\n\n\nif __name__ == \"__main__\":\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--prompt_path\", type=str, default=None, help=\"path to the prompt dataset\")\n parser.add_argument(\"--pretrain_dataset\", type=str, default=None, help=\"path to the pretrained dataset\")\n parser.add_argument(\n \"--strategy\", choices=[\"ddp\", \"colossalai_gemini\", \"colossalai_zero2\"], default=\"ddp\", help=\"strategy to use\"\n )\n parser.add_argument(\"--model\", default=\"gpt2\", choices=[\"gpt2\", \"bloom\", \"opt\", \"llama\"])\n parser.add_argument(\"--pretrain\", type=str, default=None)\n parser.add_argument(\"--sft_lora_path\", type=str, default=None)\n parser.add_argument(\"--rm_model\", default=None, choices=[\"gpt2\", \"bloom\", \"opt\", \"llama\"])\n parser.add_argument(\"--rm_path\", type=str, default=None)\n parser.add_argument(\"--rm_pretrain\", type=str, default=None)\n parser.add_argument(\"--save_path\", type=str, default=\"actor_checkpoint_prompts\")\n parser.add_argument(\"--need_optim_ckpt\", type=bool, default=False)\n parser.add_argument(\"--num_episodes\", type=int, default=10)\n parser.add_argument(\"--num_collect_steps\", type=int, default=10)\n parser.add_argument(\"--num_update_steps\", type=int, default=5)\n parser.add_argument(\"--train_batch_size\", type=int, default=2)\n parser.add_argument(\"--ptx_batch_size\", type=int, default=1)\n parser.add_argument(\"--experience_batch_size\", type=int, default=8)\n parser.add_argument(\"--lora_rank\", type=int, default=0, help=\"low-rank adaptation matrices rank\")\n parser.add_argument(\"--kl_coef\", type=float, default=0.1)\n parser.add_argument(\"--ptx_coef\", type=float, default=0.9)\n args = parser.parse_args()\n main(args)\n", "path": "applications/Chat/examples/community/peft/train_peft_prompts.py"}]} | 3,494 | 224 |