problem_id (string, 18-22 chars) | source (string, 1 class: rasdani/github-patches) | task_type (string, 1 class: git_diff) | in_source_id (string, 13-58 chars) | prompt (string, 1.1k-10.2k chars) | golden_diff (string, 151-4.94k chars) | verification_info (string, 582-21k chars) | num_tokens (int64, 271-2.05k) | num_tokens_diff (int64, 47-1.02k)
---|---|---|---|---|---|---|---|---|
gh_patches_debug_30888 | rasdani/github-patches | git_diff | mozilla__telemetry-analysis-service-551 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
EMR release form shows inactive records
The EMR release model has a column for `is_active`, but it's not being considered when querying the list of EMR releases in the form.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `atmo/clusters/queries.py`
Content:
```
1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, you can obtain one at http://mozilla.org/MPL/2.0/.
4 from django.db import models
5
6
7 class EMRReleaseQuerySet(models.QuerySet):
8
9 def stable(self):
10 return self.filter(
11 is_experimental=False,
12 is_deprecated=False,
13 is_active=True,
14 )
15
16 def experimental(self):
17 return self.filter(
18 is_experimental=True,
19 is_active=True,
20 )
21
22 def deprecated(self):
23 return self.filter(
24 is_deprecated=True,
25 is_active=True,
26 )
27
28
29 class ClusterQuerySet(models.QuerySet):
30
31 def active(self):
32 return self.filter(
33 most_recent_status__in=self.model.ACTIVE_STATUS_LIST,
34 )
35
36 def terminated(self):
37 return self.filter(
38 most_recent_status__in=self.model.TERMINATED_STATUS_LIST,
39 )
40
41 def failed(self):
42 return self.filter(
43 most_recent_status__in=self.model.FAILED_STATUS_LIST,
44 )
45
```
Path: `atmo/clusters/forms.py`
Content:
```
1 # This Source Code Form is subject to the terms of the Mozilla Public
2 # License, v. 2.0. If a copy of the MPL was not distributed with this
3 # file, you can obtain one at http://mozilla.org/MPL/2.0/.
4 from django import forms
5 from django.conf import settings
6 from django.core.urlresolvers import reverse
7
8 from . import models
9 from ..forms.mixins import AutoClassFormMixin, CreatedByModelFormMixin
10 from ..keys.models import SSHKey
11
12
13 class EMRReleaseChoiceField(forms.ModelChoiceField):
14 def __init__(self, *args, **kwargs):
15 super().__init__(
16 label='EMR release',
17 queryset=models.EMRRelease.objects.all(),
18 required=True,
19 empty_label=None,
20 widget=forms.RadioSelect(attrs={
21 'required': 'required',
22 'class': 'radioset',
23 }),
24 help_text=models.Cluster.EMR_RELEASE_HELP,
25 )
26
27 def label_from_instance(self, obj):
28 label = obj.version
29 extra = []
30 if obj.is_experimental:
31 extra.append('experimental')
32 elif obj.is_deprecated:
33 extra.append('deprecated')
34 if extra:
35 label = '%s (%s)' % (label, ', '.join(extra))
36 return label
37
38
39 class NewClusterForm(AutoClassFormMixin, CreatedByModelFormMixin,
40 forms.ModelForm):
41 prefix = 'new'
42
43 identifier = forms.RegexField(
44 required=True,
45 label='Identifier',
46 regex=r'^[a-z0-9-]{1,100}$',
47 widget=forms.TextInput(attrs={
48 'pattern': r'[a-z0-9-]{1,100}',
49 'data-parsley-pattern-message': 'Identifier contains invalid characters.',
50 }),
51 help_text='A unique identifier for your cluster, visible in '
52 'the AWS management console. (Lowercase, use hyphens '
53 'instead of spaces.)'
54 )
55 size = forms.IntegerField(
56 label='Size',
57 required=True,
58 min_value=1,
59 max_value=settings.AWS_CONFIG['MAX_CLUSTER_SIZE'],
60 widget=forms.NumberInput(attrs={
61 'min': '1',
62 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_SIZE']),
63 }),
64 help_text=('Number of workers to use in the cluster, between 1 and %s. '
65 'For testing or development 1 is recommended.' %
66 settings.AWS_CONFIG['MAX_CLUSTER_SIZE'])
67 )
68 lifetime = forms.IntegerField(
69 label='Lifetime',
70 required=True,
71 min_value=2,
72 max_value=settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'],
73 widget=forms.NumberInput(attrs={
74 'min': '2',
75 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME']),
76 }),
77 help_text=('Lifetime in hours after which the cluster is automatically '
78 'terminated, between 2 and %s.' %
79 settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'])
80 )
81 ssh_key = forms.ModelChoiceField(
82 label='SSH key',
83 queryset=SSHKey.objects.all(),
84 required=True,
85 empty_label=None,
86 )
87 emr_release = EMRReleaseChoiceField()
88
89 class Meta:
90 model = models.Cluster
91 fields = ['identifier', 'size', 'lifetime', 'ssh_key', 'emr_release']
92
93 def __init__(self, *args, **kwargs):
94 super().__init__(*args, **kwargs)
95 user_sshkeys = self.created_by.created_sshkeys.all()
96 self.fields['ssh_key'].queryset = user_sshkeys.all()
97 self.fields['ssh_key'].help_text = (
98 'The SSH key to deploy to the cluster. '
99 'See <a href="%s">your keys</a> or '
100 '<a href="%s">add a new one</a>.' %
101 (reverse('keys-list'), reverse('keys-new'))
102 )
103 # if there are fewer options we just show radio select buttons
104 if user_sshkeys.count() <= 6:
105 self.fields['ssh_key'].widget = forms.RadioSelect(
106 choices=self.fields['ssh_key'].choices,
107 attrs={
108 'class': 'radioset',
109 },
110 )
111
112
113 class ExtendClusterForm(AutoClassFormMixin, forms.Form):
114 prefix = 'extend'
115 extension = forms.IntegerField(
116 label='Lifetime extension in hours',
117 required=True,
118 min_value=2,
119 max_value=settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'],
120 widget=forms.NumberInput(attrs={
121 'min': '2',
122 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME']),
123 }),
124 help_text=("Number of hours to extend the cluster's lifetime with, between 2 and %s." %
125 settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'])
126 )
127
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/atmo/clusters/forms.py b/atmo/clusters/forms.py
--- a/atmo/clusters/forms.py
+++ b/atmo/clusters/forms.py
@@ -4,6 +4,7 @@
from django import forms
from django.conf import settings
from django.core.urlresolvers import reverse
+from django.utils.safestring import mark_safe
from . import models
from ..forms.mixins import AutoClassFormMixin, CreatedByModelFormMixin
@@ -14,7 +15,7 @@
def __init__(self, *args, **kwargs):
super().__init__(
label='EMR release',
- queryset=models.EMRRelease.objects.all(),
+ queryset=models.EMRRelease.objects.active(),
required=True,
empty_label=None,
widget=forms.RadioSelect(attrs={
@@ -28,11 +29,11 @@
label = obj.version
extra = []
if obj.is_experimental:
- extra.append('experimental')
+ extra.append('<span class="label label-info">experimental</span>')
elif obj.is_deprecated:
- extra.append('deprecated')
+ extra.append('<span class="label label-warning">deprecated</span>')
if extra:
- label = '%s (%s)' % (label, ', '.join(extra))
+ label = mark_safe('%s %s' % (label, ''.join(extra)))
return label
diff --git a/atmo/clusters/queries.py b/atmo/clusters/queries.py
--- a/atmo/clusters/queries.py
+++ b/atmo/clusters/queries.py
@@ -6,6 +6,11 @@
class EMRReleaseQuerySet(models.QuerySet):
+ def active(self):
+ return self.filter(
+ is_active=True,
+ )
+
def stable(self):
return self.filter(
is_experimental=False,
| {"golden_diff": "diff --git a/atmo/clusters/forms.py b/atmo/clusters/forms.py\n--- a/atmo/clusters/forms.py\n+++ b/atmo/clusters/forms.py\n@@ -4,6 +4,7 @@\n from django import forms\n from django.conf import settings\n from django.core.urlresolvers import reverse\n+from django.utils.safestring import mark_safe\n \n from . import models\n from ..forms.mixins import AutoClassFormMixin, CreatedByModelFormMixin\n@@ -14,7 +15,7 @@\n def __init__(self, *args, **kwargs):\n super().__init__(\n label='EMR release',\n- queryset=models.EMRRelease.objects.all(),\n+ queryset=models.EMRRelease.objects.active(),\n required=True,\n empty_label=None,\n widget=forms.RadioSelect(attrs={\n@@ -28,11 +29,11 @@\n label = obj.version\n extra = []\n if obj.is_experimental:\n- extra.append('experimental')\n+ extra.append('<span class=\"label label-info\">experimental</span>')\n elif obj.is_deprecated:\n- extra.append('deprecated')\n+ extra.append('<span class=\"label label-warning\">deprecated</span>')\n if extra:\n- label = '%s (%s)' % (label, ', '.join(extra))\n+ label = mark_safe('%s %s' % (label, ''.join(extra)))\n return label\n \n \ndiff --git a/atmo/clusters/queries.py b/atmo/clusters/queries.py\n--- a/atmo/clusters/queries.py\n+++ b/atmo/clusters/queries.py\n@@ -6,6 +6,11 @@\n \n class EMRReleaseQuerySet(models.QuerySet):\n \n+ def active(self):\n+ return self.filter(\n+ is_active=True,\n+ )\n+\n def stable(self):\n return self.filter(\n is_experimental=False,\n", "issue": "EMR release form shows inactive records\nThe EMR release model has a column for `is_active`, but it's not being considered when querying the list of EMR releases in the form. \n", "before_files": [{"content": "# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, you can obtain one at http://mozilla.org/MPL/2.0/.\nfrom django.db import models\n\n\nclass EMRReleaseQuerySet(models.QuerySet):\n\n def stable(self):\n return self.filter(\n is_experimental=False,\n is_deprecated=False,\n is_active=True,\n )\n\n def experimental(self):\n return self.filter(\n is_experimental=True,\n is_active=True,\n )\n\n def deprecated(self):\n return self.filter(\n is_deprecated=True,\n is_active=True,\n )\n\n\nclass ClusterQuerySet(models.QuerySet):\n\n def active(self):\n return self.filter(\n most_recent_status__in=self.model.ACTIVE_STATUS_LIST,\n )\n\n def terminated(self):\n return self.filter(\n most_recent_status__in=self.model.TERMINATED_STATUS_LIST,\n )\n\n def failed(self):\n return self.filter(\n most_recent_status__in=self.model.FAILED_STATUS_LIST,\n )\n", "path": "atmo/clusters/queries.py"}, {"content": "# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, you can obtain one at http://mozilla.org/MPL/2.0/.\nfrom django import forms\nfrom django.conf import settings\nfrom django.core.urlresolvers import reverse\n\nfrom . 
import models\nfrom ..forms.mixins import AutoClassFormMixin, CreatedByModelFormMixin\nfrom ..keys.models import SSHKey\n\n\nclass EMRReleaseChoiceField(forms.ModelChoiceField):\n def __init__(self, *args, **kwargs):\n super().__init__(\n label='EMR release',\n queryset=models.EMRRelease.objects.all(),\n required=True,\n empty_label=None,\n widget=forms.RadioSelect(attrs={\n 'required': 'required',\n 'class': 'radioset',\n }),\n help_text=models.Cluster.EMR_RELEASE_HELP,\n )\n\n def label_from_instance(self, obj):\n label = obj.version\n extra = []\n if obj.is_experimental:\n extra.append('experimental')\n elif obj.is_deprecated:\n extra.append('deprecated')\n if extra:\n label = '%s (%s)' % (label, ', '.join(extra))\n return label\n\n\nclass NewClusterForm(AutoClassFormMixin, CreatedByModelFormMixin,\n forms.ModelForm):\n prefix = 'new'\n\n identifier = forms.RegexField(\n required=True,\n label='Identifier',\n regex=r'^[a-z0-9-]{1,100}$',\n widget=forms.TextInput(attrs={\n 'pattern': r'[a-z0-9-]{1,100}',\n 'data-parsley-pattern-message': 'Identifier contains invalid characters.',\n }),\n help_text='A unique identifier for your cluster, visible in '\n 'the AWS management console. (Lowercase, use hyphens '\n 'instead of spaces.)'\n )\n size = forms.IntegerField(\n label='Size',\n required=True,\n min_value=1,\n max_value=settings.AWS_CONFIG['MAX_CLUSTER_SIZE'],\n widget=forms.NumberInput(attrs={\n 'min': '1',\n 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_SIZE']),\n }),\n help_text=('Number of workers to use in the cluster, between 1 and %s. '\n 'For testing or development 1 is recommended.' %\n settings.AWS_CONFIG['MAX_CLUSTER_SIZE'])\n )\n lifetime = forms.IntegerField(\n label='Lifetime',\n required=True,\n min_value=2,\n max_value=settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'],\n widget=forms.NumberInput(attrs={\n 'min': '2',\n 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME']),\n }),\n help_text=('Lifetime in hours after which the cluster is automatically '\n 'terminated, between 2 and %s.' %\n settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'])\n )\n ssh_key = forms.ModelChoiceField(\n label='SSH key',\n queryset=SSHKey.objects.all(),\n required=True,\n empty_label=None,\n )\n emr_release = EMRReleaseChoiceField()\n\n class Meta:\n model = models.Cluster\n fields = ['identifier', 'size', 'lifetime', 'ssh_key', 'emr_release']\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n user_sshkeys = self.created_by.created_sshkeys.all()\n self.fields['ssh_key'].queryset = user_sshkeys.all()\n self.fields['ssh_key'].help_text = (\n 'The SSH key to deploy to the cluster. '\n 'See <a href=\"%s\">your keys</a> or '\n '<a href=\"%s\">add a new one</a>.' 
%\n (reverse('keys-list'), reverse('keys-new'))\n )\n # if there are fewer options we just show radio select buttons\n if user_sshkeys.count() <= 6:\n self.fields['ssh_key'].widget = forms.RadioSelect(\n choices=self.fields['ssh_key'].choices,\n attrs={\n 'class': 'radioset',\n },\n )\n\n\nclass ExtendClusterForm(AutoClassFormMixin, forms.Form):\n prefix = 'extend'\n extension = forms.IntegerField(\n label='Lifetime extension in hours',\n required=True,\n min_value=2,\n max_value=settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'],\n widget=forms.NumberInput(attrs={\n 'min': '2',\n 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME']),\n }),\n help_text=(\"Number of hours to extend the cluster's lifetime with, between 2 and %s.\" %\n settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'])\n )\n", "path": "atmo/clusters/forms.py"}], "after_files": [{"content": "# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, you can obtain one at http://mozilla.org/MPL/2.0/.\nfrom django.db import models\n\n\nclass EMRReleaseQuerySet(models.QuerySet):\n\n def active(self):\n return self.filter(\n is_active=True,\n )\n\n def stable(self):\n return self.filter(\n is_experimental=False,\n is_deprecated=False,\n is_active=True,\n )\n\n def experimental(self):\n return self.filter(\n is_experimental=True,\n is_active=True,\n )\n\n def deprecated(self):\n return self.filter(\n is_deprecated=True,\n is_active=True,\n )\n\n\nclass ClusterQuerySet(models.QuerySet):\n\n def active(self):\n return self.filter(\n most_recent_status__in=self.model.ACTIVE_STATUS_LIST,\n )\n\n def terminated(self):\n return self.filter(\n most_recent_status__in=self.model.TERMINATED_STATUS_LIST,\n )\n\n def failed(self):\n return self.filter(\n most_recent_status__in=self.model.FAILED_STATUS_LIST,\n )\n", "path": "atmo/clusters/queries.py"}, {"content": "# This Source Code Form is subject to the terms of the Mozilla Public\n# License, v. 2.0. If a copy of the MPL was not distributed with this\n# file, you can obtain one at http://mozilla.org/MPL/2.0/.\nfrom django import forms\nfrom django.conf import settings\nfrom django.core.urlresolvers import reverse\nfrom django.utils.safestring import mark_safe\n\nfrom . 
import models\nfrom ..forms.mixins import AutoClassFormMixin, CreatedByModelFormMixin\nfrom ..keys.models import SSHKey\n\n\nclass EMRReleaseChoiceField(forms.ModelChoiceField):\n def __init__(self, *args, **kwargs):\n super().__init__(\n label='EMR release',\n queryset=models.EMRRelease.objects.active(),\n required=True,\n empty_label=None,\n widget=forms.RadioSelect(attrs={\n 'required': 'required',\n 'class': 'radioset',\n }),\n help_text=models.Cluster.EMR_RELEASE_HELP,\n )\n\n def label_from_instance(self, obj):\n label = obj.version\n extra = []\n if obj.is_experimental:\n extra.append('<span class=\"label label-info\">experimental</span>')\n elif obj.is_deprecated:\n extra.append('<span class=\"label label-warning\">deprecated</span>')\n if extra:\n label = mark_safe('%s %s' % (label, ''.join(extra)))\n return label\n\n\nclass NewClusterForm(AutoClassFormMixin, CreatedByModelFormMixin,\n forms.ModelForm):\n prefix = 'new'\n\n identifier = forms.RegexField(\n required=True,\n label='Identifier',\n regex=r'^[a-z0-9-]{1,100}$',\n widget=forms.TextInput(attrs={\n 'pattern': r'[a-z0-9-]{1,100}',\n 'data-parsley-pattern-message': 'Identifier contains invalid characters.',\n }),\n help_text='A unique identifier for your cluster, visible in '\n 'the AWS management console. (Lowercase, use hyphens '\n 'instead of spaces.)'\n )\n size = forms.IntegerField(\n label='Size',\n required=True,\n min_value=1,\n max_value=settings.AWS_CONFIG['MAX_CLUSTER_SIZE'],\n widget=forms.NumberInput(attrs={\n 'min': '1',\n 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_SIZE']),\n }),\n help_text=('Number of workers to use in the cluster, between 1 and %s. '\n 'For testing or development 1 is recommended.' %\n settings.AWS_CONFIG['MAX_CLUSTER_SIZE'])\n )\n lifetime = forms.IntegerField(\n label='Lifetime',\n required=True,\n min_value=2,\n max_value=settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'],\n widget=forms.NumberInput(attrs={\n 'min': '2',\n 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME']),\n }),\n help_text=('Lifetime in hours after which the cluster is automatically '\n 'terminated, between 2 and %s.' %\n settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'])\n )\n ssh_key = forms.ModelChoiceField(\n label='SSH key',\n queryset=SSHKey.objects.all(),\n required=True,\n empty_label=None,\n )\n emr_release = EMRReleaseChoiceField()\n\n class Meta:\n model = models.Cluster\n fields = ['identifier', 'size', 'lifetime', 'ssh_key', 'emr_release']\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n user_sshkeys = self.created_by.created_sshkeys.all()\n self.fields['ssh_key'].queryset = user_sshkeys.all()\n self.fields['ssh_key'].help_text = (\n 'The SSH key to deploy to the cluster. '\n 'See <a href=\"%s\">your keys</a> or '\n '<a href=\"%s\">add a new one</a>.' 
%\n (reverse('keys-list'), reverse('keys-new'))\n )\n # if there are fewer options we just show radio select buttons\n if user_sshkeys.count() <= 6:\n self.fields['ssh_key'].widget = forms.RadioSelect(\n choices=self.fields['ssh_key'].choices,\n attrs={\n 'class': 'radioset',\n },\n )\n\n\nclass ExtendClusterForm(AutoClassFormMixin, forms.Form):\n prefix = 'extend'\n extension = forms.IntegerField(\n label='Lifetime extension in hours',\n required=True,\n min_value=2,\n max_value=settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'],\n widget=forms.NumberInput(attrs={\n 'min': '2',\n 'max': str(settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME']),\n }),\n help_text=(\"Number of hours to extend the cluster's lifetime with, between 2 and %s.\" %\n settings.AWS_CONFIG['MAX_CLUSTER_LIFETIME'])\n )\n", "path": "atmo/clusters/forms.py"}]} | 1,930 | 406 |
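Records like the one above can be pulled down and inspected with a few lines of Python. This is a sketch, not part of the dataset: the Hub dataset id and the `train` split name are guesses based on the `source` column shown in each row, so substitute the real values if they differ.

```python
# Sketch: load the dataset and peek at one record.
# Assumptions: the Hub id matches the `source` column and a "train" split exists.
from datasets import load_dataset

ds = load_dataset("rasdani/github-patches", split="train")
record = ds[0]
print(record["problem_id"], record["task_type"], record["num_tokens"])
print(record["prompt"][:200])  # the prompt text, as rendered in the rows above
```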
gh_patches_debug_22288 | rasdani/github-patches | git_diff | dask__distributed-8381 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Dashboards fail with 500 status code when using `bokeh<3.3.0`
When using the latest `main` with `bokeh<3.3.0`, the dashboards fail with a 500 status code.
Scheduler traceback:
```
2023-11-30 18:00:07,300 - tornado.application - ERROR - Uncaught exception GET /status (192.168.178.45)
HTTPServerRequest(protocol='http', host='192.168.178.45:8787', method='GET', uri='/status', version='HTTP/1.1', remote_ip='192.168.178.45')
Traceback (most recent call last):
File "/opt/homebrew/Caskroom/mambaforge/base/envs/dask-distributed/lib/python3.11/site-packages/tornado/web.py", line 1786, in _execute
result = await result
^^^^^^^^^^^^
File "/opt/homebrew/Caskroom/mambaforge/base/envs/dask-distributed/lib/python3.11/site-packages/bokeh/server/views/doc_handler.py", line 57, in get
resources=self.application.resources(),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/hendrikmakait/projects/dask/distributed/distributed/dashboard/core.py", line 37, in resources
return super().resources(absolute_url)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Caskroom/mambaforge/base/envs/dask-distributed/lib/python3.11/site-packages/bokeh/server/tornado.py", line 621, in resources
return Resources(mode="server", root_url=root_url, path_versioner=StaticHandler.append_version)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Caskroom/mambaforge/base/envs/dask-distributed/lib/python3.11/site-packages/bokeh/resources.py", line 377, in __init__
if root_url and not root_url.endswith("/"):
^^^^^^^^^^^^^^^^^
AttributeError: 'bool' object has no attribute 'endswith'
```
git bisect blames #8347
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `distributed/dashboard/core.py`
Content:
```
1 from __future__ import annotations
2
3 import functools
4 import warnings
5
6 from bokeh.application import Application
7 from bokeh.application.handlers.function import FunctionHandler
8 from bokeh.resources import Resources
9 from bokeh.server.server import BokehTornado
10 from bokeh.server.util import create_hosts_allowlist
11
12 import dask
13
14 from distributed.dashboard.utils import BOKEH_VERSION
15 from distributed.versions import BOKEH_REQUIREMENT
16
17 # Set `prereleases=True` to allow for use with dev versions of `bokeh`
18 if not BOKEH_REQUIREMENT.specifier.contains(BOKEH_VERSION, prereleases=True):
19 warnings.warn(
20 f"\nDask needs {BOKEH_REQUIREMENT} for the dashboard."
21 f"\nYou have bokeh={BOKEH_VERSION}."
22 "\nContinuing without the dashboard."
23 )
24 raise ImportError(
25 f"Dask needs {BOKEH_REQUIREMENT} for the dashboard, not bokeh={BOKEH_VERSION}"
26 )
27
28
29 if BOKEH_VERSION.major < 3:
30 from bokeh.models import Panel as TabPanel # noqa: F401
31 else:
32 from bokeh.models import TabPanel # noqa: F401
33
34
35 class DaskBokehTornado(BokehTornado):
36 def resources(self, absolute_url: str | bool | None = True) -> Resources:
37 return super().resources(absolute_url)
38
39
40 def BokehApplication(applications, server, prefix="/", template_variables=None):
41 template_variables = template_variables or {}
42 prefix = "/" + prefix.strip("/") + "/" if prefix else "/"
43
44 extra = {"prefix": prefix, **template_variables}
45
46 funcs = {k: functools.partial(v, server, extra) for k, v in applications.items()}
47 apps = {k: Application(FunctionHandler(v)) for k, v in funcs.items()}
48
49 kwargs = dask.config.get("distributed.scheduler.dashboard.bokeh-application").copy()
50 extra_websocket_origins = create_hosts_allowlist(
51 kwargs.pop("allow_websocket_origin"), server.http_server.port
52 )
53
54 return DaskBokehTornado(
55 apps,
56 prefix=prefix,
57 use_index=False,
58 extra_websocket_origins=extra_websocket_origins,
59 absolute_url="",
60 **kwargs,
61 )
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/distributed/dashboard/core.py b/distributed/dashboard/core.py
--- a/distributed/dashboard/core.py
+++ b/distributed/dashboard/core.py
@@ -6,8 +6,8 @@
from bokeh.application import Application
from bokeh.application.handlers.function import FunctionHandler
from bokeh.resources import Resources
-from bokeh.server.server import BokehTornado
from bokeh.server.util import create_hosts_allowlist
+from packaging.version import parse as parse_version
import dask
@@ -32,9 +32,14 @@
from bokeh.models import TabPanel # noqa: F401
-class DaskBokehTornado(BokehTornado):
- def resources(self, absolute_url: str | bool | None = True) -> Resources:
- return super().resources(absolute_url)
+if BOKEH_VERSION < parse_version("3.3.0"):
+ from bokeh.server.server import BokehTornado as DaskBokehTornado
+else:
+ from bokeh.server.server import BokehTornado
+
+ class DaskBokehTornado(BokehTornado): # type: ignore[no-redef]
+ def resources(self, absolute_url: str | bool | None = True) -> Resources:
+ return super().resources(absolute_url)
def BokehApplication(applications, server, prefix="/", template_variables=None):
| {"golden_diff": "diff --git a/distributed/dashboard/core.py b/distributed/dashboard/core.py\n--- a/distributed/dashboard/core.py\n+++ b/distributed/dashboard/core.py\n@@ -6,8 +6,8 @@\n from bokeh.application import Application\n from bokeh.application.handlers.function import FunctionHandler\n from bokeh.resources import Resources\n-from bokeh.server.server import BokehTornado\n from bokeh.server.util import create_hosts_allowlist\n+from packaging.version import parse as parse_version\n \n import dask\n \n@@ -32,9 +32,14 @@\n from bokeh.models import TabPanel # noqa: F401\n \n \n-class DaskBokehTornado(BokehTornado):\n- def resources(self, absolute_url: str | bool | None = True) -> Resources:\n- return super().resources(absolute_url)\n+if BOKEH_VERSION < parse_version(\"3.3.0\"):\n+ from bokeh.server.server import BokehTornado as DaskBokehTornado\n+else:\n+ from bokeh.server.server import BokehTornado\n+\n+ class DaskBokehTornado(BokehTornado): # type: ignore[no-redef]\n+ def resources(self, absolute_url: str | bool | None = True) -> Resources:\n+ return super().resources(absolute_url)\n \n \n def BokehApplication(applications, server, prefix=\"/\", template_variables=None):\n", "issue": "Dashboards fail with 500 status code when using `bokeh<3.3.0`\nWhen using the latest `main` with `bokeh<3.3.0`, the dashboards fail with a 500 status code.\r\n\r\nScheduler traceback:\r\n```\r\n2023-11-30 18:00:07,300 - tornado.application - ERROR - Uncaught exception GET /status (192.168.178.45)\r\nHTTPServerRequest(protocol='http', host='192.168.178.45:8787', method='GET', uri='/status', version='HTTP/1.1', remote_ip='192.168.178.45')\r\nTraceback (most recent call last):\r\n File \"/opt/homebrew/Caskroom/mambaforge/base/envs/dask-distributed/lib/python3.11/site-packages/tornado/web.py\", line 1786, in _execute\r\n result = await result\r\n ^^^^^^^^^^^^\r\n File \"/opt/homebrew/Caskroom/mambaforge/base/envs/dask-distributed/lib/python3.11/site-packages/bokeh/server/views/doc_handler.py\", line 57, in get\r\n resources=self.application.resources(),\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File \"/Users/hendrikmakait/projects/dask/distributed/distributed/dashboard/core.py\", line 37, in resources\r\n return super().resources(absolute_url)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File \"/opt/homebrew/Caskroom/mambaforge/base/envs/dask-distributed/lib/python3.11/site-packages/bokeh/server/tornado.py\", line 621, in resources\r\n return Resources(mode=\"server\", root_url=root_url, path_versioner=StaticHandler.append_version)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File \"/opt/homebrew/Caskroom/mambaforge/base/envs/dask-distributed/lib/python3.11/site-packages/bokeh/resources.py\", line 377, in __init__\r\n if root_url and not root_url.endswith(\"/\"):\r\n ^^^^^^^^^^^^^^^^^\r\nAttributeError: 'bool' object has no attribute 'endswith'\r\n```\r\n\r\ngit bisect blames #8347\n", "before_files": [{"content": "from __future__ import annotations\n\nimport functools\nimport warnings\n\nfrom bokeh.application import Application\nfrom bokeh.application.handlers.function import FunctionHandler\nfrom bokeh.resources import Resources\nfrom bokeh.server.server import BokehTornado\nfrom bokeh.server.util import create_hosts_allowlist\n\nimport dask\n\nfrom distributed.dashboard.utils import BOKEH_VERSION\nfrom distributed.versions import BOKEH_REQUIREMENT\n\n# Set `prereleases=True` to allow for use with dev versions of `bokeh`\nif not 
BOKEH_REQUIREMENT.specifier.contains(BOKEH_VERSION, prereleases=True):\n warnings.warn(\n f\"\\nDask needs {BOKEH_REQUIREMENT} for the dashboard.\"\n f\"\\nYou have bokeh={BOKEH_VERSION}.\"\n \"\\nContinuing without the dashboard.\"\n )\n raise ImportError(\n f\"Dask needs {BOKEH_REQUIREMENT} for the dashboard, not bokeh={BOKEH_VERSION}\"\n )\n\n\nif BOKEH_VERSION.major < 3:\n from bokeh.models import Panel as TabPanel # noqa: F401\nelse:\n from bokeh.models import TabPanel # noqa: F401\n\n\nclass DaskBokehTornado(BokehTornado):\n def resources(self, absolute_url: str | bool | None = True) -> Resources:\n return super().resources(absolute_url)\n\n\ndef BokehApplication(applications, server, prefix=\"/\", template_variables=None):\n template_variables = template_variables or {}\n prefix = \"/\" + prefix.strip(\"/\") + \"/\" if prefix else \"/\"\n\n extra = {\"prefix\": prefix, **template_variables}\n\n funcs = {k: functools.partial(v, server, extra) for k, v in applications.items()}\n apps = {k: Application(FunctionHandler(v)) for k, v in funcs.items()}\n\n kwargs = dask.config.get(\"distributed.scheduler.dashboard.bokeh-application\").copy()\n extra_websocket_origins = create_hosts_allowlist(\n kwargs.pop(\"allow_websocket_origin\"), server.http_server.port\n )\n\n return DaskBokehTornado(\n apps,\n prefix=prefix,\n use_index=False,\n extra_websocket_origins=extra_websocket_origins,\n absolute_url=\"\",\n **kwargs,\n )\n", "path": "distributed/dashboard/core.py"}], "after_files": [{"content": "from __future__ import annotations\n\nimport functools\nimport warnings\n\nfrom bokeh.application import Application\nfrom bokeh.application.handlers.function import FunctionHandler\nfrom bokeh.resources import Resources\nfrom bokeh.server.util import create_hosts_allowlist\nfrom packaging.version import parse as parse_version\n\nimport dask\n\nfrom distributed.dashboard.utils import BOKEH_VERSION\nfrom distributed.versions import BOKEH_REQUIREMENT\n\n# Set `prereleases=True` to allow for use with dev versions of `bokeh`\nif not BOKEH_REQUIREMENT.specifier.contains(BOKEH_VERSION, prereleases=True):\n warnings.warn(\n f\"\\nDask needs {BOKEH_REQUIREMENT} for the dashboard.\"\n f\"\\nYou have bokeh={BOKEH_VERSION}.\"\n \"\\nContinuing without the dashboard.\"\n )\n raise ImportError(\n f\"Dask needs {BOKEH_REQUIREMENT} for the dashboard, not bokeh={BOKEH_VERSION}\"\n )\n\n\nif BOKEH_VERSION.major < 3:\n from bokeh.models import Panel as TabPanel # noqa: F401\nelse:\n from bokeh.models import TabPanel # noqa: F401\n\n\nif BOKEH_VERSION < parse_version(\"3.3.0\"):\n from bokeh.server.server import BokehTornado as DaskBokehTornado\nelse:\n from bokeh.server.server import BokehTornado\n\n class DaskBokehTornado(BokehTornado): # type: ignore[no-redef]\n def resources(self, absolute_url: str | bool | None = True) -> Resources:\n return super().resources(absolute_url)\n\n\ndef BokehApplication(applications, server, prefix=\"/\", template_variables=None):\n template_variables = template_variables or {}\n prefix = \"/\" + prefix.strip(\"/\") + \"/\" if prefix else \"/\"\n\n extra = {\"prefix\": prefix, **template_variables}\n\n funcs = {k: functools.partial(v, server, extra) for k, v in applications.items()}\n apps = {k: Application(FunctionHandler(v)) for k, v in funcs.items()}\n\n kwargs = dask.config.get(\"distributed.scheduler.dashboard.bokeh-application\").copy()\n extra_websocket_origins = create_hosts_allowlist(\n kwargs.pop(\"allow_websocket_origin\"), server.http_server.port\n )\n\n return 
DaskBokehTornado(\n apps,\n prefix=prefix,\n use_index=False,\n extra_websocket_origins=extra_websocket_origins,\n absolute_url=\"\",\n **kwargs,\n )\n", "path": "distributed/dashboard/core.py"}]} | 1,386 | 301 |
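Every `prompt` cell uses the same delimiters, `--- BEGIN ISSUE ---`/`--- END ISSUE ---` and `--- BEGIN FILES ---`/`--- END FILES ---`, so the issue text and the file listing can be recovered with two regexes. A minimal sketch, assuming the delimiters are exactly as rendered above:

```python
import re

def split_prompt(prompt: str) -> tuple[str, str]:
    """Return (issue_text, files_block) from one prompt cell."""
    issue = re.search(r"--- BEGIN ISSUE ---\n(.*?)\n--- END ISSUE ---", prompt, re.S)
    files = re.search(r"--- BEGIN FILES ---\n(.*?)\n--- END FILES ---", prompt, re.S)
    return issue.group(1), files.group(1)
```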
gh_patches_debug_23710 | rasdani/github-patches | git_diff | mindsdb__mindsdb-2704 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Endpoint to return handler's icons
At the moment we return icons for handlers via the general `GET /handlers` route. Icons are returned in SVG or base64, which is not efficient. We need a new endpoint to return a handler's icon:
`GET /handlers/{name}/icon/{icon_file_name}`
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mindsdb/api/http/namespaces/handlers.py`
Content:
```
1 from flask import request
2 from flask_restx import Resource
3
4 from mindsdb.api.http.utils import http_error
5 from mindsdb.api.http.namespaces.configs.handlers import ns_conf
6 from mindsdb.integrations.utilities.install import install_dependencies
7
8
9 @ns_conf.route('/')
10 class HandlersList(Resource):
11 @ns_conf.doc('handlers_list')
12 def get(self):
13 '''List all db handlers'''
14 handlers = request.integration_controller.get_handlers_import_status()
15 result = []
16 for handler_type, handler_meta in handlers.items():
17 row = {'name': handler_type}
18 row.update(handler_meta)
19 result.append(row)
20 return result
21
22
23 @ns_conf.route('/<handler_name>/install')
24 class InstallDependencies(Resource):
25 @ns_conf.param('handler_name', 'Handler name')
26 def post(self, handler_name):
27 handler_import_status = request.integration_controller.get_handlers_import_status()
28 if handler_name not in handler_import_status:
29 return f'Unkown handler: {handler_name}', 400
30
31 if handler_import_status[handler_name].get('import', {}).get('success', False) is True:
32 return 'Installed', 200
33
34 handler_meta = handler_import_status[handler_name]
35
36 dependencies = handler_meta['import']['dependencies']
37 if len(dependencies) == 0:
38 return 'Installed', 200
39
40 result = install_dependencies(dependencies)
41
42 # reload it if any result, so we can get new error message
43 request.integration_controller.reload_handler_module(handler_name)
44 if result.get('success') is True:
45 return '', 200
46 return http_error(
47 500,
48 'Failed to install dependency',
49 result.get('error_message', 'unknown error')
50 )
51
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mindsdb/api/http/namespaces/handlers.py b/mindsdb/api/http/namespaces/handlers.py
--- a/mindsdb/api/http/namespaces/handlers.py
+++ b/mindsdb/api/http/namespaces/handlers.py
@@ -1,4 +1,8 @@
-from flask import request
+import os
+import importlib
+from pathlib import Path
+
+from flask import request, send_file, abort
from flask_restx import Resource
from mindsdb.api.http.utils import http_error
@@ -20,6 +24,24 @@
return result
+@ns_conf.route('/<handler_name>/icon')
+class HandlerIcon(Resource):
+ @ns_conf.param('handler_name', 'Handler name')
+ def get(self, handler_name):
+ try:
+ handlers_import_status = request.integration_controller.get_handlers_import_status()
+ icon_name = handlers_import_status[handler_name]['icon']['name']
+ handler_folder = handlers_import_status[handler_name]['import']['folder']
+ mindsdb_path = Path(importlib.util.find_spec('mindsdb').origin).parent
+ icon_path = mindsdb_path.joinpath('integrations/handlers').joinpath(handler_folder).joinpath(icon_name)
+ if icon_path.is_absolute() is False:
+ icon_path = Path(os.getcwd()).joinpath(icon_path)
+ except Exception:
+ return abort(404)
+ else:
+ return send_file(icon_path)
+
+
@ns_conf.route('/<handler_name>/install')
class InstallDependencies(Resource):
@ns_conf.param('handler_name', 'Handler name')
| {"golden_diff": "diff --git a/mindsdb/api/http/namespaces/handlers.py b/mindsdb/api/http/namespaces/handlers.py\n--- a/mindsdb/api/http/namespaces/handlers.py\n+++ b/mindsdb/api/http/namespaces/handlers.py\n@@ -1,4 +1,8 @@\n-from flask import request\n+import os\n+import importlib\n+from pathlib import Path\n+\n+from flask import request, send_file, abort\n from flask_restx import Resource\n \n from mindsdb.api.http.utils import http_error\n@@ -20,6 +24,24 @@\n return result\n \n \n+@ns_conf.route('/<handler_name>/icon')\n+class HandlerIcon(Resource):\n+ @ns_conf.param('handler_name', 'Handler name')\n+ def get(self, handler_name):\n+ try:\n+ handlers_import_status = request.integration_controller.get_handlers_import_status()\n+ icon_name = handlers_import_status[handler_name]['icon']['name']\n+ handler_folder = handlers_import_status[handler_name]['import']['folder']\n+ mindsdb_path = Path(importlib.util.find_spec('mindsdb').origin).parent\n+ icon_path = mindsdb_path.joinpath('integrations/handlers').joinpath(handler_folder).joinpath(icon_name)\n+ if icon_path.is_absolute() is False:\n+ icon_path = Path(os.getcwd()).joinpath(icon_path)\n+ except Exception:\n+ return abort(404)\n+ else:\n+ return send_file(icon_path)\n+\n+\n @ns_conf.route('/<handler_name>/install')\n class InstallDependencies(Resource):\n @ns_conf.param('handler_name', 'Handler name')\n", "issue": "Endpoint to return handler's icons\nAt the moment we return icons for handlers by general `GET /handlers` route. Icons are return in svg or base64, which is not effective. We need new endpoint to return handler icon:\r\n`GET /handlers/{name}/icon/{icon_file_name}`\r\n\n", "before_files": [{"content": "from flask import request\nfrom flask_restx import Resource\n\nfrom mindsdb.api.http.utils import http_error\nfrom mindsdb.api.http.namespaces.configs.handlers import ns_conf\nfrom mindsdb.integrations.utilities.install import install_dependencies\n\n\n@ns_conf.route('/')\nclass HandlersList(Resource):\n @ns_conf.doc('handlers_list')\n def get(self):\n '''List all db handlers'''\n handlers = request.integration_controller.get_handlers_import_status()\n result = []\n for handler_type, handler_meta in handlers.items():\n row = {'name': handler_type}\n row.update(handler_meta)\n result.append(row)\n return result\n\n\n@ns_conf.route('/<handler_name>/install')\nclass InstallDependencies(Resource):\n @ns_conf.param('handler_name', 'Handler name')\n def post(self, handler_name):\n handler_import_status = request.integration_controller.get_handlers_import_status()\n if handler_name not in handler_import_status:\n return f'Unkown handler: {handler_name}', 400\n\n if handler_import_status[handler_name].get('import', {}).get('success', False) is True:\n return 'Installed', 200\n\n handler_meta = handler_import_status[handler_name]\n\n dependencies = handler_meta['import']['dependencies']\n if len(dependencies) == 0:\n return 'Installed', 200\n\n result = install_dependencies(dependencies)\n\n # reload it if any result, so we can get new error message\n request.integration_controller.reload_handler_module(handler_name)\n if result.get('success') is True:\n return '', 200\n return http_error(\n 500,\n 'Failed to install dependency',\n result.get('error_message', 'unknown error')\n )\n", "path": "mindsdb/api/http/namespaces/handlers.py"}], "after_files": [{"content": "import os\nimport importlib\nfrom pathlib import Path\n\nfrom flask import request, send_file, abort\nfrom flask_restx import Resource\n\nfrom mindsdb.api.http.utils import 
http_error\nfrom mindsdb.api.http.namespaces.configs.handlers import ns_conf\nfrom mindsdb.integrations.utilities.install import install_dependencies\n\n\n@ns_conf.route('/')\nclass HandlersList(Resource):\n @ns_conf.doc('handlers_list')\n def get(self):\n '''List all db handlers'''\n handlers = request.integration_controller.get_handlers_import_status()\n result = []\n for handler_type, handler_meta in handlers.items():\n row = {'name': handler_type}\n row.update(handler_meta)\n result.append(row)\n return result\n\n\n@ns_conf.route('/<handler_name>/icon')\nclass HandlerIcon(Resource):\n @ns_conf.param('handler_name', 'Handler name')\n def get(self, handler_name):\n try:\n handlers_import_status = request.integration_controller.get_handlers_import_status()\n icon_name = handlers_import_status[handler_name]['icon']['name']\n handler_folder = handlers_import_status[handler_name]['import']['folder']\n mindsdb_path = Path(importlib.util.find_spec('mindsdb').origin).parent\n icon_path = mindsdb_path.joinpath('integrations/handlers').joinpath(handler_folder).joinpath(icon_name)\n if icon_path.is_absolute() is False:\n icon_path = Path(os.getcwd()).joinpath(icon_path)\n except Exception:\n return abort(404)\n else:\n return send_file(icon_path)\n\n\n@ns_conf.route('/<handler_name>/install')\nclass InstallDependencies(Resource):\n @ns_conf.param('handler_name', 'Handler name')\n def post(self, handler_name):\n handler_import_status = request.integration_controller.get_handlers_import_status()\n if handler_name not in handler_import_status:\n return f'Unkown handler: {handler_name}', 400\n\n if handler_import_status[handler_name].get('import', {}).get('success', False) is True:\n return 'Installed', 200\n\n handler_meta = handler_import_status[handler_name]\n\n dependencies = handler_meta['import']['dependencies']\n if len(dependencies) == 0:\n return 'Installed', 200\n\n result = install_dependencies(dependencies)\n\n # reload it if any result, so we can get new error message\n request.integration_controller.reload_handler_module(handler_name)\n if result.get('success') is True:\n return '', 200\n return http_error(\n 500,\n 'Failed to install dependency',\n result.get('error_message', 'unknown error')\n )\n", "path": "mindsdb/api/http/namespaces/handlers.py"}]} | 796 | 358 |
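The `verification_info` cell carries everything needed to check a patch offline: `before_files` to materialize on disk, the golden diff to apply, and `after_files` to compare against. Below is a sketch of that round trip; it assumes the standard `patch` tool is on PATH and that the diffs apply cleanly with `-p1` (they use `a/` and `b/` path prefixes). `git apply` would work just as well.

```python
import json
import pathlib
import subprocess
import tempfile

def verify(record: dict) -> None:
    """Apply golden_diff to before_files and assert the result matches after_files."""
    info = json.loads(record["verification_info"])
    with tempfile.TemporaryDirectory() as tmp:
        root = pathlib.Path(tmp)
        for f in info["before_files"]:
            path = root / f["path"]
            path.parent.mkdir(parents=True, exist_ok=True)
            path.write_text(f["content"])
        # The a/ and b/ prefixes in the diff are stripped by -p1.
        subprocess.run(["patch", "-p1"], input=info["golden_diff"],
                       text=True, cwd=root, check=True)
        for f in info["after_files"]:
            assert (root / f["path"]).read_text() == f["content"]
```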
gh_patches_debug_15237 | rasdani/github-patches | git_diff | rlworkgroup__garage-691 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
sim_policy not working
Hi,
I just found that sim_policy.py does not work: the data read from "params.pkl" does not include the key "policy".
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/sim_policy.py`
Content:
```
1 #!/usr/bin/env python3
2
3 import argparse
4
5 import joblib
6 import tensorflow as tf
7
8 from garage.misc.console import query_yes_no
9 from garage.sampler.utils import rollout
10
11 if __name__ == "__main__":
12
13 parser = argparse.ArgumentParser()
14 parser.add_argument('file', type=str, help='path to the snapshot file')
15 parser.add_argument(
16 '--max_path_length',
17 type=int,
18 default=1000,
19 help='Max length of rollout')
20 parser.add_argument('--speedup', type=float, default=1, help='Speedup')
21 args = parser.parse_args()
22
23 # If the snapshot file use tensorflow, do:
24 # import tensorflow as tf
25 # with tf.Session():
26 # [rest of the code]
27 with tf.Session() as sess:
28 data = joblib.load(args.file)
29 policy = data['policy']
30 env = data['env']
31 while True:
32 path = rollout(
33 env,
34 policy,
35 max_path_length=args.max_path_length,
36 animated=True,
37 speedup=args.speedup)
38 if not query_yes_no('Continue simulation?'):
39 break
40
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/sim_policy.py b/examples/sim_policy.py
--- a/examples/sim_policy.py
+++ b/examples/sim_policy.py
@@ -8,7 +8,7 @@
from garage.misc.console import query_yes_no
from garage.sampler.utils import rollout
-if __name__ == "__main__":
+if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('file', type=str, help='path to the snapshot file')
@@ -26,7 +26,7 @@
# [rest of the code]
with tf.Session() as sess:
data = joblib.load(args.file)
- policy = data['policy']
+ policy = data['algo'].policy
env = data['env']
while True:
path = rollout(
| {"golden_diff": "diff --git a/examples/sim_policy.py b/examples/sim_policy.py\n--- a/examples/sim_policy.py\n+++ b/examples/sim_policy.py\n@@ -8,7 +8,7 @@\n from garage.misc.console import query_yes_no\n from garage.sampler.utils import rollout\n \n-if __name__ == \"__main__\":\n+if __name__ == '__main__':\n \n parser = argparse.ArgumentParser()\n parser.add_argument('file', type=str, help='path to the snapshot file')\n@@ -26,7 +26,7 @@\n # [rest of the code]\n with tf.Session() as sess:\n data = joblib.load(args.file)\n- policy = data['policy']\n+ policy = data['algo'].policy\n env = data['env']\n while True:\n path = rollout(\n", "issue": "sim_policy not working\nHi, \r\nI just found that sim_policy.py cannot work. \r\ndata that read from \"params.pkl\" does not include the key of \"policy\"\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nimport argparse\n\nimport joblib\nimport tensorflow as tf\n\nfrom garage.misc.console import query_yes_no\nfrom garage.sampler.utils import rollout\n\nif __name__ == \"__main__\":\n\n parser = argparse.ArgumentParser()\n parser.add_argument('file', type=str, help='path to the snapshot file')\n parser.add_argument(\n '--max_path_length',\n type=int,\n default=1000,\n help='Max length of rollout')\n parser.add_argument('--speedup', type=float, default=1, help='Speedup')\n args = parser.parse_args()\n\n # If the snapshot file use tensorflow, do:\n # import tensorflow as tf\n # with tf.Session():\n # [rest of the code]\n with tf.Session() as sess:\n data = joblib.load(args.file)\n policy = data['policy']\n env = data['env']\n while True:\n path = rollout(\n env,\n policy,\n max_path_length=args.max_path_length,\n animated=True,\n speedup=args.speedup)\n if not query_yes_no('Continue simulation?'):\n break\n", "path": "examples/sim_policy.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\nimport argparse\n\nimport joblib\nimport tensorflow as tf\n\nfrom garage.misc.console import query_yes_no\nfrom garage.sampler.utils import rollout\n\nif __name__ == '__main__':\n\n parser = argparse.ArgumentParser()\n parser.add_argument('file', type=str, help='path to the snapshot file')\n parser.add_argument(\n '--max_path_length',\n type=int,\n default=1000,\n help='Max length of rollout')\n parser.add_argument('--speedup', type=float, default=1, help='Speedup')\n args = parser.parse_args()\n\n # If the snapshot file use tensorflow, do:\n # import tensorflow as tf\n # with tf.Session():\n # [rest of the code]\n with tf.Session() as sess:\n data = joblib.load(args.file)\n policy = data['algo'].policy\n env = data['env']\n while True:\n path = rollout(\n env,\n policy,\n max_path_length=args.max_path_length,\n animated=True,\n speedup=args.speedup)\n if not query_yes_no('Continue simulation?'):\n break\n", "path": "examples/sim_policy.py"}]} | 612 | 173 |
gh_patches_debug_9203 | rasdani/github-patches | git_diff | Qiskit__qiskit-4081 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve an error message in qiskit.converters.circuit_to_gate()
<!-- ⚠️ If you do not respect this template, your issue will be closed -->
<!-- ⚠️ Make sure to browse the opened and closed issues to confirm this idea does not exist. -->
### What is the expected enhancement?
Let's assume we have a `QuantumCircuit` object called `qc`, and one tries to convert it into a `Gate` object using `qiskit.converters.circuit_to_gate()`. If `qc` contains some instructions which cannot be converted into a `Gate`, the following exception is raised:
```
QiskitError: 'One or more instructions in this instruction cannot be converted to a gate'
```
My suggestion is to improve this error message and add some information about the particular instruction preventing the conversion. I believe something like the instruction name in the error message would be more helpful than the current general statement.
Below is a code snippet (for a `qc` containing a measurement operation) which can be used to reproduce the error mentioned above:
```
from qiskit import QuantumCircuit
from qiskit.converters import circuit_to_gate
qc = QuantumCircuit(1, 1)
qc.h(0)
qc.measure(0, 0)
gate = circuit_to_gate(qc)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `qiskit/converters/circuit_to_gate.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # This code is part of Qiskit.
4 #
5 # (C) Copyright IBM 2017, 2019.
6 #
7 # This code is licensed under the Apache License, Version 2.0. You may
8 # obtain a copy of this license in the LICENSE.txt file in the root directory
9 # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
10 #
11 # Any modifications or derivative works of this code must retain this
12 # copyright notice, and modified files need to carry a notice indicating
13 # that they have been altered from the originals.
14
15 """Helper function for converting a circuit to a gate"""
16
17 from qiskit.circuit.gate import Gate
18 from qiskit.circuit.quantumregister import QuantumRegister, Qubit
19 from qiskit.exceptions import QiskitError
20
21
22 def circuit_to_gate(circuit, parameter_map=None):
23 """Build a ``Gate`` object from a ``QuantumCircuit``.
24
25 The gate is anonymous (not tied to a named quantum register),
26 and so can be inserted into another circuit. The gate will
27 have the same string name as the circuit.
28
29 Args:
30 circuit (QuantumCircuit): the input circuit.
31 parameter_map (dict): For parameterized circuits, a mapping from
32 parameters in the circuit to parameters to be used in the gate.
33 If None, existing circuit parameters will also parameterize the
34 Gate.
35
36 Raises:
37 QiskitError: if circuit is non-unitary or if
38 parameter_map is not compatible with circuit
39
40 Return:
41 Gate: a Gate equivalent to the action of the
42 input circuit. Upon decomposition, this gate will
43 yield the components comprising the original circuit.
44 """
45 if circuit.clbits:
46 raise QiskitError('Circuit with classical bits cannot be converted '
47 'to gate.')
48
49 for inst, _, _ in circuit.data:
50 if not isinstance(inst, Gate):
51 raise QiskitError('One or more instructions in this instruction '
52 'cannot be converted to a gate')
53
54 if parameter_map is None:
55 parameter_dict = {p: p for p in circuit.parameters}
56 else:
57 parameter_dict = circuit._unroll_param_dict(parameter_map)
58
59 if parameter_dict.keys() != circuit.parameters:
60 raise QiskitError(('parameter_map should map all circuit parameters. '
61 'Circuit parameters: {}, parameter_map: {}').format(
62 circuit.parameters, parameter_dict))
63
64 gate = Gate(name=circuit.name,
65 num_qubits=sum([qreg.size for qreg in circuit.qregs]),
66 params=sorted(parameter_dict.values(), key=lambda p: p.name))
67 gate.condition = None
68
69 def find_bit_position(bit):
70 """find the index of a given bit (Register, int) within
71 a flat ordered list of bits of the circuit
72 """
73 if isinstance(bit, Qubit):
74 ordered_regs = circuit.qregs
75 else:
76 ordered_regs = circuit.cregs
77 reg_index = ordered_regs.index(bit.register)
78 return sum([reg.size for reg in ordered_regs[:reg_index]]) + bit.index
79
80 target = circuit.copy()
81 target._substitute_parameters(parameter_dict)
82
83 # pylint: disable=cyclic-import
84 from qiskit.circuit.equivalence_library import SessionEquivalenceLibrary as sel
85 # pylint: enable=cyclic-import
86 sel.add_equivalence(gate, target)
87
88 definition = target.data
89
90 if gate.num_qubits > 0:
91 q = QuantumRegister(gate.num_qubits, 'q')
92
93 # The 3rd parameter in the output tuple) is hard coded to [] because
94 # Gate objects do not have cregs set and we've verified that all
95 # instructions are gates
96 definition = list(map(
97 lambda x: (x[0],
98 list(map(lambda y: q[find_bit_position(y)], x[1])),
99 []),
100 definition))
101 gate.definition = definition
102
103 return gate
104
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/qiskit/converters/circuit_to_gate.py b/qiskit/converters/circuit_to_gate.py
--- a/qiskit/converters/circuit_to_gate.py
+++ b/qiskit/converters/circuit_to_gate.py
@@ -48,8 +48,9 @@
for inst, _, _ in circuit.data:
if not isinstance(inst, Gate):
- raise QiskitError('One or more instructions in this instruction '
- 'cannot be converted to a gate')
+ raise QiskitError(('One or more instructions cannot be converted to'
+ ' a gate. "{}" is not a gate instruction').format(
+ inst.name))
if parameter_map is None:
parameter_dict = {p: p for p in circuit.parameters}
| {"golden_diff": "diff --git a/qiskit/converters/circuit_to_gate.py b/qiskit/converters/circuit_to_gate.py\n--- a/qiskit/converters/circuit_to_gate.py\n+++ b/qiskit/converters/circuit_to_gate.py\n@@ -48,8 +48,9 @@\n \n for inst, _, _ in circuit.data:\n if not isinstance(inst, Gate):\n- raise QiskitError('One or more instructions in this instruction '\n- 'cannot be converted to a gate')\n+ raise QiskitError(('One or more instructions cannot be converted to'\n+ ' a gate. \"{}\" is not a gate instruction').format(\n+ inst.name))\n \n if parameter_map is None:\n parameter_dict = {p: p for p in circuit.parameters}\n", "issue": "Improve an error message in qiskit.converters.circuit_to_gate()\n<!-- \u26a0\ufe0f If you do not respect this template, your issue will be closed -->\r\n<!-- \u26a0\ufe0f Make sure to browse the opened and closed issues to confirm this idea does not exist. -->\r\n\r\n### What is the expected enhancement?\r\n\r\nLet's assume we have `QuantumCircuit` object called `qc`, and one tries to convert it into a `Gate` object using `qiskit.converters.circuit_to_gate()`. If `qc` contains some instructions which cannot be converted into `Gate`, the following exception is raised\r\n```\r\nQiskitError: 'One or more instructions in this instruction cannot be converted to a gate'\r\n```\r\nMy suggestion is to improve this error message and add some info about the particular instruction preventing the convertion from happening. I believe, something like the instruction name in the error message should be more helpfull, than the current general statement.\r\n\r\nBelow is a code snippet (for a `qc` containing a measurement operation) which can be used to achieve the error mentioned above\r\n```\r\nfrom qiskit import QuantumCircuit\r\nfrom qiskit.converters import circuit_to_gate\r\n\r\nqc = QuantumCircuit(1, 1)\r\nqc.h(0)\r\nqc.measure(0, 0)\r\n\r\ngate = circuit_to_gate(qc)\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# This code is part of Qiskit.\n#\n# (C) Copyright IBM 2017, 2019.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\n\"\"\"Helper function for converting a circuit to a gate\"\"\"\n\nfrom qiskit.circuit.gate import Gate\nfrom qiskit.circuit.quantumregister import QuantumRegister, Qubit\nfrom qiskit.exceptions import QiskitError\n\n\ndef circuit_to_gate(circuit, parameter_map=None):\n \"\"\"Build a ``Gate`` object from a ``QuantumCircuit``.\n\n The gate is anonymous (not tied to a named quantum register),\n and so can be inserted into another circuit. The gate will\n have the same string name as the circuit.\n\n Args:\n circuit (QuantumCircuit): the input circuit.\n parameter_map (dict): For parameterized circuits, a mapping from\n parameters in the circuit to parameters to be used in the gate.\n If None, existing circuit parameters will also parameterize the\n Gate.\n\n Raises:\n QiskitError: if circuit is non-unitary or if\n parameter_map is not compatible with circuit\n\n Return:\n Gate: a Gate equivalent to the action of the\n input circuit. 
Upon decomposition, this gate will\n yield the components comprising the original circuit.\n \"\"\"\n if circuit.clbits:\n raise QiskitError('Circuit with classical bits cannot be converted '\n 'to gate.')\n\n for inst, _, _ in circuit.data:\n if not isinstance(inst, Gate):\n raise QiskitError('One or more instructions in this instruction '\n 'cannot be converted to a gate')\n\n if parameter_map is None:\n parameter_dict = {p: p for p in circuit.parameters}\n else:\n parameter_dict = circuit._unroll_param_dict(parameter_map)\n\n if parameter_dict.keys() != circuit.parameters:\n raise QiskitError(('parameter_map should map all circuit parameters. '\n 'Circuit parameters: {}, parameter_map: {}').format(\n circuit.parameters, parameter_dict))\n\n gate = Gate(name=circuit.name,\n num_qubits=sum([qreg.size for qreg in circuit.qregs]),\n params=sorted(parameter_dict.values(), key=lambda p: p.name))\n gate.condition = None\n\n def find_bit_position(bit):\n \"\"\"find the index of a given bit (Register, int) within\n a flat ordered list of bits of the circuit\n \"\"\"\n if isinstance(bit, Qubit):\n ordered_regs = circuit.qregs\n else:\n ordered_regs = circuit.cregs\n reg_index = ordered_regs.index(bit.register)\n return sum([reg.size for reg in ordered_regs[:reg_index]]) + bit.index\n\n target = circuit.copy()\n target._substitute_parameters(parameter_dict)\n\n # pylint: disable=cyclic-import\n from qiskit.circuit.equivalence_library import SessionEquivalenceLibrary as sel\n # pylint: enable=cyclic-import\n sel.add_equivalence(gate, target)\n\n definition = target.data\n\n if gate.num_qubits > 0:\n q = QuantumRegister(gate.num_qubits, 'q')\n\n # The 3rd parameter in the output tuple) is hard coded to [] because\n # Gate objects do not have cregs set and we've verified that all\n # instructions are gates\n definition = list(map(\n lambda x: (x[0],\n list(map(lambda y: q[find_bit_position(y)], x[1])),\n []),\n definition))\n gate.definition = definition\n\n return gate\n", "path": "qiskit/converters/circuit_to_gate.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# This code is part of Qiskit.\n#\n# (C) Copyright IBM 2017, 2019.\n#\n# This code is licensed under the Apache License, Version 2.0. You may\n# obtain a copy of this license in the LICENSE.txt file in the root directory\n# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.\n#\n# Any modifications or derivative works of this code must retain this\n# copyright notice, and modified files need to carry a notice indicating\n# that they have been altered from the originals.\n\n\"\"\"Helper function for converting a circuit to a gate\"\"\"\n\nfrom qiskit.circuit.gate import Gate\nfrom qiskit.circuit.quantumregister import QuantumRegister, Qubit\nfrom qiskit.exceptions import QiskitError\n\n\ndef circuit_to_gate(circuit, parameter_map=None):\n \"\"\"Build a ``Gate`` object from a ``QuantumCircuit``.\n\n The gate is anonymous (not tied to a named quantum register),\n and so can be inserted into another circuit. The gate will\n have the same string name as the circuit.\n\n Args:\n circuit (QuantumCircuit): the input circuit.\n parameter_map (dict): For parameterized circuits, a mapping from\n parameters in the circuit to parameters to be used in the gate.\n If None, existing circuit parameters will also parameterize the\n Gate.\n\n Raises:\n QiskitError: if circuit is non-unitary or if\n parameter_map is not compatible with circuit\n\n Return:\n Gate: a Gate equivalent to the action of the\n input circuit. 
Upon decomposition, this gate will\n yield the components comprising the original circuit.\n \"\"\"\n if circuit.clbits:\n raise QiskitError('Circuit with classical bits cannot be converted '\n 'to gate.')\n\n for inst, _, _ in circuit.data:\n if not isinstance(inst, Gate):\n raise QiskitError(('One or more instructions cannot be converted to'\n ' a gate. \"{}\" is not a gate instruction').format(\n inst.name))\n\n if parameter_map is None:\n parameter_dict = {p: p for p in circuit.parameters}\n else:\n parameter_dict = circuit._unroll_param_dict(parameter_map)\n\n if parameter_dict.keys() != circuit.parameters:\n raise QiskitError(('parameter_map should map all circuit parameters. '\n 'Circuit parameters: {}, parameter_map: {}').format(\n circuit.parameters, parameter_dict))\n\n gate = Gate(name=circuit.name,\n num_qubits=sum([qreg.size for qreg in circuit.qregs]),\n params=sorted(parameter_dict.values(), key=lambda p: p.name))\n gate.condition = None\n\n def find_bit_position(bit):\n \"\"\"find the index of a given bit (Register, int) within\n a flat ordered list of bits of the circuit\n \"\"\"\n if isinstance(bit, Qubit):\n ordered_regs = circuit.qregs\n else:\n ordered_regs = circuit.cregs\n reg_index = ordered_regs.index(bit.register)\n return sum([reg.size for reg in ordered_regs[:reg_index]]) + bit.index\n\n target = circuit.copy()\n target._substitute_parameters(parameter_dict)\n\n # pylint: disable=cyclic-import\n from qiskit.circuit.equivalence_library import SessionEquivalenceLibrary as sel\n # pylint: enable=cyclic-import\n sel.add_equivalence(gate, target)\n\n definition = target.data\n\n if gate.num_qubits > 0:\n q = QuantumRegister(gate.num_qubits, 'q')\n\n # The 3rd parameter in the output tuple) is hard coded to [] because\n # Gate objects do not have cregs set and we've verified that all\n # instructions are gates\n definition = list(map(\n lambda x: (x[0],\n list(map(lambda y: q[find_bit_position(y)], x[1])),\n []),\n definition))\n gate.definition = definition\n\n return gate\n", "path": "qiskit/converters/circuit_to_gate.py"}]} | 1,613 | 173 |
gh_patches_debug_984 | rasdani/github-patches | git_diff | Mailu__Mailu-2157 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Admin User Quota sorting is off
Thank you for opening an issue with Mailu. Please understand that issues are meant for bugs and enhancement-requests.
For **user-support questions**, reach out to us on [matrix](https://matrix.to/#/#mailu:tedomum.net).
To be able to help you best, we need some more information.
## Before you open your issue
- [ x] Check if no issue or pull-request for this already exists.
- [ x] Check [documentation](https://mailu.io/master/) and [FAQ](https://mailu.io/master/faq.html). (Tip, use the search function on the documentation page)
- [ x] You understand `Mailu` is made by volunteers in their **free time** — be concise, civil and accept that delays can occur. The title of the issue should be short and simple.
- [ x] The title of the issue should be short and simple. It should contain specific terms related to the actual issue. Be specific while writing the title.
## Environment & Versions
### Environment
- [ x] docker-compose
- [ ] kubernetes
- [ ] docker swarm
### Versions
1.9
## Description
When sorting by quota in the Admin interface the numbers are sorted like text instead of by number and bytes.
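For illustration (this snippet is not part of the original report), a plain lexicographic sort on human-readable quota labels reproduces exactly this misordering, while keying the sort on the underlying byte count gives the expected order:

```python
# Hypothetical quota labels as rendered in the admin table.
sizes = ["900.0 kB", "150.0 MB", "2.0 GB"]

print(sorted(sizes))
# text sort: ['150.0 MB', '2.0 GB', '900.0 kB']  (wrong: 2 GB sorts before 900 kB)

units = {"kB": 10**3, "MB": 10**6, "GB": 10**9}

def to_bytes(label):
    value, unit = label.split()
    return float(value) * units[unit]

print(sorted(sizes, key=to_bytes))
# numeric sort: ['900.0 kB', '150.0 MB', '2.0 GB']
```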
## Expected behaviour
kB is smaller than MB is smaller than GB

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/admin/mailu/__init__.py`
Content:
```
1 """ Mailu admin app
2 """
3
4 import flask
5 import flask_bootstrap
6
7 from mailu import utils, debug, models, manage, configuration
8
9 import hmac
10
11 def create_app_from_config(config):
12 """ Create a new application based on the given configuration
13 """
14 app = flask.Flask(__name__, static_folder='static', static_url_path='/static')
15 app.cli.add_command(manage.mailu)
16
17 # Bootstrap is used for error display and flash messages
18 app.bootstrap = flask_bootstrap.Bootstrap(app)
19
20 # Initialize application extensions
21 config.init_app(app)
22 models.db.init_app(app)
23 utils.session.init_app(app)
24 utils.limiter.init_app(app)
25 utils.babel.init_app(app)
26 utils.login.init_app(app)
27 utils.login.user_loader(models.User.get)
28 utils.proxy.init_app(app)
29 utils.migrate.init_app(app, models.db)
30
31 app.device_cookie_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('DEVICE_COOKIE_KEY', 'utf-8'), 'sha256').digest()
32 app.temp_token_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('WEBMAIL_TEMP_TOKEN_KEY', 'utf-8'), 'sha256').digest()
33 app.srs_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('SRS_KEY', 'utf-8'), 'sha256').digest()
34
35 # Initialize list of translations
36 app.config.translations = {
37 str(locale): locale
38 for locale in sorted(
39 utils.babel.list_translations(),
40 key=lambda l: l.get_language_name().title()
41 )
42 }
43
44 # Initialize debugging tools
45 if app.config.get("DEBUG"):
46 debug.toolbar.init_app(app)
47 if app.config.get("DEBUG_PROFILER"):
48 debug.profiler.init_app(app)
49 if assets := app.config.get('DEBUG_ASSETS'):
50 app.static_folder = assets
51
52 # Inject the default variables in the Jinja parser
53 # TODO: move this to blueprints when needed
54 @app.context_processor
55 def inject_defaults():
56 signup_domains = models.Domain.query.filter_by(signup_enabled=True).all()
57 return dict(
58 signup_domains= signup_domains,
59 config = app.config,
60 )
61
62 # Jinja filters
63 @app.template_filter()
64 def format_date(value):
65 return utils.flask_babel.format_date(value) if value else ''
66
67 @app.template_filter()
68 def format_datetime(value):
69 return utils.flask_babel.format_datetime(value) if value else ''
70
71 # Import views
72 from mailu import ui, internal, sso
73 app.register_blueprint(ui.ui, url_prefix=app.config['WEB_ADMIN'])
74 app.register_blueprint(internal.internal, url_prefix='/internal')
75 app.register_blueprint(sso.sso, url_prefix='/sso')
76 return app
77
78
79 def create_app():
80 """ Create a new application based on the config module
81 """
82 config = configuration.ConfigManager()
83 return create_app_from_config(config)
84
85
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/core/admin/mailu/__init__.py b/core/admin/mailu/__init__.py
--- a/core/admin/mailu/__init__.py
+++ b/core/admin/mailu/__init__.py
@@ -57,6 +57,7 @@
return dict(
signup_domains= signup_domains,
config = app.config,
+ get_locale = utils.get_locale,
)
# Jinja filters
| {"golden_diff": "diff --git a/core/admin/mailu/__init__.py b/core/admin/mailu/__init__.py\n--- a/core/admin/mailu/__init__.py\n+++ b/core/admin/mailu/__init__.py\n@@ -57,6 +57,7 @@\n return dict(\n signup_domains= signup_domains,\n config = app.config,\n+ get_locale = utils.get_locale,\n )\n \n # Jinja filters\n", "issue": "Admin User Quota sorting is off\nThank you for opening an issue with Mailu. Please understand that issues are meant for bugs and enhancement-requests.\r\nFor **user-support questions**, reach out to us on [matrix](https://matrix.to/#/#mailu:tedomum.net).\r\n\r\nTo be able to help you best, we need some more information.\r\n\r\n## Before you open your issue\r\n- [ x] Check if no issue or pull-request for this already exists.\r\n- [ x] Check [documentation](https://mailu.io/master/) and [FAQ](https://mailu.io/master/faq.html). (Tip, use the search function on the documentation page)\r\n- [ x] You understand `Mailu` is made by volunteers in their **free time** \u2014 be conscise, civil and accept that delays can occur.\r\n- [ x] The title of the issue should be short and simple. It should contain specific terms related to the actual issue. Be specific while writing the title.\r\n\r\n## Environment & Versions\r\n### Environment\r\n - [ x] docker-compose\r\n - [ ] kubernetes\r\n - [ ] docker swarm\r\n\r\n### Versions\r\n1.9\r\n\r\n## Description\r\nWhen sorting by quota in the Admin interface the numbers are sorted like text instead of by number and bytes.\r\n\r\n\r\n## Expected behaviour\r\nkB is smaller than MB is smaller than GB\r\n\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "\"\"\" Mailu admin app\n\"\"\"\n\nimport flask\nimport flask_bootstrap\n\nfrom mailu import utils, debug, models, manage, configuration\n\nimport hmac\n\ndef create_app_from_config(config):\n \"\"\" Create a new application based on the given configuration\n \"\"\"\n app = flask.Flask(__name__, static_folder='static', static_url_path='/static')\n app.cli.add_command(manage.mailu)\n\n # Bootstrap is used for error display and flash messages\n app.bootstrap = flask_bootstrap.Bootstrap(app)\n\n # Initialize application extensions\n config.init_app(app)\n models.db.init_app(app)\n utils.session.init_app(app)\n utils.limiter.init_app(app)\n utils.babel.init_app(app)\n utils.login.init_app(app)\n utils.login.user_loader(models.User.get)\n utils.proxy.init_app(app)\n utils.migrate.init_app(app, models.db)\n\n app.device_cookie_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('DEVICE_COOKIE_KEY', 'utf-8'), 'sha256').digest()\n app.temp_token_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('WEBMAIL_TEMP_TOKEN_KEY', 'utf-8'), 'sha256').digest()\n app.srs_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('SRS_KEY', 'utf-8'), 'sha256').digest()\n\n # Initialize list of translations\n app.config.translations = {\n str(locale): locale\n for locale in sorted(\n utils.babel.list_translations(),\n key=lambda l: l.get_language_name().title()\n )\n }\n\n # Initialize debugging tools\n if app.config.get(\"DEBUG\"):\n debug.toolbar.init_app(app)\n if app.config.get(\"DEBUG_PROFILER\"):\n debug.profiler.init_app(app)\n if assets := app.config.get('DEBUG_ASSETS'):\n app.static_folder = assets\n\n # Inject the default variables in the Jinja parser\n # TODO: move this to blueprints when needed\n @app.context_processor\n def inject_defaults():\n signup_domains = models.Domain.query.filter_by(signup_enabled=True).all()\n return dict(\n signup_domains= signup_domains,\n config = 
app.config,\n )\n\n # Jinja filters\n @app.template_filter()\n def format_date(value):\n return utils.flask_babel.format_date(value) if value else ''\n\n @app.template_filter()\n def format_datetime(value):\n return utils.flask_babel.format_datetime(value) if value else ''\n\n # Import views\n from mailu import ui, internal, sso\n app.register_blueprint(ui.ui, url_prefix=app.config['WEB_ADMIN'])\n app.register_blueprint(internal.internal, url_prefix='/internal')\n app.register_blueprint(sso.sso, url_prefix='/sso')\n return app\n\n\ndef create_app():\n \"\"\" Create a new application based on the config module\n \"\"\"\n config = configuration.ConfigManager()\n return create_app_from_config(config)\n\n", "path": "core/admin/mailu/__init__.py"}], "after_files": [{"content": "\"\"\" Mailu admin app\n\"\"\"\n\nimport flask\nimport flask_bootstrap\n\nfrom mailu import utils, debug, models, manage, configuration\n\nimport hmac\n\ndef create_app_from_config(config):\n \"\"\" Create a new application based on the given configuration\n \"\"\"\n app = flask.Flask(__name__, static_folder='static', static_url_path='/static')\n app.cli.add_command(manage.mailu)\n\n # Bootstrap is used for error display and flash messages\n app.bootstrap = flask_bootstrap.Bootstrap(app)\n\n # Initialize application extensions\n config.init_app(app)\n models.db.init_app(app)\n utils.session.init_app(app)\n utils.limiter.init_app(app)\n utils.babel.init_app(app)\n utils.login.init_app(app)\n utils.login.user_loader(models.User.get)\n utils.proxy.init_app(app)\n utils.migrate.init_app(app, models.db)\n\n app.device_cookie_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('DEVICE_COOKIE_KEY', 'utf-8'), 'sha256').digest()\n app.temp_token_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('WEBMAIL_TEMP_TOKEN_KEY', 'utf-8'), 'sha256').digest()\n app.srs_key = hmac.new(bytearray(app.secret_key, 'utf-8'), bytearray('SRS_KEY', 'utf-8'), 'sha256').digest()\n\n # Initialize list of translations\n app.config.translations = {\n str(locale): locale\n for locale in sorted(\n utils.babel.list_translations(),\n key=lambda l: l.get_language_name().title()\n )\n }\n\n # Initialize debugging tools\n if app.config.get(\"DEBUG\"):\n debug.toolbar.init_app(app)\n if app.config.get(\"DEBUG_PROFILER\"):\n debug.profiler.init_app(app)\n if assets := app.config.get('DEBUG_ASSETS'):\n app.static_folder = assets\n\n # Inject the default variables in the Jinja parser\n # TODO: move this to blueprints when needed\n @app.context_processor\n def inject_defaults():\n signup_domains = models.Domain.query.filter_by(signup_enabled=True).all()\n return dict(\n signup_domains= signup_domains,\n config = app.config,\n get_locale = utils.get_locale,\n )\n\n # Jinja filters\n @app.template_filter()\n def format_date(value):\n return utils.flask_babel.format_date(value) if value else ''\n\n @app.template_filter()\n def format_datetime(value):\n return utils.flask_babel.format_datetime(value) if value else ''\n\n # Import views\n from mailu import ui, internal, sso\n app.register_blueprint(ui.ui, url_prefix=app.config['WEB_ADMIN'])\n app.register_blueprint(internal.internal, url_prefix='/internal')\n app.register_blueprint(sso.sso, url_prefix='/sso')\n return app\n\n\ndef create_app():\n \"\"\" Create a new application based on the config module\n \"\"\"\n config = configuration.ConfigManager()\n return create_app_from_config(config)\n\n", "path": "core/admin/mailu/__init__.py"}]} | 1,412 | 93 |
gh_patches_debug_764 | rasdani/github-patches | git_diff | rasterio__rasterio-1692 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
more explicit NotImplementedError messages in read mode ?
In wanting to set a GeoTIFF's CRS, I encountered [this](https://github.com/mapbox/rasterio/blob/master/rasterio/_base.pyx#L516) NotImplementedError when trying to run the following code:
```
with rasterio.open(filepath) as src:
src.crs = "EPSG:3857"
```
Though in retrospect it is obvious the above will fail without explicitly specifying the proper mode, i.e. `'r+'` in this case, I was momentarily thrown off by the error and assumed something was wrong with my approach. Would a more explicit error message be useful here?
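For comparison, a minimal sketch of the pattern that does work here (not part of the original report; `filepath` is the same placeholder as above, and the file must be writable):

```python
import rasterio

# Opening in 'r+' (update) mode instead of the default 'r' allows
# dataset metadata such as the CRS to be modified in place.
with rasterio.open(filepath, "r+") as src:
    src.crs = "EPSG:3857"
```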
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `rasterio/errors.py`
Content:
```
1 """Errors and Warnings."""
2
3 from click import FileError
4
5
6 class RasterioError(Exception):
7 """Root exception class"""
8
9
10 class WindowError(RasterioError):
11 """Raised when errors occur during window operations"""
12
13
14 class CRSError(ValueError):
15 """Raised when a CRS string or mapping is invalid or cannot serve
16 to define a coordinate transformation."""
17
18
19 class EnvError(RasterioError):
20 """Raised when the state of GDAL/AWS environment cannot be created
21 or modified."""
22
23
24 class DriverRegistrationError(ValueError):
25 """Raised when a format driver is requested but is not registered."""
26
27
28 class FileOverwriteError(FileError):
29 """Raised when Rasterio's CLI refuses to clobber output files."""
30
31 def __init__(self, message):
32 """Raise FileOverwriteError with message as hint."""
33 super(FileOverwriteError, self).__init__('', hint=message)
34
35
36 class RasterioIOError(IOError):
37 """Raised when a dataset cannot be opened using one of the
38 registered format drivers."""
39
40
41 class NodataShadowWarning(UserWarning):
42 """Warn that a dataset's nodata attribute is shadowing its alpha band."""
43
44 def __str__(self):
45 return ("The dataset's nodata attribute is shadowing "
46 "the alpha band. All masks will be determined "
47 "by the nodata attribute")
48
49
50 class NotGeoreferencedWarning(UserWarning):
51 """Warn that a dataset isn't georeferenced."""
52
53
54 class GDALBehaviorChangeException(RuntimeError):
55 """Raised when GDAL's behavior differs from the given arguments. For
56 example, antimeridian cutting is always on as of GDAL 2.2.0. Users
57 expecting it to be off will be presented with a MultiPolygon when the
58 rest of their code expects a Polygon.
59
60 # Raises an exception on GDAL >= 2.2.0
61 rasterio.warp.transform_geometry(
62 src_crs, dst_crs, antimeridian_cutting=False)
63 """
64
65
66 class GDALOptionNotImplementedError(RasterioError):
67 """A dataset opening or dataset creation option can't be supported
68
69 This will be raised from Rasterio's shim modules. For example, when
70 a user passes arguments to open_dataset() that can't be evaluated
71 by GDAL 1.x.
72 """
73
74 class GDALVersionError(RasterioError):
75 """Raised if the runtime version of GDAL does not meet the required
76 version of GDAL."""
77
78
79 class WindowEvaluationError(ValueError):
80 """Raised when window evaluation fails"""
81
82
83 class RasterioDeprecationWarning(UserWarning):
84 """Rasterio module deprecations"""
85
86
87 class RasterBlockError(RasterioError):
88 """Raised when raster block access fails"""
89
90
91 class BandOverviewError(UserWarning):
92 """Raised when a band overview access fails."""
93
94
95 class WarpOptionsError(RasterioError):
96 """Raised when options for a warp operation are invalid"""
97
98
99 class UnsupportedOperation(RasterioError):
100 """Raised when reading from a file opened in 'w' mode"""
101
102
103 class OverviewCreationError(RasterioError):
104 """Raised when creation of an overview fails"""
105
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/rasterio/errors.py b/rasterio/errors.py
--- a/rasterio/errors.py
+++ b/rasterio/errors.py
@@ -102,3 +102,7 @@
class OverviewCreationError(RasterioError):
"""Raised when creation of an overview fails"""
+
+
+class DatasetAttributeError(RasterioError, NotImplementedError):
+ """Raised when dataset attributes are misused"""
| {"golden_diff": "diff --git a/rasterio/errors.py b/rasterio/errors.py\n--- a/rasterio/errors.py\n+++ b/rasterio/errors.py\n@@ -102,3 +102,7 @@\n \n class OverviewCreationError(RasterioError):\n \"\"\"Raised when creation of an overview fails\"\"\"\n+\n+\n+class DatasetAttributeError(RasterioError, NotImplementedError):\n+ \"\"\"Raised when dataset attributes are misused\"\"\"\n", "issue": "more explicit NotImplementedError messages in read mode ?\nIn wanting to set a GeoTIFF's CRS, I encountered [this](https://github.com/mapbox/rasterio/blob/master/rasterio/_base.pyx#L516) NotImplementedError when trying to run the following code:\r\n```\r\nwith rasterio.open(filepath) as src:\r\n src.crs = \"EPSG:3857\"\r\n```\r\nThough in retrospect it is obvious the above will fail without explicitly specifying the proper mode , i.e. `'r+'` in this case, I was momentarily thrown off by the error and assumed something was wrong with my approach. Would a more explicit error message be useful here?\r\n\n", "before_files": [{"content": "\"\"\"Errors and Warnings.\"\"\"\n\nfrom click import FileError\n\n\nclass RasterioError(Exception):\n \"\"\"Root exception class\"\"\"\n\n\nclass WindowError(RasterioError):\n \"\"\"Raised when errors occur during window operations\"\"\"\n\n\nclass CRSError(ValueError):\n \"\"\"Raised when a CRS string or mapping is invalid or cannot serve\n to define a coordinate transformation.\"\"\"\n\n\nclass EnvError(RasterioError):\n \"\"\"Raised when the state of GDAL/AWS environment cannot be created\n or modified.\"\"\"\n\n\nclass DriverRegistrationError(ValueError):\n \"\"\"Raised when a format driver is requested but is not registered.\"\"\"\n\n\nclass FileOverwriteError(FileError):\n \"\"\"Raised when Rasterio's CLI refuses to clobber output files.\"\"\"\n\n def __init__(self, message):\n \"\"\"Raise FileOverwriteError with message as hint.\"\"\"\n super(FileOverwriteError, self).__init__('', hint=message)\n\n\nclass RasterioIOError(IOError):\n \"\"\"Raised when a dataset cannot be opened using one of the\n registered format drivers.\"\"\"\n\n\nclass NodataShadowWarning(UserWarning):\n \"\"\"Warn that a dataset's nodata attribute is shadowing its alpha band.\"\"\"\n\n def __str__(self):\n return (\"The dataset's nodata attribute is shadowing \"\n \"the alpha band. All masks will be determined \"\n \"by the nodata attribute\")\n\n\nclass NotGeoreferencedWarning(UserWarning):\n \"\"\"Warn that a dataset isn't georeferenced.\"\"\"\n\n\nclass GDALBehaviorChangeException(RuntimeError):\n \"\"\"Raised when GDAL's behavior differs from the given arguments. For\n example, antimeridian cutting is always on as of GDAL 2.2.0. Users\n expecting it to be off will be presented with a MultiPolygon when the\n rest of their code expects a Polygon.\n\n # Raises an exception on GDAL >= 2.2.0\n rasterio.warp.transform_geometry(\n src_crs, dst_crs, antimeridian_cutting=False)\n \"\"\"\n\n\nclass GDALOptionNotImplementedError(RasterioError):\n \"\"\"A dataset opening or dataset creation option can't be supported\n\n This will be raised from Rasterio's shim modules. 
For example, when\n a user passes arguments to open_dataset() that can't be evaluated\n by GDAL 1.x.\n \"\"\"\n\nclass GDALVersionError(RasterioError):\n \"\"\"Raised if the runtime version of GDAL does not meet the required\n version of GDAL.\"\"\"\n\n\nclass WindowEvaluationError(ValueError):\n \"\"\"Raised when window evaluation fails\"\"\"\n\n\nclass RasterioDeprecationWarning(UserWarning):\n \"\"\"Rasterio module deprecations\"\"\"\n\n\nclass RasterBlockError(RasterioError):\n \"\"\"Raised when raster block access fails\"\"\"\n\n\nclass BandOverviewError(UserWarning):\n \"\"\"Raised when a band overview access fails.\"\"\"\n\n\nclass WarpOptionsError(RasterioError):\n \"\"\"Raised when options for a warp operation are invalid\"\"\"\n\n\nclass UnsupportedOperation(RasterioError):\n \"\"\"Raised when reading from a file opened in 'w' mode\"\"\"\n\n\nclass OverviewCreationError(RasterioError):\n \"\"\"Raised when creation of an overview fails\"\"\"\n", "path": "rasterio/errors.py"}], "after_files": [{"content": "\"\"\"Errors and Warnings.\"\"\"\n\nfrom click import FileError\n\n\nclass RasterioError(Exception):\n \"\"\"Root exception class\"\"\"\n\n\nclass WindowError(RasterioError):\n \"\"\"Raised when errors occur during window operations\"\"\"\n\n\nclass CRSError(ValueError):\n \"\"\"Raised when a CRS string or mapping is invalid or cannot serve\n to define a coordinate transformation.\"\"\"\n\n\nclass EnvError(RasterioError):\n \"\"\"Raised when the state of GDAL/AWS environment cannot be created\n or modified.\"\"\"\n\n\nclass DriverRegistrationError(ValueError):\n \"\"\"Raised when a format driver is requested but is not registered.\"\"\"\n\n\nclass FileOverwriteError(FileError):\n \"\"\"Raised when Rasterio's CLI refuses to clobber output files.\"\"\"\n\n def __init__(self, message):\n \"\"\"Raise FileOverwriteError with message as hint.\"\"\"\n super(FileOverwriteError, self).__init__('', hint=message)\n\n\nclass RasterioIOError(IOError):\n \"\"\"Raised when a dataset cannot be opened using one of the\n registered format drivers.\"\"\"\n\n\nclass NodataShadowWarning(UserWarning):\n \"\"\"Warn that a dataset's nodata attribute is shadowing its alpha band.\"\"\"\n\n def __str__(self):\n return (\"The dataset's nodata attribute is shadowing \"\n \"the alpha band. All masks will be determined \"\n \"by the nodata attribute\")\n\n\nclass NotGeoreferencedWarning(UserWarning):\n \"\"\"Warn that a dataset isn't georeferenced.\"\"\"\n\n\nclass GDALBehaviorChangeException(RuntimeError):\n \"\"\"Raised when GDAL's behavior differs from the given arguments. For\n example, antimeridian cutting is always on as of GDAL 2.2.0. Users\n expecting it to be off will be presented with a MultiPolygon when the\n rest of their code expects a Polygon.\n\n # Raises an exception on GDAL >= 2.2.0\n rasterio.warp.transform_geometry(\n src_crs, dst_crs, antimeridian_cutting=False)\n \"\"\"\n\n\nclass GDALOptionNotImplementedError(RasterioError):\n \"\"\"A dataset opening or dataset creation option can't be supported\n\n This will be raised from Rasterio's shim modules. 
For example, when\n a user passes arguments to open_dataset() that can't be evaluated\n by GDAL 1.x.\n \"\"\"\n\nclass GDALVersionError(RasterioError):\n \"\"\"Raised if the runtime version of GDAL does not meet the required\n version of GDAL.\"\"\"\n\n\nclass WindowEvaluationError(ValueError):\n \"\"\"Raised when window evaluation fails\"\"\"\n\n\nclass RasterioDeprecationWarning(UserWarning):\n \"\"\"Rasterio module deprecations\"\"\"\n\n\nclass RasterBlockError(RasterioError):\n \"\"\"Raised when raster block access fails\"\"\"\n\n\nclass BandOverviewError(UserWarning):\n \"\"\"Raised when a band overview access fails.\"\"\"\n\n\nclass WarpOptionsError(RasterioError):\n \"\"\"Raised when options for a warp operation are invalid\"\"\"\n\n\nclass UnsupportedOperation(RasterioError):\n \"\"\"Raised when reading from a file opened in 'w' mode\"\"\"\n\n\nclass OverviewCreationError(RasterioError):\n \"\"\"Raised when creation of an overview fails\"\"\"\n\n\nclass DatasetAttributeError(RasterioError, NotImplementedError):\n \"\"\"Raised when dataset attributes are misused\"\"\"\n", "path": "rasterio/errors.py"}]} | 1,296 | 91 |
gh_patches_debug_30871 | rasdani/github-patches | git_diff | sublimelsp__LSP-1488 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
LspTextCommand should honor both session_name and capability if defined
If `capability` in [LspTextCommand](https://github.com/sublimelsp/LSP/blob/81a6e6aeb2c3a6aebad59fbd6eb0361301243bd1/plugin/core/registry.py#L52-L70) is defined, `session_name` is ignored. You might say that LSP-* plugins exactly know the capabilities of their server and thus never need to specify `capability` in a derived class, but in particular it's impossible for plugins to derive from LspExecuteCommand (which is derived from LspTextCommand), because that class already comes with a predefined `capability`. It can be convenient for a plugin to declare a derived class from LspExecuteCommand, so that their commands are only shown/enabled for corresponding filetypes:
```python
class FooExecuteCommand(LspExecuteCommand):
session_name = "foo"
```
**Describe the solution you'd like**
```python
def is_enabled(self, event: Optional[dict] = None, point: Optional[int] = None) -> bool:
if self.capability:
# At least one active session with the given capability must exist.
if not bool(self.best_session(self.capability, get_position(self.view, event, point))):
return False
if self.session_name:
# There must exist an active session with the given (config) name.
if not bool(self.session_by_name(self.session_name)):
return False
if not self.capability and not self.session_name:
# Any session will do.
return any(self.sessions())
return True
```
**Describe alternatives you've considered**
Make `session_name` win against `capability`
**Additional context**
Notice that the implementation suggested above doesn't guarantee that the sessions with the specified name and capability are the same (in case of multiple attached sessions for a view).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `plugin/core/registry.py`
Content:
```
1 from .configurations import ConfigManager
2 from .sessions import Session
3 from .settings import client_configs
4 from .typing import Optional, Any, Generator, Iterable
5 from .windows import WindowRegistry
6 import sublime
7 import sublime_plugin
8
9
10 def sessions_for_view(view: sublime.View, capability: Optional[str] = None) -> Generator[Session, None, None]:
11 """
12 Returns all sessions for this view, optionally matching the capability path.
13 """
14 window = view.window()
15 if window:
16 manager = windows.lookup(window)
17 yield from manager.sessions(view, capability)
18
19
20 def best_session(view: sublime.View, sessions: Iterable[Session], point: Optional[int] = None) -> Optional[Session]:
21 if point is None:
22 try:
23 point = view.sel()[0].b
24 except IndexError:
25 return None
26 try:
27 return max(sessions, key=lambda s: view.score_selector(point, s.config.priority_selector)) # type: ignore
28 except ValueError:
29 return None
30
31
32 configs = ConfigManager(client_configs.all)
33 client_configs.set_listener(configs.update)
34 windows = WindowRegistry(configs)
35
36
37 def get_position(view: sublime.View, event: Optional[dict] = None, point: Optional[int] = None) -> int:
38 if isinstance(point, int):
39 return point
40 elif event:
41 return view.window_to_text((event["x"], event["y"]))
42 else:
43 return view.sel()[0].begin()
44
45
46 class LspTextCommand(sublime_plugin.TextCommand):
47 """
48 Inherit from this class to define your requests that should be triggered via the command palette and/or a
49 keybinding.
50 """
51
52 # When this is defined in a derived class, the command is enabled if and only if there exists a session attached
53 # to the view that has the given capability. When both `capability` and `session_name` are defined, `capability`
54 # wins.
55 capability = ''
56
57 # When this is defined in a derived class, the command is enabled if and only if there exists a session attached
58 # to the view that has the given name. When both `capability` and `session_name` are defined, `capability` wins.
59 session_name = ''
60
61 def is_enabled(self, event: Optional[dict] = None, point: Optional[int] = None) -> bool:
62 if self.capability:
63 # At least one active session with the given capability must exist.
64 return bool(self.best_session(self.capability, get_position(self.view, event, point)))
65 elif self.session_name:
66 # There must exist an active session with the given (config) name.
67 return bool(self.session_by_name(self.session_name))
68 else:
69 # Any session will do.
70 return any(self.sessions())
71
72 def want_event(self) -> bool:
73 return True
74
75 def best_session(self, capability: str, point: Optional[int] = None) -> Optional[Session]:
76 listener = windows.listener_for_view(self.view)
77 return listener.session(capability, point) if listener else None
78
79 def session_by_name(self, name: Optional[str] = None) -> Optional[Session]:
80 target = name if name else self.session_name
81 for session in self.sessions():
82 if session.config.name == target:
83 return session
84 return None
85
86 def sessions(self, capability: Optional[str] = None) -> Generator[Session, None, None]:
87 yield from sessions_for_view(self.view, capability)
88
89
90 class LspRestartClientCommand(sublime_plugin.TextCommand):
91 def run(self, edit: Any) -> None:
92 window = self.view.window()
93 if window:
94 windows.lookup(window).restart_sessions_async()
95
96
97 class LspRecheckSessionsCommand(sublime_plugin.WindowCommand):
98 def run(self) -> None:
99 sublime.set_timeout_async(lambda: windows.lookup(self.window).restart_sessions_async())
100
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/plugin/core/registry.py b/plugin/core/registry.py
--- a/plugin/core/registry.py
+++ b/plugin/core/registry.py
@@ -49,25 +49,27 @@
keybinding.
"""
- # When this is defined in a derived class, the command is enabled if and only if there exists a session attached
- # to the view that has the given capability. When both `capability` and `session_name` are defined, `capability`
- # wins.
+ # When this is defined in a derived class, the command is enabled only if there exists a session attached to the
+ # view that has the given capability.
capability = ''
- # When this is defined in a derived class, the command is enabled if and only if there exists a session attached
- # to the view that has the given name. When both `capability` and `session_name` are defined, `capability` wins.
+ # When this is defined in a derived class, the command is enabled only if there exists a session attached to the
+ # view that has the given name.
session_name = ''
def is_enabled(self, event: Optional[dict] = None, point: Optional[int] = None) -> bool:
if self.capability:
# At least one active session with the given capability must exist.
- return bool(self.best_session(self.capability, get_position(self.view, event, point)))
- elif self.session_name:
+ if not self.best_session(self.capability, get_position(self.view, event, point)):
+ return False
+ if self.session_name:
# There must exist an active session with the given (config) name.
- return bool(self.session_by_name(self.session_name))
- else:
+ if not self.session_by_name(self.session_name):
+ return False
+ if not self.capability and not self.session_name:
# Any session will do.
return any(self.sessions())
+ return True
def want_event(self) -> bool:
return True
| {"golden_diff": "diff --git a/plugin/core/registry.py b/plugin/core/registry.py\n--- a/plugin/core/registry.py\n+++ b/plugin/core/registry.py\n@@ -49,25 +49,27 @@\n keybinding.\n \"\"\"\n \n- # When this is defined in a derived class, the command is enabled if and only if there exists a session attached\n- # to the view that has the given capability. When both `capability` and `session_name` are defined, `capability`\n- # wins.\n+ # When this is defined in a derived class, the command is enabled only if there exists a session attached to the\n+ # view that has the given capability.\n capability = ''\n \n- # When this is defined in a derived class, the command is enabled if and only if there exists a session attached\n- # to the view that has the given name. When both `capability` and `session_name` are defined, `capability` wins.\n+ # When this is defined in a derived class, the command is enabled only if there exists a session attached to the\n+ # view that has the given name.\n session_name = ''\n \n def is_enabled(self, event: Optional[dict] = None, point: Optional[int] = None) -> bool:\n if self.capability:\n # At least one active session with the given capability must exist.\n- return bool(self.best_session(self.capability, get_position(self.view, event, point)))\n- elif self.session_name:\n+ if not self.best_session(self.capability, get_position(self.view, event, point)):\n+ return False\n+ if self.session_name:\n # There must exist an active session with the given (config) name.\n- return bool(self.session_by_name(self.session_name))\n- else:\n+ if not self.session_by_name(self.session_name):\n+ return False\n+ if not self.capability and not self.session_name:\n # Any session will do.\n return any(self.sessions())\n+ return True\n \n def want_event(self) -> bool:\n return True\n", "issue": "LspTextCommand should honor both session_name and capability if defined\nIf `capability` in [LspTextCommand](https://github.com/sublimelsp/LSP/blob/81a6e6aeb2c3a6aebad59fbd6eb0361301243bd1/plugin/core/registry.py#L52-L70) is defined, `session_name` is ignored. You might say that LSP-* plugins exactly know the capabilities of their server and thus never need to specify `capability` in a derived class, but in particular it's impossible for plugins to derive from LspExecuteCommand (which is derived from LspTextCommand), because that class already comes with a predefined `capability`. 
It can be convenient for a plugin to declare a derived class from LspExecuteCommand, so that their commands are only shown/enabled for corresponding filetypes:\r\n```python\r\nclass FooExecuteCommand(LspExecuteCommand):\r\n session_name = \"foo\"\r\n```\r\n\r\n**Describe the solution you'd like**\r\n```python\r\n def is_enabled(self, event: Optional[dict] = None, point: Optional[int] = None) -> bool:\r\n if self.capability:\r\n # At least one active session with the given capability must exist.\r\n if not bool(self.best_session(self.capability, get_position(self.view, event, point))):\r\n return False\r\n if self.session_name:\r\n # There must exist an active session with the given (config) name.\r\n if not bool(self.session_by_name(self.session_name)):\r\n return False\r\n if not self.capability and not self.session_name:\r\n # Any session will do.\r\n return any(self.sessions())\r\n return True\r\n```\r\n\r\n**Describe alternatives you've considered**\r\nMake `session_name` win against `capability`\r\n\r\n**Additional context**\r\nNotice that the implementation suggested above doesn't guarantee that the sessions with the specified name and capability are the same (in case of multiple attached sessions for a view).\n", "before_files": [{"content": "from .configurations import ConfigManager\nfrom .sessions import Session\nfrom .settings import client_configs\nfrom .typing import Optional, Any, Generator, Iterable\nfrom .windows import WindowRegistry\nimport sublime\nimport sublime_plugin\n\n\ndef sessions_for_view(view: sublime.View, capability: Optional[str] = None) -> Generator[Session, None, None]:\n \"\"\"\n Returns all sessions for this view, optionally matching the capability path.\n \"\"\"\n window = view.window()\n if window:\n manager = windows.lookup(window)\n yield from manager.sessions(view, capability)\n\n\ndef best_session(view: sublime.View, sessions: Iterable[Session], point: Optional[int] = None) -> Optional[Session]:\n if point is None:\n try:\n point = view.sel()[0].b\n except IndexError:\n return None\n try:\n return max(sessions, key=lambda s: view.score_selector(point, s.config.priority_selector)) # type: ignore\n except ValueError:\n return None\n\n\nconfigs = ConfigManager(client_configs.all)\nclient_configs.set_listener(configs.update)\nwindows = WindowRegistry(configs)\n\n\ndef get_position(view: sublime.View, event: Optional[dict] = None, point: Optional[int] = None) -> int:\n if isinstance(point, int):\n return point\n elif event:\n return view.window_to_text((event[\"x\"], event[\"y\"]))\n else:\n return view.sel()[0].begin()\n\n\nclass LspTextCommand(sublime_plugin.TextCommand):\n \"\"\"\n Inherit from this class to define your requests that should be triggered via the command palette and/or a\n keybinding.\n \"\"\"\n\n # When this is defined in a derived class, the command is enabled if and only if there exists a session attached\n # to the view that has the given capability. When both `capability` and `session_name` are defined, `capability`\n # wins.\n capability = ''\n\n # When this is defined in a derived class, the command is enabled if and only if there exists a session attached\n # to the view that has the given name. 
When both `capability` and `session_name` are defined, `capability` wins.\n session_name = ''\n\n def is_enabled(self, event: Optional[dict] = None, point: Optional[int] = None) -> bool:\n if self.capability:\n # At least one active session with the given capability must exist.\n return bool(self.best_session(self.capability, get_position(self.view, event, point)))\n elif self.session_name:\n # There must exist an active session with the given (config) name.\n return bool(self.session_by_name(self.session_name))\n else:\n # Any session will do.\n return any(self.sessions())\n\n def want_event(self) -> bool:\n return True\n\n def best_session(self, capability: str, point: Optional[int] = None) -> Optional[Session]:\n listener = windows.listener_for_view(self.view)\n return listener.session(capability, point) if listener else None\n\n def session_by_name(self, name: Optional[str] = None) -> Optional[Session]:\n target = name if name else self.session_name\n for session in self.sessions():\n if session.config.name == target:\n return session\n return None\n\n def sessions(self, capability: Optional[str] = None) -> Generator[Session, None, None]:\n yield from sessions_for_view(self.view, capability)\n\n\nclass LspRestartClientCommand(sublime_plugin.TextCommand):\n def run(self, edit: Any) -> None:\n window = self.view.window()\n if window:\n windows.lookup(window).restart_sessions_async()\n\n\nclass LspRecheckSessionsCommand(sublime_plugin.WindowCommand):\n def run(self) -> None:\n sublime.set_timeout_async(lambda: windows.lookup(self.window).restart_sessions_async())\n", "path": "plugin/core/registry.py"}], "after_files": [{"content": "from .configurations import ConfigManager\nfrom .sessions import Session\nfrom .settings import client_configs\nfrom .typing import Optional, Any, Generator, Iterable\nfrom .windows import WindowRegistry\nimport sublime\nimport sublime_plugin\n\n\ndef sessions_for_view(view: sublime.View, capability: Optional[str] = None) -> Generator[Session, None, None]:\n \"\"\"\n Returns all sessions for this view, optionally matching the capability path.\n \"\"\"\n window = view.window()\n if window:\n manager = windows.lookup(window)\n yield from manager.sessions(view, capability)\n\n\ndef best_session(view: sublime.View, sessions: Iterable[Session], point: Optional[int] = None) -> Optional[Session]:\n if point is None:\n try:\n point = view.sel()[0].b\n except IndexError:\n return None\n try:\n return max(sessions, key=lambda s: view.score_selector(point, s.config.priority_selector)) # type: ignore\n except ValueError:\n return None\n\n\nconfigs = ConfigManager(client_configs.all)\nclient_configs.set_listener(configs.update)\nwindows = WindowRegistry(configs)\n\n\ndef get_position(view: sublime.View, event: Optional[dict] = None, point: Optional[int] = None) -> int:\n if isinstance(point, int):\n return point\n elif event:\n return view.window_to_text((event[\"x\"], event[\"y\"]))\n else:\n return view.sel()[0].begin()\n\n\nclass LspTextCommand(sublime_plugin.TextCommand):\n \"\"\"\n Inherit from this class to define your requests that should be triggered via the command palette and/or a\n keybinding.\n \"\"\"\n\n # When this is defined in a derived class, the command is enabled only if there exists a session attached to the\n # view that has the given capability.\n capability = ''\n\n # When this is defined in a derived class, the command is enabled only if there exists a session attached to the\n # view that has the given name.\n session_name = ''\n\n def 
is_enabled(self, event: Optional[dict] = None, point: Optional[int] = None) -> bool:\n if self.capability:\n # At least one active session with the given capability must exist.\n if not self.best_session(self.capability, get_position(self.view, event, point)):\n return False\n if self.session_name:\n # There must exist an active session with the given (config) name.\n if not self.session_by_name(self.session_name):\n return False\n if not self.capability and not self.session_name:\n # Any session will do.\n return any(self.sessions())\n return True\n\n def want_event(self) -> bool:\n return True\n\n def best_session(self, capability: str, point: Optional[int] = None) -> Optional[Session]:\n listener = windows.listener_for_view(self.view)\n return listener.session(capability, point) if listener else None\n\n def session_by_name(self, name: Optional[str] = None) -> Optional[Session]:\n target = name if name else self.session_name\n for session in self.sessions():\n if session.config.name == target:\n return session\n return None\n\n def sessions(self, capability: Optional[str] = None) -> Generator[Session, None, None]:\n yield from sessions_for_view(self.view, capability)\n\n\nclass LspRestartClientCommand(sublime_plugin.TextCommand):\n def run(self, edit: Any) -> None:\n window = self.view.window()\n if window:\n windows.lookup(window).restart_sessions_async()\n\n\nclass LspRecheckSessionsCommand(sublime_plugin.WindowCommand):\n def run(self) -> None:\n sublime.set_timeout_async(lambda: windows.lookup(self.window).restart_sessions_async())\n", "path": "plugin/core/registry.py"}]} | 1,692 | 449 |
gh_patches_debug_3419 | rasdani/github-patches | git_diff | encode__httpx-71 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
100% Test coverage
Let's get the test coverage up to 100%, and then force-pin it.
Any contributions towards this are welcome.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `httpcore/concurrency.py`
Content:
```
1 """
2 The `Reader` and `Writer` classes here provide a lightweight layer over
3 `asyncio.StreamReader` and `asyncio.StreamWriter`.
4
5 Similarly `PoolSemaphore` is a lightweight layer over `BoundedSemaphore`.
6
7 These classes help encapsulate the timeout logic, make it easier to unit-test
8 protocols, and help keep the rest of the package more `async`/`await`
9 based, and less strictly `asyncio`-specific.
10 """
11 import asyncio
12 import ssl
13 import typing
14
15 from .config import DEFAULT_TIMEOUT_CONFIG, PoolLimits, TimeoutConfig
16 from .exceptions import ConnectTimeout, PoolTimeout, ReadTimeout, WriteTimeout
17 from .interfaces import (
18 BasePoolSemaphore,
19 BaseReader,
20 BaseWriter,
21 ConcurrencyBackend,
22 Protocol,
23 )
24
25 OptionalTimeout = typing.Optional[TimeoutConfig]
26
27
28 SSL_MONKEY_PATCH_APPLIED = False
29
30
31 def ssl_monkey_patch() -> None:
32 """
33 Monkey-patch for https://bugs.python.org/issue36709
34
35 This prevents console errors when outstanding HTTPS connections
36 still exist at the point of exiting.
37
38 Clients which have been opened using a `with` block, or which have
39 had `close()` closed, will not exhibit this issue in the first place.
40 """
41 MonkeyPatch = asyncio.selector_events._SelectorSocketTransport # type: ignore
42
43 _write = MonkeyPatch.write
44
45 def _fixed_write(self, data: bytes) -> None: # type: ignore
46 if not self._loop.is_closed():
47 _write(self, data)
48
49 MonkeyPatch.write = _fixed_write
50
51
52 class Reader(BaseReader):
53 def __init__(
54 self, stream_reader: asyncio.StreamReader, timeout: TimeoutConfig
55 ) -> None:
56 self.stream_reader = stream_reader
57 self.timeout = timeout
58
59 async def read(self, n: int, timeout: OptionalTimeout = None) -> bytes:
60 if timeout is None:
61 timeout = self.timeout
62
63 try:
64 data = await asyncio.wait_for(
65 self.stream_reader.read(n), timeout.read_timeout
66 )
67 except asyncio.TimeoutError:
68 raise ReadTimeout()
69
70 return data
71
72
73 class Writer(BaseWriter):
74 def __init__(self, stream_writer: asyncio.StreamWriter, timeout: TimeoutConfig):
75 self.stream_writer = stream_writer
76 self.timeout = timeout
77
78 def write_no_block(self, data: bytes) -> None:
79 self.stream_writer.write(data)
80
81 async def write(self, data: bytes, timeout: OptionalTimeout = None) -> None:
82 if not data:
83 return
84
85 if timeout is None:
86 timeout = self.timeout
87
88 self.stream_writer.write(data)
89 try:
90 await asyncio.wait_for( # type: ignore
91 self.stream_writer.drain(), timeout.write_timeout
92 )
93 except asyncio.TimeoutError:
94 raise WriteTimeout()
95
96 async def close(self) -> None:
97 self.stream_writer.close()
98
99
100 class PoolSemaphore(BasePoolSemaphore):
101 def __init__(self, pool_limits: PoolLimits):
102 self.pool_limits = pool_limits
103
104 @property
105 def semaphore(self) -> typing.Optional[asyncio.BoundedSemaphore]:
106 if not hasattr(self, "_semaphore"):
107 max_connections = self.pool_limits.hard_limit
108 if max_connections is None:
109 self._semaphore = None
110 else:
111 self._semaphore = asyncio.BoundedSemaphore(value=max_connections)
112 return self._semaphore
113
114 async def acquire(self) -> None:
115 if self.semaphore is None:
116 return
117
118 timeout = self.pool_limits.pool_timeout
119 try:
120 await asyncio.wait_for(self.semaphore.acquire(), timeout)
121 except asyncio.TimeoutError:
122 raise PoolTimeout()
123
124 def release(self) -> None:
125 if self.semaphore is None:
126 return
127
128 self.semaphore.release()
129
130
131 class AsyncioBackend(ConcurrencyBackend):
132 def __init__(self) -> None:
133 global SSL_MONKEY_PATCH_APPLIED
134
135 if not SSL_MONKEY_PATCH_APPLIED:
136 ssl_monkey_patch()
137 SSL_MONKEY_PATCH_APPLIED = True
138
139 async def connect(
140 self,
141 hostname: str,
142 port: int,
143 ssl_context: typing.Optional[ssl.SSLContext],
144 timeout: TimeoutConfig,
145 ) -> typing.Tuple[BaseReader, BaseWriter, Protocol]:
146 try:
147 stream_reader, stream_writer = await asyncio.wait_for( # type: ignore
148 asyncio.open_connection(hostname, port, ssl=ssl_context),
149 timeout.connect_timeout,
150 )
151 except asyncio.TimeoutError:
152 raise ConnectTimeout()
153
154 ssl_object = stream_writer.get_extra_info("ssl_object")
155 if ssl_object is None:
156 ident = "http/1.1"
157 else:
158 ident = ssl_object.selected_alpn_protocol()
159 if ident is None:
160 ident = ssl_object.selected_npn_protocol()
161
162 reader = Reader(stream_reader=stream_reader, timeout=timeout)
163 writer = Writer(stream_writer=stream_writer, timeout=timeout)
164 protocol = Protocol.HTTP_2 if ident == "h2" else Protocol.HTTP_11
165
166 return (reader, writer, protocol)
167
168 def get_semaphore(self, limits: PoolLimits) -> BasePoolSemaphore:
169 return PoolSemaphore(limits)
170
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/httpcore/concurrency.py b/httpcore/concurrency.py
--- a/httpcore/concurrency.py
+++ b/httpcore/concurrency.py
@@ -76,7 +76,7 @@
self.timeout = timeout
def write_no_block(self, data: bytes) -> None:
- self.stream_writer.write(data)
+ self.stream_writer.write(data) # pragma: nocover
async def write(self, data: bytes, timeout: OptionalTimeout = None) -> None:
if not data:
| {"golden_diff": "diff --git a/httpcore/concurrency.py b/httpcore/concurrency.py\n--- a/httpcore/concurrency.py\n+++ b/httpcore/concurrency.py\n@@ -76,7 +76,7 @@\n self.timeout = timeout\n \n def write_no_block(self, data: bytes) -> None:\n- self.stream_writer.write(data)\n+ self.stream_writer.write(data) # pragma: nocover\n \n async def write(self, data: bytes, timeout: OptionalTimeout = None) -> None:\n if not data:\n", "issue": "100% Test coverage\nLet's get the test coverage up to 100%, and then force-pin it.\r\n\r\nAny contributions towards this are welcome.\n", "before_files": [{"content": "\"\"\"\nThe `Reader` and `Writer` classes here provide a lightweight layer over\n`asyncio.StreamReader` and `asyncio.StreamWriter`.\n\nSimilarly `PoolSemaphore` is a lightweight layer over `BoundedSemaphore`.\n\nThese classes help encapsulate the timeout logic, make it easier to unit-test\nprotocols, and help keep the rest of the package more `async`/`await`\nbased, and less strictly `asyncio`-specific.\n\"\"\"\nimport asyncio\nimport ssl\nimport typing\n\nfrom .config import DEFAULT_TIMEOUT_CONFIG, PoolLimits, TimeoutConfig\nfrom .exceptions import ConnectTimeout, PoolTimeout, ReadTimeout, WriteTimeout\nfrom .interfaces import (\n BasePoolSemaphore,\n BaseReader,\n BaseWriter,\n ConcurrencyBackend,\n Protocol,\n)\n\nOptionalTimeout = typing.Optional[TimeoutConfig]\n\n\nSSL_MONKEY_PATCH_APPLIED = False\n\n\ndef ssl_monkey_patch() -> None:\n \"\"\"\n Monky-patch for https://bugs.python.org/issue36709\n\n This prevents console errors when outstanding HTTPS connections\n still exist at the point of exiting.\n\n Clients which have been opened using a `with` block, or which have\n had `close()` closed, will not exhibit this issue in the first place.\n \"\"\"\n MonkeyPatch = asyncio.selector_events._SelectorSocketTransport # type: ignore\n\n _write = MonkeyPatch.write\n\n def _fixed_write(self, data: bytes) -> None: # type: ignore\n if not self._loop.is_closed():\n _write(self, data)\n\n MonkeyPatch.write = _fixed_write\n\n\nclass Reader(BaseReader):\n def __init__(\n self, stream_reader: asyncio.StreamReader, timeout: TimeoutConfig\n ) -> None:\n self.stream_reader = stream_reader\n self.timeout = timeout\n\n async def read(self, n: int, timeout: OptionalTimeout = None) -> bytes:\n if timeout is None:\n timeout = self.timeout\n\n try:\n data = await asyncio.wait_for(\n self.stream_reader.read(n), timeout.read_timeout\n )\n except asyncio.TimeoutError:\n raise ReadTimeout()\n\n return data\n\n\nclass Writer(BaseWriter):\n def __init__(self, stream_writer: asyncio.StreamWriter, timeout: TimeoutConfig):\n self.stream_writer = stream_writer\n self.timeout = timeout\n\n def write_no_block(self, data: bytes) -> None:\n self.stream_writer.write(data)\n\n async def write(self, data: bytes, timeout: OptionalTimeout = None) -> None:\n if not data:\n return\n\n if timeout is None:\n timeout = self.timeout\n\n self.stream_writer.write(data)\n try:\n await asyncio.wait_for( # type: ignore\n self.stream_writer.drain(), timeout.write_timeout\n )\n except asyncio.TimeoutError:\n raise WriteTimeout()\n\n async def close(self) -> None:\n self.stream_writer.close()\n\n\nclass PoolSemaphore(BasePoolSemaphore):\n def __init__(self, pool_limits: PoolLimits):\n self.pool_limits = pool_limits\n\n @property\n def semaphore(self) -> typing.Optional[asyncio.BoundedSemaphore]:\n if not hasattr(self, \"_semaphore\"):\n max_connections = self.pool_limits.hard_limit\n if max_connections is None:\n self._semaphore = None\n else:\n 
self._semaphore = asyncio.BoundedSemaphore(value=max_connections)\n return self._semaphore\n\n async def acquire(self) -> None:\n if self.semaphore is None:\n return\n\n timeout = self.pool_limits.pool_timeout\n try:\n await asyncio.wait_for(self.semaphore.acquire(), timeout)\n except asyncio.TimeoutError:\n raise PoolTimeout()\n\n def release(self) -> None:\n if self.semaphore is None:\n return\n\n self.semaphore.release()\n\n\nclass AsyncioBackend(ConcurrencyBackend):\n def __init__(self) -> None:\n global SSL_MONKEY_PATCH_APPLIED\n\n if not SSL_MONKEY_PATCH_APPLIED:\n ssl_monkey_patch()\n SSL_MONKEY_PATCH_APPLIED = True\n\n async def connect(\n self,\n hostname: str,\n port: int,\n ssl_context: typing.Optional[ssl.SSLContext],\n timeout: TimeoutConfig,\n ) -> typing.Tuple[BaseReader, BaseWriter, Protocol]:\n try:\n stream_reader, stream_writer = await asyncio.wait_for( # type: ignore\n asyncio.open_connection(hostname, port, ssl=ssl_context),\n timeout.connect_timeout,\n )\n except asyncio.TimeoutError:\n raise ConnectTimeout()\n\n ssl_object = stream_writer.get_extra_info(\"ssl_object\")\n if ssl_object is None:\n ident = \"http/1.1\"\n else:\n ident = ssl_object.selected_alpn_protocol()\n if ident is None:\n ident = ssl_object.selected_npn_protocol()\n\n reader = Reader(stream_reader=stream_reader, timeout=timeout)\n writer = Writer(stream_writer=stream_writer, timeout=timeout)\n protocol = Protocol.HTTP_2 if ident == \"h2\" else Protocol.HTTP_11\n\n return (reader, writer, protocol)\n\n def get_semaphore(self, limits: PoolLimits) -> BasePoolSemaphore:\n return PoolSemaphore(limits)\n", "path": "httpcore/concurrency.py"}], "after_files": [{"content": "\"\"\"\nThe `Reader` and `Writer` classes here provide a lightweight layer over\n`asyncio.StreamReader` and `asyncio.StreamWriter`.\n\nSimilarly `PoolSemaphore` is a lightweight layer over `BoundedSemaphore`.\n\nThese classes help encapsulate the timeout logic, make it easier to unit-test\nprotocols, and help keep the rest of the package more `async`/`await`\nbased, and less strictly `asyncio`-specific.\n\"\"\"\nimport asyncio\nimport ssl\nimport typing\n\nfrom .config import DEFAULT_TIMEOUT_CONFIG, PoolLimits, TimeoutConfig\nfrom .exceptions import ConnectTimeout, PoolTimeout, ReadTimeout, WriteTimeout\nfrom .interfaces import (\n BasePoolSemaphore,\n BaseReader,\n BaseWriter,\n ConcurrencyBackend,\n Protocol,\n)\n\nOptionalTimeout = typing.Optional[TimeoutConfig]\n\n\nSSL_MONKEY_PATCH_APPLIED = False\n\n\ndef ssl_monkey_patch() -> None:\n \"\"\"\n Monky-patch for https://bugs.python.org/issue36709\n\n This prevents console errors when outstanding HTTPS connections\n still exist at the point of exiting.\n\n Clients which have been opened using a `with` block, or which have\n had `close()` closed, will not exhibit this issue in the first place.\n \"\"\"\n MonkeyPatch = asyncio.selector_events._SelectorSocketTransport # type: ignore\n\n _write = MonkeyPatch.write\n\n def _fixed_write(self, data: bytes) -> None: # type: ignore\n if not self._loop.is_closed():\n _write(self, data)\n\n MonkeyPatch.write = _fixed_write\n\n\nclass Reader(BaseReader):\n def __init__(\n self, stream_reader: asyncio.StreamReader, timeout: TimeoutConfig\n ) -> None:\n self.stream_reader = stream_reader\n self.timeout = timeout\n\n async def read(self, n: int, timeout: OptionalTimeout = None) -> bytes:\n if timeout is None:\n timeout = self.timeout\n\n try:\n data = await asyncio.wait_for(\n self.stream_reader.read(n), timeout.read_timeout\n )\n except 
asyncio.TimeoutError:\n raise ReadTimeout()\n\n return data\n\n\nclass Writer(BaseWriter):\n def __init__(self, stream_writer: asyncio.StreamWriter, timeout: TimeoutConfig):\n self.stream_writer = stream_writer\n self.timeout = timeout\n\n def write_no_block(self, data: bytes) -> None:\n self.stream_writer.write(data) # pragma: nocover\n\n async def write(self, data: bytes, timeout: OptionalTimeout = None) -> None:\n if not data:\n return\n\n if timeout is None:\n timeout = self.timeout\n\n self.stream_writer.write(data)\n try:\n await asyncio.wait_for( # type: ignore\n self.stream_writer.drain(), timeout.write_timeout\n )\n except asyncio.TimeoutError:\n raise WriteTimeout()\n\n async def close(self) -> None:\n self.stream_writer.close()\n\n\nclass PoolSemaphore(BasePoolSemaphore):\n def __init__(self, pool_limits: PoolLimits):\n self.pool_limits = pool_limits\n\n @property\n def semaphore(self) -> typing.Optional[asyncio.BoundedSemaphore]:\n if not hasattr(self, \"_semaphore\"):\n max_connections = self.pool_limits.hard_limit\n if max_connections is None:\n self._semaphore = None\n else:\n self._semaphore = asyncio.BoundedSemaphore(value=max_connections)\n return self._semaphore\n\n async def acquire(self) -> None:\n if self.semaphore is None:\n return\n\n timeout = self.pool_limits.pool_timeout\n try:\n await asyncio.wait_for(self.semaphore.acquire(), timeout)\n except asyncio.TimeoutError:\n raise PoolTimeout()\n\n def release(self) -> None:\n if self.semaphore is None:\n return\n\n self.semaphore.release()\n\n\nclass AsyncioBackend(ConcurrencyBackend):\n def __init__(self) -> None:\n global SSL_MONKEY_PATCH_APPLIED\n\n if not SSL_MONKEY_PATCH_APPLIED:\n ssl_monkey_patch()\n SSL_MONKEY_PATCH_APPLIED = True\n\n async def connect(\n self,\n hostname: str,\n port: int,\n ssl_context: typing.Optional[ssl.SSLContext],\n timeout: TimeoutConfig,\n ) -> typing.Tuple[BaseReader, BaseWriter, Protocol]:\n try:\n stream_reader, stream_writer = await asyncio.wait_for( # type: ignore\n asyncio.open_connection(hostname, port, ssl=ssl_context),\n timeout.connect_timeout,\n )\n except asyncio.TimeoutError:\n raise ConnectTimeout()\n\n ssl_object = stream_writer.get_extra_info(\"ssl_object\")\n if ssl_object is None:\n ident = \"http/1.1\"\n else:\n ident = ssl_object.selected_alpn_protocol()\n if ident is None:\n ident = ssl_object.selected_npn_protocol()\n\n reader = Reader(stream_reader=stream_reader, timeout=timeout)\n writer = Writer(stream_writer=stream_writer, timeout=timeout)\n protocol = Protocol.HTTP_2 if ident == \"h2\" else Protocol.HTTP_11\n\n return (reader, writer, protocol)\n\n def get_semaphore(self, limits: PoolLimits) -> BasePoolSemaphore:\n return PoolSemaphore(limits)\n", "path": "httpcore/concurrency.py"}]} | 1,823 | 112 |
gh_patches_debug_30243 | rasdani/github-patches | git_diff | lhotse-speech__lhotse-847 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Whisper workflow supervision `end` may be incorrect
So I ran the model on one of the AMI headset recordings (~5000s) and it seems like the "end" actually shows the end of the segment, not the duration. Here is the JSON containing the `results["segments"]`: https://drive.google.com/file/d/169igkcDY2SmMs5k3hOhHip89T4MQDnKs/view?usp=sharing
_Originally posted by @desh2608 in https://github.com/lhotse-speech/lhotse/pull/834#discussion_r988376898_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lhotse/workflows/whisper.py`
Content:
```
1 import logging
2 from typing import Any, Generator, List, Optional, Union
3
4 import torch
5
6 from lhotse import CutSet, MonoCut, Recording, RecordingSet, SupervisionSegment
7 from lhotse.qa import trim_supervisions_to_recordings
8 from lhotse.utils import fastcopy, is_module_available
9
10
11 def annotate_with_whisper(
12 manifest: Union[RecordingSet, CutSet],
13 language: Optional[str] = None,
14 model_name: str = "base",
15 device: str = "cpu",
16 ) -> Generator[MonoCut, None, None]:
17 """
18 Use OpenAI Whisper model to annotate either RECORDINGS_MANIFEST, RECORDINGS_DIR, or CUTS_MANIFEST.
19 It will perform automatic segmentation, transcription, and language identification. If
20 the first argument is a CutSet, it will overwrite the supervisions with the results of the inference.
21
22 Note: this is an experimental feature of Lhotse, and is not guaranteed to yield
23 high quality of data.
24
25 See the original repo for more details: https://github.com/openai/whisper
26
27 :param manifest: a ``RecordingSet`` or ``CutSet`` object.
28 :param language: specify the language if known upfront, otherwise it will be auto-detected.
29 :param model_name: one of available Whisper variants (base, medium, large, etc.).
30 :param device: Where to run the inference (cpu, cuda, etc.).
31 :return: a generator of cuts (use ``CutSet.open_writer()`` to write them).
32 """
33 assert is_module_available("whisper"), (
34 "This function expects OpenAI Whisper to be installed. "
35 "You can install it via 'pip install git+https://github.com/openai/whisper.git' "
36 "(see https://github.com/openai/whisper for details)."
37 )
38
39 if isinstance(manifest, RecordingSet):
40 yield from _annotate_recordings(manifest, language, model_name, device)
41 elif isinstance(manifest, CutSet):
42 yield from _annotate_cuts(manifest, language, model_name, device)
43 else:
44 raise ValueError("The ``manifest`` must be either a RecordingSet or a CutSet.")
45
46
47 def _annotate_recordings(
48 recordings: RecordingSet, language: str, model_name: str, device: str
49 ):
50 """
51 Helper function that annotates a RecordingSet with Whisper.
52 """
53 import whisper
54
55 model = whisper.load_model(model_name, device=device)
56
57 for recording in recordings:
58 if recording.num_channels > 1:
59 logging.warning(
60 f"Skipping recording '{recording.id}'. It has {recording.num_channels} channels, "
61 f"but we currently only support mono input."
62 )
63 continue
64 audio = torch.from_numpy(recording.resample(16000).load_audio()).squeeze(0)
65 result = whisper.transcribe(model=model, audio=audio, language=language)
66 supervisions = [
67 SupervisionSegment(
68 id=f"{recording.id}-{segment['id']:06d}",
69 recording_id=recording.id,
70 start=round(segment["start"], ndigits=8),
71 duration=round(segment["end"], ndigits=8),
72 text=segment["text"].strip(),
73 language=result["language"],
74 )
75 for segment in result["segments"]
76 ]
77 cut = recording.to_cut()
78 if supervisions:
79 supervisions = _postprocess_timestamps(supervisions)
80 cut.supervisions = list(
81 trim_supervisions_to_recordings(
82 recordings=recording, supervisions=supervisions, verbose=False
83 )
84 )
85 yield cut
86
87
88 def _annotate_cuts(cuts: CutSet, language: str, model_name: str, device: str):
89 """
90 Helper function that annotates a CutSet with Whisper.
91 """
92 import whisper
93
94 model = whisper.load_model(model_name, device=device)
95
96 for cut in cuts:
97 if cut.num_channels > 1:
98 logging.warning(
99 f"Skipping cut '{cut.id}'. It has {cut.num_channels} channels, "
100 f"but we currently only support mono input."
101 )
102 continue
103 audio = torch.from_numpy(cut.resample(16000).load_audio()).squeeze(0)
104 result = whisper.transcribe(model=model, audio=audio, language=language)
105 supervisions = [
106 SupervisionSegment(
107 id=f"{cut.id}-{segment['id']:06d}",
108 recording_id=cut.recording_id,
109 start=round(segment["start"], ndigits=8),
110 duration=max(cut.duration, round(segment["end"], ndigits=8)),
111 text=segment["text"].strip(),
112 language=result["language"],
113 )
114 for segment in result["segments"]
115 ]
116 new_cut = fastcopy(cut, supervisions=_postprocess_timestamps(supervisions))
117 yield new_cut
118
119
120 def _postprocess_timestamps(supervisions: List[SupervisionSegment]):
121 """
122 Whisper tends to have a lot of overlapping segments due to inaccurate end timestamps.
123 Under a strong assumption that the input speech is non-overlapping, we can fix that
124 by always truncating to the start timestamp of the next segment.
125 """
126 from cytoolz import sliding_window
127
128 if len(supervisions) < 2:
129 return supervisions
130 out = []
131 for cur, nxt in sliding_window(2, supervisions):
132 if cur.end > nxt.start:
133 cur = cur.trim(end=nxt.start)
134 out.append(cur)
135 out.append(nxt)
136 return out
137
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lhotse/workflows/whisper.py b/lhotse/workflows/whisper.py
--- a/lhotse/workflows/whisper.py
+++ b/lhotse/workflows/whisper.py
@@ -3,7 +3,14 @@
import torch
-from lhotse import CutSet, MonoCut, Recording, RecordingSet, SupervisionSegment
+from lhotse import (
+ CutSet,
+ MonoCut,
+ Recording,
+ RecordingSet,
+ SupervisionSegment,
+ add_durations,
+)
from lhotse.qa import trim_supervisions_to_recordings
from lhotse.utils import fastcopy, is_module_available
@@ -68,7 +75,9 @@
id=f"{recording.id}-{segment['id']:06d}",
recording_id=recording.id,
start=round(segment["start"], ndigits=8),
- duration=round(segment["end"], ndigits=8),
+ duration=add_durations(
+ segment["end"], -segment["start"], sampling_rate=16000
+ ),
text=segment["text"].strip(),
language=result["language"],
)
@@ -107,7 +116,12 @@
id=f"{cut.id}-{segment['id']:06d}",
recording_id=cut.recording_id,
start=round(segment["start"], ndigits=8),
- duration=max(cut.duration, round(segment["end"], ndigits=8)),
+ duration=max(
+ cut.duration,
+ add_durations(
+ segment["end"], -segment["start"], sampling_rate=16000
+ ),
+ ),
text=segment["text"].strip(),
language=result["language"],
)
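In the hunks above, `add_durations(segment["end"], -segment["start"], sampling_rate=16000)` replaces uses of `segment["end"]` alone: Whisper segments carry absolute start/end timestamps, so a supervision's duration is `end - start`, and lhotse computes it sample-exactly to avoid float drift. A standalone sketch of the same computation in plain Python — the segment data is made up, and the round-to-samples step only approximates what `add_durations` does:

```python
# Sketch: converting Whisper-style segments (absolute start/end seconds)
# into (start, duration) pairs, mirroring the fix above.

segments = [  # hypothetical model output
    {"id": 0, "start": 0.00, "end": 4.20, "text": "hello"},
    {"id": 1, "start": 4.20, "end": 9.75, "text": "world"},
]

def to_start_duration(segment, sampling_rate=16000):
    # Round to whole samples before subtracting to keep the arithmetic exact.
    start = round(segment["start"] * sampling_rate)
    end = round(segment["end"] * sampling_rate)
    return start / sampling_rate, (end - start) / sampling_rate

for seg in segments:
    start, duration = to_start_duration(seg)
    # The buggy version effectively used seg["end"] as the duration.
    assert abs((start + duration) - seg["end"]) < 1e-6
```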
| {"golden_diff": "diff --git a/lhotse/workflows/whisper.py b/lhotse/workflows/whisper.py\n--- a/lhotse/workflows/whisper.py\n+++ b/lhotse/workflows/whisper.py\n@@ -3,7 +3,14 @@\n \n import torch\n \n-from lhotse import CutSet, MonoCut, Recording, RecordingSet, SupervisionSegment\n+from lhotse import (\n+ CutSet,\n+ MonoCut,\n+ Recording,\n+ RecordingSet,\n+ SupervisionSegment,\n+ add_durations,\n+)\n from lhotse.qa import trim_supervisions_to_recordings\n from lhotse.utils import fastcopy, is_module_available\n \n@@ -68,7 +75,9 @@\n id=f\"{recording.id}-{segment['id']:06d}\",\n recording_id=recording.id,\n start=round(segment[\"start\"], ndigits=8),\n- duration=round(segment[\"end\"], ndigits=8),\n+ duration=add_durations(\n+ segment[\"end\"], -segment[\"start\"], sampling_rate=16000\n+ ),\n text=segment[\"text\"].strip(),\n language=result[\"language\"],\n )\n@@ -107,7 +116,12 @@\n id=f\"{cut.id}-{segment['id']:06d}\",\n recording_id=cut.recording_id,\n start=round(segment[\"start\"], ndigits=8),\n- duration=max(cut.duration, round(segment[\"end\"], ndigits=8)),\n+ duration=max(\n+ cut.duration,\n+ add_durations(\n+ segment[\"end\"], -segment[\"start\"], sampling_rate=16000\n+ ),\n+ ),\n text=segment[\"text\"].strip(),\n language=result[\"language\"],\n )\n", "issue": "Whisper workflow supervision `end` may be incorrect\nSo I ran the model on one of the AMI headset recordings (~5000s) and it seems like the \"end\" actually shows the end of the segment, not the duration. Here is the JSON containing the `results[\"segments\"]`: https://drive.google.com/file/d/169igkcDY2SmMs5k3hOhHip89T4MQDnKs/view?usp=sharing\r\n\r\n_Originally posted by @desh2608 in https://github.com/lhotse-speech/lhotse/pull/834#discussion_r988376898_\r\n \n", "before_files": [{"content": "import logging\nfrom typing import Any, Generator, List, Optional, Union\n\nimport torch\n\nfrom lhotse import CutSet, MonoCut, Recording, RecordingSet, SupervisionSegment\nfrom lhotse.qa import trim_supervisions_to_recordings\nfrom lhotse.utils import fastcopy, is_module_available\n\n\ndef annotate_with_whisper(\n manifest: Union[RecordingSet, CutSet],\n language: Optional[str] = None,\n model_name: str = \"base\",\n device: str = \"cpu\",\n) -> Generator[MonoCut, None, None]:\n \"\"\"\n Use OpenAI Whisper model to annotate either RECORDINGS_MANIFEST, RECORDINGS_DIR, or CUTS_MANIFEST.\n It will perform automatic segmentation, transcription, and language identification. If\n the first argument is a CutSet, it will overwrite the supervisions with the results of the inference.\n\n Note: this is an experimental feature of Lhotse, and is not guaranteed to yield\n high quality of data.\n\n See the original repo for more details: https://github.com/openai/whisper\n\n :param manifest: a ``RecordingSet`` or ``CutSet`` object.\n :param language: specify the language if known upfront, otherwise it will be auto-detected.\n :param model_name: one of available Whisper variants (base, medium, large, etc.).\n :param device: Where to run the inference (cpu, cuda, etc.).\n :return: a generator of cuts (use ``CutSet.open_writer()`` to write them).\n \"\"\"\n assert is_module_available(\"whisper\"), (\n \"This function expects OpenAI Whisper to be installed. 
\"\n \"You can install it via 'pip install git+https://github.com/openai/whisper.git' \"\n \"(see https://github.com/openai/whisper for details).\"\n )\n\n if isinstance(manifest, RecordingSet):\n yield from _annotate_recordings(manifest, language, model_name, device)\n elif isinstance(manifest, CutSet):\n yield from _annotate_cuts(manifest, language, model_name, device)\n else:\n raise ValueError(\"The ``manifest`` must be either a RecordingSet or a CutSet.\")\n\n\ndef _annotate_recordings(\n recordings: RecordingSet, language: str, model_name: str, device: str\n):\n \"\"\"\n Helper function that annotates a RecordingSet with Whisper.\n \"\"\"\n import whisper\n\n model = whisper.load_model(model_name, device=device)\n\n for recording in recordings:\n if recording.num_channels > 1:\n logging.warning(\n f\"Skipping recording '{recording.id}'. It has {recording.num_channels} channels, \"\n f\"but we currently only support mono input.\"\n )\n continue\n audio = torch.from_numpy(recording.resample(16000).load_audio()).squeeze(0)\n result = whisper.transcribe(model=model, audio=audio, language=language)\n supervisions = [\n SupervisionSegment(\n id=f\"{recording.id}-{segment['id']:06d}\",\n recording_id=recording.id,\n start=round(segment[\"start\"], ndigits=8),\n duration=round(segment[\"end\"], ndigits=8),\n text=segment[\"text\"].strip(),\n language=result[\"language\"],\n )\n for segment in result[\"segments\"]\n ]\n cut = recording.to_cut()\n if supervisions:\n supervisions = _postprocess_timestamps(supervisions)\n cut.supervisions = list(\n trim_supervisions_to_recordings(\n recordings=recording, supervisions=supervisions, verbose=False\n )\n )\n yield cut\n\n\ndef _annotate_cuts(cuts: CutSet, language: str, model_name: str, device: str):\n \"\"\"\n Helper function that annotates a CutSet with Whisper.\n \"\"\"\n import whisper\n\n model = whisper.load_model(model_name, device=device)\n\n for cut in cuts:\n if cut.num_channels > 1:\n logging.warning(\n f\"Skipping cut '{cut.id}'. 
It has {cut.num_channels} channels, \"\n f\"but we currently only support mono input.\"\n )\n continue\n audio = torch.from_numpy(cut.resample(16000).load_audio()).squeeze(0)\n result = whisper.transcribe(model=model, audio=audio, language=language)\n supervisions = [\n SupervisionSegment(\n id=f\"{cut.id}-{segment['id']:06d}\",\n recording_id=cut.recording_id,\n start=round(segment[\"start\"], ndigits=8),\n duration=max(cut.duration, round(segment[\"end\"], ndigits=8)),\n text=segment[\"text\"].strip(),\n language=result[\"language\"],\n )\n for segment in result[\"segments\"]\n ]\n new_cut = fastcopy(cut, supervisions=_postprocess_timestamps(supervisions))\n yield new_cut\n\n\ndef _postprocess_timestamps(supervisions: List[SupervisionSegment]):\n \"\"\"\n Whisper tends to have a lot of overlapping segments due to inaccurate end timestamps.\n Under a strong assumption that the input speech is non-overlapping, we can fix that\n by always truncating to the start timestamp of the next segment.\n \"\"\"\n from cytoolz import sliding_window\n\n if len(supervisions) < 2:\n return supervisions\n out = []\n for cur, nxt in sliding_window(2, supervisions):\n if cur.end > nxt.start:\n cur = cur.trim(end=nxt.start)\n out.append(cur)\n out.append(nxt)\n return out\n", "path": "lhotse/workflows/whisper.py"}], "after_files": [{"content": "import logging\nfrom typing import Any, Generator, List, Optional, Union\n\nimport torch\n\nfrom lhotse import (\n CutSet,\n MonoCut,\n Recording,\n RecordingSet,\n SupervisionSegment,\n add_durations,\n)\nfrom lhotse.qa import trim_supervisions_to_recordings\nfrom lhotse.utils import fastcopy, is_module_available\n\n\ndef annotate_with_whisper(\n manifest: Union[RecordingSet, CutSet],\n language: Optional[str] = None,\n model_name: str = \"base\",\n device: str = \"cpu\",\n) -> Generator[MonoCut, None, None]:\n \"\"\"\n Use OpenAI Whisper model to annotate either RECORDINGS_MANIFEST, RECORDINGS_DIR, or CUTS_MANIFEST.\n It will perform automatic segmentation, transcription, and language identification. If\n the first argument is a CutSet, it will overwrite the supervisions with the results of the inference.\n\n Note: this is an experimental feature of Lhotse, and is not guaranteed to yield\n high quality of data.\n\n See the original repo for more details: https://github.com/openai/whisper\n\n :param manifest: a ``RecordingSet`` or ``CutSet`` object.\n :param language: specify the language if known upfront, otherwise it will be auto-detected.\n :param model_name: one of available Whisper variants (base, medium, large, etc.).\n :param device: Where to run the inference (cpu, cuda, etc.).\n :return: a generator of cuts (use ``CutSet.open_writer()`` to write them).\n \"\"\"\n assert is_module_available(\"whisper\"), (\n \"This function expects OpenAI Whisper to be installed. 
\"\n \"You can install it via 'pip install git+https://github.com/openai/whisper.git' \"\n \"(see https://github.com/openai/whisper for details).\"\n )\n\n if isinstance(manifest, RecordingSet):\n yield from _annotate_recordings(manifest, language, model_name, device)\n elif isinstance(manifest, CutSet):\n yield from _annotate_cuts(manifest, language, model_name, device)\n else:\n raise ValueError(\"The ``manifest`` must be either a RecordingSet or a CutSet.\")\n\n\ndef _annotate_recordings(\n recordings: RecordingSet, language: str, model_name: str, device: str\n):\n \"\"\"\n Helper function that annotates a RecordingSet with Whisper.\n \"\"\"\n import whisper\n\n model = whisper.load_model(model_name, device=device)\n\n for recording in recordings:\n if recording.num_channels > 1:\n logging.warning(\n f\"Skipping recording '{recording.id}'. It has {recording.num_channels} channels, \"\n f\"but we currently only support mono input.\"\n )\n continue\n audio = torch.from_numpy(recording.resample(16000).load_audio()).squeeze(0)\n result = whisper.transcribe(model=model, audio=audio, language=language)\n supervisions = [\n SupervisionSegment(\n id=f\"{recording.id}-{segment['id']:06d}\",\n recording_id=recording.id,\n start=round(segment[\"start\"], ndigits=8),\n duration=add_durations(\n segment[\"end\"], -segment[\"start\"], sampling_rate=16000\n ),\n text=segment[\"text\"].strip(),\n language=result[\"language\"],\n )\n for segment in result[\"segments\"]\n ]\n cut = recording.to_cut()\n if supervisions:\n supervisions = _postprocess_timestamps(supervisions)\n cut.supervisions = list(\n trim_supervisions_to_recordings(\n recordings=recording, supervisions=supervisions, verbose=False\n )\n )\n yield cut\n\n\ndef _annotate_cuts(cuts: CutSet, language: str, model_name: str, device: str):\n \"\"\"\n Helper function that annotates a CutSet with Whisper.\n \"\"\"\n import whisper\n\n model = whisper.load_model(model_name, device=device)\n\n for cut in cuts:\n if cut.num_channels > 1:\n logging.warning(\n f\"Skipping cut '{cut.id}'. It has {cut.num_channels} channels, \"\n f\"but we currently only support mono input.\"\n )\n continue\n audio = torch.from_numpy(cut.resample(16000).load_audio()).squeeze(0)\n result = whisper.transcribe(model=model, audio=audio, language=language)\n supervisions = [\n SupervisionSegment(\n id=f\"{cut.id}-{segment['id']:06d}\",\n recording_id=cut.recording_id,\n start=round(segment[\"start\"], ndigits=8),\n duration=max(\n cut.duration,\n add_durations(\n segment[\"end\"], -segment[\"start\"], sampling_rate=16000\n ),\n ),\n text=segment[\"text\"].strip(),\n language=result[\"language\"],\n )\n for segment in result[\"segments\"]\n ]\n new_cut = fastcopy(cut, supervisions=_postprocess_timestamps(supervisions))\n yield new_cut\n\n\ndef _postprocess_timestamps(supervisions: List[SupervisionSegment]):\n \"\"\"\n Whisper tends to have a lot of overlapping segments due to inaccurate end timestamps.\n Under a strong assumption that the input speech is non-overlapping, we can fix that\n by always truncating to the start timestamp of the next segment.\n \"\"\"\n from cytoolz import sliding_window\n\n if len(supervisions) < 2:\n return supervisions\n out = []\n for cur, nxt in sliding_window(2, supervisions):\n if cur.end > nxt.start:\n cur = cur.trim(end=nxt.start)\n out.append(cur)\n out.append(nxt)\n return out\n", "path": "lhotse/workflows/whisper.py"}]} | 1,921 | 389 |
gh_patches_debug_25594 | rasdani/github-patches | git_diff | chainer__chainer-4769 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
backward of F.normalize is not stable
`NormalizeL2.backward` computes 0/0 if the input contains a zero vector. PR #4190, which I wrote, caused this. Sorry.
To begin with, x/(||x|| + eps) is C^1 but not C^2 (at x=0). The correct backward might not be a good choice.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `chainer/functions/normalization/l2_normalization.py`
Content:
```
1 import numpy
2
3 from chainer.backends import cuda
4 from chainer import function_node
5 import chainer.functions
6 from chainer import utils
7 from chainer.utils import type_check
8
9
10 class NormalizeL2(function_node.FunctionNode):
11
12 """L2 normalization"""
13
14 def __init__(self, eps=1e-5, axis=1):
15 self.eps = eps
16 if isinstance(axis, int):
17 axis = axis,
18 self.axis = axis
19
20 def check_type_forward(self, in_types):
21 type_check.expect(in_types.size() == 1)
22 x_type, = in_types
23
24 type_check.expect(
25 x_type.dtype == numpy.float32,
26 )
27
28 def forward(self, inputs):
29 self.retain_inputs((0,))
30 x, = inputs
31 xp = cuda.get_array_module(x)
32 norm = (xp.sqrt(xp.sum(xp.square(x), axis=self.axis, keepdims=True))
33 + x.dtype.type(self.eps))
34 return utils.force_array(x / norm),
35
36 def backward(self, indexes, grad_outputs):
37 x, = self.get_retained_inputs()
38 gy, = grad_outputs
39 F = chainer.functions
40
41 norm_noeps = F.sqrt(F.sum(F.square(x), axis=self.axis, keepdims=True))
42 norm = norm_noeps + self.eps
43 norm = F.broadcast_to(norm, gy.shape)
44
45 x_gy_reduced = F.sum((x * gy), axis=self.axis, keepdims=True)
46 x_gy_reduced /= norm_noeps
47 x_gy_reduced = F.broadcast_to(x_gy_reduced, gy.shape)
48 gx = gy * norm - x_gy_reduced * x
49 gx = gx / norm ** 2
50
51 return gx,
52
53
54 def normalize(x, eps=1e-5, axis=1):
55 """L2 norm squared (a.k.a.\\ Euclidean norm).
56
57 This function implements L2 normalization on a vector along the given axis.
58 No reduction is done along the normalization axis.
59
60 In the case when :obj:`axis=1` and :math:`\\mathbf{x}` is a matrix of
61 dimension :math:`(N, K)`, where :math:`N` and :math:`K` denote mini-batch
62 size and the dimension of the input vectors, this function computes an
63 output matrix :math:`\\mathbf{y}` of dimension :math:`(N, K)` by the
64 following equation:
65
66 .. math::
67 \\mathbf{y}_i =
68 {\\mathbf{x}_i \\over \\| \\mathbf{x}_i \\|_2 + \\epsilon}
69
70 :obj:`eps` is used to avoid division by zero when norm of
71 :math:`\\mathbf{x}` along the given axis is zero.
72
73 The default value of :obj:`axis` is determined for backward compatibility.
74
75 Args:
76 x (~chainer.Variable): Two dimensional output variable. The first
77 dimension is assumed to be the mini-batch dimension.
78 eps (float): Epsilon value for numerical stability.
79 axis (int or tuple of ints): Axis along which to normalize.
80
81 Returns:
82 ~chainer.Variable: The output variable which has the same shape
83 as :math:`x`.
84
85 """
86 return NormalizeL2(eps, axis).apply((x,))[0]
87
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/chainer/functions/normalization/l2_normalization.py b/chainer/functions/normalization/l2_normalization.py
--- a/chainer/functions/normalization/l2_normalization.py
+++ b/chainer/functions/normalization/l2_normalization.py
@@ -7,6 +7,25 @@
from chainer.utils import type_check
+class _SetItemZero(function_node.FunctionNode):
+
+ """Write values to mask of zero-initialized array"""
+
+ def __init__(self, mask):
+ self.mask = mask
+
+ def forward(self, inputs):
+ x, = inputs
+ xp = cuda.get_array_module(x)
+ y = xp.zeros(self.mask.shape, x.dtype)
+ y[self.mask] = x
+ return y,
+
+ def backward(self, indices, grad_outputs):
+ g, = grad_outputs
+ return g[self.mask],
+
+
class NormalizeL2(function_node.FunctionNode):
"""L2 normalization"""
@@ -43,7 +62,14 @@
norm = F.broadcast_to(norm, gy.shape)
x_gy_reduced = F.sum((x * gy), axis=self.axis, keepdims=True)
- x_gy_reduced /= norm_noeps
+
+ # L2 normalize with eps has continuous backward. However,
+ # the backward is not differentiable for the indices of zero vectors.
+ # To avoid nan in double backward, do not compute outside of mask.
+ mask = norm_noeps.array != 0
+ x_gy_reduced, = _SetItemZero(mask).apply((
+ x_gy_reduced[mask] / norm_noeps[mask],))
+
x_gy_reduced = F.broadcast_to(x_gy_reduced, gy.shape)
gx = gy * norm - x_gy_reduced * x
gx = gx / norm ** 2
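The patch above avoids the 0/0 by dividing only where `norm_noeps` is nonzero and writing the quotients back into a zero-initialized array via the new `_SetItemZero` node. A standalone NumPy sketch of that masked-division trick, without the autograd bookkeeping (the array contents are hypothetical, and a plain row sum stands in for the real `sum(x * gy)` term):

```python
import numpy as np

# Sketch: avoid 0/0 by dividing only under a nonzero mask, then writing
# the results into a zero-initialized array (what _SetItemZero does above,
# minus the backward pass).

x = np.array([[3.0, 4.0], [0.0, 0.0]])         # second row is a zero vector
norm_noeps = np.sqrt((x ** 2).sum(axis=1, keepdims=True))
numer = x.sum(axis=1, keepdims=True)           # stand-in for sum(x * gy)

mask = norm_noeps != 0
quotient = np.zeros_like(numer)
quotient[mask] = numer[mask] / norm_noeps[mask]  # no NaN for the zero row

assert not np.isnan(quotient).any()
```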
| {"golden_diff": "diff --git a/chainer/functions/normalization/l2_normalization.py b/chainer/functions/normalization/l2_normalization.py\n--- a/chainer/functions/normalization/l2_normalization.py\n+++ b/chainer/functions/normalization/l2_normalization.py\n@@ -7,6 +7,25 @@\n from chainer.utils import type_check\n \n \n+class _SetItemZero(function_node.FunctionNode):\n+\n+ \"\"\"Write values to mask of zero-initialized array\"\"\"\n+\n+ def __init__(self, mask):\n+ self.mask = mask\n+\n+ def forward(self, inputs):\n+ x, = inputs\n+ xp = cuda.get_array_module(x)\n+ y = xp.zeros(self.mask.shape, x.dtype)\n+ y[self.mask] = x\n+ return y,\n+\n+ def backward(self, indices, grad_outputs):\n+ g, = grad_outputs\n+ return g[self.mask],\n+\n+\n class NormalizeL2(function_node.FunctionNode):\n \n \"\"\"L2 normalization\"\"\"\n@@ -43,7 +62,14 @@\n norm = F.broadcast_to(norm, gy.shape)\n \n x_gy_reduced = F.sum((x * gy), axis=self.axis, keepdims=True)\n- x_gy_reduced /= norm_noeps\n+\n+ # L2 normalize with eps has continuous backward. However,\n+ # the backward is not differentiable for the indices of zero vectors.\n+ # To avoid nan in double backward, do not compute outside of mask.\n+ mask = norm_noeps.array != 0\n+ x_gy_reduced, = _SetItemZero(mask).apply((\n+ x_gy_reduced[mask] / norm_noeps[mask],))\n+\n x_gy_reduced = F.broadcast_to(x_gy_reduced, gy.shape)\n gx = gy * norm - x_gy_reduced * x\n gx = gx / norm ** 2\n", "issue": "backward of F.normalize is not stable\n`NormalizeL2.backward` computes 0/0 if the input contains a zero vector. PR #4190, I wrote, caused this. Sorry.\r\n\r\nTo begin with, x/(||x|| + eps) is C^1 but not C^2 (at x=0). The correct backward might not be a good choice.\n", "before_files": [{"content": "import numpy\n\nfrom chainer.backends import cuda\nfrom chainer import function_node\nimport chainer.functions\nfrom chainer import utils\nfrom chainer.utils import type_check\n\n\nclass NormalizeL2(function_node.FunctionNode):\n\n \"\"\"L2 normalization\"\"\"\n\n def __init__(self, eps=1e-5, axis=1):\n self.eps = eps\n if isinstance(axis, int):\n axis = axis,\n self.axis = axis\n\n def check_type_forward(self, in_types):\n type_check.expect(in_types.size() == 1)\n x_type, = in_types\n\n type_check.expect(\n x_type.dtype == numpy.float32,\n )\n\n def forward(self, inputs):\n self.retain_inputs((0,))\n x, = inputs\n xp = cuda.get_array_module(x)\n norm = (xp.sqrt(xp.sum(xp.square(x), axis=self.axis, keepdims=True))\n + x.dtype.type(self.eps))\n return utils.force_array(x / norm),\n\n def backward(self, indexes, grad_outputs):\n x, = self.get_retained_inputs()\n gy, = grad_outputs\n F = chainer.functions\n\n norm_noeps = F.sqrt(F.sum(F.square(x), axis=self.axis, keepdims=True))\n norm = norm_noeps + self.eps\n norm = F.broadcast_to(norm, gy.shape)\n\n x_gy_reduced = F.sum((x * gy), axis=self.axis, keepdims=True)\n x_gy_reduced /= norm_noeps\n x_gy_reduced = F.broadcast_to(x_gy_reduced, gy.shape)\n gx = gy * norm - x_gy_reduced * x\n gx = gx / norm ** 2\n\n return gx,\n\n\ndef normalize(x, eps=1e-5, axis=1):\n \"\"\"L2 norm squared (a.k.a.\\\\ Euclidean norm).\n\n This function implements L2 normalization on a vector along the given axis.\n No reduction is done along the normalization axis.\n\n In the case when :obj:`axis=1` and :math:`\\\\mathbf{x}` is a matrix of\n dimension :math:`(N, K)`, where :math:`N` and :math:`K` denote mini-batch\n size and the dimension of the input vectors, this function computes an\n output matrix :math:`\\\\mathbf{y}` of dimension :math:`(N, K)` by 
the\n following equation:\n\n .. math::\n \\\\mathbf{y}_i =\n {\\\\mathbf{x}_i \\\\over \\\\| \\\\mathbf{x}_i \\\\|_2 + \\\\epsilon}\n\n :obj:`eps` is used to avoid division by zero when norm of\n :math:`\\\\mathbf{x}` along the given axis is zero.\n\n The default value of :obj:`axis` is determined for backward compatibility.\n\n Args:\n x (~chainer.Variable): Two dimensional output variable. The first\n dimension is assumed to be the mini-batch dimension.\n eps (float): Epsilon value for numerical stability.\n axis (int or tuple of ints): Axis along which to normalize.\n\n Returns:\n ~chainer.Variable: The output variable which has the same shape\n as :math:`x`.\n\n \"\"\"\n return NormalizeL2(eps, axis).apply((x,))[0]\n", "path": "chainer/functions/normalization/l2_normalization.py"}], "after_files": [{"content": "import numpy\n\nfrom chainer.backends import cuda\nfrom chainer import function_node\nimport chainer.functions\nfrom chainer import utils\nfrom chainer.utils import type_check\n\n\nclass _SetItemZero(function_node.FunctionNode):\n\n \"\"\"Write values to mask of zero-initialized array\"\"\"\n\n def __init__(self, mask):\n self.mask = mask\n\n def forward(self, inputs):\n x, = inputs\n xp = cuda.get_array_module(x)\n y = xp.zeros(self.mask.shape, x.dtype)\n y[self.mask] = x\n return y,\n\n def backward(self, indices, grad_outputs):\n g, = grad_outputs\n return g[self.mask],\n\n\nclass NormalizeL2(function_node.FunctionNode):\n\n \"\"\"L2 normalization\"\"\"\n\n def __init__(self, eps=1e-5, axis=1):\n self.eps = eps\n if isinstance(axis, int):\n axis = axis,\n self.axis = axis\n\n def check_type_forward(self, in_types):\n type_check.expect(in_types.size() == 1)\n x_type, = in_types\n\n type_check.expect(\n x_type.dtype == numpy.float32,\n )\n\n def forward(self, inputs):\n self.retain_inputs((0,))\n x, = inputs\n xp = cuda.get_array_module(x)\n norm = (xp.sqrt(xp.sum(xp.square(x), axis=self.axis, keepdims=True))\n + x.dtype.type(self.eps))\n return utils.force_array(x / norm),\n\n def backward(self, indexes, grad_outputs):\n x, = self.get_retained_inputs()\n gy, = grad_outputs\n F = chainer.functions\n\n norm_noeps = F.sqrt(F.sum(F.square(x), axis=self.axis, keepdims=True))\n norm = norm_noeps + self.eps\n norm = F.broadcast_to(norm, gy.shape)\n\n x_gy_reduced = F.sum((x * gy), axis=self.axis, keepdims=True)\n\n # L2 normalize with eps has continuous backward. However,\n # the backward is not differentiable for the indices of zero vectors.\n # To avoid nan in double backward, do not compute outside of mask.\n mask = norm_noeps.array != 0\n x_gy_reduced, = _SetItemZero(mask).apply((\n x_gy_reduced[mask] / norm_noeps[mask],))\n\n x_gy_reduced = F.broadcast_to(x_gy_reduced, gy.shape)\n gx = gy * norm - x_gy_reduced * x\n gx = gx / norm ** 2\n\n return gx,\n\n\ndef normalize(x, eps=1e-5, axis=1):\n \"\"\"L2 norm squared (a.k.a.\\\\ Euclidean norm).\n\n This function implements L2 normalization on a vector along the given axis.\n No reduction is done along the normalization axis.\n\n In the case when :obj:`axis=1` and :math:`\\\\mathbf{x}` is a matrix of\n dimension :math:`(N, K)`, where :math:`N` and :math:`K` denote mini-batch\n size and the dimension of the input vectors, this function computes an\n output matrix :math:`\\\\mathbf{y}` of dimension :math:`(N, K)` by the\n following equation:\n\n .. 
math::\n \\\\mathbf{y}_i =\n {\\\\mathbf{x}_i \\\\over \\\\| \\\\mathbf{x}_i \\\\|_2 + \\\\epsilon}\n\n :obj:`eps` is used to avoid division by zero when norm of\n :math:`\\\\mathbf{x}` along the given axis is zero.\n\n The default value of :obj:`axis` is determined for backward compatibility.\n\n Args:\n x (~chainer.Variable): Two dimensional output variable. The first\n dimension is assumed to be the mini-batch dimension.\n eps (float): Epsilon value for numerical stability.\n axis (int or tuple of ints): Axis along which to normalize.\n\n Returns:\n ~chainer.Variable: The output variable which has the same shape\n as :math:`x`.\n\n \"\"\"\n return NormalizeL2(eps, axis).apply((x,))[0]\n", "path": "chainer/functions/normalization/l2_normalization.py"}]} | 1,244 | 413 |
gh_patches_debug_16537 | rasdani/github-patches | git_diff | svthalia__concrexit-1662 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix adding avatar through api v2
### Describe the bug
In api v1 the avatar can be set through `api/v1/members/me` with a multipart patch request with a file labelled `photo`. Api v2 should also allow this, but instead returns 500.
### How to reproduce
Send a request to patch the photo to api v1 and see that it works.
Send the same request to api v2 and see the 500 response.
Note that I have not tried editing anything else through api v2 yet, so it might be that some other fields also don't work.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `website/members/api/v2/serializers/member.py`
Content:
```
1 from rest_framework import serializers
2
3 from members.api.v2.serializers.profile import ProfileSerializer
4 from members.models import Member
5 from members.services import member_achievements, member_societies
6
7
8 class MemberSerializer(serializers.ModelSerializer):
9 def __init__(self, *args, **kwargs):
10 # Don't pass the 'fields' arg up to the superclass
11 detailed = kwargs.pop("detailed", True)
12
13 # Instantiate the superclass normally
14 super().__init__(*args, **kwargs)
15
16 if not detailed:
17 hidden_fields = {"achievements", "societies"}
18 existing = set(self.fields.keys())
19 for field_name in existing & hidden_fields:
20 self.fields.pop(field_name)
21
22 class Meta:
23 model = Member
24 fields = ("pk", "membership_type", "profile", "achievements", "societies")
25
26 membership_type = serializers.SerializerMethodField("_membership_type")
27 profile = ProfileSerializer(
28 fields=(
29 "photo",
30 "display_name",
31 "short_display_name",
32 "programme",
33 "starting_year",
34 "birthday",
35 "website",
36 "profile_description",
37 )
38 )
39 achievements = serializers.SerializerMethodField("_achievements")
40 societies = serializers.SerializerMethodField("_societies")
41
42 def _achievements(self, instance):
43 return member_achievements(instance)
44
45 def _societies(self, instance):
46 return member_societies(instance)
47
48 def _membership_type(self, instance):
49 membership = instance.current_membership
50 if membership:
51 return membership.type
52 return None
53
54 def update(self, instance, validated_data):
55 profile_data = validated_data.pop("profile")
56 instance.profile = self.fields["profile"].update(
57 instance=instance.profile, validated_data=profile_data
58 )
59 return instance
60
61
62 class MemberListSerializer(MemberSerializer):
63 class Meta:
64 model = Member
65 fields = (
66 "pk",
67 "membership_type",
68 "profile",
69 )
70
71
72 class MemberCurrentSerializer(MemberSerializer):
73 class Meta:
74 model = Member
75 fields = ("pk", "membership_type", "profile", "achievements", "societies")
76
77 profile = ProfileSerializer(
78 fields=(
79 "photo",
80 "display_name",
81 "short_display_name",
82 "programme",
83 "starting_year",
84 "birthday",
85 "show_birthday",
86 "website",
87 "profile_description",
88 "address_street",
89 "address_street2",
90 "address_postal_code",
91 "address_city",
92 "address_country",
93 "phone_number",
94 "website",
95 "emergency_contact",
96 "emergency_contact_phone_number",
97 "profile_description",
98 "nickname",
99 "initials",
100 "display_name_preference",
101 "receive_optin",
102 "receive_newsletter",
103 "receive_magazine",
104 "email_gsuite_only",
105 ),
106 force_show_birthday=True,
107 )
108
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/website/members/api/v2/serializers/member.py b/website/members/api/v2/serializers/member.py
--- a/website/members/api/v2/serializers/member.py
+++ b/website/members/api/v2/serializers/member.py
@@ -1,4 +1,5 @@
from rest_framework import serializers
+from rest_framework.exceptions import ValidationError
from members.api.v2.serializers.profile import ProfileSerializer
from members.models import Member
@@ -52,6 +53,9 @@
return None
def update(self, instance, validated_data):
+ if "profile" not in validated_data:
+ raise ValidationError("profile field is missing")
+
profile_data = validated_data.pop("profile")
instance.profile = self.fields["profile"].update(
instance=instance.profile, validated_data=profile_data
| {"golden_diff": "diff --git a/website/members/api/v2/serializers/member.py b/website/members/api/v2/serializers/member.py\n--- a/website/members/api/v2/serializers/member.py\n+++ b/website/members/api/v2/serializers/member.py\n@@ -1,4 +1,5 @@\n from rest_framework import serializers\n+from rest_framework.exceptions import ValidationError\n \n from members.api.v2.serializers.profile import ProfileSerializer\n from members.models import Member\n@@ -52,6 +53,9 @@\n return None\n \n def update(self, instance, validated_data):\n+ if \"profile\" not in validated_data:\n+ raise ValidationError(\"profile field is missing\")\n+\n profile_data = validated_data.pop(\"profile\")\n instance.profile = self.fields[\"profile\"].update(\n instance=instance.profile, validated_data=profile_data\n", "issue": "Fix adding avatar through api v2\n### Describe the bug\r\nIn api v1 the avatar can be set through `api/v1/members/me` with a multipart patch request with a file labelled `photo`. Api v2 should also allow this, but instead return 500.\r\n\r\n### How to reproduce\r\nSend a request to patch the photo to api v1 and see that it works.\r\nSend the same request to api v2 and see the 500 response.\r\n\r\nNote that I have not tried editing anything else through api v2 yet, so it might be that some other fields also don't work.\r\n\n", "before_files": [{"content": "from rest_framework import serializers\n\nfrom members.api.v2.serializers.profile import ProfileSerializer\nfrom members.models import Member\nfrom members.services import member_achievements, member_societies\n\n\nclass MemberSerializer(serializers.ModelSerializer):\n def __init__(self, *args, **kwargs):\n # Don't pass the 'fields' arg up to the superclass\n detailed = kwargs.pop(\"detailed\", True)\n\n # Instantiate the superclass normally\n super().__init__(*args, **kwargs)\n\n if not detailed:\n hidden_fields = {\"achievements\", \"societies\"}\n existing = set(self.fields.keys())\n for field_name in existing & hidden_fields:\n self.fields.pop(field_name)\n\n class Meta:\n model = Member\n fields = (\"pk\", \"membership_type\", \"profile\", \"achievements\", \"societies\")\n\n membership_type = serializers.SerializerMethodField(\"_membership_type\")\n profile = ProfileSerializer(\n fields=(\n \"photo\",\n \"display_name\",\n \"short_display_name\",\n \"programme\",\n \"starting_year\",\n \"birthday\",\n \"website\",\n \"profile_description\",\n )\n )\n achievements = serializers.SerializerMethodField(\"_achievements\")\n societies = serializers.SerializerMethodField(\"_societies\")\n\n def _achievements(self, instance):\n return member_achievements(instance)\n\n def _societies(self, instance):\n return member_societies(instance)\n\n def _membership_type(self, instance):\n membership = instance.current_membership\n if membership:\n return membership.type\n return None\n\n def update(self, instance, validated_data):\n profile_data = validated_data.pop(\"profile\")\n instance.profile = self.fields[\"profile\"].update(\n instance=instance.profile, validated_data=profile_data\n )\n return instance\n\n\nclass MemberListSerializer(MemberSerializer):\n class Meta:\n model = Member\n fields = (\n \"pk\",\n \"membership_type\",\n \"profile\",\n )\n\n\nclass MemberCurrentSerializer(MemberSerializer):\n class Meta:\n model = Member\n fields = (\"pk\", \"membership_type\", \"profile\", \"achievements\", \"societies\")\n\n profile = ProfileSerializer(\n fields=(\n \"photo\",\n \"display_name\",\n \"short_display_name\",\n \"programme\",\n \"starting_year\",\n 
\"birthday\",\n \"show_birthday\",\n \"website\",\n \"profile_description\",\n \"address_street\",\n \"address_street2\",\n \"address_postal_code\",\n \"address_city\",\n \"address_country\",\n \"phone_number\",\n \"website\",\n \"emergency_contact\",\n \"emergency_contact_phone_number\",\n \"profile_description\",\n \"nickname\",\n \"initials\",\n \"display_name_preference\",\n \"receive_optin\",\n \"receive_newsletter\",\n \"receive_magazine\",\n \"email_gsuite_only\",\n ),\n force_show_birthday=True,\n )\n", "path": "website/members/api/v2/serializers/member.py"}], "after_files": [{"content": "from rest_framework import serializers\nfrom rest_framework.exceptions import ValidationError\n\nfrom members.api.v2.serializers.profile import ProfileSerializer\nfrom members.models import Member\nfrom members.services import member_achievements, member_societies\n\n\nclass MemberSerializer(serializers.ModelSerializer):\n def __init__(self, *args, **kwargs):\n # Don't pass the 'fields' arg up to the superclass\n detailed = kwargs.pop(\"detailed\", True)\n\n # Instantiate the superclass normally\n super().__init__(*args, **kwargs)\n\n if not detailed:\n hidden_fields = {\"achievements\", \"societies\"}\n existing = set(self.fields.keys())\n for field_name in existing & hidden_fields:\n self.fields.pop(field_name)\n\n class Meta:\n model = Member\n fields = (\"pk\", \"membership_type\", \"profile\", \"achievements\", \"societies\")\n\n membership_type = serializers.SerializerMethodField(\"_membership_type\")\n profile = ProfileSerializer(\n fields=(\n \"photo\",\n \"display_name\",\n \"short_display_name\",\n \"programme\",\n \"starting_year\",\n \"birthday\",\n \"website\",\n \"profile_description\",\n )\n )\n achievements = serializers.SerializerMethodField(\"_achievements\")\n societies = serializers.SerializerMethodField(\"_societies\")\n\n def _achievements(self, instance):\n return member_achievements(instance)\n\n def _societies(self, instance):\n return member_societies(instance)\n\n def _membership_type(self, instance):\n membership = instance.current_membership\n if membership:\n return membership.type\n return None\n\n def update(self, instance, validated_data):\n if \"profile\" not in validated_data:\n raise ValidationError(\"profile field is missing\")\n\n profile_data = validated_data.pop(\"profile\")\n instance.profile = self.fields[\"profile\"].update(\n instance=instance.profile, validated_data=profile_data\n )\n return instance\n\n\nclass MemberListSerializer(MemberSerializer):\n class Meta:\n model = Member\n fields = (\n \"pk\",\n \"membership_type\",\n \"profile\",\n )\n\n\nclass MemberCurrentSerializer(MemberSerializer):\n class Meta:\n model = Member\n fields = (\"pk\", \"membership_type\", \"profile\", \"achievements\", \"societies\")\n\n profile = ProfileSerializer(\n fields=(\n \"photo\",\n \"display_name\",\n \"short_display_name\",\n \"programme\",\n \"starting_year\",\n \"birthday\",\n \"show_birthday\",\n \"website\",\n \"profile_description\",\n \"address_street\",\n \"address_street2\",\n \"address_postal_code\",\n \"address_city\",\n \"address_country\",\n \"phone_number\",\n \"website\",\n \"emergency_contact\",\n \"emergency_contact_phone_number\",\n \"profile_description\",\n \"nickname\",\n \"initials\",\n \"display_name_preference\",\n \"receive_optin\",\n \"receive_newsletter\",\n \"receive_magazine\",\n \"email_gsuite_only\",\n ),\n force_show_birthday=True,\n )\n", "path": "website/members/api/v2/serializers/member.py"}]} | 1,229 | 183 |
gh_patches_debug_14366 | rasdani/github-patches | git_diff | conan-io__conan-13211 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[bug] Conan build command does not support conanfile.txt as described
### Description
The documentation about [build](https://docs.conan.io/2/reference/commands/build.html) command says:
```
usage: conan build [-h] [-v [V]] [--logger] [--name NAME] [--version VERSION] [--user USER] [--channel CHANNEL] [-of OUTPUT_FOLDER] [-b BUILD] [-r REMOTE | -nr] [-u] [-o OPTIONS_HOST] [-o:b OPTIONS_BUILD] [-o:h OPTIONS_HOST] [-pr PROFILE_HOST] [-pr:b PROFILE_BUILD]
[-pr:h PROFILE_HOST] [-s SETTINGS_HOST] [-s:b SETTINGS_BUILD] [-s:h SETTINGS_HOST] [-c CONF_HOST] [-c:b CONF_BUILD] [-c:h CONF_HOST] [-l LOCKFILE] [--lockfile-partial] [--lockfile-out LOCKFILE_OUT] [--lockfile-packages] [--lockfile-clean]
[path]
Install dependencies and call the build() method.
positional arguments:
path Path to a folder containing a recipe (conanfile.py or conanfile.txt) or to a recipe file. e.g., ./my_project/conanfile.txt.
```
However, `conanfile.txt` is not accepted by the `build` command.
As the documentation is extracted from the command output, it should be fixed in the Conan client first.
### Environment details
* Operating System+version: OSX 13
* Compiler+version: Apple-Clang 14
* Conan version: 2.0.0
* Python version: 3.10
### Steps to reproduce
1. mkdir /tmp/foo && cd /tmp/foo
2. echo "[requires]\nzlib/1.2.13" > conanfile.txt
3. conan build .
4. Or, conan build ./conanfile.txt
### Logs
```
% conan build .
ERROR: Conanfile not found at /private/tmp/foo/conanfile.py
% conan build ./conanfile.txt
ERROR: A conanfile.py is needed, /private/tmp/conantxt/conanfile.txt is not acceptable
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conan/cli/commands/build.py`
Content:
```
1 import os
2
3 from conan.api.output import ConanOutput
4 from conan.cli.command import conan_command
5 from conan.cli.commands import make_abs_path
6 from conan.cli.args import add_lockfile_args, add_common_install_arguments, add_reference_args
7 from conan.internal.conan_app import ConanApp
8 from conan.cli.printers.graph import print_graph_packages, print_graph_basic
9 from conans.client.conanfile.build import run_build_method
10
11
12 @conan_command(group='Creator')
13 def build(conan_api, parser, *args):
14 """
15 Install dependencies and call the build() method.
16 """
17 parser.add_argument("path", nargs="?",
18 help="Path to a folder containing a recipe (conanfile.py "
19 "or conanfile.txt) or to a recipe file. e.g., "
20 "./my_project/conanfile.txt.")
21 add_reference_args(parser)
22 # TODO: Missing --build-require argument and management
23 parser.add_argument("-of", "--output-folder",
24 help='The root output folder for generated and build files')
25 add_common_install_arguments(parser)
26 add_lockfile_args(parser)
27 args = parser.parse_args(*args)
28
29 cwd = os.getcwd()
30 path = conan_api.local.get_conanfile_path(args.path, cwd, py=True)
31 folder = os.path.dirname(path)
32 remotes = conan_api.remotes.list(args.remote) if not args.no_remote else []
33
34 lockfile = conan_api.lockfile.get_lockfile(lockfile=args.lockfile,
35 conanfile_path=path,
36 cwd=cwd,
37 partial=args.lockfile_partial)
38 profile_host, profile_build = conan_api.profiles.get_profiles_from_args(args)
39
40 deps_graph = conan_api.graph.load_graph_consumer(path, args.name, args.version,
41 args.user, args.channel,
42 profile_host, profile_build, lockfile, remotes,
43 args.update)
44 print_graph_basic(deps_graph)
45 deps_graph.report_graph_error()
46 conan_api.graph.analyze_binaries(deps_graph, args.build, remotes=remotes, update=args.update,
47 lockfile=lockfile)
48 print_graph_packages(deps_graph)
49
50 out = ConanOutput()
51 out.title("Installing packages")
52 conan_api.install.install_binaries(deps_graph=deps_graph, remotes=remotes)
53
54 source_folder = folder
55 output_folder = make_abs_path(args.output_folder, cwd) if args.output_folder else None
56 out.title("Finalizing install (deploy, generators)")
57 conan_api.install.install_consumer(deps_graph=deps_graph, source_folder=source_folder,
58 output_folder=output_folder)
59
60 # TODO: Decide API to put this
61 app = ConanApp(conan_api.cache_folder)
62 conanfile = deps_graph.root.conanfile
63 conanfile.folders.set_base_package(conanfile.folders.base_build)
64 run_build_method(conanfile, app.hook_manager)
65
66 lockfile = conan_api.lockfile.update_lockfile(lockfile, deps_graph, args.lockfile_packages,
67 clean=args.lockfile_clean)
68 conan_api.lockfile.save_lockfile(lockfile, args.lockfile_out, source_folder)
69 return deps_graph
70
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conan/cli/commands/build.py b/conan/cli/commands/build.py
--- a/conan/cli/commands/build.py
+++ b/conan/cli/commands/build.py
@@ -15,9 +15,9 @@
Install dependencies and call the build() method.
"""
parser.add_argument("path", nargs="?",
- help="Path to a folder containing a recipe (conanfile.py "
- "or conanfile.txt) or to a recipe file. e.g., "
- "./my_project/conanfile.txt.")
+ help='Path to a python-based recipe file or a folder '
+ 'containing a conanfile.py recipe. conanfile.txt '
+ 'cannot be used with conan build.')
add_reference_args(parser)
# TODO: Missing --build-require argument and management
parser.add_argument("-of", "--output-folder",
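The patch above is help-text only: the `path` argument's description is corrected so it no longer advertises `conanfile.txt` support, matching the command's actual behavior (`get_conanfile_path(..., py=True)` already rejects non-python recipes). A tiny argparse sketch showing the corrected wording in isolation — a hypothetical standalone parser, not Conan's real CLI wiring:

```python
import argparse

# Sketch: the corrected help string in a bare argparse parser.
parser = argparse.ArgumentParser(prog="conan build")
parser.add_argument(
    "path",
    nargs="?",
    help="Path to a python-based recipe file or a folder containing a "
         "conanfile.py recipe. conanfile.txt cannot be used with conan build.",
)

args = parser.parse_args(["./my_project"])
print(args.path)  # -> ./my_project
```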
| {"golden_diff": "diff --git a/conan/cli/commands/build.py b/conan/cli/commands/build.py\n--- a/conan/cli/commands/build.py\n+++ b/conan/cli/commands/build.py\n@@ -15,9 +15,9 @@\n Install dependencies and call the build() method.\n \"\"\"\n parser.add_argument(\"path\", nargs=\"?\",\n- help=\"Path to a folder containing a recipe (conanfile.py \"\n- \"or conanfile.txt) or to a recipe file. e.g., \"\n- \"./my_project/conanfile.txt.\")\n+ help='Path to a python-based recipe file or a folder '\n+ 'containing a conanfile.py recipe. conanfile.txt '\n+ 'cannot be used with conan build.')\n add_reference_args(parser)\n # TODO: Missing --build-require argument and management\n parser.add_argument(\"-of\", \"--output-folder\",\n", "issue": "[bug] Conan build command does not support conanfile.txt as described\n### Description\r\n\r\nThe documentation about [build](https://docs.conan.io/2/reference/commands/build.html) command says:\r\n\r\n```\r\nusage: conan build [-h] [-v [V]] [--logger] [--name NAME] [--version VERSION] [--user USER] [--channel CHANNEL] [-of OUTPUT_FOLDER] [-b BUILD] [-r REMOTE | -nr] [-u] [-o OPTIONS_HOST] [-o:b OPTIONS_BUILD] [-o:h OPTIONS_HOST] [-pr PROFILE_HOST] [-pr:b PROFILE_BUILD]\r\n [-pr:h PROFILE_HOST] [-s SETTINGS_HOST] [-s:b SETTINGS_BUILD] [-s:h SETTINGS_HOST] [-c CONF_HOST] [-c:b CONF_BUILD] [-c:h CONF_HOST] [-l LOCKFILE] [--lockfile-partial] [--lockfile-out LOCKFILE_OUT] [--lockfile-packages] [--lockfile-clean]\r\n [path]\r\n\r\nInstall dependencies and call the build() method.\r\n\r\npositional arguments:\r\n path Path to a folder containing a recipe (conanfile.py or conanfile.txt) or to a recipe file. e.g., ./my_project/conanfile.txt.\r\n```\r\n\r\nHowever, `conanfile.txt` is not acceptable by build command.\r\n\r\nAs the documentation is extracted from the command output, it should be fixed on Conan client first.\r\n\r\n\r\n### Environment details\r\n\r\n* Operating System+version: OSX 13\r\n* Compiler+version: Apple-Clang 14\r\n* Conan version: 2.0.0\r\n* Python version: 3.10\r\n\r\n\r\n### Steps to reproduce\r\n\r\n1. mkdir /tmp/foo && cd /tmp/foo\r\n2. echo \"[requires]\\nzlib/1.2.13\" > conanfile.txt\r\n3. conan build .\r\n4. Or, conan build ./conanfile.txt\r\n\r\n### Logs\r\n\r\n```\r\n% conan build .\r\nERROR: Conanfile not found at /private/tmp/foo/conanfile.py\r\n\r\n% conan build ./conanfile.txt \r\nERROR: A conanfile.py is needed, /private/tmp/conantxt/conanfile.txt is not acceptable\r\n```\n", "before_files": [{"content": "import os\n\nfrom conan.api.output import ConanOutput\nfrom conan.cli.command import conan_command\nfrom conan.cli.commands import make_abs_path\nfrom conan.cli.args import add_lockfile_args, add_common_install_arguments, add_reference_args\nfrom conan.internal.conan_app import ConanApp\nfrom conan.cli.printers.graph import print_graph_packages, print_graph_basic\nfrom conans.client.conanfile.build import run_build_method\n\n\n@conan_command(group='Creator')\ndef build(conan_api, parser, *args):\n \"\"\"\n Install dependencies and call the build() method.\n \"\"\"\n parser.add_argument(\"path\", nargs=\"?\",\n help=\"Path to a folder containing a recipe (conanfile.py \"\n \"or conanfile.txt) or to a recipe file. 
e.g., \"\n \"./my_project/conanfile.txt.\")\n add_reference_args(parser)\n # TODO: Missing --build-require argument and management\n parser.add_argument(\"-of\", \"--output-folder\",\n help='The root output folder for generated and build files')\n add_common_install_arguments(parser)\n add_lockfile_args(parser)\n args = parser.parse_args(*args)\n\n cwd = os.getcwd()\n path = conan_api.local.get_conanfile_path(args.path, cwd, py=True)\n folder = os.path.dirname(path)\n remotes = conan_api.remotes.list(args.remote) if not args.no_remote else []\n\n lockfile = conan_api.lockfile.get_lockfile(lockfile=args.lockfile,\n conanfile_path=path,\n cwd=cwd,\n partial=args.lockfile_partial)\n profile_host, profile_build = conan_api.profiles.get_profiles_from_args(args)\n\n deps_graph = conan_api.graph.load_graph_consumer(path, args.name, args.version,\n args.user, args.channel,\n profile_host, profile_build, lockfile, remotes,\n args.update)\n print_graph_basic(deps_graph)\n deps_graph.report_graph_error()\n conan_api.graph.analyze_binaries(deps_graph, args.build, remotes=remotes, update=args.update,\n lockfile=lockfile)\n print_graph_packages(deps_graph)\n\n out = ConanOutput()\n out.title(\"Installing packages\")\n conan_api.install.install_binaries(deps_graph=deps_graph, remotes=remotes)\n\n source_folder = folder\n output_folder = make_abs_path(args.output_folder, cwd) if args.output_folder else None\n out.title(\"Finalizing install (deploy, generators)\")\n conan_api.install.install_consumer(deps_graph=deps_graph, source_folder=source_folder,\n output_folder=output_folder)\n\n # TODO: Decide API to put this\n app = ConanApp(conan_api.cache_folder)\n conanfile = deps_graph.root.conanfile\n conanfile.folders.set_base_package(conanfile.folders.base_build)\n run_build_method(conanfile, app.hook_manager)\n\n lockfile = conan_api.lockfile.update_lockfile(lockfile, deps_graph, args.lockfile_packages,\n clean=args.lockfile_clean)\n conan_api.lockfile.save_lockfile(lockfile, args.lockfile_out, source_folder)\n return deps_graph\n", "path": "conan/cli/commands/build.py"}], "after_files": [{"content": "import os\n\nfrom conan.api.output import ConanOutput\nfrom conan.cli.command import conan_command\nfrom conan.cli.commands import make_abs_path\nfrom conan.cli.args import add_lockfile_args, add_common_install_arguments, add_reference_args\nfrom conan.internal.conan_app import ConanApp\nfrom conan.cli.printers.graph import print_graph_packages, print_graph_basic\nfrom conans.client.conanfile.build import run_build_method\n\n\n@conan_command(group='Creator')\ndef build(conan_api, parser, *args):\n \"\"\"\n Install dependencies and call the build() method.\n \"\"\"\n parser.add_argument(\"path\", nargs=\"?\",\n help='Path to a python-based recipe file or a folder '\n 'containing a conanfile.py recipe. 
conanfile.txt '\n 'cannot be used with conan build.')\n add_reference_args(parser)\n # TODO: Missing --build-require argument and management\n parser.add_argument(\"-of\", \"--output-folder\",\n help='The root output folder for generated and build files')\n add_common_install_arguments(parser)\n add_lockfile_args(parser)\n args = parser.parse_args(*args)\n\n cwd = os.getcwd()\n path = conan_api.local.get_conanfile_path(args.path, cwd, py=True)\n folder = os.path.dirname(path)\n remotes = conan_api.remotes.list(args.remote) if not args.no_remote else []\n\n lockfile = conan_api.lockfile.get_lockfile(lockfile=args.lockfile,\n conanfile_path=path,\n cwd=cwd,\n partial=args.lockfile_partial)\n profile_host, profile_build = conan_api.profiles.get_profiles_from_args(args)\n\n deps_graph = conan_api.graph.load_graph_consumer(path, args.name, args.version,\n args.user, args.channel,\n profile_host, profile_build, lockfile, remotes,\n args.update)\n print_graph_basic(deps_graph)\n deps_graph.report_graph_error()\n conan_api.graph.analyze_binaries(deps_graph, args.build, remotes=remotes, update=args.update,\n lockfile=lockfile)\n print_graph_packages(deps_graph)\n\n out = ConanOutput()\n out.title(\"Installing packages\")\n conan_api.install.install_binaries(deps_graph=deps_graph, remotes=remotes)\n\n source_folder = folder\n output_folder = make_abs_path(args.output_folder, cwd) if args.output_folder else None\n out.title(\"Finalizing install (deploy, generators)\")\n conan_api.install.install_consumer(deps_graph=deps_graph, source_folder=source_folder,\n output_folder=output_folder)\n\n # TODO: Decide API to put this\n app = ConanApp(conan_api.cache_folder)\n conanfile = deps_graph.root.conanfile\n conanfile.folders.set_base_package(conanfile.folders.base_build)\n run_build_method(conanfile, app.hook_manager)\n\n lockfile = conan_api.lockfile.update_lockfile(lockfile, deps_graph, args.lockfile_packages,\n clean=args.lockfile_clean)\n conan_api.lockfile.save_lockfile(lockfile, args.lockfile_out, source_folder)\n return deps_graph\n", "path": "conan/cli/commands/build.py"}]} | 1,525 | 193 |
gh_patches_debug_2269 | rasdani/github-patches | git_diff | bookwyrm-social__bookwyrm-2501 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AASIN and isfdb not editable
Somehow during the merge some code must have gotten lost, because...
<img width="640" alt="Bildschirmfoto 2022-12-11 um 21 29 47" src="https://user-images.githubusercontent.com/2017105/206927195-f9b27bcc-2f3a-46eb-ab1d-84340e5fa061.png">
--- END ISSUE ---
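For orientation, here is a minimal sketch (not part of the original issue) of the Django `ModelForm` rule this bug runs into: entries in `Meta.widgets` only take effect for fields that are also listed in `Meta.fields`, so a widget mapping alone never makes a field editable. The class name and import path below are illustrative assumptions; the field names follow the issue.

```python
# Hypothetical, trimmed-down form: the widgets for "aasin" and "isfdb"
# are ignored unless those names also appear in Meta.fields.
from django import forms
from bookwyrm import models  # assumed import path

class EditionIdentifierForm(forms.ModelForm):  # illustrative class name
    class Meta:
        model = models.Edition
        fields = ["aasin", "isfdb"]  # omitting these hides the fields entirely
        widgets = {
            "aasin": forms.TextInput(attrs={"aria-describedby": "desc_AASIN"}),
            "isfdb": forms.TextInput(attrs={"aria-describedby": "desc_isfdb"}),
        }
```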
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bookwyrm/forms/books.py`
Content:
```
1 """ using django model forms """
2 from django import forms
3
4 from bookwyrm import models
5 from bookwyrm.models.fields import ClearableFileInputWithWarning
6 from .custom_form import CustomForm
7 from .widgets import ArrayWidget, SelectDateWidget, Select
8
9
10 # pylint: disable=missing-class-docstring
11 class CoverForm(CustomForm):
12 class Meta:
13 model = models.Book
14 fields = ["cover"]
15 help_texts = {f: None for f in fields}
16
17
18 class EditionForm(CustomForm):
19 class Meta:
20 model = models.Edition
21 fields = [
22 "title",
23 "subtitle",
24 "description",
25 "series",
26 "series_number",
27 "languages",
28 "subjects",
29 "publishers",
30 "first_published_date",
31 "published_date",
32 "cover",
33 "physical_format",
34 "physical_format_detail",
35 "pages",
36 "isbn_13",
37 "isbn_10",
38 "openlibrary_key",
39 "inventaire_id",
40 "goodreads_key",
41 "oclc_number",
42 "asin",
43 ]
44 widgets = {
45 "title": forms.TextInput(attrs={"aria-describedby": "desc_title"}),
46 "subtitle": forms.TextInput(attrs={"aria-describedby": "desc_subtitle"}),
47 "description": forms.Textarea(
48 attrs={"aria-describedby": "desc_description"}
49 ),
50 "series": forms.TextInput(attrs={"aria-describedby": "desc_series"}),
51 "series_number": forms.TextInput(
52 attrs={"aria-describedby": "desc_series_number"}
53 ),
54 "subjects": ArrayWidget(),
55 "languages": forms.TextInput(
56 attrs={"aria-describedby": "desc_languages_help desc_languages"}
57 ),
58 "publishers": forms.TextInput(
59 attrs={"aria-describedby": "desc_publishers_help desc_publishers"}
60 ),
61 "first_published_date": SelectDateWidget(
62 attrs={"aria-describedby": "desc_first_published_date"}
63 ),
64 "published_date": SelectDateWidget(
65 attrs={"aria-describedby": "desc_published_date"}
66 ),
67 "cover": ClearableFileInputWithWarning(
68 attrs={"aria-describedby": "desc_cover"}
69 ),
70 "physical_format": Select(
71 attrs={"aria-describedby": "desc_physical_format"}
72 ),
73 "physical_format_detail": forms.TextInput(
74 attrs={"aria-describedby": "desc_physical_format_detail"}
75 ),
76 "pages": forms.NumberInput(attrs={"aria-describedby": "desc_pages"}),
77 "isbn_13": forms.TextInput(attrs={"aria-describedby": "desc_isbn_13"}),
78 "isbn_10": forms.TextInput(attrs={"aria-describedby": "desc_isbn_10"}),
79 "openlibrary_key": forms.TextInput(
80 attrs={"aria-describedby": "desc_openlibrary_key"}
81 ),
82 "inventaire_id": forms.TextInput(
83 attrs={"aria-describedby": "desc_inventaire_id"}
84 ),
85 "goodreads_key": forms.TextInput(
86 attrs={"aria-describedby": "desc_goodreads_key"}
87 ),
88 "oclc_number": forms.TextInput(
89 attrs={"aria-describedby": "desc_oclc_number"}
90 ),
91 "ASIN": forms.TextInput(attrs={"aria-describedby": "desc_ASIN"}),
92 "AASIN": forms.TextInput(attrs={"aria-describedby": "desc_AASIN"}),
93 "isfdb": forms.TextInput(attrs={"aria-describedby": "desc_isfdb"}),
94 }
95
96
97 class EditionFromWorkForm(CustomForm):
98 def __init__(self, *args, **kwargs):
99 super().__init__(*args, **kwargs)
100 # make all fields hidden
101 for visible in self.visible_fields():
102 visible.field.widget = forms.HiddenInput()
103
104 class Meta:
105 model = models.Work
106 fields = [
107 "title",
108 "subtitle",
109 "authors",
110 "description",
111 "languages",
112 "series",
113 "series_number",
114 "subjects",
115 "subject_places",
116 "cover",
117 "first_published_date",
118 ]
119
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bookwyrm/forms/books.py b/bookwyrm/forms/books.py
--- a/bookwyrm/forms/books.py
+++ b/bookwyrm/forms/books.py
@@ -40,6 +40,8 @@
"goodreads_key",
"oclc_number",
"asin",
+ "aasin",
+ "isfdb",
]
widgets = {
"title": forms.TextInput(attrs={"aria-describedby": "desc_title"}),
| {"golden_diff": "diff --git a/bookwyrm/forms/books.py b/bookwyrm/forms/books.py\n--- a/bookwyrm/forms/books.py\n+++ b/bookwyrm/forms/books.py\n@@ -40,6 +40,8 @@\n \"goodreads_key\",\n \"oclc_number\",\n \"asin\",\n+ \"aasin\",\n+ \"isfdb\",\n ]\n widgets = {\n \"title\": forms.TextInput(attrs={\"aria-describedby\": \"desc_title\"}),\n", "issue": "AASIN and isfdb not editable\nSomehow during the merge some code most be gone lost because...\r\n\r\n<img width=\"640\" alt=\"Bildschirm\u00adfoto 2022-12-11 um 21 29 47\" src=\"https://user-images.githubusercontent.com/2017105/206927195-f9b27bcc-2f3a-46eb-ab1d-84340e5fa061.png\">\r\n\n", "before_files": [{"content": "\"\"\" using django model forms \"\"\"\nfrom django import forms\n\nfrom bookwyrm import models\nfrom bookwyrm.models.fields import ClearableFileInputWithWarning\nfrom .custom_form import CustomForm\nfrom .widgets import ArrayWidget, SelectDateWidget, Select\n\n\n# pylint: disable=missing-class-docstring\nclass CoverForm(CustomForm):\n class Meta:\n model = models.Book\n fields = [\"cover\"]\n help_texts = {f: None for f in fields}\n\n\nclass EditionForm(CustomForm):\n class Meta:\n model = models.Edition\n fields = [\n \"title\",\n \"subtitle\",\n \"description\",\n \"series\",\n \"series_number\",\n \"languages\",\n \"subjects\",\n \"publishers\",\n \"first_published_date\",\n \"published_date\",\n \"cover\",\n \"physical_format\",\n \"physical_format_detail\",\n \"pages\",\n \"isbn_13\",\n \"isbn_10\",\n \"openlibrary_key\",\n \"inventaire_id\",\n \"goodreads_key\",\n \"oclc_number\",\n \"asin\",\n ]\n widgets = {\n \"title\": forms.TextInput(attrs={\"aria-describedby\": \"desc_title\"}),\n \"subtitle\": forms.TextInput(attrs={\"aria-describedby\": \"desc_subtitle\"}),\n \"description\": forms.Textarea(\n attrs={\"aria-describedby\": \"desc_description\"}\n ),\n \"series\": forms.TextInput(attrs={\"aria-describedby\": \"desc_series\"}),\n \"series_number\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_series_number\"}\n ),\n \"subjects\": ArrayWidget(),\n \"languages\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_languages_help desc_languages\"}\n ),\n \"publishers\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_publishers_help desc_publishers\"}\n ),\n \"first_published_date\": SelectDateWidget(\n attrs={\"aria-describedby\": \"desc_first_published_date\"}\n ),\n \"published_date\": SelectDateWidget(\n attrs={\"aria-describedby\": \"desc_published_date\"}\n ),\n \"cover\": ClearableFileInputWithWarning(\n attrs={\"aria-describedby\": \"desc_cover\"}\n ),\n \"physical_format\": Select(\n attrs={\"aria-describedby\": \"desc_physical_format\"}\n ),\n \"physical_format_detail\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_physical_format_detail\"}\n ),\n \"pages\": forms.NumberInput(attrs={\"aria-describedby\": \"desc_pages\"}),\n \"isbn_13\": forms.TextInput(attrs={\"aria-describedby\": \"desc_isbn_13\"}),\n \"isbn_10\": forms.TextInput(attrs={\"aria-describedby\": \"desc_isbn_10\"}),\n \"openlibrary_key\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_openlibrary_key\"}\n ),\n \"inventaire_id\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_inventaire_id\"}\n ),\n \"goodreads_key\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_goodreads_key\"}\n ),\n \"oclc_number\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_oclc_number\"}\n ),\n \"ASIN\": forms.TextInput(attrs={\"aria-describedby\": \"desc_ASIN\"}),\n \"AASIN\": 
forms.TextInput(attrs={\"aria-describedby\": \"desc_AASIN\"}),\n \"isfdb\": forms.TextInput(attrs={\"aria-describedby\": \"desc_isfdb\"}),\n }\n\n\nclass EditionFromWorkForm(CustomForm):\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n # make all fields hidden\n for visible in self.visible_fields():\n visible.field.widget = forms.HiddenInput()\n\n class Meta:\n model = models.Work\n fields = [\n \"title\",\n \"subtitle\",\n \"authors\",\n \"description\",\n \"languages\",\n \"series\",\n \"series_number\",\n \"subjects\",\n \"subject_places\",\n \"cover\",\n \"first_published_date\",\n ]\n", "path": "bookwyrm/forms/books.py"}], "after_files": [{"content": "\"\"\" using django model forms \"\"\"\nfrom django import forms\n\nfrom bookwyrm import models\nfrom bookwyrm.models.fields import ClearableFileInputWithWarning\nfrom .custom_form import CustomForm\nfrom .widgets import ArrayWidget, SelectDateWidget, Select\n\n\n# pylint: disable=missing-class-docstring\nclass CoverForm(CustomForm):\n class Meta:\n model = models.Book\n fields = [\"cover\"]\n help_texts = {f: None for f in fields}\n\n\nclass EditionForm(CustomForm):\n class Meta:\n model = models.Edition\n fields = [\n \"title\",\n \"subtitle\",\n \"description\",\n \"series\",\n \"series_number\",\n \"languages\",\n \"subjects\",\n \"publishers\",\n \"first_published_date\",\n \"published_date\",\n \"cover\",\n \"physical_format\",\n \"physical_format_detail\",\n \"pages\",\n \"isbn_13\",\n \"isbn_10\",\n \"openlibrary_key\",\n \"inventaire_id\",\n \"goodreads_key\",\n \"oclc_number\",\n \"asin\",\n \"aasin\",\n \"isfdb\",\n ]\n widgets = {\n \"title\": forms.TextInput(attrs={\"aria-describedby\": \"desc_title\"}),\n \"subtitle\": forms.TextInput(attrs={\"aria-describedby\": \"desc_subtitle\"}),\n \"description\": forms.Textarea(\n attrs={\"aria-describedby\": \"desc_description\"}\n ),\n \"series\": forms.TextInput(attrs={\"aria-describedby\": \"desc_series\"}),\n \"series_number\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_series_number\"}\n ),\n \"subjects\": ArrayWidget(),\n \"languages\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_languages_help desc_languages\"}\n ),\n \"publishers\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_publishers_help desc_publishers\"}\n ),\n \"first_published_date\": SelectDateWidget(\n attrs={\"aria-describedby\": \"desc_first_published_date\"}\n ),\n \"published_date\": SelectDateWidget(\n attrs={\"aria-describedby\": \"desc_published_date\"}\n ),\n \"cover\": ClearableFileInputWithWarning(\n attrs={\"aria-describedby\": \"desc_cover\"}\n ),\n \"physical_format\": Select(\n attrs={\"aria-describedby\": \"desc_physical_format\"}\n ),\n \"physical_format_detail\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_physical_format_detail\"}\n ),\n \"pages\": forms.NumberInput(attrs={\"aria-describedby\": \"desc_pages\"}),\n \"isbn_13\": forms.TextInput(attrs={\"aria-describedby\": \"desc_isbn_13\"}),\n \"isbn_10\": forms.TextInput(attrs={\"aria-describedby\": \"desc_isbn_10\"}),\n \"openlibrary_key\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_openlibrary_key\"}\n ),\n \"inventaire_id\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_inventaire_id\"}\n ),\n \"goodreads_key\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_goodreads_key\"}\n ),\n \"oclc_number\": forms.TextInput(\n attrs={\"aria-describedby\": \"desc_oclc_number\"}\n ),\n \"ASIN\": forms.TextInput(attrs={\"aria-describedby\": 
\"desc_ASIN\"}),\n \"AASIN\": forms.TextInput(attrs={\"aria-describedby\": \"desc_AASIN\"}),\n \"isfdb\": forms.TextInput(attrs={\"aria-describedby\": \"desc_isfdb\"}),\n }\n\n\nclass EditionFromWorkForm(CustomForm):\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n # make all fields hidden\n for visible in self.visible_fields():\n visible.field.widget = forms.HiddenInput()\n\n class Meta:\n model = models.Work\n fields = [\n \"title\",\n \"subtitle\",\n \"authors\",\n \"description\",\n \"languages\",\n \"series\",\n \"series_number\",\n \"subjects\",\n \"subject_places\",\n \"cover\",\n \"first_published_date\",\n ]\n", "path": "bookwyrm/forms/books.py"}]} | 1,452 | 97 |
gh_patches_debug_2626 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-7221 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[CT-1943] Loosen pin on `jsonschema` (via `hologram`)
For more context on our latest thinking around dependencies (how & why we pin today, and how we want it to change):
- https://github.com/dbt-labs/dbt-core/discussions/6495
### Summary
`dbt-core` depends on `hologram`, and as such it also includes `hologram`'s transitive dependencies on `jsonschema` and `python-dateutil`. `hologram`'s upper bound on `jsonschema` in particular is causing issues for some folks trying to install `dbt-core` alongside other popular tools, such as Airflow:
- https://github.com/dbt-labs/hologram/issues/52
- https://github.com/dbt-labs/hologram/pull/51
### Short term
- Try removing upper bound on `jsonschema`
- Release a new version of `hologram` with no / looser upper bound
- Support the new version of `hologram` [in `dbt-core`](https://github.com/dbt-labs/dbt-core/blob/a8abc496323f741d3218d298d5d2bb118fa01017/core/setup.py#L54)
### Medium term
Remove `dbt-core`'s dependency on `hologram` entirely. It doesn't do nearly as much for us today as it used to, and the validation errors it raises aren't even all that nice.
- https://github.com/dbt-labs/dbt-core/issues/6776
--- END ISSUE ---
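As a quick, hedged way to confirm the conflict locally, one can inspect the pins that `hologram` declares in a given environment; `importlib.metadata` ships with Python 3.8+ (on 3.7 the `importlib-metadata` backport provides the same API). The exact pin string printed varies by installed version.

```python
# Print hologram's declared requirement on jsonschema, if any.
from importlib.metadata import requires

for req in requires("hologram") or []:
    if "jsonschema" in req:
        print(req)  # e.g. "jsonschema (>=3.0,<3.2)"; exact bounds vary
```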
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/setup.py`
Content:
```
1 #!/usr/bin/env python
2 import os
3 import sys
4
5 if sys.version_info < (3, 7, 2):
6 print("Error: dbt does not support this version of Python.")
7 print("Please upgrade to Python 3.7.2 or higher.")
8 sys.exit(1)
9
10
11 from setuptools import setup
12
13 try:
14 from setuptools import find_namespace_packages
15 except ImportError:
16 # the user has a downlevel version of setuptools.
17 print("Error: dbt requires setuptools v40.1.0 or higher.")
18 print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again")
19 sys.exit(1)
20
21
22 this_directory = os.path.abspath(os.path.dirname(__file__))
23 with open(os.path.join(this_directory, "README.md")) as f:
24 long_description = f.read()
25
26
27 package_name = "dbt-core"
28 package_version = "1.5.0b4"
29 description = """With dbt, data analysts and engineers can build analytics \
30 the way engineers build applications."""
31
32
33 setup(
34 name=package_name,
35 version=package_version,
36 description=description,
37 long_description=long_description,
38 long_description_content_type="text/markdown",
39 author="dbt Labs",
40 author_email="[email protected]",
41 url="https://github.com/dbt-labs/dbt-core",
42 packages=find_namespace_packages(include=["dbt", "dbt.*"]),
43 include_package_data=True,
44 test_suite="test",
45 entry_points={
46 "console_scripts": ["dbt = dbt.cli.main:cli"],
47 },
48 install_requires=[
49 "Jinja2==3.1.2",
50 "agate>=1.6,<1.7.1",
51 "click>=7.0,<9",
52 "colorama>=0.3.9,<0.4.7",
53 "hologram>=0.0.14,<=0.0.15",
54 "isodate>=0.6,<0.7",
55 "logbook>=1.5,<1.6",
56 "mashumaro[msgpack]==3.3.1",
57 "minimal-snowplow-tracker==0.0.2",
58 "networkx>=2.3,<2.8.1;python_version<'3.8'",
59 "networkx>=2.3,<3;python_version>='3.8'",
60 "packaging>20.9",
61 "sqlparse>=0.2.3,<0.5",
62 "dbt-extractor~=0.4.1",
63 "typing-extensions>=3.7.4",
64 "werkzeug>=1,<3",
65 "pathspec>=0.9,<0.12",
66 "protobuf>=3.18.3",
67 "pytz>=2015.7",
68 # the following are all to match snowflake-connector-python
69 "requests<3.0.0",
70 "idna>=2.5,<4",
71 "cffi>=1.9,<2.0.0",
72 "pyyaml>=6.0",
73 ],
74 zip_safe=False,
75 classifiers=[
76 "Development Status :: 5 - Production/Stable",
77 "License :: OSI Approved :: Apache Software License",
78 "Operating System :: Microsoft :: Windows",
79 "Operating System :: MacOS :: MacOS X",
80 "Operating System :: POSIX :: Linux",
81 "Programming Language :: Python :: 3.7",
82 "Programming Language :: Python :: 3.8",
83 "Programming Language :: Python :: 3.9",
84 "Programming Language :: Python :: 3.10",
85 "Programming Language :: Python :: 3.11",
86 ],
87 python_requires=">=3.7.2",
88 )
89
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/core/setup.py b/core/setup.py
--- a/core/setup.py
+++ b/core/setup.py
@@ -50,7 +50,7 @@
"agate>=1.6,<1.7.1",
"click>=7.0,<9",
"colorama>=0.3.9,<0.4.7",
- "hologram>=0.0.14,<=0.0.15",
+ "hologram>=0.0.14,<=0.0.16",
"isodate>=0.6,<0.7",
"logbook>=1.5,<1.6",
"mashumaro[msgpack]==3.3.1",
| {"golden_diff": "diff --git a/core/setup.py b/core/setup.py\n--- a/core/setup.py\n+++ b/core/setup.py\n@@ -50,7 +50,7 @@\n \"agate>=1.6,<1.7.1\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.7\",\n- \"hologram>=0.0.14,<=0.0.15\",\n+ \"hologram>=0.0.14,<=0.0.16\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro[msgpack]==3.3.1\",\n", "issue": "[CT-1943] Loosen pin on `jsonschema` (via `hologram`)\nFor more context on our latest thinking around dependencies (how & why we pin today, and how we want it to change):\r\n- https://github.com/dbt-labs/dbt-core/discussions/6495\r\n\r\n### Summary\r\n\r\n`dbt-core` depends on `hologram`, and as such it also includes `hologram`'s transitive dependencies on `jsonschema` and `python-dateutil`. `hologram`'s upper bound on `jsonschema` in particular is causing issues for some folks trying to install `dbt-core` alongside other popular tools, such as Airflow:\r\n- https://github.com/dbt-labs/hologram/issues/52\r\n- https://github.com/dbt-labs/hologram/pull/51\r\n\r\n### Short term\r\n\r\n- Try removing upper bound on `jsonschema`\r\n- Release a new version of `hologram` with no / looser upper bound\r\n- Support the new version of `hologram` [in `dbt-core`](https://github.com/dbt-labs/dbt-core/blob/a8abc496323f741d3218d298d5d2bb118fa01017/core/setup.py#L54)\r\n\r\n### Medium term\r\n\r\nRemove `dbt-core`'s dependency on `hologram` entirely. It doesn't do nearly as much for us today as it used to, and the validation errors it raises aren't even all that nice.\r\n- https://github.com/dbt-labs/dbt-core/issues/6776\n", "before_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 7, 2):\n print(\"Error: dbt does not support this version of Python.\")\n print(\"Please upgrade to Python 3.7.2 or higher.\")\n sys.exit(1)\n\n\nfrom setuptools import setup\n\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print(\"Error: dbt requires setuptools v40.1.0 or higher.\")\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n sys.exit(1)\n\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n long_description = f.read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"1.5.0b4\"\ndescription = \"\"\"With dbt, data analysts and engineers can build analytics \\\nthe way engineers build applications.\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"dbt Labs\",\n author_email=\"[email protected]\",\n url=\"https://github.com/dbt-labs/dbt-core\",\n packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n include_package_data=True,\n test_suite=\"test\",\n entry_points={\n \"console_scripts\": [\"dbt = dbt.cli.main:cli\"],\n },\n install_requires=[\n \"Jinja2==3.1.2\",\n \"agate>=1.6,<1.7.1\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.7\",\n \"hologram>=0.0.14,<=0.0.15\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro[msgpack]==3.3.1\",\n \"minimal-snowplow-tracker==0.0.2\",\n \"networkx>=2.3,<2.8.1;python_version<'3.8'\",\n \"networkx>=2.3,<3;python_version>='3.8'\",\n \"packaging>20.9\",\n \"sqlparse>=0.2.3,<0.5\",\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n \"pathspec>=0.9,<0.12\",\n \"protobuf>=3.18.3\",\n 
\"pytz>=2015.7\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n \"cffi>=1.9,<2.0.0\",\n \"pyyaml>=6.0\",\n ],\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: Microsoft :: Windows\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n ],\n python_requires=\">=3.7.2\",\n)\n", "path": "core/setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\nimport os\nimport sys\n\nif sys.version_info < (3, 7, 2):\n print(\"Error: dbt does not support this version of Python.\")\n print(\"Please upgrade to Python 3.7.2 or higher.\")\n sys.exit(1)\n\n\nfrom setuptools import setup\n\ntry:\n from setuptools import find_namespace_packages\nexcept ImportError:\n # the user has a downlevel version of setuptools.\n print(\"Error: dbt requires setuptools v40.1.0 or higher.\")\n print('Please upgrade setuptools with \"pip install --upgrade setuptools\" ' \"and try again\")\n sys.exit(1)\n\n\nthis_directory = os.path.abspath(os.path.dirname(__file__))\nwith open(os.path.join(this_directory, \"README.md\")) as f:\n long_description = f.read()\n\n\npackage_name = \"dbt-core\"\npackage_version = \"1.5.0b4\"\ndescription = \"\"\"With dbt, data analysts and engineers can build analytics \\\nthe way engineers build applications.\"\"\"\n\n\nsetup(\n name=package_name,\n version=package_version,\n description=description,\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n author=\"dbt Labs\",\n author_email=\"[email protected]\",\n url=\"https://github.com/dbt-labs/dbt-core\",\n packages=find_namespace_packages(include=[\"dbt\", \"dbt.*\"]),\n include_package_data=True,\n test_suite=\"test\",\n entry_points={\n \"console_scripts\": [\"dbt = dbt.cli.main:cli\"],\n },\n install_requires=[\n \"Jinja2==3.1.2\",\n \"agate>=1.6,<1.7.1\",\n \"click>=7.0,<9\",\n \"colorama>=0.3.9,<0.4.7\",\n \"hologram>=0.0.14,<=0.0.16\",\n \"isodate>=0.6,<0.7\",\n \"logbook>=1.5,<1.6\",\n \"mashumaro[msgpack]==3.3.1\",\n \"minimal-snowplow-tracker==0.0.2\",\n \"networkx>=2.3,<2.8.1;python_version<'3.8'\",\n \"networkx>=2.3,<3;python_version>='3.8'\",\n \"packaging>20.9\",\n \"sqlparse>=0.2.3,<0.5\",\n \"dbt-extractor~=0.4.1\",\n \"typing-extensions>=3.7.4\",\n \"werkzeug>=1,<3\",\n \"pathspec>=0.9,<0.12\",\n \"protobuf>=3.18.3\",\n \"pytz>=2015.7\",\n # the following are all to match snowflake-connector-python\n \"requests<3.0.0\",\n \"idna>=2.5,<4\",\n \"cffi>=1.9,<2.0.0\",\n \"pyyaml>=6.0\",\n ],\n zip_safe=False,\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: Microsoft :: Windows\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Programming Language :: Python :: 3.11\",\n ],\n python_requires=\">=3.7.2\",\n)\n", "path": "core/setup.py"}]} | 1,596 | 162 |
gh_patches_debug_31646 | rasdani/github-patches | git_diff | lightly-ai__lightly-272 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CO2 regularizer breaks with PyTorch 1.4.0
# CO2 regularizer breaks with PyTorch 1.4.0
The `KLDivLoss` in PyTorch only received the `log_target` flag after 1.4.0, so we need to handle the case where the flag is not available.
--- END ISSUE ---
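For context, the usual backwards-compatibility pattern here is to try constructing the loss with the new flag and fall back when the installed PyTorch predates it; on the fallback path the target must be passed as plain probabilities rather than log-probabilities. The sketch below shows the pattern in isolation and is not the library's confirmed implementation.

```python
import torch

# Construct KLDivLoss with log_target when supported; older PyTorch
# (before the flag existed) raises TypeError on the unknown keyword.
try:
    kl_div = torch.nn.KLDivLoss(reduction="batchmean", log_target=True)
    log_target = True
except TypeError:
    kl_div = torch.nn.KLDivLoss(reduction="batchmean")
    log_target = False

def symmetrized_kl(p, q):
    # p and q are log-probabilities (e.g. log_softmax outputs).
    if log_target:
        return kl_div(p, q) + kl_div(q, p)
    # Without log_target, the target must be plain probabilities.
    return kl_div(p, torch.exp(q)) + kl_div(q, torch.exp(p))
```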
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lightly/loss/regularizer/co2.py`
Content:
```
1 """ CO2 Regularizer """
2
3 # Copyright (c) 2020. Lightly AG and its affiliates.
4 # All Rights Reserved
5
6 import torch
7 from lightly.loss.memory_bank import MemoryBankModule
8
9
10 class CO2Regularizer(MemoryBankModule):
11 """Implementation of the CO2 regularizer [0] for self-supervised learning.
12
13 [0] CO2, 2021, https://arxiv.org/abs/2010.02217
14
15 Attributes:
16 alpha:
17 Weight of the regularization term.
18 t_consistency:
19 Temperature used during softmax calculations.
20 memory_bank_size:
21 Number of negative samples to store in the memory bank.
22 Use 0 to use the second batch for negative samples.
23
24 Examples:
25 >>> # initialize loss function for MoCo
26 >>> loss_fn = NTXentLoss(memory_bank_size=4096)
27 >>>
28 >>> # initialize CO2 regularizer
29 >>> co2 = CO2Regularizer(alpha=1.0, memory_bank_size=4096)
30 >>>
31 >>> # generate two random trasnforms of images
32 >>> t0 = transforms(images)
33 >>> t1 = transforms(images)
34 >>>
35 >>> # feed through the MoCo model
36 >>> out0, out1 = model(t0, t1)
37 >>>
38 >>> # calculate loss and apply regularizer
39 >>> loss = loss_fn(out0, out1) + co2(out0, out1)
40
41 """
42
43 def __init__(self,
44 alpha: float = 1,
45 t_consistency: float = 0.05,
46 memory_bank_size: int = 0):
47
48 super(CO2Regularizer, self).__init__(size=memory_bank_size)
49 self.kl_div = torch.nn.KLDivLoss(reduction='batchmean', log_target=True)
50 self.t_consistency = t_consistency
51 self.alpha = alpha
52
53 def _get_pseudo_labels(self,
54 out0: torch.Tensor,
55 out1: torch.Tensor,
56 negatives: torch.Tensor = None):
57 """Computes the soft pseudo labels across negative samples.
58
59 Args:
60 out0:
61 Output projections of the first set of transformed images (query).
62 Shape: bsz x n_ftrs
63 out1:
64 Output projections of the second set of transformed images (positive sample).
65 Shape: bsz x n_ftrs
66 negatives:
67 Negative samples to compare against. If this is None, the second
68 batch of images will be used as negative samples.
69 Shape: memory_bank_size x n_ftrs
70
71 Returns:
72 Log probability that a positive samples will classify each negative
73 sample as the positive sample.
74 Shape: bsz x (bsz - 1) or bsz x memory_bank_size
75
76 """
77 batch_size, _ = out0.shape
78 if negatives is None:
79 # use second batch as negative samples
80 # l_pos has shape bsz x 1 and l_neg has shape bsz x bsz
81 l_pos = torch.einsum('nc,nc->n', [out0, out1]).unsqueeze(-1)
82 l_neg = torch.einsum('nc,ck->nk', [out0, out1.t()])
83 # remove elements on the diagonal
84 # l_neg has shape bsz x (bsz - 1)
85 l_neg = l_neg.masked_select(
86 ~torch.eye(batch_size, dtype=bool, device=l_neg.device)
87 ).view(batch_size, batch_size - 1)
88 else:
89 # use memory bank as negative samples
90 # l_pos has shape bsz x 1 and l_neg has shape bsz x memory_bank_size
91 negatives = negatives.to(out0.device)
92 l_pos = torch.einsum('nc,nc->n', [out0, out1]).unsqueeze(-1)
93 l_neg = torch.einsum('nc,ck->nk', [out0, negatives.clone().detach()])
94
95 # concatenate such that positive samples are at index 0
96 logits = torch.cat([l_pos, l_neg], dim=1)
97 # divide by temperature
98 logits = logits / self.t_consistency
99
100 # the input to kl_div is expected to be log(p) and we set the
101 # flag log_target to True, so both probabilities should be passed as log
102 log_probs = torch.nn.functional.log_softmax(logits, dim=-1)
103 return log_probs
104
105
106 def forward(self,
107 out0: torch.Tensor,
108 out1: torch.Tensor):
109 """Computes the CO2 regularization term for two model outputs.
110
111 Args:
112 out0:
113 Output projections of the first set of transformed images.
114 out1:
115 Output projections of the second set of transformed images.
116
117 Returns:
118 The regularization term multiplied by the weight factor alpha.
119
120 """
121
122 # normalize the output to length 1
123 out0 = torch.nn.functional.normalize(out0, dim=1)
124 out1 = torch.nn.functional.normalize(out1, dim=1)
125
126 # ask memory bank for negative samples and extend it with out1 if
127 # out1 requires a gradient, otherwise keep the same vectors in the
128 # memory bank (this allows for keeping the memory bank constant e.g.
129 # for evaluating the loss on the test set)
130 # if the memory_bank size is 0, negatives will be None
131 out1, negatives = \
132 super(CO2Regularizer, self).forward(out1, update=True)
133
134 # get log probabilities
135 p = self._get_pseudo_labels(out0, out1, negatives)
136 q = self._get_pseudo_labels(out1, out0, negatives)
137
138 # calculate kullback leibler divergence from log probabilities
139 return self.alpha * 0.5 * (self.kl_div(p, q) + self.kl_div(q, p))
140
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lightly/loss/regularizer/co2.py b/lightly/loss/regularizer/co2.py
--- a/lightly/loss/regularizer/co2.py
+++ b/lightly/loss/regularizer/co2.py
@@ -46,7 +46,19 @@
memory_bank_size: int = 0):
super(CO2Regularizer, self).__init__(size=memory_bank_size)
- self.kl_div = torch.nn.KLDivLoss(reduction='batchmean', log_target=True)
+ # try-catch the KLDivLoss construction for backwards compatability
+ self.log_target = True
+ try:
+ self.kl_div = torch.nn.KLDivLoss(
+ reduction='batchmean',
+ log_target=True
+ )
+ except TypeError:
+ self.log_target = False
+ self.kl_div = torch.nn.KLDivLoss(
+ reduction='batchmean'
+ )
+
self.t_consistency = t_consistency
self.alpha = alpha
@@ -97,10 +109,8 @@
# divide by temperature
logits = logits / self.t_consistency
- # the input to kl_div is expected to be log(p) and we set the
- # flag log_target to True, so both probabilities should be passed as log
- log_probs = torch.nn.functional.log_softmax(logits, dim=-1)
- return log_probs
+ # the input to kl_div is expected to be log(p)
+ return torch.nn.functional.log_softmax(logits, dim=-1)
def forward(self,
@@ -135,5 +145,11 @@
p = self._get_pseudo_labels(out0, out1, negatives)
q = self._get_pseudo_labels(out1, out0, negatives)
- # calculate kullback leibler divergence from log probabilities
- return self.alpha * 0.5 * (self.kl_div(p, q) + self.kl_div(q, p))
+ # calculate symmetrized kullback leibler divergence
+ if self.log_target:
+ div = self.kl_div(p, q) + self.kl_div(q, p)
+ else:
+ # can't use log_target because of early torch version
+ div = self.kl_div(p, torch.exp(q)) + self.kl_div(q, torch.exp(p))
+
+ return self.alpha * 0.5 * div
| {"golden_diff": "diff --git a/lightly/loss/regularizer/co2.py b/lightly/loss/regularizer/co2.py\n--- a/lightly/loss/regularizer/co2.py\n+++ b/lightly/loss/regularizer/co2.py\n@@ -46,7 +46,19 @@\n memory_bank_size: int = 0):\n \n super(CO2Regularizer, self).__init__(size=memory_bank_size)\n- self.kl_div = torch.nn.KLDivLoss(reduction='batchmean', log_target=True)\n+ # try-catch the KLDivLoss construction for backwards compatability\n+ self.log_target = True\n+ try:\n+ self.kl_div = torch.nn.KLDivLoss(\n+ reduction='batchmean',\n+ log_target=True\n+ )\n+ except TypeError:\n+ self.log_target = False\n+ self.kl_div = torch.nn.KLDivLoss(\n+ reduction='batchmean'\n+ )\n+\n self.t_consistency = t_consistency\n self.alpha = alpha\n \n@@ -97,10 +109,8 @@\n # divide by temperature\n logits = logits / self.t_consistency\n \n- # the input to kl_div is expected to be log(p) and we set the\n- # flag log_target to True, so both probabilities should be passed as log\n- log_probs = torch.nn.functional.log_softmax(logits, dim=-1)\n- return log_probs\n+ # the input to kl_div is expected to be log(p) \n+ return torch.nn.functional.log_softmax(logits, dim=-1)\n \n \n def forward(self,\n@@ -135,5 +145,11 @@\n p = self._get_pseudo_labels(out0, out1, negatives)\n q = self._get_pseudo_labels(out1, out0, negatives)\n \n- # calculate kullback leibler divergence from log probabilities\n- return self.alpha * 0.5 * (self.kl_div(p, q) + self.kl_div(q, p))\n+ # calculate symmetrized kullback leibler divergence\n+ if self.log_target:\n+ div = self.kl_div(p, q) + self.kl_div(q, p)\n+ else:\n+ # can't use log_target because of early torch version\n+ div = self.kl_div(p, torch.exp(q)) + self.kl_div(q, torch.exp(p))\n+\n+ return self.alpha * 0.5 * div\n", "issue": "CO2 regularizer breaks with PyTorch 1.4.0\n# CO2 regularizer breaks with PyTorch 1.4.0\r\n\r\nThe `KLDivLoss` in PyTorch only received the `log_target` flag after 1.4.0 so we need to handle the case where the flag is not available.\n", "before_files": [{"content": "\"\"\" CO2 Regularizer \"\"\"\n\n# Copyright (c) 2020. 
Lightly AG and its affiliates.\n# All Rights Reserved\n\nimport torch\nfrom lightly.loss.memory_bank import MemoryBankModule\n\n\nclass CO2Regularizer(MemoryBankModule):\n \"\"\"Implementation of the CO2 regularizer [0] for self-supervised learning.\n\n [0] CO2, 2021, https://arxiv.org/abs/2010.02217\n\n Attributes:\n alpha:\n Weight of the regularization term.\n t_consistency:\n Temperature used during softmax calculations.\n memory_bank_size:\n Number of negative samples to store in the memory bank.\n Use 0 to use the second batch for negative samples.\n\n Examples:\n >>> # initialize loss function for MoCo\n >>> loss_fn = NTXentLoss(memory_bank_size=4096)\n >>>\n >>> # initialize CO2 regularizer\n >>> co2 = CO2Regularizer(alpha=1.0, memory_bank_size=4096)\n >>>\n >>> # generate two random trasnforms of images\n >>> t0 = transforms(images)\n >>> t1 = transforms(images)\n >>>\n >>> # feed through the MoCo model\n >>> out0, out1 = model(t0, t1)\n >>> \n >>> # calculate loss and apply regularizer\n >>> loss = loss_fn(out0, out1) + co2(out0, out1)\n\n \"\"\"\n\n def __init__(self,\n alpha: float = 1,\n t_consistency: float = 0.05,\n memory_bank_size: int = 0):\n\n super(CO2Regularizer, self).__init__(size=memory_bank_size)\n self.kl_div = torch.nn.KLDivLoss(reduction='batchmean', log_target=True)\n self.t_consistency = t_consistency\n self.alpha = alpha\n\n def _get_pseudo_labels(self,\n out0: torch.Tensor,\n out1: torch.Tensor,\n negatives: torch.Tensor = None):\n \"\"\"Computes the soft pseudo labels across negative samples.\n\n Args:\n out0:\n Output projections of the first set of transformed images (query).\n Shape: bsz x n_ftrs\n out1:\n Output projections of the second set of transformed images (positive sample).\n Shape: bsz x n_ftrs\n negatives:\n Negative samples to compare against. 
If this is None, the second\n batch of images will be used as negative samples.\n Shape: memory_bank_size x n_ftrs\n\n Returns:\n Log probability that a positive samples will classify each negative\n sample as the positive sample.\n Shape: bsz x (bsz - 1) or bsz x memory_bank_size\n\n \"\"\"\n batch_size, _ = out0.shape\n if negatives is None:\n # use second batch as negative samples\n # l_pos has shape bsz x 1 and l_neg has shape bsz x bsz\n l_pos = torch.einsum('nc,nc->n', [out0, out1]).unsqueeze(-1)\n l_neg = torch.einsum('nc,ck->nk', [out0, out1.t()])\n # remove elements on the diagonal\n # l_neg has shape bsz x (bsz - 1)\n l_neg = l_neg.masked_select(\n ~torch.eye(batch_size, dtype=bool, device=l_neg.device)\n ).view(batch_size, batch_size - 1)\n else:\n # use memory bank as negative samples\n # l_pos has shape bsz x 1 and l_neg has shape bsz x memory_bank_size\n negatives = negatives.to(out0.device)\n l_pos = torch.einsum('nc,nc->n', [out0, out1]).unsqueeze(-1)\n l_neg = torch.einsum('nc,ck->nk', [out0, negatives.clone().detach()])\n \n # concatenate such that positive samples are at index 0\n logits = torch.cat([l_pos, l_neg], dim=1)\n # divide by temperature\n logits = logits / self.t_consistency\n\n # the input to kl_div is expected to be log(p) and we set the\n # flag log_target to True, so both probabilities should be passed as log\n log_probs = torch.nn.functional.log_softmax(logits, dim=-1)\n return log_probs\n\n\n def forward(self,\n out0: torch.Tensor,\n out1: torch.Tensor):\n \"\"\"Computes the CO2 regularization term for two model outputs.\n\n Args:\n out0:\n Output projections of the first set of transformed images.\n out1:\n Output projections of the second set of transformed images.\n\n Returns:\n The regularization term multiplied by the weight factor alpha.\n\n \"\"\"\n\n # normalize the output to length 1\n out0 = torch.nn.functional.normalize(out0, dim=1)\n out1 = torch.nn.functional.normalize(out1, dim=1)\n\n # ask memory bank for negative samples and extend it with out1 if \n # out1 requires a gradient, otherwise keep the same vectors in the \n # memory bank (this allows for keeping the memory bank constant e.g.\n # for evaluating the loss on the test set)\n # if the memory_bank size is 0, negatives will be None\n out1, negatives = \\\n super(CO2Regularizer, self).forward(out1, update=True)\n \n # get log probabilities\n p = self._get_pseudo_labels(out0, out1, negatives)\n q = self._get_pseudo_labels(out1, out0, negatives)\n \n # calculate kullback leibler divergence from log probabilities\n return self.alpha * 0.5 * (self.kl_div(p, q) + self.kl_div(q, p))\n", "path": "lightly/loss/regularizer/co2.py"}], "after_files": [{"content": "\"\"\" CO2 Regularizer \"\"\"\n\n# Copyright (c) 2020. 
Lightly AG and its affiliates.\n# All Rights Reserved\n\nimport torch\nfrom lightly.loss.memory_bank import MemoryBankModule\n\n\nclass CO2Regularizer(MemoryBankModule):\n \"\"\"Implementation of the CO2 regularizer [0] for self-supervised learning.\n\n [0] CO2, 2021, https://arxiv.org/abs/2010.02217\n\n Attributes:\n alpha:\n Weight of the regularization term.\n t_consistency:\n Temperature used during softmax calculations.\n memory_bank_size:\n Number of negative samples to store in the memory bank.\n Use 0 to use the second batch for negative samples.\n\n Examples:\n >>> # initialize loss function for MoCo\n >>> loss_fn = NTXentLoss(memory_bank_size=4096)\n >>>\n >>> # initialize CO2 regularizer\n >>> co2 = CO2Regularizer(alpha=1.0, memory_bank_size=4096)\n >>>\n >>> # generate two random trasnforms of images\n >>> t0 = transforms(images)\n >>> t1 = transforms(images)\n >>>\n >>> # feed through the MoCo model\n >>> out0, out1 = model(t0, t1)\n >>> \n >>> # calculate loss and apply regularizer\n >>> loss = loss_fn(out0, out1) + co2(out0, out1)\n\n \"\"\"\n\n def __init__(self,\n alpha: float = 1,\n t_consistency: float = 0.05,\n memory_bank_size: int = 0):\n\n super(CO2Regularizer, self).__init__(size=memory_bank_size)\n # try-catch the KLDivLoss construction for backwards compatability\n self.log_target = True\n try:\n self.kl_div = torch.nn.KLDivLoss(\n reduction='batchmean',\n log_target=True\n )\n except TypeError:\n self.log_target = False\n self.kl_div = torch.nn.KLDivLoss(\n reduction='batchmean'\n )\n\n self.t_consistency = t_consistency\n self.alpha = alpha\n\n def _get_pseudo_labels(self,\n out0: torch.Tensor,\n out1: torch.Tensor,\n negatives: torch.Tensor = None):\n \"\"\"Computes the soft pseudo labels across negative samples.\n\n Args:\n out0:\n Output projections of the first set of transformed images (query).\n Shape: bsz x n_ftrs\n out1:\n Output projections of the second set of transformed images (positive sample).\n Shape: bsz x n_ftrs\n negatives:\n Negative samples to compare against. 
If this is None, the second\n batch of images will be used as negative samples.\n Shape: memory_bank_size x n_ftrs\n\n Returns:\n Log probability that a positive samples will classify each negative\n sample as the positive sample.\n Shape: bsz x (bsz - 1) or bsz x memory_bank_size\n\n \"\"\"\n batch_size, _ = out0.shape\n if negatives is None:\n # use second batch as negative samples\n # l_pos has shape bsz x 1 and l_neg has shape bsz x bsz\n l_pos = torch.einsum('nc,nc->n', [out0, out1]).unsqueeze(-1)\n l_neg = torch.einsum('nc,ck->nk', [out0, out1.t()])\n # remove elements on the diagonal\n # l_neg has shape bsz x (bsz - 1)\n l_neg = l_neg.masked_select(\n ~torch.eye(batch_size, dtype=bool, device=l_neg.device)\n ).view(batch_size, batch_size - 1)\n else:\n # use memory bank as negative samples\n # l_pos has shape bsz x 1 and l_neg has shape bsz x memory_bank_size\n negatives = negatives.to(out0.device)\n l_pos = torch.einsum('nc,nc->n', [out0, out1]).unsqueeze(-1)\n l_neg = torch.einsum('nc,ck->nk', [out0, negatives.clone().detach()])\n \n # concatenate such that positive samples are at index 0\n logits = torch.cat([l_pos, l_neg], dim=1)\n # divide by temperature\n logits = logits / self.t_consistency\n\n # the input to kl_div is expected to be log(p) \n return torch.nn.functional.log_softmax(logits, dim=-1)\n\n\n def forward(self,\n out0: torch.Tensor,\n out1: torch.Tensor):\n \"\"\"Computes the CO2 regularization term for two model outputs.\n\n Args:\n out0:\n Output projections of the first set of transformed images.\n out1:\n Output projections of the second set of transformed images.\n\n Returns:\n The regularization term multiplied by the weight factor alpha.\n\n \"\"\"\n\n # normalize the output to length 1\n out0 = torch.nn.functional.normalize(out0, dim=1)\n out1 = torch.nn.functional.normalize(out1, dim=1)\n\n # ask memory bank for negative samples and extend it with out1 if \n # out1 requires a gradient, otherwise keep the same vectors in the \n # memory bank (this allows for keeping the memory bank constant e.g.\n # for evaluating the loss on the test set)\n # if the memory_bank size is 0, negatives will be None\n out1, negatives = \\\n super(CO2Regularizer, self).forward(out1, update=True)\n \n # get log probabilities\n p = self._get_pseudo_labels(out0, out1, negatives)\n q = self._get_pseudo_labels(out1, out0, negatives)\n \n # calculate symmetrized kullback leibler divergence\n if self.log_target:\n div = self.kl_div(p, q) + self.kl_div(q, p)\n else:\n # can't use log_target because of early torch version\n div = self.kl_div(p, torch.exp(q)) + self.kl_div(q, torch.exp(p))\n\n return self.alpha * 0.5 * div\n", "path": "lightly/loss/regularizer/co2.py"}]} | 1,926 | 550 |
gh_patches_debug_12289 | rasdani/github-patches | git_diff | modin-project__modin-794 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pyarrow is a dependency but is not in `install_requires`
### Describe the problem
The offending `pyarrow` import lives at the top of this file: https://github.com/modin-project/modin/blob/master/modin/experimental/engines/pyarrow_on_ray/io.py#L4-L5
### Source code / logs
--- END ISSUE ---
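A common remedy, sketched below under assumed names, is either to add `pyarrow` to `install_requires` or to defer the import so it only runs when the experimental engine is actually used; the function, message, and placement are illustrative rather than modin's confirmed structure.

```python
# Deferred-import sketch: importing modin itself stays pyarrow-free,
# and a clear error surfaces only when the pyarrow engine is requested.
def _import_pyarrow():
    try:
        import pyarrow as pa
        import pyarrow.csv as csv
    except ImportError as err:
        raise ImportError(
            "pyarrow is required for the experimental pyarrow-on-ray "
            "engine; install it with `pip install pyarrow`"
        ) from err
    return pa, csv
```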
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `modin/experimental/engines/pyarrow_on_ray/io.py`
Content:
```
1 from io import BytesIO
2
3 import pandas
4 import pyarrow as pa
5 import pyarrow.csv as csv
6
7 from modin.backends.pyarrow.query_compiler import PyarrowQueryCompiler
8 from modin.data_management.utils import get_default_chunksize
9 from modin.engines.ray.generic.io import RayIO
10 from modin.experimental.engines.pyarrow_on_ray.frame.data import PyarrowOnRayFrame
11 from modin.experimental.engines.pyarrow_on_ray.frame.partition import (
12 PyarrowOnRayFramePartition,
13 )
14 from modin import __execution_engine__
15
16 if __execution_engine__ == "Ray":
17 import ray
18
19 @ray.remote
20 def _read_csv_with_offset_pyarrow_on_ray(
21 fname, num_splits, start, end, kwargs, header
22 ): # pragma: no cover
23 """Use a Ray task to read a chunk of a CSV into a pyarrow Table.
24 Note: Ray functions are not detected by codecov (thus pragma: no cover)
25 Args:
26 fname: The filename of the file to open.
27 num_splits: The number of splits (partitions) to separate the DataFrame into.
28 start: The start byte offset.
29 end: The end byte offset.
30 kwargs: The kwargs for the pyarrow `read_csv` function.
31 header: The header of the file.
32 Returns:
33 A list containing the split pyarrow Tables and the the number of
34 rows of the tables as the last element. This is used to determine
35 the total length of the DataFrame to build a default Index.
36 """
37 bio = open(fname, "rb")
38 # The header line for the CSV file
39 first_line = bio.readline()
40 bio.seek(start)
41 to_read = header + first_line + bio.read(end - start)
42 bio.close()
43 table = csv.read_csv(
44 BytesIO(to_read), parse_options=csv.ParseOptions(header_rows=1)
45 )
46 chunksize = get_default_chunksize(table.num_columns, num_splits)
47 chunks = [
48 pa.Table.from_arrays(table.columns[chunksize * i : chunksize * (i + 1)])
49 for i in range(num_splits)
50 ]
51 return chunks + [
52 table.num_rows,
53 pandas.Series(
54 [t.to_pandas_dtype() for t in table.schema.types],
55 index=table.schema.names,
56 ),
57 ]
58
59
60 class PyarrowOnRayIO(RayIO):
61 frame_cls = PyarrowOnRayFrame
62 frame_partition_cls = PyarrowOnRayFramePartition
63 query_compiler_cls = PyarrowQueryCompiler
64
65 read_parquet_remote_task = None
66 if __execution_engine__ == "Ray":
67 read_csv_remote_task = _read_csv_with_offset_pyarrow_on_ray
68 read_hdf_remote_task = None
69 read_feather_remote_task = None
70 read_sql_remote_task = None
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/modin/experimental/engines/pyarrow_on_ray/io.py b/modin/experimental/engines/pyarrow_on_ray/io.py
--- a/modin/experimental/engines/pyarrow_on_ray/io.py
+++ b/modin/experimental/engines/pyarrow_on_ray/io.py
@@ -1,8 +1,6 @@
from io import BytesIO
import pandas
-import pyarrow as pa
-import pyarrow.csv as csv
from modin.backends.pyarrow.query_compiler import PyarrowQueryCompiler
from modin.data_management.utils import get_default_chunksize
@@ -15,6 +13,8 @@
if __execution_engine__ == "Ray":
import ray
+ import pyarrow as pa
+ import pyarrow.csv as csv
@ray.remote
def _read_csv_with_offset_pyarrow_on_ray(
| {"golden_diff": "diff --git a/modin/experimental/engines/pyarrow_on_ray/io.py b/modin/experimental/engines/pyarrow_on_ray/io.py\n--- a/modin/experimental/engines/pyarrow_on_ray/io.py\n+++ b/modin/experimental/engines/pyarrow_on_ray/io.py\n@@ -1,8 +1,6 @@\n from io import BytesIO\n \n import pandas\n-import pyarrow as pa\n-import pyarrow.csv as csv\n \n from modin.backends.pyarrow.query_compiler import PyarrowQueryCompiler\n from modin.data_management.utils import get_default_chunksize\n@@ -15,6 +13,8 @@\n \n if __execution_engine__ == \"Ray\":\n import ray\n+ import pyarrow as pa\n+ import pyarrow.csv as csv\n \n @ray.remote\n def _read_csv_with_offset_pyarrow_on_ray(\n", "issue": "pyarrow is a dependency but is not in `install_requires`\n\r\n### Describe the problem\r\n<!-- Describe the problem clearly here. -->\r\nThe source comes from this file: https://github.com/modin-project/modin/blob/master/modin/experimental/engines/pyarrow_on_ray/io.py#L4-L5\r\n\r\n### Source code / logs\r\n<!-- Include any logs or source code that would be helpful to diagnose the problem. If including tracebacks, please include the full traceback. Large logs and files should be attached. Try to provide a reproducible test case that is the bare minimum necessary to generate the problem. -->\r\n\n", "before_files": [{"content": "from io import BytesIO\n\nimport pandas\nimport pyarrow as pa\nimport pyarrow.csv as csv\n\nfrom modin.backends.pyarrow.query_compiler import PyarrowQueryCompiler\nfrom modin.data_management.utils import get_default_chunksize\nfrom modin.engines.ray.generic.io import RayIO\nfrom modin.experimental.engines.pyarrow_on_ray.frame.data import PyarrowOnRayFrame\nfrom modin.experimental.engines.pyarrow_on_ray.frame.partition import (\n PyarrowOnRayFramePartition,\n)\nfrom modin import __execution_engine__\n\nif __execution_engine__ == \"Ray\":\n import ray\n\n @ray.remote\n def _read_csv_with_offset_pyarrow_on_ray(\n fname, num_splits, start, end, kwargs, header\n ): # pragma: no cover\n \"\"\"Use a Ray task to read a chunk of a CSV into a pyarrow Table.\n Note: Ray functions are not detected by codecov (thus pragma: no cover)\n Args:\n fname: The filename of the file to open.\n num_splits: The number of splits (partitions) to separate the DataFrame into.\n start: The start byte offset.\n end: The end byte offset.\n kwargs: The kwargs for the pyarrow `read_csv` function.\n header: The header of the file.\n Returns:\n A list containing the split pyarrow Tables and the the number of\n rows of the tables as the last element. 
This is used to determine\n the total length of the DataFrame to build a default Index.\n \"\"\"\n bio = open(fname, \"rb\")\n # The header line for the CSV file\n first_line = bio.readline()\n bio.seek(start)\n to_read = header + first_line + bio.read(end - start)\n bio.close()\n table = csv.read_csv(\n BytesIO(to_read), parse_options=csv.ParseOptions(header_rows=1)\n )\n chunksize = get_default_chunksize(table.num_columns, num_splits)\n chunks = [\n pa.Table.from_arrays(table.columns[chunksize * i : chunksize * (i + 1)])\n for i in range(num_splits)\n ]\n return chunks + [\n table.num_rows,\n pandas.Series(\n [t.to_pandas_dtype() for t in table.schema.types],\n index=table.schema.names,\n ),\n ]\n\n\nclass PyarrowOnRayIO(RayIO):\n frame_cls = PyarrowOnRayFrame\n frame_partition_cls = PyarrowOnRayFramePartition\n query_compiler_cls = PyarrowQueryCompiler\n\n read_parquet_remote_task = None\n if __execution_engine__ == \"Ray\":\n read_csv_remote_task = _read_csv_with_offset_pyarrow_on_ray\n read_hdf_remote_task = None\n read_feather_remote_task = None\n read_sql_remote_task = None\n", "path": "modin/experimental/engines/pyarrow_on_ray/io.py"}], "after_files": [{"content": "from io import BytesIO\n\nimport pandas\n\nfrom modin.backends.pyarrow.query_compiler import PyarrowQueryCompiler\nfrom modin.data_management.utils import get_default_chunksize\nfrom modin.engines.ray.generic.io import RayIO\nfrom modin.experimental.engines.pyarrow_on_ray.frame.data import PyarrowOnRayFrame\nfrom modin.experimental.engines.pyarrow_on_ray.frame.partition import (\n PyarrowOnRayFramePartition,\n)\nfrom modin import __execution_engine__\n\nif __execution_engine__ == \"Ray\":\n import ray\n import pyarrow as pa\n import pyarrow.csv as csv\n\n @ray.remote\n def _read_csv_with_offset_pyarrow_on_ray(\n fname, num_splits, start, end, kwargs, header\n ): # pragma: no cover\n \"\"\"Use a Ray task to read a chunk of a CSV into a pyarrow Table.\n Note: Ray functions are not detected by codecov (thus pragma: no cover)\n Args:\n fname: The filename of the file to open.\n num_splits: The number of splits (partitions) to separate the DataFrame into.\n start: The start byte offset.\n end: The end byte offset.\n kwargs: The kwargs for the pyarrow `read_csv` function.\n header: The header of the file.\n Returns:\n A list containing the split pyarrow Tables and the the number of\n rows of the tables as the last element. 
This is used to determine\n the total length of the DataFrame to build a default Index.\n \"\"\"\n bio = open(fname, \"rb\")\n # The header line for the CSV file\n first_line = bio.readline()\n bio.seek(start)\n to_read = header + first_line + bio.read(end - start)\n bio.close()\n table = csv.read_csv(\n BytesIO(to_read), parse_options=csv.ParseOptions(header_rows=1)\n )\n chunksize = get_default_chunksize(table.num_columns, num_splits)\n chunks = [\n pa.Table.from_arrays(table.columns[chunksize * i : chunksize * (i + 1)])\n for i in range(num_splits)\n ]\n return chunks + [\n table.num_rows,\n pandas.Series(\n [t.to_pandas_dtype() for t in table.schema.types],\n index=table.schema.names,\n ),\n ]\n\n\nclass PyarrowOnRayIO(RayIO):\n frame_cls = PyarrowOnRayFrame\n frame_partition_cls = PyarrowOnRayFramePartition\n query_compiler_cls = PyarrowQueryCompiler\n\n read_parquet_remote_task = None\n if __execution_engine__ == \"Ray\":\n read_csv_remote_task = _read_csv_with_offset_pyarrow_on_ray\n read_hdf_remote_task = None\n read_feather_remote_task = None\n read_sql_remote_task = None\n", "path": "modin/experimental/engines/pyarrow_on_ray/io.py"}]} | 1,126 | 182 |
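The golden diff above resolves the issue by deferring the optional imports until the Ray engine is actually selected. A minimal sketch of that lazy-import pattern, using illustrative names rather than modin's real module layout:

```python
# Gate optional dependencies behind the execution-engine check so that a
# bare `import modin` never needs pyarrow installed. `execution_engine`
# stands in for modin's real configuration value.
execution_engine = "Ray"

if execution_engine == "Ray":
    import pyarrow as pa       # resolved only on the Ray code path
    import pyarrow.csv as csv

    def read_chunk(buf):
        # pyarrow is guaranteed to be importable here
        return csv.read_csv(buf)
```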
gh_patches_debug_64991 | rasdani/github-patches | git_diff | conda-forge__conda-smithy-864 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Autogenerated README.md missing final newline
## The Problem
As I've confirmed on multiple repos here, including our own ``spyder-feedstock`` and ``spyder-kernels-feedstock`` as well as two arbitrary conda-forge repos I checked, the last line in README.md lacks a terminating newline (LF/``0x0A``) and is thus ill-formed. I'd be happy to submit a PR to fix it, since I imagine it is probably pretty trivial, if someone more knowledgeable than me can let me know how to approach it.
## Proposed Solutions
A naive hack would seem to be just writing an additional ``\n`` [here](https://github.com/conda-forge/conda-smithy/blob/855f23bb96efb1cbdbdc5e60dfb9bbdd3e142d31/conda_smithy/configure_feedstock.py#L718), but editing the [template](https://github.com/conda-forge/conda-smithy/blob/master/conda_smithy/templates/README.md.tmpl) would seem to make far more sense. However, the template *has* a trailing newline and hasn't been edited in a while, so I'm not sure what's going on: is it not writing the last one, or is it getting stripped?
Thanks!
--- END ISSUE ---
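One plausible explanation for the stripped newline, assuming conda-smithy renders the template with Jinja2 at default settings (the rendering code is not shown in this record): Jinja2's `keep_trailing_newline` option defaults to `False`, so the template's final newline is dropped no matter what the template file contains. A runnable sketch:

```python
import jinja2

# Default environment: keep_trailing_newline=False
env = jinja2.Environment()
print(repr(env.from_string("line\n").render()))  # 'line' (final \n stripped)

# Opting in preserves the template's trailing newline
env = jinja2.Environment(keep_trailing_newline=True)
print(repr(env.from_string("line\n").render()))  # 'line\n'
```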
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conda_smithy/vendored/__init__.py`
Content:
```
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conda_smithy/vendored/__init__.py b/conda_smithy/vendored/__init__.py
--- a/conda_smithy/vendored/__init__.py
+++ b/conda_smithy/vendored/__init__.py
@@ -0,0 +1 @@
+
| {"golden_diff": "diff --git a/conda_smithy/vendored/__init__.py b/conda_smithy/vendored/__init__.py\n--- a/conda_smithy/vendored/__init__.py\n+++ b/conda_smithy/vendored/__init__.py\n@@ -0,0 +1 @@\n+\n", "issue": "Autogenerated README.md missing final newline\n## The Problem\r\n\r\nAs I've confirmed is the case on multiple repos here, including our own ``spyder-feedstock`` and ``spyder-kernels-feedstock`` as well as two arbitrary conda-forge repos I checked conda-forge, the last line in README.md lacks a terminating newline (LF/``x0D``), and is thus ill-formed. I'd be happy to submit a PR to fix it since I imagine it is probably pretty trivial, if someone more knowlegable than me can let me know how to approach it. \r\n\r\n## Proposed Solutions\r\n\r\nA naive hack would seem to be just writing an additional ``\\n`` [here](https://github.com/conda-forge/conda-smithy/blob/855f23bb96efb1cbdbdc5e60dfb9bbdd3e142d31/conda_smithy/configure_feedstock.py#L718), but editing the [template ](https://github.com/conda-forge/conda-smithy/blob/master/conda_smithy/templates/README.md.tmpl) would seem to make far more sense. However, the template *has* a trailing newline and hasn't been edited in a while, so not sure what's going on\u2014is it not writing the last one; is it getting stripped, or what?\r\n\r\nThanks!\n", "before_files": [{"content": "", "path": "conda_smithy/vendored/__init__.py"}], "after_files": [{"content": "\n", "path": "conda_smithy/vendored/__init__.py"}]} | 558 | 69 |
gh_patches_debug_2591 | rasdani/github-patches | git_diff | cloud-custodian__cloud-custodian-6375 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
The s3 action "remove-statements" errors out when it encounters a bucket policy statement without a sid
**Describe the bug**
The s3 `remove-statements` action fails when a Sid-less bucket policy statement is encountered.
You can see the `KeyError` in the traceback. Bucket policy statements do not require Sids, and S3 omits the key from the GetBucketPolicy response when it is absent.
**To Reproduce**
Attempt to use remove-statements to remove a statement from a bucket whose policy contains a Sid-less statement (one example is the "aws-sam-cli-managed-default-samclisourcebucket-..." buckets created by the AWS SAM CLI).
**Expected behavior**
I expected the statement that does not contain a Sid to be treated as non-matching and skipped.
**Background (please complete the following information):**
- OS: AWS Lambda
- Python Version: Python 3.8
- Custodian Version: 0.9.8
- Tool Version: n/a
- Cloud Provider: AWS
- Policy: [please exclude any account/sensitive information]
```json
{
"statement_ids": [
"denyAccessToBucket"
],
"type": "remove-statements"
}
```
- Traceback: [if applicable, please exclude sensitive/account information]
[ERROR] KeyError: 'Sid'
Traceback (most recent call last):
File "/var/task/custodian_policy.py", line 4, in run
return handler.dispatch_event(event, context)
File "/var/task/c7n/handler.py", line 165, in dispatch_event
p.push(event, context)
File "/var/task/c7n/policy.py", line 1140, in push
return mode.run(event, lambda_ctx)
File "/var/task/c7n/policy.py", line 853, in run
resources = super(ConfigRuleMode, self).run(event, lambda_context)
File "/var/task/c7n/policy.py", line 453, in run
return self.run_resource_set(event, resources)
File "/var/task/c7n/policy.py", line 483, in run_resource_set
results = action.process(resources)
File "/var/task/c7n/resources/s3.py", line 1272, in process
results += filter(None, [f.result()])
File "/var/lang/lib/python3.8/concurrent/futures/_base.py", line 432, in result
return self.__get_result()
File "/var/lang/lib/python3.8/concurrent/futures/_base.py", line 388, in __get_result
raise self._exception
File "/var/lang/lib/python3.8/concurrent/futures/thread.py", line 57, in run
result = self.fn(*self.args, **self.kwargs)
File "/var/task/c7n/resources/s3.py", line 1282, in process_bucket
statements, found = self.process_policy(
File "/var/task/c7n/actions/policy.py", line 21, in process_policy
return remove_statements(
File "/var/task/c7n/actions/policy.py", line 37, in remove_statements
elif s['Sid'] in match_ids:
- `custodian version --debug` output: n/a
**Additional context**
Add any other context about the problem here.
--- END ISSUE ---
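A minimal repro of the failure mode described above, with made-up statements; the tolerant lookup at the end mirrors what a fix needs to do (either an explicit key check or `dict.get`):

```python
statements = [
    {"Effect": "Deny", "Principal": "*"},             # no Sid, which S3 allows
    {"Sid": "denyAccessToBucket", "Effect": "Deny"},
]
match_ids = ["denyAccessToBucket"]

try:
    matched = [s for s in statements if s["Sid"] in match_ids]
except KeyError as exc:
    print(f"KeyError: {exc}")   # the crash shown in the traceback

# Tolerant variant: a missing Sid simply never matches
matched = [s for s in statements if s.get("Sid") in match_ids]
print(matched)  # [{'Sid': 'denyAccessToBucket', 'Effect': 'Deny'}]
```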
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `c7n/actions/policy.py`
Content:
```
1 # Copyright The Cloud Custodian Authors.
2 # SPDX-License-Identifier: Apache-2.0
3
4 from .core import BaseAction
5 from c7n import utils
6
7
8 class RemovePolicyBase(BaseAction):
9
10 schema = utils.type_schema(
11 'remove-statements',
12 required=['statement_ids'],
13 statement_ids={'oneOf': [
14 {'enum': ['matched', "*"]},
15 {'type': 'array', 'items': {'type': 'string'}}]})
16
17 def process_policy(self, policy, resource, matched_key):
18 statements = policy.get('Statement', [])
19 resource_statements = resource.get(matched_key, ())
20
21 return remove_statements(
22 self.data['statement_ids'], statements, resource_statements)
23
24
25 def remove_statements(match_ids, statements, matched=()):
26 found = []
27 for s in list(statements):
28 s_found = False
29 if match_ids == '*':
30 s_found = True
31 elif match_ids == 'matched':
32 if s in matched:
33 s_found = True
34 elif s['Sid'] in match_ids:
35 s_found = True
36 if s_found:
37 found.append(s)
38 statements.remove(s)
39 if not found:
40 return None, found
41 return statements, found
42
43
44 class ModifyPolicyBase(BaseAction):
45 """Action to modify resource IAM policy statements.
46
47 Applies to all resources with embedded IAM Policies.
48
49 :example:
50
51 .. code-block:: yaml
52
53 policies:
54 - name: sns-yank-cross-account
55 resource: sns
56 filters:
57 - type: cross-account
58 actions:
59 - type: modify-policy
60 add-statements: [{
61 "Sid": "ReplaceWithMe",
62 "Effect": "Allow",
63 "Principal": "*",
64 "Action": ["SNS:GetTopicAttributes"],
65 "Resource": topic_arn,
66 }]
67 remove-statements: '*'
68 """
69
70 schema_alias = True
71 schema = utils.type_schema(
72 'modify-policy',
73 **{
74 'add-statements': {
75 'type': 'array',
76 'items': {'$ref': '#/definitions/iam-statement'},
77 },
78 'remove-statements': {
79 'type': ['array', 'string'],
80 'oneOf': [
81 {'enum': ['matched', '*']},
82 {'type': 'array', 'items': {'type': 'string'}}
83 ],
84 }
85 }
86 )
87
88 def __init__(self, data=None, manager=None):
89 if manager is not None:
90 config_args = {
91 'account_id': manager.config.account_id,
92 'region': manager.config.region
93 }
94 self.data = utils.format_string_values(data, **config_args)
95 else:
96 self.data = utils.format_string_values(data)
97 self.manager = manager
98
99 def add_statements(self, policy_statements):
100 current = {s['Sid']: s for s in policy_statements}
101 additional = {s['Sid']: s for s in self.data.get('add-statements', [])}
102 current.update(additional)
103 return list(current.values()), bool(additional)
104
105 def remove_statements(self, policy_statements, resource, matched_key):
106 statement_ids = self.data.get('remove-statements', [])
107 found = []
108 if len(statement_ids) == 0:
109 return policy_statements, found
110 resource_statements = resource.get(matched_key, ())
111 return remove_statements(
112 statement_ids, policy_statements, resource_statements)
113
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/c7n/actions/policy.py b/c7n/actions/policy.py
--- a/c7n/actions/policy.py
+++ b/c7n/actions/policy.py
@@ -31,7 +31,7 @@
elif match_ids == 'matched':
if s in matched:
s_found = True
- elif s['Sid'] in match_ids:
+ elif 'Sid' in s and s['Sid'] in match_ids:
s_found = True
if s_found:
found.append(s)
| {"golden_diff": "diff --git a/c7n/actions/policy.py b/c7n/actions/policy.py\n--- a/c7n/actions/policy.py\n+++ b/c7n/actions/policy.py\n@@ -31,7 +31,7 @@\n elif match_ids == 'matched':\n if s in matched:\n s_found = True\n- elif s['Sid'] in match_ids:\n+ elif 'Sid' in s and s['Sid'] in match_ids:\n s_found = True\n if s_found:\n found.append(s)\n", "issue": "The s3 action \"remove-statements\" errors out when it encounters a bucket policy statement without a sid\n**Describe the bug**\r\ns3.remove-statements fails when a sid-less bucket policy statement is encountered\r\n\r\nYou can see the key error in the traceback. Bucket policy statements do not require Sids and S3 omits the key from describeBucketPolicy response when it does not exist.\r\n\r\n**To Reproduce**\r\nAttempt to use remove-statements to remove a statement from a bucket with a sid-less statement (one example of which is the \"aws-sam-cli-managed-default-samclisourcebucket-...\" buckets created by AWS SAM CLI.)\r\n\r\n**Expected behavior**\r\nI expected the statement which does not contain a SID to be iterated over as non-matching.\r\n\r\n**Background (please complete the following information):**\r\n - OS: AWS Lambda\r\n - Python Version: Python 3.8\r\n - Custodian Version: 0.9.8\r\n - Tool Version: n/a\r\n - Cloud Provider: AWS\r\n - Policy: [please exclude any account/sensitive information]\r\n```json\r\n {\r\n \"statement_ids\": [\r\n \"denyAccessToBucket\"\r\n ],\r\n \"type\": \"remove-statements\"\r\n }\r\n```\r\n - Traceback: [if applicable, please exclude sensitive/account information]\r\n [ERROR] KeyError: 'Sid'\r\nTraceback (most recent call last):\r\n File \"/var/task/custodian_policy.py\", line 4, in run\r\n return handler.dispatch_event(event, context)\r\n File \"/var/task/c7n/handler.py\", line 165, in dispatch_event\r\n p.push(event, context)\r\n File \"/var/task/c7n/policy.py\", line 1140, in push\r\n return mode.run(event, lambda_ctx)\r\n File \"/var/task/c7n/policy.py\", line 853, in run\r\n resources = super(ConfigRuleMode, self).run(event, lambda_context)\r\n File \"/var/task/c7n/policy.py\", line 453, in run\r\n return self.run_resource_set(event, resources)\r\n File \"/var/task/c7n/policy.py\", line 483, in run_resource_set\r\n results = action.process(resources)\r\n File \"/var/task/c7n/resources/s3.py\", line 1272, in process\r\n results += filter(None, [f.result()])\r\n File \"/var/lang/lib/python3.8/concurrent/futures/_base.py\", line 432, in result\r\n return self.__get_result()\r\n File \"/var/lang/lib/python3.8/concurrent/futures/_base.py\", line 388, in __get_result\r\n raise self._exception\r\n File \"/var/lang/lib/python3.8/concurrent/futures/thread.py\", line 57, in run\r\n result = self.fn(*self.args, **self.kwargs)\r\n File \"/var/task/c7n/resources/s3.py\", line 1282, in process_bucket\r\n statements, found = self.process_policy(\r\n File \"/var/task/c7n/actions/policy.py\", line 21, in process_policy\r\n return remove_statements(\r\n File \"/var/task/c7n/actions/policy.py\", line 37, in remove_statements\r\n elif s['Sid'] in match_ids:\r\n - `custodian version --debug` output: n/a\r\n\r\n**Additional context**\r\nAdd any other context about the problem here.\r\n\n", "before_files": [{"content": "# Copyright The Cloud Custodian Authors.\n# SPDX-License-Identifier: Apache-2.0\n\nfrom .core import BaseAction\nfrom c7n import utils\n\n\nclass RemovePolicyBase(BaseAction):\n\n schema = utils.type_schema(\n 'remove-statements',\n required=['statement_ids'],\n statement_ids={'oneOf': [\n {'enum': 
['matched', \"*\"]},\n {'type': 'array', 'items': {'type': 'string'}}]})\n\n def process_policy(self, policy, resource, matched_key):\n statements = policy.get('Statement', [])\n resource_statements = resource.get(matched_key, ())\n\n return remove_statements(\n self.data['statement_ids'], statements, resource_statements)\n\n\ndef remove_statements(match_ids, statements, matched=()):\n found = []\n for s in list(statements):\n s_found = False\n if match_ids == '*':\n s_found = True\n elif match_ids == 'matched':\n if s in matched:\n s_found = True\n elif s['Sid'] in match_ids:\n s_found = True\n if s_found:\n found.append(s)\n statements.remove(s)\n if not found:\n return None, found\n return statements, found\n\n\nclass ModifyPolicyBase(BaseAction):\n \"\"\"Action to modify resource IAM policy statements.\n\n Applies to all resources with embedded IAM Policies.\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: sns-yank-cross-account\n resource: sns\n filters:\n - type: cross-account\n actions:\n - type: modify-policy\n add-statements: [{\n \"Sid\": \"ReplaceWithMe\",\n \"Effect\": \"Allow\",\n \"Principal\": \"*\",\n \"Action\": [\"SNS:GetTopicAttributes\"],\n \"Resource\": topic_arn,\n }]\n remove-statements: '*'\n \"\"\"\n\n schema_alias = True\n schema = utils.type_schema(\n 'modify-policy',\n **{\n 'add-statements': {\n 'type': 'array',\n 'items': {'$ref': '#/definitions/iam-statement'},\n },\n 'remove-statements': {\n 'type': ['array', 'string'],\n 'oneOf': [\n {'enum': ['matched', '*']},\n {'type': 'array', 'items': {'type': 'string'}}\n ],\n }\n }\n )\n\n def __init__(self, data=None, manager=None):\n if manager is not None:\n config_args = {\n 'account_id': manager.config.account_id,\n 'region': manager.config.region\n }\n self.data = utils.format_string_values(data, **config_args)\n else:\n self.data = utils.format_string_values(data)\n self.manager = manager\n\n def add_statements(self, policy_statements):\n current = {s['Sid']: s for s in policy_statements}\n additional = {s['Sid']: s for s in self.data.get('add-statements', [])}\n current.update(additional)\n return list(current.values()), bool(additional)\n\n def remove_statements(self, policy_statements, resource, matched_key):\n statement_ids = self.data.get('remove-statements', [])\n found = []\n if len(statement_ids) == 0:\n return policy_statements, found\n resource_statements = resource.get(matched_key, ())\n return remove_statements(\n statement_ids, policy_statements, resource_statements)\n", "path": "c7n/actions/policy.py"}], "after_files": [{"content": "# Copyright The Cloud Custodian Authors.\n# SPDX-License-Identifier: Apache-2.0\n\nfrom .core import BaseAction\nfrom c7n import utils\n\n\nclass RemovePolicyBase(BaseAction):\n\n schema = utils.type_schema(\n 'remove-statements',\n required=['statement_ids'],\n statement_ids={'oneOf': [\n {'enum': ['matched', \"*\"]},\n {'type': 'array', 'items': {'type': 'string'}}]})\n\n def process_policy(self, policy, resource, matched_key):\n statements = policy.get('Statement', [])\n resource_statements = resource.get(matched_key, ())\n\n return remove_statements(\n self.data['statement_ids'], statements, resource_statements)\n\n\ndef remove_statements(match_ids, statements, matched=()):\n found = []\n for s in list(statements):\n s_found = False\n if match_ids == '*':\n s_found = True\n elif match_ids == 'matched':\n if s in matched:\n s_found = True\n elif 'Sid' in s and s['Sid'] in match_ids:\n s_found = True\n if s_found:\n found.append(s)\n 
statements.remove(s)\n if not found:\n return None, found\n return statements, found\n\n\nclass ModifyPolicyBase(BaseAction):\n \"\"\"Action to modify resource IAM policy statements.\n\n Applies to all resources with embedded IAM Policies.\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: sns-yank-cross-account\n resource: sns\n filters:\n - type: cross-account\n actions:\n - type: modify-policy\n add-statements: [{\n \"Sid\": \"ReplaceWithMe\",\n \"Effect\": \"Allow\",\n \"Principal\": \"*\",\n \"Action\": [\"SNS:GetTopicAttributes\"],\n \"Resource\": topic_arn,\n }]\n remove-statements: '*'\n \"\"\"\n\n schema_alias = True\n schema = utils.type_schema(\n 'modify-policy',\n **{\n 'add-statements': {\n 'type': 'array',\n 'items': {'$ref': '#/definitions/iam-statement'},\n },\n 'remove-statements': {\n 'type': ['array', 'string'],\n 'oneOf': [\n {'enum': ['matched', '*']},\n {'type': 'array', 'items': {'type': 'string'}}\n ],\n }\n }\n )\n\n def __init__(self, data=None, manager=None):\n if manager is not None:\n config_args = {\n 'account_id': manager.config.account_id,\n 'region': manager.config.region\n }\n self.data = utils.format_string_values(data, **config_args)\n else:\n self.data = utils.format_string_values(data)\n self.manager = manager\n\n def add_statements(self, policy_statements):\n current = {s['Sid']: s for s in policy_statements}\n additional = {s['Sid']: s for s in self.data.get('add-statements', [])}\n current.update(additional)\n return list(current.values()), bool(additional)\n\n def remove_statements(self, policy_statements, resource, matched_key):\n statement_ids = self.data.get('remove-statements', [])\n found = []\n if len(statement_ids) == 0:\n return policy_statements, found\n resource_statements = resource.get(matched_key, ())\n return remove_statements(\n statement_ids, policy_statements, resource_statements)\n", "path": "c7n/actions/policy.py"}]} | 1,969 | 116 |
gh_patches_debug_3018 | rasdani/github-patches | git_diff | Mailu__Mailu-958 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Using external smtp relay server for outgoing emails
Hi,
I need to use mailchannels.com to relay all outgoing emails from my Mailu install. This doc describes what I need to change in Postfix:
https://mailchannels.zendesk.com/hc/en-us/articles/200262640-Setting-up-for-Postfix
Is there any way to do this in Mailu?
Thanks,
--- END ISSUE ---
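For context, relaying through a provider like MailChannels boils down to setting Postfix's `relayhost` plus SASL credentials. A hedged sketch of how the `start.py` shown below could wire that from environment variables; `RELAYHOST`, `RELAYUSER`, and `RELAYPASSWORD` are assumed names used for illustration, not documented Mailu settings:

```python
import os

if "RELAYHOST" in os.environ:
    os.system('postconf -e "relayhost = {}"'.format(os.environ["RELAYHOST"]))
    if "RELAYUSER" in os.environ:
        # Postfix expects "relayhost user:password" lines in sasl_passwd
        with open("/etc/postfix/sasl_passwd", "w") as handle:
            handle.write("{} {}:{}\n".format(
                os.environ["RELAYHOST"],
                os.environ["RELAYUSER"],
                os.environ["RELAYPASSWORD"],
            ))
        os.system("postmap /etc/postfix/sasl_passwd")
        os.system('postconf -e "smtp_sasl_auth_enable = yes"')
        # map type ("hash:", "lmdb:", ...) depends on the Postfix build
        os.system('postconf -e "smtp_sasl_password_maps = hash:/etc/postfix/sasl_passwd"')
```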
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `core/postfix/start.py`
Content:
```
1 #!/usr/bin/python3
2
3 import os
4 import glob
5 import shutil
6 import multiprocessing
7 import logging as log
8 import sys
9 from mailustart import resolve, convert
10
11 from podop import run_server
12
13 log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING"))
14
15 def start_podop():
16 os.setuid(100)
17 url = "http://" + os.environ["ADMIN_ADDRESS"] + "/internal/postfix/"
18 # TODO: Remove verbosity setting from Podop?
19 run_server(0, "postfix", "/tmp/podop.socket", [
20 ("transport", "url", url + "transport/§"),
21 ("alias", "url", url + "alias/§"),
22 ("domain", "url", url + "domain/§"),
23 ("mailbox", "url", url + "mailbox/§"),
24 ("senderaccess", "url", url + "sender/access/§"),
25 ("senderlogin", "url", url + "sender/login/§")
26 ])
27
28 # Actual startup script
29 os.environ["FRONT_ADDRESS"] = resolve(os.environ.get("FRONT_ADDRESS", "front"))
30 os.environ["ADMIN_ADDRESS"] = resolve(os.environ.get("ADMIN_ADDRESS", "admin"))
31 os.environ["HOST_ANTISPAM"] = resolve(os.environ.get("HOST_ANTISPAM", "antispam:11332"))
32 os.environ["HOST_LMTP"] = resolve(os.environ.get("HOST_LMTP", "imap:2525"))
33
34 for postfix_file in glob.glob("/conf/*.cf"):
35 convert(postfix_file, os.path.join("/etc/postfix", os.path.basename(postfix_file)))
36
37 if os.path.exists("/overrides/postfix.cf"):
38 for line in open("/overrides/postfix.cf").read().strip().split("\n"):
39 os.system('postconf -e "{}"'.format(line))
40
41 if os.path.exists("/overrides/postfix.master"):
42 for line in open("/overrides/postfix.master").read().strip().split("\n"):
43 os.system('postconf -Me "{}"'.format(line))
44
45 for map_file in glob.glob("/overrides/*.map"):
46 destination = os.path.join("/etc/postfix", os.path.basename(map_file))
47 shutil.copyfile(map_file, destination)
48 os.system("postmap {}".format(destination))
49 os.remove(destination)
50
51 convert("/conf/rsyslog.conf", "/etc/rsyslog.conf")
52
53 # Run Podop and Postfix
54 multiprocessing.Process(target=start_podop).start()
55 if os.path.exists("/var/run/rsyslogd.pid"):
56 os.remove("/var/run/rsyslogd.pid")
57 os.system("/usr/lib/postfix/post-install meta_directory=/etc/postfix create-missing")
58 os.system("/usr/lib/postfix/master &")
59 os.execv("/usr/sbin/rsyslogd", ["rsyslogd", "-n"])
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/core/postfix/start.py b/core/postfix/start.py
--- a/core/postfix/start.py
+++ b/core/postfix/start.py
@@ -48,6 +48,11 @@
os.system("postmap {}".format(destination))
os.remove(destination)
+if "RELAYUSER" in os.environ:
+ path = "/etc/postfix/sasl_passwd"
+ convert("/conf/sasl_passwd", path)
+ os.system("postmap {}".format(path))
+
convert("/conf/rsyslog.conf", "/etc/rsyslog.conf")
# Run Podop and Postfix
| {"golden_diff": "diff --git a/core/postfix/start.py b/core/postfix/start.py\n--- a/core/postfix/start.py\n+++ b/core/postfix/start.py\n@@ -48,6 +48,11 @@\n os.system(\"postmap {}\".format(destination))\n os.remove(destination)\n \n+if \"RELAYUSER\" in os.environ:\n+ path = \"/etc/postfix/sasl_passwd\"\n+ convert(\"/conf/sasl_passwd\", path)\n+ os.system(\"postmap {}\".format(path))\n+\n convert(\"/conf/rsyslog.conf\", \"/etc/rsyslog.conf\")\n \n # Run Podop and Postfix\n", "issue": "Using external smtp relay server for outgoing emails\nHi,\r\n\r\nI need to use mailchannels.com to relay all outgoing emails from my Mailu install. In this doc is what I need to change in Postfix:\r\n\r\nhttps://mailchannels.zendesk.com/hc/en-us/articles/200262640-Setting-up-for-Postfix\r\n\r\nIs there any way to do this in Mailu ?\r\n\r\nThanks,\r\n\n", "before_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport glob\nimport shutil\nimport multiprocessing\nimport logging as log\nimport sys\nfrom mailustart import resolve, convert\n\nfrom podop import run_server\n\nlog.basicConfig(stream=sys.stderr, level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\ndef start_podop():\n os.setuid(100)\n url = \"http://\" + os.environ[\"ADMIN_ADDRESS\"] + \"/internal/postfix/\"\n # TODO: Remove verbosity setting from Podop?\n run_server(0, \"postfix\", \"/tmp/podop.socket\", [\n\t\t(\"transport\", \"url\", url + \"transport/\u00a7\"),\n\t\t(\"alias\", \"url\", url + \"alias/\u00a7\"),\n\t\t(\"domain\", \"url\", url + \"domain/\u00a7\"),\n (\"mailbox\", \"url\", url + \"mailbox/\u00a7\"),\n (\"senderaccess\", \"url\", url + \"sender/access/\u00a7\"),\n (\"senderlogin\", \"url\", url + \"sender/login/\u00a7\")\n ])\n\n# Actual startup script\nos.environ[\"FRONT_ADDRESS\"] = resolve(os.environ.get(\"FRONT_ADDRESS\", \"front\"))\nos.environ[\"ADMIN_ADDRESS\"] = resolve(os.environ.get(\"ADMIN_ADDRESS\", \"admin\"))\nos.environ[\"HOST_ANTISPAM\"] = resolve(os.environ.get(\"HOST_ANTISPAM\", \"antispam:11332\"))\nos.environ[\"HOST_LMTP\"] = resolve(os.environ.get(\"HOST_LMTP\", \"imap:2525\"))\n\nfor postfix_file in glob.glob(\"/conf/*.cf\"):\n convert(postfix_file, os.path.join(\"/etc/postfix\", os.path.basename(postfix_file)))\n\nif os.path.exists(\"/overrides/postfix.cf\"):\n for line in open(\"/overrides/postfix.cf\").read().strip().split(\"\\n\"):\n os.system('postconf -e \"{}\"'.format(line))\n\nif os.path.exists(\"/overrides/postfix.master\"):\n for line in open(\"/overrides/postfix.master\").read().strip().split(\"\\n\"):\n os.system('postconf -Me \"{}\"'.format(line))\n\nfor map_file in glob.glob(\"/overrides/*.map\"):\n destination = os.path.join(\"/etc/postfix\", os.path.basename(map_file))\n shutil.copyfile(map_file, destination)\n os.system(\"postmap {}\".format(destination))\n os.remove(destination)\n\nconvert(\"/conf/rsyslog.conf\", \"/etc/rsyslog.conf\")\n\n# Run Podop and Postfix\nmultiprocessing.Process(target=start_podop).start()\nif os.path.exists(\"/var/run/rsyslogd.pid\"):\n os.remove(\"/var/run/rsyslogd.pid\")\nos.system(\"/usr/lib/postfix/post-install meta_directory=/etc/postfix create-missing\")\nos.system(\"/usr/lib/postfix/master &\")\nos.execv(\"/usr/sbin/rsyslogd\", [\"rsyslogd\", \"-n\"])\n", "path": "core/postfix/start.py"}], "after_files": [{"content": "#!/usr/bin/python3\n\nimport os\nimport glob\nimport shutil\nimport multiprocessing\nimport logging as log\nimport sys\nfrom mailustart import resolve, convert\n\nfrom podop import run_server\n\nlog.basicConfig(stream=sys.stderr, 
level=os.environ.get(\"LOG_LEVEL\", \"WARNING\"))\n\ndef start_podop():\n os.setuid(100)\n url = \"http://\" + os.environ[\"ADMIN_ADDRESS\"] + \"/internal/postfix/\"\n # TODO: Remove verbosity setting from Podop?\n run_server(0, \"postfix\", \"/tmp/podop.socket\", [\n\t\t(\"transport\", \"url\", url + \"transport/\u00a7\"),\n\t\t(\"alias\", \"url\", url + \"alias/\u00a7\"),\n\t\t(\"domain\", \"url\", url + \"domain/\u00a7\"),\n (\"mailbox\", \"url\", url + \"mailbox/\u00a7\"),\n (\"senderaccess\", \"url\", url + \"sender/access/\u00a7\"),\n (\"senderlogin\", \"url\", url + \"sender/login/\u00a7\")\n ])\n\n# Actual startup script\nos.environ[\"FRONT_ADDRESS\"] = resolve(os.environ.get(\"FRONT_ADDRESS\", \"front\"))\nos.environ[\"ADMIN_ADDRESS\"] = resolve(os.environ.get(\"ADMIN_ADDRESS\", \"admin\"))\nos.environ[\"HOST_ANTISPAM\"] = resolve(os.environ.get(\"HOST_ANTISPAM\", \"antispam:11332\"))\nos.environ[\"HOST_LMTP\"] = resolve(os.environ.get(\"HOST_LMTP\", \"imap:2525\"))\n\nfor postfix_file in glob.glob(\"/conf/*.cf\"):\n convert(postfix_file, os.path.join(\"/etc/postfix\", os.path.basename(postfix_file)))\n\nif os.path.exists(\"/overrides/postfix.cf\"):\n for line in open(\"/overrides/postfix.cf\").read().strip().split(\"\\n\"):\n os.system('postconf -e \"{}\"'.format(line))\n\nif os.path.exists(\"/overrides/postfix.master\"):\n for line in open(\"/overrides/postfix.master\").read().strip().split(\"\\n\"):\n os.system('postconf -Me \"{}\"'.format(line))\n\nfor map_file in glob.glob(\"/overrides/*.map\"):\n destination = os.path.join(\"/etc/postfix\", os.path.basename(map_file))\n shutil.copyfile(map_file, destination)\n os.system(\"postmap {}\".format(destination))\n os.remove(destination)\n\nif \"RELAYUSER\" in os.environ:\n path = \"/etc/postfix/sasl_passwd\"\n convert(\"/conf/sasl_passwd\", path)\n os.system(\"postmap {}\".format(path))\n\nconvert(\"/conf/rsyslog.conf\", \"/etc/rsyslog.conf\")\n\n# Run Podop and Postfix\nmultiprocessing.Process(target=start_podop).start()\nif os.path.exists(\"/var/run/rsyslogd.pid\"):\n os.remove(\"/var/run/rsyslogd.pid\")\nos.system(\"/usr/lib/postfix/post-install meta_directory=/etc/postfix create-missing\")\nos.system(\"/usr/lib/postfix/master &\")\nos.execv(\"/usr/sbin/rsyslogd\", [\"rsyslogd\", \"-n\"])\n", "path": "core/postfix/start.py"}]} | 1,061 | 131 |
gh_patches_debug_8744 | rasdani/github-patches | git_diff | mindsdb__mindsdb-1749 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error when importing MindsDB in a Jupyter notebook
**Your Environment**
* Python version: 3.6
* Operating system: Ubuntu
* Mindsdb version: 2.12.2
**Describe the bug**
Importing MindsDB from a Jupyter notebook fails, apparently because command-line argument parsing for the HTTP API runs at import time.
**To Reproduce**
1. Run a new Jupyter notebook
2. Execute a cell with `import mindsdb`
The following error should occur:
```
usage: ipykernel_launcher.py [-h] [--api API] [--config CONFIG] [--verbose] [-v]
ipykernel_launcher.py: error: unrecognized arguments: -f /home/user/.local/share/jupyter/runtime/kernel.json
An exception has occurred, use %tb to see the full traceback.
SystemExit: 2
/env/lib/python3.6/site-packages/IPython/core/interactiveshell.py:3351: UserWarning: To exit: use 'exit', 'quit', or Ctrl-D.
warn("To exit: use 'exit', 'quit', or Ctrl-D.", stacklevel=1)
```
**Expected behavior**
MindsDB should import successfully.
**Additional note**
`import mindsdb_native` works fine.
--- END ISSUE ---
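The traceback arises because `argparse` runs at import time and chokes on the `-f /path/kernel.json` argument that Jupyter passes to every kernel. Two common mitigations, sketched here with no claim to be MindsDB's exact code:

```python
def is_notebook():
    # get_ipython() exists only inside IPython/Jupyter sessions
    try:
        return "IPKernelApp" in get_ipython().config  # noqa: F821
    except NameError:
        return False  # plain Python interpreter

# Alternatively, tolerate unknown arguments instead of calling sys.exit(2):
# args, _unknown = parser.parse_known_args()
```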
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mindsdb/utilities/functions.py`
Content:
```
1 import argparse
2 import datetime
3 import requests
4 from functools import wraps
5
6 from mindsdb.utilities.fs import create_process_mark, delete_process_mark
7
8
9 def args_parse():
10 parser = argparse.ArgumentParser(description='CL argument for mindsdb server')
11 parser.add_argument('--api', type=str, default=None)
12 parser.add_argument('--config', type=str, default=None)
13 parser.add_argument('--verbose', action='store_true')
14 parser.add_argument('--no_studio', action='store_true')
15 parser.add_argument('-v', '--version', action='store_true')
16 parser.add_argument('--ray', action='store_true', default=None)
17 return parser.parse_args()
18
19
20 def cast_row_types(row, field_types):
21 '''
22 '''
23 keys = [x for x in row.keys() if x in field_types]
24 for key in keys:
25 t = field_types[key]
26 if t == 'Timestamp' and isinstance(row[key], (int, float)):
27 timestamp = datetime.datetime.utcfromtimestamp(row[key])
28 row[key] = timestamp.strftime('%Y-%m-%d %H:%M:%S')
29 elif t == 'Date' and isinstance(row[key], (int, float)):
30 timestamp = datetime.datetime.utcfromtimestamp(row[key])
31 row[key] = timestamp.strftime('%Y-%m-%d')
32 elif t == 'Int' and isinstance(row[key], (int, float, str)):
33 try:
34 print(f'cast {row[key]} to {int(row[key])}')
35 row[key] = int(row[key])
36 except Exception:
37 pass
38
39
40 def is_notebook():
41 try:
42 shell = get_ipython().__class__.__name__
43 if shell == 'ZMQInteractiveShell':
44 return True # Jupyter notebook or qtconsole
45 elif shell == 'TerminalInteractiveShell':
46 return False # Terminal running IPython
47 else:
48 return False # Other type (?)
49 except NameError:
50 return False # Probably standard Python interpreter
51
52
53 def mark_process(name):
54 def mark_process_wrapper(func):
55 @wraps(func)
56 def wrapper(*args, **kwargs):
57 mark = create_process_mark(name)
58 try:
59 return func(*args, **kwargs)
60 finally:
61 delete_process_mark(name, mark)
62 return wrapper
63 return mark_process_wrapper
64
65
66 def get_versions_where_predictors_become_obsolete():
67 """ Get list of MindsDB versions in which predictors should be retrained
68 Returns:
69 list of str or False
70 """
71 versions_for_updating_predictors = []
72 try:
73 try:
74 res = requests.get(
75 'https://mindsdb-cloud-public-service-files.s3.us-east-2.amazonaws.com/version_for_updating_predictors.txt',
76 timeout=0.5
77 )
78 except (ConnectionError, requests.exceptions.ConnectionError) as e:
79 print(f'Is no connection. {e}')
80 raise
81 except Exception as e:
82 print(f'Is something wrong with getting version_for_updating_predictors.txt: {e}')
83 raise
84
85 if res.status_code != 200:
86 print(f'Cant get version_for_updating_predictors.txt: returned status code = {res.status_code}')
87 raise
88
89 try:
90 versions_for_updating_predictors = res.text.replace(' \t\r', '').split('\n')
91 except Exception as e:
92 print(f'Cant decode compatible-config.json: {e}')
93 raise
94 except Exception:
95 return False, versions_for_updating_predictors
96
97 versions_for_updating_predictors = [x for x in versions_for_updating_predictors if len(x) > 0]
98 return True, versions_for_updating_predictors
99
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mindsdb/utilities/functions.py b/mindsdb/utilities/functions.py
--- a/mindsdb/utilities/functions.py
+++ b/mindsdb/utilities/functions.py
@@ -39,13 +39,10 @@
def is_notebook():
try:
- shell = get_ipython().__class__.__name__
- if shell == 'ZMQInteractiveShell':
- return True # Jupyter notebook or qtconsole
- elif shell == 'TerminalInteractiveShell':
- return False # Terminal running IPython
+ if 'IPKernelApp' in get_ipython().config:
+ return True
else:
- return False # Other type (?)
+ return False
except NameError:
return False # Probably standard Python interpreter
| {"golden_diff": "diff --git a/mindsdb/utilities/functions.py b/mindsdb/utilities/functions.py\n--- a/mindsdb/utilities/functions.py\n+++ b/mindsdb/utilities/functions.py\n@@ -39,13 +39,10 @@\n \n def is_notebook():\n try:\n- shell = get_ipython().__class__.__name__\n- if shell == 'ZMQInteractiveShell':\n- return True # Jupyter notebook or qtconsole\n- elif shell == 'TerminalInteractiveShell':\n- return False # Terminal running IPython\n+ if 'IPKernelApp' in get_ipython().config:\n+ return True\n else:\n- return False # Other type (?)\n+ return False\n except NameError:\n return False # Probably standard Python interpreter\n", "issue": "Error when importing MindsDB in a Jupyter notebook\n**Your Environment**\r\n\r\n* Python version: 3.6\r\n* Operating system: Ubuntu\r\n* Mindsdb version: 2.12.2\r\n\r\n**Describe the bug**\r\nImporting MindsDB from a Jupyter Notebook fails, apparently because the HTTP API triggers.\r\n\r\n**To Reproduce**\r\n1. Run a new Jupyter notebook\r\n2. Execute a cell with `import mindsdb`\r\n\r\nThe following error should occur:\r\n```usage: ipykernel_launcher.py [-h] [--api API] [--config CONFIG] [--verbose] [-v]\r\nipykernel_launcher.py: error: unrecognized arguments: -f /home/user/.local/share/jupyter/runtime/kernel.json\r\n\r\nAn exception has occurred, use %tb to see the full traceback.\r\nSystemExit: 2\r\n\r\n/env/lib/python3.6/site-packages/IPython/core/interactiveshell.py:3351: UserWarning: To exit: use 'exit', 'quit', or Ctrl-D.\r\n warn(\"To exit: use 'exit', 'quit', or Ctrl-D.\", stacklevel=1)\r\n```\r\n\r\n**Expected behavior**\r\nMindsDB should import successfully.\r\n\r\n**Additional note**\r\n`import mindsdb_native` works fine.\n", "before_files": [{"content": "import argparse\nimport datetime\nimport requests\nfrom functools import wraps\n\nfrom mindsdb.utilities.fs import create_process_mark, delete_process_mark\n\n\ndef args_parse():\n parser = argparse.ArgumentParser(description='CL argument for mindsdb server')\n parser.add_argument('--api', type=str, default=None)\n parser.add_argument('--config', type=str, default=None)\n parser.add_argument('--verbose', action='store_true')\n parser.add_argument('--no_studio', action='store_true')\n parser.add_argument('-v', '--version', action='store_true')\n parser.add_argument('--ray', action='store_true', default=None)\n return parser.parse_args()\n\n\ndef cast_row_types(row, field_types):\n '''\n '''\n keys = [x for x in row.keys() if x in field_types]\n for key in keys:\n t = field_types[key]\n if t == 'Timestamp' and isinstance(row[key], (int, float)):\n timestamp = datetime.datetime.utcfromtimestamp(row[key])\n row[key] = timestamp.strftime('%Y-%m-%d %H:%M:%S')\n elif t == 'Date' and isinstance(row[key], (int, float)):\n timestamp = datetime.datetime.utcfromtimestamp(row[key])\n row[key] = timestamp.strftime('%Y-%m-%d')\n elif t == 'Int' and isinstance(row[key], (int, float, str)):\n try:\n print(f'cast {row[key]} to {int(row[key])}')\n row[key] = int(row[key])\n except Exception:\n pass\n\n\ndef is_notebook():\n try:\n shell = get_ipython().__class__.__name__\n if shell == 'ZMQInteractiveShell':\n return True # Jupyter notebook or qtconsole\n elif shell == 'TerminalInteractiveShell':\n return False # Terminal running IPython\n else:\n return False # Other type (?)\n except NameError:\n return False # Probably standard Python interpreter\n\n\ndef mark_process(name):\n def mark_process_wrapper(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n mark = create_process_mark(name)\n try:\n return 
func(*args, **kwargs)\n finally:\n delete_process_mark(name, mark)\n return wrapper\n return mark_process_wrapper\n\n\ndef get_versions_where_predictors_become_obsolete():\n \"\"\" Get list of MindsDB versions in which predictors should be retrained\n Returns:\n list of str or False\n \"\"\"\n versions_for_updating_predictors = []\n try:\n try:\n res = requests.get(\n 'https://mindsdb-cloud-public-service-files.s3.us-east-2.amazonaws.com/version_for_updating_predictors.txt',\n timeout=0.5\n )\n except (ConnectionError, requests.exceptions.ConnectionError) as e:\n print(f'Is no connection. {e}')\n raise\n except Exception as e:\n print(f'Is something wrong with getting version_for_updating_predictors.txt: {e}')\n raise\n\n if res.status_code != 200:\n print(f'Cant get version_for_updating_predictors.txt: returned status code = {res.status_code}')\n raise\n\n try:\n versions_for_updating_predictors = res.text.replace(' \\t\\r', '').split('\\n')\n except Exception as e:\n print(f'Cant decode compatible-config.json: {e}')\n raise\n except Exception:\n return False, versions_for_updating_predictors\n\n versions_for_updating_predictors = [x for x in versions_for_updating_predictors if len(x) > 0]\n return True, versions_for_updating_predictors\n", "path": "mindsdb/utilities/functions.py"}], "after_files": [{"content": "import argparse\nimport datetime\nimport requests\nfrom functools import wraps\n\nfrom mindsdb.utilities.fs import create_process_mark, delete_process_mark\n\n\ndef args_parse():\n parser = argparse.ArgumentParser(description='CL argument for mindsdb server')\n parser.add_argument('--api', type=str, default=None)\n parser.add_argument('--config', type=str, default=None)\n parser.add_argument('--verbose', action='store_true')\n parser.add_argument('--no_studio', action='store_true')\n parser.add_argument('-v', '--version', action='store_true')\n parser.add_argument('--ray', action='store_true', default=None)\n return parser.parse_args()\n\n\ndef cast_row_types(row, field_types):\n '''\n '''\n keys = [x for x in row.keys() if x in field_types]\n for key in keys:\n t = field_types[key]\n if t == 'Timestamp' and isinstance(row[key], (int, float)):\n timestamp = datetime.datetime.utcfromtimestamp(row[key])\n row[key] = timestamp.strftime('%Y-%m-%d %H:%M:%S')\n elif t == 'Date' and isinstance(row[key], (int, float)):\n timestamp = datetime.datetime.utcfromtimestamp(row[key])\n row[key] = timestamp.strftime('%Y-%m-%d')\n elif t == 'Int' and isinstance(row[key], (int, float, str)):\n try:\n print(f'cast {row[key]} to {int(row[key])}')\n row[key] = int(row[key])\n except Exception:\n pass\n\n\ndef is_notebook():\n try:\n if 'IPKernelApp' in get_ipython().config:\n return True\n else:\n return False\n except NameError:\n return False # Probably standard Python interpreter\n\n\ndef mark_process(name):\n def mark_process_wrapper(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n mark = create_process_mark(name)\n try:\n return func(*args, **kwargs)\n finally:\n delete_process_mark(name, mark)\n return wrapper\n return mark_process_wrapper\n\n\ndef get_versions_where_predictors_become_obsolete():\n \"\"\" Get list of MindsDB versions in which predictors should be retrained\n Returns:\n list of str or False\n \"\"\"\n versions_for_updating_predictors = []\n try:\n try:\n res = requests.get(\n 'https://mindsdb-cloud-public-service-files.s3.us-east-2.amazonaws.com/version_for_updating_predictors.txt',\n timeout=0.5\n )\n except (ConnectionError, requests.exceptions.ConnectionError) as e:\n 
print(f'Is no connection. {e}')\n raise\n except Exception as e:\n print(f'Is something wrong with getting version_for_updating_predictors.txt: {e}')\n raise\n\n if res.status_code != 200:\n print(f'Cant get version_for_updating_predictors.txt: returned status code = {res.status_code}')\n raise\n\n try:\n versions_for_updating_predictors = res.text.replace(' \\t\\r', '').split('\\n')\n except Exception as e:\n print(f'Cant decode compatible-config.json: {e}')\n raise\n except Exception:\n return False, versions_for_updating_predictors\n\n versions_for_updating_predictors = [x for x in versions_for_updating_predictors if len(x) > 0]\n return True, versions_for_updating_predictors\n", "path": "mindsdb/utilities/functions.py"}]} | 1,485 | 173 |
gh_patches_debug_3236 | rasdani/github-patches | git_diff | pre-commit__pre-commit-1092 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Globally disable color?
I cannot find a way to globally disable color in the pre-commit output. Setting only the background color to green without changing the foreground color does not work for my terminal, which uses the following Xt resources (set in the `${HOME}/.Xresources` file):
````properties
Rxvt.background: black
Rxvt.foreground: deepSkyBlue
````
Is there a way? It would be great to respect the https://no-color.org/ `NO_COLOR` environment variable. And, while we are here, maybe also honor the following git config setting:
````ini
[color]
ui = never
````
--- END ISSUE ---
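For reference, the https://no-color.org/ convention is simply: when `NO_COLOR` is present with a non-empty value, emit no color, regardless of other settings. A sketch of how a color decision could honor it (an illustration of the convention, not pre-commit's actual implementation):

```python
import os
import sys

def should_color(setting):
    # NO_COLOR wins over everything when set to a non-empty value
    if os.environ.get("NO_COLOR"):
        return False
    if setting == "always":
        return True
    return setting == "auto" and sys.stdout.isatty()
```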
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/color.py`
Content:
```
1 from __future__ import unicode_literals
2
3 import os
4 import sys
5
6 terminal_supports_color = True
7 if os.name == 'nt': # pragma: no cover (windows)
8 from pre_commit.color_windows import enable_virtual_terminal_processing
9 try:
10 enable_virtual_terminal_processing()
11 except WindowsError:
12 terminal_supports_color = False
13
14 RED = '\033[41m'
15 GREEN = '\033[42m'
16 YELLOW = '\033[43;30m'
17 TURQUOISE = '\033[46;30m'
18 NORMAL = '\033[0m'
19
20
21 class InvalidColorSetting(ValueError):
22 pass
23
24
25 def format_color(text, color, use_color_setting):
26 """Format text with color.
27
28 Args:
29 text - Text to be formatted with color if `use_color`
30 color - The color start string
31 use_color_setting - Whether or not to color
32 """
33 if not use_color_setting:
34 return text
35 else:
36 return '{}{}{}'.format(color, text, NORMAL)
37
38
39 COLOR_CHOICES = ('auto', 'always', 'never')
40
41
42 def use_color(setting):
43 """Choose whether to use color based on the command argument.
44
45 Args:
46 setting - Either `auto`, `always`, or `never`
47 """
48 if setting not in COLOR_CHOICES:
49 raise InvalidColorSetting(setting)
50
51 return (
52 setting == 'always' or
53 (setting == 'auto' and sys.stdout.isatty() and terminal_supports_color)
54 )
55
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pre_commit/color.py b/pre_commit/color.py
--- a/pre_commit/color.py
+++ b/pre_commit/color.py
@@ -48,6 +48,9 @@
if setting not in COLOR_CHOICES:
raise InvalidColorSetting(setting)
+ if os.environ.get('NO_COLOR'):
+ return False
+
return (
setting == 'always' or
(setting == 'auto' and sys.stdout.isatty() and terminal_supports_color)
| {"golden_diff": "diff --git a/pre_commit/color.py b/pre_commit/color.py\n--- a/pre_commit/color.py\n+++ b/pre_commit/color.py\n@@ -48,6 +48,9 @@\n if setting not in COLOR_CHOICES:\n raise InvalidColorSetting(setting)\n \n+ if os.environ.get('NO_COLOR'):\n+ return False\n+\n return (\n setting == 'always' or\n (setting == 'auto' and sys.stdout.isatty() and terminal_supports_color)\n", "issue": "Globally disable color?\nI cannot find the way to globally disable color in the pre-commit output. Setting only the background color to green and not changing the foreground color does not work for my terminal with the following settings in the Xt resources (as set in the `${HOME}/.Xresources` file):\r\n\r\n````properties\r\nRxvt.background: black\r\nRxvt.foreground: deepSkyBlue\r\n````\r\n\r\nIs there a way? It would be great to respect https://no-color.org/ environment variable. And, while we are here, maybe understand the following git config setting:\r\n\r\n````ini\r\n[color]\r\n ui = never\r\n````\r\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport os\nimport sys\n\nterminal_supports_color = True\nif os.name == 'nt': # pragma: no cover (windows)\n from pre_commit.color_windows import enable_virtual_terminal_processing\n try:\n enable_virtual_terminal_processing()\n except WindowsError:\n terminal_supports_color = False\n\nRED = '\\033[41m'\nGREEN = '\\033[42m'\nYELLOW = '\\033[43;30m'\nTURQUOISE = '\\033[46;30m'\nNORMAL = '\\033[0m'\n\n\nclass InvalidColorSetting(ValueError):\n pass\n\n\ndef format_color(text, color, use_color_setting):\n \"\"\"Format text with color.\n\n Args:\n text - Text to be formatted with color if `use_color`\n color - The color start string\n use_color_setting - Whether or not to color\n \"\"\"\n if not use_color_setting:\n return text\n else:\n return '{}{}{}'.format(color, text, NORMAL)\n\n\nCOLOR_CHOICES = ('auto', 'always', 'never')\n\n\ndef use_color(setting):\n \"\"\"Choose whether to use color based on the command argument.\n\n Args:\n setting - Either `auto`, `always`, or `never`\n \"\"\"\n if setting not in COLOR_CHOICES:\n raise InvalidColorSetting(setting)\n\n return (\n setting == 'always' or\n (setting == 'auto' and sys.stdout.isatty() and terminal_supports_color)\n )\n", "path": "pre_commit/color.py"}], "after_files": [{"content": "from __future__ import unicode_literals\n\nimport os\nimport sys\n\nterminal_supports_color = True\nif os.name == 'nt': # pragma: no cover (windows)\n from pre_commit.color_windows import enable_virtual_terminal_processing\n try:\n enable_virtual_terminal_processing()\n except WindowsError:\n terminal_supports_color = False\n\nRED = '\\033[41m'\nGREEN = '\\033[42m'\nYELLOW = '\\033[43;30m'\nTURQUOISE = '\\033[46;30m'\nNORMAL = '\\033[0m'\n\n\nclass InvalidColorSetting(ValueError):\n pass\n\n\ndef format_color(text, color, use_color_setting):\n \"\"\"Format text with color.\n\n Args:\n text - Text to be formatted with color if `use_color`\n color - The color start string\n use_color_setting - Whether or not to color\n \"\"\"\n if not use_color_setting:\n return text\n else:\n return '{}{}{}'.format(color, text, NORMAL)\n\n\nCOLOR_CHOICES = ('auto', 'always', 'never')\n\n\ndef use_color(setting):\n \"\"\"Choose whether to use color based on the command argument.\n\n Args:\n setting - Either `auto`, `always`, or `never`\n \"\"\"\n if setting not in COLOR_CHOICES:\n raise InvalidColorSetting(setting)\n\n if os.environ.get('NO_COLOR'):\n return False\n\n return (\n setting == 'always' or\n 
(setting == 'auto' and sys.stdout.isatty() and terminal_supports_color)\n )\n", "path": "pre_commit/color.py"}]} | 829 | 102 |
gh_patches_debug_15385 | rasdani/github-patches | git_diff | bookwyrm-social__bookwyrm-622 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Still get notifications for follows from blocked user
**Describe the bug**
I blocked https://bookwyrm.social/user/[email protected], but I'm still getting follow notifications from them. I can also still see them in my followers list.
**To Reproduce**
1. Block a user that currently follows you
2. View your own followers list and see that they still follow you
3. Have that user (while blocked) refollow you
4. See a notification for that follow
**Expected behavior**
I expect that a block would force a user to unfollow me. Even if this didn't happen, though, I'd like to no longer receive notifications about activity from users I've blocked, including follow notifications.
--- END ISSUE ---
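The second half of the request (no notifications from blocked users) comes down to checking blocks in both directions before creating the follow request and its notification. A sketch using the models shown below; treat it as an illustration rather than bookwyrm's exact code:

```python
from django.db.models import Q

def is_blocked(user_a, user_b):
    # UserBlocks is the model defined in the file listing below
    return UserBlocks.objects.filter(
        Q(user_subject=user_a, user_object=user_b) |
        Q(user_subject=user_b, user_object=user_a)
    ).exists()

# In UserFollowRequest.save(), bail out before notifying:
# if is_blocked(self.user_subject, self.user_object):
#     return None
```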
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bookwyrm/models/relationship.py`
Content:
```
1 ''' defines relationships between users '''
2 from django.apps import apps
3 from django.db import models, transaction
4 from django.db.models import Q
5 from django.dispatch import receiver
6
7 from bookwyrm import activitypub
8 from .activitypub_mixin import ActivitypubMixin, ActivityMixin
9 from .base_model import BookWyrmModel
10 from . import fields
11
12
13 class UserRelationship(BookWyrmModel):
14 ''' many-to-many through table for followers '''
15 user_subject = fields.ForeignKey(
16 'User',
17 on_delete=models.PROTECT,
18 related_name='%(class)s_user_subject',
19 activitypub_field='actor',
20 )
21 user_object = fields.ForeignKey(
22 'User',
23 on_delete=models.PROTECT,
24 related_name='%(class)s_user_object',
25 activitypub_field='object',
26 )
27
28 @property
29 def privacy(self):
30 ''' all relationships are handled directly with the participants '''
31 return 'direct'
32
33 @property
34 def recipients(self):
35         ''' the remote user needs to receive direct broadcasts '''
36 return [u for u in [self.user_subject, self.user_object] if not u.local]
37
38 class Meta:
39 ''' relationships should be unique '''
40 abstract = True
41 constraints = [
42 models.UniqueConstraint(
43 fields=['user_subject', 'user_object'],
44 name='%(class)s_unique'
45 ),
46 models.CheckConstraint(
47 check=~models.Q(user_subject=models.F('user_object')),
48 name='%(class)s_no_self'
49 )
50 ]
51
52 def get_remote_id(self, status=None):# pylint: disable=arguments-differ
53         ''' build the remote_id from the relationship status '''
54 status = status or 'follows'
55 base_path = self.user_subject.remote_id
56 return '%s#%s/%d' % (base_path, status, self.id)
57
58
59 class UserFollows(ActivitypubMixin, UserRelationship):
60 ''' Following a user '''
61 status = 'follows'
62 activity_serializer = activitypub.Follow
63
64
65 @classmethod
66 def from_request(cls, follow_request):
67 ''' converts a follow request into a follow relationship '''
68 return cls.objects.create(
69 user_subject=follow_request.user_subject,
70 user_object=follow_request.user_object,
71 remote_id=follow_request.remote_id,
72 )
73
74
75 class UserFollowRequest(ActivitypubMixin, UserRelationship):
76 ''' following a user requires manual or automatic confirmation '''
77 status = 'follow_request'
78 activity_serializer = activitypub.Follow
79
80 def save(self, *args, broadcast=True, **kwargs):
81 ''' make sure the follow or block relationship doesn't already exist '''
82 try:
83 UserFollows.objects.get(
84 user_subject=self.user_subject,
85 user_object=self.user_object
86 )
87 UserBlocks.objects.get(
88 user_subject=self.user_subject,
89 user_object=self.user_object
90 )
91 return None
92 except (UserFollows.DoesNotExist, UserBlocks.DoesNotExist):
93 super().save(*args, **kwargs)
94
95 if broadcast and self.user_subject.local and not self.user_object.local:
96 self.broadcast(self.to_activity(), self.user_subject)
97
98 if self.user_object.local:
99 model = apps.get_model('bookwyrm.Notification', require_ready=True)
100 notification_type = 'FOLLOW_REQUEST' \
101 if self.user_object.manually_approves_followers else 'FOLLOW'
102 model.objects.create(
103 user=self.user_object,
104 related_user=self.user_subject,
105 notification_type=notification_type,
106 )
107
108
109 def accept(self):
110 ''' turn this request into the real deal'''
111 user = self.user_object
112 activity = activitypub.Accept(
113 id=self.get_remote_id(status='accepts'),
114 actor=self.user_object.remote_id,
115 object=self.to_activity()
116 ).serialize()
117 with transaction.atomic():
118 UserFollows.from_request(self)
119 self.delete()
120
121 self.broadcast(activity, user)
122
123
124 def reject(self):
125 ''' generate a Reject for this follow request '''
126 user = self.user_object
127 activity = activitypub.Reject(
128 id=self.get_remote_id(status='rejects'),
129 actor=self.user_object.remote_id,
130 object=self.to_activity()
131 ).serialize()
132 self.delete()
133 self.broadcast(activity, user)
134
135
136 class UserBlocks(ActivityMixin, UserRelationship):
137 ''' prevent another user from following you and seeing your posts '''
138 status = 'blocks'
139 activity_serializer = activitypub.Block
140
141
142 @receiver(models.signals.post_save, sender=UserBlocks)
143 #pylint: disable=unused-argument
144 def execute_after_save(sender, instance, created, *args, **kwargs):
145 ''' remove follow or follow request rels after a block is created '''
146 UserFollows.objects.filter(
147 Q(user_subject=instance.user_subject,
148 user_object=instance.user_object) | \
149 Q(user_subject=instance.user_object,
150 user_object=instance.user_subject)
151 ).delete()
152 UserFollowRequest.objects.filter(
153 Q(user_subject=instance.user_subject,
154 user_object=instance.user_object) | \
155 Q(user_subject=instance.user_object,
156 user_object=instance.user_subject)
157 ).delete()
158
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bookwyrm/models/relationship.py b/bookwyrm/models/relationship.py
--- a/bookwyrm/models/relationship.py
+++ b/bookwyrm/models/relationship.py
@@ -82,11 +82,16 @@
try:
UserFollows.objects.get(
user_subject=self.user_subject,
- user_object=self.user_object
+ user_object=self.user_object,
)
+ # blocking in either direction is a no-go
UserBlocks.objects.get(
user_subject=self.user_subject,
- user_object=self.user_object
+ user_object=self.user_object,
+ )
+ UserBlocks.objects.get(
+ user_subject=self.user_object,
+ user_object=self.user_subject,
)
return None
except (UserFollows.DoesNotExist, UserBlocks.DoesNotExist):
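The intent of the patch is to make the block check bidirectional: a follow request should be suppressed when either participant blocks the other, not only when the requester blocks the target. As a hedged illustration (this helper is not part of the bookwyrm codebase), the same bidirectional probe can be collapsed into a single query with `Q` objects:

```python
from django.db.models import Q

def block_exists_between(user_a, user_b):
    """Return True if either user blocks the other.

    Equivalent to the two sequential UserBlocks.objects.get() probes
    in the patch, expressed as one EXISTS query. UserBlocks is the
    model defined in the file above.
    """
    return UserBlocks.objects.filter(
        Q(user_subject=user_a, user_object=user_b)
        | Q(user_subject=user_b, user_object=user_a)
    ).exists()
```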
| {"golden_diff": "diff --git a/bookwyrm/models/relationship.py b/bookwyrm/models/relationship.py\n--- a/bookwyrm/models/relationship.py\n+++ b/bookwyrm/models/relationship.py\n@@ -82,11 +82,16 @@\n try:\n UserFollows.objects.get(\n user_subject=self.user_subject,\n- user_object=self.user_object\n+ user_object=self.user_object,\n )\n+ # blocking in either direction is a no-go\n UserBlocks.objects.get(\n user_subject=self.user_subject,\n- user_object=self.user_object\n+ user_object=self.user_object,\n+ )\n+ UserBlocks.objects.get(\n+ user_subject=self.user_object,\n+ user_object=self.user_subject,\n )\n return None\n except (UserFollows.DoesNotExist, UserBlocks.DoesNotExist):\n", "issue": "Still get notifications for follows from blocked user\n**Describe the bug**\r\nI blocked https://bookwyrm.social/user/[email protected], but I'm still getting follow notifications from them. I also can still see them in my followers list\r\n\r\n**To Reproduce**\r\n1. Block user that currently follows you\r\n2. View own follow list, see that they still follow you\r\n3. Have that user (while blocked) refollow you\r\n4. See notification for that follow\r\n\r\n**Expected behavior**\r\nI expect that a block would force a user to unfollow me. Even if this didn't happen, though, I'd like to no longer receive notifications from activity of users that I've blocked, including follow notifications\n", "before_files": [{"content": "''' defines relationships between users '''\nfrom django.apps import apps\nfrom django.db import models, transaction\nfrom django.db.models import Q\nfrom django.dispatch import receiver\n\nfrom bookwyrm import activitypub\nfrom .activitypub_mixin import ActivitypubMixin, ActivityMixin\nfrom .base_model import BookWyrmModel\nfrom . import fields\n\n\nclass UserRelationship(BookWyrmModel):\n ''' many-to-many through table for followers '''\n user_subject = fields.ForeignKey(\n 'User',\n on_delete=models.PROTECT,\n related_name='%(class)s_user_subject',\n activitypub_field='actor',\n )\n user_object = fields.ForeignKey(\n 'User',\n on_delete=models.PROTECT,\n related_name='%(class)s_user_object',\n activitypub_field='object',\n )\n\n @property\n def privacy(self):\n ''' all relationships are handled directly with the participants '''\n return 'direct'\n\n @property\n def recipients(self):\n ''' the remote user needs to recieve direct broadcasts '''\n return [u for u in [self.user_subject, self.user_object] if not u.local]\n\n class Meta:\n ''' relationships should be unique '''\n abstract = True\n constraints = [\n models.UniqueConstraint(\n fields=['user_subject', 'user_object'],\n name='%(class)s_unique'\n ),\n models.CheckConstraint(\n check=~models.Q(user_subject=models.F('user_object')),\n name='%(class)s_no_self'\n )\n ]\n\n def get_remote_id(self, status=None):# pylint: disable=arguments-differ\n ''' use shelf identifier in remote_id '''\n status = status or 'follows'\n base_path = self.user_subject.remote_id\n return '%s#%s/%d' % (base_path, status, self.id)\n\n\nclass UserFollows(ActivitypubMixin, UserRelationship):\n ''' Following a user '''\n status = 'follows'\n activity_serializer = activitypub.Follow\n\n\n @classmethod\n def from_request(cls, follow_request):\n ''' converts a follow request into a follow relationship '''\n return cls.objects.create(\n user_subject=follow_request.user_subject,\n user_object=follow_request.user_object,\n remote_id=follow_request.remote_id,\n )\n\n\nclass UserFollowRequest(ActivitypubMixin, UserRelationship):\n ''' following a user requires manual 
or automatic confirmation '''\n status = 'follow_request'\n activity_serializer = activitypub.Follow\n\n def save(self, *args, broadcast=True, **kwargs):\n ''' make sure the follow or block relationship doesn't already exist '''\n try:\n UserFollows.objects.get(\n user_subject=self.user_subject,\n user_object=self.user_object\n )\n UserBlocks.objects.get(\n user_subject=self.user_subject,\n user_object=self.user_object\n )\n return None\n except (UserFollows.DoesNotExist, UserBlocks.DoesNotExist):\n super().save(*args, **kwargs)\n\n if broadcast and self.user_subject.local and not self.user_object.local:\n self.broadcast(self.to_activity(), self.user_subject)\n\n if self.user_object.local:\n model = apps.get_model('bookwyrm.Notification', require_ready=True)\n notification_type = 'FOLLOW_REQUEST' \\\n if self.user_object.manually_approves_followers else 'FOLLOW'\n model.objects.create(\n user=self.user_object,\n related_user=self.user_subject,\n notification_type=notification_type,\n )\n\n\n def accept(self):\n ''' turn this request into the real deal'''\n user = self.user_object\n activity = activitypub.Accept(\n id=self.get_remote_id(status='accepts'),\n actor=self.user_object.remote_id,\n object=self.to_activity()\n ).serialize()\n with transaction.atomic():\n UserFollows.from_request(self)\n self.delete()\n\n self.broadcast(activity, user)\n\n\n def reject(self):\n ''' generate a Reject for this follow request '''\n user = self.user_object\n activity = activitypub.Reject(\n id=self.get_remote_id(status='rejects'),\n actor=self.user_object.remote_id,\n object=self.to_activity()\n ).serialize()\n self.delete()\n self.broadcast(activity, user)\n\n\nclass UserBlocks(ActivityMixin, UserRelationship):\n ''' prevent another user from following you and seeing your posts '''\n status = 'blocks'\n activity_serializer = activitypub.Block\n\n\n@receiver(models.signals.post_save, sender=UserBlocks)\n#pylint: disable=unused-argument\ndef execute_after_save(sender, instance, created, *args, **kwargs):\n ''' remove follow or follow request rels after a block is created '''\n UserFollows.objects.filter(\n Q(user_subject=instance.user_subject,\n user_object=instance.user_object) | \\\n Q(user_subject=instance.user_object,\n user_object=instance.user_subject)\n ).delete()\n UserFollowRequest.objects.filter(\n Q(user_subject=instance.user_subject,\n user_object=instance.user_object) | \\\n Q(user_subject=instance.user_object,\n user_object=instance.user_subject)\n ).delete()\n", "path": "bookwyrm/models/relationship.py"}], "after_files": [{"content": "''' defines relationships between users '''\nfrom django.apps import apps\nfrom django.db import models, transaction\nfrom django.db.models import Q\nfrom django.dispatch import receiver\n\nfrom bookwyrm import activitypub\nfrom .activitypub_mixin import ActivitypubMixin, ActivityMixin\nfrom .base_model import BookWyrmModel\nfrom . 
import fields\n\n\nclass UserRelationship(BookWyrmModel):\n ''' many-to-many through table for followers '''\n user_subject = fields.ForeignKey(\n 'User',\n on_delete=models.PROTECT,\n related_name='%(class)s_user_subject',\n activitypub_field='actor',\n )\n user_object = fields.ForeignKey(\n 'User',\n on_delete=models.PROTECT,\n related_name='%(class)s_user_object',\n activitypub_field='object',\n )\n\n @property\n def privacy(self):\n ''' all relationships are handled directly with the participants '''\n return 'direct'\n\n @property\n def recipients(self):\n ''' the remote user needs to recieve direct broadcasts '''\n return [u for u in [self.user_subject, self.user_object] if not u.local]\n\n class Meta:\n ''' relationships should be unique '''\n abstract = True\n constraints = [\n models.UniqueConstraint(\n fields=['user_subject', 'user_object'],\n name='%(class)s_unique'\n ),\n models.CheckConstraint(\n check=~models.Q(user_subject=models.F('user_object')),\n name='%(class)s_no_self'\n )\n ]\n\n def get_remote_id(self, status=None):# pylint: disable=arguments-differ\n ''' use shelf identifier in remote_id '''\n status = status or 'follows'\n base_path = self.user_subject.remote_id\n return '%s#%s/%d' % (base_path, status, self.id)\n\n\nclass UserFollows(ActivitypubMixin, UserRelationship):\n ''' Following a user '''\n status = 'follows'\n activity_serializer = activitypub.Follow\n\n\n @classmethod\n def from_request(cls, follow_request):\n ''' converts a follow request into a follow relationship '''\n return cls.objects.create(\n user_subject=follow_request.user_subject,\n user_object=follow_request.user_object,\n remote_id=follow_request.remote_id,\n )\n\n\nclass UserFollowRequest(ActivitypubMixin, UserRelationship):\n ''' following a user requires manual or automatic confirmation '''\n status = 'follow_request'\n activity_serializer = activitypub.Follow\n\n def save(self, *args, broadcast=True, **kwargs):\n ''' make sure the follow or block relationship doesn't already exist '''\n try:\n UserFollows.objects.get(\n user_subject=self.user_subject,\n user_object=self.user_object,\n )\n # blocking in either direction is a no-go\n UserBlocks.objects.get(\n user_subject=self.user_subject,\n user_object=self.user_object,\n )\n UserBlocks.objects.get(\n user_subject=self.user_object,\n user_object=self.user_subject,\n )\n return None\n except (UserFollows.DoesNotExist, UserBlocks.DoesNotExist):\n super().save(*args, **kwargs)\n\n if broadcast and self.user_subject.local and not self.user_object.local:\n self.broadcast(self.to_activity(), self.user_subject)\n\n if self.user_object.local:\n model = apps.get_model('bookwyrm.Notification', require_ready=True)\n notification_type = 'FOLLOW_REQUEST' \\\n if self.user_object.manually_approves_followers else 'FOLLOW'\n model.objects.create(\n user=self.user_object,\n related_user=self.user_subject,\n notification_type=notification_type,\n )\n\n\n def accept(self):\n ''' turn this request into the real deal'''\n user = self.user_object\n activity = activitypub.Accept(\n id=self.get_remote_id(status='accepts'),\n actor=self.user_object.remote_id,\n object=self.to_activity()\n ).serialize()\n with transaction.atomic():\n UserFollows.from_request(self)\n self.delete()\n\n self.broadcast(activity, user)\n\n\n def reject(self):\n ''' generate a Reject for this follow request '''\n user = self.user_object\n activity = activitypub.Reject(\n id=self.get_remote_id(status='rejects'),\n actor=self.user_object.remote_id,\n object=self.to_activity()\n 
).serialize()\n self.delete()\n self.broadcast(activity, user)\n\n\nclass UserBlocks(ActivityMixin, UserRelationship):\n ''' prevent another user from following you and seeing your posts '''\n status = 'blocks'\n activity_serializer = activitypub.Block\n\n\n@receiver(models.signals.post_save, sender=UserBlocks)\n#pylint: disable=unused-argument\ndef execute_after_save(sender, instance, created, *args, **kwargs):\n ''' remove follow or follow request rels after a block is created '''\n UserFollows.objects.filter(\n Q(user_subject=instance.user_subject,\n user_object=instance.user_object) | \\\n Q(user_subject=instance.user_object,\n user_object=instance.user_subject)\n ).delete()\n UserFollowRequest.objects.filter(\n Q(user_subject=instance.user_subject,\n user_object=instance.user_object) | \\\n Q(user_subject=instance.user_object,\n user_object=instance.user_subject)\n ).delete()\n", "path": "bookwyrm/models/relationship.py"}]} | 1,842 | 174 |
gh_patches_debug_60893 | rasdani/github-patches | git_diff | webkom__lego-2342 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Phone number not saved from registration form
When creating a new user, LEGO ignores the phone number inserted into the registration form.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lego/apps/users/serializers/registration.py`
Content:
```
1 from django.contrib.auth import password_validation
2 from rest_framework import exceptions, serializers
3
4 from lego.apps.users.models import User
5 from lego.utils.functions import verify_captcha
6
7
8 class RegistrationSerializer(serializers.ModelSerializer):
9 captcha_response = serializers.CharField(required=True)
10
11 def validate_captcha_response(self, captcha_response):
12 if not verify_captcha(captcha_response):
13 raise exceptions.ValidationError("invalid_captcha")
14 return captcha_response
15
16 class Meta:
17 model = User
18 fields = ("email", "captcha_response")
19
20
21 class RegistrationConfirmationSerializer(serializers.ModelSerializer):
22
23 password = serializers.CharField(required=True, write_only=True)
24
25 def validate_username(self, username):
26 username_exists = User.objects.filter(username__iexact=username).exists()
27 if username_exists:
28 raise exceptions.ValidationError("Username exists")
29 return username
30
31 def validate_password(self, password):
32 password_validation.validate_password(password)
33 return password
34
35 class Meta:
36 model = User
37 fields = (
38 "username",
39 "first_name",
40 "last_name",
41 "gender",
42 "password",
43 "allergies",
44 )
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lego/apps/users/serializers/registration.py b/lego/apps/users/serializers/registration.py
--- a/lego/apps/users/serializers/registration.py
+++ b/lego/apps/users/serializers/registration.py
@@ -41,4 +41,5 @@
"gender",
"password",
"allergies",
+ "phone_number",
)
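For background on why the one-line fix suffices: a DRF `ModelSerializer` only builds, validates, and persists the fields declared in `Meta.fields`, so the POSTed `phone_number` was silently discarded before it ever reached the model. A rough sketch of the behaviour, with hypothetical payload values and a configured Django/DRF environment assumed:

```python
payload = {
    "username": "ola",
    "first_name": "Ola",
    "last_name": "Nordmann",
    "gender": "male",  # hypothetical choice value
    "password": "correct-horse-battery-staple",
    "allergies": "",
    "phone_number": "+4712345678",
}

serializer = RegistrationConfirmationSerializer(data=payload)
serializer.is_valid(raise_exception=True)

# Before the patch, "phone_number" is absent from validated_data and
# never saved; after the patch it survives validation like the other
# declared fields.
print(serializer.validated_data.get("phone_number"))
```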
| {"golden_diff": "diff --git a/lego/apps/users/serializers/registration.py b/lego/apps/users/serializers/registration.py\n--- a/lego/apps/users/serializers/registration.py\n+++ b/lego/apps/users/serializers/registration.py\n@@ -41,4 +41,5 @@\n \"gender\",\n \"password\",\n \"allergies\",\n+ \"phone_number\",\n )\n", "issue": "Phone number not saved from registration form\nWhen creating a new user, LEGO ignores the phone number inserted into the registration form.\n", "before_files": [{"content": "from django.contrib.auth import password_validation\nfrom rest_framework import exceptions, serializers\n\nfrom lego.apps.users.models import User\nfrom lego.utils.functions import verify_captcha\n\n\nclass RegistrationSerializer(serializers.ModelSerializer):\n captcha_response = serializers.CharField(required=True)\n\n def validate_captcha_response(self, captcha_response):\n if not verify_captcha(captcha_response):\n raise exceptions.ValidationError(\"invalid_captcha\")\n return captcha_response\n\n class Meta:\n model = User\n fields = (\"email\", \"captcha_response\")\n\n\nclass RegistrationConfirmationSerializer(serializers.ModelSerializer):\n\n password = serializers.CharField(required=True, write_only=True)\n\n def validate_username(self, username):\n username_exists = User.objects.filter(username__iexact=username).exists()\n if username_exists:\n raise exceptions.ValidationError(\"Username exists\")\n return username\n\n def validate_password(self, password):\n password_validation.validate_password(password)\n return password\n\n class Meta:\n model = User\n fields = (\n \"username\",\n \"first_name\",\n \"last_name\",\n \"gender\",\n \"password\",\n \"allergies\",\n )\n", "path": "lego/apps/users/serializers/registration.py"}], "after_files": [{"content": "from django.contrib.auth import password_validation\nfrom rest_framework import exceptions, serializers\n\nfrom lego.apps.users.models import User\nfrom lego.utils.functions import verify_captcha\n\n\nclass RegistrationSerializer(serializers.ModelSerializer):\n captcha_response = serializers.CharField(required=True)\n\n def validate_captcha_response(self, captcha_response):\n if not verify_captcha(captcha_response):\n raise exceptions.ValidationError(\"invalid_captcha\")\n return captcha_response\n\n class Meta:\n model = User\n fields = (\"email\", \"captcha_response\")\n\n\nclass RegistrationConfirmationSerializer(serializers.ModelSerializer):\n\n password = serializers.CharField(required=True, write_only=True)\n\n def validate_username(self, username):\n username_exists = User.objects.filter(username__iexact=username).exists()\n if username_exists:\n raise exceptions.ValidationError(\"Username exists\")\n return username\n\n def validate_password(self, password):\n password_validation.validate_password(password)\n return password\n\n class Meta:\n model = User\n fields = (\n \"username\",\n \"first_name\",\n \"last_name\",\n \"gender\",\n \"password\",\n \"allergies\",\n \"phone_number\",\n )\n", "path": "lego/apps/users/serializers/registration.py"}]} | 606 | 90 |
gh_patches_debug_24477 | rasdani/github-patches | git_diff | wemake-services__wemake-python-styleguide-2529 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
609: Allow __enter__() inside an __enter__()
### What's wrong
One design pattern is to wrap a context manager. It would be nice to avoid WPS609 errors for this code, which has to call the wrapped object's magic methods directly.
### How it should be
Allow code like:
```
class Foo:
...
def __enter__(self):
self._conn.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._conn.__exit__(exc_type, exc_value, traceback)
```
I guess the same applies to `__aenter__`/`__aexit__` as well.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wemake_python_styleguide/visitors/ast/attributes.py`
Content:
```
1 import ast
2 from typing import ClassVar, FrozenSet
3
4 from typing_extensions import final
5
6 from wemake_python_styleguide.constants import ALL_MAGIC_METHODS
7 from wemake_python_styleguide.logic.naming import access
8 from wemake_python_styleguide.violations.best_practices import (
9 ProtectedAttributeViolation,
10 )
11 from wemake_python_styleguide.violations.oop import (
12 DirectMagicAttributeAccessViolation,
13 )
14 from wemake_python_styleguide.visitors.base import BaseNodeVisitor
15
16
17 @final
18 class WrongAttributeVisitor(BaseNodeVisitor):
19 """Ensures that attributes are used correctly."""
20
21 _allowed_to_use_protected: ClassVar[FrozenSet[str]] = frozenset((
22 'self',
23 'cls',
24 'mcs',
25 ))
26
27 def visit_Attribute(self, node: ast.Attribute) -> None:
28 """Checks the `Attribute` node."""
29 self._check_protected_attribute(node)
30 self._check_magic_attribute(node)
31 self.generic_visit(node)
32
33 def _is_super_called(self, node: ast.Call) -> bool:
34 return isinstance(node.func, ast.Name) and node.func.id == 'super'
35
36 def _ensure_attribute_type(self, node: ast.Attribute, exception) -> None:
37 if isinstance(node.value, ast.Name):
38 if node.value.id in self._allowed_to_use_protected:
39 return
40
41 if isinstance(node.value, ast.Call):
42 if self._is_super_called(node.value):
43 return
44
45 self.add_violation(exception(node, text=node.attr))
46
47 def _check_protected_attribute(self, node: ast.Attribute) -> None:
48 if access.is_protected(node.attr):
49 self._ensure_attribute_type(node, ProtectedAttributeViolation)
50
51 def _check_magic_attribute(self, node: ast.Attribute) -> None:
52 if access.is_magic(node.attr):
53 if node.attr in ALL_MAGIC_METHODS:
54 self._ensure_attribute_type(
55 node, DirectMagicAttributeAccessViolation,
56 )
57
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/wemake_python_styleguide/visitors/ast/attributes.py b/wemake_python_styleguide/visitors/ast/attributes.py
--- a/wemake_python_styleguide/visitors/ast/attributes.py
+++ b/wemake_python_styleguide/visitors/ast/attributes.py
@@ -3,7 +3,9 @@
from typing_extensions import final
+from wemake_python_styleguide.compat.aliases import FunctionNodes
from wemake_python_styleguide.constants import ALL_MAGIC_METHODS
+from wemake_python_styleguide.logic import nodes
from wemake_python_styleguide.logic.naming import access
from wemake_python_styleguide.violations.best_practices import (
ProtectedAttributeViolation,
@@ -50,6 +52,15 @@
def _check_magic_attribute(self, node: ast.Attribute) -> None:
if access.is_magic(node.attr):
+ # If "magic" method being called has the same name as
+ # the enclosing function, then it is a "wrapper" and thus
+ # a "false positive".
+
+ ctx = nodes.get_context(node)
+ if isinstance(ctx, FunctionNodes):
+ if node.attr == ctx.name:
+ return
+
if node.attr in ALL_MAGIC_METHODS:
self._ensure_attribute_type(
node, DirectMagicAttributeAccessViolation,
| {"golden_diff": "diff --git a/wemake_python_styleguide/visitors/ast/attributes.py b/wemake_python_styleguide/visitors/ast/attributes.py\n--- a/wemake_python_styleguide/visitors/ast/attributes.py\n+++ b/wemake_python_styleguide/visitors/ast/attributes.py\n@@ -3,7 +3,9 @@\n \n from typing_extensions import final\n \n+from wemake_python_styleguide.compat.aliases import FunctionNodes\n from wemake_python_styleguide.constants import ALL_MAGIC_METHODS\n+from wemake_python_styleguide.logic import nodes\n from wemake_python_styleguide.logic.naming import access\n from wemake_python_styleguide.violations.best_practices import (\n ProtectedAttributeViolation,\n@@ -50,6 +52,15 @@\n \n def _check_magic_attribute(self, node: ast.Attribute) -> None:\n if access.is_magic(node.attr):\n+ # If \"magic\" method being called has the same name as\n+ # the enclosing function, then it is a \"wrapper\" and thus\n+ # a \"false positive\".\n+\n+ ctx = nodes.get_context(node)\n+ if isinstance(ctx, FunctionNodes):\n+ if node.attr == ctx.name:\n+ return\n+\n if node.attr in ALL_MAGIC_METHODS:\n self._ensure_attribute_type(\n node, DirectMagicAttributeAccessViolation,\n", "issue": "609: Allow __enter__() inside an __enter__()\n### What's wrong\r\n\r\nOne design pattern is to wrap a context manager. It would be nice to avoid WPS609 errors with this code, which seems to require accessing the direct magic methods.\r\n\r\n### How it should be\r\n\r\nAllow code like:\r\n```\r\nclass Foo:\r\n ...\r\n\r\n def __enter__(self):\r\n self._conn.__enter__()\r\n return self\r\n\r\n def __exit__(self, exc_type, exc_value, traceback):\r\n self._conn.__exit__(exc_type, exc_value, traceback)\r\n```\r\n\r\nI guess the same for aenter/aexit as well.\n", "before_files": [{"content": "import ast\nfrom typing import ClassVar, FrozenSet\n\nfrom typing_extensions import final\n\nfrom wemake_python_styleguide.constants import ALL_MAGIC_METHODS\nfrom wemake_python_styleguide.logic.naming import access\nfrom wemake_python_styleguide.violations.best_practices import (\n ProtectedAttributeViolation,\n)\nfrom wemake_python_styleguide.violations.oop import (\n DirectMagicAttributeAccessViolation,\n)\nfrom wemake_python_styleguide.visitors.base import BaseNodeVisitor\n\n\n@final\nclass WrongAttributeVisitor(BaseNodeVisitor):\n \"\"\"Ensures that attributes are used correctly.\"\"\"\n\n _allowed_to_use_protected: ClassVar[FrozenSet[str]] = frozenset((\n 'self',\n 'cls',\n 'mcs',\n ))\n\n def visit_Attribute(self, node: ast.Attribute) -> None:\n \"\"\"Checks the `Attribute` node.\"\"\"\n self._check_protected_attribute(node)\n self._check_magic_attribute(node)\n self.generic_visit(node)\n\n def _is_super_called(self, node: ast.Call) -> bool:\n return isinstance(node.func, ast.Name) and node.func.id == 'super'\n\n def _ensure_attribute_type(self, node: ast.Attribute, exception) -> None:\n if isinstance(node.value, ast.Name):\n if node.value.id in self._allowed_to_use_protected:\n return\n\n if isinstance(node.value, ast.Call):\n if self._is_super_called(node.value):\n return\n\n self.add_violation(exception(node, text=node.attr))\n\n def _check_protected_attribute(self, node: ast.Attribute) -> None:\n if access.is_protected(node.attr):\n self._ensure_attribute_type(node, ProtectedAttributeViolation)\n\n def _check_magic_attribute(self, node: ast.Attribute) -> None:\n if access.is_magic(node.attr):\n if node.attr in ALL_MAGIC_METHODS:\n self._ensure_attribute_type(\n node, DirectMagicAttributeAccessViolation,\n )\n", "path": 
"wemake_python_styleguide/visitors/ast/attributes.py"}], "after_files": [{"content": "import ast\nfrom typing import ClassVar, FrozenSet\n\nfrom typing_extensions import final\n\nfrom wemake_python_styleguide.compat.aliases import FunctionNodes\nfrom wemake_python_styleguide.constants import ALL_MAGIC_METHODS\nfrom wemake_python_styleguide.logic import nodes\nfrom wemake_python_styleguide.logic.naming import access\nfrom wemake_python_styleguide.violations.best_practices import (\n ProtectedAttributeViolation,\n)\nfrom wemake_python_styleguide.violations.oop import (\n DirectMagicAttributeAccessViolation,\n)\nfrom wemake_python_styleguide.visitors.base import BaseNodeVisitor\n\n\n@final\nclass WrongAttributeVisitor(BaseNodeVisitor):\n \"\"\"Ensures that attributes are used correctly.\"\"\"\n\n _allowed_to_use_protected: ClassVar[FrozenSet[str]] = frozenset((\n 'self',\n 'cls',\n 'mcs',\n ))\n\n def visit_Attribute(self, node: ast.Attribute) -> None:\n \"\"\"Checks the `Attribute` node.\"\"\"\n self._check_protected_attribute(node)\n self._check_magic_attribute(node)\n self.generic_visit(node)\n\n def _is_super_called(self, node: ast.Call) -> bool:\n return isinstance(node.func, ast.Name) and node.func.id == 'super'\n\n def _ensure_attribute_type(self, node: ast.Attribute, exception) -> None:\n if isinstance(node.value, ast.Name):\n if node.value.id in self._allowed_to_use_protected:\n return\n\n if isinstance(node.value, ast.Call):\n if self._is_super_called(node.value):\n return\n\n self.add_violation(exception(node, text=node.attr))\n\n def _check_protected_attribute(self, node: ast.Attribute) -> None:\n if access.is_protected(node.attr):\n self._ensure_attribute_type(node, ProtectedAttributeViolation)\n\n def _check_magic_attribute(self, node: ast.Attribute) -> None:\n if access.is_magic(node.attr):\n # If \"magic\" method being called has the same name as\n # the enclosing function, then it is a \"wrapper\" and thus\n # a \"false positive\".\n\n ctx = nodes.get_context(node)\n if isinstance(ctx, FunctionNodes):\n if node.attr == ctx.name:\n return\n\n if node.attr in ALL_MAGIC_METHODS:\n self._ensure_attribute_type(\n node, DirectMagicAttributeAccessViolation,\n )\n", "path": "wemake_python_styleguide/visitors/ast/attributes.py"}]} | 928 | 297 |
gh_patches_debug_18253 | rasdani/github-patches | git_diff | google__jax-1473 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
advi.py example is broken with jit
The advi.py example is broken with jit, specifically the helper decorated with `@partial(jit, static_argnums=(0, 1, 2, 4))`.
If jit is removed, it works.
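For context, the failing call shape, reconstructed from the `mesh_eval` usage in `examples/advi.py` below (the exact traceback depends on the JAX version of the time): argument index 4 (`num_ticks`) is declared static but omitted at the call site, leaving it to a Python default that `jit` apparently did not resolve.

```python
# Four positional arguments; static argnum 4 (num_ticks) is left to
# its Python default, which jit did not fill in at the time.
X, Y, Z = mesh_eval(target_dist, x_limits, y_limits, 1)
```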
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/advi.py`
Content:
```
1 # Copyright 2018 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Automatic differentiation variational inference in Numpy and JAX.
16
17 This demo fits a Gaussian approximation to an intractable, unnormalized
18 density, by differentiating through a Monte Carlo estimate of the
19 variational evidence lower bound (ELBO)."""
20
21
22 from functools import partial
23 import matplotlib.pyplot as plt
24
25 from jax.api import jit, grad, vmap
26 from jax import random
27 from jax.experimental import optimizers
28 import jax.numpy as np
29 import jax.scipy.stats.norm as norm
30
31
32 # ========= Functions to define the evidence lower bound. =========
33
34 def diag_gaussian_sample(rng, mean, log_std):
35 # Take a single sample from a diagonal multivariate Gaussian.
36 return mean + np.exp(log_std) * random.normal(rng, mean.shape)
37
38 def diag_gaussian_logpdf(x, mean, log_std):
39 # Evaluate a single point on a diagonal multivariate Gaussian.
40 return np.sum(vmap(norm.logpdf)(x, mean, np.exp(log_std)))
41
42 def elbo(logprob, rng, mean, log_std):
43 # Single-sample Monte Carlo estimate of the variational lower bound.
44 sample = diag_gaussian_sample(rng, mean, log_std)
45 return logprob(sample) - diag_gaussian_logpdf(sample, mean, log_std)
46
47 def batch_elbo(logprob, rng, params, num_samples):
48 # Average over a batch of random samples.
49 rngs = random.split(rng, num_samples)
50 vectorized_elbo = vmap(partial(elbo, logprob), in_axes=(0, None, None))
51 return np.mean(vectorized_elbo(rngs, *params))
52
53
54 # ========= Helper function for plotting. =========
55
56 @partial(jit, static_argnums=(0, 1, 2, 4))
57 def mesh_eval(func, x_limits, y_limits, params, num_ticks=101):
58 # Evaluate func on a 2D grid defined by x_limits and y_limits.
59 x = np.linspace(*x_limits, num=num_ticks)
60 y = np.linspace(*y_limits, num=num_ticks)
61 X, Y = np.meshgrid(x, y)
62 xy_vec = np.stack([X.ravel(), Y.ravel()]).T
63 zs = vmap(func, in_axes=(0, None))(xy_vec, params)
64 return X, Y, zs.reshape(X.shape)
65
66
67 # ========= Define an intractable unnormalized density =========
68
69 def funnel_log_density(params):
70 return norm.logpdf(params[0], 0, np.exp(params[1])) + \
71 norm.logpdf(params[1], 0, 1.35)
72
73
74 if __name__ == "__main__":
75 num_samples = 40
76
77 @jit
78 def objective(params, t):
79 rng = random.PRNGKey(t)
80 return -batch_elbo(funnel_log_density, rng, params, num_samples)
81
82 # Set up figure.
83 fig = plt.figure(figsize=(8,8), facecolor='white')
84 ax = fig.add_subplot(111, frameon=False)
85 plt.ion()
86 plt.show(block=False)
87 x_limits = [-2, 2]
88 y_limits = [-4, 2]
89 target_dist = lambda x, _: np.exp(funnel_log_density(x))
90 approx_dist = lambda x, params: np.exp(diag_gaussian_logpdf(x, *params))
91
92 def callback(params, t):
93 print("Iteration {} lower bound {}".format(t, objective(params, t)))
94
95 plt.cla()
96 X, Y, Z = mesh_eval(target_dist, x_limits, y_limits, 1)
97 ax.contour(X, Y, Z, cmap='summer')
98 X, Y, Z = mesh_eval(approx_dist, x_limits, y_limits, params)
99 ax.contour(X, Y, Z, cmap='winter')
100 ax.set_xlim(x_limits)
101 ax.set_ylim(y_limits)
102 ax.set_yticks([])
103 ax.set_xticks([])
104
105 # Plot random samples from variational distribution.
106 # Here we clone the rng used in computing the objective
107 # so that we can show exactly the same samples.
108 rngs = random.split(random.PRNGKey(t), num_samples)
109 samples = vmap(diag_gaussian_sample, in_axes=(0, None, None))(rngs, *params)
110 ax.plot(samples[:, 0], samples[:, 1], 'b.')
111
112 plt.draw()
113 plt.pause(1.0/60.0)
114
115
116 # Set up optimizer.
117 D = 2
118 init_mean = np.zeros(D)
119 init_std = np.zeros(D)
120 init_params = (init_mean, init_std)
121 opt_init, opt_update, get_params = optimizers.momentum(step_size=0.1, mass=0.9)
122 opt_state = opt_init(init_params)
123
124 @jit
125 def update(i, opt_state):
126 params = get_params(opt_state)
127 gradient = grad(objective)(params, i)
128 return opt_update(i, gradient, opt_state)
129
130
131 # Main loop.
132 print("Optimizing variational parameters...")
133 for t in range(100):
134 opt_state = update(t, opt_state)
135 params = get_params(opt_state)
136 callback(params, t)
137 plt.show(block=True)
138
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/advi.py b/examples/advi.py
--- a/examples/advi.py
+++ b/examples/advi.py
@@ -54,7 +54,7 @@
# ========= Helper function for plotting. =========
@partial(jit, static_argnums=(0, 1, 2, 4))
-def mesh_eval(func, x_limits, y_limits, params, num_ticks=101):
+def _mesh_eval(func, x_limits, y_limits, params, num_ticks):
# Evaluate func on a 2D grid defined by x_limits and y_limits.
x = np.linspace(*x_limits, num=num_ticks)
y = np.linspace(*y_limits, num=num_ticks)
@@ -63,6 +63,8 @@
zs = vmap(func, in_axes=(0, None))(xy_vec, params)
return X, Y, zs.reshape(X.shape)
+def mesh_eval(func, x_limits, y_limits, params, num_ticks=101):
+ return _mesh_eval(func, x_limits, y_limits, params, num_ticks)
# ========= Define an intractable unnormalized density =========
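The pattern behind the patch, as a hedged general sketch with illustrative names: resolve defaults in a plain-Python wrapper and pass every argument through explicitly, so the jitted inner function with `static_argnums` never has to apply a default itself.

```python
from functools import partial

from jax import jit
import jax.numpy as jnp

@partial(jit, static_argnums=(1,))
def _scale(x, factor):
    return x * factor

def scale(x, factor=2):
    # The default is applied here, in ordinary Python, so the jitted
    # function always receives a complete positional argument list.
    return _scale(x, factor)

print(scale(jnp.arange(3.0)))     # uses the default
print(scale(jnp.arange(3.0), 4))  # explicit static value
```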
| {"golden_diff": "diff --git a/examples/advi.py b/examples/advi.py\n--- a/examples/advi.py\n+++ b/examples/advi.py\n@@ -54,7 +54,7 @@\n # ========= Helper function for plotting. =========\n \n @partial(jit, static_argnums=(0, 1, 2, 4))\n-def mesh_eval(func, x_limits, y_limits, params, num_ticks=101):\n+def _mesh_eval(func, x_limits, y_limits, params, num_ticks):\n # Evaluate func on a 2D grid defined by x_limits and y_limits.\n x = np.linspace(*x_limits, num=num_ticks)\n y = np.linspace(*y_limits, num=num_ticks)\n@@ -63,6 +63,8 @@\n zs = vmap(func, in_axes=(0, None))(xy_vec, params)\n return X, Y, zs.reshape(X.shape)\n \n+def mesh_eval(func, x_limits, y_limits, params, num_ticks=101):\n+ return _mesh_eval(func, x_limits, y_limits, params, num_ticks)\n \n # ========= Define an intractable unnormalized density =========\n", "issue": "advi.py example is broken with jit\nadvi.py example is broken with jit: @partial(jit, static_argnums=(0, 1, 2, 4))\r\nIf jit is removed, then It works.\n", "before_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Automatic differentiation variational inference in Numpy and JAX.\n\nThis demo fits a Gaussian approximation to an intractable, unnormalized\ndensity, by differentiating through a Monte Carlo estimate of the\nvariational evidence lower bound (ELBO).\"\"\"\n\n\nfrom functools import partial\nimport matplotlib.pyplot as plt\n\nfrom jax.api import jit, grad, vmap\nfrom jax import random\nfrom jax.experimental import optimizers\nimport jax.numpy as np\nimport jax.scipy.stats.norm as norm\n\n\n# ========= Functions to define the evidence lower bound. =========\n\ndef diag_gaussian_sample(rng, mean, log_std):\n # Take a single sample from a diagonal multivariate Gaussian.\n return mean + np.exp(log_std) * random.normal(rng, mean.shape)\n\ndef diag_gaussian_logpdf(x, mean, log_std):\n # Evaluate a single point on a diagonal multivariate Gaussian.\n return np.sum(vmap(norm.logpdf)(x, mean, np.exp(log_std)))\n\ndef elbo(logprob, rng, mean, log_std):\n # Single-sample Monte Carlo estimate of the variational lower bound.\n sample = diag_gaussian_sample(rng, mean, log_std)\n return logprob(sample) - diag_gaussian_logpdf(sample, mean, log_std)\n\ndef batch_elbo(logprob, rng, params, num_samples):\n # Average over a batch of random samples.\n rngs = random.split(rng, num_samples)\n vectorized_elbo = vmap(partial(elbo, logprob), in_axes=(0, None, None))\n return np.mean(vectorized_elbo(rngs, *params))\n\n\n# ========= Helper function for plotting. 
=========\n\n@partial(jit, static_argnums=(0, 1, 2, 4))\ndef mesh_eval(func, x_limits, y_limits, params, num_ticks=101):\n # Evaluate func on a 2D grid defined by x_limits and y_limits.\n x = np.linspace(*x_limits, num=num_ticks)\n y = np.linspace(*y_limits, num=num_ticks)\n X, Y = np.meshgrid(x, y)\n xy_vec = np.stack([X.ravel(), Y.ravel()]).T\n zs = vmap(func, in_axes=(0, None))(xy_vec, params)\n return X, Y, zs.reshape(X.shape)\n\n\n# ========= Define an intractable unnormalized density =========\n\ndef funnel_log_density(params):\n return norm.logpdf(params[0], 0, np.exp(params[1])) + \\\n norm.logpdf(params[1], 0, 1.35)\n\n\nif __name__ == \"__main__\":\n num_samples = 40\n\n @jit\n def objective(params, t):\n rng = random.PRNGKey(t)\n return -batch_elbo(funnel_log_density, rng, params, num_samples)\n\n # Set up figure.\n fig = plt.figure(figsize=(8,8), facecolor='white')\n ax = fig.add_subplot(111, frameon=False)\n plt.ion()\n plt.show(block=False)\n x_limits = [-2, 2]\n y_limits = [-4, 2]\n target_dist = lambda x, _: np.exp(funnel_log_density(x))\n approx_dist = lambda x, params: np.exp(diag_gaussian_logpdf(x, *params))\n\n def callback(params, t):\n print(\"Iteration {} lower bound {}\".format(t, objective(params, t)))\n\n plt.cla()\n X, Y, Z = mesh_eval(target_dist, x_limits, y_limits, 1)\n ax.contour(X, Y, Z, cmap='summer')\n X, Y, Z = mesh_eval(approx_dist, x_limits, y_limits, params)\n ax.contour(X, Y, Z, cmap='winter')\n ax.set_xlim(x_limits)\n ax.set_ylim(y_limits)\n ax.set_yticks([])\n ax.set_xticks([])\n\n # Plot random samples from variational distribution.\n # Here we clone the rng used in computing the objective\n # so that we can show exactly the same samples.\n rngs = random.split(random.PRNGKey(t), num_samples)\n samples = vmap(diag_gaussian_sample, in_axes=(0, None, None))(rngs, *params)\n ax.plot(samples[:, 0], samples[:, 1], 'b.')\n\n plt.draw()\n plt.pause(1.0/60.0)\n\n\n # Set up optimizer.\n D = 2\n init_mean = np.zeros(D)\n init_std = np.zeros(D)\n init_params = (init_mean, init_std)\n opt_init, opt_update, get_params = optimizers.momentum(step_size=0.1, mass=0.9)\n opt_state = opt_init(init_params)\n\n @jit\n def update(i, opt_state):\n params = get_params(opt_state)\n gradient = grad(objective)(params, i)\n return opt_update(i, gradient, opt_state)\n\n\n # Main loop.\n print(\"Optimizing variational parameters...\")\n for t in range(100):\n opt_state = update(t, opt_state)\n params = get_params(opt_state)\n callback(params, t)\n plt.show(block=True)\n", "path": "examples/advi.py"}], "after_files": [{"content": "# Copyright 2018 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Automatic differentiation variational inference in Numpy and JAX.\n\nThis demo fits a Gaussian approximation to an intractable, unnormalized\ndensity, by differentiating through a Monte Carlo estimate of the\nvariational evidence lower bound (ELBO).\"\"\"\n\n\nfrom functools import partial\nimport matplotlib.pyplot as plt\n\nfrom jax.api import jit, grad, vmap\nfrom 
jax import random\nfrom jax.experimental import optimizers\nimport jax.numpy as np\nimport jax.scipy.stats.norm as norm\n\n\n# ========= Functions to define the evidence lower bound. =========\n\ndef diag_gaussian_sample(rng, mean, log_std):\n # Take a single sample from a diagonal multivariate Gaussian.\n return mean + np.exp(log_std) * random.normal(rng, mean.shape)\n\ndef diag_gaussian_logpdf(x, mean, log_std):\n # Evaluate a single point on a diagonal multivariate Gaussian.\n return np.sum(vmap(norm.logpdf)(x, mean, np.exp(log_std)))\n\ndef elbo(logprob, rng, mean, log_std):\n # Single-sample Monte Carlo estimate of the variational lower bound.\n sample = diag_gaussian_sample(rng, mean, log_std)\n return logprob(sample) - diag_gaussian_logpdf(sample, mean, log_std)\n\ndef batch_elbo(logprob, rng, params, num_samples):\n # Average over a batch of random samples.\n rngs = random.split(rng, num_samples)\n vectorized_elbo = vmap(partial(elbo, logprob), in_axes=(0, None, None))\n return np.mean(vectorized_elbo(rngs, *params))\n\n\n# ========= Helper function for plotting. =========\n\n@partial(jit, static_argnums=(0, 1, 2, 4))\ndef _mesh_eval(func, x_limits, y_limits, params, num_ticks):\n # Evaluate func on a 2D grid defined by x_limits and y_limits.\n x = np.linspace(*x_limits, num=num_ticks)\n y = np.linspace(*y_limits, num=num_ticks)\n X, Y = np.meshgrid(x, y)\n xy_vec = np.stack([X.ravel(), Y.ravel()]).T\n zs = vmap(func, in_axes=(0, None))(xy_vec, params)\n return X, Y, zs.reshape(X.shape)\n\ndef mesh_eval(func, x_limits, y_limits, params, num_ticks=101):\n return _mesh_eval(func, x_limits, y_limits, params, num_ticks)\n\n# ========= Define an intractable unnormalized density =========\n\ndef funnel_log_density(params):\n return norm.logpdf(params[0], 0, np.exp(params[1])) + \\\n norm.logpdf(params[1], 0, 1.35)\n\n\nif __name__ == \"__main__\":\n num_samples = 40\n\n @jit\n def objective(params, t):\n rng = random.PRNGKey(t)\n return -batch_elbo(funnel_log_density, rng, params, num_samples)\n\n # Set up figure.\n fig = plt.figure(figsize=(8,8), facecolor='white')\n ax = fig.add_subplot(111, frameon=False)\n plt.ion()\n plt.show(block=False)\n x_limits = [-2, 2]\n y_limits = [-4, 2]\n target_dist = lambda x, _: np.exp(funnel_log_density(x))\n approx_dist = lambda x, params: np.exp(diag_gaussian_logpdf(x, *params))\n\n def callback(params, t):\n print(\"Iteration {} lower bound {}\".format(t, objective(params, t)))\n\n plt.cla()\n X, Y, Z = mesh_eval(target_dist, x_limits, y_limits, 1)\n ax.contour(X, Y, Z, cmap='summer')\n X, Y, Z = mesh_eval(approx_dist, x_limits, y_limits, params)\n ax.contour(X, Y, Z, cmap='winter')\n ax.set_xlim(x_limits)\n ax.set_ylim(y_limits)\n ax.set_yticks([])\n ax.set_xticks([])\n\n # Plot random samples from variational distribution.\n # Here we clone the rng used in computing the objective\n # so that we can show exactly the same samples.\n rngs = random.split(random.PRNGKey(t), num_samples)\n samples = vmap(diag_gaussian_sample, in_axes=(0, None, None))(rngs, *params)\n ax.plot(samples[:, 0], samples[:, 1], 'b.')\n\n plt.draw()\n plt.pause(1.0/60.0)\n\n\n # Set up optimizer.\n D = 2\n init_mean = np.zeros(D)\n init_std = np.zeros(D)\n init_params = (init_mean, init_std)\n opt_init, opt_update, get_params = optimizers.momentum(step_size=0.1, mass=0.9)\n opt_state = opt_init(init_params)\n\n @jit\n def update(i, opt_state):\n params = get_params(opt_state)\n gradient = grad(objective)(params, i)\n return opt_update(i, gradient, opt_state)\n\n\n # Main 
loop.\n print(\"Optimizing variational parameters...\")\n for t in range(100):\n opt_state = update(t, opt_state)\n params = get_params(opt_state)\n callback(params, t)\n plt.show(block=True)\n", "path": "examples/advi.py"}]} | 1,884 | 250 |
gh_patches_debug_15104 | rasdani/github-patches | git_diff | dotkom__onlineweb4-606 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Order of events in the event archive is not chronological
As it is now:
28.11.13
04.04.14
16.01.14
23.11.13
27.02.14
and so on...
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/events/views.py`
Content:
```
1 #-*- coding: utf-8 -*-
2
3 import datetime
4
5 from django.utils import timezone
6
7 from django.conf import settings
8 from django.contrib import messages
9 from django.contrib.auth.decorators import login_required, user_passes_test
10 from django.core.urlresolvers import reverse
11 from django.http import HttpResponseRedirect
12 from django.shortcuts import render, get_object_or_404, redirect
13 from django.utils.translation import ugettext as _
14
15 import watson
16
17 from apps.events.forms import CaptchaForm
18 from apps.events.models import Event, AttendanceEvent, Attendee
19 from apps.events.pdf_generator import EventPDF
20
21
22 def index(request):
23 return render(request, 'events/index.html', {})
24
25 def details(request, event_id, event_slug):
26 event = get_object_or_404(Event, pk=event_id)
27
28 is_attendance_event = False
29 user_anonymous = True
30 user_attending = False
31 place_on_wait_list = 0
32 will_be_on_wait_list = False
33 rules = []
34 user_status = False
35
36 try:
37 attendance_event = AttendanceEvent.objects.get(pk=event_id)
38 is_attendance_event = True
39 form = CaptchaForm(user=request.user)
40
41 if attendance_event.rule_bundles:
42 for rule_bundle in attendance_event.rule_bundles.all():
43 rules.append(rule_bundle.get_rule_strings)
44
45 if request.user.is_authenticated():
46 user_anonymous = False
47 if attendance_event.is_attendee(request.user):
48 user_attending = True
49
50
51 will_be_on_wait_list = attendance_event.will_i_be_on_wait_list
52
53 user_status = event.is_eligible_for_signup(request.user)
54
55 # Check if this user is on the waitlist
56 place_on_wait_list = event.what_place_is_user_on_wait_list(request.user)
57
58 except AttendanceEvent.DoesNotExist:
59 pass
60
61 if is_attendance_event:
62 context = {
63 'event': event,
64 'attendance_event': attendance_event,
65 'user_anonymous': user_anonymous,
66 'user_attending': user_attending,
67 'will_be_on_wait_list': will_be_on_wait_list,
68 'rules': rules,
69 'user_status': user_status,
70 'place_on_wait_list': int(place_on_wait_list),
71 #'position_in_wait_list': position_in_wait_list,
72 'captcha_form': form,
73 }
74
75 return render(request, 'events/details.html', context)
76 else:
77 return render(request, 'events/details.html', {'event': event})
78
79
80 def get_attendee(attendee_id):
81 return get_object_or_404(Attendee, pk=attendee_id)
82
83 @login_required
84 def attendEvent(request, event_id):
85
86 event = get_object_or_404(Event, pk=event_id)
87
88 if not request.POST:
89 messages.error(request, _(u'Vennligst fyll ut skjemaet.'))
90 return redirect(event)
91 form = CaptchaForm(request.POST, user=request.user)
92
93 if not form.is_valid():
94 if not 'mark_rules' in request.POST and not request.user.mark_rules:
95 error_message = u'Du må godta prikkreglene for å melde deg på.'
96 else:
97 error_message = u'Du klarte ikke captcha-en. Er du en bot?'
98 messages.error(request, _(error_message))
99 return redirect(event)
100
101 # Check if the user is eligible to attend this event.
102 # If not, an error message will be present in the returned dict
103 attendance_event = event.attendance_event
104
105 response = event.is_eligible_for_signup(request.user);
106
107 if response['status']:
108 # First time accepting mark rules
109 if 'mark_rules' in form.cleaned_data:
110 request.user.mark_rules = True
111 request.user.save()
112 Attendee(event=attendance_event, user=request.user).save()
113 messages.success(request, _(u"Du er nå påmeldt på arrangementet!"))
114 return redirect(event)
115 else:
116 messages.error(request, response['message'])
117 return redirect(event)
118
119 @login_required
120 def unattendEvent(request, event_id):
121
122 event = get_object_or_404(Event, pk=event_id)
123 attendance_event = event.attendance_event
124 Attendee.objects.get(event=attendance_event, user=request.user).delete()
125
126 messages.success(request, _(u"Du ble meldt av arrangementet."))
127 return redirect(event)
128
129 def search_events(request):
130 query = request.GET.get('query')
131 filters = {
132 'future' : request.GET.get('future'),
133 'myevents' : request.GET.get('myevents')
134 }
135 events = _search_indexed(request, query, filters)
136
137 return render(request, 'events/search.html', {'events': events})
138
139
140 def _search_indexed(request, query, filters):
141 results = []
142 kwargs = {}
143
144 if filters['future'] == 'true':
145 kwargs['event_start__gte'] = timezone.now()
146
147 if filters['myevents'] == 'true':
148 kwargs['attendance_event__attendees'] = request.user
149
150 if query:
151 for result in watson.search(query, models=(
152 Event.objects.filter(**kwargs).prefetch_related(
153 'attendance_event', 'attendance_event__attendees'),)):
154 results.append(result.object)
155 return results[:10]
156
157 return Event.objects.filter(**kwargs).prefetch_related(
158 'attendance_event', 'attendance_event__attendees')
159
160
161 @login_required()
162 @user_passes_test(lambda u: u.groups.filter(name='Komiteer').count() == 1)
163 def generate_pdf(request, event_id):
164 event = get_object_or_404(Event, pk=event_id)
165 return EventPDF(event).render_pdf()
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/apps/events/views.py b/apps/events/views.py
--- a/apps/events/views.py
+++ b/apps/events/views.py
@@ -147,15 +147,15 @@
if filters['myevents'] == 'true':
kwargs['attendance_event__attendees'] = request.user
+ events = Event.objects.filter(**kwargs).order_by('event_start').prefetch_related(
+ 'attendance_event', 'attendance_event__attendees')
+
if query:
- for result in watson.search(query, models=(
- Event.objects.filter(**kwargs).prefetch_related(
- 'attendance_event', 'attendance_event__attendees'),)):
+ for result in watson.search(query, models=(events,)):
results.append(result.object)
return results[:10]
- return Event.objects.filter(**kwargs).prefetch_related(
- 'attendance_event', 'attendance_event__attendees')
+ return events
@login_required()
| {"golden_diff": "diff --git a/apps/events/views.py b/apps/events/views.py\n--- a/apps/events/views.py\n+++ b/apps/events/views.py\n@@ -147,15 +147,15 @@\n if filters['myevents'] == 'true':\n kwargs['attendance_event__attendees'] = request.user\n \n+ events = Event.objects.filter(**kwargs).order_by('event_start').prefetch_related(\n+ 'attendance_event', 'attendance_event__attendees')\n+\n if query:\n- for result in watson.search(query, models=(\n- Event.objects.filter(**kwargs).prefetch_related(\n- 'attendance_event', 'attendance_event__attendees'),)):\n+ for result in watson.search(query, models=(events,)):\n results.append(result.object)\n return results[:10]\n \n- return Event.objects.filter(**kwargs).prefetch_related(\n- 'attendance_event', 'attendance_event__attendees')\n+ return events\n \n \n @login_required()\n", "issue": "Order of events in event archive is non-logical\nAs it is now:\n\n28.11.13\n04.04.14\n16.01.14\n23.11.13\n27.02.14\n\nand so on...\n\n", "before_files": [{"content": "#-*- coding: utf-8 -*-\n\nimport datetime\n\nfrom django.utils import timezone\n\nfrom django.conf import settings\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required, user_passes_test\nfrom django.core.urlresolvers import reverse\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import render, get_object_or_404, redirect\nfrom django.utils.translation import ugettext as _\n\nimport watson\n\nfrom apps.events.forms import CaptchaForm\nfrom apps.events.models import Event, AttendanceEvent, Attendee\nfrom apps.events.pdf_generator import EventPDF\n\n\ndef index(request):\n return render(request, 'events/index.html', {})\n\ndef details(request, event_id, event_slug):\n event = get_object_or_404(Event, pk=event_id)\n\n is_attendance_event = False\n user_anonymous = True\n user_attending = False\n place_on_wait_list = 0\n will_be_on_wait_list = False\n rules = []\n user_status = False\n\n try:\n attendance_event = AttendanceEvent.objects.get(pk=event_id)\n is_attendance_event = True\n form = CaptchaForm(user=request.user)\n\n if attendance_event.rule_bundles:\n for rule_bundle in attendance_event.rule_bundles.all():\n rules.append(rule_bundle.get_rule_strings)\n\n if request.user.is_authenticated():\n user_anonymous = False\n if attendance_event.is_attendee(request.user):\n user_attending = True\n\n \n will_be_on_wait_list = attendance_event.will_i_be_on_wait_list\n\n user_status = event.is_eligible_for_signup(request.user)\n\n # Check if this user is on the waitlist\n place_on_wait_list = event.what_place_is_user_on_wait_list(request.user)\n\n except AttendanceEvent.DoesNotExist:\n pass\n\n if is_attendance_event:\n context = {\n 'event': event,\n 'attendance_event': attendance_event,\n 'user_anonymous': user_anonymous,\n 'user_attending': user_attending,\n 'will_be_on_wait_list': will_be_on_wait_list,\n 'rules': rules,\n 'user_status': user_status,\n 'place_on_wait_list': int(place_on_wait_list),\n #'position_in_wait_list': position_in_wait_list,\n 'captcha_form': form,\n }\n \n return render(request, 'events/details.html', context)\n else:\n return render(request, 'events/details.html', {'event': event})\n\n\ndef get_attendee(attendee_id):\n return get_object_or_404(Attendee, pk=attendee_id)\n\n@login_required\ndef attendEvent(request, event_id):\n \n event = get_object_or_404(Event, pk=event_id)\n\n if not request.POST:\n messages.error(request, _(u'Vennligst fyll ut skjemaet.'))\n return redirect(event)\n form = 
CaptchaForm(request.POST, user=request.user)\n\n if not form.is_valid():\n if not 'mark_rules' in request.POST and not request.user.mark_rules:\n error_message = u'Du m\u00e5 godta prikkreglene for \u00e5 melde deg p\u00e5.'\n else:\n error_message = u'Du klarte ikke captcha-en. Er du en bot?'\n messages.error(request, _(error_message))\n return redirect(event)\n\n # Check if the user is eligible to attend this event.\n # If not, an error message will be present in the returned dict\n attendance_event = event.attendance_event\n\n response = event.is_eligible_for_signup(request.user);\n\n if response['status']: \n # First time accepting mark rules\n if 'mark_rules' in form.cleaned_data:\n request.user.mark_rules = True\n request.user.save()\n Attendee(event=attendance_event, user=request.user).save()\n messages.success(request, _(u\"Du er n\u00e5 p\u00e5meldt p\u00e5 arrangementet!\"))\n return redirect(event)\n else:\n messages.error(request, response['message'])\n return redirect(event)\n\n@login_required\ndef unattendEvent(request, event_id):\n\n event = get_object_or_404(Event, pk=event_id)\n attendance_event = event.attendance_event\n Attendee.objects.get(event=attendance_event, user=request.user).delete()\n\n messages.success(request, _(u\"Du ble meldt av arrangementet.\"))\n return redirect(event)\n\ndef search_events(request):\n query = request.GET.get('query')\n filters = {\n 'future' : request.GET.get('future'),\n 'myevents' : request.GET.get('myevents')\n }\n events = _search_indexed(request, query, filters)\n\n return render(request, 'events/search.html', {'events': events})\n\n\ndef _search_indexed(request, query, filters):\n results = []\n kwargs = {}\n\n if filters['future'] == 'true':\n kwargs['event_start__gte'] = timezone.now()\n\n if filters['myevents'] == 'true':\n kwargs['attendance_event__attendees'] = request.user\n\n if query:\n for result in watson.search(query, models=(\n Event.objects.filter(**kwargs).prefetch_related(\n 'attendance_event', 'attendance_event__attendees'),)):\n results.append(result.object)\n return results[:10]\n\n return Event.objects.filter(**kwargs).prefetch_related(\n 'attendance_event', 'attendance_event__attendees')\n\n\n@login_required()\n@user_passes_test(lambda u: u.groups.filter(name='Komiteer').count() == 1)\ndef generate_pdf(request, event_id):\n event = get_object_or_404(Event, pk=event_id)\n return EventPDF(event).render_pdf()", "path": "apps/events/views.py"}], "after_files": [{"content": "#-*- coding: utf-8 -*-\n\nimport datetime\n\nfrom django.utils import timezone\n\nfrom django.conf import settings\nfrom django.contrib import messages\nfrom django.contrib.auth.decorators import login_required, user_passes_test\nfrom django.core.urlresolvers import reverse\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import render, get_object_or_404, redirect\nfrom django.utils.translation import ugettext as _\n\nimport watson\n\nfrom apps.events.forms import CaptchaForm\nfrom apps.events.models import Event, AttendanceEvent, Attendee\nfrom apps.events.pdf_generator import EventPDF\n\n\ndef index(request):\n return render(request, 'events/index.html', {})\n\ndef details(request, event_id, event_slug):\n event = get_object_or_404(Event, pk=event_id)\n\n is_attendance_event = False\n user_anonymous = True\n user_attending = False\n place_on_wait_list = 0\n will_be_on_wait_list = False\n rules = []\n user_status = False\n\n try:\n attendance_event = AttendanceEvent.objects.get(pk=event_id)\n is_attendance_event = True\n form 
= CaptchaForm(user=request.user)\n\n if attendance_event.rule_bundles:\n for rule_bundle in attendance_event.rule_bundles.all():\n rules.append(rule_bundle.get_rule_strings)\n\n if request.user.is_authenticated():\n user_anonymous = False\n if attendance_event.is_attendee(request.user):\n user_attending = True\n\n \n will_be_on_wait_list = attendance_event.will_i_be_on_wait_list\n\n user_status = event.is_eligible_for_signup(request.user)\n\n # Check if this user is on the waitlist\n place_on_wait_list = event.what_place_is_user_on_wait_list(request.user)\n\n except AttendanceEvent.DoesNotExist:\n pass\n\n if is_attendance_event:\n context = {\n 'event': event,\n 'attendance_event': attendance_event,\n 'user_anonymous': user_anonymous,\n 'user_attending': user_attending,\n 'will_be_on_wait_list': will_be_on_wait_list,\n 'rules': rules,\n 'user_status': user_status,\n 'place_on_wait_list': int(place_on_wait_list),\n #'position_in_wait_list': position_in_wait_list,\n 'captcha_form': form,\n }\n \n return render(request, 'events/details.html', context)\n else:\n return render(request, 'events/details.html', {'event': event})\n\n\ndef get_attendee(attendee_id):\n return get_object_or_404(Attendee, pk=attendee_id)\n\n@login_required\ndef attendEvent(request, event_id):\n \n event = get_object_or_404(Event, pk=event_id)\n\n if not request.POST:\n messages.error(request, _(u'Vennligst fyll ut skjemaet.'))\n return redirect(event)\n form = CaptchaForm(request.POST, user=request.user)\n\n if not form.is_valid():\n if not 'mark_rules' in request.POST and not request.user.mark_rules:\n error_message = u'Du m\u00e5 godta prikkreglene for \u00e5 melde deg p\u00e5.'\n else:\n error_message = u'Du klarte ikke captcha-en. Er du en bot?'\n messages.error(request, _(error_message))\n return redirect(event)\n\n # Check if the user is eligible to attend this event.\n # If not, an error message will be present in the returned dict\n attendance_event = event.attendance_event\n\n response = event.is_eligible_for_signup(request.user);\n\n if response['status']: \n # First time accepting mark rules\n if 'mark_rules' in form.cleaned_data:\n request.user.mark_rules = True\n request.user.save()\n Attendee(event=attendance_event, user=request.user).save()\n messages.success(request, _(u\"Du er n\u00e5 p\u00e5meldt p\u00e5 arrangementet!\"))\n return redirect(event)\n else:\n messages.error(request, response['message'])\n return redirect(event)\n\n@login_required\ndef unattendEvent(request, event_id):\n\n event = get_object_or_404(Event, pk=event_id)\n attendance_event = event.attendance_event\n Attendee.objects.get(event=attendance_event, user=request.user).delete()\n\n messages.success(request, _(u\"Du ble meldt av arrangementet.\"))\n return redirect(event)\n\ndef search_events(request):\n query = request.GET.get('query')\n filters = {\n 'future' : request.GET.get('future'),\n 'myevents' : request.GET.get('myevents')\n }\n events = _search_indexed(request, query, filters)\n\n return render(request, 'events/search.html', {'events': events})\n\n\ndef _search_indexed(request, query, filters):\n results = []\n kwargs = {}\n\n if filters['future'] == 'true':\n kwargs['event_start__gte'] = timezone.now()\n\n if filters['myevents'] == 'true':\n kwargs['attendance_event__attendees'] = request.user\n\n events = Event.objects.filter(**kwargs).order_by('event_start').prefetch_related(\n 'attendance_event', 'attendance_event__attendees')\n\n if query:\n for result in watson.search(query, models=(events,)):\n 
results.append(result.object)\n return results[:10]\n\n return events\n\n\n@login_required()\n@user_passes_test(lambda u: u.groups.filter(name='Komiteer').count() == 1)\ndef generate_pdf(request, event_id):\n event = get_object_or_404(Event, pk=event_id)\n return EventPDF(event).render_pdf()", "path": "apps/events/views.py"}]} | 1,936 | 209 |
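
A regression test for this fix can be sketched directly against `_search_indexed`: the patched view should hand back events ordered by `event_start` whether or not a query is given. The test below is a hypothetical sketch — it assumes a configured Django test environment for this project, and that `Event` rows can be created with only the fields shown, which the record above does not confirm.

```python
# Hypothetical regression test for the ordering fix; assumes a configured
# Django test setup and that Event accepts just these fields on create.
from datetime import timedelta

from django.test import RequestFactory, TestCase
from django.utils import timezone

from apps.events.models import Event
from apps.events.views import _search_indexed


class EventArchiveOrderingTest(TestCase):
    def test_events_sorted_by_start_date(self):
        now = timezone.now()
        # Create events deliberately out of chronological order.
        for offset in (30, 5, 90):
            Event.objects.create(
                title='e%d' % offset,
                event_start=now + timedelta(days=offset),
                event_end=now + timedelta(days=offset, hours=2))
        request = RequestFactory().get('/events/search/')
        events = _search_indexed(request, query='',
                                 filters={'future': 'true', 'myevents': 'false'})
        starts = [e.event_start for e in events]
        self.assertEqual(starts, sorted(starts))
```
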
gh_patches_debug_38138 | rasdani/github-patches | git_diff | aws__aws-cli-483 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Option to associate public ip address in ec2 run-instance
There doesn't seem to be any way to associate a public ip address without also adding a network interface with the --network-interfaces parameter. Is it possible for this to be a top level parameter?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `awscli/customizations/ec2runinstances.py`
Content:
```
1 # Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License"). You
4 # may not use this file except in compliance with the License. A copy of
5 # the License is located at
6 #
7 # http://aws.amazon.com/apache2.0/
8 #
9 # or in the "license" file accompanying this file. This file is
10 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 # ANY KIND, either express or implied. See the License for the specific
12 # language governing permissions and limitations under the License.
13 """
14 This customization adds two new parameters to the ``ec2 run-instance``
15 command. The first, ``--secondary-private-ip-addresses`` allows a list
16 of IP addresses within the specified subnet to be associated with the
17 new instance. The second, ``--secondary-ip-address-count`` allows you
18 to specify how many additional IP addresses you want but the actual
19 address will be assigned for you.
20
21 This functionality (and much more) is also available using the
22 ``--network-interfaces`` complex argument. This just makes two of
23 the most commonly used features available more easily.
24 """
25 from awscli.arguments import CustomArgument
26
27
28 # --secondary-private-ip-address
29 SECONDARY_PRIVATE_IP_ADDRESSES_DOCS = (
30 '[EC2-VPC] A secondary private IP address for the network interface '
31 'or instance. You can specify this multiple times to assign multiple '
32 'secondary IP addresses. If you want additional private IP addresses '
33 'but do not need a specific address, use the '
34 '--secondary-private-ip-address-count option.')
35
36 # --secondary-private-ip-address-count
37 SECONDARY_PRIVATE_IP_ADDRESS_COUNT_DOCS = (
38 '[EC2-VPC] The number of secondary IP addresses to assign to '
39 'the network interface or instance.')
40
41
42 def _add_params(argument_table, operation, **kwargs):
43 arg = SecondaryPrivateIpAddressesArgument(
44 name='secondary-private-ip-addresses',
45 help_text=SECONDARY_PRIVATE_IP_ADDRESSES_DOCS)
46 argument_table['secondary-private-ip-addresses'] = arg
47 arg = SecondaryPrivateIpAddressCountArgument(
48 name='secondary-private-ip-address-count',
49 help_text=SECONDARY_PRIVATE_IP_ADDRESS_COUNT_DOCS)
50 argument_table['secondary-private-ip-address-count'] = arg
51
52
53 def _check_args(parsed_args, **kwargs):
54 # This function checks the parsed args. If the user specified
55 # the --network-interfaces option with any of the scalar options we
56 # raise an error.
57 arg_dict = vars(parsed_args)
58 if arg_dict['network_interfaces']:
59 for key in ('secondary_private_ip_addresses',
60 'secondary_private_ip_address_count'):
61 if arg_dict[key]:
62 msg = ('Mixing the --network-interfaces option '
63 'with the simple, scalar options is '
64 'not supported.')
65 raise ValueError(msg)
66
67 EVENTS = [
68 ('building-argument-table.ec2.run-instances', _add_params),
69 ('operation-args-parsed.ec2.run-instances', _check_args),
70 ]
71
72
73 def register_runinstances(event_handler):
74 # Register all of the events for customizing BundleInstance
75 for event, handler in EVENTS:
76 event_handler.register(event, handler)
77
78
79 def _build_network_interfaces(params, key, value):
80 # Build up the NetworkInterfaces data structure
81 if 'network_interfaces' not in params:
82 params['network_interfaces'] = [{'DeviceIndex': 0}]
83
84 if key == 'PrivateIpAddresses':
85 if 'PrivateIpAddresses' not in params['network_interfaces'][0]:
86 params['network_interfaces'][0]['PrivateIpAddresses'] = value
87 else:
88 params['network_interfaces'][0][key] = value
89
90
91 class SecondaryPrivateIpAddressesArgument(CustomArgument):
92
93 def add_to_parser(self, parser, cli_name=None):
94 parser.add_argument(self.cli_name, dest=self.py_name,
95 default=self._default, nargs='*')
96
97 def add_to_params(self, parameters, value):
98 if value:
99 value = [{'PrivateIpAddress': v, 'Primary': False} for
100 v in value]
101 _build_network_interfaces(parameters,
102 'PrivateIpAddresses',
103 value)
104
105
106 class SecondaryPrivateIpAddressCountArgument(CustomArgument):
107
108 def add_to_parser(self, parser, cli_name=None):
109 parser.add_argument(self.cli_name, dest=self.py_name,
110 default=self._default, type=int)
111
112 def add_to_params(self, parameters, value):
113 if value:
114 _build_network_interfaces(parameters,
115 'SecondaryPrivateIpAddressCount',
116 value)
117
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/awscli/customizations/ec2runinstances.py b/awscli/customizations/ec2runinstances.py
--- a/awscli/customizations/ec2runinstances.py
+++ b/awscli/customizations/ec2runinstances.py
@@ -38,6 +38,12 @@
'[EC2-VPC] The number of secondary IP addresses to assign to '
'the network interface or instance.')
+# --associate-public-ip-address
+ASSOCIATE_PUBLIC_IP_ADDRESS_DOCS = (
+ '[EC2-VPC] If specified a public IP address will be assigned '
+ 'to the new instance in a VPC.')
+
+
def _add_params(argument_table, operation, **kwargs):
arg = SecondaryPrivateIpAddressesArgument(
@@ -48,6 +54,16 @@
name='secondary-private-ip-address-count',
help_text=SECONDARY_PRIVATE_IP_ADDRESS_COUNT_DOCS)
argument_table['secondary-private-ip-address-count'] = arg
+ arg = AssociatePublicIpAddressArgument(
+ name='associate-public-ip-address',
+ help_text=ASSOCIATE_PUBLIC_IP_ADDRESS_DOCS,
+ action='store_true', group_name='associate_public_ip')
+ argument_table['associate-public-ip-address'] = arg
+ arg = NoAssociatePublicIpAddressArgument(
+ name='no-associate-public-ip-address',
+ help_text=ASSOCIATE_PUBLIC_IP_ADDRESS_DOCS,
+ action='store_false', group_name='associate_public_ip')
+ argument_table['no-associate-public-ip-address'] = arg
def _check_args(parsed_args, **kwargs):
@@ -57,7 +73,8 @@
arg_dict = vars(parsed_args)
if arg_dict['network_interfaces']:
for key in ('secondary_private_ip_addresses',
- 'secondary_private_ip_address_count'):
+ 'secondary_private_ip_address_count',
+ 'associate_public_ip_address'):
if arg_dict[key]:
msg = ('Mixing the --network-interfaces option '
'with the simple, scalar options is '
@@ -114,3 +131,21 @@
_build_network_interfaces(parameters,
'SecondaryPrivateIpAddressCount',
value)
+
+
+class AssociatePublicIpAddressArgument(CustomArgument):
+
+ def add_to_params(self, parameters, value):
+ if value is True:
+ _build_network_interfaces(parameters,
+ 'AssociatePublicIpAddress',
+ value)
+
+
+class NoAssociatePublicIpAddressArgument(CustomArgument):
+
+ def add_to_params(self, parameters, value):
+ if value is False:
+ _build_network_interfaces(parameters,
+ 'AssociatePublicIpAddress',
+ value)
| {"golden_diff": "diff --git a/awscli/customizations/ec2runinstances.py b/awscli/customizations/ec2runinstances.py\n--- a/awscli/customizations/ec2runinstances.py\n+++ b/awscli/customizations/ec2runinstances.py\n@@ -38,6 +38,12 @@\n '[EC2-VPC] The number of secondary IP addresses to assign to '\n 'the network interface or instance.')\n \n+# --associate-public-ip-address\n+ASSOCIATE_PUBLIC_IP_ADDRESS_DOCS = (\n+ '[EC2-VPC] If specified a public IP address will be assigned '\n+ 'to the new instance in a VPC.')\n+\n+\n \n def _add_params(argument_table, operation, **kwargs):\n arg = SecondaryPrivateIpAddressesArgument(\n@@ -48,6 +54,16 @@\n name='secondary-private-ip-address-count',\n help_text=SECONDARY_PRIVATE_IP_ADDRESS_COUNT_DOCS)\n argument_table['secondary-private-ip-address-count'] = arg\n+ arg = AssociatePublicIpAddressArgument(\n+ name='associate-public-ip-address',\n+ help_text=ASSOCIATE_PUBLIC_IP_ADDRESS_DOCS,\n+ action='store_true', group_name='associate_public_ip')\n+ argument_table['associate-public-ip-address'] = arg\n+ arg = NoAssociatePublicIpAddressArgument(\n+ name='no-associate-public-ip-address',\n+ help_text=ASSOCIATE_PUBLIC_IP_ADDRESS_DOCS,\n+ action='store_false', group_name='associate_public_ip')\n+ argument_table['no-associate-public-ip-address'] = arg\n \n \n def _check_args(parsed_args, **kwargs):\n@@ -57,7 +73,8 @@\n arg_dict = vars(parsed_args)\n if arg_dict['network_interfaces']:\n for key in ('secondary_private_ip_addresses',\n- 'secondary_private_ip_address_count'):\n+ 'secondary_private_ip_address_count',\n+ 'associate_public_ip_address'):\n if arg_dict[key]:\n msg = ('Mixing the --network-interfaces option '\n 'with the simple, scalar options is '\n@@ -114,3 +131,21 @@\n _build_network_interfaces(parameters,\n 'SecondaryPrivateIpAddressCount',\n value)\n+\n+\n+class AssociatePublicIpAddressArgument(CustomArgument):\n+\n+ def add_to_params(self, parameters, value):\n+ if value is True:\n+ _build_network_interfaces(parameters,\n+ 'AssociatePublicIpAddress',\n+ value)\n+\n+\n+class NoAssociatePublicIpAddressArgument(CustomArgument):\n+\n+ def add_to_params(self, parameters, value):\n+ if value is False:\n+ _build_network_interfaces(parameters,\n+ 'AssociatePublicIpAddress',\n+ value)\n", "issue": "Option to associate public ip address in ec2 run-instance\nThere doesn't seem to be any way to associate a public ip address without also adding a network interface with the --network-interfaces parameter. Is it possible for this to be a top level parameter?\n\n", "before_files": [{"content": "# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\n\"\"\"\nThis customization adds two new parameters to the ``ec2 run-instance``\ncommand. The first, ``--secondary-private-ip-addresses`` allows a list\nof IP addresses within the specified subnet to be associated with the\nnew instance. 
The second, ``--secondary-ip-address-count`` allows you\nto specify how many additional IP addresses you want but the actual\naddress will be assigned for you.\n\nThis functionality (and much more) is also available using the\n``--network-interfaces`` complex argument. This just makes two of\nthe most commonly used features available more easily.\n\"\"\"\nfrom awscli.arguments import CustomArgument\n\n\n# --secondary-private-ip-address\nSECONDARY_PRIVATE_IP_ADDRESSES_DOCS = (\n '[EC2-VPC] A secondary private IP address for the network interface '\n 'or instance. You can specify this multiple times to assign multiple '\n 'secondary IP addresses. If you want additional private IP addresses '\n 'but do not need a specific address, use the '\n '--secondary-private-ip-address-count option.')\n\n# --secondary-private-ip-address-count\nSECONDARY_PRIVATE_IP_ADDRESS_COUNT_DOCS = (\n '[EC2-VPC] The number of secondary IP addresses to assign to '\n 'the network interface or instance.')\n\n\ndef _add_params(argument_table, operation, **kwargs):\n arg = SecondaryPrivateIpAddressesArgument(\n name='secondary-private-ip-addresses',\n help_text=SECONDARY_PRIVATE_IP_ADDRESSES_DOCS)\n argument_table['secondary-private-ip-addresses'] = arg\n arg = SecondaryPrivateIpAddressCountArgument(\n name='secondary-private-ip-address-count',\n help_text=SECONDARY_PRIVATE_IP_ADDRESS_COUNT_DOCS)\n argument_table['secondary-private-ip-address-count'] = arg\n\n\ndef _check_args(parsed_args, **kwargs):\n # This function checks the parsed args. If the user specified\n # the --network-interfaces option with any of the scalar options we\n # raise an error.\n arg_dict = vars(parsed_args)\n if arg_dict['network_interfaces']:\n for key in ('secondary_private_ip_addresses',\n 'secondary_private_ip_address_count'):\n if arg_dict[key]:\n msg = ('Mixing the --network-interfaces option '\n 'with the simple, scalar options is '\n 'not supported.')\n raise ValueError(msg)\n\nEVENTS = [\n ('building-argument-table.ec2.run-instances', _add_params),\n ('operation-args-parsed.ec2.run-instances', _check_args),\n ]\n\n\ndef register_runinstances(event_handler):\n # Register all of the events for customizing BundleInstance\n for event, handler in EVENTS:\n event_handler.register(event, handler)\n\n\ndef _build_network_interfaces(params, key, value):\n # Build up the NetworkInterfaces data structure\n if 'network_interfaces' not in params:\n params['network_interfaces'] = [{'DeviceIndex': 0}]\n\n if key == 'PrivateIpAddresses':\n if 'PrivateIpAddresses' not in params['network_interfaces'][0]:\n params['network_interfaces'][0]['PrivateIpAddresses'] = value\n else:\n params['network_interfaces'][0][key] = value\n\n\nclass SecondaryPrivateIpAddressesArgument(CustomArgument):\n\n def add_to_parser(self, parser, cli_name=None):\n parser.add_argument(self.cli_name, dest=self.py_name,\n default=self._default, nargs='*')\n\n def add_to_params(self, parameters, value):\n if value:\n value = [{'PrivateIpAddress': v, 'Primary': False} for\n v in value]\n _build_network_interfaces(parameters,\n 'PrivateIpAddresses',\n value)\n\n\nclass SecondaryPrivateIpAddressCountArgument(CustomArgument):\n\n def add_to_parser(self, parser, cli_name=None):\n parser.add_argument(self.cli_name, dest=self.py_name,\n default=self._default, type=int)\n\n def add_to_params(self, parameters, value):\n if value:\n _build_network_interfaces(parameters,\n 'SecondaryPrivateIpAddressCount',\n value)\n", "path": "awscli/customizations/ec2runinstances.py"}], "after_files": 
[{"content": "# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\"). You\n# may not use this file except in compliance with the License. A copy of\n# the License is located at\n#\n# http://aws.amazon.com/apache2.0/\n#\n# or in the \"license\" file accompanying this file. This file is\n# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF\n# ANY KIND, either express or implied. See the License for the specific\n# language governing permissions and limitations under the License.\n\"\"\"\nThis customization adds two new parameters to the ``ec2 run-instance``\ncommand. The first, ``--secondary-private-ip-addresses`` allows a list\nof IP addresses within the specified subnet to be associated with the\nnew instance. The second, ``--secondary-ip-address-count`` allows you\nto specify how many additional IP addresses you want but the actual\naddress will be assigned for you.\n\nThis functionality (and much more) is also available using the\n``--network-interfaces`` complex argument. This just makes two of\nthe most commonly used features available more easily.\n\"\"\"\nfrom awscli.arguments import CustomArgument\n\n\n# --secondary-private-ip-address\nSECONDARY_PRIVATE_IP_ADDRESSES_DOCS = (\n '[EC2-VPC] A secondary private IP address for the network interface '\n 'or instance. You can specify this multiple times to assign multiple '\n 'secondary IP addresses. If you want additional private IP addresses '\n 'but do not need a specific address, use the '\n '--secondary-private-ip-address-count option.')\n\n# --secondary-private-ip-address-count\nSECONDARY_PRIVATE_IP_ADDRESS_COUNT_DOCS = (\n '[EC2-VPC] The number of secondary IP addresses to assign to '\n 'the network interface or instance.')\n\n# --associate-public-ip-address\nASSOCIATE_PUBLIC_IP_ADDRESS_DOCS = (\n '[EC2-VPC] If specified a public IP address will be assigned '\n 'to the new instance in a VPC.')\n\n\n\ndef _add_params(argument_table, operation, **kwargs):\n arg = SecondaryPrivateIpAddressesArgument(\n name='secondary-private-ip-addresses',\n help_text=SECONDARY_PRIVATE_IP_ADDRESSES_DOCS)\n argument_table['secondary-private-ip-addresses'] = arg\n arg = SecondaryPrivateIpAddressCountArgument(\n name='secondary-private-ip-address-count',\n help_text=SECONDARY_PRIVATE_IP_ADDRESS_COUNT_DOCS)\n argument_table['secondary-private-ip-address-count'] = arg\n arg = AssociatePublicIpAddressArgument(\n name='associate-public-ip-address',\n help_text=ASSOCIATE_PUBLIC_IP_ADDRESS_DOCS,\n action='store_true', group_name='associate_public_ip')\n argument_table['associate-public-ip-address'] = arg\n arg = NoAssociatePublicIpAddressArgument(\n name='no-associate-public-ip-address',\n help_text=ASSOCIATE_PUBLIC_IP_ADDRESS_DOCS,\n action='store_false', group_name='associate_public_ip')\n argument_table['no-associate-public-ip-address'] = arg\n\n\ndef _check_args(parsed_args, **kwargs):\n # This function checks the parsed args. 
If the user specified\n # the --network-interfaces option with any of the scalar options we\n # raise an error.\n arg_dict = vars(parsed_args)\n if arg_dict['network_interfaces']:\n for key in ('secondary_private_ip_addresses',\n 'secondary_private_ip_address_count',\n 'associate_public_ip_address'):\n if arg_dict[key]:\n msg = ('Mixing the --network-interfaces option '\n 'with the simple, scalar options is '\n 'not supported.')\n raise ValueError(msg)\n\nEVENTS = [\n ('building-argument-table.ec2.run-instances', _add_params),\n ('operation-args-parsed.ec2.run-instances', _check_args),\n ]\n\n\ndef register_runinstances(event_handler):\n # Register all of the events for customizing BundleInstance\n for event, handler in EVENTS:\n event_handler.register(event, handler)\n\n\ndef _build_network_interfaces(params, key, value):\n # Build up the NetworkInterfaces data structure\n if 'network_interfaces' not in params:\n params['network_interfaces'] = [{'DeviceIndex': 0}]\n\n if key == 'PrivateIpAddresses':\n if 'PrivateIpAddresses' not in params['network_interfaces'][0]:\n params['network_interfaces'][0]['PrivateIpAddresses'] = value\n else:\n params['network_interfaces'][0][key] = value\n\n\nclass SecondaryPrivateIpAddressesArgument(CustomArgument):\n\n def add_to_parser(self, parser, cli_name=None):\n parser.add_argument(self.cli_name, dest=self.py_name,\n default=self._default, nargs='*')\n\n def add_to_params(self, parameters, value):\n if value:\n value = [{'PrivateIpAddress': v, 'Primary': False} for\n v in value]\n _build_network_interfaces(parameters,\n 'PrivateIpAddresses',\n value)\n\n\nclass SecondaryPrivateIpAddressCountArgument(CustomArgument):\n\n def add_to_parser(self, parser, cli_name=None):\n parser.add_argument(self.cli_name, dest=self.py_name,\n default=self._default, type=int)\n\n def add_to_params(self, parameters, value):\n if value:\n _build_network_interfaces(parameters,\n 'SecondaryPrivateIpAddressCount',\n value)\n\n\nclass AssociatePublicIpAddressArgument(CustomArgument):\n\n def add_to_params(self, parameters, value):\n if value is True:\n _build_network_interfaces(parameters,\n 'AssociatePublicIpAddress',\n value)\n\n\nclass NoAssociatePublicIpAddressArgument(CustomArgument):\n\n def add_to_params(self, parameters, value):\n if value is False:\n _build_network_interfaces(parameters,\n 'AssociatePublicIpAddress',\n value)\n", "path": "awscli/customizations/ec2runinstances.py"}]} | 1,547 | 569 |
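
Because `_build_network_interfaces` is a pure function, the new `AssociatePublicIpAddress` path can be checked without installing awscli at all. The snippet below copies the helper verbatim from the record above and asserts the parameter shape the new arguments produce; the merge of a second scalar option into the same device-index-0 entry is exactly the behaviour the patch relies on.

```python
# Standalone check of the helper from the patch; the function body is
# copied verbatim from the record above, so no awscli install is needed.
def _build_network_interfaces(params, key, value):
    # Build up the NetworkInterfaces data structure
    if 'network_interfaces' not in params:
        params['network_interfaces'] = [{'DeviceIndex': 0}]
    if key == 'PrivateIpAddresses':
        if 'PrivateIpAddresses' not in params['network_interfaces'][0]:
            params['network_interfaces'][0]['PrivateIpAddresses'] = value
    else:
        params['network_interfaces'][0][key] = value


params = {}
# --associate-public-ip-address sets the flag on device index 0 ...
_build_network_interfaces(params, 'AssociatePublicIpAddress', True)
assert params == {'network_interfaces': [{'DeviceIndex': 0,
                                          'AssociatePublicIpAddress': True}]}

# ... and a secondary scalar option merges into the same interface entry.
_build_network_interfaces(params, 'SecondaryPrivateIpAddressCount', 2)
assert params['network_interfaces'][0]['SecondaryPrivateIpAddressCount'] == 2
```
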
gh_patches_debug_31329 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-1174 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
don't rely on master branch for latest version number
## Feature
### Feature description
The `master` branch of dbt isn't really a thing anymore. Instead of relying on the [master](https://github.com/fishtown-analytics/dbt/blob/51f68e3aabcda57afbe5051983d1d17e092be665/dbt/version.py#L12) branch to grab the latest release number, we should pull it from PyPi.
We can use [this api](https://warehouse.readthedocs.io/api-reference/json/) to fetch [some JSON info](https://pypi.org/pypi/dbt/json) about dbt releases.
We need to confirm that pre-releases are not shown as the latest version for a package on PyPi.
### Who will this benefit?
dbt maintainers :)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dbt/version.py`
Content:
```
1 import re
2
3 import dbt.semver
4
5 try:
6 # For Python 3.0 and later
7 from urllib.request import urlopen
8 except ImportError:
9 # Fall back to Python 2's urllib2
10 from urllib2 import urlopen
11
12 REMOTE_VERSION_FILE = \
13 'https://raw.githubusercontent.com/fishtown-analytics/dbt/' \
14 'master/.bumpversion.cfg'
15
16
17 def get_version_string_from_text(contents):
18 matches = re.search(r"current_version = ([\.0-9a-z]+)", contents)
19 if matches is None or len(matches.groups()) != 1:
20 return ""
21 version = matches.groups()[0]
22 return version
23
24
25 def get_remote_version_file_contents(url=REMOTE_VERSION_FILE):
26 try:
27 f = urlopen(url)
28 contents = f.read()
29 except Exception:
30 contents = ''
31 if hasattr(contents, 'decode'):
32 contents = contents.decode('utf-8')
33 return contents
34
35
36 def get_latest_version():
37 contents = get_remote_version_file_contents()
38 if contents == '':
39 return None
40 version_string = get_version_string_from_text(contents)
41 return dbt.semver.VersionSpecifier.from_version_string(version_string)
42
43
44 def get_installed_version():
45 return dbt.semver.VersionSpecifier.from_version_string(__version__)
46
47
48 def get_version_information():
49 installed = get_installed_version()
50 latest = get_latest_version()
51
52 installed_s = installed.to_version_string(skip_matcher=True)
53 if latest is None:
54 latest_s = 'unknown'
55 else:
56 latest_s = latest.to_version_string(skip_matcher=True)
57
58 version_msg = ("installed version: {}\n"
59 " latest version: {}\n\n".format(installed_s, latest_s))
60
61 if latest is None:
62 return ("{}The latest version of dbt could not be determined!\n"
63 "Make sure that the following URL is accessible:\n{}"
64 .format(version_msg, REMOTE_VERSION_FILE))
65
66 if installed == latest:
67 return "{}Up to date!".format(version_msg)
68
69 elif installed > latest:
70 return ("{}Your version of dbt is ahead of the latest "
71 "release!".format(version_msg))
72
73 else:
74 return ("{}Your version of dbt is out of date! "
75 "You can find instructions for upgrading here:\n"
76 "https://docs.getdbt.com/docs/installation"
77 .format(version_msg))
78
79
80 __version__ = '0.12.1'
81 installed = get_installed_version()
82
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dbt/version.py b/dbt/version.py
--- a/dbt/version.py
+++ b/dbt/version.py
@@ -1,43 +1,23 @@
+import json
import re
-import dbt.semver
-
-try:
- # For Python 3.0 and later
- from urllib.request import urlopen
-except ImportError:
- # Fall back to Python 2's urllib2
- from urllib2 import urlopen
-
-REMOTE_VERSION_FILE = \
- 'https://raw.githubusercontent.com/fishtown-analytics/dbt/' \
- 'master/.bumpversion.cfg'
-
+import requests
-def get_version_string_from_text(contents):
- matches = re.search(r"current_version = ([\.0-9a-z]+)", contents)
- if matches is None or len(matches.groups()) != 1:
- return ""
- version = matches.groups()[0]
- return version
+import dbt.exceptions
+import dbt.semver
-def get_remote_version_file_contents(url=REMOTE_VERSION_FILE):
- try:
- f = urlopen(url)
- contents = f.read()
- except Exception:
- contents = ''
- if hasattr(contents, 'decode'):
- contents = contents.decode('utf-8')
- return contents
+PYPI_VERSION_URL = 'https://pypi.org/pypi/dbt/json'
def get_latest_version():
- contents = get_remote_version_file_contents()
- if contents == '':
+ try:
+ resp = requests.get(PYPI_VERSION_URL)
+ data = resp.json()
+ version_string = data['info']['version']
+ except (json.JSONDecodeError, KeyError, requests.RequestException):
return None
- version_string = get_version_string_from_text(contents)
+
return dbt.semver.VersionSpecifier.from_version_string(version_string)
@@ -61,7 +41,7 @@
if latest is None:
return ("{}The latest version of dbt could not be determined!\n"
"Make sure that the following URL is accessible:\n{}"
- .format(version_msg, REMOTE_VERSION_FILE))
+ .format(version_msg, PYPI_VERSION_URL))
if installed == latest:
return "{}Up to date!".format(version_msg)
| {"golden_diff": "diff --git a/dbt/version.py b/dbt/version.py\n--- a/dbt/version.py\n+++ b/dbt/version.py\n@@ -1,43 +1,23 @@\n+import json\n import re\n \n-import dbt.semver\n-\n-try:\n- # For Python 3.0 and later\n- from urllib.request import urlopen\n-except ImportError:\n- # Fall back to Python 2's urllib2\n- from urllib2 import urlopen\n-\n-REMOTE_VERSION_FILE = \\\n- 'https://raw.githubusercontent.com/fishtown-analytics/dbt/' \\\n- 'master/.bumpversion.cfg'\n-\n+import requests\n \n-def get_version_string_from_text(contents):\n- matches = re.search(r\"current_version = ([\\.0-9a-z]+)\", contents)\n- if matches is None or len(matches.groups()) != 1:\n- return \"\"\n- version = matches.groups()[0]\n- return version\n+import dbt.exceptions\n+import dbt.semver\n \n \n-def get_remote_version_file_contents(url=REMOTE_VERSION_FILE):\n- try:\n- f = urlopen(url)\n- contents = f.read()\n- except Exception:\n- contents = ''\n- if hasattr(contents, 'decode'):\n- contents = contents.decode('utf-8')\n- return contents\n+PYPI_VERSION_URL = 'https://pypi.org/pypi/dbt/json'\n \n \n def get_latest_version():\n- contents = get_remote_version_file_contents()\n- if contents == '':\n+ try:\n+ resp = requests.get(PYPI_VERSION_URL)\n+ data = resp.json()\n+ version_string = data['info']['version']\n+ except (json.JSONDecodeError, KeyError, requests.RequestException):\n return None\n- version_string = get_version_string_from_text(contents)\n+\n return dbt.semver.VersionSpecifier.from_version_string(version_string)\n \n \n@@ -61,7 +41,7 @@\n if latest is None:\n return (\"{}The latest version of dbt could not be determined!\\n\"\n \"Make sure that the following URL is accessible:\\n{}\"\n- .format(version_msg, REMOTE_VERSION_FILE))\n+ .format(version_msg, PYPI_VERSION_URL))\n \n if installed == latest:\n return \"{}Up to date!\".format(version_msg)\n", "issue": "don't rely on master branch for latest version number\n## Feature\r\n\r\n### Feature description\r\nThe `master` branch of dbt isn't really a thing anymore. 
Instead of relying on the [master](https://github.com/fishtown-analytics/dbt/blob/51f68e3aabcda57afbe5051983d1d17e092be665/dbt/version.py#L12) branch to grab the latest release number, we should pull it from PyPi.\r\n\r\nWe can use [this api](https://warehouse.readthedocs.io/api-reference/json/) to fetch [some JSON info](https://pypi.org/pypi/dbt/json) about dbt releases.\r\n\r\nWe need to confirm that pre-releases are not shown as the latest version for a package on PyPi.\r\n\r\n### Who will this benefit?\r\ndbt maintainers :) \n", "before_files": [{"content": "import re\n\nimport dbt.semver\n\ntry:\n # For Python 3.0 and later\n from urllib.request import urlopen\nexcept ImportError:\n # Fall back to Python 2's urllib2\n from urllib2 import urlopen\n\nREMOTE_VERSION_FILE = \\\n 'https://raw.githubusercontent.com/fishtown-analytics/dbt/' \\\n 'master/.bumpversion.cfg'\n\n\ndef get_version_string_from_text(contents):\n matches = re.search(r\"current_version = ([\\.0-9a-z]+)\", contents)\n if matches is None or len(matches.groups()) != 1:\n return \"\"\n version = matches.groups()[0]\n return version\n\n\ndef get_remote_version_file_contents(url=REMOTE_VERSION_FILE):\n try:\n f = urlopen(url)\n contents = f.read()\n except Exception:\n contents = ''\n if hasattr(contents, 'decode'):\n contents = contents.decode('utf-8')\n return contents\n\n\ndef get_latest_version():\n contents = get_remote_version_file_contents()\n if contents == '':\n return None\n version_string = get_version_string_from_text(contents)\n return dbt.semver.VersionSpecifier.from_version_string(version_string)\n\n\ndef get_installed_version():\n return dbt.semver.VersionSpecifier.from_version_string(__version__)\n\n\ndef get_version_information():\n installed = get_installed_version()\n latest = get_latest_version()\n\n installed_s = installed.to_version_string(skip_matcher=True)\n if latest is None:\n latest_s = 'unknown'\n else:\n latest_s = latest.to_version_string(skip_matcher=True)\n\n version_msg = (\"installed version: {}\\n\"\n \" latest version: {}\\n\\n\".format(installed_s, latest_s))\n\n if latest is None:\n return (\"{}The latest version of dbt could not be determined!\\n\"\n \"Make sure that the following URL is accessible:\\n{}\"\n .format(version_msg, REMOTE_VERSION_FILE))\n\n if installed == latest:\n return \"{}Up to date!\".format(version_msg)\n\n elif installed > latest:\n return (\"{}Your version of dbt is ahead of the latest \"\n \"release!\".format(version_msg))\n\n else:\n return (\"{}Your version of dbt is out of date! 
\"\n \"You can find instructions for upgrading here:\\n\"\n \"https://docs.getdbt.com/docs/installation\"\n .format(version_msg))\n\n\n__version__ = '0.12.1'\ninstalled = get_installed_version()\n", "path": "dbt/version.py"}], "after_files": [{"content": "import json\nimport re\n\nimport requests\n\nimport dbt.exceptions\nimport dbt.semver\n\n\nPYPI_VERSION_URL = 'https://pypi.org/pypi/dbt/json'\n\n\ndef get_latest_version():\n try:\n resp = requests.get(PYPI_VERSION_URL)\n data = resp.json()\n version_string = data['info']['version']\n except (json.JSONDecodeError, KeyError, requests.RequestException):\n return None\n\n return dbt.semver.VersionSpecifier.from_version_string(version_string)\n\n\ndef get_installed_version():\n return dbt.semver.VersionSpecifier.from_version_string(__version__)\n\n\ndef get_version_information():\n installed = get_installed_version()\n latest = get_latest_version()\n\n installed_s = installed.to_version_string(skip_matcher=True)\n if latest is None:\n latest_s = 'unknown'\n else:\n latest_s = latest.to_version_string(skip_matcher=True)\n\n version_msg = (\"installed version: {}\\n\"\n \" latest version: {}\\n\\n\".format(installed_s, latest_s))\n\n if latest is None:\n return (\"{}The latest version of dbt could not be determined!\\n\"\n \"Make sure that the following URL is accessible:\\n{}\"\n .format(version_msg, PYPI_VERSION_URL))\n\n if installed == latest:\n return \"{}Up to date!\".format(version_msg)\n\n elif installed > latest:\n return (\"{}Your version of dbt is ahead of the latest \"\n \"release!\".format(version_msg))\n\n else:\n return (\"{}Your version of dbt is out of date! \"\n \"You can find instructions for upgrading here:\\n\"\n \"https://docs.getdbt.com/docs/installation\"\n .format(version_msg))\n\n\n__version__ = '0.12.1'\ninstalled = get_installed_version()\n", "path": "dbt/version.py"}]} | 1,140 | 498 |
gh_patches_debug_15688 | rasdani/github-patches | git_diff | iterative__dvc-796 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use dvc to store images for windows installer in our repo
We currently have 3 images stored in git in scripts/innosetup. We should start using dvc for them. Required for https://github.com/iterative/dvc/issues/735 . Depends on https://github.com/iterative/dvc/issues/785 .
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dvc/remote/base.py`
Content:
```
1 import os
2 import re
3 import tempfile
4
5 from dvc.config import Config
6 from dvc.logger import Logger
7 from dvc.exceptions import DvcException
8
9
10 STATUS_UNKNOWN = 0
11 STATUS_OK = 1
12 STATUS_MODIFIED = 2
13 STATUS_NEW = 3
14 STATUS_DELETED = 4
15
16
17 STATUS_MAP = {
18 # (local_exists, remote_exists, cmp)
19 (True, True, True) : STATUS_OK,
20 (True, True, False) : STATUS_MODIFIED,
21 (True, False, None) : STATUS_NEW,
22 (False, True, None) : STATUS_DELETED,
23 }
24
25
26 class DataCloudError(DvcException):
27 """ Data Cloud exception """
28 def __init__(self, msg):
29 super(DataCloudError, self).__init__('Data sync error: {}'.format(msg))
30
31
32
33 class RemoteBase(object):
34 REGEX = None
35
36 def __init__(self, project, config):
37 pass
38
39 @classmethod
40 def supported(cls, config):
41 url = config[Config.SECTION_REMOTE_URL]
42 return cls.match(url) != None
43
44 @classmethod
45 def match(cls, url):
46 return re.match(cls.REGEX, url)
47
48 def save_info(self, path_info):
49 raise NotImplementedError
50
51 def save(self, path_info):
52 raise NotImplementedError
53
54 def checkout(self, path_info, checksum_info):
55 raise NotImplementedError
56
57 def download(self, path_info, path):
58 raise NotImplementedError
59
60 def upload(self, path, path_info):
61 raise NotImplementedError
62
63 # Old code starting from here
64
65 def cache_file_key(self, fname):
66 """ Key of a file within the bucket """
67 relpath = os.path.relpath(fname, self.project.cache.local.cache_dir)
68 relpath = relpath.replace('\\', '/')
69 return '{}/{}'.format(self.prefix, relpath).strip('/')
70
71 def cache_key_name(self, path):
72 relpath = os.path.relpath(path, self.project.cache.local.cache_dir)
73 return relpath.replace('\\', '').replace('/', '')
74
75 @staticmethod
76 def tmp_file(fname):
77 """ Temporary name for a partial download """
78 return fname + '.part'
79
80 def _push_key(self, key, path):
81 pass
82
83 def collect(self, arg):
84 from dvc.remote.local import RemoteLOCAL
85
86 path, local = arg
87 ret = [path]
88
89 if not RemoteLOCAL.is_dir_cache(path):
90 return ret
91
92 if local:
93 if not os.path.isfile(path):
94 return ret
95 dir_path = path
96 else:
97 key = self._get_key(path)
98 if not key:
99 Logger.debug("File '{}' does not exist in the cloud".format(path))
100 return ret
101 tmp = os.path.join(tempfile.mkdtemp(), os.path.basename(path))
102 self._pull_key(key, tmp, no_progress_bar=True)
103 dir_path = tmp
104
105 for relpath, md5 in RemoteLOCAL.get_dir_cache(dir_path).items():
106 cache = self.project.cache.local.get(md5)
107 ret.append(cache)
108
109 return ret
110
111 def _cmp_checksum(self, blob, fname):
112 md5 = self.project.cache.local.path_to_md5(fname)
113 if self.project.cache.local.state.changed(fname, md5=md5):
114 return False
115
116 return True
117
118 def push(self, path):
119 key = self._get_key(path)
120 if key:
121 Logger.debug("File '{}' already uploaded to the cloud. Validating checksum...".format(path))
122 if self._cmp_checksum(key, path):
123 Logger.debug('File checksum matches. No uploading is needed.')
124 return []
125 Logger.debug('Checksum mismatch. Reuploading is required.')
126
127 key = self._new_key(path)
128 return self._push_key(key, path)
129
130 def _makedirs(self, fname):
131 dname = os.path.dirname(fname)
132 try:
133 os.makedirs(dname)
134 except OSError as e:
135 if e.errno != os.errno.EEXIST:
136 raise
137
138 def _pull_key(self, key, path, no_progress_bar=False):
139 """ Cloud-specific method of pulling keys """
140 pass
141
142 def _get_key(self, path):
143 """ Cloud-specific method of getting keys """
144 pass
145
146 def pull(self, path):
147 """ Generic method for pulling data from the cloud """
148 key = self._get_key(path)
149 if not key:
150 Logger.error("File '{}' does not exist in the cloud".format(path))
151 return None
152
153 return self._pull_key(key, path)
154
155 def _status(self, key, path):
156 remote_exists = key != None
157 local_exists = os.path.exists(path)
158
159 diff = None
160 if remote_exists and local_exists:
161 diff = self._cmp_checksum(key, path)
162
163 return STATUS_MAP.get((local_exists, remote_exists, diff), STATUS_UNKNOWN)
164
165 def status(self, path):
166 """
167 Generic method for checking data item status.
168 """
169 key = self._get_key(path)
170 if not key:
171 return STATUS_NEW
172
173 return self._status(key, path)
174
175 def connect(self):
176 pass
177
178 def disconnect(self):
179 pass
180
181 def __enter__(self):
182 self.connect()
183
184 def __exit__(self, type, value, tb):
185 self.disconnect()
186
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dvc/remote/base.py b/dvc/remote/base.py
--- a/dvc/remote/base.py
+++ b/dvc/remote/base.py
@@ -1,6 +1,7 @@
import os
import re
import tempfile
+import posixpath
from dvc.config import Config
from dvc.logger import Logger
@@ -66,7 +67,7 @@
""" Key of a file within the bucket """
relpath = os.path.relpath(fname, self.project.cache.local.cache_dir)
relpath = relpath.replace('\\', '/')
- return '{}/{}'.format(self.prefix, relpath).strip('/')
+ return posixpath.join(self.prefix, relpath).strip('/')
def cache_key_name(self, path):
relpath = os.path.relpath(path, self.project.cache.local.cache_dir)
| {"golden_diff": "diff --git a/dvc/remote/base.py b/dvc/remote/base.py\n--- a/dvc/remote/base.py\n+++ b/dvc/remote/base.py\n@@ -1,6 +1,7 @@\n import os\n import re\n import tempfile\n+import posixpath\n \n from dvc.config import Config\n from dvc.logger import Logger\n@@ -66,7 +67,7 @@\n \"\"\" Key of a file within the bucket \"\"\"\n relpath = os.path.relpath(fname, self.project.cache.local.cache_dir)\n relpath = relpath.replace('\\\\', '/')\n- return '{}/{}'.format(self.prefix, relpath).strip('/')\n+ return posixpath.join(self.prefix, relpath).strip('/')\n \n def cache_key_name(self, path):\n relpath = os.path.relpath(path, self.project.cache.local.cache_dir)\n", "issue": "Use dvc to store images for windows installer in our repo\nWe currently have 3 images stored in git in scripts/innosetup. We should start using dvc for them. Required for https://github.com/iterative/dvc/issues/735 . Depends on https://github.com/iterative/dvc/issues/785 .\n", "before_files": [{"content": "import os\nimport re\nimport tempfile\n\nfrom dvc.config import Config\nfrom dvc.logger import Logger\nfrom dvc.exceptions import DvcException\n\n\nSTATUS_UNKNOWN = 0\nSTATUS_OK = 1\nSTATUS_MODIFIED = 2\nSTATUS_NEW = 3\nSTATUS_DELETED = 4\n\n\nSTATUS_MAP = {\n # (local_exists, remote_exists, cmp)\n (True, True, True) : STATUS_OK,\n (True, True, False) : STATUS_MODIFIED,\n (True, False, None) : STATUS_NEW,\n (False, True, None) : STATUS_DELETED,\n}\n\n\nclass DataCloudError(DvcException):\n \"\"\" Data Cloud exception \"\"\"\n def __init__(self, msg):\n super(DataCloudError, self).__init__('Data sync error: {}'.format(msg))\n\n\n\nclass RemoteBase(object):\n REGEX = None\n\n def __init__(self, project, config):\n pass\n\n @classmethod\n def supported(cls, config):\n url = config[Config.SECTION_REMOTE_URL]\n return cls.match(url) != None\n\n @classmethod\n def match(cls, url):\n return re.match(cls.REGEX, url)\n\n def save_info(self, path_info):\n raise NotImplementedError\n\n def save(self, path_info):\n raise NotImplementedError\n\n def checkout(self, path_info, checksum_info):\n raise NotImplementedError\n\n def download(self, path_info, path):\n raise NotImplementedError\n\n def upload(self, path, path_info):\n raise NotImplementedError\n\n # Old code starting from here\n\n def cache_file_key(self, fname):\n \"\"\" Key of a file within the bucket \"\"\"\n relpath = os.path.relpath(fname, self.project.cache.local.cache_dir)\n relpath = relpath.replace('\\\\', '/')\n return '{}/{}'.format(self.prefix, relpath).strip('/')\n\n def cache_key_name(self, path):\n relpath = os.path.relpath(path, self.project.cache.local.cache_dir)\n return relpath.replace('\\\\', '').replace('/', '')\n\n @staticmethod\n def tmp_file(fname):\n \"\"\" Temporary name for a partial download \"\"\"\n return fname + '.part'\n\n def _push_key(self, key, path):\n pass\n\n def collect(self, arg):\n from dvc.remote.local import RemoteLOCAL\n\n path, local = arg\n ret = [path]\n\n if not RemoteLOCAL.is_dir_cache(path):\n return ret\n\n if local:\n if not os.path.isfile(path):\n return ret\n dir_path = path\n else:\n key = self._get_key(path)\n if not key:\n Logger.debug(\"File '{}' does not exist in the cloud\".format(path))\n return ret\n tmp = os.path.join(tempfile.mkdtemp(), os.path.basename(path))\n self._pull_key(key, tmp, no_progress_bar=True)\n dir_path = tmp\n\n for relpath, md5 in RemoteLOCAL.get_dir_cache(dir_path).items():\n cache = self.project.cache.local.get(md5)\n ret.append(cache)\n\n return ret\n\n def _cmp_checksum(self, blob, 
fname):\n md5 = self.project.cache.local.path_to_md5(fname)\n if self.project.cache.local.state.changed(fname, md5=md5):\n return False\n\n return True\n\n def push(self, path):\n key = self._get_key(path)\n if key:\n Logger.debug(\"File '{}' already uploaded to the cloud. Validating checksum...\".format(path))\n if self._cmp_checksum(key, path):\n Logger.debug('File checksum matches. No uploading is needed.')\n return []\n Logger.debug('Checksum mismatch. Reuploading is required.')\n\n key = self._new_key(path)\n return self._push_key(key, path)\n\n def _makedirs(self, fname):\n dname = os.path.dirname(fname)\n try:\n os.makedirs(dname)\n except OSError as e:\n if e.errno != os.errno.EEXIST:\n raise\n\n def _pull_key(self, key, path, no_progress_bar=False):\n \"\"\" Cloud-specific method of pulling keys \"\"\"\n pass\n\n def _get_key(self, path):\n \"\"\" Cloud-specific method of getting keys \"\"\"\n pass\n\n def pull(self, path):\n \"\"\" Generic method for pulling data from the cloud \"\"\"\n key = self._get_key(path)\n if not key:\n Logger.error(\"File '{}' does not exist in the cloud\".format(path))\n return None\n\n return self._pull_key(key, path)\n\n def _status(self, key, path):\n remote_exists = key != None\n local_exists = os.path.exists(path)\n\n diff = None\n if remote_exists and local_exists:\n diff = self._cmp_checksum(key, path)\n\n return STATUS_MAP.get((local_exists, remote_exists, diff), STATUS_UNKNOWN)\n\n def status(self, path):\n \"\"\"\n Generic method for checking data item status.\n \"\"\"\n key = self._get_key(path)\n if not key:\n return STATUS_NEW\n\n return self._status(key, path)\n\n def connect(self):\n pass\n\n def disconnect(self):\n pass\n\n def __enter__(self):\n self.connect()\n\n def __exit__(self, type, value, tb):\n self.disconnect()\n", "path": "dvc/remote/base.py"}], "after_files": [{"content": "import os\nimport re\nimport tempfile\nimport posixpath\n\nfrom dvc.config import Config\nfrom dvc.logger import Logger\nfrom dvc.exceptions import DvcException\n\n\nSTATUS_UNKNOWN = 0\nSTATUS_OK = 1\nSTATUS_MODIFIED = 2\nSTATUS_NEW = 3\nSTATUS_DELETED = 4\n\n\nSTATUS_MAP = {\n # (local_exists, remote_exists, cmp)\n (True, True, True) : STATUS_OK,\n (True, True, False) : STATUS_MODIFIED,\n (True, False, None) : STATUS_NEW,\n (False, True, None) : STATUS_DELETED,\n}\n\n\nclass DataCloudError(DvcException):\n \"\"\" Data Cloud exception \"\"\"\n def __init__(self, msg):\n super(DataCloudError, self).__init__('Data sync error: {}'.format(msg))\n\n\n\nclass RemoteBase(object):\n REGEX = None\n\n def __init__(self, project, config):\n pass\n\n @classmethod\n def supported(cls, config):\n url = config[Config.SECTION_REMOTE_URL]\n return cls.match(url) != None\n\n @classmethod\n def match(cls, url):\n return re.match(cls.REGEX, url)\n\n def save_info(self, path_info):\n raise NotImplementedError\n\n def save(self, path_info):\n raise NotImplementedError\n\n def checkout(self, path_info, checksum_info):\n raise NotImplementedError\n\n def download(self, path_info, path):\n raise NotImplementedError\n\n def upload(self, path, path_info):\n raise NotImplementedError\n\n # Old code starting from here\n\n def cache_file_key(self, fname):\n \"\"\" Key of a file within the bucket \"\"\"\n relpath = os.path.relpath(fname, self.project.cache.local.cache_dir)\n relpath = relpath.replace('\\\\', '/')\n return posixpath.join(self.prefix, relpath).strip('/')\n\n def cache_key_name(self, path):\n relpath = os.path.relpath(path, self.project.cache.local.cache_dir)\n return 
relpath.replace('\\\\', '').replace('/', '')\n\n @staticmethod\n def tmp_file(fname):\n \"\"\" Temporary name for a partial download \"\"\"\n return fname + '.part'\n\n def _push_key(self, key, path):\n pass\n\n def collect(self, arg):\n from dvc.remote.local import RemoteLOCAL\n\n path, local = arg\n ret = [path]\n\n if not RemoteLOCAL.is_dir_cache(path):\n return ret\n\n if local:\n if not os.path.isfile(path):\n return ret\n dir_path = path\n else:\n key = self._get_key(path)\n if not key:\n Logger.debug(\"File '{}' does not exist in the cloud\".format(path))\n return ret\n tmp = os.path.join(tempfile.mkdtemp(), os.path.basename(path))\n self._pull_key(key, tmp, no_progress_bar=True)\n dir_path = tmp\n\n for relpath, md5 in RemoteLOCAL.get_dir_cache(dir_path).items():\n cache = self.project.cache.local.get(md5)\n ret.append(cache)\n\n return ret\n\n def _cmp_checksum(self, blob, fname):\n md5 = self.project.cache.local.path_to_md5(fname)\n if self.project.cache.local.state.changed(fname, md5=md5):\n return False\n\n return True\n\n def push(self, path):\n key = self._get_key(path)\n if key:\n Logger.debug(\"File '{}' already uploaded to the cloud. Validating checksum...\".format(path))\n if self._cmp_checksum(key, path):\n Logger.debug('File checksum matches. No uploading is needed.')\n return []\n Logger.debug('Checksum mismatch. Reuploading is required.')\n\n key = self._new_key(path)\n return self._push_key(key, path)\n\n def _makedirs(self, fname):\n dname = os.path.dirname(fname)\n try:\n os.makedirs(dname)\n except OSError as e:\n if e.errno != os.errno.EEXIST:\n raise\n\n def _pull_key(self, key, path, no_progress_bar=False):\n \"\"\" Cloud-specific method of pulling keys \"\"\"\n pass\n\n def _get_key(self, path):\n \"\"\" Cloud-specific method of getting keys \"\"\"\n pass\n\n def pull(self, path):\n \"\"\" Generic method for pulling data from the cloud \"\"\"\n key = self._get_key(path)\n if not key:\n Logger.error(\"File '{}' does not exist in the cloud\".format(path))\n return None\n\n return self._pull_key(key, path)\n\n def _status(self, key, path):\n remote_exists = key != None\n local_exists = os.path.exists(path)\n\n diff = None\n if remote_exists and local_exists:\n diff = self._cmp_checksum(key, path)\n\n return STATUS_MAP.get((local_exists, remote_exists, diff), STATUS_UNKNOWN)\n\n def status(self, path):\n \"\"\"\n Generic method for checking data item status.\n \"\"\"\n key = self._get_key(path)\n if not key:\n return STATUS_NEW\n\n return self._status(key, path)\n\n def connect(self):\n pass\n\n def disconnect(self):\n pass\n\n def __enter__(self):\n self.connect()\n\n def __exit__(self, type, value, tb):\n self.disconnect()\n", "path": "dvc/remote/base.py"}]} | 1,932 | 179 |
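
The fix here is a single call-site change, but the difference matters when the prefix is not perfectly normalised. A quick interpreter check, using a made-up prefix and relpath (the record does not show real cache keys), illustrates why `posixpath.join` is safer than string formatting:

```python
import posixpath

prefix, relpath = 'files/md5/', 'ab/cdef'

# Naive formatting duplicates the separator when prefix has a trailing slash ...
print('{}/{}'.format(prefix, relpath).strip('/'))   # files/md5//ab/cdef
# ... while posixpath.join inserts one only when needed.
print(posixpath.join(prefix, relpath).strip('/'))   # files/md5/ab/cdef

# An empty prefix is also handled cleanly:
print(posixpath.join('', relpath).strip('/'))       # ab/cdef
```
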
gh_patches_debug_27759 | rasdani/github-patches | git_diff | mdn__kuma-6029 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Search json endpoint is not available in new front-end
**Summary**
https://twitter.com/klaascuvelier/status/1182203293117886464
**Steps To Reproduce (STR)**
_How can we reproduce the problem?_
Go to https://developer.mozilla.org/en-US/search.json?q=array
**Actual behavior**
Blank page
**Expected behavior**
JSON is returned, as it is today only at https://wiki.developer.mozilla.org/en-US/search.json?q=array
**Additional context**
There might be a few external services, Twitter bots, etc., that depend on this endpoint.
--- END ISSUE ---
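For orientation before reading the files below: the accompanying patch retires the `.json` suffix handling and instead redirects the legacy endpoint to the v1 API while preserving the query string. A minimal, hedged sketch of that redirect idea follows; the URL name `api.v1.search` and the locale lookup mirror the patch, while the surrounding project wiring is assumed rather than shown.
```python
# Hedged sketch of a query-string-preserving permanent redirect in Django.
# `api.v1.search` and the locale handling mirror the accompanying patch;
# everything else is illustrative, not the definitive implementation.
from django.urls import reverse_lazy
from django.views.generic import RedirectView


class SearchRedirectView(RedirectView):
    permanent = True  # 301: the old /search.json endpoint is gone for good

    def get_redirect_url(self, *args, **kwargs):
        url = reverse_lazy(
            "api.v1.search", kwargs={"locale": self.request.LANGUAGE_CODE}
        )
        query_string = self.request.META.get("QUERY_STRING")
        if query_string:
            url = f"{url}?{query_string}"  # keep ?q=... intact for callers
        return url
```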
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kuma/search/views.py`
Content:
```
1 from django.shortcuts import render
2 from django.views.decorators.cache import never_cache
3 from django.views.decorators.http import require_GET
4 from ratelimit.decorators import ratelimit
5
6 from kuma.api.v1.views import search as search_api
7 from kuma.core.decorators import shared_cache_control
8 from kuma.core.utils import is_wiki
9
10 from .search import SearchView
11
12 # Since the search endpoint accepts user input (via query parameters) and its
13 # response is compressed, use rate limiting to mitigate the BREACH attack
14 # (see http://breachattack.com/). It still needs to allow a user to click
15 # the filter switches (bug 1426968).
16 # Alternate: forbid gzip by setting Content-Encoding: identity
17 @never_cache
18 @require_GET
19 @ratelimit(key='user_or_ip', rate='25/m', block=True)
20 def search(request, *args, **kwargs):
21 """
22 The search view.
23 """
24 if is_wiki(request):
25 return wiki_search(request, *args, **kwargs)
26
27 results = search_api(request, *args, **kwargs).data
28 context = {
29 'results': {
30 'results': None if results.get('error') else results
31 }
32 }
33
34 return render(request, 'search/react.html', context)
35
36
37 wiki_search = SearchView.as_view()
38
39
40 @shared_cache_control(s_maxage=60 * 60 * 24 * 7)
41 def plugin(request):
42 """Render an OpenSearch Plugin."""
43 return render(request, 'search/plugin.html', {
44 'locale': request.LANGUAGE_CODE
45 }, content_type='application/opensearchdescription+xml')
46
```
Path: `kuma/search/urls.py`
Content:
```
1 from django.conf.urls import url
2 from rest_framework.urlpatterns import format_suffix_patterns
3
4 from . import views
5
6 # this allows using ".json" extensions for the view to force json output
7 lang_base_urlpatterns = format_suffix_patterns(
8 [url(r'^$', views.search, name='search')])
9
10
11 lang_urlpatterns = [
12 url(r'^xml$',
13 views.plugin,
14 name='search.plugin'),
15 ]
16
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kuma/search/urls.py b/kuma/search/urls.py
--- a/kuma/search/urls.py
+++ b/kuma/search/urls.py
@@ -1,11 +1,11 @@
from django.conf.urls import url
-from rest_framework.urlpatterns import format_suffix_patterns
from . import views
-# this allows using ".json" extensions for the view to force json output
-lang_base_urlpatterns = format_suffix_patterns(
- [url(r'^$', views.search, name='search')])
+lang_base_urlpatterns = [
+ url(r'^$', views.search, name='search'),
+ url(r'^.(?P<format>json)$', views.SearchRedirectView.as_view())
+]
lang_urlpatterns = [
diff --git a/kuma/search/views.py b/kuma/search/views.py
--- a/kuma/search/views.py
+++ b/kuma/search/views.py
@@ -1,6 +1,8 @@
from django.shortcuts import render
+from django.urls import reverse_lazy
from django.views.decorators.cache import never_cache
from django.views.decorators.http import require_GET
+from django.views.generic import RedirectView
from ratelimit.decorators import ratelimit
from kuma.api.v1.views import search as search_api
@@ -37,6 +39,17 @@
wiki_search = SearchView.as_view()
+class SearchRedirectView(RedirectView):
+ permanent = True
+
+ def get_redirect_url(self, *args, **kwargs):
+ query_string = self.request.META.get('QUERY_STRING')
+ url = reverse_lazy('api.v1.search', kwargs={'locale': self.request.LANGUAGE_CODE})
+ if query_string:
+ url += '?' + query_string
+ return url
+
+
@shared_cache_control(s_maxage=60 * 60 * 24 * 7)
def plugin(request):
"""Render an OpenSearch Plugin."""
| {"golden_diff": "diff --git a/kuma/search/urls.py b/kuma/search/urls.py\n--- a/kuma/search/urls.py\n+++ b/kuma/search/urls.py\n@@ -1,11 +1,11 @@\n from django.conf.urls import url\n-from rest_framework.urlpatterns import format_suffix_patterns\n \n from . import views\n \n-# this allows using \".json\" extensions for the view to force json output\n-lang_base_urlpatterns = format_suffix_patterns(\n- [url(r'^$', views.search, name='search')])\n+lang_base_urlpatterns = [\n+ url(r'^$', views.search, name='search'),\n+ url(r'^.(?P<format>json)$', views.SearchRedirectView.as_view())\n+]\n \n \n lang_urlpatterns = [\ndiff --git a/kuma/search/views.py b/kuma/search/views.py\n--- a/kuma/search/views.py\n+++ b/kuma/search/views.py\n@@ -1,6 +1,8 @@\n from django.shortcuts import render\n+from django.urls import reverse_lazy\n from django.views.decorators.cache import never_cache\n from django.views.decorators.http import require_GET\n+from django.views.generic import RedirectView\n from ratelimit.decorators import ratelimit\n \n from kuma.api.v1.views import search as search_api\n@@ -37,6 +39,17 @@\n wiki_search = SearchView.as_view()\n \n \n+class SearchRedirectView(RedirectView):\n+ permanent = True\n+\n+ def get_redirect_url(self, *args, **kwargs):\n+ query_string = self.request.META.get('QUERY_STRING')\n+ url = reverse_lazy('api.v1.search', kwargs={'locale': self.request.LANGUAGE_CODE})\n+ if query_string:\n+ url += '?' + query_string\n+ return url\n+\n+\n @shared_cache_control(s_maxage=60 * 60 * 24 * 7)\n def plugin(request):\n \"\"\"Render an OpenSearch Plugin.\"\"\"\n", "issue": "Search json endpoint is not available in new front-end\n**Summary**\r\nhttps://twitter.com/klaascuvelier/status/1182203293117886464\r\n\r\n\r\n**Steps To Reproduce (STR)**\r\n_How can we reproduce the problem?_\r\n\r\nGo to https://developer.mozilla.org/en-US/search.json?q=array\r\n \r\n\r\n\r\n**Actual behavior**\r\nBlank page\r\n\r\n\r\n**Expected behavior**\r\nJSON is returned like it is now only at https://wiki.developer.mozilla.org/en-US/search.json?q=array\r\n\r\n\r\n**Additional context**\r\nThere might be a few external services, twitter bots etc. that depend on this endpoint.\r\n\n", "before_files": [{"content": "from django.shortcuts import render\nfrom django.views.decorators.cache import never_cache\nfrom django.views.decorators.http import require_GET\nfrom ratelimit.decorators import ratelimit\n\nfrom kuma.api.v1.views import search as search_api\nfrom kuma.core.decorators import shared_cache_control\nfrom kuma.core.utils import is_wiki\n\nfrom .search import SearchView\n\n# Since the search endpoint accepts user input (via query parameters) and its\n# response is compressed, use rate limiting to mitigate the BREACH attack\n# (see http://breachattack.com/). 
It still needs to allow a user to click\n# the filter switches (bug 1426968).\n# Alternate: forbid gzip by setting Content-Encoding: identity\n@never_cache\n@require_GET\n@ratelimit(key='user_or_ip', rate='25/m', block=True)\ndef search(request, *args, **kwargs):\n \"\"\"\n The search view.\n \"\"\"\n if is_wiki(request):\n return wiki_search(request, *args, **kwargs)\n\n results = search_api(request, *args, **kwargs).data\n context = {\n 'results': {\n 'results': None if results.get('error') else results\n }\n }\n\n return render(request, 'search/react.html', context)\n\n\nwiki_search = SearchView.as_view()\n\n\n@shared_cache_control(s_maxage=60 * 60 * 24 * 7)\ndef plugin(request):\n \"\"\"Render an OpenSearch Plugin.\"\"\"\n return render(request, 'search/plugin.html', {\n 'locale': request.LANGUAGE_CODE\n }, content_type='application/opensearchdescription+xml')\n", "path": "kuma/search/views.py"}, {"content": "from django.conf.urls import url\nfrom rest_framework.urlpatterns import format_suffix_patterns\n\nfrom . import views\n\n# this allows using \".json\" extensions for the view to force json output\nlang_base_urlpatterns = format_suffix_patterns(\n [url(r'^$', views.search, name='search')])\n\n\nlang_urlpatterns = [\n url(r'^xml$',\n views.plugin,\n name='search.plugin'),\n]\n", "path": "kuma/search/urls.py"}], "after_files": [{"content": "from django.shortcuts import render\nfrom django.urls import reverse_lazy\nfrom django.views.decorators.cache import never_cache\nfrom django.views.decorators.http import require_GET\nfrom django.views.generic import RedirectView\nfrom ratelimit.decorators import ratelimit\n\nfrom kuma.api.v1.views import search as search_api\nfrom kuma.core.decorators import shared_cache_control\nfrom kuma.core.utils import is_wiki\n\nfrom .search import SearchView\n\n# Since the search endpoint accepts user input (via query parameters) and its\n# response is compressed, use rate limiting to mitigate the BREACH attack\n# (see http://breachattack.com/). It still needs to allow a user to click\n# the filter switches (bug 1426968).\n# Alternate: forbid gzip by setting Content-Encoding: identity\n@never_cache\n@require_GET\n@ratelimit(key='user_or_ip', rate='25/m', block=True)\ndef search(request, *args, **kwargs):\n \"\"\"\n The search view.\n \"\"\"\n if is_wiki(request):\n return wiki_search(request, *args, **kwargs)\n\n results = search_api(request, *args, **kwargs).data\n context = {\n 'results': {\n 'results': None if results.get('error') else results\n }\n }\n\n return render(request, 'search/react.html', context)\n\n\nwiki_search = SearchView.as_view()\n\n\nclass SearchRedirectView(RedirectView):\n permanent = True\n\n def get_redirect_url(self, *args, **kwargs):\n query_string = self.request.META.get('QUERY_STRING')\n url = reverse_lazy('api.v1.search', kwargs={'locale': self.request.LANGUAGE_CODE})\n if query_string:\n url += '?' + query_string\n return url\n\n\n@shared_cache_control(s_maxage=60 * 60 * 24 * 7)\ndef plugin(request):\n \"\"\"Render an OpenSearch Plugin.\"\"\"\n return render(request, 'search/plugin.html', {\n 'locale': request.LANGUAGE_CODE\n }, content_type='application/opensearchdescription+xml')\n", "path": "kuma/search/views.py"}, {"content": "from django.conf.urls import url\n\nfrom . 
import views\n\nlang_base_urlpatterns = [\n url(r'^$', views.search, name='search'),\n url(r'^.(?P<format>json)$', views.SearchRedirectView.as_view())\n]\n\n\nlang_urlpatterns = [\n url(r'^xml$',\n views.plugin,\n name='search.plugin'),\n]\n", "path": "kuma/search/urls.py"}]} | 961 | 408 |
gh_patches_debug_39489 | rasdani/github-patches | git_diff | deepset-ai__haystack-6822 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Implement function to calculate F1 metric
As specified in proposal #5794, we need to implement a function to calculate the F1 metric.
Ideally the function should be part of the private interface and called only through the `calculate_metrics` function (see #6063). `_calculate_f1()` could be a nice name.
For more detailed information check out the original proposal.
--- END ISSUE ---
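For reference, the per-sample token-overlap F1 used in SQuAD-style answer evaluation, which the patch below implements, can be sketched in isolation as follows. Whitespace tokenization and the empty-answer convention are assumptions carried over from that patch.
```python
# Hedged sketch of per-sample token-overlap F1 (SQuAD-style).
import collections
from typing import List


def f1_single(label_toks: List[str], pred_toks: List[str]) -> float:
    # Multiset intersection counts the overlapping tokens.
    common = collections.Counter(label_toks) & collections.Counter(pred_toks)
    num_same = sum(common.values())
    if not label_toks or not pred_toks:
        # If either answer is empty, F1 is 1 only when both are empty.
        return float(label_toks == pred_toks)
    if num_same == 0:
        return 0.0
    precision = num_same / len(pred_toks)
    recall = num_same / len(label_toks)
    return 2 * precision * recall / (precision + recall)


print(f1_single("the cat sat".split(), "the cat".split()))  # prints ~0.8
```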
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `haystack/evaluation/eval.py`
Content:
```
1 from typing import Any, Callable, Dict, List, Union
2
3 import numpy as np
4
5 from haystack import Pipeline
6 from haystack.core.component import Component
7 from haystack.evaluation.eval_utils import get_answers_from_output, preprocess_text
8 from haystack.evaluation.metrics import Metric, MetricsResult
9
10
11 class EvaluationResult:
12 """
13 EvaluationResult keeps track of all the information related to evaluation, namely the runnable (Pipeline or
14 component), inputs, outputs, and expected outputs.
15 The EvaluationResult keeps track of all the information stored by eval.
16
17 :param runnable: The runnable (Pipeline or component) used for evaluation.
18 :param inputs: List of inputs used for evaluation.
19 :param outputs: List of outputs generated by the runnable.
20 :param expected_outputs: List of expected outputs used for evaluation.
21 """
22
23 def __init__(
24 self,
25 runnable: Union[Pipeline, Component],
26 inputs: List[Dict[str, Any]],
27 outputs: List[Dict[str, Any]],
28 expected_outputs: List[Dict[str, Any]],
29 ) -> None:
30 self.runnable = runnable
31 self.inputs = inputs
32 self.outputs = outputs
33 self.expected_outputs = expected_outputs
34
35 # Determine the type of the runnable
36 if str(type(runnable).__name__) == "Pipeline":
37 self.runnable_type = "pipeline"
38 else:
39 self.runnable_type = "component"
40
41 # Mapping of metrics to their corresponding functions.
42 # This should be kept in sync with the Metric enum
43 self._supported_metrics: Dict[Metric, Callable[..., MetricsResult]] = {
44 Metric.RECALL: self._calculate_recall,
45 Metric.MRR: self._calculate_mrr,
46 Metric.MAP: self._calculate_map,
47 Metric.F1: self._calculate_f1,
48 Metric.EM: self._calculate_em,
49 Metric.SAS: self._calculate_sas,
50 }
51
52 def calculate_metrics(self, metric: Union[Metric, Callable[..., MetricsResult]], **kwargs) -> MetricsResult:
53 """
54 Calculate evaluation metrics based on the provided Metric or using the custom metric function.
55
56 :param metric: The Metric indicating the type of metric to calculate or custom function to compute.
57 :return: MetricsResult containing the calculated metric.
58 """
59
60 if isinstance(metric, Metric):
61 return self._supported_metrics[metric](**kwargs)
62
63 return metric(self, **kwargs)
64
65 def _calculate_recall(self):
66 return MetricsResult({"recall": None})
67
68 def _calculate_map(self):
69 return MetricsResult({"mean_average_precision": None})
70
71 def _calculate_mrr(self):
72 return MetricsResult({"mean_reciprocal_rank": None})
73
74 def _calculate_f1(self):
75 return MetricsResult({"f1": None})
76
77 def _calculate_em(
78 self, output_key: str, regexes_to_ignore=None, ignore_case=False, ignore_punctuation=False, ignore_numbers=False
79 ) -> MetricsResult:
80 """
81 Calculates the Exact Match (EM) score between two lists of predictions and labels.
82 Exact Match (EM) score measures the percentage of samples where the predicted text exactly matches the
83 corresponding ground truth label.
84
85 :param output_key: The key of the output to use for comparison.
86 :param regexes_to_ignore (list, optional): A list of regular expressions. If provided, it removes substrings
87 matching these regular expressions from both predictions and labels before comparison. Defaults to None.
88 :param ignore_case (bool, optional): If True, performs case-insensitive comparison. Defaults to False.
89 :param ignore_punctuation (bool, optional): If True, removes punctuation from both predictions and labels before
90 comparison. Defaults to False.
91 :param ignore_numbers (bool, optional): If True, removes numerical digits from both predictions and labels
92 before comparison. Defaults to False.
93 :return: A MetricsResult object containing the calculated Exact Match (EM) score.
94 """
95
96 predictions = get_answers_from_output(
97 outputs=self.outputs, output_key=output_key, runnable_type=self.runnable_type
98 )
99 labels = get_answers_from_output(
100 outputs=self.expected_outputs, output_key=output_key, runnable_type=self.runnable_type
101 )
102
103 if len(predictions) != len(labels):
104 raise ValueError("The number of predictions and labels must be the same.")
105 if len(predictions) == len(labels) == 0:
106 # Return Exact Match as 0 for no inputs
107 return MetricsResult({"exact_match": 0.0})
108
109 predictions = preprocess_text(predictions, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)
110 labels = preprocess_text(labels, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)
111
112 score_list = np.array(predictions) == np.array(labels)
113 exact_match_score = np.mean(score_list)
114
115 return MetricsResult({"exact_match": exact_match_score})
116
117 def _calculate_sas(self):
118 return MetricsResult({"exact_match": None})
119
120
121 def eval(
122 runnable: Union[Pipeline, Component], inputs: List[Dict[str, Any]], expected_outputs: List[Dict[str, Any]]
123 ) -> EvaluationResult:
124 """
125 Evaluates the provided Pipeline or component based on the given inputs and expected outputs.
126
127 This function facilitates the evaluation of a given runnable (either a Pipeline or a component) using the provided
128 inputs and corresponding expected outputs.
129
130 :param runnable: The runnable (Pipeline or component) used for evaluation.
131 :param inputs: List of inputs used for evaluation.
132 :param expected_outputs: List of expected outputs used for evaluation.
133
134 :return: An instance of EvaluationResult containing information about the evaluation, including the runnable,
135 inputs, outputs, and expected outputs.
136 """
137
138 outputs = []
139
140 # Check that expected outputs has the correct shape
141 if len(inputs) != len(expected_outputs):
142 raise ValueError(
143 f"The number of inputs ({len(inputs)}) does not match the number of expected outputs "
144 f"({len(expected_outputs)}). Please ensure that each input has a corresponding expected output."
145 )
146
147 for input_ in inputs:
148 output = runnable.run(input_)
149 outputs.append(output)
150
151 return EvaluationResult(runnable, inputs, outputs, expected_outputs)
152
```
--- END FILES ---
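Before localizing the bug, it may help to see how the metric dispatch above is meant to be called. The sketch below is hypothetical: `pipeline`, `inputs`, `expected_outputs`, and the `"answers"` output key are assumed, and only the names mirror the file.
```python
# Hypothetical usage of EvaluationResult.calculate_metrics; assumes a
# runnable `pipeline` plus matching `inputs`/`expected_outputs` exist.
from haystack.evaluation.eval import eval as run_eval
from haystack.evaluation.metrics import Metric

result = run_eval(pipeline, inputs=inputs, expected_outputs=expected_outputs)
metrics = result.calculate_metrics(Metric.EM, output_key="answers")
print(metrics)  # e.g. {"exact_match": 0.75}
```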
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/haystack/evaluation/eval.py b/haystack/evaluation/eval.py
--- a/haystack/evaluation/eval.py
+++ b/haystack/evaluation/eval.py
@@ -1,3 +1,4 @@
+import collections
from typing import Any, Callable, Dict, List, Union
import numpy as np
@@ -71,8 +72,68 @@
def _calculate_mrr(self):
return MetricsResult({"mean_reciprocal_rank": None})
- def _calculate_f1(self):
- return MetricsResult({"f1": None})
+ def _compute_f1_single(self, label_toks: List[str], pred_toks: List[str]) -> float:
+ """
+ Compute F1 score for a single sample.
+ """
+ common: collections.Counter = collections.Counter(label_toks) & collections.Counter(pred_toks)
+ num_same = sum(common.values())
+ if len(label_toks) == 0 or len(pred_toks) == 0:
+ # If either is no-answer, then F1 is 1 if they agree, 0 otherwise
+ return int(label_toks == pred_toks)
+ if num_same == 0:
+ return 0
+ precision = 1.0 * num_same / len(pred_toks)
+ recall = 1.0 * num_same / len(label_toks)
+ f1 = (2 * precision * recall) / (precision + recall)
+ return f1
+
+ def _calculate_f1(
+ self, output_key: str, regexes_to_ignore=None, ignore_case=False, ignore_punctuation=False, ignore_numbers=False
+ ) -> MetricsResult:
+ """
+ Calculates the F1 score between two lists of predictions and labels.
+ F1 score measures the word overlap between the predicted text and the corresponding ground truth label.
+
+ :param output_key: The key of the output to use for comparison.
+ :param regexes_to_ignore (list, optional): A list of regular expressions. If provided, it removes substrings
+ matching these regular expressions from both predictions and labels before comparison. Defaults to None.
+ :param ignore_case (bool, optional): If True, performs case-insensitive comparison. Defaults to False.
+ :param ignore_punctuation (bool, optional): If True, removes punctuation from both predictions and labels before
+ comparison. Defaults to False.
+ :param ignore_numbers (bool, optional): If True, removes numerical digits from both predictions and labels
+ before comparison. Defaults to False.
+ :return: A MetricsResult object containing the calculated Exact Match (EM) score.
+ """
+
+ predictions = get_answers_from_output(
+ outputs=self.outputs, output_key=output_key, runnable_type=self.runnable_type
+ )
+ labels = get_answers_from_output(
+ outputs=self.expected_outputs, output_key=output_key, runnable_type=self.runnable_type
+ )
+
+ if len(predictions) != len(labels):
+ raise ValueError("The number of predictions and labels must be the same.")
+ if len(predictions) == len(labels) == 0:
+ # Return F1 as 0 for no inputs
+ return MetricsResult({"f1": 0.0})
+
+ predictions = preprocess_text(predictions, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)
+ labels = preprocess_text(labels, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)
+
+ # Tokenize by splitting on spaces
+ tokenized_predictions = [pred.split() for pred in predictions]
+ tokenized_labels = [label.split() for label in labels]
+
+ f1_scores = [
+ self._compute_f1_single(label_toks, pred_toks)
+ for label_toks, pred_toks in zip(tokenized_labels, tokenized_predictions)
+ ]
+
+ f1 = np.mean(f1_scores)
+
+ return MetricsResult({"f1": f1})
def _calculate_em(
self, output_key: str, regexes_to_ignore=None, ignore_case=False, ignore_punctuation=False, ignore_numbers=False
| {"golden_diff": "diff --git a/haystack/evaluation/eval.py b/haystack/evaluation/eval.py\n--- a/haystack/evaluation/eval.py\n+++ b/haystack/evaluation/eval.py\n@@ -1,3 +1,4 @@\n+import collections\n from typing import Any, Callable, Dict, List, Union\n \n import numpy as np\n@@ -71,8 +72,68 @@\n def _calculate_mrr(self):\n return MetricsResult({\"mean_reciprocal_rank\": None})\n \n- def _calculate_f1(self):\n- return MetricsResult({\"f1\": None})\n+ def _compute_f1_single(self, label_toks: List[str], pred_toks: List[str]) -> float:\n+ \"\"\"\n+ Compute F1 score for a single sample.\n+ \"\"\"\n+ common: collections.Counter = collections.Counter(label_toks) & collections.Counter(pred_toks)\n+ num_same = sum(common.values())\n+ if len(label_toks) == 0 or len(pred_toks) == 0:\n+ # If either is no-answer, then F1 is 1 if they agree, 0 otherwise\n+ return int(label_toks == pred_toks)\n+ if num_same == 0:\n+ return 0\n+ precision = 1.0 * num_same / len(pred_toks)\n+ recall = 1.0 * num_same / len(label_toks)\n+ f1 = (2 * precision * recall) / (precision + recall)\n+ return f1\n+\n+ def _calculate_f1(\n+ self, output_key: str, regexes_to_ignore=None, ignore_case=False, ignore_punctuation=False, ignore_numbers=False\n+ ) -> MetricsResult:\n+ \"\"\"\n+ Calculates the F1 score between two lists of predictions and labels.\n+ F1 score measures the word overlap between the predicted text and the corresponding ground truth label.\n+\n+ :param output_key: The key of the output to use for comparison.\n+ :param regexes_to_ignore (list, optional): A list of regular expressions. If provided, it removes substrings\n+ matching these regular expressions from both predictions and labels before comparison. Defaults to None.\n+ :param ignore_case (bool, optional): If True, performs case-insensitive comparison. Defaults to False.\n+ :param ignore_punctuation (bool, optional): If True, removes punctuation from both predictions and labels before\n+ comparison. Defaults to False.\n+ :param ignore_numbers (bool, optional): If True, removes numerical digits from both predictions and labels\n+ before comparison. 
Defaults to False.\n+ :return: A MetricsResult object containing the calculated Exact Match (EM) score.\n+ \"\"\"\n+\n+ predictions = get_answers_from_output(\n+ outputs=self.outputs, output_key=output_key, runnable_type=self.runnable_type\n+ )\n+ labels = get_answers_from_output(\n+ outputs=self.expected_outputs, output_key=output_key, runnable_type=self.runnable_type\n+ )\n+\n+ if len(predictions) != len(labels):\n+ raise ValueError(\"The number of predictions and labels must be the same.\")\n+ if len(predictions) == len(labels) == 0:\n+ # Return F1 as 0 for no inputs\n+ return MetricsResult({\"f1\": 0.0})\n+\n+ predictions = preprocess_text(predictions, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)\n+ labels = preprocess_text(labels, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)\n+\n+ # Tokenize by splitting on spaces\n+ tokenized_predictions = [pred.split() for pred in predictions]\n+ tokenized_labels = [label.split() for label in labels]\n+\n+ f1_scores = [\n+ self._compute_f1_single(label_toks, pred_toks)\n+ for label_toks, pred_toks in zip(tokenized_labels, tokenized_predictions)\n+ ]\n+\n+ f1 = np.mean(f1_scores)\n+\n+ return MetricsResult({\"f1\": f1})\n \n def _calculate_em(\n self, output_key: str, regexes_to_ignore=None, ignore_case=False, ignore_punctuation=False, ignore_numbers=False\n", "issue": "Implement function to calculate F1 metric\nAs specified in proposal #5794 we need to implement a function to calculate the F1 metric.\r\n\r\nIdeally the function should be part of the private interface and called only through the `calculate_metrics` function (see #6063). `_calculate_f1()` could be a nice name.\r\n\r\nFor more detailed information check out the original proposal.\n", "before_files": [{"content": "from typing import Any, Callable, Dict, List, Union\n\nimport numpy as np\n\nfrom haystack import Pipeline\nfrom haystack.core.component import Component\nfrom haystack.evaluation.eval_utils import get_answers_from_output, preprocess_text\nfrom haystack.evaluation.metrics import Metric, MetricsResult\n\n\nclass EvaluationResult:\n \"\"\"\n EvaluationResult keeps track of all the information related to evaluation, namely the runnable (Pipeline or\n component), inputs, outputs, and expected outputs.\n The EvaluationResult keeps track of all the information stored by eval.\n\n :param runnable: The runnable (Pipeline or component) used for evaluation.\n :param inputs: List of inputs used for evaluation.\n :param outputs: List of outputs generated by the runnable.\n :param expected_outputs: List of expected outputs used for evaluation.\n \"\"\"\n\n def __init__(\n self,\n runnable: Union[Pipeline, Component],\n inputs: List[Dict[str, Any]],\n outputs: List[Dict[str, Any]],\n expected_outputs: List[Dict[str, Any]],\n ) -> None:\n self.runnable = runnable\n self.inputs = inputs\n self.outputs = outputs\n self.expected_outputs = expected_outputs\n\n # Determine the type of the runnable\n if str(type(runnable).__name__) == \"Pipeline\":\n self.runnable_type = \"pipeline\"\n else:\n self.runnable_type = \"component\"\n\n # Mapping of metrics to their corresponding functions.\n # This should be kept in sync with the Metric enum\n self._supported_metrics: Dict[Metric, Callable[..., MetricsResult]] = {\n Metric.RECALL: self._calculate_recall,\n Metric.MRR: self._calculate_mrr,\n Metric.MAP: self._calculate_map,\n Metric.F1: self._calculate_f1,\n Metric.EM: self._calculate_em,\n Metric.SAS: self._calculate_sas,\n }\n\n def 
calculate_metrics(self, metric: Union[Metric, Callable[..., MetricsResult]], **kwargs) -> MetricsResult:\n \"\"\"\n Calculate evaluation metrics based on the provided Metric or using the custom metric function.\n\n :param metric: The Metric indicating the type of metric to calculate or custom function to compute.\n :return: MetricsResult containing the calculated metric.\n \"\"\"\n\n if isinstance(metric, Metric):\n return self._supported_metrics[metric](**kwargs)\n\n return metric(self, **kwargs)\n\n def _calculate_recall(self):\n return MetricsResult({\"recall\": None})\n\n def _calculate_map(self):\n return MetricsResult({\"mean_average_precision\": None})\n\n def _calculate_mrr(self):\n return MetricsResult({\"mean_reciprocal_rank\": None})\n\n def _calculate_f1(self):\n return MetricsResult({\"f1\": None})\n\n def _calculate_em(\n self, output_key: str, regexes_to_ignore=None, ignore_case=False, ignore_punctuation=False, ignore_numbers=False\n ) -> MetricsResult:\n \"\"\"\n Calculates the Exact Match (EM) score between two lists of predictions and labels.\n Exact Match (EM) score measures the percentage of samples where the predicted text exactly matches the\n corresponding ground truth label.\n\n :param output_key: The key of the output to use for comparison.\n :param regexes_to_ignore (list, optional): A list of regular expressions. If provided, it removes substrings\n matching these regular expressions from both predictions and labels before comparison. Defaults to None.\n :param ignore_case (bool, optional): If True, performs case-insensitive comparison. Defaults to False.\n :param ignore_punctuation (bool, optional): If True, removes punctuation from both predictions and labels before\n comparison. Defaults to False.\n :param ignore_numbers (bool, optional): If True, removes numerical digits from both predictions and labels\n before comparison. 
Defaults to False.\n :return: A MetricsResult object containing the calculated Exact Match (EM) score.\n \"\"\"\n\n predictions = get_answers_from_output(\n outputs=self.outputs, output_key=output_key, runnable_type=self.runnable_type\n )\n labels = get_answers_from_output(\n outputs=self.expected_outputs, output_key=output_key, runnable_type=self.runnable_type\n )\n\n if len(predictions) != len(labels):\n raise ValueError(\"The number of predictions and labels must be the same.\")\n if len(predictions) == len(labels) == 0:\n # Return Exact Match as 0 for no inputs\n return MetricsResult({\"exact_match\": 0.0})\n\n predictions = preprocess_text(predictions, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)\n labels = preprocess_text(labels, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)\n\n score_list = np.array(predictions) == np.array(labels)\n exact_match_score = np.mean(score_list)\n\n return MetricsResult({\"exact_match\": exact_match_score})\n\n def _calculate_sas(self):\n return MetricsResult({\"exact_match\": None})\n\n\ndef eval(\n runnable: Union[Pipeline, Component], inputs: List[Dict[str, Any]], expected_outputs: List[Dict[str, Any]]\n) -> EvaluationResult:\n \"\"\"\n Evaluates the provided Pipeline or component based on the given inputs and expected outputs.\n\n This function facilitates the evaluation of a given runnable (either a Pipeline or a component) using the provided\n inputs and corresponding expected outputs.\n\n :param runnable: The runnable (Pipeline or component) used for evaluation.\n :param inputs: List of inputs used for evaluation.\n :param expected_outputs: List of expected outputs used for evaluation.\n\n :return: An instance of EvaluationResult containing information about the evaluation, including the runnable,\n inputs, outputs, and expected outputs.\n \"\"\"\n\n outputs = []\n\n # Check that expected outputs has the correct shape\n if len(inputs) != len(expected_outputs):\n raise ValueError(\n f\"The number of inputs ({len(inputs)}) does not match the number of expected outputs \"\n f\"({len(expected_outputs)}). 
Please ensure that each input has a corresponding expected output.\"\n )\n\n for input_ in inputs:\n output = runnable.run(input_)\n outputs.append(output)\n\n return EvaluationResult(runnable, inputs, outputs, expected_outputs)\n", "path": "haystack/evaluation/eval.py"}], "after_files": [{"content": "import collections\nfrom typing import Any, Callable, Dict, List, Union\n\nimport numpy as np\n\nfrom haystack import Pipeline\nfrom haystack.core.component import Component\nfrom haystack.evaluation.eval_utils import get_answers_from_output, preprocess_text\nfrom haystack.evaluation.metrics import Metric, MetricsResult\n\n\nclass EvaluationResult:\n \"\"\"\n EvaluationResult keeps track of all the information related to evaluation, namely the runnable (Pipeline or\n component), inputs, outputs, and expected outputs.\n The EvaluationResult keeps track of all the information stored by eval.\n\n :param runnable: The runnable (Pipeline or component) used for evaluation.\n :param inputs: List of inputs used for evaluation.\n :param outputs: List of outputs generated by the runnable.\n :param expected_outputs: List of expected outputs used for evaluation.\n \"\"\"\n\n def __init__(\n self,\n runnable: Union[Pipeline, Component],\n inputs: List[Dict[str, Any]],\n outputs: List[Dict[str, Any]],\n expected_outputs: List[Dict[str, Any]],\n ) -> None:\n self.runnable = runnable\n self.inputs = inputs\n self.outputs = outputs\n self.expected_outputs = expected_outputs\n\n # Determine the type of the runnable\n if str(type(runnable).__name__) == \"Pipeline\":\n self.runnable_type = \"pipeline\"\n else:\n self.runnable_type = \"component\"\n\n # Mapping of metrics to their corresponding functions.\n # This should be kept in sync with the Metric enum\n self._supported_metrics: Dict[Metric, Callable[..., MetricsResult]] = {\n Metric.RECALL: self._calculate_recall,\n Metric.MRR: self._calculate_mrr,\n Metric.MAP: self._calculate_map,\n Metric.F1: self._calculate_f1,\n Metric.EM: self._calculate_em,\n Metric.SAS: self._calculate_sas,\n }\n\n def calculate_metrics(self, metric: Union[Metric, Callable[..., MetricsResult]], **kwargs) -> MetricsResult:\n \"\"\"\n Calculate evaluation metrics based on the provided Metric or using the custom metric function.\n\n :param metric: The Metric indicating the type of metric to calculate or custom function to compute.\n :return: MetricsResult containing the calculated metric.\n \"\"\"\n\n if isinstance(metric, Metric):\n return self._supported_metrics[metric](**kwargs)\n\n return metric(self, **kwargs)\n\n def _calculate_recall(self):\n return MetricsResult({\"recall\": None})\n\n def _calculate_map(self):\n return MetricsResult({\"mean_average_precision\": None})\n\n def _calculate_mrr(self):\n return MetricsResult({\"mean_reciprocal_rank\": None})\n\n def _compute_f1_single(self, label_toks: List[str], pred_toks: List[str]) -> float:\n \"\"\"\n Compute F1 score for a single sample.\n \"\"\"\n common: collections.Counter = collections.Counter(label_toks) & collections.Counter(pred_toks)\n num_same = sum(common.values())\n if len(label_toks) == 0 or len(pred_toks) == 0:\n # If either is no-answer, then F1 is 1 if they agree, 0 otherwise\n return int(label_toks == pred_toks)\n if num_same == 0:\n return 0\n precision = 1.0 * num_same / len(pred_toks)\n recall = 1.0 * num_same / len(label_toks)\n f1 = (2 * precision * recall) / (precision + recall)\n return f1\n\n def _calculate_f1(\n self, output_key: str, regexes_to_ignore=None, ignore_case=False, ignore_punctuation=False, 
ignore_numbers=False\n ) -> MetricsResult:\n \"\"\"\n Calculates the F1 score between two lists of predictions and labels.\n F1 score measures the word overlap between the predicted text and the corresponding ground truth label.\n\n :param output_key: The key of the output to use for comparison.\n :param regexes_to_ignore (list, optional): A list of regular expressions. If provided, it removes substrings\n matching these regular expressions from both predictions and labels before comparison. Defaults to None.\n :param ignore_case (bool, optional): If True, performs case-insensitive comparison. Defaults to False.\n :param ignore_punctuation (bool, optional): If True, removes punctuation from both predictions and labels before\n comparison. Defaults to False.\n :param ignore_numbers (bool, optional): If True, removes numerical digits from both predictions and labels\n before comparison. Defaults to False.\n :return: A MetricsResult object containing the calculated Exact Match (EM) score.\n \"\"\"\n\n predictions = get_answers_from_output(\n outputs=self.outputs, output_key=output_key, runnable_type=self.runnable_type\n )\n labels = get_answers_from_output(\n outputs=self.expected_outputs, output_key=output_key, runnable_type=self.runnable_type\n )\n\n if len(predictions) != len(labels):\n raise ValueError(\"The number of predictions and labels must be the same.\")\n if len(predictions) == len(labels) == 0:\n # Return F1 as 0 for no inputs\n return MetricsResult({\"f1\": 0.0})\n\n predictions = preprocess_text(predictions, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)\n labels = preprocess_text(labels, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)\n\n # Tokenize by splitting on spaces\n tokenized_predictions = [pred.split() for pred in predictions]\n tokenized_labels = [label.split() for label in labels]\n\n f1_scores = [\n self._compute_f1_single(label_toks, pred_toks)\n for label_toks, pred_toks in zip(tokenized_labels, tokenized_predictions)\n ]\n\n f1 = np.mean(f1_scores)\n\n return MetricsResult({\"f1\": f1})\n\n def _calculate_em(\n self, output_key: str, regexes_to_ignore=None, ignore_case=False, ignore_punctuation=False, ignore_numbers=False\n ) -> MetricsResult:\n \"\"\"\n Calculates the Exact Match (EM) score between two lists of predictions and labels.\n Exact Match (EM) score measures the percentage of samples where the predicted text exactly matches the\n corresponding ground truth label.\n\n :param output_key: The key of the output to use for comparison.\n :param regexes_to_ignore (list, optional): A list of regular expressions. If provided, it removes substrings\n matching these regular expressions from both predictions and labels before comparison. Defaults to None.\n :param ignore_case (bool, optional): If True, performs case-insensitive comparison. Defaults to False.\n :param ignore_punctuation (bool, optional): If True, removes punctuation from both predictions and labels before\n comparison. Defaults to False.\n :param ignore_numbers (bool, optional): If True, removes numerical digits from both predictions and labels\n before comparison. 
Defaults to False.\n :return: A MetricsResult object containing the calculated Exact Match (EM) score.\n \"\"\"\n\n predictions = get_answers_from_output(\n outputs=self.outputs, output_key=output_key, runnable_type=self.runnable_type\n )\n labels = get_answers_from_output(\n outputs=self.expected_outputs, output_key=output_key, runnable_type=self.runnable_type\n )\n\n if len(predictions) != len(labels):\n raise ValueError(\"The number of predictions and labels must be the same.\")\n if len(predictions) == len(labels) == 0:\n # Return Exact Match as 0 for no inputs\n return MetricsResult({\"exact_match\": 0.0})\n\n predictions = preprocess_text(predictions, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)\n labels = preprocess_text(labels, regexes_to_ignore, ignore_case, ignore_punctuation, ignore_numbers)\n\n score_list = np.array(predictions) == np.array(labels)\n exact_match_score = np.mean(score_list)\n\n return MetricsResult({\"exact_match\": exact_match_score})\n\n def _calculate_sas(self):\n return MetricsResult({\"exact_match\": None})\n\n\ndef eval(\n runnable: Union[Pipeline, Component], inputs: List[Dict[str, Any]], expected_outputs: List[Dict[str, Any]]\n) -> EvaluationResult:\n \"\"\"\n Evaluates the provided Pipeline or component based on the given inputs and expected outputs.\n\n This function facilitates the evaluation of a given runnable (either a Pipeline or a component) using the provided\n inputs and corresponding expected outputs.\n\n :param runnable: The runnable (Pipeline or component) used for evaluation.\n :param inputs: List of inputs used for evaluation.\n :param expected_outputs: List of expected outputs used for evaluation.\n\n :return: An instance of EvaluationResult containing information about the evaluation, including the runnable,\n inputs, outputs, and expected outputs.\n \"\"\"\n\n outputs = []\n\n # Check that expected outputs has the correct shape\n if len(inputs) != len(expected_outputs):\n raise ValueError(\n f\"The number of inputs ({len(inputs)}) does not match the number of expected outputs \"\n f\"({len(expected_outputs)}). Please ensure that each input has a corresponding expected output.\"\n )\n\n for input_ in inputs:\n output = runnable.run(input_)\n outputs.append(output)\n\n return EvaluationResult(runnable, inputs, outputs, expected_outputs)\n", "path": "haystack/evaluation/eval.py"}]} | 2,008 | 912 |
gh_patches_debug_27319 | rasdani/github-patches | git_diff | medtagger__MedTagger-306 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Allow backend to return Slices in reverse order
## Expected Behavior
When the user moves the slider down, the backend should send Slices in reverse order, so that the UI can show them first.
## Actual Behavior
The backend always sends Slices in ascending order.
## Steps to Reproduce the Problem
1. Go to the marker page.
 2. Move to the bottom of the current view (let's assume that the Slice you are now on has index N).
 3. The UI will request the backend to send Slices in the range (N-10, N-1).
 4. The backend will send Slices **in ascending order**: (N-10, N-9, N-8, ..., N-1).
 5. The marker will add the (N-10)th Slice to the view from the above response.
 6. The marker will let the user move across all Slices starting from N-10, but Slices (N-9, N-8, ...) won't be loaded yet!
## Additional comment
The marker should ask the backend to send Slices in descending order, so that it can load them into the view first. This behavior should be enabled **only** when the user wants to go back/down!
To debug this case, slow down your Internet connection in your browser's dev tools or put a heavy load on the backend server.
--- END ISSUE ---
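The behavior the issue asks for, and that the patch below adds via a `reversed` flag and a `last_in_batch` marker, can be sketched in isolation like this. The payload shape mirrors the patch; the slice data is made up.
```python
# Hedged sketch: emit a fetched batch in descending order so the client can
# render the nearest Slice first. Mirrors the `reversed`/`last_in_batch`
# fields added by the patch; slices and images here are placeholders.
def batch_payloads(slices, begin, count, reversed_order=False):
    indexed = list(enumerate(slices))
    if reversed_order:
        indexed.reverse()
    # Index of the final message in this batch, so the client knows when
    # the whole requested range has arrived.
    last_in_batch = begin if reversed_order else begin + count - 1
    return [
        {"index": begin + i, "last_in_batch": last_in_batch, "image": image}
        for i, (_slice, image) in indexed
    ]


fetched = [(None, "img-90"), (None, "img-91"), (None, "img-92")]
for payload in batch_payloads(fetched, begin=90, count=3, reversed_order=True):
    print(payload)  # indices 92, 91, 90: nearest-to-user first
```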
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `backend/medtagger/api/scans/service_web_socket.py`
Content:
```
1 """Module responsible for definition of Scans service available via WebSockets."""
2 from typing import Dict
3
4 from flask_socketio import Namespace, emit
5
6 from medtagger.api import web_socket
7 from medtagger.database.models import SliceOrientation
8 from medtagger.types import ScanID
9 from medtagger.api.exceptions import InvalidArgumentsException
10 from medtagger.api.scans import business
11
12
13 class Slices(Namespace):
14 """WebSocket handler for /slices namespace."""
15
16 MAX_NUMBER_OF_SLICES_PER_REQUEST = 25
17
18 def on_request_slices(self, request: Dict) -> None:
19 """Handle slices request triggered by `request_slices` event."""
20 assert request.get('scan_id'), 'ScanID is required!'
21 scan_id = ScanID(str(request['scan_id']))
22 begin = max(0, request.get('begin', 0))
23 count = request.get('count', 1)
24 orientation = request.get('orientation', SliceOrientation.Z.value)
25 self._raise_on_invalid_request_slices(count, orientation)
26
27 orientation = SliceOrientation[orientation]
28 slices = business.get_slices_for_scan(scan_id, begin, count, orientation=orientation)
29 for index, (_slice, image) in enumerate(slices):
30 emit('slice', {'scan_id': scan_id, 'index': begin + index, 'image': image})
31
32 def _raise_on_invalid_request_slices(self, count: int, orientation: str) -> None:
33 """Validate incoming request and raise an exception if there are issues with given arguments.
34
35 :param count: number of slices that should be returned
36 :param orientation: Slice's orientation as a string
37 """
38 # Make sure that passed orientation is proper one
39 if orientation not in SliceOrientation.__members__:
40 raise InvalidArgumentsException('Invalid Slice orientation.')
41
42 # Make sure that nobody will fetch whole scan at once. It could freeze our backend application.
43 if count > self.MAX_NUMBER_OF_SLICES_PER_REQUEST:
44 message = 'Cannot return more than {} slices per request.'.format(self.MAX_NUMBER_OF_SLICES_PER_REQUEST)
45 raise InvalidArgumentsException(message)
46
47
48 # Register above namespace
49 web_socket.on_namespace(Slices('/slices'))
50
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/backend/medtagger/api/scans/service_web_socket.py b/backend/medtagger/api/scans/service_web_socket.py
--- a/backend/medtagger/api/scans/service_web_socket.py
+++ b/backend/medtagger/api/scans/service_web_socket.py
@@ -21,13 +21,21 @@
scan_id = ScanID(str(request['scan_id']))
begin = max(0, request.get('begin', 0))
count = request.get('count', 1)
+ reversed_order = request.get('reversed', False)
orientation = request.get('orientation', SliceOrientation.Z.value)
self._raise_on_invalid_request_slices(count, orientation)
orientation = SliceOrientation[orientation]
slices = business.get_slices_for_scan(scan_id, begin, count, orientation=orientation)
- for index, (_slice, image) in enumerate(slices):
- emit('slice', {'scan_id': scan_id, 'index': begin + index, 'image': image})
+ slices_to_send = reversed(list(enumerate(slices))) if reversed_order else enumerate(slices)
+ last_in_batch = begin if reversed_order else begin + count - 1
+ for index, (_slice, image) in slices_to_send:
+ emit('slice', {
+ 'scan_id': scan_id,
+ 'index': begin + index,
+ 'last_in_batch': last_in_batch,
+ 'image': image,
+ })
def _raise_on_invalid_request_slices(self, count: int, orientation: str) -> None:
"""Validate incoming request and raise an exception if there are issues with given arguments.
| {"golden_diff": "diff --git a/backend/medtagger/api/scans/service_web_socket.py b/backend/medtagger/api/scans/service_web_socket.py\n--- a/backend/medtagger/api/scans/service_web_socket.py\n+++ b/backend/medtagger/api/scans/service_web_socket.py\n@@ -21,13 +21,21 @@\n scan_id = ScanID(str(request['scan_id']))\n begin = max(0, request.get('begin', 0))\n count = request.get('count', 1)\n+ reversed_order = request.get('reversed', False)\n orientation = request.get('orientation', SliceOrientation.Z.value)\n self._raise_on_invalid_request_slices(count, orientation)\n \n orientation = SliceOrientation[orientation]\n slices = business.get_slices_for_scan(scan_id, begin, count, orientation=orientation)\n- for index, (_slice, image) in enumerate(slices):\n- emit('slice', {'scan_id': scan_id, 'index': begin + index, 'image': image})\n+ slices_to_send = reversed(list(enumerate(slices))) if reversed_order else enumerate(slices)\n+ last_in_batch = begin if reversed_order else begin + count - 1\n+ for index, (_slice, image) in slices_to_send:\n+ emit('slice', {\n+ 'scan_id': scan_id,\n+ 'index': begin + index,\n+ 'last_in_batch': last_in_batch,\n+ 'image': image,\n+ })\n \n def _raise_on_invalid_request_slices(self, count: int, orientation: str) -> None:\n \"\"\"Validate incoming request and raise an exception if there are issues with given arguments.\n", "issue": "Allow backend to return Slices in reverse order\n## Expected Behavior\r\n\r\nWhen user moves slider down, backend should send Slices in reverse order, so that UI will be able to show them first.\r\n\r\n## Actual Behavior\r\n\r\nBackend always send Slices in ascending order.\r\n\r\n## Steps to Reproduce the Problem\r\n\r\n 1. Go to the marker page.\r\n 2. Move to the bottom of current view (let's assume that the last Slice on which you are now has index N).\r\n 3. UI will request backend to send Slices from range (N-10, N-1).\r\n 4. Backend will send Slices **in order**: (N-10, N-9, N-8, ..., N-1).\r\n 5. Marker will add (N-10)th Slice to the view from above response.\r\n 6. Marker will allow user to move between all Slices in range from N-10 but Slices (N-9, N-8, ...) won't be loaded yet!\r\n\r\n## Additional comment\r\n\r\nMarker should request backend to send Slices in descending order, so that it will be able to load them to the marker first. 
Such case should be enabled **only** if user wants to go back/down!\r\n\r\nTo debug this case, slow your Internet connection down in your browser's dev tools or apply huge load on the backend server.\n", "before_files": [{"content": "\"\"\"Module responsible for definition of Scans service available via WebSockets.\"\"\"\nfrom typing import Dict\n\nfrom flask_socketio import Namespace, emit\n\nfrom medtagger.api import web_socket\nfrom medtagger.database.models import SliceOrientation\nfrom medtagger.types import ScanID\nfrom medtagger.api.exceptions import InvalidArgumentsException\nfrom medtagger.api.scans import business\n\n\nclass Slices(Namespace):\n \"\"\"WebSocket handler for /slices namespace.\"\"\"\n\n MAX_NUMBER_OF_SLICES_PER_REQUEST = 25\n\n def on_request_slices(self, request: Dict) -> None:\n \"\"\"Handle slices request triggered by `request_slices` event.\"\"\"\n assert request.get('scan_id'), 'ScanID is required!'\n scan_id = ScanID(str(request['scan_id']))\n begin = max(0, request.get('begin', 0))\n count = request.get('count', 1)\n orientation = request.get('orientation', SliceOrientation.Z.value)\n self._raise_on_invalid_request_slices(count, orientation)\n\n orientation = SliceOrientation[orientation]\n slices = business.get_slices_for_scan(scan_id, begin, count, orientation=orientation)\n for index, (_slice, image) in enumerate(slices):\n emit('slice', {'scan_id': scan_id, 'index': begin + index, 'image': image})\n\n def _raise_on_invalid_request_slices(self, count: int, orientation: str) -> None:\n \"\"\"Validate incoming request and raise an exception if there are issues with given arguments.\n\n :param count: number of slices that should be returned\n :param orientation: Slice's orientation as a string\n \"\"\"\n # Make sure that passed orientation is proper one\n if orientation not in SliceOrientation.__members__:\n raise InvalidArgumentsException('Invalid Slice orientation.')\n\n # Make sure that nobody will fetch whole scan at once. 
It could freeze our backend application.\n if count > self.MAX_NUMBER_OF_SLICES_PER_REQUEST:\n message = 'Cannot return more than {} slices per request.'.format(self.MAX_NUMBER_OF_SLICES_PER_REQUEST)\n raise InvalidArgumentsException(message)\n\n\n# Register above namespace\nweb_socket.on_namespace(Slices('/slices'))\n", "path": "backend/medtagger/api/scans/service_web_socket.py"}], "after_files": [{"content": "\"\"\"Module responsible for definition of Scans service available via WebSockets.\"\"\"\nfrom typing import Dict\n\nfrom flask_socketio import Namespace, emit\n\nfrom medtagger.api import web_socket\nfrom medtagger.database.models import SliceOrientation\nfrom medtagger.types import ScanID\nfrom medtagger.api.exceptions import InvalidArgumentsException\nfrom medtagger.api.scans import business\n\n\nclass Slices(Namespace):\n \"\"\"WebSocket handler for /slices namespace.\"\"\"\n\n MAX_NUMBER_OF_SLICES_PER_REQUEST = 25\n\n def on_request_slices(self, request: Dict) -> None:\n \"\"\"Handle slices request triggered by `request_slices` event.\"\"\"\n assert request.get('scan_id'), 'ScanID is required!'\n scan_id = ScanID(str(request['scan_id']))\n begin = max(0, request.get('begin', 0))\n count = request.get('count', 1)\n reversed_order = request.get('reversed', False)\n orientation = request.get('orientation', SliceOrientation.Z.value)\n self._raise_on_invalid_request_slices(count, orientation)\n\n orientation = SliceOrientation[orientation]\n slices = business.get_slices_for_scan(scan_id, begin, count, orientation=orientation)\n slices_to_send = reversed(list(enumerate(slices))) if reversed_order else enumerate(slices)\n last_in_batch = begin if reversed_order else begin + count - 1\n for index, (_slice, image) in slices_to_send:\n emit('slice', {\n 'scan_id': scan_id,\n 'index': begin + index,\n 'last_in_batch': last_in_batch,\n 'image': image,\n })\n\n def _raise_on_invalid_request_slices(self, count: int, orientation: str) -> None:\n \"\"\"Validate incoming request and raise an exception if there are issues with given arguments.\n\n :param count: number of slices that should be returned\n :param orientation: Slice's orientation as a string\n \"\"\"\n # Make sure that passed orientation is proper one\n if orientation not in SliceOrientation.__members__:\n raise InvalidArgumentsException('Invalid Slice orientation.')\n\n # Make sure that nobody will fetch whole scan at once. It could freeze our backend application.\n if count > self.MAX_NUMBER_OF_SLICES_PER_REQUEST:\n message = 'Cannot return more than {} slices per request.'.format(self.MAX_NUMBER_OF_SLICES_PER_REQUEST)\n raise InvalidArgumentsException(message)\n\n\n# Register above namespace\nweb_socket.on_namespace(Slices('/slices'))\n", "path": "backend/medtagger/api/scans/service_web_socket.py"}]} | 1,111 | 357 |
gh_patches_debug_23334 | rasdani/github-patches | git_diff | NVIDIA__NVFlare-318 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Unable to run the poc command if nvflare is installed via `pip install -e .`
--- END ISSUE ---
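A root cause worth keeping in mind while reading the files: with an editable install (`pip install -e .`), packaged artifacts such as `poc.zip` may not exist next to the module, so `shutil.unpack_archive` raises `shutil.ReadError`. A hedged sketch of the fallback idea that the patch below takes, with illustrative paths:
```python
# Hedged sketch: unpack poc.zip if present, otherwise fall back to copying
# the template "poc" folder from the source checkout. Paths are
# illustrative; the real change lives in nvflare/lighter/poc.py.
import pathlib
import shutil

poc_zip_path = pathlib.Path("nvflare") / "poc.zip"    # assumed location
poc_folder_path = pathlib.Path("nvflare") / "poc"     # assumed location
dest_poc_folder = pathlib.Path.cwd() / "poc"

try:
    shutil.unpack_archive(poc_zip_path)  # unpacks into the cwd
except shutil.ReadError:
    # Editable installs may not ship poc.zip; use the template folder.
    print(f"poc.zip not found at {poc_zip_path}, using template poc folder")
    shutil.copytree(poc_folder_path, dest_poc_folder)
```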
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nvflare/lighter/poc.py`
Content:
```
1 # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import argparse
16 import os
17 import pathlib
18 import shutil
19
20
21 def clone_client(num_clients: int):
22 current_path = os.getcwd()
23 poc_folder = os.path.join(current_path, "poc")
24 src_folder = os.path.join(poc_folder, "client")
25 for index in range(1, num_clients + 1):
26 dst_folder = os.path.join(poc_folder, f"site-{index}")
27 shutil.copytree(src_folder, dst_folder)
28 start_sh = open(os.path.join(dst_folder, "startup", "start.sh"), "rt")
29 content = start_sh.read()
30 start_sh.close()
31 content = content.replace("NNN", f"{index}")
32 with open(os.path.join(dst_folder, "startup", "start.sh"), "wt") as f:
33 f.write(content)
34 shutil.rmtree(src_folder)
35
36
37 def main():
38 parser = argparse.ArgumentParser()
39 parser.add_argument("-n", "--num_clients", type=int, default=1, help="number of client folders to create")
40
41 args = parser.parse_args()
42
43 file_dir_path = pathlib.Path(__file__).parent.absolute()
44 poc_zip_path = file_dir_path.parent / "poc.zip"
45 answer = input("This will delete poc folder in current directory and create a new one. Is it OK to proceed? (y/N) ")
46 if answer.strip().upper() == "Y":
47 dest_poc_folder = os.path.join(os.getcwd(), "poc")
48 shutil.rmtree(dest_poc_folder, ignore_errors=True)
49 shutil.unpack_archive(poc_zip_path)
50 for root, dirs, files in os.walk(dest_poc_folder):
51 for file in files:
52 if file.endswith(".sh"):
53 os.chmod(os.path.join(root, file), 0o755)
54 clone_client(args.num_clients)
55 print("Successfully creating poc folder. Please read poc/Readme.rst for user guide.")
56
57
58 if __name__ == "__main__":
59 main()
60
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nvflare/lighter/poc.py b/nvflare/lighter/poc.py
--- a/nvflare/lighter/poc.py
+++ b/nvflare/lighter/poc.py
@@ -42,11 +42,20 @@
file_dir_path = pathlib.Path(__file__).parent.absolute()
poc_zip_path = file_dir_path.parent / "poc.zip"
+ poc_folder_path = file_dir_path.parent / "poc"
answer = input("This will delete poc folder in current directory and create a new one. Is it OK to proceed? (y/N) ")
if answer.strip().upper() == "Y":
dest_poc_folder = os.path.join(os.getcwd(), "poc")
shutil.rmtree(dest_poc_folder, ignore_errors=True)
- shutil.unpack_archive(poc_zip_path)
+ try:
+ shutil.unpack_archive(poc_zip_path)
+ except shutil.ReadError:
+ print(f"poc.zip not found at {poc_zip_path}, try to use template poc folder")
+ try:
+ shutil.copytree(poc_folder_path, dest_poc_folder)
+ except BaseException:
+ print(f"Unable to copy poc folder from {poc_folder_path}. Exit")
+ exit(1)
for root, dirs, files in os.walk(dest_poc_folder):
for file in files:
if file.endswith(".sh"):
| {"golden_diff": "diff --git a/nvflare/lighter/poc.py b/nvflare/lighter/poc.py\n--- a/nvflare/lighter/poc.py\n+++ b/nvflare/lighter/poc.py\n@@ -42,11 +42,20 @@\n \n file_dir_path = pathlib.Path(__file__).parent.absolute()\n poc_zip_path = file_dir_path.parent / \"poc.zip\"\n+ poc_folder_path = file_dir_path.parent / \"poc\"\n answer = input(\"This will delete poc folder in current directory and create a new one. Is it OK to proceed? (y/N) \")\n if answer.strip().upper() == \"Y\":\n dest_poc_folder = os.path.join(os.getcwd(), \"poc\")\n shutil.rmtree(dest_poc_folder, ignore_errors=True)\n- shutil.unpack_archive(poc_zip_path)\n+ try:\n+ shutil.unpack_archive(poc_zip_path)\n+ except shutil.ReadError:\n+ print(f\"poc.zip not found at {poc_zip_path}, try to use template poc folder\")\n+ try:\n+ shutil.copytree(poc_folder_path, dest_poc_folder)\n+ except BaseException:\n+ print(f\"Unable to copy poc folder from {poc_folder_path}. Exit\")\n+ exit(1)\n for root, dirs, files in os.walk(dest_poc_folder):\n for file in files:\n if file.endswith(\".sh\"):\n", "issue": "Unable to run poc command if nvflare is installed by pip install -e .\n\n", "before_files": [{"content": "# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nimport pathlib\nimport shutil\n\n\ndef clone_client(num_clients: int):\n current_path = os.getcwd()\n poc_folder = os.path.join(current_path, \"poc\")\n src_folder = os.path.join(poc_folder, \"client\")\n for index in range(1, num_clients + 1):\n dst_folder = os.path.join(poc_folder, f\"site-{index}\")\n shutil.copytree(src_folder, dst_folder)\n start_sh = open(os.path.join(dst_folder, \"startup\", \"start.sh\"), \"rt\")\n content = start_sh.read()\n start_sh.close()\n content = content.replace(\"NNN\", f\"{index}\")\n with open(os.path.join(dst_folder, \"startup\", \"start.sh\"), \"wt\") as f:\n f.write(content)\n shutil.rmtree(src_folder)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-n\", \"--num_clients\", type=int, default=1, help=\"number of client folders to create\")\n\n args = parser.parse_args()\n\n file_dir_path = pathlib.Path(__file__).parent.absolute()\n poc_zip_path = file_dir_path.parent / \"poc.zip\"\n answer = input(\"This will delete poc folder in current directory and create a new one. Is it OK to proceed? (y/N) \")\n if answer.strip().upper() == \"Y\":\n dest_poc_folder = os.path.join(os.getcwd(), \"poc\")\n shutil.rmtree(dest_poc_folder, ignore_errors=True)\n shutil.unpack_archive(poc_zip_path)\n for root, dirs, files in os.walk(dest_poc_folder):\n for file in files:\n if file.endswith(\".sh\"):\n os.chmod(os.path.join(root, file), 0o755)\n clone_client(args.num_clients)\n print(\"Successfully creating poc folder. Please read poc/Readme.rst for user guide.\")\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "nvflare/lighter/poc.py"}], "after_files": [{"content": "# Copyright (c) 2021-2022, NVIDIA CORPORATION. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport argparse\nimport os\nimport pathlib\nimport shutil\n\n\ndef clone_client(num_clients: int):\n current_path = os.getcwd()\n poc_folder = os.path.join(current_path, \"poc\")\n src_folder = os.path.join(poc_folder, \"client\")\n for index in range(1, num_clients + 1):\n dst_folder = os.path.join(poc_folder, f\"site-{index}\")\n shutil.copytree(src_folder, dst_folder)\n start_sh = open(os.path.join(dst_folder, \"startup\", \"start.sh\"), \"rt\")\n content = start_sh.read()\n start_sh.close()\n content = content.replace(\"NNN\", f\"{index}\")\n with open(os.path.join(dst_folder, \"startup\", \"start.sh\"), \"wt\") as f:\n f.write(content)\n shutil.rmtree(src_folder)\n\n\ndef main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-n\", \"--num_clients\", type=int, default=1, help=\"number of client folders to create\")\n\n args = parser.parse_args()\n\n file_dir_path = pathlib.Path(__file__).parent.absolute()\n poc_zip_path = file_dir_path.parent / \"poc.zip\"\n poc_folder_path = file_dir_path.parent / \"poc\"\n answer = input(\"This will delete poc folder in current directory and create a new one. Is it OK to proceed? (y/N) \")\n if answer.strip().upper() == \"Y\":\n dest_poc_folder = os.path.join(os.getcwd(), \"poc\")\n shutil.rmtree(dest_poc_folder, ignore_errors=True)\n try:\n shutil.unpack_archive(poc_zip_path)\n except shutil.ReadError:\n print(f\"poc.zip not found at {poc_zip_path}, try to use template poc folder\")\n try:\n shutil.copytree(poc_folder_path, dest_poc_folder)\n except BaseException:\n print(f\"Unable to copy poc folder from {poc_folder_path}. Exit\")\n exit(1)\n for root, dirs, files in os.walk(dest_poc_folder):\n for file in files:\n if file.endswith(\".sh\"):\n os.chmod(os.path.join(root, file), 0o755)\n clone_client(args.num_clients)\n print(\"Successfully creating poc folder. Please read poc/Readme.rst for user guide.\")\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "nvflare/lighter/poc.py"}]} | 951 | 305 |
gh_patches_debug_33905 | rasdani/github-patches | git_diff | aws-cloudformation__cfn-lint-2873 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
(AWS::Lambda::Function.FileSystemConfigs.LocalMountPath) `The local mount path must start with /mnt/ and be a valid absolute path.` not being checked
### Is this feature request related to a new rule or cfn-lint capabilities?
New capability
### Describe the feature you'd like to request
The following CFN template doesn't check the path for `LocalMountPath`:
```yaml
LambdaFunction:
Type: AWS::Lambda::Function
Properties:
FileSystemConfigs:
- Arn: <arn>
LocalMountPath: /efs
```
According to the Lambda GUI the path needs to start with `/mnt/`:
```
Local mount path
Only absolute paths are supported.
The local mount path must start with /mnt/ and be a valid absolute path.
```
### Describe the solution you'd like
cfn-lint should give you a notification if the path doesn't start with `/mnt/`:
`The local mount path must start with /mnt/ and be a valid absolute path.`
### Additional context
cfn-lint v0.79.7
### Is this something that you'd be interested in working on?
- [ ] 👋 I may be able to implement this feature request
### Would this feature include a breaking change?
- [ ] ⚠️ This feature might incur a breaking change
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cfnlint/rules/resources/properties/AllowedPattern.py`
Content:
```
1 """
2 Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 SPDX-License-Identifier: MIT-0
4 """
5 import regex as re
6
7 from cfnlint.helpers import RESOURCE_SPECS
8 from cfnlint.rules import CloudFormationLintRule, RuleMatch
9
10
11 class AllowedPattern(CloudFormationLintRule):
12 """Check if properties have a valid value"""
13
14 id = "E3031"
15 shortdesc = "Check if property values adhere to a specific pattern"
16 description = "Check if properties have a valid value in case of a pattern (Regular Expression)"
17 source_url = "https://github.com/awslabs/cfn-python-lint/blob/main/docs/cfn-resource-specification.md#allowedpattern"
18 tags = ["resources", "property", "allowed pattern", "regex"]
19
20 def __init__(self):
21 """Init"""
22 super().__init__()
23 self.config_definition = {
24 "exceptions": {
25 "default": [],
26 "type": "list",
27 "itemtype": "string",
28 }
29 }
30 self.configure()
31
32 def initialize(self, cfn):
33 """Initialize the rule"""
34 for resource_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get(
35 "ResourceTypes"
36 ):
37 self.resource_property_types.append(resource_type_spec)
38 for property_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get(
39 "PropertyTypes"
40 ):
41 self.resource_sub_property_types.append(property_type_spec)
42
43 def check_value(self, value, path, property_name, **kwargs):
44 """Check Value"""
45 matches = []
46
47 # Get the Allowed Pattern Regex
48 value_pattern_regex = kwargs.get("value_specs", {}).get(
49 "AllowedPatternRegex", {}
50 )
51 # Get the "Human Readable" version for the error message. Optional, if not specified,
52 # the RegEx itself is used.
53 value_pattern = kwargs.get("value_specs", {}).get(
54 "AllowedPattern", value_pattern_regex
55 )
56
57 if isinstance(value, (int, float)):
58 value = str(value)
59
60 if isinstance(value, str):
61 if value_pattern_regex:
62 regex = re.compile(value_pattern_regex, re.ASCII)
63
64 # Ignore values with dynamic references. Simple check to prevent false-positives
65 # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html
66 if "{{resolve:" not in value:
67 if not regex.match(value):
68 for exception in self.config.get("exceptions"):
69 exception_regex = re.compile(exception)
70 if exception_regex.match(value):
71 return matches
72 full_path = "/".join(str(x) for x in path)
73
74 message = "{} contains invalid characters (Pattern: {}) at {}"
75 matches.append(
76 RuleMatch(
77 path,
78 message.format(property_name, value_pattern, full_path),
79 )
80 )
81
82 return matches
83
84 def check(self, cfn, properties, value_specs, property_specs, path):
85 """Check itself"""
86 matches = []
87 for p_value, p_path in properties.items_safe(path[:]):
88 for prop in p_value:
89 if prop in value_specs:
90 value = value_specs.get(prop).get("Value", {})
91 if value:
92 value_type = value.get("ValueType", "")
93 property_type = (
94 property_specs.get("Properties").get(prop).get("Type")
95 )
96 value_specs = (
97 RESOURCE_SPECS.get(cfn.regions[0])
98 .get("ValueTypes")
99 .get(value_type, {})
100 )
101 if value_specs == "CACHED":
102 value_specs = (
103 RESOURCE_SPECS.get("us-east-1")
104 .get("ValueTypes")
105 .get(value_type, {})
106 )
107 matches.extend(
108 cfn.check_value(
109 p_value,
110 prop,
111 p_path,
112 check_value=self.check_value,
113 value_specs=value_specs,
114 cfn=cfn,
115 property_type=property_type,
116 property_name=prop,
117 )
118 )
119 return matches
120
121 def match_resource_sub_properties(self, properties, property_type, path, cfn):
122 """Match for sub properties"""
123 matches = []
124
125 specs = (
126 RESOURCE_SPECS.get(cfn.regions[0])
127 .get("PropertyTypes")
128 .get(property_type, {})
129 .get("Properties", {})
130 )
131 property_specs = (
132 RESOURCE_SPECS.get(cfn.regions[0]).get("PropertyTypes").get(property_type)
133 )
134 matches.extend(self.check(cfn, properties, specs, property_specs, path))
135
136 return matches
137
138 def match_resource_properties(self, properties, resource_type, path, cfn):
139 """Check CloudFormation Properties"""
140 matches = []
141
142 specs = (
143 RESOURCE_SPECS.get(cfn.regions[0])
144 .get("ResourceTypes")
145 .get(resource_type, {})
146 .get("Properties", {})
147 )
148 resource_specs = (
149 RESOURCE_SPECS.get(cfn.regions[0]).get("ResourceTypes").get(resource_type)
150 )
151 matches.extend(self.check(cfn, properties, specs, resource_specs, path))
152
153 return matches
154
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cfnlint/rules/resources/properties/AllowedPattern.py b/src/cfnlint/rules/resources/properties/AllowedPattern.py
--- a/src/cfnlint/rules/resources/properties/AllowedPattern.py
+++ b/src/cfnlint/rules/resources/properties/AllowedPattern.py
@@ -81,18 +81,16 @@
return matches
- def check(self, cfn, properties, value_specs, property_specs, path):
+ def check(self, cfn, properties, property_specs, path):
"""Check itself"""
matches = []
for p_value, p_path in properties.items_safe(path[:]):
for prop in p_value:
- if prop in value_specs:
- value = value_specs.get(prop).get("Value", {})
+ if prop in property_specs:
+ value = property_specs.get(prop).get("Value", {})
if value:
value_type = value.get("ValueType", "")
- property_type = (
- property_specs.get("Properties").get(prop).get("Type")
- )
+ property_type = property_specs.get(prop).get("Type")
value_specs = (
RESOURCE_SPECS.get(cfn.regions[0])
.get("ValueTypes")
@@ -128,10 +126,7 @@
.get(property_type, {})
.get("Properties", {})
)
- property_specs = (
- RESOURCE_SPECS.get(cfn.regions[0]).get("PropertyTypes").get(property_type)
- )
- matches.extend(self.check(cfn, properties, specs, property_specs, path))
+ matches.extend(self.check(cfn, properties, specs, path))
return matches
@@ -145,9 +140,6 @@
.get(resource_type, {})
.get("Properties", {})
)
- resource_specs = (
- RESOURCE_SPECS.get(cfn.regions[0]).get("ResourceTypes").get(resource_type)
- )
- matches.extend(self.check(cfn, properties, specs, resource_specs, path))
+ matches.extend(self.check(cfn, properties, specs, path))
return matches
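Worth noting: the diff above is a refactor (`check()` now reads the value specs straight from the property specs), while the `/mnt/` validation itself would come from an `AllowedPatternRegex` entry in the resource specification data that this rule consumes. As a standalone illustration of the requested check (the exact pattern is not stated in the issue, so the regex below is an assumption):
```python
import re

# Hypothetical pattern approximating the Lambda console rule quoted in the issue.
LOCAL_MOUNT_PATH_RE = re.compile(r"^/mnt/[A-Za-z0-9._-]+$")

def is_valid_local_mount_path(path: str) -> bool:
    return bool(LOCAL_MOUNT_PATH_RE.match(path))

assert is_valid_local_mount_path("/mnt/efs")
assert not is_valid_local_mount_path("/efs")  # the case from the issue's example template
```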
| {"golden_diff": "diff --git a/src/cfnlint/rules/resources/properties/AllowedPattern.py b/src/cfnlint/rules/resources/properties/AllowedPattern.py\n--- a/src/cfnlint/rules/resources/properties/AllowedPattern.py\n+++ b/src/cfnlint/rules/resources/properties/AllowedPattern.py\n@@ -81,18 +81,16 @@\n \n return matches\n \n- def check(self, cfn, properties, value_specs, property_specs, path):\n+ def check(self, cfn, properties, property_specs, path):\n \"\"\"Check itself\"\"\"\n matches = []\n for p_value, p_path in properties.items_safe(path[:]):\n for prop in p_value:\n- if prop in value_specs:\n- value = value_specs.get(prop).get(\"Value\", {})\n+ if prop in property_specs:\n+ value = property_specs.get(prop).get(\"Value\", {})\n if value:\n value_type = value.get(\"ValueType\", \"\")\n- property_type = (\n- property_specs.get(\"Properties\").get(prop).get(\"Type\")\n- )\n+ property_type = property_specs.get(prop).get(\"Type\")\n value_specs = (\n RESOURCE_SPECS.get(cfn.regions[0])\n .get(\"ValueTypes\")\n@@ -128,10 +126,7 @@\n .get(property_type, {})\n .get(\"Properties\", {})\n )\n- property_specs = (\n- RESOURCE_SPECS.get(cfn.regions[0]).get(\"PropertyTypes\").get(property_type)\n- )\n- matches.extend(self.check(cfn, properties, specs, property_specs, path))\n+ matches.extend(self.check(cfn, properties, specs, path))\n \n return matches\n \n@@ -145,9 +140,6 @@\n .get(resource_type, {})\n .get(\"Properties\", {})\n )\n- resource_specs = (\n- RESOURCE_SPECS.get(cfn.regions[0]).get(\"ResourceTypes\").get(resource_type)\n- )\n- matches.extend(self.check(cfn, properties, specs, resource_specs, path))\n+ matches.extend(self.check(cfn, properties, specs, path))\n \n return matches\n", "issue": "(AWS::Lambda::Function.FileSystemConfigs.LocalMountPath) `The local mount path must start with /mnt/ and be a valid absolute path.` not being checked\n### Is this feature request related to a new rule or cfn-lint capabilities?\n\nNew capability\n\n### Describe the feature you'd like to request\n\nThe following CFN template doesn't check the path for `LocalMountPath`:\r\n\r\n```yaml\r\nLambdaFunction:\r\n Type: AWS::Lambda::Function\r\n Properties:\r\n FileSystemConfigs:\r\n - Arn: <arn>\r\n LocalMountPath: /efs\r\n```\r\n\r\nAccording to the Lambda GUI the path needs to start with `/mnt/`:\r\n\r\n```\r\nLocal mount path\r\nOnly absolute paths are supported.\r\nThe local mount path must start with /mnt/ and be a valid absolute path.\r\n```\n\n### Describe the solution you'd like\n\ncfn-lint should give you a notification if the path doesn't start with `/mnt/`:\r\n\r\n`The local mount path must start with /mnt/ and be a valid absolute path.`\n\n### Additional context\n\ncfn-lint v0.79.7\n\n### Is this something that you'd be interested in working on?\n\n- [ ] \ud83d\udc4b I may be able to implement this feature request\n\n### Would this feature include a breaking change?\n\n- [ ] \u26a0\ufe0f This feature might incur a breaking change\n", "before_files": [{"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\nimport regex as re\n\nfrom cfnlint.helpers import RESOURCE_SPECS\nfrom cfnlint.rules import CloudFormationLintRule, RuleMatch\n\n\nclass AllowedPattern(CloudFormationLintRule):\n \"\"\"Check if properties have a valid value\"\"\"\n\n id = \"E3031\"\n shortdesc = \"Check if property values adhere to a specific pattern\"\n description = \"Check if properties have a valid value in case of a pattern (Regular Expression)\"\n source_url = \"https://github.com/awslabs/cfn-python-lint/blob/main/docs/cfn-resource-specification.md#allowedpattern\"\n tags = [\"resources\", \"property\", \"allowed pattern\", \"regex\"]\n\n def __init__(self):\n \"\"\"Init\"\"\"\n super().__init__()\n self.config_definition = {\n \"exceptions\": {\n \"default\": [],\n \"type\": \"list\",\n \"itemtype\": \"string\",\n }\n }\n self.configure()\n\n def initialize(self, cfn):\n \"\"\"Initialize the rule\"\"\"\n for resource_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get(\n \"ResourceTypes\"\n ):\n self.resource_property_types.append(resource_type_spec)\n for property_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get(\n \"PropertyTypes\"\n ):\n self.resource_sub_property_types.append(property_type_spec)\n\n def check_value(self, value, path, property_name, **kwargs):\n \"\"\"Check Value\"\"\"\n matches = []\n\n # Get the Allowed Pattern Regex\n value_pattern_regex = kwargs.get(\"value_specs\", {}).get(\n \"AllowedPatternRegex\", {}\n )\n # Get the \"Human Readable\" version for the error message. Optional, if not specified,\n # the RegEx itself is used.\n value_pattern = kwargs.get(\"value_specs\", {}).get(\n \"AllowedPattern\", value_pattern_regex\n )\n\n if isinstance(value, (int, float)):\n value = str(value)\n\n if isinstance(value, str):\n if value_pattern_regex:\n regex = re.compile(value_pattern_regex, re.ASCII)\n\n # Ignore values with dynamic references. 
Simple check to prevent false-positives\n # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html\n if \"{{resolve:\" not in value:\n if not regex.match(value):\n for exception in self.config.get(\"exceptions\"):\n exception_regex = re.compile(exception)\n if exception_regex.match(value):\n return matches\n full_path = \"/\".join(str(x) for x in path)\n\n message = \"{} contains invalid characters (Pattern: {}) at {}\"\n matches.append(\n RuleMatch(\n path,\n message.format(property_name, value_pattern, full_path),\n )\n )\n\n return matches\n\n def check(self, cfn, properties, value_specs, property_specs, path):\n \"\"\"Check itself\"\"\"\n matches = []\n for p_value, p_path in properties.items_safe(path[:]):\n for prop in p_value:\n if prop in value_specs:\n value = value_specs.get(prop).get(\"Value\", {})\n if value:\n value_type = value.get(\"ValueType\", \"\")\n property_type = (\n property_specs.get(\"Properties\").get(prop).get(\"Type\")\n )\n value_specs = (\n RESOURCE_SPECS.get(cfn.regions[0])\n .get(\"ValueTypes\")\n .get(value_type, {})\n )\n if value_specs == \"CACHED\":\n value_specs = (\n RESOURCE_SPECS.get(\"us-east-1\")\n .get(\"ValueTypes\")\n .get(value_type, {})\n )\n matches.extend(\n cfn.check_value(\n p_value,\n prop,\n p_path,\n check_value=self.check_value,\n value_specs=value_specs,\n cfn=cfn,\n property_type=property_type,\n property_name=prop,\n )\n )\n return matches\n\n def match_resource_sub_properties(self, properties, property_type, path, cfn):\n \"\"\"Match for sub properties\"\"\"\n matches = []\n\n specs = (\n RESOURCE_SPECS.get(cfn.regions[0])\n .get(\"PropertyTypes\")\n .get(property_type, {})\n .get(\"Properties\", {})\n )\n property_specs = (\n RESOURCE_SPECS.get(cfn.regions[0]).get(\"PropertyTypes\").get(property_type)\n )\n matches.extend(self.check(cfn, properties, specs, property_specs, path))\n\n return matches\n\n def match_resource_properties(self, properties, resource_type, path, cfn):\n \"\"\"Check CloudFormation Properties\"\"\"\n matches = []\n\n specs = (\n RESOURCE_SPECS.get(cfn.regions[0])\n .get(\"ResourceTypes\")\n .get(resource_type, {})\n .get(\"Properties\", {})\n )\n resource_specs = (\n RESOURCE_SPECS.get(cfn.regions[0]).get(\"ResourceTypes\").get(resource_type)\n )\n matches.extend(self.check(cfn, properties, specs, resource_specs, path))\n\n return matches\n", "path": "src/cfnlint/rules/resources/properties/AllowedPattern.py"}], "after_files": [{"content": "\"\"\"\nCopyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved.\nSPDX-License-Identifier: MIT-0\n\"\"\"\nimport regex as re\n\nfrom cfnlint.helpers import RESOURCE_SPECS\nfrom cfnlint.rules import CloudFormationLintRule, RuleMatch\n\n\nclass AllowedPattern(CloudFormationLintRule):\n \"\"\"Check if properties have a valid value\"\"\"\n\n id = \"E3031\"\n shortdesc = \"Check if property values adhere to a specific pattern\"\n description = \"Check if properties have a valid value in case of a pattern (Regular Expression)\"\n source_url = \"https://github.com/awslabs/cfn-python-lint/blob/main/docs/cfn-resource-specification.md#allowedpattern\"\n tags = [\"resources\", \"property\", \"allowed pattern\", \"regex\"]\n\n def __init__(self):\n \"\"\"Init\"\"\"\n super().__init__()\n self.config_definition = {\n \"exceptions\": {\n \"default\": [],\n \"type\": \"list\",\n \"itemtype\": \"string\",\n }\n }\n self.configure()\n\n def initialize(self, cfn):\n \"\"\"Initialize the rule\"\"\"\n for resource_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get(\n \"ResourceTypes\"\n ):\n self.resource_property_types.append(resource_type_spec)\n for property_type_spec in RESOURCE_SPECS.get(cfn.regions[0]).get(\n \"PropertyTypes\"\n ):\n self.resource_sub_property_types.append(property_type_spec)\n\n def check_value(self, value, path, property_name, **kwargs):\n \"\"\"Check Value\"\"\"\n matches = []\n\n # Get the Allowed Pattern Regex\n value_pattern_regex = kwargs.get(\"value_specs\", {}).get(\n \"AllowedPatternRegex\", {}\n )\n # Get the \"Human Readable\" version for the error message. Optional, if not specified,\n # the RegEx itself is used.\n value_pattern = kwargs.get(\"value_specs\", {}).get(\n \"AllowedPattern\", value_pattern_regex\n )\n\n if isinstance(value, (int, float)):\n value = str(value)\n\n if isinstance(value, str):\n if value_pattern_regex:\n regex = re.compile(value_pattern_regex, re.ASCII)\n\n # Ignore values with dynamic references. 
Simple check to prevent false-positives\n # See: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/dynamic-references.html\n if \"{{resolve:\" not in value:\n if not regex.match(value):\n for exception in self.config.get(\"exceptions\"):\n exception_regex = re.compile(exception)\n if exception_regex.match(value):\n return matches\n full_path = \"/\".join(str(x) for x in path)\n\n message = \"{} contains invalid characters (Pattern: {}) at {}\"\n matches.append(\n RuleMatch(\n path,\n message.format(property_name, value_pattern, full_path),\n )\n )\n\n return matches\n\n def check(self, cfn, properties, property_specs, path):\n \"\"\"Check itself\"\"\"\n matches = []\n for p_value, p_path in properties.items_safe(path[:]):\n for prop in p_value:\n if prop in property_specs:\n value = property_specs.get(prop).get(\"Value\", {})\n if value:\n value_type = value.get(\"ValueType\", \"\")\n property_type = property_specs.get(prop).get(\"Type\")\n value_specs = (\n RESOURCE_SPECS.get(cfn.regions[0])\n .get(\"ValueTypes\")\n .get(value_type, {})\n )\n if value_specs == \"CACHED\":\n value_specs = (\n RESOURCE_SPECS.get(\"us-east-1\")\n .get(\"ValueTypes\")\n .get(value_type, {})\n )\n matches.extend(\n cfn.check_value(\n p_value,\n prop,\n p_path,\n check_value=self.check_value,\n value_specs=value_specs,\n cfn=cfn,\n property_type=property_type,\n property_name=prop,\n )\n )\n return matches\n\n def match_resource_sub_properties(self, properties, property_type, path, cfn):\n \"\"\"Match for sub properties\"\"\"\n matches = []\n\n specs = (\n RESOURCE_SPECS.get(cfn.regions[0])\n .get(\"PropertyTypes\")\n .get(property_type, {})\n .get(\"Properties\", {})\n )\n matches.extend(self.check(cfn, properties, specs, path))\n\n return matches\n\n def match_resource_properties(self, properties, resource_type, path, cfn):\n \"\"\"Check CloudFormation Properties\"\"\"\n matches = []\n\n specs = (\n RESOURCE_SPECS.get(cfn.regions[0])\n .get(\"ResourceTypes\")\n .get(resource_type, {})\n .get(\"Properties\", {})\n )\n matches.extend(self.check(cfn, properties, specs, path))\n\n return matches\n", "path": "src/cfnlint/rules/resources/properties/AllowedPattern.py"}]} | 1,996 | 463 |
gh_patches_debug_41144 | rasdani/github-patches | git_diff | streamlink__streamlink-4029 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
artetv: de/fr Livestreams aren't playable anymore
### Checklist
- [X] This is a plugin issue and not a different kind of issue
- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)
- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)
- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)
### Streamlink version
Latest build from the master branch
### Description
Since about a week the live channels aren't playable anymore. However VODs working fine.
### Debug log
```text
streamlink https://www.arte.tv/de/live/ worst -l debug
[cli][debug] OS: Linux-5.14.3-arch1-1-x86_64-with-glibc2.33
[cli][debug] Python: 3.9.7
[cli][debug] Streamlink: 2.4.0+17.g24c59a2
[cli][debug] Requests(2.26.0), Socks(1.7.1), Websocket(0.59.0)
[cli][debug] Arguments:
[cli][debug] url=https://www.arte.tv/de/live/
[cli][debug] stream=['worst']
[cli][debug] --loglevel=debug
[cli][info] Found matching plugin artetv for URL https://www.arte.tv/de/live/
error: No playable streams found on this URL: https://www.arte.tv/de/live/
streamlink https://www.arte.tv/fr/direct/ best -l debug
[cli][debug] OS: Linux-5.14.3-arch1-1-x86_64-with-glibc2.33
[cli][debug] Python: 3.9.7
[cli][debug] Streamlink: 2.4.0+17.g24c59a2
[cli][debug] Requests(2.26.0), Socks(1.7.1), Websocket(0.59.0)
[cli][debug] Arguments:
[cli][debug] url=https://www.arte.tv/fr/direct/
[cli][debug] stream=['best']
[cli][debug] --loglevel=debug
[cli][info] Found matching plugin artetv for URL https://www.arte.tv/fr/direct/
error: No playable streams found on this URL: https://www.arte.tv/fr/direct/
```
plugins.arte: switch to arte.tv v2 API
The Arte.tv V1 API doesn't seem to work anymore for live streams (see #4026).
Both the website and the mobile app use the V2 API, which requires an authentication token. The one from the website is used here for this fix.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/artetv.py`
Content:
```
1 """Plugin for Arte.tv, bi-lingual art and culture channel."""
2
3 import logging
4 import re
5 from operator import itemgetter
6
7 from streamlink.plugin import Plugin, pluginmatcher
8 from streamlink.plugin.api import validate
9 from streamlink.stream import HLSStream
10
11 log = logging.getLogger(__name__)
12 JSON_VOD_URL = "https://api.arte.tv/api/player/v1/config/{0}/{1}?platform=ARTE_NEXT"
13 JSON_LIVE_URL = "https://api.arte.tv/api/player/v1/livestream/{0}"
14
15 _video_schema = validate.Schema({
16 "videoJsonPlayer": {
17 "VSR": validate.any(
18 [],
19 {
20 validate.text: {
21 "height": int,
22 "mediaType": validate.text,
23 "url": validate.text,
24 "versionProg": int,
25 "versionLibelle": validate.text
26 },
27 },
28 )
29 }
30 })
31
32
33 @pluginmatcher(re.compile(r"""
34 https?://(?:\w+\.)?arte\.tv/(?:guide/)?
35 (?P<language>[a-z]{2})/
36 (?:
37 (?:videos/)?(?P<video_id>(?!RC-|videos)[^/]+?)/.+
38 |
39 (?:direct|live)
40 )
41 """, re.VERBOSE))
42 class ArteTV(Plugin):
43 def _create_stream(self, streams):
44 variant, variantname = min([(stream["versionProg"], stream["versionLibelle"]) for stream in streams.values()],
45 key=itemgetter(0))
46 log.debug(f"Using the '{variantname}' stream variant")
47 for sname, stream in streams.items():
48 if stream["versionProg"] == variant:
49 if stream["mediaType"] == "hls":
50 try:
51 streams = HLSStream.parse_variant_playlist(self.session, stream["url"])
52 yield from streams.items()
53 except OSError as err:
54 log.warning(f"Failed to extract HLS streams for {sname}/{stream['versionLibelle']}: {err}")
55
56 def _get_streams(self):
57 language = self.match.group('language')
58 video_id = self.match.group('video_id')
59 if video_id is None:
60 json_url = JSON_LIVE_URL.format(language)
61 else:
62 json_url = JSON_VOD_URL.format(language, video_id)
63 res = self.session.http.get(json_url)
64 video = self.session.http.json(res, schema=_video_schema)
65
66 if not video["videoJsonPlayer"]["VSR"]:
67 return
68
69 vsr = video["videoJsonPlayer"]["VSR"]
70 return self._create_stream(vsr)
71
72
73 __plugin__ = ArteTV
74
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/streamlink/plugins/artetv.py b/src/streamlink/plugins/artetv.py
--- a/src/streamlink/plugins/artetv.py
+++ b/src/streamlink/plugins/artetv.py
@@ -1,5 +1,3 @@
-"""Plugin for Arte.tv, bi-lingual art and culture channel."""
-
import logging
import re
from operator import itemgetter
@@ -9,25 +7,6 @@
from streamlink.stream import HLSStream
log = logging.getLogger(__name__)
-JSON_VOD_URL = "https://api.arte.tv/api/player/v1/config/{0}/{1}?platform=ARTE_NEXT"
-JSON_LIVE_URL = "https://api.arte.tv/api/player/v1/livestream/{0}"
-
-_video_schema = validate.Schema({
- "videoJsonPlayer": {
- "VSR": validate.any(
- [],
- {
- validate.text: {
- "height": int,
- "mediaType": validate.text,
- "url": validate.text,
- "versionProg": int,
- "versionLibelle": validate.text
- },
- },
- )
- }
-})
@pluginmatcher(re.compile(r"""
@@ -40,34 +19,49 @@
)
""", re.VERBOSE))
class ArteTV(Plugin):
- def _create_stream(self, streams):
- variant, variantname = min([(stream["versionProg"], stream["versionLibelle"]) for stream in streams.values()],
- key=itemgetter(0))
- log.debug(f"Using the '{variantname}' stream variant")
- for sname, stream in streams.items():
- if stream["versionProg"] == variant:
- if stream["mediaType"] == "hls":
- try:
- streams = HLSStream.parse_variant_playlist(self.session, stream["url"])
- yield from streams.items()
- except OSError as err:
- log.warning(f"Failed to extract HLS streams for {sname}/{stream['versionLibelle']}: {err}")
+ API_URL = "https://api.arte.tv/api/player/v2/config/{0}/{1}"
+ API_TOKEN = "MzYyZDYyYmM1Y2Q3ZWRlZWFjMmIyZjZjNTRiMGY4MzY4NzBhOWQ5YjE4MGQ1NGFiODJmOTFlZDQwN2FkOTZjMQ"
def _get_streams(self):
- language = self.match.group('language')
- video_id = self.match.group('video_id')
- if video_id is None:
- json_url = JSON_LIVE_URL.format(language)
- else:
- json_url = JSON_VOD_URL.format(language, video_id)
- res = self.session.http.get(json_url)
- video = self.session.http.json(res, schema=_video_schema)
+ language = self.match.group("language")
+ video_id = self.match.group("video_id")
- if not video["videoJsonPlayer"]["VSR"]:
+ json_url = self.API_URL.format(language, video_id or "LIVE")
+ headers = {
+ "Authorization": f"Bearer {self.API_TOKEN}"
+ }
+ streams, metadata = self.session.http.get(json_url, headers=headers, schema=validate.Schema(
+ validate.parse_json(),
+ {"data": {"attributes": {
+ "streams": validate.any(
+ [],
+ [
+ validate.all(
+ {
+ "url": validate.url(),
+ "slot": int,
+ "protocol": validate.any("HLS", "HLS_NG"),
+ },
+ validate.union_get("slot", "protocol", "url")
+ )
+ ]
+ ),
+ "metadata": {
+ "title": str,
+ "subtitle": validate.any(None, str)
+ }
+ }}},
+ validate.get(("data", "attributes")),
+ validate.union_get("streams", "metadata")
+ ))
+
+ if not streams:
return
- vsr = video["videoJsonPlayer"]["VSR"]
- return self._create_stream(vsr)
+ self.title = f"{metadata['title']} - {metadata['subtitle']}" if metadata["subtitle"] else metadata["title"]
+
+ for slot, protocol, url in sorted(streams, key=itemgetter(0)):
+ return HLSStream.parse_variant_playlist(self.session, url)
__plugin__ = ArteTV
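For reference, the request the patched plugin makes can be reproduced outside Streamlink; the URL template and bearer token below are taken verbatim from the diff above, and the response shape is what the plugin's schema validates (real payloads may carry more fields):
```python
import requests

API_URL = "https://api.arte.tv/api/player/v2/config/{0}/{1}"
API_TOKEN = "MzYyZDYyYmM1Y2Q3ZWRlZWFjMmIyZjZjNTRiMGY4MzY4NzBhOWQ5YjE4MGQ1NGFiODJmOTFlZDQwN2FkOTZjMQ"

resp = requests.get(
    API_URL.format("de", "LIVE"),  # language code, and "LIVE" when the URL has no video id
    headers={"Authorization": f"Bearer {API_TOKEN}"},
)
attrs = resp.json()["data"]["attributes"]  # holds "streams" and "metadata" per the schema in the diff
```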
| {"golden_diff": "diff --git a/src/streamlink/plugins/artetv.py b/src/streamlink/plugins/artetv.py\n--- a/src/streamlink/plugins/artetv.py\n+++ b/src/streamlink/plugins/artetv.py\n@@ -1,5 +1,3 @@\n-\"\"\"Plugin for Arte.tv, bi-lingual art and culture channel.\"\"\"\n-\n import logging\n import re\n from operator import itemgetter\n@@ -9,25 +7,6 @@\n from streamlink.stream import HLSStream\n \n log = logging.getLogger(__name__)\n-JSON_VOD_URL = \"https://api.arte.tv/api/player/v1/config/{0}/{1}?platform=ARTE_NEXT\"\n-JSON_LIVE_URL = \"https://api.arte.tv/api/player/v1/livestream/{0}\"\n-\n-_video_schema = validate.Schema({\n- \"videoJsonPlayer\": {\n- \"VSR\": validate.any(\n- [],\n- {\n- validate.text: {\n- \"height\": int,\n- \"mediaType\": validate.text,\n- \"url\": validate.text,\n- \"versionProg\": int,\n- \"versionLibelle\": validate.text\n- },\n- },\n- )\n- }\n-})\n \n \n @pluginmatcher(re.compile(r\"\"\"\n@@ -40,34 +19,49 @@\n )\n \"\"\", re.VERBOSE))\n class ArteTV(Plugin):\n- def _create_stream(self, streams):\n- variant, variantname = min([(stream[\"versionProg\"], stream[\"versionLibelle\"]) for stream in streams.values()],\n- key=itemgetter(0))\n- log.debug(f\"Using the '{variantname}' stream variant\")\n- for sname, stream in streams.items():\n- if stream[\"versionProg\"] == variant:\n- if stream[\"mediaType\"] == \"hls\":\n- try:\n- streams = HLSStream.parse_variant_playlist(self.session, stream[\"url\"])\n- yield from streams.items()\n- except OSError as err:\n- log.warning(f\"Failed to extract HLS streams for {sname}/{stream['versionLibelle']}: {err}\")\n+ API_URL = \"https://api.arte.tv/api/player/v2/config/{0}/{1}\"\n+ API_TOKEN = \"MzYyZDYyYmM1Y2Q3ZWRlZWFjMmIyZjZjNTRiMGY4MzY4NzBhOWQ5YjE4MGQ1NGFiODJmOTFlZDQwN2FkOTZjMQ\"\n \n def _get_streams(self):\n- language = self.match.group('language')\n- video_id = self.match.group('video_id')\n- if video_id is None:\n- json_url = JSON_LIVE_URL.format(language)\n- else:\n- json_url = JSON_VOD_URL.format(language, video_id)\n- res = self.session.http.get(json_url)\n- video = self.session.http.json(res, schema=_video_schema)\n+ language = self.match.group(\"language\")\n+ video_id = self.match.group(\"video_id\")\n \n- if not video[\"videoJsonPlayer\"][\"VSR\"]:\n+ json_url = self.API_URL.format(language, video_id or \"LIVE\")\n+ headers = {\n+ \"Authorization\": f\"Bearer {self.API_TOKEN}\"\n+ }\n+ streams, metadata = self.session.http.get(json_url, headers=headers, schema=validate.Schema(\n+ validate.parse_json(),\n+ {\"data\": {\"attributes\": {\n+ \"streams\": validate.any(\n+ [],\n+ [\n+ validate.all(\n+ {\n+ \"url\": validate.url(),\n+ \"slot\": int,\n+ \"protocol\": validate.any(\"HLS\", \"HLS_NG\"),\n+ },\n+ validate.union_get(\"slot\", \"protocol\", \"url\")\n+ )\n+ ]\n+ ),\n+ \"metadata\": {\n+ \"title\": str,\n+ \"subtitle\": validate.any(None, str)\n+ }\n+ }}},\n+ validate.get((\"data\", \"attributes\")),\n+ validate.union_get(\"streams\", \"metadata\")\n+ ))\n+\n+ if not streams:\n return\n \n- vsr = video[\"videoJsonPlayer\"][\"VSR\"]\n- return self._create_stream(vsr)\n+ self.title = f\"{metadata['title']} - {metadata['subtitle']}\" if metadata[\"subtitle\"] else metadata[\"title\"]\n+\n+ for slot, protocol, url in sorted(streams, key=itemgetter(0)):\n+ return HLSStream.parse_variant_playlist(self.session, url)\n \n \n __plugin__ = ArteTV\n", "issue": "artetv: de/fr Livestreams aren't playable anymore\n### Checklist\n\n- [X] This is a plugin issue and not a different kind of issue\n- [X] [I have read the contribution 
guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)\n- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)\n- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)\n\n### Streamlink version\n\nLatest build from the master branch\n\n### Description\n\nSince about a week the live channels aren't playable anymore. However VODs working fine.\r\n\n\n### Debug log\n\n```text\nstreamlink https://www.arte.tv/de/live/ worst -l debug\r\n[cli][debug] OS: Linux-5.14.3-arch1-1-x86_64-with-glibc2.33\r\n[cli][debug] Python: 3.9.7\r\n[cli][debug] Streamlink: 2.4.0+17.g24c59a2\r\n[cli][debug] Requests(2.26.0), Socks(1.7.1), Websocket(0.59.0)\r\n[cli][debug] Arguments:\r\n[cli][debug] url=https://www.arte.tv/de/live/\r\n[cli][debug] stream=['worst']\r\n[cli][debug] --loglevel=debug\r\n[cli][info] Found matching plugin artetv for URL https://www.arte.tv/de/live/\r\nerror: No playable streams found on this URL: https://www.arte.tv/de/live/\r\n\r\nstreamlink https://www.arte.tv/fr/direct/ best -l debug\r\n[cli][debug] OS: Linux-5.14.3-arch1-1-x86_64-with-glibc2.33\r\n[cli][debug] Python: 3.9.7\r\n[cli][debug] Streamlink: 2.4.0+17.g24c59a2\r\n[cli][debug] Requests(2.26.0), Socks(1.7.1), Websocket(0.59.0)\r\n[cli][debug] Arguments:\r\n[cli][debug] url=https://www.arte.tv/fr/direct/\r\n[cli][debug] stream=['best']\r\n[cli][debug] --loglevel=debug\r\n[cli][info] Found matching plugin artetv for URL https://www.arte.tv/fr/direct/\r\nerror: No playable streams found on this URL: https://www.arte.tv/fr/direct/\n```\n\nplugins.arte: switch to arte.tv v2 API\nThe Arte.tv V1 API doens't seem to work anymore for live streams (see #4026).\r\n\r\nBoth web site and mobile app use the V2 API, which requires an authentication token. 
The one from the website is used here for this fix.\n", "before_files": [{"content": "\"\"\"Plugin for Arte.tv, bi-lingual art and culture channel.\"\"\"\n\nimport logging\nimport re\nfrom operator import itemgetter\n\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream\n\nlog = logging.getLogger(__name__)\nJSON_VOD_URL = \"https://api.arte.tv/api/player/v1/config/{0}/{1}?platform=ARTE_NEXT\"\nJSON_LIVE_URL = \"https://api.arte.tv/api/player/v1/livestream/{0}\"\n\n_video_schema = validate.Schema({\n \"videoJsonPlayer\": {\n \"VSR\": validate.any(\n [],\n {\n validate.text: {\n \"height\": int,\n \"mediaType\": validate.text,\n \"url\": validate.text,\n \"versionProg\": int,\n \"versionLibelle\": validate.text\n },\n },\n )\n }\n})\n\n\n@pluginmatcher(re.compile(r\"\"\"\n https?://(?:\\w+\\.)?arte\\.tv/(?:guide/)?\n (?P<language>[a-z]{2})/\n (?:\n (?:videos/)?(?P<video_id>(?!RC-|videos)[^/]+?)/.+\n |\n (?:direct|live)\n )\n\"\"\", re.VERBOSE))\nclass ArteTV(Plugin):\n def _create_stream(self, streams):\n variant, variantname = min([(stream[\"versionProg\"], stream[\"versionLibelle\"]) for stream in streams.values()],\n key=itemgetter(0))\n log.debug(f\"Using the '{variantname}' stream variant\")\n for sname, stream in streams.items():\n if stream[\"versionProg\"] == variant:\n if stream[\"mediaType\"] == \"hls\":\n try:\n streams = HLSStream.parse_variant_playlist(self.session, stream[\"url\"])\n yield from streams.items()\n except OSError as err:\n log.warning(f\"Failed to extract HLS streams for {sname}/{stream['versionLibelle']}: {err}\")\n\n def _get_streams(self):\n language = self.match.group('language')\n video_id = self.match.group('video_id')\n if video_id is None:\n json_url = JSON_LIVE_URL.format(language)\n else:\n json_url = JSON_VOD_URL.format(language, video_id)\n res = self.session.http.get(json_url)\n video = self.session.http.json(res, schema=_video_schema)\n\n if not video[\"videoJsonPlayer\"][\"VSR\"]:\n return\n\n vsr = video[\"videoJsonPlayer\"][\"VSR\"]\n return self._create_stream(vsr)\n\n\n__plugin__ = ArteTV\n", "path": "src/streamlink/plugins/artetv.py"}], "after_files": [{"content": "import logging\nimport re\nfrom operator import itemgetter\n\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream import HLSStream\n\nlog = logging.getLogger(__name__)\n\n\n@pluginmatcher(re.compile(r\"\"\"\n https?://(?:\\w+\\.)?arte\\.tv/(?:guide/)?\n (?P<language>[a-z]{2})/\n (?:\n (?:videos/)?(?P<video_id>(?!RC-|videos)[^/]+?)/.+\n |\n (?:direct|live)\n )\n\"\"\", re.VERBOSE))\nclass ArteTV(Plugin):\n API_URL = \"https://api.arte.tv/api/player/v2/config/{0}/{1}\"\n API_TOKEN = \"MzYyZDYyYmM1Y2Q3ZWRlZWFjMmIyZjZjNTRiMGY4MzY4NzBhOWQ5YjE4MGQ1NGFiODJmOTFlZDQwN2FkOTZjMQ\"\n\n def _get_streams(self):\n language = self.match.group(\"language\")\n video_id = self.match.group(\"video_id\")\n\n json_url = self.API_URL.format(language, video_id or \"LIVE\")\n headers = {\n \"Authorization\": f\"Bearer {self.API_TOKEN}\"\n }\n streams, metadata = self.session.http.get(json_url, headers=headers, schema=validate.Schema(\n validate.parse_json(),\n {\"data\": {\"attributes\": {\n \"streams\": validate.any(\n [],\n [\n validate.all(\n {\n \"url\": validate.url(),\n \"slot\": int,\n \"protocol\": validate.any(\"HLS\", \"HLS_NG\"),\n },\n validate.union_get(\"slot\", \"protocol\", \"url\")\n )\n ]\n ),\n \"metadata\": {\n \"title\": str,\n 
\"subtitle\": validate.any(None, str)\n }\n }}},\n validate.get((\"data\", \"attributes\")),\n validate.union_get(\"streams\", \"metadata\")\n ))\n\n if not streams:\n return\n\n self.title = f\"{metadata['title']} - {metadata['subtitle']}\" if metadata[\"subtitle\"] else metadata[\"title\"]\n\n for slot, protocol, url in sorted(streams, key=itemgetter(0)):\n return HLSStream.parse_variant_playlist(self.session, url)\n\n\n__plugin__ = ArteTV\n", "path": "src/streamlink/plugins/artetv.py"}]} | 1,646 | 990 |
gh_patches_debug_31351 | rasdani/github-patches | git_diff | iterative__dvc-2646 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
get/import: could not perform a HEAD request
```
DVC version: 0.62.1
Python version: 3.7.3
Platform: Darwin-18.7.0-x86_64-i386-64bit
Binary: False
Cache: reflink - True, hardlink - True, symlink - True
Filesystem type (cache directory): ('apfs', '/dev/disk1s1')
Filesystem type (workspace): ('apfs', '/dev/disk1s1')
```
I'm trying to import a directory versioned in our own [dataset registry](https://github.com/iterative/dataset-registry) project into an empty, non-Git DVC project, but getting this cryptic error:
```console
$ dvc import --rev 0547f58 \
[email protected]:iterative/dataset-registry.git \
use-cases/data
Importing 'use-cases/data ([email protected]:iterative/dataset-registry.git)' -> 'data'
ERROR: failed to import 'use-cases/data' from '[email protected]:iterative/dataset-registry.git'. - unable to find DVC-file with output '../../../../private/var/folders/_c/3mt_xn_d4xl2ddsx2m98h_r40000gn/T/tmphs83czecdvc-repo/use-cases/data'
Having any troubles? Hit us up at https://dvc.org/support, we are always happy to help!
```
The directory in question has file name `b6923e1e4ad16ea1a7e2b328842d56a2.dir` (see [use-cases/cats-dogs.dvc](https://github.com/iterative/dataset-registry/blob/0547f58/use-cases/cats-dogs.dvc) of that version). And the default remote is [configured](https://github.com/iterative/dataset-registry/blob/master/.dvc/config) to https://remote.dvc.org/dataset-registry (which is an HTTP redirect to the s3://dvc-public/remote/dataset-registry bucket). ~~The file seems to be in the remote~~
Am I just doing something wrong here (hopefully), or is `dvc import` broken?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dvc/remote/http.py`
Content:
```
1 from __future__ import unicode_literals
2
3 import logging
4 from dvc.scheme import Schemes
5 from dvc.utils.compat import open
6
7 from dvc.progress import Tqdm
8 from dvc.exceptions import DvcException
9 from dvc.config import Config, ConfigError
10 from dvc.remote.base import RemoteBASE
11
12 logger = logging.getLogger(__name__)
13
14
15 class RemoteHTTP(RemoteBASE):
16 scheme = Schemes.HTTP
17 REQUEST_TIMEOUT = 10
18 CHUNK_SIZE = 2 ** 16
19 PARAM_CHECKSUM = "etag"
20
21 def __init__(self, repo, config):
22 super(RemoteHTTP, self).__init__(repo, config)
23
24 url = config.get(Config.SECTION_REMOTE_URL)
25 self.path_info = self.path_cls(url) if url else None
26
27 if not self.no_traverse:
28 raise ConfigError(
29 "HTTP doesn't support traversing the remote to list existing "
30 "files. Use: `dvc remote modify <name> no_traverse true`"
31 )
32
33 def _download(self, from_info, to_file, name=None, no_progress_bar=False):
34 request = self._request("GET", from_info.url, stream=True)
35 with Tqdm(
36 total=None if no_progress_bar else self._content_length(from_info),
37 leave=False,
38 bytes=True,
39 desc=from_info.url if name is None else name,
40 disable=no_progress_bar,
41 ) as pbar:
42 with open(to_file, "wb") as fd:
43 for chunk in request.iter_content(chunk_size=self.CHUNK_SIZE):
44 fd.write(chunk)
45 fd.flush()
46 pbar.update(len(chunk))
47
48 def exists(self, path_info):
49 return bool(self._request("HEAD", path_info.url))
50
51 def _content_length(self, url_or_request):
52 headers = getattr(
53 url_or_request,
54 "headers",
55 self._request("HEAD", url_or_request).headers,
56 )
57 res = headers.get("Content-Length")
58 return int(res) if res else None
59
60 def get_file_checksum(self, path_info):
61 url = path_info.url
62 headers = self._request("HEAD", url).headers
63 etag = headers.get("ETag") or headers.get("Content-MD5")
64
65 if not etag:
66 raise DvcException(
67 "could not find an ETag or "
68 "Content-MD5 header for '{url}'".format(url=url)
69 )
70
71 if etag.startswith("W/"):
72 raise DvcException(
73 "Weak ETags are not supported."
74 " (Etag: '{etag}', URL: '{url}')".format(etag=etag, url=url)
75 )
76
77 return etag
78
79 def _request(self, method, url, **kwargs):
80 import requests
81
82 kwargs.setdefault("allow_redirects", True)
83 kwargs.setdefault("timeout", self.REQUEST_TIMEOUT)
84
85 try:
86 return requests.request(method, url, **kwargs)
87 except requests.exceptions.RequestException:
88 raise DvcException("could not perform a {} request".format(method))
89
90 def gc(self):
91 raise NotImplementedError
92
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dvc/remote/http.py b/dvc/remote/http.py
--- a/dvc/remote/http.py
+++ b/dvc/remote/http.py
@@ -1,9 +1,11 @@
from __future__ import unicode_literals
import logging
+
+from funcy import cached_property
+
from dvc.scheme import Schemes
from dvc.utils.compat import open
-
from dvc.progress import Tqdm
from dvc.exceptions import DvcException
from dvc.config import Config, ConfigError
@@ -14,6 +16,8 @@
class RemoteHTTP(RemoteBASE):
scheme = Schemes.HTTP
+ SESSION_RETRIES = 5
+ SESSION_BACKOFF_FACTOR = 0.1
REQUEST_TIMEOUT = 10
CHUNK_SIZE = 2 ** 16
PARAM_CHECKSUM = "etag"
@@ -76,6 +80,24 @@
return etag
+ @cached_property
+ def _session(self):
+ import requests
+ from requests.adapters import HTTPAdapter
+ from urllib3.util.retry import Retry
+
+ session = requests.Session()
+
+ retries = Retry(
+ total=self.SESSION_RETRIES,
+ backoff_factor=self.SESSION_BACKOFF_FACTOR,
+ )
+
+ session.mount("http://", HTTPAdapter(max_retries=retries))
+ session.mount("https://", HTTPAdapter(max_retries=retries))
+
+ return session
+
def _request(self, method, url, **kwargs):
import requests
@@ -83,7 +105,7 @@
kwargs.setdefault("timeout", self.REQUEST_TIMEOUT)
try:
- return requests.request(method, url, **kwargs)
+ return self._session.request(method, url, **kwargs)
except requests.exceptions.RequestException:
raise DvcException("could not perform a {} request".format(method))
| {"golden_diff": "diff --git a/dvc/remote/http.py b/dvc/remote/http.py\n--- a/dvc/remote/http.py\n+++ b/dvc/remote/http.py\n@@ -1,9 +1,11 @@\n from __future__ import unicode_literals\n \n import logging\n+\n+from funcy import cached_property\n+\n from dvc.scheme import Schemes\n from dvc.utils.compat import open\n-\n from dvc.progress import Tqdm\n from dvc.exceptions import DvcException\n from dvc.config import Config, ConfigError\n@@ -14,6 +16,8 @@\n \n class RemoteHTTP(RemoteBASE):\n scheme = Schemes.HTTP\n+ SESSION_RETRIES = 5\n+ SESSION_BACKOFF_FACTOR = 0.1\n REQUEST_TIMEOUT = 10\n CHUNK_SIZE = 2 ** 16\n PARAM_CHECKSUM = \"etag\"\n@@ -76,6 +80,24 @@\n \n return etag\n \n+ @cached_property\n+ def _session(self):\n+ import requests\n+ from requests.adapters import HTTPAdapter\n+ from urllib3.util.retry import Retry\n+\n+ session = requests.Session()\n+\n+ retries = Retry(\n+ total=self.SESSION_RETRIES,\n+ backoff_factor=self.SESSION_BACKOFF_FACTOR,\n+ )\n+\n+ session.mount(\"http://\", HTTPAdapter(max_retries=retries))\n+ session.mount(\"https://\", HTTPAdapter(max_retries=retries))\n+\n+ return session\n+\n def _request(self, method, url, **kwargs):\n import requests\n \n@@ -83,7 +105,7 @@\n kwargs.setdefault(\"timeout\", self.REQUEST_TIMEOUT)\n \n try:\n- return requests.request(method, url, **kwargs)\n+ return self._session.request(method, url, **kwargs)\n except requests.exceptions.RequestException:\n raise DvcException(\"could not perform a {} request\".format(method))\n", "issue": "get/import: could not perform a HEAD request\n```\r\nDVC version: 0.62.1\r\nPython version: 3.7.3\r\nPlatform: Darwin-18.7.0-x86_64-i386-64bit\r\nBinary: False\r\nCache: reflink - True, hardlink - True, symlink - True\r\nFilesystem type (cache directory): ('apfs', '/dev/disk1s1')\r\nFilesystem type (workspace): ('apfs', '/dev/disk1s1')\r\n```\r\n\r\nI'm trying to import a directory versioned in our own [dataset registry](https://github.com/iterative/dataset-registry) project into an empty, non-Git DVC project, but getting this cryptic error:\r\n\r\n```console\r\n$ dvc import --rev 0547f58 \\ \r\n [email protected]:iterative/dataset-registry.git \\\r\n use-cases/data\r\nImporting 'use-cases/data ([email protected]:iterative/dataset-registry.git)' -> 'data'\r\nERROR: failed to import 'use-cases/data' from '[email protected]:iterative/dataset-registry.git'. - unable to find DVC-file with output '../../../../private/var/folders/_c/3mt_xn_d4xl2ddsx2m98h_r40000gn/T/tmphs83czecdvc-repo/use-cases/data'\r\n\r\nHaving any troubles? Hit us up at https://dvc.org/support, we are always happy to help!\r\n```\r\n\r\nThe directory in question has file name `b6923e1e4ad16ea1a7e2b328842d56a2.dir ` (See [use-cases/cats-dogs.dvc](https://github.com/iterative/dataset-registry/blob/0547f58/use-cases/cats-dogs.dvc) of that version). And the default remote is [configured[(https://github.com/iterative/dataset-registry/blob/master/.dvc/config) to https://remote.dvc.org/dataset-registry (which is an HTTP redirect to the s3://dvc-public/remote/dataset-registry bucket). 
~~The file seems to be in the remote~~\r\n\r\nAm I just doing something wrong here (hopefully), or is `dvc import` broken?\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport logging\nfrom dvc.scheme import Schemes\nfrom dvc.utils.compat import open\n\nfrom dvc.progress import Tqdm\nfrom dvc.exceptions import DvcException\nfrom dvc.config import Config, ConfigError\nfrom dvc.remote.base import RemoteBASE\n\nlogger = logging.getLogger(__name__)\n\n\nclass RemoteHTTP(RemoteBASE):\n scheme = Schemes.HTTP\n REQUEST_TIMEOUT = 10\n CHUNK_SIZE = 2 ** 16\n PARAM_CHECKSUM = \"etag\"\n\n def __init__(self, repo, config):\n super(RemoteHTTP, self).__init__(repo, config)\n\n url = config.get(Config.SECTION_REMOTE_URL)\n self.path_info = self.path_cls(url) if url else None\n\n if not self.no_traverse:\n raise ConfigError(\n \"HTTP doesn't support traversing the remote to list existing \"\n \"files. Use: `dvc remote modify <name> no_traverse true`\"\n )\n\n def _download(self, from_info, to_file, name=None, no_progress_bar=False):\n request = self._request(\"GET\", from_info.url, stream=True)\n with Tqdm(\n total=None if no_progress_bar else self._content_length(from_info),\n leave=False,\n bytes=True,\n desc=from_info.url if name is None else name,\n disable=no_progress_bar,\n ) as pbar:\n with open(to_file, \"wb\") as fd:\n for chunk in request.iter_content(chunk_size=self.CHUNK_SIZE):\n fd.write(chunk)\n fd.flush()\n pbar.update(len(chunk))\n\n def exists(self, path_info):\n return bool(self._request(\"HEAD\", path_info.url))\n\n def _content_length(self, url_or_request):\n headers = getattr(\n url_or_request,\n \"headers\",\n self._request(\"HEAD\", url_or_request).headers,\n )\n res = headers.get(\"Content-Length\")\n return int(res) if res else None\n\n def get_file_checksum(self, path_info):\n url = path_info.url\n headers = self._request(\"HEAD\", url).headers\n etag = headers.get(\"ETag\") or headers.get(\"Content-MD5\")\n\n if not etag:\n raise DvcException(\n \"could not find an ETag or \"\n \"Content-MD5 header for '{url}'\".format(url=url)\n )\n\n if etag.startswith(\"W/\"):\n raise DvcException(\n \"Weak ETags are not supported.\"\n \" (Etag: '{etag}', URL: '{url}')\".format(etag=etag, url=url)\n )\n\n return etag\n\n def _request(self, method, url, **kwargs):\n import requests\n\n kwargs.setdefault(\"allow_redirects\", True)\n kwargs.setdefault(\"timeout\", self.REQUEST_TIMEOUT)\n\n try:\n return requests.request(method, url, **kwargs)\n except requests.exceptions.RequestException:\n raise DvcException(\"could not perform a {} request\".format(method))\n\n def gc(self):\n raise NotImplementedError\n", "path": "dvc/remote/http.py"}], "after_files": [{"content": "from __future__ import unicode_literals\n\nimport logging\n\nfrom funcy import cached_property\n\nfrom dvc.scheme import Schemes\nfrom dvc.utils.compat import open\nfrom dvc.progress import Tqdm\nfrom dvc.exceptions import DvcException\nfrom dvc.config import Config, ConfigError\nfrom dvc.remote.base import RemoteBASE\n\nlogger = logging.getLogger(__name__)\n\n\nclass RemoteHTTP(RemoteBASE):\n scheme = Schemes.HTTP\n SESSION_RETRIES = 5\n SESSION_BACKOFF_FACTOR = 0.1\n REQUEST_TIMEOUT = 10\n CHUNK_SIZE = 2 ** 16\n PARAM_CHECKSUM = \"etag\"\n\n def __init__(self, repo, config):\n super(RemoteHTTP, self).__init__(repo, config)\n\n url = config.get(Config.SECTION_REMOTE_URL)\n self.path_info = self.path_cls(url) if url else None\n\n if not self.no_traverse:\n raise ConfigError(\n \"HTTP doesn't support 
traversing the remote to list existing \"\n \"files. Use: `dvc remote modify <name> no_traverse true`\"\n )\n\n def _download(self, from_info, to_file, name=None, no_progress_bar=False):\n request = self._request(\"GET\", from_info.url, stream=True)\n with Tqdm(\n total=None if no_progress_bar else self._content_length(from_info),\n leave=False,\n bytes=True,\n desc=from_info.url if name is None else name,\n disable=no_progress_bar,\n ) as pbar:\n with open(to_file, \"wb\") as fd:\n for chunk in request.iter_content(chunk_size=self.CHUNK_SIZE):\n fd.write(chunk)\n fd.flush()\n pbar.update(len(chunk))\n\n def exists(self, path_info):\n return bool(self._request(\"HEAD\", path_info.url))\n\n def _content_length(self, url_or_request):\n headers = getattr(\n url_or_request,\n \"headers\",\n self._request(\"HEAD\", url_or_request).headers,\n )\n res = headers.get(\"Content-Length\")\n return int(res) if res else None\n\n def get_file_checksum(self, path_info):\n url = path_info.url\n headers = self._request(\"HEAD\", url).headers\n etag = headers.get(\"ETag\") or headers.get(\"Content-MD5\")\n\n if not etag:\n raise DvcException(\n \"could not find an ETag or \"\n \"Content-MD5 header for '{url}'\".format(url=url)\n )\n\n if etag.startswith(\"W/\"):\n raise DvcException(\n \"Weak ETags are not supported.\"\n \" (Etag: '{etag}', URL: '{url}')\".format(etag=etag, url=url)\n )\n\n return etag\n\n @cached_property\n def _session(self):\n import requests\n from requests.adapters import HTTPAdapter\n from urllib3.util.retry import Retry\n\n session = requests.Session()\n\n retries = Retry(\n total=self.SESSION_RETRIES,\n backoff_factor=self.SESSION_BACKOFF_FACTOR,\n )\n\n session.mount(\"http://\", HTTPAdapter(max_retries=retries))\n session.mount(\"https://\", HTTPAdapter(max_retries=retries))\n\n return session\n\n def _request(self, method, url, **kwargs):\n import requests\n\n kwargs.setdefault(\"allow_redirects\", True)\n kwargs.setdefault(\"timeout\", self.REQUEST_TIMEOUT)\n\n try:\n return self._session.request(method, url, **kwargs)\n except requests.exceptions.RequestException:\n raise DvcException(\"could not perform a {} request\".format(method))\n\n def gc(self):\n raise NotImplementedError\n", "path": "dvc/remote/http.py"}]} | 1,618 | 417 |
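Note on the record above: the fix replaces bare `requests.request` calls with a cached session that retries transient failures. A minimal standalone sketch of that retry pattern, with the retry counts taken from the record's constants; the URL is a placeholder assumption:

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry


def make_retrying_session(retries: int = 5, backoff_factor: float = 0.1) -> requests.Session:
    """Build a requests.Session whose HTTP(S) adapters retry failed requests."""
    session = requests.Session()
    retry = Retry(total=retries, backoff_factor=backoff_factor)
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    return session


if __name__ == "__main__":
    session = make_retrying_session()
    # Placeholder endpoint; any flaky HTTP remote would exercise the retries.
    response = session.get("https://example.com/data.csv", timeout=10)
    print(response.status_code)
```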
gh_patches_debug_17448 | rasdani/github-patches | git_diff | akvo__akvo-rsr-2064 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Block Google from Indexing our Development Environments
## Test plan
GIVEN all dev environments (Test / UAT)
WHEN looking at the _head_ tag
THEN a _meta name="robots" content="none"_ node should be added
GIVEN the live environment
WHEN looking at the _head_ tag
THEN a _meta name="robots" content="none"_ node should not be added
## Issue description
We should add a robots.txt to all NON LIVE machines that prevents Google from indexing the site and displaying the content in search results.
This looks to be pretty simple: https://support.google.com/webmasters/answer/156449?rd=1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `akvo/rsr/context_processors.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 Akvo RSR is covered by the GNU Affero General Public License.
4
5 See more details in the license.txt file located at the root folder of the
6 Akvo RSR module. For additional details on the GNU license please see
7 < http://www.gnu.org/licenses/agpl.html >.
8 """
9
10 import django
11
12 from django.conf import settings
13 from django.core.exceptions import DisallowedHost
14 from django.contrib.sites.models import get_current_site
15
16
17 def extra_context(request, protocol="http"):
18 """Add information to the request context."""
19 try:
20 current_site = get_current_site(request)
21 except DisallowedHost:
22 current_site = None
23
24 django_version = django.get_version()
25 deploy_tag = getattr(settings, 'DEPLOY_TAG', 'Unknown')
26 deploy_branch = getattr(settings, 'DEPLOY_BRANCH', 'Unknown')
27 deploy_commit_id = getattr(settings, 'DEPLOY_COMMIT_ID', 'Unknown')
28 deploy_commit_full_id = getattr(settings, 'DEPLOY_COMMIT_FULL_ID', 'Unknown')
29
30 return dict(
31 current_site=current_site,
32 django_version=django_version,
33 deploy_tag=deploy_tag,
34 deploy_branch=deploy_branch,
35 deploy_commit_id=deploy_commit_id,
36 deploy_commit_full_id=deploy_commit_full_id
37 )
38
39
40 def get_current_path_without_lang(request):
41 """Return current path without lang."""
42 path = request.get_full_path()
43 path_bits = path.split('/')
44 path = '/'.join(path_bits[2:])
45 return {'current_path_without_lang': path}
46
47
48 def extra_pages_context(request):
49 """Add context information of an RSR Page."""
50 if request.rsr_page:
51 page = request.rsr_page
52 return {
53 'rsr_page': page,
54 'favicon': page.favicon,
55 'logo': page.logo,
56 'organisation': page.organisation,
57 'return_url': page.return_url,
58 'return_url_text': page.custom_return_url_text,
59 'stylesheet': page.stylesheet,
60 'akvoapp_root_url': '//{}'.format(settings.AKVOAPP_DOMAIN),
61 'domain_url': '//{}'.format(settings.RSR_DOMAIN),
62 'no_facebook': not page.facebook_button,
63 'facebook_app_id': page.facebook_app_id,
64 'no_twitter': not page.twitter_button,
65 }
66
67 return {}
68
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/akvo/rsr/context_processors.py b/akvo/rsr/context_processors.py
--- a/akvo/rsr/context_processors.py
+++ b/akvo/rsr/context_processors.py
@@ -22,6 +22,7 @@
current_site = None
django_version = django.get_version()
+ debug = getattr(settings, 'DEBUG', False)
deploy_tag = getattr(settings, 'DEPLOY_TAG', 'Unknown')
deploy_branch = getattr(settings, 'DEPLOY_BRANCH', 'Unknown')
deploy_commit_id = getattr(settings, 'DEPLOY_COMMIT_ID', 'Unknown')
@@ -30,6 +31,7 @@
return dict(
current_site=current_site,
django_version=django_version,
+ debug=debug,
deploy_tag=deploy_tag,
deploy_branch=deploy_branch,
deploy_commit_id=deploy_commit_id,
| {"golden_diff": "diff --git a/akvo/rsr/context_processors.py b/akvo/rsr/context_processors.py\n--- a/akvo/rsr/context_processors.py\n+++ b/akvo/rsr/context_processors.py\n@@ -22,6 +22,7 @@\n current_site = None\n \n django_version = django.get_version()\n+ debug = getattr(settings, 'DEBUG', False)\n deploy_tag = getattr(settings, 'DEPLOY_TAG', 'Unknown')\n deploy_branch = getattr(settings, 'DEPLOY_BRANCH', 'Unknown')\n deploy_commit_id = getattr(settings, 'DEPLOY_COMMIT_ID', 'Unknown')\n@@ -30,6 +31,7 @@\n return dict(\n current_site=current_site,\n django_version=django_version,\n+ debug=debug,\n deploy_tag=deploy_tag,\n deploy_branch=deploy_branch,\n deploy_commit_id=deploy_commit_id,\n", "issue": "Block Google from Indexing our Development Environments\n## Test plan\n\nGIVEN all dev environments (Test / UAT)\nWHEN looking at the _head_ tag\nTHEN a _meta name=\"robots\" content=\"none\"_ node should be added\n\nGIVEN the live environment\nWHEN looking at the _head_ tag\nTHEN a _meta name=\"robots\" content=\"none\"_ node should not be added\n## Issue description\n\nWe should add a robots.txt to all NON LIVE machines that prevents Google from indexing the site and displaying the content in search results.\n\nThis looks to be pretty simple: https://support.google.com/webmasters/answer/156449?rd=1\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nAkvo RSR is covered by the GNU Affero General Public License.\n\nSee more details in the license.txt file located at the root folder of the\nAkvo RSR module. For additional details on the GNU license please see\n< http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nimport django\n\nfrom django.conf import settings\nfrom django.core.exceptions import DisallowedHost\nfrom django.contrib.sites.models import get_current_site\n\n\ndef extra_context(request, protocol=\"http\"):\n \"\"\"Add information to the request context.\"\"\"\n try:\n current_site = get_current_site(request)\n except DisallowedHost:\n current_site = None\n\n django_version = django.get_version()\n deploy_tag = getattr(settings, 'DEPLOY_TAG', 'Unknown')\n deploy_branch = getattr(settings, 'DEPLOY_BRANCH', 'Unknown')\n deploy_commit_id = getattr(settings, 'DEPLOY_COMMIT_ID', 'Unknown')\n deploy_commit_full_id = getattr(settings, 'DEPLOY_COMMIT_FULL_ID', 'Unknown')\n\n return dict(\n current_site=current_site,\n django_version=django_version,\n deploy_tag=deploy_tag,\n deploy_branch=deploy_branch,\n deploy_commit_id=deploy_commit_id,\n deploy_commit_full_id=deploy_commit_full_id\n )\n\n\ndef get_current_path_without_lang(request):\n \"\"\"Return current path without lang.\"\"\"\n path = request.get_full_path()\n path_bits = path.split('/')\n path = '/'.join(path_bits[2:])\n return {'current_path_without_lang': path}\n\n\ndef extra_pages_context(request):\n \"\"\"Add context information of an RSR Page.\"\"\"\n if request.rsr_page:\n page = request.rsr_page\n return {\n 'rsr_page': page,\n 'favicon': page.favicon,\n 'logo': page.logo,\n 'organisation': page.organisation,\n 'return_url': page.return_url,\n 'return_url_text': page.custom_return_url_text,\n 'stylesheet': page.stylesheet,\n 'akvoapp_root_url': '//{}'.format(settings.AKVOAPP_DOMAIN),\n 'domain_url': '//{}'.format(settings.RSR_DOMAIN),\n 'no_facebook': not page.facebook_button,\n 'facebook_app_id': page.facebook_app_id,\n 'no_twitter': not page.twitter_button,\n }\n\n return {}\n", "path": "akvo/rsr/context_processors.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nAkvo RSR is covered 
by the GNU Affero General Public License.\n\nSee more details in the license.txt file located at the root folder of the\nAkvo RSR module. For additional details on the GNU license please see\n< http://www.gnu.org/licenses/agpl.html >.\n\"\"\"\n\nimport django\n\nfrom django.conf import settings\nfrom django.core.exceptions import DisallowedHost\nfrom django.contrib.sites.models import get_current_site\n\n\ndef extra_context(request, protocol=\"http\"):\n \"\"\"Add information to the request context.\"\"\"\n try:\n current_site = get_current_site(request)\n except DisallowedHost:\n current_site = None\n\n django_version = django.get_version()\n debug = getattr(settings, 'DEBUG', False)\n deploy_tag = getattr(settings, 'DEPLOY_TAG', 'Unknown')\n deploy_branch = getattr(settings, 'DEPLOY_BRANCH', 'Unknown')\n deploy_commit_id = getattr(settings, 'DEPLOY_COMMIT_ID', 'Unknown')\n deploy_commit_full_id = getattr(settings, 'DEPLOY_COMMIT_FULL_ID', 'Unknown')\n\n return dict(\n current_site=current_site,\n django_version=django_version,\n debug=debug,\n deploy_tag=deploy_tag,\n deploy_branch=deploy_branch,\n deploy_commit_id=deploy_commit_id,\n deploy_commit_full_id=deploy_commit_full_id\n )\n\n\ndef get_current_path_without_lang(request):\n \"\"\"Return current path without lang.\"\"\"\n path = request.get_full_path()\n path_bits = path.split('/')\n path = '/'.join(path_bits[2:])\n return {'current_path_without_lang': path}\n\n\ndef extra_pages_context(request):\n \"\"\"Add context information of an RSR Page.\"\"\"\n if request.rsr_page:\n page = request.rsr_page\n return {\n 'rsr_page': page,\n 'favicon': page.favicon,\n 'logo': page.logo,\n 'organisation': page.organisation,\n 'return_url': page.return_url,\n 'return_url_text': page.custom_return_url_text,\n 'stylesheet': page.stylesheet,\n 'akvoapp_root_url': '//{}'.format(settings.AKVOAPP_DOMAIN),\n 'domain_url': '//{}'.format(settings.RSR_DOMAIN),\n 'no_facebook': not page.facebook_button,\n 'facebook_app_id': page.facebook_app_id,\n 'no_twitter': not page.twitter_button,\n }\n\n return {}\n", "path": "akvo/rsr/context_processors.py"}]} | 1,024 | 187 |
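For the record above, the patch only exposes a `debug` flag to templates; the robots meta tag itself would live in a base template. A sketch of that template logic, assuming a standalone Django setup (the template string is illustrative and not taken from the akvo-rsr codebase):

```python
import django
from django.conf import settings

if not settings.configured:
    settings.configure(
        TEMPLATES=[{"BACKEND": "django.template.backends.django.DjangoTemplates"}]
    )
    django.setup()

from django.template import Context, Template

# Emit the robots tag only when the context processor reports debug mode.
robots_tag = Template('{% if debug %}<meta name="robots" content="none">{% endif %}')

for debug in (True, False):
    # Dev/UAT machines (debug True) get the tag; the live site does not.
    print(debug, repr(robots_tag.render(Context({"debug": debug}))))
```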
gh_patches_debug_13551 | rasdani/github-patches | git_diff | vyperlang__vyper-3287 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`FunctionNodeVisitor` visits the `sqrt` body twice
### Version Information
* vyper Version (output of `vyper --version`): 0.3.8+commit.d76c6ed2
* OS: OSX
* Python Version (output of `python --version`): 3.8.0
### What's your issue about?
The `FunctionNodeVisitor` seems to visit the body of the `sqrt` builtin twice: the first time inside the `__init__` of the `FunctionNodeVisitor`, and the second time after its creation, via a `for` loop over its body.
https://github.com/vyperlang/vyper/blob/187ab0eec8efbe19ed5046e4e947249e9d43141c/vyper/builtins/_utils.py#L28-L30
https://github.com/vyperlang/vyper/blob/187ab0eec8efbe19ed5046e4e947249e9d43141c/vyper/semantics/analysis/local.py#L178-L179
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `vyper/builtins/_utils.py`
Content:
```
1 from vyper.ast import parse_to_ast
2 from vyper.codegen.context import Context
3 from vyper.codegen.global_context import GlobalContext
4 from vyper.codegen.stmt import parse_body
5 from vyper.semantics.analysis.local import FunctionNodeVisitor
6 from vyper.semantics.namespace import Namespace, override_global_namespace
7 from vyper.semantics.types.function import ContractFunctionT, FunctionVisibility, StateMutability
8
9
10 def _strip_source_pos(ir_node):
11 ir_node.source_pos = None
12 for x in ir_node.args:
13 _strip_source_pos(x)
14
15
16 def generate_inline_function(code, variables, variables_2, memory_allocator):
17 ast_code = parse_to_ast(code, add_fn_node="dummy_fn")
18 # Annotate the AST with a temporary old (i.e. typecheck) namespace
19 namespace = Namespace()
20 namespace.update(variables_2)
21 with override_global_namespace(namespace):
22 # Initialise a placeholder `FunctionDef` AST node and corresponding
23 # `ContractFunctionT` type to rely on the annotation visitors in semantics
24 # module.
25 ast_code.body[0]._metadata["type"] = ContractFunctionT(
26 "sqrt_builtin", {}, 0, 0, None, FunctionVisibility.INTERNAL, StateMutability.NONPAYABLE
27 )
28 sv = FunctionNodeVisitor(ast_code, ast_code.body[0], namespace)
29 for n in ast_code.body[0].body:
30 sv.visit(n)
31
32 new_context = Context(
33 vars_=variables, global_ctx=GlobalContext(), memory_allocator=memory_allocator
34 )
35 generated_ir = parse_body(ast_code.body[0].body, new_context)
36 # strip source position info from the generated_ir since
37 # it doesn't make any sense (e.g. the line numbers will start from 0
38 # instead of where we are in the code)
39 # NOTE if we ever use this for inlining user-code, it would make
40 # sense to fix the offsets of the source positions in the generated
41 # code instead of stripping them.
42 _strip_source_pos(generated_ir)
43 return new_context, generated_ir
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/vyper/builtins/_utils.py b/vyper/builtins/_utils.py
--- a/vyper/builtins/_utils.py
+++ b/vyper/builtins/_utils.py
@@ -25,9 +25,9 @@
ast_code.body[0]._metadata["type"] = ContractFunctionT(
"sqrt_builtin", {}, 0, 0, None, FunctionVisibility.INTERNAL, StateMutability.NONPAYABLE
)
- sv = FunctionNodeVisitor(ast_code, ast_code.body[0], namespace)
- for n in ast_code.body[0].body:
- sv.visit(n)
+ # The FunctionNodeVisitor's constructor performs semantic checks
+ # annotate the AST as side effects.
+ FunctionNodeVisitor(ast_code, ast_code.body[0], namespace)
new_context = Context(
vars_=variables, global_ctx=GlobalContext(), memory_allocator=memory_allocator
| {"golden_diff": "diff --git a/vyper/builtins/_utils.py b/vyper/builtins/_utils.py\n--- a/vyper/builtins/_utils.py\n+++ b/vyper/builtins/_utils.py\n@@ -25,9 +25,9 @@\n ast_code.body[0]._metadata[\"type\"] = ContractFunctionT(\n \"sqrt_builtin\", {}, 0, 0, None, FunctionVisibility.INTERNAL, StateMutability.NONPAYABLE\n )\n- sv = FunctionNodeVisitor(ast_code, ast_code.body[0], namespace)\n- for n in ast_code.body[0].body:\n- sv.visit(n)\n+ # The FunctionNodeVisitor's constructor performs semantic checks\n+ # annotate the AST as side effects.\n+ FunctionNodeVisitor(ast_code, ast_code.body[0], namespace)\n \n new_context = Context(\n vars_=variables, global_ctx=GlobalContext(), memory_allocator=memory_allocator\n", "issue": "`FunctionNodeVisitor` visits twice `sqrt` body\n### Version Information\r\n\r\n* vyper Version (output of `vyper --version`): 0.3.8+commit.d76c6ed2\r\n* OS: OSX\r\n* Python Version (output of `python --version`): 3.8.0\r\n\r\n### What's your issue about?\r\n\r\nThe `FunctionNodeVisitor` seems to visit twice the body of `sqrt` builtin, the first time is in the `__init__` function of the `FunctionNodeVisitor` and the second time after its creation using a `for` loop over its body.\r\nhttps://github.com/vyperlang/vyper/blob/187ab0eec8efbe19ed5046e4e947249e9d43141c/vyper/builtins/_utils.py#L28-L30\r\n\r\nhttps://github.com/vyperlang/vyper/blob/187ab0eec8efbe19ed5046e4e947249e9d43141c/vyper/semantics/analysis/local.py#L178-L179\r\n\n", "before_files": [{"content": "from vyper.ast import parse_to_ast\nfrom vyper.codegen.context import Context\nfrom vyper.codegen.global_context import GlobalContext\nfrom vyper.codegen.stmt import parse_body\nfrom vyper.semantics.analysis.local import FunctionNodeVisitor\nfrom vyper.semantics.namespace import Namespace, override_global_namespace\nfrom vyper.semantics.types.function import ContractFunctionT, FunctionVisibility, StateMutability\n\n\ndef _strip_source_pos(ir_node):\n ir_node.source_pos = None\n for x in ir_node.args:\n _strip_source_pos(x)\n\n\ndef generate_inline_function(code, variables, variables_2, memory_allocator):\n ast_code = parse_to_ast(code, add_fn_node=\"dummy_fn\")\n # Annotate the AST with a temporary old (i.e. typecheck) namespace\n namespace = Namespace()\n namespace.update(variables_2)\n with override_global_namespace(namespace):\n # Initialise a placeholder `FunctionDef` AST node and corresponding\n # `ContractFunctionT` type to rely on the annotation visitors in semantics\n # module.\n ast_code.body[0]._metadata[\"type\"] = ContractFunctionT(\n \"sqrt_builtin\", {}, 0, 0, None, FunctionVisibility.INTERNAL, StateMutability.NONPAYABLE\n )\n sv = FunctionNodeVisitor(ast_code, ast_code.body[0], namespace)\n for n in ast_code.body[0].body:\n sv.visit(n)\n\n new_context = Context(\n vars_=variables, global_ctx=GlobalContext(), memory_allocator=memory_allocator\n )\n generated_ir = parse_body(ast_code.body[0].body, new_context)\n # strip source position info from the generated_ir since\n # it doesn't make any sense (e.g. 
the line numbers will start from 0\n # instead of where we are in the code)\n # NOTE if we ever use this for inlining user-code, it would make\n # sense to fix the offsets of the source positions in the generated\n # code instead of stripping them.\n _strip_source_pos(generated_ir)\n return new_context, generated_ir\n", "path": "vyper/builtins/_utils.py"}], "after_files": [{"content": "from vyper.ast import parse_to_ast\nfrom vyper.codegen.context import Context\nfrom vyper.codegen.global_context import GlobalContext\nfrom vyper.codegen.stmt import parse_body\nfrom vyper.semantics.analysis.local import FunctionNodeVisitor\nfrom vyper.semantics.namespace import Namespace, override_global_namespace\nfrom vyper.semantics.types.function import ContractFunctionT, FunctionVisibility, StateMutability\n\n\ndef _strip_source_pos(ir_node):\n ir_node.source_pos = None\n for x in ir_node.args:\n _strip_source_pos(x)\n\n\ndef generate_inline_function(code, variables, variables_2, memory_allocator):\n ast_code = parse_to_ast(code, add_fn_node=\"dummy_fn\")\n # Annotate the AST with a temporary old (i.e. typecheck) namespace\n namespace = Namespace()\n namespace.update(variables_2)\n with override_global_namespace(namespace):\n # Initialise a placeholder `FunctionDef` AST node and corresponding\n # `ContractFunctionT` type to rely on the annotation visitors in semantics\n # module.\n ast_code.body[0]._metadata[\"type\"] = ContractFunctionT(\n \"sqrt_builtin\", {}, 0, 0, None, FunctionVisibility.INTERNAL, StateMutability.NONPAYABLE\n )\n # The FunctionNodeVisitor's constructor performs semantic checks\n # annotate the AST as side effects.\n FunctionNodeVisitor(ast_code, ast_code.body[0], namespace)\n\n new_context = Context(\n vars_=variables, global_ctx=GlobalContext(), memory_allocator=memory_allocator\n )\n generated_ir = parse_body(ast_code.body[0].body, new_context)\n # strip source position info from the generated_ir since\n # it doesn't make any sense (e.g. the line numbers will start from 0\n # instead of where we are in the code)\n # NOTE if we ever use this for inlining user-code, it would make\n # sense to fix the offsets of the source positions in the generated\n # code instead of stripping them.\n _strip_source_pos(generated_ir)\n return new_context, generated_ir\n", "path": "vyper/builtins/_utils.py"}]} | 1,047 | 200 |
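The bug in the record above is a generic pattern: a visitor whose constructor already walks the body, followed by a redundant manual pass. A toy reproduction (the class and names are illustrative, not vyper's actual implementation):

```python
class BodyVisitor:
    def __init__(self, body):
        self.visits = 0
        for node in body:   # the constructor already visits every node
            self.visit(node)

    def visit(self, node):
        self.visits += 1


body = ["stmt1", "stmt2"]
visitor = BodyVisitor(body)
for node in body:           # the redundant second pass removed by the patch
    visitor.visit(node)

print(visitor.visits)       # 4: every statement was visited twice
```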
gh_patches_debug_22916 | rasdani/github-patches | git_diff | quantumlib__Cirq-1018 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add `_phase_by_` magic method to `ControlledGate`
Comes after https://github.com/quantumlib/Cirq/issues/947
The logic is as follows: if the qubit index is 0 (the control), the operation is returned unchanged. If it is larger, we delegate to phasing the sub gate with `cirq.phase_by`, using a default result of NotImplemented. If that is NotImplemented, we return NotImplemented. Otherwise we return a controlled gate wrapping the phased sub gate.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cirq/ops/controlled_gate.py`
Content:
```
1 # Copyright 2018 The Cirq Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from typing import Union, Sequence, Any
16
17 import numpy as np
18
19 from cirq import linalg, protocols
20 from cirq.ops import raw_types
21 from cirq.type_workarounds import NotImplementedType
22
23
24 class ControlledGate(raw_types.Gate):
25 """Augments existing gates with a control qubit."""
26
27 def __init__(self, sub_gate: raw_types.Gate) -> None:
28 """Initializes the controlled gate.
29
30 Args:
31 sub_gate: The gate to add a control qubit to.
32 default_extensions: The extensions method that should be used when
33 determining if the controlled gate supports certain gate
34 features. For example, if this extensions instance is able to
35 cast sub_gate to a ExtrapolatableEffect then the controlled gate
36 can also be cast to a ExtrapolatableEffect. When this value is
37 None, an empty extensions instance is used instead.
38 """
39 self.sub_gate = sub_gate
40
41 def validate_args(self, qubits) -> None:
42 if len(qubits) < 1:
43 raise ValueError('No control qubit specified.')
44 self.sub_gate.validate_args(qubits[1:])
45
46 def __eq__(self, other):
47 if not isinstance(other, type(self)):
48 return NotImplemented
49 return self.sub_gate == other.sub_gate
50
51 def __ne__(self, other):
52 return not self == other
53
54 def __hash__(self):
55 return hash((ControlledGate, self.sub_gate))
56
57 def _apply_unitary_to_tensor_(self,
58 target_tensor: np.ndarray,
59 available_buffer: np.ndarray,
60 axes: Sequence[int],
61 ) -> np.ndarray:
62 control = axes[0]
63 rest = axes[1:]
64 active = linalg.slice_for_qubits_equal_to([control], 1)
65 sub_axes = [r - int(r > control) for r in rest]
66 target_view = target_tensor[active]
67 buffer_view = available_buffer[active]
68 result = protocols.apply_unitary_to_tensor(
69 self.sub_gate,
70 target_view,
71 buffer_view,
72 sub_axes,
73 default=NotImplemented)
74
75 if result is NotImplemented:
76 return NotImplemented
77
78 if result is target_view:
79 return target_tensor
80
81 if result is buffer_view:
82 inactive = linalg.slice_for_qubits_equal_to([control], 0)
83 available_buffer[inactive] = target_tensor[inactive]
84 return available_buffer
85
86 # HACK: assume they didn't somehow escape the slice view and edit the
87 # rest of target_tensor.
88 target_tensor[active] = result
89 return target_tensor
90
91 def _unitary_(self) -> Union[np.ndarray, NotImplementedType]:
92 sub_matrix = protocols.unitary(self.sub_gate, None)
93 if sub_matrix is None:
94 return NotImplemented
95 return linalg.block_diag(np.eye(sub_matrix.shape[0]), sub_matrix)
96
97 def __pow__(self, exponent: Any) -> 'ControlledGate':
98 new_sub_gate = protocols.pow(self.sub_gate,
99 exponent,
100 NotImplemented)
101 if new_sub_gate is NotImplemented:
102 return NotImplemented
103 return ControlledGate(new_sub_gate)
104
105 def _is_parameterized_(self):
106 return protocols.is_parameterized(self.sub_gate)
107
108 def _resolve_parameters_(self, param_resolver):
109 new_sub_gate = protocols.resolve_parameters(self.sub_gate,
110 param_resolver)
111 return ControlledGate(new_sub_gate)
112
113 def _trace_distance_bound_(self):
114 return protocols.trace_distance_bound(self.sub_gate)
115
116 def _circuit_diagram_info_(self,
117 args: protocols.CircuitDiagramInfoArgs
118 ) -> protocols.CircuitDiagramInfo:
119 sub_info = protocols.circuit_diagram_info(self.sub_gate, args, None)
120 if sub_info is None:
121 return NotImplemented
122 return protocols.CircuitDiagramInfo(
123 wire_symbols=('@',) + sub_info.wire_symbols,
124 exponent=sub_info.exponent)
125
126 def __str__(self):
127 return 'C' + str(self.sub_gate)
128
129 def __repr__(self):
130 return 'cirq.ControlledGate(sub_gate={!r})'.format(self.sub_gate)
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/cirq/ops/controlled_gate.py b/cirq/ops/controlled_gate.py
--- a/cirq/ops/controlled_gate.py
+++ b/cirq/ops/controlled_gate.py
@@ -29,12 +29,6 @@
Args:
sub_gate: The gate to add a control qubit to.
- default_extensions: The extensions method that should be used when
- determining if the controlled gate supports certain gate
- features. For example, if this extensions instance is able to
- cast sub_gate to a ExtrapolatableEffect then the controlled gate
- can also be cast to a ExtrapolatableEffect. When this value is
- None, an empty extensions instance is used instead.
"""
self.sub_gate = sub_gate
@@ -102,6 +96,15 @@
return NotImplemented
return ControlledGate(new_sub_gate)
+ def _phase_by_(self, phase_turns: float, qubit_index: int):
+ if qubit_index == 0:
+ return self
+ phased_gate = protocols.phase_by(
+ self.sub_gate, phase_turns, qubit_index-1, None)
+ if phased_gate is None:
+ return NotImplemented
+ return ControlledGate(phased_gate)
+
def _is_parameterized_(self):
return protocols.is_parameterized(self.sub_gate)
| {"golden_diff": "diff --git a/cirq/ops/controlled_gate.py b/cirq/ops/controlled_gate.py\n--- a/cirq/ops/controlled_gate.py\n+++ b/cirq/ops/controlled_gate.py\n@@ -29,12 +29,6 @@\n \n Args:\n sub_gate: The gate to add a control qubit to.\n- default_extensions: The extensions method that should be used when\n- determining if the controlled gate supports certain gate\n- features. For example, if this extensions instance is able to\n- cast sub_gate to a ExtrapolatableEffect then the controlled gate\n- can also be cast to a ExtrapolatableEffect. When this value is\n- None, an empty extensions instance is used instead.\n \"\"\"\n self.sub_gate = sub_gate\n \n@@ -102,6 +96,15 @@\n return NotImplemented\n return ControlledGate(new_sub_gate)\n \n+ def _phase_by_(self, phase_turns: float, qubit_index: int):\n+ if qubit_index == 0:\n+ return self\n+ phased_gate = protocols.phase_by(\n+ self.sub_gate, phase_turns, qubit_index-1, None)\n+ if phased_gate is None:\n+ return NotImplemented\n+ return ControlledGate(phased_gate)\n+\n def _is_parameterized_(self):\n return protocols.is_parameterized(self.sub_gate)\n", "issue": "Add `_phase_by_` magic method to `ControlledGate`\nComes after https://github.com/quantumlib/Cirq/issues/947\r\n\r\nThe logic is as follows: if the qubit index is 0 (the control), the operation is returned unchanged. If it is larger then we delegate to phasing the sub gate with `cirq.phase_by` and a default result of NotImplemented. If it's NotImplemented, we return NotImplemented. Otherwise we return a controlled gate with the phased sub gate.\n", "before_files": [{"content": "# Copyright 2018 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Union, Sequence, Any\n\nimport numpy as np\n\nfrom cirq import linalg, protocols\nfrom cirq.ops import raw_types\nfrom cirq.type_workarounds import NotImplementedType\n\n\nclass ControlledGate(raw_types.Gate):\n \"\"\"Augments existing gates with a control qubit.\"\"\"\n\n def __init__(self, sub_gate: raw_types.Gate) -> None:\n \"\"\"Initializes the controlled gate.\n\n Args:\n sub_gate: The gate to add a control qubit to.\n default_extensions: The extensions method that should be used when\n determining if the controlled gate supports certain gate\n features. For example, if this extensions instance is able to\n cast sub_gate to a ExtrapolatableEffect then the controlled gate\n can also be cast to a ExtrapolatableEffect. 
When this value is\n None, an empty extensions instance is used instead.\n \"\"\"\n self.sub_gate = sub_gate\n\n def validate_args(self, qubits) -> None:\n if len(qubits) < 1:\n raise ValueError('No control qubit specified.')\n self.sub_gate.validate_args(qubits[1:])\n\n def __eq__(self, other):\n if not isinstance(other, type(self)):\n return NotImplemented\n return self.sub_gate == other.sub_gate\n\n def __ne__(self, other):\n return not self == other\n\n def __hash__(self):\n return hash((ControlledGate, self.sub_gate))\n\n def _apply_unitary_to_tensor_(self,\n target_tensor: np.ndarray,\n available_buffer: np.ndarray,\n axes: Sequence[int],\n ) -> np.ndarray:\n control = axes[0]\n rest = axes[1:]\n active = linalg.slice_for_qubits_equal_to([control], 1)\n sub_axes = [r - int(r > control) for r in rest]\n target_view = target_tensor[active]\n buffer_view = available_buffer[active]\n result = protocols.apply_unitary_to_tensor(\n self.sub_gate,\n target_view,\n buffer_view,\n sub_axes,\n default=NotImplemented)\n\n if result is NotImplemented:\n return NotImplemented\n\n if result is target_view:\n return target_tensor\n\n if result is buffer_view:\n inactive = linalg.slice_for_qubits_equal_to([control], 0)\n available_buffer[inactive] = target_tensor[inactive]\n return available_buffer\n\n # HACK: assume they didn't somehow escape the slice view and edit the\n # rest of target_tensor.\n target_tensor[active] = result\n return target_tensor\n\n def _unitary_(self) -> Union[np.ndarray, NotImplementedType]:\n sub_matrix = protocols.unitary(self.sub_gate, None)\n if sub_matrix is None:\n return NotImplemented\n return linalg.block_diag(np.eye(sub_matrix.shape[0]), sub_matrix)\n\n def __pow__(self, exponent: Any) -> 'ControlledGate':\n new_sub_gate = protocols.pow(self.sub_gate,\n exponent,\n NotImplemented)\n if new_sub_gate is NotImplemented:\n return NotImplemented\n return ControlledGate(new_sub_gate)\n\n def _is_parameterized_(self):\n return protocols.is_parameterized(self.sub_gate)\n\n def _resolve_parameters_(self, param_resolver):\n new_sub_gate = protocols.resolve_parameters(self.sub_gate,\n param_resolver)\n return ControlledGate(new_sub_gate)\n\n def _trace_distance_bound_(self):\n return protocols.trace_distance_bound(self.sub_gate)\n\n def _circuit_diagram_info_(self,\n args: protocols.CircuitDiagramInfoArgs\n ) -> protocols.CircuitDiagramInfo:\n sub_info = protocols.circuit_diagram_info(self.sub_gate, args, None)\n if sub_info is None:\n return NotImplemented\n return protocols.CircuitDiagramInfo(\n wire_symbols=('@',) + sub_info.wire_symbols,\n exponent=sub_info.exponent)\n\n def __str__(self):\n return 'C' + str(self.sub_gate)\n\n def __repr__(self):\n return 'cirq.ControlledGate(sub_gate={!r})'.format(self.sub_gate)\n", "path": "cirq/ops/controlled_gate.py"}], "after_files": [{"content": "# Copyright 2018 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Union, Sequence, Any\n\nimport numpy as np\n\nfrom cirq import linalg, 
protocols\nfrom cirq.ops import raw_types\nfrom cirq.type_workarounds import NotImplementedType\n\n\nclass ControlledGate(raw_types.Gate):\n \"\"\"Augments existing gates with a control qubit.\"\"\"\n\n def __init__(self, sub_gate: raw_types.Gate) -> None:\n \"\"\"Initializes the controlled gate.\n\n Args:\n sub_gate: The gate to add a control qubit to.\n \"\"\"\n self.sub_gate = sub_gate\n\n def validate_args(self, qubits) -> None:\n if len(qubits) < 1:\n raise ValueError('No control qubit specified.')\n self.sub_gate.validate_args(qubits[1:])\n\n def __eq__(self, other):\n if not isinstance(other, type(self)):\n return NotImplemented\n return self.sub_gate == other.sub_gate\n\n def __ne__(self, other):\n return not self == other\n\n def __hash__(self):\n return hash((ControlledGate, self.sub_gate))\n\n def _apply_unitary_to_tensor_(self,\n target_tensor: np.ndarray,\n available_buffer: np.ndarray,\n axes: Sequence[int],\n ) -> np.ndarray:\n control = axes[0]\n rest = axes[1:]\n active = linalg.slice_for_qubits_equal_to([control], 1)\n sub_axes = [r - int(r > control) for r in rest]\n target_view = target_tensor[active]\n buffer_view = available_buffer[active]\n result = protocols.apply_unitary_to_tensor(\n self.sub_gate,\n target_view,\n buffer_view,\n sub_axes,\n default=NotImplemented)\n\n if result is NotImplemented:\n return NotImplemented\n\n if result is target_view:\n return target_tensor\n\n if result is buffer_view:\n inactive = linalg.slice_for_qubits_equal_to([control], 0)\n available_buffer[inactive] = target_tensor[inactive]\n return available_buffer\n\n # HACK: assume they didn't somehow escape the slice view and edit the\n # rest of target_tensor.\n target_tensor[active] = result\n return target_tensor\n\n def _unitary_(self) -> Union[np.ndarray, NotImplementedType]:\n sub_matrix = protocols.unitary(self.sub_gate, None)\n if sub_matrix is None:\n return NotImplemented\n return linalg.block_diag(np.eye(sub_matrix.shape[0]), sub_matrix)\n\n def __pow__(self, exponent: Any) -> 'ControlledGate':\n new_sub_gate = protocols.pow(self.sub_gate,\n exponent,\n NotImplemented)\n if new_sub_gate is NotImplemented:\n return NotImplemented\n return ControlledGate(new_sub_gate)\n\n def _phase_by_(self, phase_turns: float, qubit_index: int):\n if qubit_index == 0:\n return self\n phased_gate = protocols.phase_by(\n self.sub_gate, phase_turns, qubit_index-1, None)\n if phased_gate is None:\n return NotImplemented\n return ControlledGate(phased_gate)\n\n def _is_parameterized_(self):\n return protocols.is_parameterized(self.sub_gate)\n\n def _resolve_parameters_(self, param_resolver):\n new_sub_gate = protocols.resolve_parameters(self.sub_gate,\n param_resolver)\n return ControlledGate(new_sub_gate)\n\n def _trace_distance_bound_(self):\n return protocols.trace_distance_bound(self.sub_gate)\n\n def _circuit_diagram_info_(self,\n args: protocols.CircuitDiagramInfoArgs\n ) -> protocols.CircuitDiagramInfo:\n sub_info = protocols.circuit_diagram_info(self.sub_gate, args, None)\n if sub_info is None:\n return NotImplemented\n return protocols.CircuitDiagramInfo(\n wire_symbols=('@',) + sub_info.wire_symbols,\n exponent=sub_info.exponent)\n\n def __str__(self):\n return 'C' + str(self.sub_gate)\n\n def __repr__(self):\n return 'cirq.ControlledGate(sub_gate={!r})'.format(self.sub_gate)\n", "path": "cirq/ops/controlled_gate.py"}]} | 1,666 | 303 |
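A short sketch of exercising the behaviour this patch adds, via the public `cirq.phase_by` protocol; it assumes a cirq version that includes the `_phase_by_` method from the diff above:

```python
import cirq

controlled_x = cirq.ControlledGate(cirq.X)

# Phasing at the control qubit (index 0) returns the gate unchanged.
print(cirq.phase_by(controlled_x, 0.25, 0, default=None))

# Phasing at a target index delegates to the sub gate; X supports
# phasing, so this should print a controlled phased-X gate, not None.
print(cirq.phase_by(controlled_x, 0.25, 1, default=None))
```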
gh_patches_debug_4315 | rasdani/github-patches | git_diff | frappe__frappe-21985 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create block in workspace
### Information about bug
### App Versions
```
{
"erpnext": "14.27.2",
"frappe": "14.39.0",
"hrms": "14.4.3",
"india_compliance": "14.10.1",
"payments": "0.0.1"
}
```
### Route
```
Workspaces/Home
```
### Traceback
```
Traceback (most recent call last):
File "apps/frappe/frappe/app.py", line 66, in application
response = frappe.api.handle()
File "apps/frappe/frappe/api.py", line 54, in handle
return frappe.handler.handle()
File "apps/frappe/frappe/handler.py", line 47, in handle
data = execute_cmd(cmd)
File "apps/frappe/frappe/handler.py", line 85, in execute_cmd
return frappe.call(method, **frappe.form_dict)
File "apps/frappe/frappe/__init__.py", line 1608, in call
return fn(*args, **newargs)
File "apps/frappe/frappe/desk/search.py", line 35, in search_link
search_widget(
File "apps/frappe/frappe/desk/search.py", line 106, in search_widget
raise e
File "apps/frappe/frappe/desk/search.py", line 83, in search_widget
frappe.response["values"] = frappe.call(
File "apps/frappe/frappe/__init__.py", line 1608, in call
return fn(*args, **newargs)
File "apps/frappe/frappe/desk/doctype/custom_html_block/custom_html_block.py", line 18, in get_custom_blocks_for_user
condition_query = frappe.qb.get_query(customHTMLBlock)
AttributeError: type object 'MariaDB' has no attribute 'get_query'
```
### Request Data
```
{
"type": "POST",
"args": {
"txt": "",
"doctype": "Custom HTML Block",
"reference_doctype": "",
"query": "frappe.desk.doctype.custom_html_block.custom_html_block.get_custom_blocks_for_user"
},
"headers": {},
"error_handlers": {},
"url": "/api/method/frappe.desk.search.search_link"
}
```
### Response Data
```
{
"exception": "AttributeError: type object 'MariaDB' has no attribute 'get_query'"
}
```
### Module
accounts, other
### Version
{
"erpnext": "14.27.2",
"frappe": "14.39.0",
"hrms": "14.4.3",
"india_compliance": "14.10.1",
"payments": "0.0.1"
}
### Installation method
FrappeCloud
### Relevant log output / Stack trace / Full Error Message.
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `frappe/desk/doctype/custom_html_block/custom_html_block.py`
Content:
```
1 # Copyright (c) 2023, Frappe Technologies and contributors
2 # For license information, please see license.txt
3
4 import frappe
5 from frappe.model.document import Document
6 from frappe.query_builder.utils import DocType
7
8
9 class CustomHTMLBlock(Document):
10 # begin: auto-generated types
11 # This code is auto-generated. Do not modify anything in this block.
12
13 from typing import TYPE_CHECKING
14
15 if TYPE_CHECKING:
16 from frappe.core.doctype.has_role.has_role import HasRole
17 from frappe.types import DF
18
19 html: DF.Code | None
20 private: DF.Check
21 roles: DF.Table[HasRole]
22 script: DF.Code | None
23 style: DF.Code | None
24 # end: auto-generated types
25 pass
26
27
28 @frappe.whitelist()
29 def get_custom_blocks_for_user(doctype, txt, searchfield, start, page_len, filters):
30 # return logged in users private blocks and all public blocks
31 customHTMLBlock = DocType("Custom HTML Block")
32
33 condition_query = frappe.qb.get_query(customHTMLBlock)
34
35 return (
36 condition_query.select(customHTMLBlock.name).where(
37 (customHTMLBlock.private == 0)
38 | ((customHTMLBlock.owner == frappe.session.user) & (customHTMLBlock.private == 1))
39 )
40 ).run()
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/frappe/desk/doctype/custom_html_block/custom_html_block.py b/frappe/desk/doctype/custom_html_block/custom_html_block.py
--- a/frappe/desk/doctype/custom_html_block/custom_html_block.py
+++ b/frappe/desk/doctype/custom_html_block/custom_html_block.py
@@ -30,7 +30,7 @@
# return logged in users private blocks and all public blocks
customHTMLBlock = DocType("Custom HTML Block")
- condition_query = frappe.qb.get_query(customHTMLBlock)
+ condition_query = frappe.qb.from_(customHTMLBlock)
return (
condition_query.select(customHTMLBlock.name).where(
| {"golden_diff": "diff --git a/frappe/desk/doctype/custom_html_block/custom_html_block.py b/frappe/desk/doctype/custom_html_block/custom_html_block.py\n--- a/frappe/desk/doctype/custom_html_block/custom_html_block.py\n+++ b/frappe/desk/doctype/custom_html_block/custom_html_block.py\n@@ -30,7 +30,7 @@\n \t# return logged in users private blocks and all public blocks\n \tcustomHTMLBlock = DocType(\"Custom HTML Block\")\n \n-\tcondition_query = frappe.qb.get_query(customHTMLBlock)\n+\tcondition_query = frappe.qb.from_(customHTMLBlock)\n \n \treturn (\n \t\tcondition_query.select(customHTMLBlock.name).where(\n", "issue": "Create block in workspace\n### Information about bug\n\n### App Versions\r\n```\r\n{\r\n\t\"erpnext\": \"14.27.2\",\r\n\t\"frappe\": \"14.39.0\",\r\n\t\"hrms\": \"14.4.3\",\r\n\t\"india_compliance\": \"14.10.1\",\r\n\t\"payments\": \"0.0.1\"\r\n}\r\n```\r\n### Route\r\n```\r\nWorkspaces/Home\r\n```\r\n### Traceback\r\n```\r\nTraceback (most recent call last):\r\n File \"apps/frappe/frappe/app.py\", line 66, in application\r\n response = frappe.api.handle()\r\n File \"apps/frappe/frappe/api.py\", line 54, in handle\r\n return frappe.handler.handle()\r\n File \"apps/frappe/frappe/handler.py\", line 47, in handle\r\n data = execute_cmd(cmd)\r\n File \"apps/frappe/frappe/handler.py\", line 85, in execute_cmd\r\n return frappe.call(method, **frappe.form_dict)\r\n File \"apps/frappe/frappe/__init__.py\", line 1608, in call\r\n return fn(*args, **newargs)\r\n File \"apps/frappe/frappe/desk/search.py\", line 35, in search_link\r\n search_widget(\r\n File \"apps/frappe/frappe/desk/search.py\", line 106, in search_widget\r\n raise e\r\n File \"apps/frappe/frappe/desk/search.py\", line 83, in search_widget\r\n frappe.response[\"values\"] = frappe.call(\r\n File \"apps/frappe/frappe/__init__.py\", line 1608, in call\r\n return fn(*args, **newargs)\r\n File \"apps/frappe/frappe/desk/doctype/custom_html_block/custom_html_block.py\", line 18, in get_custom_blocks_for_user\r\n condition_query = frappe.qb.get_query(customHTMLBlock)\r\nAttributeError: type object 'MariaDB' has no attribute 'get_query'\r\n\r\n```\r\n### Request Data\r\n```\r\n{\r\n\t\"type\": \"POST\",\r\n\t\"args\": {\r\n\t\t\"txt\": \"\",\r\n\t\t\"doctype\": \"Custom HTML Block\",\r\n\t\t\"reference_doctype\": \"\",\r\n\t\t\"query\": \"frappe.desk.doctype.custom_html_block.custom_html_block.get_custom_blocks_for_user\"\r\n\t},\r\n\t\"headers\": {},\r\n\t\"error_handlers\": {},\r\n\t\"url\": \"/api/method/frappe.desk.search.search_link\"\r\n}\r\n```\r\n### Response Data\r\n```\r\n{\r\n\t\"exception\": \"AttributeError: type object 'MariaDB' has no attribute 'get_query'\"\r\n}\r\n```\n\n### Module\n\naccounts, other\n\n### Version\n\n{\r\n\t\"erpnext\": \"14.27.2\",\r\n\t\"frappe\": \"14.39.0\",\r\n\t\"hrms\": \"14.4.3\",\r\n\t\"india_compliance\": \"14.10.1\",\r\n\t\"payments\": \"0.0.1\"\r\n}\n\n### Installation method\n\nFrappeCloud\n\n### Relevant log output / Stack trace / Full Error Message.\n\n_No response_\n", "before_files": [{"content": "# Copyright (c) 2023, Frappe Technologies and contributors\n# For license information, please see license.txt\n\nimport frappe\nfrom frappe.model.document import Document\nfrom frappe.query_builder.utils import DocType\n\n\nclass CustomHTMLBlock(Document):\n\t# begin: auto-generated types\n\t# This code is auto-generated. 
Do not modify anything in this block.\n\n\tfrom typing import TYPE_CHECKING\n\n\tif TYPE_CHECKING:\n\t\tfrom frappe.core.doctype.has_role.has_role import HasRole\n\t\tfrom frappe.types import DF\n\n\t\thtml: DF.Code | None\n\t\tprivate: DF.Check\n\t\troles: DF.Table[HasRole]\n\t\tscript: DF.Code | None\n\t\tstyle: DF.Code | None\n\t# end: auto-generated types\n\tpass\n\n\[email protected]()\ndef get_custom_blocks_for_user(doctype, txt, searchfield, start, page_len, filters):\n\t# return logged in users private blocks and all public blocks\n\tcustomHTMLBlock = DocType(\"Custom HTML Block\")\n\n\tcondition_query = frappe.qb.get_query(customHTMLBlock)\n\n\treturn (\n\t\tcondition_query.select(customHTMLBlock.name).where(\n\t\t\t(customHTMLBlock.private == 0)\n\t\t\t| ((customHTMLBlock.owner == frappe.session.user) & (customHTMLBlock.private == 1))\n\t\t)\n\t).run()\n", "path": "frappe/desk/doctype/custom_html_block/custom_html_block.py"}], "after_files": [{"content": "# Copyright (c) 2023, Frappe Technologies and contributors\n# For license information, please see license.txt\n\nimport frappe\nfrom frappe.model.document import Document\nfrom frappe.query_builder.utils import DocType\n\n\nclass CustomHTMLBlock(Document):\n\t# begin: auto-generated types\n\t# This code is auto-generated. Do not modify anything in this block.\n\n\tfrom typing import TYPE_CHECKING\n\n\tif TYPE_CHECKING:\n\t\tfrom frappe.core.doctype.has_role.has_role import HasRole\n\t\tfrom frappe.types import DF\n\n\t\thtml: DF.Code | None\n\t\tprivate: DF.Check\n\t\troles: DF.Table[HasRole]\n\t\tscript: DF.Code | None\n\t\tstyle: DF.Code | None\n\t# end: auto-generated types\n\tpass\n\n\[email protected]()\ndef get_custom_blocks_for_user(doctype, txt, searchfield, start, page_len, filters):\n\t# return logged in users private blocks and all public blocks\n\tcustomHTMLBlock = DocType(\"Custom HTML Block\")\n\n\tcondition_query = frappe.qb.from_(customHTMLBlock)\n\n\treturn (\n\t\tcondition_query.select(customHTMLBlock.name).where(\n\t\t\t(customHTMLBlock.private == 0)\n\t\t\t| ((customHTMLBlock.owner == frappe.session.user) & (customHTMLBlock.private == 1))\n\t\t)\n\t).run()\n", "path": "frappe/desk/doctype/custom_html_block/custom_html_block.py"}]} | 1,324 | 150 |
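The fix in the record above swaps the nonexistent `frappe.qb.get_query` for the standard query-builder entry point `from_`. Frappe's query builder is built on pypika, so the same pattern can be sketched with pypika directly (the table and user values are placeholders):

```python
from pypika import Query, Table

block = Table("tabCustom HTML Block")
query = (
    Query.from_(block)
    .select(block.name)
    .where((block.private == 0) | (block.owner == "user@example.com"))
)
print(query)  # SELECT "name" FROM "tabCustom HTML Block" WHERE ...
```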
gh_patches_debug_9470 | rasdani/github-patches | git_diff | nextcloud__appstore-372 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
schema does not allow digits in app ids
Apparently app ids like ``twofactor_u2f`` are not allowed by the info.xml schema. Could we change that regex to allow digits too or are there any strong arguments against that?
ref https://github.com/nextcloud/appstore/blob/e4567ce707b332ca14eb35e322bff5ec4397191b/nextcloudappstore/core/api/v1/release/info.xsd#L245-L250
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nextcloudappstore/core/api/v1/urls.py`
Content:
```
1 from django.conf.urls import url
2 from django.views.decorators.http import etag
3 from nextcloudappstore.core.api.v1.views import AppView, AppReleaseView, \
4 CategoryView, SessionObtainAuthToken, RegenerateAuthToken, AppRatingView, \
5 AppRegisterView
6 from nextcloudappstore.core.caching import app_ratings_etag, categories_etag, \
7 apps_etag
8 from nextcloudappstore.core.versioning import SEMVER_REGEX
9
10 urlpatterns = [
11 url(r'^platform/(?P<version>\d+\.\d+\.\d+)/apps\.json$',
12 etag(apps_etag)(AppView.as_view()), name='app'),
13 url(r'^apps/releases/?$', AppReleaseView.as_view(),
14 name='app-release-create'),
15 url(r'^apps/?$', AppRegisterView.as_view(), name='app-register'),
16 url(r'^apps/(?P<pk>[a-z_]+)/?$', AppView.as_view(), name='app-delete'),
17 url(r'^ratings.json$',
18 etag(app_ratings_etag)(AppRatingView.as_view()),
19 name='app-ratings'),
20 url(r'^apps/(?P<app>[a-z_]+)/releases/(?:(?P<nightly>nightly)/)?'
21 r'(?P<version>' + SEMVER_REGEX + ')/?$',
22 AppReleaseView.as_view(), name='app-release-delete'),
23 url(r'^token/?$', SessionObtainAuthToken.as_view(), name='user-token'),
24 url(r'^token/new/?$', RegenerateAuthToken.as_view(),
25 name='user-token-new'),
26 url(r'^categories.json$',
27 etag(categories_etag)(CategoryView.as_view()), name='category'),
28 ]
29
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nextcloudappstore/core/api/v1/urls.py b/nextcloudappstore/core/api/v1/urls.py
--- a/nextcloudappstore/core/api/v1/urls.py
+++ b/nextcloudappstore/core/api/v1/urls.py
@@ -13,7 +13,7 @@
url(r'^apps/releases/?$', AppReleaseView.as_view(),
name='app-release-create'),
url(r'^apps/?$', AppRegisterView.as_view(), name='app-register'),
- url(r'^apps/(?P<pk>[a-z_]+)/?$', AppView.as_view(), name='app-delete'),
+ url(r'^apps/(?P<pk>[a-z0-9_]+)/?$', AppView.as_view(), name='app-delete'),
url(r'^ratings.json$',
etag(app_ratings_etag)(AppRatingView.as_view()),
name='app-ratings'),
| {"golden_diff": "diff --git a/nextcloudappstore/core/api/v1/urls.py b/nextcloudappstore/core/api/v1/urls.py\n--- a/nextcloudappstore/core/api/v1/urls.py\n+++ b/nextcloudappstore/core/api/v1/urls.py\n@@ -13,7 +13,7 @@\n url(r'^apps/releases/?$', AppReleaseView.as_view(),\n name='app-release-create'),\n url(r'^apps/?$', AppRegisterView.as_view(), name='app-register'),\n- url(r'^apps/(?P<pk>[a-z_]+)/?$', AppView.as_view(), name='app-delete'),\n+ url(r'^apps/(?P<pk>[a-z0-9_]+)/?$', AppView.as_view(), name='app-delete'),\n url(r'^ratings.json$',\n etag(app_ratings_etag)(AppRatingView.as_view()),\n name='app-ratings'),\n", "issue": "schema does not allow digits in app ids\nApparently app ids like ``twofactor_u2f`` are not allowed by the info.xml schema. Could we change that regex to allow digits too or are there any strong arguments against that?\r\n\r\nref https://github.com/nextcloud/appstore/blob/e4567ce707b332ca14eb35e322bff5ec4397191b/nextcloudappstore/core/api/v1/release/info.xsd#L245-L250\n", "before_files": [{"content": "from django.conf.urls import url\nfrom django.views.decorators.http import etag\nfrom nextcloudappstore.core.api.v1.views import AppView, AppReleaseView, \\\n CategoryView, SessionObtainAuthToken, RegenerateAuthToken, AppRatingView, \\\n AppRegisterView\nfrom nextcloudappstore.core.caching import app_ratings_etag, categories_etag, \\\n apps_etag\nfrom nextcloudappstore.core.versioning import SEMVER_REGEX\n\nurlpatterns = [\n url(r'^platform/(?P<version>\\d+\\.\\d+\\.\\d+)/apps\\.json$',\n etag(apps_etag)(AppView.as_view()), name='app'),\n url(r'^apps/releases/?$', AppReleaseView.as_view(),\n name='app-release-create'),\n url(r'^apps/?$', AppRegisterView.as_view(), name='app-register'),\n url(r'^apps/(?P<pk>[a-z_]+)/?$', AppView.as_view(), name='app-delete'),\n url(r'^ratings.json$',\n etag(app_ratings_etag)(AppRatingView.as_view()),\n name='app-ratings'),\n url(r'^apps/(?P<app>[a-z_]+)/releases/(?:(?P<nightly>nightly)/)?'\n r'(?P<version>' + SEMVER_REGEX + ')/?$',\n AppReleaseView.as_view(), name='app-release-delete'),\n url(r'^token/?$', SessionObtainAuthToken.as_view(), name='user-token'),\n url(r'^token/new/?$', RegenerateAuthToken.as_view(),\n name='user-token-new'),\n url(r'^categories.json$',\n etag(categories_etag)(CategoryView.as_view()), name='category'),\n]\n", "path": "nextcloudappstore/core/api/v1/urls.py"}], "after_files": [{"content": "from django.conf.urls import url\nfrom django.views.decorators.http import etag\nfrom nextcloudappstore.core.api.v1.views import AppView, AppReleaseView, \\\n CategoryView, SessionObtainAuthToken, RegenerateAuthToken, AppRatingView, \\\n AppRegisterView\nfrom nextcloudappstore.core.caching import app_ratings_etag, categories_etag, \\\n apps_etag\nfrom nextcloudappstore.core.versioning import SEMVER_REGEX\n\nurlpatterns = [\n url(r'^platform/(?P<version>\\d+\\.\\d+\\.\\d+)/apps\\.json$',\n etag(apps_etag)(AppView.as_view()), name='app'),\n url(r'^apps/releases/?$', AppReleaseView.as_view(),\n name='app-release-create'),\n url(r'^apps/?$', AppRegisterView.as_view(), name='app-register'),\n url(r'^apps/(?P<pk>[a-z0-9_]+)/?$', AppView.as_view(), name='app-delete'),\n url(r'^ratings.json$',\n etag(app_ratings_etag)(AppRatingView.as_view()),\n name='app-ratings'),\n url(r'^apps/(?P<app>[a-z_]+)/releases/(?:(?P<nightly>nightly)/)?'\n r'(?P<version>' + SEMVER_REGEX + ')/?$',\n AppReleaseView.as_view(), name='app-release-delete'),\n url(r'^token/?$', SessionObtainAuthToken.as_view(), name='user-token'),\n url(r'^token/new/?$', 
RegenerateAuthToken.as_view(),\n name='user-token-new'),\n url(r'^categories.json$',\n etag(categories_etag)(CategoryView.as_view()), name='category'),\n]\n", "path": "nextcloudappstore/core/api/v1/urls.py"}]} | 788 | 197 |
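A quick check of the regex widening from the patch above; note that the diff updates the `app-delete` route, while the `app-release-delete` route in the listed file still uses `[a-z_]+`:

```python
import re

old_pattern = re.compile(r"^[a-z_]+$")
new_pattern = re.compile(r"^[a-z0-9_]+$")

for app_id in ("twofactor_u2f", "contacts"):
    print(app_id, bool(old_pattern.match(app_id)), bool(new_pattern.match(app_id)))
# twofactor_u2f matches only the widened pattern; contacts matches both.
```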
gh_patches_debug_6773 | rasdani/github-patches | git_diff | spacetelescope__jwql-517 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
DEPENDENCY_LINKS in setup.py causing bug in logging_functions
With the introduction of the `DEPENDENCY_LINKS` variable in `setup.py`, the logging of monitors is now failing to log the versions of the dependencies listed, since `REQUIRES` is no longer immediately followed by `setup()`:
```python
for i, line in enumerate(data):
if 'REQUIRES = [' in line:
begin = i + 1
elif 'setup(' in line:
end = i - 2
```
The solution is simple: move `DEPENDENCY_LINKS` so that it is defined before `REQUIRES`.
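The failure mode is easy to reproduce in isolation. A minimal sketch of the same scan logic, run against hypothetical file contents, shows the stray variable being swept into the captured block:
```python
# Hypothetical stand-in for the real setup.py contents.
setup_py = """\
REQUIRES = [
    'asdf>=2.3.3',
]

DEPENDENCY_LINKS = ['git+https://example.com/pkg']

setup(
"""

lines = setup_py.splitlines()
begin = end = None
for i, line in enumerate(lines):
    if 'REQUIRES = [' in line:
        begin = i + 1
    elif 'setup(' in line:
        end = i - 2
print(lines[begin:end + 1])
# ["    'asdf>=2.3.3',", ']', '', "DEPENDENCY_LINKS = ['git+https://example.com/pkg']"]
```
With `DEPENDENCY_LINKS` moved above `REQUIRES`, the captured slice contains only the requirement lines again.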
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import numpy as np
2 from setuptools import setup
3 from setuptools import find_packages
4
5 VERSION = '0.22.0'
6
7 AUTHORS = 'Matthew Bourque, Misty Cracraft, Joe Filippazzo, Bryan Hilbert, '
8 AUTHORS += 'Graham Kanarek, Catherine Martlin, Johannes Sahlmann, Ben Sunnquist'
9
10 DESCRIPTION = 'The James Webb Space Telescope Quicklook Project'
11
12 REQUIRES = [
13 'asdf>=2.3.3',
14 'astropy>=3.2.1',
15 'astroquery>=0.3.9',
16 'authlib',
17 'bokeh>=1.0',
18 'codecov',
19 'django>=2.0',
20 'flake8',
21 'inflection',
22 'ipython',
23 'jinja2',
24 'jsonschema==2.6.0',
25 'jwedb>=0.0.3',
26 'matplotlib',
27 'numpy',
28 'numpydoc',
29 'pandas',
30 'psycopg2',
31 'pysiaf',
32 'pytest',
33 'pytest-cov',
34 'scipy',
35 'sphinx',
36 'sqlalchemy',
37 'stsci_rtd_theme',
38 'twine'
39 ]
40
41 DEPENDENCY_LINKS = ['git+https://github.com/spacetelescope/jwst#0.13.0']
42
43 setup(
44 name='jwql',
45 version=VERSION,
46 description=DESCRIPTION,
47 url='https://github.com/spacetelescope/jwql.git',
48 author=AUTHORS,
49 author_email='[email protected]',
50 license='BSD',
51 keywords=['astronomy', 'python'],
52 classifiers=['Programming Language :: Python'],
53 packages=find_packages(),
54 install_requires=REQUIRES,
55 dependency_links=DEPENDENCY_LINKS,
56 include_package_data=True,
57 include_dirs=[np.get_include()],
58 )
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -9,6 +9,7 @@
DESCRIPTION = 'The James Webb Space Telescope Quicklook Project'
+DEPENDENCY_LINKS = ['git+https://github.com/spacetelescope/jwst#0.13.0']
REQUIRES = [
'asdf>=2.3.3',
'astropy>=3.2.1',
@@ -38,8 +39,6 @@
'twine'
]
-DEPENDENCY_LINKS = ['git+https://github.com/spacetelescope/jwst#0.13.0']
-
setup(
name='jwql',
version=VERSION,
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -9,6 +9,7 @@\n \n DESCRIPTION = 'The James Webb Space Telescope Quicklook Project'\n \n+DEPENDENCY_LINKS = ['git+https://github.com/spacetelescope/jwst#0.13.0']\n REQUIRES = [\n 'asdf>=2.3.3',\n 'astropy>=3.2.1',\n@@ -38,8 +39,6 @@\n 'twine'\n ]\n \n-DEPENDENCY_LINKS = ['git+https://github.com/spacetelescope/jwst#0.13.0']\n-\n setup(\n name='jwql',\n version=VERSION,\n", "issue": "DEPENDENCY_LINKS in setup.py causing bug in logging_functions\nWith the introduction of the `DEPENDENCY_LINKS` variable in `setup.py`, the logging of monitors is now failing to log the versions of depenencies listed, since the `REQUIRES` is not immediately followed by `setup()`:\r\n\r\n```python\r\nfor i, line in enumerate(data):\r\n if 'REQUIRES = [' in line:\r\n begin = i + 1\r\n elif 'setup(' in line:\r\n end = i - 2\r\n```\r\n\r\nThe solution is so simple move `DEPENDENCY _LINKS` to be defined before `REQUIRES`.\n", "before_files": [{"content": "import numpy as np\nfrom setuptools import setup\nfrom setuptools import find_packages\n\nVERSION = '0.22.0'\n\nAUTHORS = 'Matthew Bourque, Misty Cracraft, Joe Filippazzo, Bryan Hilbert, '\nAUTHORS += 'Graham Kanarek, Catherine Martlin, Johannes Sahlmann, Ben Sunnquist'\n\nDESCRIPTION = 'The James Webb Space Telescope Quicklook Project'\n\nREQUIRES = [\n 'asdf>=2.3.3',\n 'astropy>=3.2.1',\n 'astroquery>=0.3.9',\n 'authlib',\n 'bokeh>=1.0',\n 'codecov',\n 'django>=2.0',\n 'flake8',\n 'inflection',\n 'ipython',\n 'jinja2',\n 'jsonschema==2.6.0',\n 'jwedb>=0.0.3',\n 'matplotlib',\n 'numpy',\n 'numpydoc',\n 'pandas',\n 'psycopg2',\n 'pysiaf',\n 'pytest',\n 'pytest-cov',\n 'scipy',\n 'sphinx',\n 'sqlalchemy',\n 'stsci_rtd_theme',\n 'twine'\n]\n\nDEPENDENCY_LINKS = ['git+https://github.com/spacetelescope/jwst#0.13.0']\n\nsetup(\n name='jwql',\n version=VERSION,\n description=DESCRIPTION,\n url='https://github.com/spacetelescope/jwql.git',\n author=AUTHORS,\n author_email='[email protected]',\n license='BSD',\n keywords=['astronomy', 'python'],\n classifiers=['Programming Language :: Python'],\n packages=find_packages(),\n install_requires=REQUIRES,\n dependency_links=DEPENDENCY_LINKS,\n include_package_data=True,\n include_dirs=[np.get_include()],\n)\n", "path": "setup.py"}], "after_files": [{"content": "import numpy as np\nfrom setuptools import setup\nfrom setuptools import find_packages\n\nVERSION = '0.22.0'\n\nAUTHORS = 'Matthew Bourque, Misty Cracraft, Joe Filippazzo, Bryan Hilbert, '\nAUTHORS += 'Graham Kanarek, Catherine Martlin, Johannes Sahlmann, Ben Sunnquist'\n\nDESCRIPTION = 'The James Webb Space Telescope Quicklook Project'\n\nDEPENDENCY_LINKS = ['git+https://github.com/spacetelescope/jwst#0.13.0']\nREQUIRES = [\n 'asdf>=2.3.3',\n 'astropy>=3.2.1',\n 'astroquery>=0.3.9',\n 'authlib',\n 'bokeh>=1.0',\n 'codecov',\n 'django>=2.0',\n 'flake8',\n 'inflection',\n 'ipython',\n 'jinja2',\n 'jsonschema==2.6.0',\n 'jwedb>=0.0.3',\n 'matplotlib',\n 'numpy',\n 'numpydoc',\n 'pandas',\n 'psycopg2',\n 'pysiaf',\n 'pytest',\n 'pytest-cov',\n 'scipy',\n 'sphinx',\n 'sqlalchemy',\n 'stsci_rtd_theme',\n 'twine'\n]\n\nsetup(\n name='jwql',\n version=VERSION,\n description=DESCRIPTION,\n url='https://github.com/spacetelescope/jwql.git',\n author=AUTHORS,\n author_email='[email protected]',\n license='BSD',\n keywords=['astronomy', 'python'],\n classifiers=['Programming Language :: Python'],\n packages=find_packages(),\n install_requires=REQUIRES,\n dependency_links=DEPENDENCY_LINKS,\n 
include_package_data=True,\n include_dirs=[np.get_include()],\n)\n", "path": "setup.py"}]} | 901 | 159 |
gh_patches_debug_35261 | rasdani/github-patches | git_diff | electricitymaps__electricitymaps-contrib-3264 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
First try at building a parser using data from Quebec
Hopefully this will show up on the map somehow. I look forward to seeing what changes will be made in order to make this parser functional.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parsers/CA_QC.py`
Content:
```
1 import requests
2 import logging
3 from pprint import pprint
4 # The arrow library is used to handle datetimes
5 import arrow
6
7 PRODUCTION_URL = "https://www.hydroquebec.com/data/documents-donnees/donnees-ouvertes/json/production.json"
8 CONSUMPTION_URL = "https://www.hydroquebec.com/data/documents-donnees/donnees-ouvertes/json/demande.json"
9 # Reluctant to call it 'timezone', since we are importing 'timezone' from datetime
10 timezone_id = 'America/Montreal'
11
12 def fetch_production(
13 zone_key="CA-QC",
14 session=None,
15 target_datetime=None,
16 logger=logging.getLogger(__name__),
17 ) -> dict:
18 """Requests the last known production mix (in MW) of a given region.
19 In this particular case, translated mapping of JSON keys are also required"""
20
21 def if_exists(elem: dict, etype: str):
22
23 english = {
24 "hydraulique": "hydro",
25 "thermique": "thermal",
26 "solaire": "solar",
27 "eolien": "wind",
28 "autres": "unknown",
29 "valeurs": "values",
30 }
31 english = {v: k for k, v in english.items()}
32 try:
33 return elem["valeurs"][english[etype]]
34 except KeyError:
35 return 0.0
36
37 data = _fetch_quebec_production()
38 for elem in reversed(data["details"]):
39 if elem["valeurs"]["total"] != 0:
40
41 return {
42 "zoneKey": zone_key,
43 "datetime": arrow.get(elem["date"], tzinfo=timezone_id).datetime,
44 "production": {
45 "biomass": 0.0,
46 "coal": 0.0,
47
48 # per https://github.com/tmrowco/electricitymap-contrib/issues/3218 , thermal generation
49 # is at Bécancour gas turbine. It is reported with a delay, and data source returning 0.0
50 # can indicate either no generation or not-yet-reported generation.
51 # To handle this, if reported value is 0.0, overwrite it to None, so that backend can know
52 # this is not entirely reliable and might be updated later.
53 "gas": if_exists(elem, "thermal") or None,
54
55 "hydro": if_exists(elem, "hydro"),
56 "nuclear": 0.0,
57 "oil": 0.0,
58 "solar": if_exists(elem, "solar"),
59 "wind": if_exists(elem, "wind"),
60 "geothermal": 0.0,
61 "unknown": if_exists(elem, "unknown"),
62 },
63 "source": "hydroquebec.com",
64 }
65
66
67 def fetch_consumption(zone_key="CA-QC", session=None, target_datetime=None, logger=None):
68 data = _fetch_quebec_consumption()
69 for elem in reversed(data["details"]):
70 if "demandeTotal" in elem["valeurs"]:
71 return {
72 "zoneKey": zone_key,
73 "datetime": arrow.get(elem["date"], tzinfo=timezone_id).datetime,
74 "consumption": elem["valeurs"]["demandeTotal"],
75 "source": "hydroquebec.com",
76 }
77
78
79 def _fetch_quebec_production(logger=logging.getLogger(__name__)) -> str:
80 response = requests.get(PRODUCTION_URL)
81
82 if not response.ok:
83 logger.info('CA-QC: failed getting requested production data from hydroquebec - URL {}'.format(PRODUCTION_URL))
84 return response.json()
85
86
87 def _fetch_quebec_consumption(logger=logging.getLogger(__name__)) -> str:
88 response = requests.get(CONSUMPTION_URL)
89
90 if not response.ok:
91 logger.info('CA-QC: failed getting requested consumption data from hydroquebec - URL {}'.format(CONSUMPTION_URL))
92 return response.json()
93
94
95 if __name__ == '__main__':
96 """Main method, never used by the Electricity Map backend, but handy for testing."""
97
98 test_logger = logging.getLogger()
99
100 print('fetch_production() ->')
101 pprint(fetch_production(logger=test_logger))
102
103 print('fetch_consumption() ->')
104 pprint(fetch_consumption(logger=test_logger))
105
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/parsers/CA_QC.py b/parsers/CA_QC.py
--- a/parsers/CA_QC.py
+++ b/parsers/CA_QC.py
@@ -25,8 +25,9 @@
"thermique": "thermal",
"solaire": "solar",
"eolien": "wind",
- "autres": "unknown",
- "valeurs": "values",
+ # autres is all renewable, and mostly biomass. See Github #3218
+ "autres": "biomass",
+ "valeurs": "values"
}
english = {v: k for k, v in english.items()}
try:
@@ -42,21 +43,18 @@
"zoneKey": zone_key,
"datetime": arrow.get(elem["date"], tzinfo=timezone_id).datetime,
"production": {
- "biomass": 0.0,
+ "biomass": if_exists(elem, "biomass"),
"coal": 0.0,
-
- # per https://github.com/tmrowco/electricitymap-contrib/issues/3218 , thermal generation
- # is at Bécancour gas turbine. It is reported with a delay, and data source returning 0.0
- # can indicate either no generation or not-yet-reported generation.
- # To handle this, if reported value is 0.0, overwrite it to None, so that backend can know
- # this is not entirely reliable and might be updated later.
- "gas": if_exists(elem, "thermal") or None,
-
"hydro": if_exists(elem, "hydro"),
"nuclear": 0.0,
"oil": 0.0,
"solar": if_exists(elem, "solar"),
"wind": if_exists(elem, "wind"),
+ # See Github issue #3218, Québec's thermal generation is at Bécancour gas turbine.
+ # It is reported with a delay, and data source returning 0.0 can indicate either no generation or not-yet-reported generation.
+ # Thus, if value is 0.0, overwrite it to None, so that backend can know this is not entirely reliable and might be updated later.
+ "gas": if_exists(elem, "thermal") or None,
+ # There are no geothermal electricity generation stations in Québec (and all of Canada for that matter).
"geothermal": 0.0,
"unknown": if_exists(elem, "unknown"),
},
| {"golden_diff": "diff --git a/parsers/CA_QC.py b/parsers/CA_QC.py\n--- a/parsers/CA_QC.py\n+++ b/parsers/CA_QC.py\n@@ -25,8 +25,9 @@\n \"thermique\": \"thermal\",\n \"solaire\": \"solar\",\n \"eolien\": \"wind\",\n- \"autres\": \"unknown\",\n- \"valeurs\": \"values\",\n+ # autres is all renewable, and mostly biomass. See Github #3218\n+ \"autres\": \"biomass\",\n+ \"valeurs\": \"values\"\n }\n english = {v: k for k, v in english.items()}\n try:\n@@ -42,21 +43,18 @@\n \"zoneKey\": zone_key,\n \"datetime\": arrow.get(elem[\"date\"], tzinfo=timezone_id).datetime,\n \"production\": {\n- \"biomass\": 0.0,\n+ \"biomass\": if_exists(elem, \"biomass\"),\n \"coal\": 0.0,\n-\n- # per https://github.com/tmrowco/electricitymap-contrib/issues/3218 , thermal generation\n- # is at B\u00e9cancour gas turbine. It is reported with a delay, and data source returning 0.0\n- # can indicate either no generation or not-yet-reported generation.\n- # To handle this, if reported value is 0.0, overwrite it to None, so that backend can know\n- # this is not entirely reliable and might be updated later.\n- \"gas\": if_exists(elem, \"thermal\") or None,\n-\n \"hydro\": if_exists(elem, \"hydro\"),\n \"nuclear\": 0.0,\n \"oil\": 0.0,\n \"solar\": if_exists(elem, \"solar\"),\n \"wind\": if_exists(elem, \"wind\"),\n+ # See Github issue #3218, Qu\u00e9bec's thermal generation is at B\u00e9cancour gas turbine.\n+ # It is reported with a delay, and data source returning 0.0 can indicate either no generation or not-yet-reported generation.\n+ # Thus, if value is 0.0, overwrite it to None, so that backend can know this is not entirely reliable and might be updated later.\n+ \"gas\": if_exists(elem, \"thermal\") or None,\n+ # There are no geothermal electricity generation stations in Qu\u00e9bec (and all of Canada for that matter).\n \"geothermal\": 0.0,\n \"unknown\": if_exists(elem, \"unknown\"),\n },\n", "issue": "First try at building a parser using data from Quebec\nHopefully this will show up on the map somehow. I look forward to seeing what changes will be made in order to make this parser functional. 
\n", "before_files": [{"content": "import requests\nimport logging\nfrom pprint import pprint\n# The arrow library is used to handle datetimes\nimport arrow\n\nPRODUCTION_URL = \"https://www.hydroquebec.com/data/documents-donnees/donnees-ouvertes/json/production.json\"\nCONSUMPTION_URL = \"https://www.hydroquebec.com/data/documents-donnees/donnees-ouvertes/json/demande.json\"\n# Reluctant to call it 'timezone', since we are importing 'timezone' from datetime\ntimezone_id = 'America/Montreal'\n\ndef fetch_production(\n zone_key=\"CA-QC\",\n session=None,\n target_datetime=None,\n logger=logging.getLogger(__name__),\n) -> dict:\n \"\"\"Requests the last known production mix (in MW) of a given region.\n In this particular case, translated mapping of JSON keys are also required\"\"\"\n\n def if_exists(elem: dict, etype: str):\n\n english = {\n \"hydraulique\": \"hydro\",\n \"thermique\": \"thermal\",\n \"solaire\": \"solar\",\n \"eolien\": \"wind\",\n \"autres\": \"unknown\",\n \"valeurs\": \"values\",\n }\n english = {v: k for k, v in english.items()}\n try:\n return elem[\"valeurs\"][english[etype]]\n except KeyError:\n return 0.0\n\n data = _fetch_quebec_production()\n for elem in reversed(data[\"details\"]):\n if elem[\"valeurs\"][\"total\"] != 0:\n\n return {\n \"zoneKey\": zone_key,\n \"datetime\": arrow.get(elem[\"date\"], tzinfo=timezone_id).datetime,\n \"production\": {\n \"biomass\": 0.0,\n \"coal\": 0.0,\n\n # per https://github.com/tmrowco/electricitymap-contrib/issues/3218 , thermal generation\n # is at B\u00e9cancour gas turbine. It is reported with a delay, and data source returning 0.0\n # can indicate either no generation or not-yet-reported generation.\n # To handle this, if reported value is 0.0, overwrite it to None, so that backend can know\n # this is not entirely reliable and might be updated later.\n \"gas\": if_exists(elem, \"thermal\") or None,\n\n \"hydro\": if_exists(elem, \"hydro\"),\n \"nuclear\": 0.0,\n \"oil\": 0.0,\n \"solar\": if_exists(elem, \"solar\"),\n \"wind\": if_exists(elem, \"wind\"),\n \"geothermal\": 0.0,\n \"unknown\": if_exists(elem, \"unknown\"),\n },\n \"source\": \"hydroquebec.com\",\n }\n\n\ndef fetch_consumption(zone_key=\"CA-QC\", session=None, target_datetime=None, logger=None):\n data = _fetch_quebec_consumption()\n for elem in reversed(data[\"details\"]):\n if \"demandeTotal\" in elem[\"valeurs\"]:\n return {\n \"zoneKey\": zone_key,\n \"datetime\": arrow.get(elem[\"date\"], tzinfo=timezone_id).datetime,\n \"consumption\": elem[\"valeurs\"][\"demandeTotal\"],\n \"source\": \"hydroquebec.com\",\n }\n\n\ndef _fetch_quebec_production(logger=logging.getLogger(__name__)) -> str:\n response = requests.get(PRODUCTION_URL)\n\n if not response.ok:\n logger.info('CA-QC: failed getting requested production data from hydroquebec - URL {}'.format(PRODUCTION_URL))\n return response.json()\n\n\ndef _fetch_quebec_consumption(logger=logging.getLogger(__name__)) -> str:\n response = requests.get(CONSUMPTION_URL)\n\n if not response.ok:\n logger.info('CA-QC: failed getting requested consumption data from hydroquebec - URL {}'.format(CONSUMPTION_URL))\n return response.json()\n\n\nif __name__ == '__main__':\n \"\"\"Main method, never used by the Electricity Map backend, but handy for testing.\"\"\"\n\n test_logger = logging.getLogger()\n\n print('fetch_production() ->')\n pprint(fetch_production(logger=test_logger))\n\n print('fetch_consumption() ->')\n pprint(fetch_consumption(logger=test_logger))\n", "path": "parsers/CA_QC.py"}], "after_files": [{"content": 
"import requests\nimport logging\nfrom pprint import pprint\n# The arrow library is used to handle datetimes\nimport arrow\n\nPRODUCTION_URL = \"https://www.hydroquebec.com/data/documents-donnees/donnees-ouvertes/json/production.json\"\nCONSUMPTION_URL = \"https://www.hydroquebec.com/data/documents-donnees/donnees-ouvertes/json/demande.json\"\n# Reluctant to call it 'timezone', since we are importing 'timezone' from datetime\ntimezone_id = 'America/Montreal'\n\ndef fetch_production(\n zone_key=\"CA-QC\",\n session=None,\n target_datetime=None,\n logger=logging.getLogger(__name__),\n) -> dict:\n \"\"\"Requests the last known production mix (in MW) of a given region.\n In this particular case, translated mapping of JSON keys are also required\"\"\"\n\n def if_exists(elem: dict, etype: str):\n\n english = {\n \"hydraulique\": \"hydro\",\n \"thermique\": \"thermal\",\n \"solaire\": \"solar\",\n \"eolien\": \"wind\",\n # autres is all renewable, and mostly biomass. See Github #3218\n \"autres\": \"biomass\",\n \"valeurs\": \"values\"\n }\n english = {v: k for k, v in english.items()}\n try:\n return elem[\"valeurs\"][english[etype]]\n except KeyError:\n return 0.0\n\n data = _fetch_quebec_production()\n for elem in reversed(data[\"details\"]):\n if elem[\"valeurs\"][\"total\"] != 0:\n\n return {\n \"zoneKey\": zone_key,\n \"datetime\": arrow.get(elem[\"date\"], tzinfo=timezone_id).datetime,\n \"production\": {\n \"biomass\": if_exists(elem, \"biomass\"),\n \"coal\": 0.0,\n \"hydro\": if_exists(elem, \"hydro\"),\n \"nuclear\": 0.0,\n \"oil\": 0.0,\n \"solar\": if_exists(elem, \"solar\"),\n \"wind\": if_exists(elem, \"wind\"),\n # See Github issue #3218, Qu\u00e9bec's thermal generation is at B\u00e9cancour gas turbine.\n # It is reported with a delay, and data source returning 0.0 can indicate either no generation or not-yet-reported generation.\n # Thus, if value is 0.0, overwrite it to None, so that backend can know this is not entirely reliable and might be updated later.\n \"gas\": if_exists(elem, \"thermal\") or None,\n # There are no geothermal electricity generation stations in Qu\u00e9bec (and all of Canada for that matter).\n \"geothermal\": 0.0,\n \"unknown\": if_exists(elem, \"unknown\"),\n },\n \"source\": \"hydroquebec.com\",\n }\n\n\ndef fetch_consumption(zone_key=\"CA-QC\", session=None, target_datetime=None, logger=None):\n data = _fetch_quebec_consumption()\n for elem in reversed(data[\"details\"]):\n if \"demandeTotal\" in elem[\"valeurs\"]:\n return {\n \"zoneKey\": zone_key,\n \"datetime\": arrow.get(elem[\"date\"], tzinfo=timezone_id).datetime,\n \"consumption\": elem[\"valeurs\"][\"demandeTotal\"],\n \"source\": \"hydroquebec.com\",\n }\n\n\ndef _fetch_quebec_production(logger=logging.getLogger(__name__)) -> str:\n response = requests.get(PRODUCTION_URL)\n\n if not response.ok:\n logger.info('CA-QC: failed getting requested production data from hydroquebec - URL {}'.format(PRODUCTION_URL))\n return response.json()\n\n\ndef _fetch_quebec_consumption(logger=logging.getLogger(__name__)) -> str:\n response = requests.get(CONSUMPTION_URL)\n\n if not response.ok:\n logger.info('CA-QC: failed getting requested consumption data from hydroquebec - URL {}'.format(CONSUMPTION_URL))\n return response.json()\n\n\nif __name__ == '__main__':\n \"\"\"Main method, never used by the Electricity Map backend, but handy for testing.\"\"\"\n\n test_logger = logging.getLogger()\n\n print('fetch_production() ->')\n pprint(fetch_production(logger=test_logger))\n\n print('fetch_consumption() ->')\n 
pprint(fetch_consumption(logger=test_logger))\n", "path": "parsers/CA_QC.py"}]} | 1,417 | 574 |
gh_patches_debug_13280 | rasdani/github-patches | git_diff | pyca__cryptography-10345 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Allow verifying an x509 cert chain without making assertions about the subject name
Thanks to all who worked on the X.509 verification support in version 42.
I am trying to use this API to verify a signing certificate, and I'm realizing that the API requires me to assert a subject name (a DNS name or IP address) to get the validation output. The subject name is not defined/not relevant in this application.
How can I verify that a certificate is in the chain of trust without asserting on the subject name?
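The patch below addresses this by exposing a client-style verifier that skips the subject-name assertion. A minimal sketch, assuming that `ClientVerifier` API (`root.pem` and `leaf.pem` are hypothetical file names):
```python
from cryptography import x509
from cryptography.x509.verification import PolicyBuilder, Store

# Trusted roots and the certificate under test (hypothetical files).
with open("root.pem", "rb") as f:
    store = Store(x509.load_pem_x509_certificates(f.read()))
with open("leaf.pem", "rb") as f:
    leaf = x509.load_pem_x509_certificate(f.read())

# No DNS name or IP address is asserted; only the chain of trust is checked.
verifier = PolicyBuilder().store(store).build_client_verifier()
verified = verifier.verify(leaf, [])  # raises VerificationError on failure
print([c.subject.rfc4514_string() for c in verified.chain])
```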
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cryptography/x509/verification.py`
Content:
```
1 # This file is dual licensed under the terms of the Apache License, Version
2 # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3 # for complete details.
4
5 from __future__ import annotations
6
7 import typing
8
9 from cryptography.hazmat.bindings._rust import x509 as rust_x509
10 from cryptography.x509.general_name import DNSName, IPAddress
11
12 __all__ = [
13 "Store",
14 "Subject",
15 "ServerVerifier",
16 "PolicyBuilder",
17 "VerificationError",
18 ]
19
20 Store = rust_x509.Store
21 Subject = typing.Union[DNSName, IPAddress]
22 ServerVerifier = rust_x509.ServerVerifier
23 PolicyBuilder = rust_x509.PolicyBuilder
24 VerificationError = rust_x509.VerificationError
25
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cryptography/x509/verification.py b/src/cryptography/x509/verification.py
--- a/src/cryptography/x509/verification.py
+++ b/src/cryptography/x509/verification.py
@@ -12,6 +12,8 @@
__all__ = [
"Store",
"Subject",
+ "VerifiedClient",
+ "ClientVerifier",
"ServerVerifier",
"PolicyBuilder",
"VerificationError",
@@ -19,6 +21,8 @@
Store = rust_x509.Store
Subject = typing.Union[DNSName, IPAddress]
+VerifiedClient = rust_x509.VerifiedClient
+ClientVerifier = rust_x509.ClientVerifier
ServerVerifier = rust_x509.ServerVerifier
PolicyBuilder = rust_x509.PolicyBuilder
VerificationError = rust_x509.VerificationError
| {"golden_diff": "diff --git a/src/cryptography/x509/verification.py b/src/cryptography/x509/verification.py\n--- a/src/cryptography/x509/verification.py\n+++ b/src/cryptography/x509/verification.py\n@@ -12,6 +12,8 @@\n __all__ = [\n \"Store\",\n \"Subject\",\n+ \"VerifiedClient\",\n+ \"ClientVerifier\",\n \"ServerVerifier\",\n \"PolicyBuilder\",\n \"VerificationError\",\n@@ -19,6 +21,8 @@\n \n Store = rust_x509.Store\n Subject = typing.Union[DNSName, IPAddress]\n+VerifiedClient = rust_x509.VerifiedClient\n+ClientVerifier = rust_x509.ClientVerifier\n ServerVerifier = rust_x509.ServerVerifier\n PolicyBuilder = rust_x509.PolicyBuilder\n VerificationError = rust_x509.VerificationError\n", "issue": "Allow verifying an x509 cert chain without making assertions about the subject name\nThanks to all who worked on the X.509 verification support in version 42.\r\n\r\nI am trying to use this API for verifying a signing certificate, and realizing that the API requires me to assert a subject name (DNS name or IP address) to get the validation output. The subject name is not defined/not relevant in this application.\r\n\r\nHow can I verify that a certificate is in the chain of trust without asserting on the subject name?\n", "before_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import annotations\n\nimport typing\n\nfrom cryptography.hazmat.bindings._rust import x509 as rust_x509\nfrom cryptography.x509.general_name import DNSName, IPAddress\n\n__all__ = [\n \"Store\",\n \"Subject\",\n \"ServerVerifier\",\n \"PolicyBuilder\",\n \"VerificationError\",\n]\n\nStore = rust_x509.Store\nSubject = typing.Union[DNSName, IPAddress]\nServerVerifier = rust_x509.ServerVerifier\nPolicyBuilder = rust_x509.PolicyBuilder\nVerificationError = rust_x509.VerificationError\n", "path": "src/cryptography/x509/verification.py"}], "after_files": [{"content": "# This file is dual licensed under the terms of the Apache License, Version\n# 2.0, and the BSD License. See the LICENSE file in the root of this repository\n# for complete details.\n\nfrom __future__ import annotations\n\nimport typing\n\nfrom cryptography.hazmat.bindings._rust import x509 as rust_x509\nfrom cryptography.x509.general_name import DNSName, IPAddress\n\n__all__ = [\n \"Store\",\n \"Subject\",\n \"VerifiedClient\",\n \"ClientVerifier\",\n \"ServerVerifier\",\n \"PolicyBuilder\",\n \"VerificationError\",\n]\n\nStore = rust_x509.Store\nSubject = typing.Union[DNSName, IPAddress]\nVerifiedClient = rust_x509.VerifiedClient\nClientVerifier = rust_x509.ClientVerifier\nServerVerifier = rust_x509.ServerVerifier\nPolicyBuilder = rust_x509.PolicyBuilder\nVerificationError = rust_x509.VerificationError\n", "path": "src/cryptography/x509/verification.py"}]} | 589 | 197 |
gh_patches_debug_2909 | rasdani/github-patches | git_diff | mirumee__ariadne-799 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support Starlette 0.18.0
Was just released: https://github.com/encode/starlette/releases/tag/0.18.0
and currently the dependency is pinned at `<0.18.0`.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #! /usr/bin/env python
2 import os
3 from setuptools import setup
4
5 CLASSIFIERS = [
6 "Development Status :: 4 - Beta",
7 "Intended Audience :: Developers",
8 "License :: OSI Approved :: BSD License",
9 "Operating System :: OS Independent",
10 "Programming Language :: Python",
11 "Programming Language :: Python :: 3.7",
12 "Programming Language :: Python :: 3.8",
13 "Programming Language :: Python :: 3.9",
14 "Programming Language :: Python :: 3.10",
15 "Topic :: Software Development :: Libraries :: Python Modules",
16 ]
17
18 README_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "README.md")
19 with open(README_PATH, "r", encoding="utf8") as f:
20 README = f.read()
21
22 setup(
23 name="ariadne",
24 author="Mirumee Software",
25 author_email="[email protected]",
26 description="Ariadne is a Python library for implementing GraphQL servers.",
27 long_description=README,
28 long_description_content_type="text/markdown",
29 license="BSD",
30 version="0.15.0.dev3",
31 url="https://github.com/mirumee/ariadne",
32 packages=["ariadne"],
33 include_package_data=True,
34 install_requires=[
35 "graphql-core>=3.2.0,<3.3",
36 "starlette<0.18",
37 "typing_extensions>=3.6.0",
38 ],
39 extras_require={"asgi-file-uploads": ["python-multipart>=0.0.5"]},
40 classifiers=CLASSIFIERS,
41 platforms=["any"],
42 zip_safe=False,
43 )
44
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -33,7 +33,7 @@
include_package_data=True,
install_requires=[
"graphql-core>=3.2.0,<3.3",
- "starlette<0.18",
+ "starlette<0.19",
"typing_extensions>=3.6.0",
],
extras_require={"asgi-file-uploads": ["python-multipart>=0.0.5"]},
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -33,7 +33,7 @@\n include_package_data=True,\n install_requires=[\n \"graphql-core>=3.2.0,<3.3\",\n- \"starlette<0.18\",\n+ \"starlette<0.19\",\n \"typing_extensions>=3.6.0\",\n ],\n extras_require={\"asgi-file-uploads\": [\"python-multipart>=0.0.5\"]},\n", "issue": "Support Starlette 0.18.0\nWas just released: https://github.com/encode/starlette/releases/tag/0.18.0\r\nand currently the dependency is pinned at `<0.18.0`.\n", "before_files": [{"content": "#! /usr/bin/env python\nimport os\nfrom setuptools import setup\n\nCLASSIFIERS = [\n \"Development Status :: 4 - Beta\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n]\n\nREADME_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\")\nwith open(README_PATH, \"r\", encoding=\"utf8\") as f:\n README = f.read()\n\nsetup(\n name=\"ariadne\",\n author=\"Mirumee Software\",\n author_email=\"[email protected]\",\n description=\"Ariadne is a Python library for implementing GraphQL servers.\",\n long_description=README,\n long_description_content_type=\"text/markdown\",\n license=\"BSD\",\n version=\"0.15.0.dev3\",\n url=\"https://github.com/mirumee/ariadne\",\n packages=[\"ariadne\"],\n include_package_data=True,\n install_requires=[\n \"graphql-core>=3.2.0,<3.3\",\n \"starlette<0.18\",\n \"typing_extensions>=3.6.0\",\n ],\n extras_require={\"asgi-file-uploads\": [\"python-multipart>=0.0.5\"]},\n classifiers=CLASSIFIERS,\n platforms=[\"any\"],\n zip_safe=False,\n)\n", "path": "setup.py"}], "after_files": [{"content": "#! /usr/bin/env python\nimport os\nfrom setuptools import setup\n\nCLASSIFIERS = [\n \"Development Status :: 4 - Beta\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n]\n\nREADME_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"README.md\")\nwith open(README_PATH, \"r\", encoding=\"utf8\") as f:\n README = f.read()\n\nsetup(\n name=\"ariadne\",\n author=\"Mirumee Software\",\n author_email=\"[email protected]\",\n description=\"Ariadne is a Python library for implementing GraphQL servers.\",\n long_description=README,\n long_description_content_type=\"text/markdown\",\n license=\"BSD\",\n version=\"0.15.0.dev3\",\n url=\"https://github.com/mirumee/ariadne\",\n packages=[\"ariadne\"],\n include_package_data=True,\n install_requires=[\n \"graphql-core>=3.2.0,<3.3\",\n \"starlette<0.19\",\n \"typing_extensions>=3.6.0\",\n ],\n extras_require={\"asgi-file-uploads\": [\"python-multipart>=0.0.5\"]},\n classifiers=CLASSIFIERS,\n platforms=[\"any\"],\n zip_safe=False,\n)\n", "path": "setup.py"}]} | 742 | 114 |
gh_patches_debug_2549 | rasdani/github-patches | git_diff | streamlit__streamlit-724 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix Danny's S3 sharing issue
It looks like `[s3] keyPrefix=...` isn't making it into the URLs being fetched from S3.
This is the address of a manifest protobuf we want to fetch:
`https://yelp-people-dev.s3-us-west-2.amazonaws.com/~dqn/st/0.49.0-A8NT/reports/NJphBiGR4twz88mU9wTegn/manifest.pb`
And this is the address that's being generated:
`https://yelp-people-dev.s3.amazonaws.com/~dqn/reports/NJphBiGR4twz88mU9wTegn/manifest.pb`
The generated address is missing the `st/<streamlit version>` bits. Looks like we're splitting on a forward slash on the pathname in `ConnectionManager.fetchManifest`, which is giving us the wrong result because the keyPrefix itself has a forward slash.
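The arithmetic of the failure is easy to sketch. The actual frontend code is TypeScript, but a Python reconstruction of the slicing (the exact fixed positions are a hypothetical guess) shows how the `st/<version>` segments vanish:
```python
from urllib.parse import urlparse

url = ("https://yelp-people-dev.s3-us-west-2.amazonaws.com"
       "/~dqn/st/0.49.0-A8NT/reports/NJphBiGR4twz88mU9wTegn/manifest.pb")
parts = urlparse(url).path.lstrip("/").split("/")
# If the code assumes the key prefix is a single path segment (parts[0]),
# everything between it and the trailing report path is dropped:
broken = "/".join([parts[0], *parts[-3:]])
print(broken)  # ~dqn/reports/NJphBiGR4twz88mU9wTegn/manifest.pb
```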
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/bart_vs_bikes.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # Copyright 2018-2019 Streamlit Inc.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import copy
17 from urllib.parse import urljoin
18 import pandas as pd
19 import streamlit as st
20
21
22 st.title("BART stops vs. bike rentals")
23
24 st.write(
25 """
26 This plot shows two things:
27 * Bay Area Rapit Transit (BART) train lines plotted as arcs connecting the
28 stations.
29 * A 3D hexagonal histogram plot of bike-sharing rentals (origin locations).
30 """
31 )
32
33
34 @st.cache
35 def from_data_file(filename):
36 dirname = "https://raw.githubusercontent.com/streamlit/streamlit/develop/examples/data/"
37 url = urljoin(dirname, filename)
38 return pd.read_json(url)
39
40
41 # Grab some data
42 bart_stop_stats = copy.deepcopy(from_data_file("bart_stop_stats.json"))
43 bart_path_stats = from_data_file("bart_path_stats.json")
44 bike_rental_stats = from_data_file("bike_rental_stats.json")
45
46 # Move bart stop name to the 1st column, so it looks nicer when printed as a
47 # table.
48 bart_stop_names = bart_stop_stats["name"]
49 bart_stop_stats.drop(labels=["name"], axis=1, inplace=True)
50 bart_stop_stats.insert(0, "name", bart_stop_names)
51
52 st.deck_gl_chart(
53 viewport={"latitude": 37.76, "longitude": -122.4, "zoom": 11, "pitch": 50},
54 layers=[
55 {
56 # Plot number of bike rentals throughtout the city
57 "type": "HexagonLayer",
58 "data": bike_rental_stats,
59 "radius": 200,
60 "elevationScale": 4,
61 "elevationRange": [0, 1000],
62 "pickable": True,
63 "extruded": True,
64 },
65 {
66 # Now plot locations of Bart stops
67 # ...and let's size the stops according to traffic
68 "type": "ScatterplotLayer",
69 "data": bart_stop_stats,
70 "radiusScale": 10,
71 "getRadius": 50,
72 },
73 {
74 # Now Add names of Bart stops
75 "type": "TextLayer",
76 "data": bart_stop_stats,
77 "getText": "name",
78 "getColor": [0, 0, 0, 200],
79 "getSize": 15,
80 },
81 {
82 # And draw some arcs connecting the stops
83 "type": "ArcLayer",
84 "data": bart_path_stats,
85 "pickable": True,
86 "autoHighlight": True,
87 "getStrokeWidth": 10,
88 },
89 ],
90 )
91
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/bart_vs_bikes.py b/examples/bart_vs_bikes.py
--- a/examples/bart_vs_bikes.py
+++ b/examples/bart_vs_bikes.py
@@ -33,7 +33,9 @@
@st.cache
def from_data_file(filename):
- dirname = "https://raw.githubusercontent.com/streamlit/streamlit/develop/examples/data/"
+ dirname = (
+ "https://raw.githubusercontent.com/streamlit/streamlit/develop/examples/data/"
+ )
url = urljoin(dirname, filename)
return pd.read_json(url)
| {"golden_diff": "diff --git a/examples/bart_vs_bikes.py b/examples/bart_vs_bikes.py\n--- a/examples/bart_vs_bikes.py\n+++ b/examples/bart_vs_bikes.py\n@@ -33,7 +33,9 @@\n \n @st.cache\n def from_data_file(filename):\n- dirname = \"https://raw.githubusercontent.com/streamlit/streamlit/develop/examples/data/\" \n+ dirname = (\n+ \"https://raw.githubusercontent.com/streamlit/streamlit/develop/examples/data/\"\n+ )\n url = urljoin(dirname, filename)\n return pd.read_json(url)\n", "issue": "Fix Danny's S3 sharing issue\nIt looks like `[s3] keyPrefix=...` isn't making it into the URLs being fetched from S3.\r\n\r\nThis is the address of a manifest protobuf we want to fetch:\r\n`https://yelp-people-dev.s3-us-west-2.amazonaws.com/~dqn/st/0.49.0-A8NT/reports/NJphBiGR4twz88mU9wTegn/manifest.pb`\r\n\r\nAnd this is the address that's being generated:\r\n`https://yelp-people-dev.s3.amazonaws.com/~dqn/reports/NJphBiGR4twz88mU9wTegn/manifest.pb`\r\n\r\nThe generated address is missing the `st/<streamlit version>` bits. Looks like we're splitting on a forward slash on the pathname in `ConnectionManager.fetchManifest`, which is giving us the wrong result because the keyPrefix itself has a forward slash.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright 2018-2019 Streamlit Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport copy\nfrom urllib.parse import urljoin\nimport pandas as pd\nimport streamlit as st\n\n\nst.title(\"BART stops vs. 
bike rentals\")\n\nst.write(\n \"\"\"\n This plot shows two things:\n * Bay Area Rapit Transit (BART) train lines plotted as arcs connecting the\n stations.\n * A 3D hexagonal histogram plot of bike-sharing rentals (origin locations).\n\"\"\"\n)\n\n\[email protected]\ndef from_data_file(filename):\n dirname = \"https://raw.githubusercontent.com/streamlit/streamlit/develop/examples/data/\" \n url = urljoin(dirname, filename)\n return pd.read_json(url)\n\n\n# Grab some data\nbart_stop_stats = copy.deepcopy(from_data_file(\"bart_stop_stats.json\"))\nbart_path_stats = from_data_file(\"bart_path_stats.json\")\nbike_rental_stats = from_data_file(\"bike_rental_stats.json\")\n\n# Move bart stop name to the 1st column, so it looks nicer when printed as a\n# table.\nbart_stop_names = bart_stop_stats[\"name\"]\nbart_stop_stats.drop(labels=[\"name\"], axis=1, inplace=True)\nbart_stop_stats.insert(0, \"name\", bart_stop_names)\n\nst.deck_gl_chart(\n viewport={\"latitude\": 37.76, \"longitude\": -122.4, \"zoom\": 11, \"pitch\": 50},\n layers=[\n {\n # Plot number of bike rentals throughtout the city\n \"type\": \"HexagonLayer\",\n \"data\": bike_rental_stats,\n \"radius\": 200,\n \"elevationScale\": 4,\n \"elevationRange\": [0, 1000],\n \"pickable\": True,\n \"extruded\": True,\n },\n {\n # Now plot locations of Bart stops\n # ...and let's size the stops according to traffic\n \"type\": \"ScatterplotLayer\",\n \"data\": bart_stop_stats,\n \"radiusScale\": 10,\n \"getRadius\": 50,\n },\n {\n # Now Add names of Bart stops\n \"type\": \"TextLayer\",\n \"data\": bart_stop_stats,\n \"getText\": \"name\",\n \"getColor\": [0, 0, 0, 200],\n \"getSize\": 15,\n },\n {\n # And draw some arcs connecting the stops\n \"type\": \"ArcLayer\",\n \"data\": bart_path_stats,\n \"pickable\": True,\n \"autoHighlight\": True,\n \"getStrokeWidth\": 10,\n },\n ],\n)\n", "path": "examples/bart_vs_bikes.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright 2018-2019 Streamlit Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport copy\nfrom urllib.parse import urljoin\nimport pandas as pd\nimport streamlit as st\n\n\nst.title(\"BART stops vs. 
bike rentals\")\n\nst.write(\n \"\"\"\n This plot shows two things:\n * Bay Area Rapit Transit (BART) train lines plotted as arcs connecting the\n stations.\n * A 3D hexagonal histogram plot of bike-sharing rentals (origin locations).\n\"\"\"\n)\n\n\[email protected]\ndef from_data_file(filename):\n dirname = (\n \"https://raw.githubusercontent.com/streamlit/streamlit/develop/examples/data/\"\n )\n url = urljoin(dirname, filename)\n return pd.read_json(url)\n\n\n# Grab some data\nbart_stop_stats = copy.deepcopy(from_data_file(\"bart_stop_stats.json\"))\nbart_path_stats = from_data_file(\"bart_path_stats.json\")\nbike_rental_stats = from_data_file(\"bike_rental_stats.json\")\n\n# Move bart stop name to the 1st column, so it looks nicer when printed as a\n# table.\nbart_stop_names = bart_stop_stats[\"name\"]\nbart_stop_stats.drop(labels=[\"name\"], axis=1, inplace=True)\nbart_stop_stats.insert(0, \"name\", bart_stop_names)\n\nst.deck_gl_chart(\n viewport={\"latitude\": 37.76, \"longitude\": -122.4, \"zoom\": 11, \"pitch\": 50},\n layers=[\n {\n # Plot number of bike rentals throughtout the city\n \"type\": \"HexagonLayer\",\n \"data\": bike_rental_stats,\n \"radius\": 200,\n \"elevationScale\": 4,\n \"elevationRange\": [0, 1000],\n \"pickable\": True,\n \"extruded\": True,\n },\n {\n # Now plot locations of Bart stops\n # ...and let's size the stops according to traffic\n \"type\": \"ScatterplotLayer\",\n \"data\": bart_stop_stats,\n \"radiusScale\": 10,\n \"getRadius\": 50,\n },\n {\n # Now Add names of Bart stops\n \"type\": \"TextLayer\",\n \"data\": bart_stop_stats,\n \"getText\": \"name\",\n \"getColor\": [0, 0, 0, 200],\n \"getSize\": 15,\n },\n {\n # And draw some arcs connecting the stops\n \"type\": \"ArcLayer\",\n \"data\": bart_path_stats,\n \"pickable\": True,\n \"autoHighlight\": True,\n \"getStrokeWidth\": 10,\n },\n ],\n)\n", "path": "examples/bart_vs_bikes.py"}]} | 1,337 | 124 |
gh_patches_debug_28660 | rasdani/github-patches | git_diff | mozilla__pontoon-2675 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
No warnings when trying to submit empty translations
I've noticed an increase in the number of empty strings in Firefox, where I have [special checks](https://test.flod.org/checks/).
Apparently, we don't warn anymore when someone tries to submit an empty translation.
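A minimal sketch with python-fluent shows why a parse-only check misses these: a message whose value renders empty is still a syntactically valid entry.
```python
from fluent.syntax import FluentParser, ast

parser = FluentParser()
entry = parser.parse_entry('key = { "" }')
# The entry parses as a perfectly valid Message, even though its value
# renders as an empty string -- so a syntax check alone never flags it.
print(isinstance(entry, ast.Message))  # True
```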
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pontoon/checks/libraries/pontoon_db.py`
Content:
```
1 import html
2 import re
3
4 import bleach
5
6 from collections import defaultdict
7 from fluent.syntax import FluentParser, ast
8
9 from pontoon.sync.formats.ftl import localizable_entries
10
11
12 MAX_LENGTH_RE = re.compile(r"MAX_LENGTH:( *)(\d+)", re.MULTILINE)
13 parser = FluentParser()
14
15
16 def get_max_length(comment):
17 """
18 Return max length value for an entity with MAX_LENTH.
19 """
20 max_length = re.findall(MAX_LENGTH_RE, comment or "")
21
22 if max_length:
23 return int(max_length[0][1])
24
25 return None
26
27
28 def run_checks(entity, original, string):
29 """
30 Group all checks related to the base UI that get stored in the DB
31 :arg pontoon.base.models.Entity entity: Source entity
32 :arg basestring original: an original string
33 :arg basestring string: a translation
34 """
35 checks = defaultdict(list)
36 resource_ext = entity.resource.format
37
38 if resource_ext == "lang":
39 # Newlines are not allowed in .lang files (bug 1190754)
40 if "\n" in string:
41 checks["pErrors"].append("Newline characters are not allowed")
42
43 # Prevent translations exceeding the given length limit
44 max_length = get_max_length(entity.comment)
45
46 if max_length:
47 string_length = len(
48 html.unescape(bleach.clean(string, strip=True, tags=()))
49 )
50
51 if string_length > max_length:
52 checks["pErrors"].append("Translation too long")
53
54 # Bug 1599056: Original and translation must either both end in a newline,
55 # or none of them should.
56 if resource_ext == "po":
57 if original.endswith("\n") != string.endswith("\n"):
58 checks["pErrors"].append("Ending newline mismatch")
59
60 # Prevent empty translation submissions if not supported
61 if string == "" and not entity.resource.allows_empty_translations:
62 checks["pErrors"].append("Empty translations are not allowed")
63
64 # FTL checks
65 if resource_ext == "ftl" and string != "":
66 translation_ast = parser.parse_entry(string)
67 entity_ast = parser.parse_entry(entity.string)
68
69 # Parse error
70 if isinstance(translation_ast, ast.Junk):
71 checks["pErrors"].append(translation_ast.annotations[0].message)
72
73 # Not a localizable entry
74 elif not isinstance(translation_ast, localizable_entries):
75 checks["pErrors"].append(
76 "Translation needs to be a valid localizable entry"
77 )
78
79 # Message ID mismatch
80 elif entity_ast.id.name != translation_ast.id.name:
81 checks["pErrors"].append("Translation key needs to match source string key")
82
83 return checks
84
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pontoon/checks/libraries/pontoon_db.py b/pontoon/checks/libraries/pontoon_db.py
--- a/pontoon/checks/libraries/pontoon_db.py
+++ b/pontoon/checks/libraries/pontoon_db.py
@@ -5,6 +5,7 @@
from collections import defaultdict
from fluent.syntax import FluentParser, ast
+from fluent.syntax.visitor import Visitor
from pontoon.sync.formats.ftl import localizable_entries
@@ -25,6 +26,24 @@
return None
+class IsEmptyVisitor(Visitor):
+ def __init__(self):
+ self.is_empty = True
+
+ def visit_Placeable(self, node):
+ if isinstance(node.expression, ast.Literal):
+ if node.expression.parse()["value"]:
+ self.is_empty = False
+ elif isinstance(node.expression, ast.SelectExpression):
+ self.generic_visit(node.expression)
+ else:
+ self.is_empty = False
+
+ def visit_TextElement(self, node):
+ if node.value:
+ self.is_empty = False
+
+
def run_checks(entity, original, string):
"""
Group all checks related to the base UI that get stored in the DB
@@ -80,4 +99,12 @@
elif entity_ast.id.name != translation_ast.id.name:
checks["pErrors"].append("Translation key needs to match source string key")
+ # Empty translation entry warning; set here rather than pontoon_non_db.py
+ # to avoid needing to parse the Fluent message twice.
+ else:
+ visitor = IsEmptyVisitor()
+ visitor.visit(translation_ast)
+ if visitor.is_empty:
+ checks["pndbWarnings"].append("Empty translation")
+
return checks
| {"golden_diff": "diff --git a/pontoon/checks/libraries/pontoon_db.py b/pontoon/checks/libraries/pontoon_db.py\n--- a/pontoon/checks/libraries/pontoon_db.py\n+++ b/pontoon/checks/libraries/pontoon_db.py\n@@ -5,6 +5,7 @@\n \n from collections import defaultdict\n from fluent.syntax import FluentParser, ast\n+from fluent.syntax.visitor import Visitor\n \n from pontoon.sync.formats.ftl import localizable_entries\n \n@@ -25,6 +26,24 @@\n return None\n \n \n+class IsEmptyVisitor(Visitor):\n+ def __init__(self):\n+ self.is_empty = True\n+\n+ def visit_Placeable(self, node):\n+ if isinstance(node.expression, ast.Literal):\n+ if node.expression.parse()[\"value\"]:\n+ self.is_empty = False\n+ elif isinstance(node.expression, ast.SelectExpression):\n+ self.generic_visit(node.expression)\n+ else:\n+ self.is_empty = False\n+\n+ def visit_TextElement(self, node):\n+ if node.value:\n+ self.is_empty = False\n+\n+\n def run_checks(entity, original, string):\n \"\"\"\n Group all checks related to the base UI that get stored in the DB\n@@ -80,4 +99,12 @@\n elif entity_ast.id.name != translation_ast.id.name:\n checks[\"pErrors\"].append(\"Translation key needs to match source string key\")\n \n+ # Empty translation entry warning; set here rather than pontoon_non_db.py\n+ # to avoid needing to parse the Fluent message twice.\n+ else:\n+ visitor = IsEmptyVisitor()\n+ visitor.visit(translation_ast)\n+ if visitor.is_empty:\n+ checks[\"pndbWarnings\"].append(\"Empty translation\")\n+\n return checks\n", "issue": "No warnings when trying to submit empty translations\nI've noticed an increase in the number of empty strings in Firefox, where I have [special checks](https://test.flod.org/checks/).\r\n\r\nApparently, we don't warn anymore when someone tries to submit an empty translation.\n", "before_files": [{"content": "import html\nimport re\n\nimport bleach\n\nfrom collections import defaultdict\nfrom fluent.syntax import FluentParser, ast\n\nfrom pontoon.sync.formats.ftl import localizable_entries\n\n\nMAX_LENGTH_RE = re.compile(r\"MAX_LENGTH:( *)(\\d+)\", re.MULTILINE)\nparser = FluentParser()\n\n\ndef get_max_length(comment):\n \"\"\"\n Return max length value for an entity with MAX_LENTH.\n \"\"\"\n max_length = re.findall(MAX_LENGTH_RE, comment or \"\")\n\n if max_length:\n return int(max_length[0][1])\n\n return None\n\n\ndef run_checks(entity, original, string):\n \"\"\"\n Group all checks related to the base UI that get stored in the DB\n :arg pontoon.base.models.Entity entity: Source entity\n :arg basestring original: an original string\n :arg basestring string: a translation\n \"\"\"\n checks = defaultdict(list)\n resource_ext = entity.resource.format\n\n if resource_ext == \"lang\":\n # Newlines are not allowed in .lang files (bug 1190754)\n if \"\\n\" in string:\n checks[\"pErrors\"].append(\"Newline characters are not allowed\")\n\n # Prevent translations exceeding the given length limit\n max_length = get_max_length(entity.comment)\n\n if max_length:\n string_length = len(\n html.unescape(bleach.clean(string, strip=True, tags=()))\n )\n\n if string_length > max_length:\n checks[\"pErrors\"].append(\"Translation too long\")\n\n # Bug 1599056: Original and translation must either both end in a newline,\n # or none of them should.\n if resource_ext == \"po\":\n if original.endswith(\"\\n\") != string.endswith(\"\\n\"):\n checks[\"pErrors\"].append(\"Ending newline mismatch\")\n\n # Prevent empty translation submissions if not supported\n if string == \"\" and not 
entity.resource.allows_empty_translations:\n checks[\"pErrors\"].append(\"Empty translations are not allowed\")\n\n # FTL checks\n if resource_ext == \"ftl\" and string != \"\":\n translation_ast = parser.parse_entry(string)\n entity_ast = parser.parse_entry(entity.string)\n\n # Parse error\n if isinstance(translation_ast, ast.Junk):\n checks[\"pErrors\"].append(translation_ast.annotations[0].message)\n\n # Not a localizable entry\n elif not isinstance(translation_ast, localizable_entries):\n checks[\"pErrors\"].append(\n \"Translation needs to be a valid localizable entry\"\n )\n\n # Message ID mismatch\n elif entity_ast.id.name != translation_ast.id.name:\n checks[\"pErrors\"].append(\"Translation key needs to match source string key\")\n\n return checks\n", "path": "pontoon/checks/libraries/pontoon_db.py"}], "after_files": [{"content": "import html\nimport re\n\nimport bleach\n\nfrom collections import defaultdict\nfrom fluent.syntax import FluentParser, ast\nfrom fluent.syntax.visitor import Visitor\n\nfrom pontoon.sync.formats.ftl import localizable_entries\n\n\nMAX_LENGTH_RE = re.compile(r\"MAX_LENGTH:( *)(\\d+)\", re.MULTILINE)\nparser = FluentParser()\n\n\ndef get_max_length(comment):\n \"\"\"\n Return max length value for an entity with MAX_LENTH.\n \"\"\"\n max_length = re.findall(MAX_LENGTH_RE, comment or \"\")\n\n if max_length:\n return int(max_length[0][1])\n\n return None\n\n\nclass IsEmptyVisitor(Visitor):\n def __init__(self):\n self.is_empty = True\n\n def visit_Placeable(self, node):\n if isinstance(node.expression, ast.Literal):\n if node.expression.parse()[\"value\"]:\n self.is_empty = False\n elif isinstance(node.expression, ast.SelectExpression):\n self.generic_visit(node.expression)\n else:\n self.is_empty = False\n\n def visit_TextElement(self, node):\n if node.value:\n self.is_empty = False\n\n\ndef run_checks(entity, original, string):\n \"\"\"\n Group all checks related to the base UI that get stored in the DB\n :arg pontoon.base.models.Entity entity: Source entity\n :arg basestring original: an original string\n :arg basestring string: a translation\n \"\"\"\n checks = defaultdict(list)\n resource_ext = entity.resource.format\n\n if resource_ext == \"lang\":\n # Newlines are not allowed in .lang files (bug 1190754)\n if \"\\n\" in string:\n checks[\"pErrors\"].append(\"Newline characters are not allowed\")\n\n # Prevent translations exceeding the given length limit\n max_length = get_max_length(entity.comment)\n\n if max_length:\n string_length = len(\n html.unescape(bleach.clean(string, strip=True, tags=()))\n )\n\n if string_length > max_length:\n checks[\"pErrors\"].append(\"Translation too long\")\n\n # Bug 1599056: Original and translation must either both end in a newline,\n # or none of them should.\n if resource_ext == \"po\":\n if original.endswith(\"\\n\") != string.endswith(\"\\n\"):\n checks[\"pErrors\"].append(\"Ending newline mismatch\")\n\n # Prevent empty translation submissions if not supported\n if string == \"\" and not entity.resource.allows_empty_translations:\n checks[\"pErrors\"].append(\"Empty translations are not allowed\")\n\n # FTL checks\n if resource_ext == \"ftl\" and string != \"\":\n translation_ast = parser.parse_entry(string)\n entity_ast = parser.parse_entry(entity.string)\n\n # Parse error\n if isinstance(translation_ast, ast.Junk):\n checks[\"pErrors\"].append(translation_ast.annotations[0].message)\n\n # Not a localizable entry\n elif not isinstance(translation_ast, localizable_entries):\n checks[\"pErrors\"].append(\n 
\"Translation needs to be a valid localizable entry\"\n )\n\n # Message ID mismatch\n elif entity_ast.id.name != translation_ast.id.name:\n checks[\"pErrors\"].append(\"Translation key needs to match source string key\")\n\n # Empty translation entry warning; set here rather than pontoon_non_db.py\n # to avoid needing to parse the Fluent message twice.\n else:\n visitor = IsEmptyVisitor()\n visitor.visit(translation_ast)\n if visitor.is_empty:\n checks[\"pndbWarnings\"].append(\"Empty translation\")\n\n return checks\n", "path": "pontoon/checks/libraries/pontoon_db.py"}]} | 1,073 | 394 |
gh_patches_debug_6668 | rasdani/github-patches | git_diff | pyjanitor-devs__pyjanitor-635 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Natsort import error
# Brief Description
The module `natsort` isn't found because #627 added it only to the dev requirements, when it needs to be in the main requirements file: `janitor/__init__.py` does `from .functions import *`, and `janitor/functions.py` imports `natsort` at module level.
Rather than requiring it outright, it could also be brought in behind a `try`/`except` import guard, as was done in #97.
# Error Messages
```
/usr/local/lib/python3.7/site-packages/janitor/functions.py:25: in <module>
from natsort import index_natsorted, natsorted
E ModuleNotFoundError: No module named 'natsort'
```
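
As a sketch of the `try`/`except` import guard suggested above (hypothetical wrapper name; the project's actual fix may differ):

```python
# Hypothetical sketch of the optional-import guard; not pyjanitor's actual code.
try:
    from natsort import index_natsorted, natsorted  # the imports named in the traceback
except ImportError:
    index_natsorted = natsorted = None


def natsorted_or_raise(values):
    """Natural-sort `values`, failing with an actionable message if natsort is absent."""
    if natsorted is None:
        raise ImportError("natsort is required here; install it with `pip install natsort`")
    return natsorted(values)
```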
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import re
2 from pathlib import Path
3
4 from setuptools import setup
5
6
7 def requirements():
8 with open("requirements.txt", "r+") as f:
9 return f.read()
10
11
12 def generate_long_description() -> str:
13 """
14 Extra chunks from README for PyPI description.
15
16 Target chunks must be contained within `.. pypi-doc` pair comments,
17 so there must be an even number of comments in README.
18
19 :returns: Extracted description from README
20
21 """
22 # Read the contents of README file
23 this_directory = Path(__file__).parent
24 with open(this_directory / "README.rst", encoding="utf-8") as f:
25 readme = f.read()
26
27 # Find pypi-doc comments in README
28 indices = [m.start() for m in re.finditer(".. pypi-doc", readme)]
29 if len(indices) % 2 != 0:
30 raise Exception("Odd number of `.. pypi-doc` comments in README")
31
32 # Loop through pairs of comments and save text between pairs
33 long_description = ""
34 for i in range(0, len(indices), 2):
35 start_index = indices[i] + 11
36 end_index = indices[i + 1]
37 long_description += readme[start_index:end_index]
38 return long_description
39
40
41 extra_spark = ["pyspark"]
42 extra_biology = ["biopython"]
43 extra_chemistry = ["rdkit"]
44 extra_engineering = ["unyt"]
45 extra_all = extra_biology + extra_engineering + extra_spark
46
47 setup(
48 name="pyjanitor",
49 version="0.20.1",
50 description="Tools for cleaning pandas DataFrames",
51 author="Eric J. Ma",
52 author_email="[email protected]",
53 url="https://github.com/ericmjl/pyjanitor",
54 license="MIT",
55 packages=["janitor"],
56 install_requires=requirements(),
57 extras_require={
58 "all": extra_all,
59 "biology": extra_biology,
60 # "chemistry": extra_chemistry, should be inserted once rdkit
61 # fixes https://github.com/rdkit/rdkit/issues/1812
62 "engineering": extra_engineering,
63 "spark": extra_spark,
64 },
65 python_requires=">=3.6",
66 long_description=generate_long_description(),
67 long_description_content_type="text/x-rst",
68 )
69
```
Path: `janitor/__init__.py`
Content:
```
1 try:
2 import janitor.xarray
3 except ImportError:
4 pass
5
6 from .functions import * # noqa: F403, F401
7 from .math import *
8 from .ml import get_features_targets as _get_features_targets
9 from .utils import refactored_function
10
11 # from .dataframe import JanitorDataFrame as DataFrame # noqa: F401
12 # from .dataframe import JanitorSeries as Series # noqa: F401
13
14
15 @refactored_function(
16 "get_features_targets() has moved. Please use ml.get_features_targets()."
17 )
18 def get_features_targets(*args, **kwargs):
19 return _get_features_targets(*args, **kwargs)
20
21
22 __version__ = "0.20.1"
23
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/janitor/__init__.py b/janitor/__init__.py
--- a/janitor/__init__.py
+++ b/janitor/__init__.py
@@ -19,4 +19,4 @@
return _get_features_targets(*args, **kwargs)
-__version__ = "0.20.1"
+__version__ = "0.20.2"
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -46,7 +46,7 @@
setup(
name="pyjanitor",
- version="0.20.1",
+ version="0.20.2",
description="Tools for cleaning pandas DataFrames",
author="Eric J. Ma",
author_email="[email protected]",
| {"golden_diff": "diff --git a/janitor/__init__.py b/janitor/__init__.py\n--- a/janitor/__init__.py\n+++ b/janitor/__init__.py\n@@ -19,4 +19,4 @@\n return _get_features_targets(*args, **kwargs)\n \n \n-__version__ = \"0.20.1\"\n+__version__ = \"0.20.2\"\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -46,7 +46,7 @@\n \n setup(\n name=\"pyjanitor\",\n- version=\"0.20.1\",\n+ version=\"0.20.2\",\n description=\"Tools for cleaning pandas DataFrames\",\n author=\"Eric J. Ma\",\n author_email=\"[email protected]\",\n", "issue": "Natsort import error\n# Brief Description\r\n\r\nThe module `natsort` isn't found because it was added (in #627) to dev requirements but needs to be in the main requirements file. It is imported with all functions from the init script. \r\n\r\nRather than requiring it, perhaps it could also be brought in with a `try`, `except` per #97 \r\n\r\n# Error Messages\r\n\r\n```\r\n /usr/local/lib/python3.7/site-packages/janitor/functions.py:25: in <module>\r\n from natsort import index_natsorted, natsorted\r\n E ModuleNotFoundError: No module named 'natsort'\r\n```\n", "before_files": [{"content": "import re\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef requirements():\n with open(\"requirements.txt\", \"r+\") as f:\n return f.read()\n\n\ndef generate_long_description() -> str:\n \"\"\"\n Extra chunks from README for PyPI description.\n\n Target chunks must be contained within `.. pypi-doc` pair comments,\n so there must be an even number of comments in README.\n\n :returns: Extracted description from README\n\n \"\"\"\n # Read the contents of README file\n this_directory = Path(__file__).parent\n with open(this_directory / \"README.rst\", encoding=\"utf-8\") as f:\n readme = f.read()\n\n # Find pypi-doc comments in README\n indices = [m.start() for m in re.finditer(\".. pypi-doc\", readme)]\n if len(indices) % 2 != 0:\n raise Exception(\"Odd number of `.. pypi-doc` comments in README\")\n\n # Loop through pairs of comments and save text between pairs\n long_description = \"\"\n for i in range(0, len(indices), 2):\n start_index = indices[i] + 11\n end_index = indices[i + 1]\n long_description += readme[start_index:end_index]\n return long_description\n\n\nextra_spark = [\"pyspark\"]\nextra_biology = [\"biopython\"]\nextra_chemistry = [\"rdkit\"]\nextra_engineering = [\"unyt\"]\nextra_all = extra_biology + extra_engineering + extra_spark\n\nsetup(\n name=\"pyjanitor\",\n version=\"0.20.1\",\n description=\"Tools for cleaning pandas DataFrames\",\n author=\"Eric J. 
Ma\",\n author_email=\"[email protected]\",\n url=\"https://github.com/ericmjl/pyjanitor\",\n license=\"MIT\",\n packages=[\"janitor\"],\n install_requires=requirements(),\n extras_require={\n \"all\": extra_all,\n \"biology\": extra_biology,\n # \"chemistry\": extra_chemistry, should be inserted once rdkit\n # fixes https://github.com/rdkit/rdkit/issues/1812\n \"engineering\": extra_engineering,\n \"spark\": extra_spark,\n },\n python_requires=\">=3.6\",\n long_description=generate_long_description(),\n long_description_content_type=\"text/x-rst\",\n)\n", "path": "setup.py"}, {"content": "try:\n import janitor.xarray\nexcept ImportError:\n pass\n\nfrom .functions import * # noqa: F403, F401\nfrom .math import *\nfrom .ml import get_features_targets as _get_features_targets\nfrom .utils import refactored_function\n\n# from .dataframe import JanitorDataFrame as DataFrame # noqa: F401\n# from .dataframe import JanitorSeries as Series # noqa: F401\n\n\n@refactored_function(\n \"get_features_targets() has moved. Please use ml.get_features_targets().\"\n)\ndef get_features_targets(*args, **kwargs):\n return _get_features_targets(*args, **kwargs)\n\n\n__version__ = \"0.20.1\"\n", "path": "janitor/__init__.py"}], "after_files": [{"content": "import re\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef requirements():\n with open(\"requirements.txt\", \"r+\") as f:\n return f.read()\n\n\ndef generate_long_description() -> str:\n \"\"\"\n Extra chunks from README for PyPI description.\n\n Target chunks must be contained within `.. pypi-doc` pair comments,\n so there must be an even number of comments in README.\n\n :returns: Extracted description from README\n\n \"\"\"\n # Read the contents of README file\n this_directory = Path(__file__).parent\n with open(this_directory / \"README.rst\", encoding=\"utf-8\") as f:\n readme = f.read()\n\n # Find pypi-doc comments in README\n indices = [m.start() for m in re.finditer(\".. pypi-doc\", readme)]\n if len(indices) % 2 != 0:\n raise Exception(\"Odd number of `.. pypi-doc` comments in README\")\n\n # Loop through pairs of comments and save text between pairs\n long_description = \"\"\n for i in range(0, len(indices), 2):\n start_index = indices[i] + 11\n end_index = indices[i + 1]\n long_description += readme[start_index:end_index]\n return long_description\n\n\nextra_spark = [\"pyspark\"]\nextra_biology = [\"biopython\"]\nextra_chemistry = [\"rdkit\"]\nextra_engineering = [\"unyt\"]\nextra_all = extra_biology + extra_engineering + extra_spark\n\nsetup(\n name=\"pyjanitor\",\n version=\"0.20.2\",\n description=\"Tools for cleaning pandas DataFrames\",\n author=\"Eric J. 
Ma\",\n author_email=\"[email protected]\",\n url=\"https://github.com/ericmjl/pyjanitor\",\n license=\"MIT\",\n packages=[\"janitor\"],\n install_requires=requirements(),\n extras_require={\n \"all\": extra_all,\n \"biology\": extra_biology,\n # \"chemistry\": extra_chemistry, should be inserted once rdkit\n # fixes https://github.com/rdkit/rdkit/issues/1812\n \"engineering\": extra_engineering,\n \"spark\": extra_spark,\n },\n python_requires=\">=3.6\",\n long_description=generate_long_description(),\n long_description_content_type=\"text/x-rst\",\n)\n", "path": "setup.py"}, {"content": "try:\n import janitor.xarray\nexcept ImportError:\n pass\n\nfrom .functions import * # noqa: F403, F401\nfrom .math import *\nfrom .ml import get_features_targets as _get_features_targets\nfrom .utils import refactored_function\n\n# from .dataframe import JanitorDataFrame as DataFrame # noqa: F401\n# from .dataframe import JanitorSeries as Series # noqa: F401\n\n\n@refactored_function(\n \"get_features_targets() has moved. Please use ml.get_features_targets().\"\n)\ndef get_features_targets(*args, **kwargs):\n return _get_features_targets(*args, **kwargs)\n\n\n__version__ = \"0.20.2\"\n", "path": "janitor/__init__.py"}]} | 1,257 | 184 |
gh_patches_debug_23123 | rasdani/github-patches | git_diff | streamlink__streamlink-5762 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
plugins.vidio: 403 Client Error on stream token acquirement
### Checklist
- [X] This is a [plugin issue](https://streamlink.github.io/plugins.html) and not [a different kind of issue](https://github.com/streamlink/streamlink/issues/new/choose)
- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)
- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)
- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)
### Streamlink version
Unable to open URL: https://www.vidio.com/live/204/tokens (403 Client Error: Forbidden for url: https://www.vidio.com/live/204/tokens?type=hls)
### Description
The live stream: https://www.vidio.com/live/204-sctv
The output: https://www.vidio.com/live/204/tokens (403 Client Error: Forbidden for url: https://www.vidio.com/live/204/tokens?type=hls)

Note that the token URL uses only the numeric ID and is missing the `-sctv` slug from the channel URL.
### Debug log
```text
streamlink https://www.vidio.com/live/204-sctv best
[cli][info] Found matching plugin vidio for URL https://www.vidio.com/live/204-sctv
error: Unable to open URL: https://www.vidio.com/live/204/tokens (403 Client Error: Forbidden for url: https://www.vidio.com/live/204/tokens?type=hls)
```
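
For what it's worth, the 403 can be reproduced outside of streamlink with a bare POST — a minimal sketch, assuming `requests` is installed and using the channel ID from the report:

```python
# Standalone reproduction sketch; not part of the plugin.
import requests

resp = requests.post(
    "https://www.vidio.com/live/204/tokens",
    params={"type": "hls"},
    headers={"Referer": "https://www.vidio.com/live/204-sctv"},
)
print(resp.status_code)  # 403 at the time of the report
```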
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/streamlink/plugins/vidio.py`
Content:
```
1 """
2 $description Indonesian & international live TV channels and video on-demand service. OTT service from Vidio.
3 $url vidio.com
4 $type live, vod
5 """
6 import logging
7 import re
8 from urllib.parse import urlsplit, urlunsplit
9
10 from streamlink.plugin import Plugin, pluginmatcher
11 from streamlink.plugin.api import validate
12 from streamlink.stream.dash import DASHStream
13 from streamlink.stream.hls import HLSStream
14
15
16 log = logging.getLogger(__name__)
17
18
19 @pluginmatcher(re.compile(
20 r"https?://(?:www\.)?vidio\.com/",
21 ))
22 class Vidio(Plugin):
23 tokens_url = "https://www.vidio.com/live/{id}/tokens"
24
25 def _get_stream_token(self, stream_id, stream_type):
26 log.debug("Getting stream token")
27 return self.session.http.post(
28 self.tokens_url.format(id=stream_id),
29 params={"type": stream_type},
30 headers={"Referer": self.url},
31 schema=validate.Schema(
32 validate.parse_json(),
33 {"token": str},
34 validate.get("token"),
35 ),
36 )
37
38 def _get_streams(self):
39 stream_id, has_token, hls_url, dash_url = self.session.http.get(
40 self.url,
41 schema=validate.Schema(
42 validate.parse_html(),
43 validate.xml_find(".//*[@data-video-id]"),
44 validate.union((
45 validate.get("data-video-id"),
46 validate.all(
47 validate.get("data-video-has-token"),
48 validate.transform(lambda val: val and val != "false"),
49 ),
50 validate.get("data-vjs-clip-hls-url"),
51 validate.get("data-vjs-clip-dash-url"),
52 )),
53 ),
54 )
55
56 if dash_url and has_token:
57 token = self._get_stream_token(stream_id, "dash")
58 parsed = urlsplit(dash_url)
59 dash_url = urlunsplit(parsed._replace(path=f"{token}{parsed.path}"))
60 return DASHStream.parse_manifest(
61 self.session,
62 dash_url,
63 headers={"Referer": "https://www.vidio.com/"},
64 )
65
66 if not hls_url:
67 return
68
69 if has_token:
70 token = self._get_stream_token(stream_id, "hls")
71 hls_url = f"{hls_url}?{token}"
72
73 return HLSStream.parse_variant_playlist(
74 self.session,
75 hls_url,
76 headers={"Referer": "https://www.vidio.com/"},
77 )
78
79
80 __plugin__ = Vidio
81
```
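
For context on how the token from `_get_stream_token` is consumed (derived from the listing above): the HLS branch appends it as a query string, while the DASH branch prepends it to the manifest path. A standalone illustration with placeholder values:

```python
# Illustration only; URLs and token are placeholders, not real Vidio values.
from urllib.parse import urlsplit, urlunsplit

token = "abc123"
hls_url = "https://example.invalid/live/master.m3u8"
dash_url = "https://example.invalid/live/manifest.mpd"

print(f"{hls_url}?{token}")  # HLS: token appended as a query string
parsed = urlsplit(dash_url)
# DASH: token inserted as a path prefix, exactly as _get_streams() does
print(urlunsplit(parsed._replace(path=f"{token}{parsed.path}")))
```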
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/streamlink/plugins/vidio.py b/src/streamlink/plugins/vidio.py
--- a/src/streamlink/plugins/vidio.py
+++ b/src/streamlink/plugins/vidio.py
@@ -6,6 +6,7 @@
import logging
import re
from urllib.parse import urlsplit, urlunsplit
+from uuid import uuid4
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
@@ -17,7 +18,7 @@
@pluginmatcher(re.compile(
- r"https?://(?:www\.)?vidio\.com/",
+ r"https?://(?:www\.)?vidio\.com/.+",
))
class Vidio(Plugin):
tokens_url = "https://www.vidio.com/live/{id}/tokens"
@@ -28,6 +29,10 @@
self.tokens_url.format(id=stream_id),
params={"type": stream_type},
headers={"Referer": self.url},
+ cookies={
+ "ahoy_visit": str(uuid4()),
+ "ahoy_visitor": str(uuid4()),
+ },
schema=validate.Schema(
validate.parse_json(),
{"token": str},
| {"golden_diff": "diff --git a/src/streamlink/plugins/vidio.py b/src/streamlink/plugins/vidio.py\n--- a/src/streamlink/plugins/vidio.py\n+++ b/src/streamlink/plugins/vidio.py\n@@ -6,6 +6,7 @@\n import logging\n import re\n from urllib.parse import urlsplit, urlunsplit\n+from uuid import uuid4\n \n from streamlink.plugin import Plugin, pluginmatcher\n from streamlink.plugin.api import validate\n@@ -17,7 +18,7 @@\n \n \n @pluginmatcher(re.compile(\n- r\"https?://(?:www\\.)?vidio\\.com/\",\n+ r\"https?://(?:www\\.)?vidio\\.com/.+\",\n ))\n class Vidio(Plugin):\n tokens_url = \"https://www.vidio.com/live/{id}/tokens\"\n@@ -28,6 +29,10 @@\n self.tokens_url.format(id=stream_id),\n params={\"type\": stream_type},\n headers={\"Referer\": self.url},\n+ cookies={\n+ \"ahoy_visit\": str(uuid4()),\n+ \"ahoy_visitor\": str(uuid4()),\n+ },\n schema=validate.Schema(\n validate.parse_json(),\n {\"token\": str},\n", "issue": "plugins.vidio: 403 Client Error on stream token acquirement\n### Checklist\n\n- [X] This is a [plugin issue](https://streamlink.github.io/plugins.html) and not [a different kind of issue](https://github.com/streamlink/streamlink/issues/new/choose)\n- [X] [I have read the contribution guidelines](https://github.com/streamlink/streamlink/blob/master/CONTRIBUTING.md#contributing-to-streamlink)\n- [X] [I have checked the list of open and recently closed plugin issues](https://github.com/streamlink/streamlink/issues?q=is%3Aissue+label%3A%22plugin+issue%22)\n- [X] [I have checked the commit log of the master branch](https://github.com/streamlink/streamlink/commits/master)\n\n### Streamlink version\n\nUnable to open URL: https://www.vidio.com/live/204/tokens (403 Client Error: Forbidden for url: https://www.vidio.com/live/204/tokens?type=hls)\n\n### Description\n\nThe live stream: https://www.vidio.com/live/204-sctv\r\nthe output: https://www.vidio.com/live/204/tokens (403 Client Error: Forbidden for url: https://www.vidio.com/live/204/tokens?type=hls)\r\n\r\nIt is missing sctv\n\n### Debug log\n\n```text\nstreamlink https://www.vidio.com/live/204-sctv best\r\n[cli][info] Found matching plugin vidio for URL https://www.vidio.com/live/204-sctv\r\nerror: Unable to open URL: https://www.vidio.com/live/204/tokens (403 Client Error: Forbidden for url: https://www.vidio.com/live/204/tokens?type=hls)\n```\n\n", "before_files": [{"content": "\"\"\"\n$description Indonesian & international live TV channels and video on-demand service. 
OTT service from Vidio.\n$url vidio.com\n$type live, vod\n\"\"\"\nimport logging\nimport re\nfrom urllib.parse import urlsplit, urlunsplit\n\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream.dash import DASHStream\nfrom streamlink.stream.hls import HLSStream\n\n\nlog = logging.getLogger(__name__)\n\n\n@pluginmatcher(re.compile(\n r\"https?://(?:www\\.)?vidio\\.com/\",\n))\nclass Vidio(Plugin):\n tokens_url = \"https://www.vidio.com/live/{id}/tokens\"\n\n def _get_stream_token(self, stream_id, stream_type):\n log.debug(\"Getting stream token\")\n return self.session.http.post(\n self.tokens_url.format(id=stream_id),\n params={\"type\": stream_type},\n headers={\"Referer\": self.url},\n schema=validate.Schema(\n validate.parse_json(),\n {\"token\": str},\n validate.get(\"token\"),\n ),\n )\n\n def _get_streams(self):\n stream_id, has_token, hls_url, dash_url = self.session.http.get(\n self.url,\n schema=validate.Schema(\n validate.parse_html(),\n validate.xml_find(\".//*[@data-video-id]\"),\n validate.union((\n validate.get(\"data-video-id\"),\n validate.all(\n validate.get(\"data-video-has-token\"),\n validate.transform(lambda val: val and val != \"false\"),\n ),\n validate.get(\"data-vjs-clip-hls-url\"),\n validate.get(\"data-vjs-clip-dash-url\"),\n )),\n ),\n )\n\n if dash_url and has_token:\n token = self._get_stream_token(stream_id, \"dash\")\n parsed = urlsplit(dash_url)\n dash_url = urlunsplit(parsed._replace(path=f\"{token}{parsed.path}\"))\n return DASHStream.parse_manifest(\n self.session,\n dash_url,\n headers={\"Referer\": \"https://www.vidio.com/\"},\n )\n\n if not hls_url:\n return\n\n if has_token:\n token = self._get_stream_token(stream_id, \"hls\")\n hls_url = f\"{hls_url}?{token}\"\n\n return HLSStream.parse_variant_playlist(\n self.session,\n hls_url,\n headers={\"Referer\": \"https://www.vidio.com/\"},\n )\n\n\n__plugin__ = Vidio\n", "path": "src/streamlink/plugins/vidio.py"}], "after_files": [{"content": "\"\"\"\n$description Indonesian & international live TV channels and video on-demand service. 
OTT service from Vidio.\n$url vidio.com\n$type live, vod\n\"\"\"\nimport logging\nimport re\nfrom urllib.parse import urlsplit, urlunsplit\nfrom uuid import uuid4\n\nfrom streamlink.plugin import Plugin, pluginmatcher\nfrom streamlink.plugin.api import validate\nfrom streamlink.stream.dash import DASHStream\nfrom streamlink.stream.hls import HLSStream\n\n\nlog = logging.getLogger(__name__)\n\n\n@pluginmatcher(re.compile(\n r\"https?://(?:www\\.)?vidio\\.com/.+\",\n))\nclass Vidio(Plugin):\n tokens_url = \"https://www.vidio.com/live/{id}/tokens\"\n\n def _get_stream_token(self, stream_id, stream_type):\n log.debug(\"Getting stream token\")\n return self.session.http.post(\n self.tokens_url.format(id=stream_id),\n params={\"type\": stream_type},\n headers={\"Referer\": self.url},\n cookies={\n \"ahoy_visit\": str(uuid4()),\n \"ahoy_visitor\": str(uuid4()),\n },\n schema=validate.Schema(\n validate.parse_json(),\n {\"token\": str},\n validate.get(\"token\"),\n ),\n )\n\n def _get_streams(self):\n stream_id, has_token, hls_url, dash_url = self.session.http.get(\n self.url,\n schema=validate.Schema(\n validate.parse_html(),\n validate.xml_find(\".//*[@data-video-id]\"),\n validate.union((\n validate.get(\"data-video-id\"),\n validate.all(\n validate.get(\"data-video-has-token\"),\n validate.transform(lambda val: val and val != \"false\"),\n ),\n validate.get(\"data-vjs-clip-hls-url\"),\n validate.get(\"data-vjs-clip-dash-url\"),\n )),\n ),\n )\n\n if dash_url and has_token:\n token = self._get_stream_token(stream_id, \"dash\")\n parsed = urlsplit(dash_url)\n dash_url = urlunsplit(parsed._replace(path=f\"{token}{parsed.path}\"))\n return DASHStream.parse_manifest(\n self.session,\n dash_url,\n headers={\"Referer\": \"https://www.vidio.com/\"},\n )\n\n if not hls_url:\n return\n\n if has_token:\n token = self._get_stream_token(stream_id, \"hls\")\n hls_url = f\"{hls_url}?{token}\"\n\n return HLSStream.parse_variant_playlist(\n self.session,\n hls_url,\n headers={\"Referer\": \"https://www.vidio.com/\"},\n )\n\n\n__plugin__ = Vidio\n", "path": "src/streamlink/plugins/vidio.py"}]} | 1,360 | 260 |
gh_patches_debug_27102 | rasdani/github-patches | git_diff | fossasia__open-event-server-5627 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
copied events have no organizer
**Describe the bug**
<!-- A clear and concise description of what the bug is. -->
Currently, copied events have no associated event roles and are shown with no organizers
**To Reproduce**
Steps to reproduce the behavior:
1. Go to any event
2. Click on copy
3. Go to Admin/Events
4. See error

**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
The copied event should retain the original organizer
**Additional details (please complete the following information):**
- OS: [e.g. MacOS, Ubuntu, CentOS] Ubuntu
- Python Version [e.g. `3.5`, `3.6`] 3.5
**Additional context**
<!-- Add any other context about the problem here. -->
Working on it
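
A minimal sketch of carrying the roles over, reusing the expunge/`make_transient` pattern that `create_event_copy` already applies to tickets and tracks (it assumes the `UsersEventsRoles` model in `app.models.users_events_role`; the actual fix may differ):

```python
# Sketch only; mirrors the existing copy pattern in create_event_copy().
from sqlalchemy.orm import make_transient

from app.models import db
from app.models.users_events_role import UsersEventsRoles


def copy_event_roles(source_event_id, new_event):
    """Re-attach every role of the source event to the freshly copied event."""
    for user_role in UsersEventsRoles.query.filter_by(event_id=source_event_id).all():
        db.session.expunge(user_role)  # detach the loaded row from the session
        make_transient(user_role)      # drop its persistent identity
        user_role.event_id = new_event.id
        delattr(user_role, "id")       # force a fresh primary key on insert
        db.session.add(user_role)
    db.session.commit()
```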
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/api/event_copy.py`
Content:
```
1 from flask import jsonify, Blueprint, abort, make_response
2 from sqlalchemy.orm import make_transient
3
4 from app.api.helpers.db import safe_query
5 from app.api.helpers.files import create_save_resized_image
6 from app.api.helpers.permission_manager import has_access
7 from app.models.custom_form import CustomForms
8 from app.models.discount_code import DiscountCode
9 from app.models.event import Event, get_new_event_identifier
10 from app.models import db
11 from app.models.microlocation import Microlocation
12 from app.models.social_link import SocialLink
13 from app.models.speakers_call import SpeakersCall
14 from app.models.sponsor import Sponsor
15 from app.models.ticket import Ticket
16 from app.models.track import Track
17
18 event_copy = Blueprint('event_copy', __name__, url_prefix='/v1/events')
19
20
21 @event_copy.route('/<identifier>/copy', methods=['POST'])
22 def create_event_copy(identifier):
23 id = 'identifier'
24
25 if identifier.isdigit():
26 id = 'id'
27
28 event = safe_query(db, Event, id, identifier, 'event_' + id)
29
30 if not has_access('is_coorganizer', event_id=event.id):
31 return abort(
32 make_response(jsonify(error="Access Forbidden"), 403)
33 )
34 tickets = Ticket.query.filter_by(event_id=event.id).all()
35 social_links = SocialLink.query.filter_by(event_id=event.id).all()
36 sponsors = Sponsor.query.filter_by(event_id=event.id).all()
37 microlocations = Microlocation.query.filter_by(event_id=event.id).all()
38 tracks = Track.query.filter_by(event_id=event.id).all()
39 custom_forms = CustomForms.query.filter_by(event_id=event.id).all()
40 discount_codes = DiscountCode.query.filter_by(event_id=event.id).all()
41 speaker_calls = SpeakersCall.query.filter_by(event_id=event.id).all()
42
43 db.session.expunge(event) # expunge the object from session
44 make_transient(event)
45 delattr(event, 'id')
46 event.identifier = get_new_event_identifier()
47 db.session.add(event)
48 db.session.commit()
49
50 # Removes access_codes, order_tickets, ticket_tags for the new tickets created.
51 for ticket in tickets:
52 ticket_id = ticket.id
53 db.session.expunge(ticket) # expunge the object from session
54 make_transient(ticket)
55 ticket.event_id = event.id
56 delattr(ticket, 'id')
57 db.session.add(ticket)
58 db.session.commit()
59
60 for link in social_links:
61 link_id = link.id
62 db.session.expunge(link) # expunge the object from session
63 make_transient(link)
64 link.event_id = event.id
65 delattr(link, 'id')
66 db.session.add(link)
67 db.session.commit()
68
69 for sponsor in sponsors:
70 sponsor_id = sponsor.id
71 db.session.expunge(sponsor) # expunge the object from session
72 make_transient(sponsor)
73 sponsor.event_id = event.id
74 logo_url = create_save_resized_image(image_file=sponsor.logo_url, resize=False)
75 delattr(sponsor, 'id')
76 sponsor.logo_url = logo_url
77 db.session.add(sponsor)
78 db.session.commit()
79
80 for location in microlocations:
81 location_id = location.id
82 db.session.expunge(location) # expunge the object from session
83 make_transient(location)
84 location.event_id = event.id
85 delattr(location, 'id')
86 db.session.add(location)
87 db.session.commit()
88
89 # No sessions are copied for new tracks
90 for track in tracks:
91 track_id = track.id
92 db.session.expunge(track) # expunge the object from session
93 make_transient(track)
94 track.event_id = event.id
95 delattr(track, 'id')
96 db.session.add(track)
97 db.session.commit()
98
99 for call in speaker_calls:
100 call_id = call.id
101 db.session.expunge(call) # expunge the object from session
102 make_transient(call)
103 call.event_id = event.id
104 delattr(call, 'id')
105 db.session.add(call)
106 db.session.commit()
107
108 for code in discount_codes:
109 code_id = code.id
110 db.session.expunge(code) # expunge the object from session
111 make_transient(code)
112 code.event_id = event.id
113 delattr(code, 'id')
114 db.session.add(code)
115 db.session.commit()
116
117 for form in custom_forms:
118 form_id = form.id
119 db.session.expunge(form) # expunge the object from session
120 make_transient(form)
121 form.event_id = event.id
122 delattr(form, 'id')
123 db.session.add(form)
124 db.session.commit()
125
126 return jsonify({
127 'id': event.id,
128 'identifier': event.identifier,
129 "copied": True
130 })
131
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/app/api/event_copy.py b/app/api/event_copy.py
--- a/app/api/event_copy.py
+++ b/app/api/event_copy.py
@@ -14,6 +14,7 @@
from app.models.sponsor import Sponsor
from app.models.ticket import Ticket
from app.models.track import Track
+from app.models.users_events_role import UsersEventsRoles
event_copy = Blueprint('event_copy', __name__, url_prefix='/v1/events')
@@ -39,6 +40,7 @@
custom_forms = CustomForms.query.filter_by(event_id=event.id).all()
discount_codes = DiscountCode.query.filter_by(event_id=event.id).all()
speaker_calls = SpeakersCall.query.filter_by(event_id=event.id).all()
+ user_event_roles = UsersEventsRoles.query.filter_by(event_id=event.id).all()
db.session.expunge(event) # expunge the object from session
make_transient(event)
@@ -123,6 +125,14 @@
db.session.add(form)
db.session.commit()
+ for user_role in user_event_roles:
+ db.session.expunge(user_role)
+ make_transient(user_role)
+ user_role.event_id = event.id
+ delattr(user_role, 'id')
+ db.session.add(user_role)
+ db.session.commit()
+
return jsonify({
'id': event.id,
'identifier': event.identifier,
| {"golden_diff": "diff --git a/app/api/event_copy.py b/app/api/event_copy.py\n--- a/app/api/event_copy.py\n+++ b/app/api/event_copy.py\n@@ -14,6 +14,7 @@\n from app.models.sponsor import Sponsor\n from app.models.ticket import Ticket\n from app.models.track import Track\n+from app.models.users_events_role import UsersEventsRoles\n \n event_copy = Blueprint('event_copy', __name__, url_prefix='/v1/events')\n \n@@ -39,6 +40,7 @@\n custom_forms = CustomForms.query.filter_by(event_id=event.id).all()\n discount_codes = DiscountCode.query.filter_by(event_id=event.id).all()\n speaker_calls = SpeakersCall.query.filter_by(event_id=event.id).all()\n+ user_event_roles = UsersEventsRoles.query.filter_by(event_id=event.id).all()\n \n db.session.expunge(event) # expunge the object from session\n make_transient(event)\n@@ -123,6 +125,14 @@\n db.session.add(form)\n db.session.commit()\n \n+ for user_role in user_event_roles:\n+ db.session.expunge(user_role)\n+ make_transient(user_role)\n+ user_role.event_id = event.id\n+ delattr(user_role, 'id')\n+ db.session.add(user_role)\n+ db.session.commit()\n+\n return jsonify({\n 'id': event.id,\n 'identifier': event.identifier,\n", "issue": "copied events have no organizer\n**Describe the bug**\r\n<!-- A clear and concise description of what the bug is. -->\r\nCurrently, copied events have no associated event roles and are being showed with no organizers\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Go to any event\r\n2. Click on copy\r\n3. Go to Admin/Events \r\n4. See error\r\n\r\n\r\n\r\n**Expected behavior**\r\n<!-- A clear and concise description of what you expected to happen. -->\r\nThe copied event should also be organized by previous organiser himself\r\n\r\n**Additional details (please complete the following information):**\r\n - OS: [e.g. MacOS, Ubuntu, CentOS] Ubuntu\r\n - Python Version [e.g. `3.5`, `3.6`] 3.5\r\n\r\n**Additional context**\r\n<!-- Add any other context about the problem here. 
-->\r\nWorking on it\n", "before_files": [{"content": "from flask import jsonify, Blueprint, abort, make_response\nfrom sqlalchemy.orm import make_transient\n\nfrom app.api.helpers.db import safe_query\nfrom app.api.helpers.files import create_save_resized_image\nfrom app.api.helpers.permission_manager import has_access\nfrom app.models.custom_form import CustomForms\nfrom app.models.discount_code import DiscountCode\nfrom app.models.event import Event, get_new_event_identifier\nfrom app.models import db\nfrom app.models.microlocation import Microlocation\nfrom app.models.social_link import SocialLink\nfrom app.models.speakers_call import SpeakersCall\nfrom app.models.sponsor import Sponsor\nfrom app.models.ticket import Ticket\nfrom app.models.track import Track\n\nevent_copy = Blueprint('event_copy', __name__, url_prefix='/v1/events')\n\n\n@event_copy.route('/<identifier>/copy', methods=['POST'])\ndef create_event_copy(identifier):\n id = 'identifier'\n\n if identifier.isdigit():\n id = 'id'\n\n event = safe_query(db, Event, id, identifier, 'event_' + id)\n\n if not has_access('is_coorganizer', event_id=event.id):\n return abort(\n make_response(jsonify(error=\"Access Forbidden\"), 403)\n )\n tickets = Ticket.query.filter_by(event_id=event.id).all()\n social_links = SocialLink.query.filter_by(event_id=event.id).all()\n sponsors = Sponsor.query.filter_by(event_id=event.id).all()\n microlocations = Microlocation.query.filter_by(event_id=event.id).all()\n tracks = Track.query.filter_by(event_id=event.id).all()\n custom_forms = CustomForms.query.filter_by(event_id=event.id).all()\n discount_codes = DiscountCode.query.filter_by(event_id=event.id).all()\n speaker_calls = SpeakersCall.query.filter_by(event_id=event.id).all()\n\n db.session.expunge(event) # expunge the object from session\n make_transient(event)\n delattr(event, 'id')\n event.identifier = get_new_event_identifier()\n db.session.add(event)\n db.session.commit()\n\n # Removes access_codes, order_tickets, ticket_tags for the new tickets created.\n for ticket in tickets:\n ticket_id = ticket.id\n db.session.expunge(ticket) # expunge the object from session\n make_transient(ticket)\n ticket.event_id = event.id\n delattr(ticket, 'id')\n db.session.add(ticket)\n db.session.commit()\n\n for link in social_links:\n link_id = link.id\n db.session.expunge(link) # expunge the object from session\n make_transient(link)\n link.event_id = event.id\n delattr(link, 'id')\n db.session.add(link)\n db.session.commit()\n\n for sponsor in sponsors:\n sponsor_id = sponsor.id\n db.session.expunge(sponsor) # expunge the object from session\n make_transient(sponsor)\n sponsor.event_id = event.id\n logo_url = create_save_resized_image(image_file=sponsor.logo_url, resize=False)\n delattr(sponsor, 'id')\n sponsor.logo_url = logo_url\n db.session.add(sponsor)\n db.session.commit()\n\n for location in microlocations:\n location_id = location.id\n db.session.expunge(location) # expunge the object from session\n make_transient(location)\n location.event_id = event.id\n delattr(location, 'id')\n db.session.add(location)\n db.session.commit()\n\n # No sessions are copied for new tracks\n for track in tracks:\n track_id = track.id\n db.session.expunge(track) # expunge the object from session\n make_transient(track)\n track.event_id = event.id\n delattr(track, 'id')\n db.session.add(track)\n db.session.commit()\n\n for call in speaker_calls:\n call_id = call.id\n db.session.expunge(call) # expunge the object from session\n make_transient(call)\n call.event_id = 
event.id\n delattr(call, 'id')\n db.session.add(call)\n db.session.commit()\n\n for code in discount_codes:\n code_id = code.id\n db.session.expunge(code) # expunge the object from session\n make_transient(code)\n code.event_id = event.id\n delattr(code, 'id')\n db.session.add(code)\n db.session.commit()\n\n for form in custom_forms:\n form_id = form.id\n db.session.expunge(form) # expunge the object from session\n make_transient(form)\n form.event_id = event.id\n delattr(form, 'id')\n db.session.add(form)\n db.session.commit()\n\n return jsonify({\n 'id': event.id,\n 'identifier': event.identifier,\n \"copied\": True\n })\n", "path": "app/api/event_copy.py"}], "after_files": [{"content": "from flask import jsonify, Blueprint, abort, make_response\nfrom sqlalchemy.orm import make_transient\n\nfrom app.api.helpers.db import safe_query\nfrom app.api.helpers.files import create_save_resized_image\nfrom app.api.helpers.permission_manager import has_access\nfrom app.models.custom_form import CustomForms\nfrom app.models.discount_code import DiscountCode\nfrom app.models.event import Event, get_new_event_identifier\nfrom app.models import db\nfrom app.models.microlocation import Microlocation\nfrom app.models.social_link import SocialLink\nfrom app.models.speakers_call import SpeakersCall\nfrom app.models.sponsor import Sponsor\nfrom app.models.ticket import Ticket\nfrom app.models.track import Track\nfrom app.models.users_events_role import UsersEventsRoles\n\nevent_copy = Blueprint('event_copy', __name__, url_prefix='/v1/events')\n\n\n@event_copy.route('/<identifier>/copy', methods=['POST'])\ndef create_event_copy(identifier):\n id = 'identifier'\n\n if identifier.isdigit():\n id = 'id'\n\n event = safe_query(db, Event, id, identifier, 'event_' + id)\n\n if not has_access('is_coorganizer', event_id=event.id):\n return abort(\n make_response(jsonify(error=\"Access Forbidden\"), 403)\n )\n tickets = Ticket.query.filter_by(event_id=event.id).all()\n social_links = SocialLink.query.filter_by(event_id=event.id).all()\n sponsors = Sponsor.query.filter_by(event_id=event.id).all()\n microlocations = Microlocation.query.filter_by(event_id=event.id).all()\n tracks = Track.query.filter_by(event_id=event.id).all()\n custom_forms = CustomForms.query.filter_by(event_id=event.id).all()\n discount_codes = DiscountCode.query.filter_by(event_id=event.id).all()\n speaker_calls = SpeakersCall.query.filter_by(event_id=event.id).all()\n user_event_roles = UsersEventsRoles.query.filter_by(event_id=event.id).all()\n\n db.session.expunge(event) # expunge the object from session\n make_transient(event)\n delattr(event, 'id')\n event.identifier = get_new_event_identifier()\n db.session.add(event)\n db.session.commit()\n\n # Removes access_codes, order_tickets, ticket_tags for the new tickets created.\n for ticket in tickets:\n ticket_id = ticket.id\n db.session.expunge(ticket) # expunge the object from session\n make_transient(ticket)\n ticket.event_id = event.id\n delattr(ticket, 'id')\n db.session.add(ticket)\n db.session.commit()\n\n for link in social_links:\n link_id = link.id\n db.session.expunge(link) # expunge the object from session\n make_transient(link)\n link.event_id = event.id\n delattr(link, 'id')\n db.session.add(link)\n db.session.commit()\n\n for sponsor in sponsors:\n sponsor_id = sponsor.id\n db.session.expunge(sponsor) # expunge the object from session\n make_transient(sponsor)\n sponsor.event_id = event.id\n logo_url = create_save_resized_image(image_file=sponsor.logo_url, resize=False)\n 
delattr(sponsor, 'id')\n sponsor.logo_url = logo_url\n db.session.add(sponsor)\n db.session.commit()\n\n for location in microlocations:\n location_id = location.id\n db.session.expunge(location) # expunge the object from session\n make_transient(location)\n location.event_id = event.id\n delattr(location, 'id')\n db.session.add(location)\n db.session.commit()\n\n # No sessions are copied for new tracks\n for track in tracks:\n track_id = track.id\n db.session.expunge(track) # expunge the object from session\n make_transient(track)\n track.event_id = event.id\n delattr(track, 'id')\n db.session.add(track)\n db.session.commit()\n\n for call in speaker_calls:\n call_id = call.id\n db.session.expunge(call) # expunge the object from session\n make_transient(call)\n call.event_id = event.id\n delattr(call, 'id')\n db.session.add(call)\n db.session.commit()\n\n for code in discount_codes:\n code_id = code.id\n db.session.expunge(code) # expunge the object from session\n make_transient(code)\n code.event_id = event.id\n delattr(code, 'id')\n db.session.add(code)\n db.session.commit()\n\n for form in custom_forms:\n form_id = form.id\n db.session.expunge(form) # expunge the object from session\n make_transient(form)\n form.event_id = event.id\n delattr(form, 'id')\n db.session.add(form)\n db.session.commit()\n\n for user_role in user_event_roles:\n db.session.expunge(user_role)\n make_transient(user_role)\n user_role.event_id = event.id\n delattr(user_role, 'id')\n db.session.add(user_role)\n db.session.commit()\n\n return jsonify({\n 'id': event.id,\n 'identifier': event.identifier,\n \"copied\": True\n })\n", "path": "app/api/event_copy.py"}]} | 1,821 | 304 |
gh_patches_debug_40419 | rasdani/github-patches | git_diff | cowrie__cowrie-1564 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
/etc/shadow file contents are incorrect
**Describe the bug**
In the latest honeypot built from the master branch, `cat /etc/shadow` outputs a README file instead of password contents. It looks like commit 937402ece56a4d272713ea38be32c6dc4191390a replaced the file contents.
**To Reproduce**
- enter honeypot as root
- Run: cat /etc/shadow
**Expected behavior**
Expecting output in shadow-file format, not a README
**Server (please complete the following information):**
- Using the docker image built on 14 may 2021
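
A quick way to confirm the regression against a checkout — assuming the fake filesystem contents live under `honeyfs/etc/shadow`, which is an assumption about cowrie's default layout — a sketch:

```python
# Sanity-check sketch; the honeyfs path is an assumption about the default layout.
from pathlib import Path

first = Path("honeyfs/etc/shadow").read_text().splitlines()[0]
# A healthy shadow entry has nine colon-separated fields,
# e.g. "root:$6$salt$hash:17000:0:99999:7:::"
assert first.startswith("root:") and first.count(":") == 8, "shadow contents were replaced"
```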
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 from setuptools import setup
4
5 setup(
6 name="Cowrie",
7 description="Cowrie SSH/Telnet Honeypot.",
8 long_description="Cowrie SSH/Telnet Honeypot.",
9 author="Michel Oosterhof",
10 author_email="[email protected]",
11 maintainer="Michel Oosterhof",
12 maintainer_email="[email protected]",
13 keywords="ssh telnet honeypot",
14 platforms="Unix, Mac OSX",
15 license="BSD",
16 url="https://www.cowrie.org/",
17 packages=["cowrie", "twisted"],
18 include_package_data=True,
19 package_dir={"": "src"},
20 package_data={"": ["*.md"]},
21 use_incremental=True,
22 python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4",
23 scripts=["bin/fsctl", "bin/asciinema", "bin/cowrie", "bin/createfs", "bin/playlog"],
24 classifiers=[
25 "Development Status :: 5 - Production/Stable",
26 "Environment :: No Input/Output (Daemon)",
27 "Framework :: Twisted",
28 "Intended Audience :: Developers",
29 "Intended Audience :: System Administrators",
30 "License :: OSI Approved :: BSD License",
31 "Operating System :: MacOS :: MacOS X",
32 "Operating System :: POSIX :: Linux",
33 "Operating System :: POSIX",
34 "Programming Language :: Python",
35 "Topic :: Security",
36 ],
37 setup_requires=["incremental", "click"],
38 install_requires=[
39 "twisted>=17.1.0",
40 "cryptography>=0.9.1",
41 "configparser",
42 "pyopenssl",
43 "pyparsing",
44 "incremental",
45 "packaging",
46 "appdirs>=1.4.0",
47 "python-dateutil",
48 "service_identity>=14.0.0",
49 ],
50 extras_require={
51 "csirtg": ["csirtgsdk>=0.0.0a17"],
52 "dshield": ["requests"],
53 "elasticsearch": ["pyes"],
54 "mysql": ["mysqlclient"],
55 "mongodb": ["pymongo"],
56 "rethinkdblog": ["rethinkdb"],
57 "s3": ["botocore"],
58 "slack": ["slackclient"],
59 "influxdb": ["influxdb"],
60 },
61 )
62
```
Path: `src/cowrie/output/csirtg.py`
Content:
```
1 import os
2 from datetime import datetime
3
4 from csirtgsdk.client import Client
5 from csirtgsdk.indicator import Indicator
6
7 from twisted.python import log
8
9 import cowrie.core.output
10 from cowrie.core.config import CowrieConfig
11
12 USERNAME = os.environ.get("CSIRTG_USER")
13 FEED = os.environ.get("CSIRTG_FEED")
14 TOKEN = os.environ.get("CSIRG_TOKEN")
15 DESCRIPTION = os.environ.get("CSIRTG_DESCRIPTION", "random scanning activity")
16
17
18 class Output(cowrie.core.output.Output):
19 """
20 csirtg output
21 """
22
23 def start(
24 self,
25 ):
26 self.user = CowrieConfig.get("output_csirtg", "username") or USERNAME
27 self.feed = CowrieConfig.get("output_csirtg", "feed") or FEED
28 self.token = CowrieConfig.get("output_csirtg", "token") or TOKEN
29 self.description = CowrieConfig.get(
30 "output_csirtg", "description", fallback=DESCRIPTION
31 )
32 self.context = {}
33 self.client = Client(token=self.token)
34
35 def stop(self):
36 pass
37
38 def write(self, e):
39 peerIP = e["src_ip"]
40 ts = e["timestamp"]
41 system = e.get("system", None)
42
43 if system not in [
44 "cowrie.ssh.factory.CowrieSSHFactory",
45 "cowrie.telnet.transport.HoneyPotTelnetFactory",
46 ]:
47 return
48
49 today = str(datetime.now().date())
50
51 if not self.context.get(today):
52 self.context = {}
53 self.context[today] = set()
54
55 key = ",".join([peerIP, system])
56
57 if key in self.context[today]:
58 return
59
60 self.context[today].add(key)
61
62 tags = "scanner,ssh"
63 port = 22
64 if e["system"] == "cowrie.telnet.transport.HoneyPotTelnetFactory":
65 tags = "scanner,telnet"
66 port = 23
67
68 i = {
69 "user": self.user,
70 "feed": self.feed,
71 "indicator": peerIP,
72 "portlist": port,
73 "protocol": "tcp",
74 "tags": tags,
75 "firsttime": ts,
76 "lasttime": ts,
77 "description": self.description,
78 }
79
80 ret = Indicator(self.client, i).submit()
81 log.msg("logged to csirtg {} ".format(ret["location"]))
82
```
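
Aside: the pinned `csirtgsdk` apparently reads its credentials from the environment at import time, so one pattern is to export the token before importing the SDK — a sketch, assuming the `CSIRTG_TOKEN` variable name:

```python
# Sketch of configuring an import-time-credential SDK; the variable name is assumed.
import os

os.environ["CSIRTG_TOKEN"] = "example-token"  # in practice, read this from cowrie.cfg
import csirtgsdk  # noqa: E402  -- deliberately imported after the env var is set
```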
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -36,7 +36,7 @@
],
setup_requires=["incremental", "click"],
install_requires=[
- "twisted>=17.1.0",
+ "twisted==21.1.0",
"cryptography>=0.9.1",
"configparser",
"pyopenssl",
@@ -48,7 +48,7 @@
"service_identity>=14.0.0",
],
extras_require={
- "csirtg": ["csirtgsdk>=0.0.0a17"],
+ "csirtg": ["csirtgsdk==1.1.5"],
"dshield": ["requests"],
"elasticsearch": ["pyes"],
"mysql": ["mysqlclient"],
diff --git a/src/cowrie/output/csirtg.py b/src/cowrie/output/csirtg.py
--- a/src/cowrie/output/csirtg.py
+++ b/src/cowrie/output/csirtg.py
@@ -1,41 +1,50 @@
import os
from datetime import datetime
-from csirtgsdk.client import Client
-from csirtgsdk.indicator import Indicator
-
from twisted.python import log
import cowrie.core.output
from cowrie.core.config import CowrieConfig
-USERNAME = os.environ.get("CSIRTG_USER")
-FEED = os.environ.get("CSIRTG_FEED")
-TOKEN = os.environ.get("CSIRG_TOKEN")
-DESCRIPTION = os.environ.get("CSIRTG_DESCRIPTION", "random scanning activity")
+token = CowrieConfig.get("output_csirtg", "token", fallback="a1b2c3d4")
+if token == "a1b2c3d4":
+ log.msg("output_csirtg: token not found in configuration file")
+ exit(1)
+
+os.environ["CSIRTG_TOKEN"] = token
+import csirtgsdk # noqa: E402
class Output(cowrie.core.output.Output):
"""
- csirtg output
+ CSIRTG output
"""
- def start(
- self,
- ):
- self.user = CowrieConfig.get("output_csirtg", "username") or USERNAME
- self.feed = CowrieConfig.get("output_csirtg", "feed") or FEED
- self.token = CowrieConfig.get("output_csirtg", "token") or TOKEN
- self.description = CowrieConfig.get(
- "output_csirtg", "description", fallback=DESCRIPTION
- )
+ def start(self):
+ """
+ Start the output module.
+ Note that csirtsdk is imported here because it reads CSIRTG_TOKEN on import
+ Cowrie sets this environment variable.
+ """
+ self.user = CowrieConfig.get("output_csirtg", "username")
+ self.feed = CowrieConfig.get("output_csirtg", "feed")
+ self.debug = CowrieConfig.getboolean("output_csirtg", "debug", fallback=False)
+ self.description = CowrieConfig.get("output_csirtg", "description")
+
self.context = {}
- self.client = Client(token=self.token)
+ # self.client = csirtgsdk.client.Client()
def stop(self):
pass
def write(self, e):
+ """
+ Only pass on connection events
+ """
+ if e["eventid"] == "cowrie.session.connect":
+ self.submitIp(e)
+
+ def submitIp(self, e):
peerIP = e["src_ip"]
ts = e["timestamp"]
system = e.get("system", None)
@@ -77,5 +86,12 @@
"description": self.description,
}
- ret = Indicator(self.client, i).submit()
- log.msg("logged to csirtg {} ".format(ret["location"]))
+ if self.debug is True:
+ log.msg(f"output_csirtg: Submitting {i!r} to CSIRTG")
+
+ ind = csirtgsdk.indicator.Indicator(i).submit()
+
+ if self.debug is True:
+ log.msg(f"output_csirtg: Submitted {ind!r} to CSIRTG")
+
+ log.msg("output_csirtg: submitted to csirtg at {} ".format(ind["location"]))
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -36,7 +36,7 @@\n ],\n setup_requires=[\"incremental\", \"click\"],\n install_requires=[\n- \"twisted>=17.1.0\",\n+ \"twisted==21.1.0\",\n \"cryptography>=0.9.1\",\n \"configparser\",\n \"pyopenssl\",\n@@ -48,7 +48,7 @@\n \"service_identity>=14.0.0\",\n ],\n extras_require={\n- \"csirtg\": [\"csirtgsdk>=0.0.0a17\"],\n+ \"csirtg\": [\"csirtgsdk==1.1.5\"],\n \"dshield\": [\"requests\"],\n \"elasticsearch\": [\"pyes\"],\n \"mysql\": [\"mysqlclient\"],\ndiff --git a/src/cowrie/output/csirtg.py b/src/cowrie/output/csirtg.py\n--- a/src/cowrie/output/csirtg.py\n+++ b/src/cowrie/output/csirtg.py\n@@ -1,41 +1,50 @@\n import os\n from datetime import datetime\n \n-from csirtgsdk.client import Client\n-from csirtgsdk.indicator import Indicator\n-\n from twisted.python import log\n \n import cowrie.core.output\n from cowrie.core.config import CowrieConfig\n \n-USERNAME = os.environ.get(\"CSIRTG_USER\")\n-FEED = os.environ.get(\"CSIRTG_FEED\")\n-TOKEN = os.environ.get(\"CSIRG_TOKEN\")\n-DESCRIPTION = os.environ.get(\"CSIRTG_DESCRIPTION\", \"random scanning activity\")\n+token = CowrieConfig.get(\"output_csirtg\", \"token\", fallback=\"a1b2c3d4\")\n+if token == \"a1b2c3d4\":\n+ log.msg(\"output_csirtg: token not found in configuration file\")\n+ exit(1)\n+\n+os.environ[\"CSIRTG_TOKEN\"] = token\n+import csirtgsdk # noqa: E402\n \n \n class Output(cowrie.core.output.Output):\n \"\"\"\n- csirtg output\n+ CSIRTG output\n \"\"\"\n \n- def start(\n- self,\n- ):\n- self.user = CowrieConfig.get(\"output_csirtg\", \"username\") or USERNAME\n- self.feed = CowrieConfig.get(\"output_csirtg\", \"feed\") or FEED\n- self.token = CowrieConfig.get(\"output_csirtg\", \"token\") or TOKEN\n- self.description = CowrieConfig.get(\n- \"output_csirtg\", \"description\", fallback=DESCRIPTION\n- )\n+ def start(self):\n+ \"\"\"\n+ Start the output module.\n+ Note that csirtsdk is imported here because it reads CSIRTG_TOKEN on import\n+ Cowrie sets this environment variable.\n+ \"\"\"\n+ self.user = CowrieConfig.get(\"output_csirtg\", \"username\")\n+ self.feed = CowrieConfig.get(\"output_csirtg\", \"feed\")\n+ self.debug = CowrieConfig.getboolean(\"output_csirtg\", \"debug\", fallback=False)\n+ self.description = CowrieConfig.get(\"output_csirtg\", \"description\")\n+\n self.context = {}\n- self.client = Client(token=self.token)\n+ # self.client = csirtgsdk.client.Client()\n \n def stop(self):\n pass\n \n def write(self, e):\n+ \"\"\"\n+ Only pass on connection events\n+ \"\"\"\n+ if e[\"eventid\"] == \"cowrie.session.connect\":\n+ self.submitIp(e)\n+\n+ def submitIp(self, e):\n peerIP = e[\"src_ip\"]\n ts = e[\"timestamp\"]\n system = e.get(\"system\", None)\n@@ -77,5 +86,12 @@\n \"description\": self.description,\n }\n \n- ret = Indicator(self.client, i).submit()\n- log.msg(\"logged to csirtg {} \".format(ret[\"location\"]))\n+ if self.debug is True:\n+ log.msg(f\"output_csirtg: Submitting {i!r} to CSIRTG\")\n+\n+ ind = csirtgsdk.indicator.Indicator(i).submit()\n+\n+ if self.debug is True:\n+ log.msg(f\"output_csirtg: Submitted {ind!r} to CSIRTG\")\n+\n+ log.msg(\"output_csirtg: submitted to csirtg at {} \".format(ind[\"location\"]))\n", "issue": "/etc/shadow file contents are incorrect\n**Describe the bug**\r\nIn the latest honeypot from master branch, a \u201ccat /etc/shadow\u201c outputs a readme file instead of password contents. 
It looks the commit 937402ece56a4d272713ea38be32c6dc4191390a replaced the file contents.\r\n\r\n**To Reproduce**\r\n- enter honeypot as root\r\n- Run: cat /etc/shadow\r\n\r\n**Expected behavior**\r\nExoecting a shadow file format output, not readme\r\n\r\n**Server (please complete the following information):**\r\n - Using the docker image built on 14 may 2021\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\nfrom setuptools import setup\n\nsetup(\n name=\"Cowrie\",\n description=\"Cowrie SSH/Telnet Honeypot.\",\n long_description=\"Cowrie SSH/Telnet Honeypot.\",\n author=\"Michel Oosterhof\",\n author_email=\"[email protected]\",\n maintainer=\"Michel Oosterhof\",\n maintainer_email=\"[email protected]\",\n keywords=\"ssh telnet honeypot\",\n platforms=\"Unix, Mac OSX\",\n license=\"BSD\",\n url=\"https://www.cowrie.org/\",\n packages=[\"cowrie\", \"twisted\"],\n include_package_data=True,\n package_dir={\"\": \"src\"},\n package_data={\"\": [\"*.md\"]},\n use_incremental=True,\n python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4\",\n scripts=[\"bin/fsctl\", \"bin/asciinema\", \"bin/cowrie\", \"bin/createfs\", \"bin/playlog\"],\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: No Input/Output (Daemon)\",\n \"Framework :: Twisted\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: System Administrators\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: POSIX\",\n \"Programming Language :: Python\",\n \"Topic :: Security\",\n ],\n setup_requires=[\"incremental\", \"click\"],\n install_requires=[\n \"twisted>=17.1.0\",\n \"cryptography>=0.9.1\",\n \"configparser\",\n \"pyopenssl\",\n \"pyparsing\",\n \"incremental\",\n \"packaging\",\n \"appdirs>=1.4.0\",\n \"python-dateutil\",\n \"service_identity>=14.0.0\",\n ],\n extras_require={\n \"csirtg\": [\"csirtgsdk>=0.0.0a17\"],\n \"dshield\": [\"requests\"],\n \"elasticsearch\": [\"pyes\"],\n \"mysql\": [\"mysqlclient\"],\n \"mongodb\": [\"pymongo\"],\n \"rethinkdblog\": [\"rethinkdb\"],\n \"s3\": [\"botocore\"],\n \"slack\": [\"slackclient\"],\n \"influxdb\": [\"influxdb\"],\n },\n)\n", "path": "setup.py"}, {"content": "import os\nfrom datetime import datetime\n\nfrom csirtgsdk.client import Client\nfrom csirtgsdk.indicator import Indicator\n\nfrom twisted.python import log\n\nimport cowrie.core.output\nfrom cowrie.core.config import CowrieConfig\n\nUSERNAME = os.environ.get(\"CSIRTG_USER\")\nFEED = os.environ.get(\"CSIRTG_FEED\")\nTOKEN = os.environ.get(\"CSIRG_TOKEN\")\nDESCRIPTION = os.environ.get(\"CSIRTG_DESCRIPTION\", \"random scanning activity\")\n\n\nclass Output(cowrie.core.output.Output):\n \"\"\"\n csirtg output\n \"\"\"\n\n def start(\n self,\n ):\n self.user = CowrieConfig.get(\"output_csirtg\", \"username\") or USERNAME\n self.feed = CowrieConfig.get(\"output_csirtg\", \"feed\") or FEED\n self.token = CowrieConfig.get(\"output_csirtg\", \"token\") or TOKEN\n self.description = CowrieConfig.get(\n \"output_csirtg\", \"description\", fallback=DESCRIPTION\n )\n self.context = {}\n self.client = Client(token=self.token)\n\n def stop(self):\n pass\n\n def write(self, e):\n peerIP = e[\"src_ip\"]\n ts = e[\"timestamp\"]\n system = e.get(\"system\", None)\n\n if system not in [\n \"cowrie.ssh.factory.CowrieSSHFactory\",\n \"cowrie.telnet.transport.HoneyPotTelnetFactory\",\n ]:\n return\n\n today = str(datetime.now().date())\n\n if not 
self.context.get(today):\n self.context = {}\n self.context[today] = set()\n\n key = \",\".join([peerIP, system])\n\n if key in self.context[today]:\n return\n\n self.context[today].add(key)\n\n tags = \"scanner,ssh\"\n port = 22\n if e[\"system\"] == \"cowrie.telnet.transport.HoneyPotTelnetFactory\":\n tags = \"scanner,telnet\"\n port = 23\n\n i = {\n \"user\": self.user,\n \"feed\": self.feed,\n \"indicator\": peerIP,\n \"portlist\": port,\n \"protocol\": \"tcp\",\n \"tags\": tags,\n \"firsttime\": ts,\n \"lasttime\": ts,\n \"description\": self.description,\n }\n\n ret = Indicator(self.client, i).submit()\n log.msg(\"logged to csirtg {} \".format(ret[\"location\"]))\n", "path": "src/cowrie/output/csirtg.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nfrom setuptools import setup\n\nsetup(\n name=\"Cowrie\",\n description=\"Cowrie SSH/Telnet Honeypot.\",\n long_description=\"Cowrie SSH/Telnet Honeypot.\",\n author=\"Michel Oosterhof\",\n author_email=\"[email protected]\",\n maintainer=\"Michel Oosterhof\",\n maintainer_email=\"[email protected]\",\n keywords=\"ssh telnet honeypot\",\n platforms=\"Unix, Mac OSX\",\n license=\"BSD\",\n url=\"https://www.cowrie.org/\",\n packages=[\"cowrie\", \"twisted\"],\n include_package_data=True,\n package_dir={\"\": \"src\"},\n package_data={\"\": [\"*.md\"]},\n use_incremental=True,\n python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4\",\n scripts=[\"bin/fsctl\", \"bin/asciinema\", \"bin/cowrie\", \"bin/createfs\", \"bin/playlog\"],\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: No Input/Output (Daemon)\",\n \"Framework :: Twisted\",\n \"Intended Audience :: Developers\",\n \"Intended Audience :: System Administrators\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: POSIX\",\n \"Programming Language :: Python\",\n \"Topic :: Security\",\n ],\n setup_requires=[\"incremental\", \"click\"],\n install_requires=[\n \"twisted==21.1.0\",\n \"cryptography>=0.9.1\",\n \"configparser\",\n \"pyopenssl\",\n \"pyparsing\",\n \"incremental\",\n \"packaging\",\n \"appdirs>=1.4.0\",\n \"python-dateutil\",\n \"service_identity>=14.0.0\",\n ],\n extras_require={\n \"csirtg\": [\"csirtgsdk==1.1.5\"],\n \"dshield\": [\"requests\"],\n \"elasticsearch\": [\"pyes\"],\n \"mysql\": [\"mysqlclient\"],\n \"mongodb\": [\"pymongo\"],\n \"rethinkdblog\": [\"rethinkdb\"],\n \"s3\": [\"botocore\"],\n \"slack\": [\"slackclient\"],\n \"influxdb\": [\"influxdb\"],\n },\n)\n", "path": "setup.py"}, {"content": "import os\nfrom datetime import datetime\n\nfrom twisted.python import log\n\nimport cowrie.core.output\nfrom cowrie.core.config import CowrieConfig\n\ntoken = CowrieConfig.get(\"output_csirtg\", \"token\", fallback=\"a1b2c3d4\")\nif token == \"a1b2c3d4\":\n log.msg(\"output_csirtg: token not found in configuration file\")\n exit(1)\n\nos.environ[\"CSIRTG_TOKEN\"] = token\nimport csirtgsdk # noqa: E402\n\n\nclass Output(cowrie.core.output.Output):\n \"\"\"\n CSIRTG output\n \"\"\"\n\n def start(self):\n \"\"\"\n Start the output module.\n Note that csirtsdk is imported here because it reads CSIRTG_TOKEN on import\n Cowrie sets this environment variable.\n \"\"\"\n self.user = CowrieConfig.get(\"output_csirtg\", \"username\")\n self.feed = CowrieConfig.get(\"output_csirtg\", \"feed\")\n self.debug = CowrieConfig.getboolean(\"output_csirtg\", \"debug\", fallback=False)\n self.description = 
CowrieConfig.get(\"output_csirtg\", \"description\")\n\n self.context = {}\n # self.client = csirtgsdk.client.Client()\n\n def stop(self):\n pass\n\n def write(self, e):\n \"\"\"\n Only pass on connection events\n \"\"\"\n if e[\"eventid\"] == \"cowrie.session.connect\":\n self.submitIp(e)\n\n def submitIp(self, e):\n peerIP = e[\"src_ip\"]\n ts = e[\"timestamp\"]\n system = e.get(\"system\", None)\n\n if system not in [\n \"cowrie.ssh.factory.CowrieSSHFactory\",\n \"cowrie.telnet.transport.HoneyPotTelnetFactory\",\n ]:\n return\n\n today = str(datetime.now().date())\n\n if not self.context.get(today):\n self.context = {}\n self.context[today] = set()\n\n key = \",\".join([peerIP, system])\n\n if key in self.context[today]:\n return\n\n self.context[today].add(key)\n\n tags = \"scanner,ssh\"\n port = 22\n if e[\"system\"] == \"cowrie.telnet.transport.HoneyPotTelnetFactory\":\n tags = \"scanner,telnet\"\n port = 23\n\n i = {\n \"user\": self.user,\n \"feed\": self.feed,\n \"indicator\": peerIP,\n \"portlist\": port,\n \"protocol\": \"tcp\",\n \"tags\": tags,\n \"firsttime\": ts,\n \"lasttime\": ts,\n \"description\": self.description,\n }\n\n if self.debug is True:\n log.msg(f\"output_csirtg: Submitting {i!r} to CSIRTG\")\n\n ind = csirtgsdk.indicator.Indicator(i).submit()\n\n if self.debug is True:\n log.msg(f\"output_csirtg: Submitted {ind!r} to CSIRTG\")\n\n log.msg(\"output_csirtg: submitted to csirtg at {} \".format(ind[\"location\"]))\n", "path": "src/cowrie/output/csirtg.py"}]} | 1,773 | 976 |
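A note on the csirtg rewrite in the record above: the fix works because `csirtgsdk` reads `CSIRTG_TOKEN` from the environment at import time, so the token has to be exported before the import statement runs. Below is a minimal sketch of that pattern; it is an illustration only, and the `load_token` helper plus the `COWRIE_CSIRTG_TOKEN` variable are invented stand-ins for the real `CowrieConfig` lookup.

```python
import os
import sys


def load_token() -> str:
    # Hypothetical stand-in for CowrieConfig.get("output_csirtg", "token", fallback=...)
    return os.environ.get("COWRIE_CSIRTG_TOKEN", "a1b2c3d4")


token = load_token()
if token == "a1b2c3d4":  # the patch treats this sentinel as "not configured"
    sys.exit("output_csirtg: token not found in configuration file")

# The variable must be set *before* the import, because csirtgsdk caches it on import.
os.environ["CSIRTG_TOKEN"] = token
import csirtgsdk  # noqa: E402  (deliberately placed after the environment write)
```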
gh_patches_debug_50124 | rasdani/github-patches | git_diff | scrapy__scrapy-2649 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
After adding request flags, subclasses of LogFormatter that rely on the 'flags' format string are broken
#2082 added flags to `Request`, but it also renamed the formatting-string key from `flags` to `response_flags`/`request_flags`:
```
-CRAWLEDMSG = u"Crawled (%(status)s) %(request)s (referer: %(referer)s)%(flags)s"
+CRAWLEDMSG = u"Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s"
```
Scrapy allows you to override the logformatter, and this is what I have in my project. My logformatter looks roughly like this:
```python
# dirbot/logf.py
from scrapy.logformatter import LogFormatter


class CustomLogFormatter(LogFormatter):
    def crawled(self, request, response, spider):
        kwargs = super(CustomLogFormatter, self).crawled(
            request, response, spider)
        kwargs['msg'] = (
            u"Crawled (%(status)s) %(request)s "
            u"(referer: %(referer)s, latency: %(latency).2f s)%(flags)s"
        )
        kwargs['args']['latency'] = response.meta.get('download_latency', 0)
        return kwargs
```
Now if you enable it in settings with `LOG_FORMATTER = 'dirbot.logf.CustomLogFormatter'` and try to run it with recent master, you'll get a KeyError:
```
2017-03-13 14:15:26 [scrapy.extensions.telnet] DEBUG: Telnet console listening on 127.0.0.1:6023
Traceback (most recent call last):
File "/usr/lib/python2.7/logging/__init__.py", line 851, in emit
msg = self.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 724, in format
return fmt.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 464, in format
record.message = record.getMessage()
File "/usr/lib/python2.7/logging/__init__.py", line 328, in getMessage
msg = msg % self.args
KeyError: u'flags'
Logged from file engine.py, line 238
Traceback (most recent call last):
File "/usr/lib/python2.7/logging/__init__.py", line 851, in emit
msg = self.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 724, in format
return fmt.format(record)
File "/usr/lib/python2.7/logging/__init__.py", line 464, in format
record.message = record.getMessage()
File "/usr/lib/python2.7/logging/__init__.py", line 328, in getMessage
msg = msg % self.args
KeyError: u'flags'
Logged from file engine.py, line 238
2017-03-13 14:15:27 [scrapy.core.scraper] DEBUG: Scraped from <200 http://www.dmoz.org/Computers/Programming/Languages/Python/Resources/>
```
So this change that renamed `flags` to `response_flags`/`request_flags` seems backward-incompatible.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scrapy/logformatter.py`
Content:
```
1 import os
2 import logging
3
4 from twisted.python.failure import Failure
5
6 from scrapy.utils.request import referer_str
7
8 SCRAPEDMSG = u"Scraped from %(src)s" + os.linesep + "%(item)s"
9 DROPPEDMSG = u"Dropped: %(exception)s" + os.linesep + "%(item)s"
10 CRAWLEDMSG = u"Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s"
11
12
13 class LogFormatter(object):
14 """Class for generating log messages for different actions.
15
16 All methods must return a dictionary listing the parameters `level`, `msg`
17 and `args` which are going to be used for constructing the log message when
18 calling logging.log.
19
20 Dictionary keys for the method outputs:
21 * `level` should be the log level for that action, you can use those
22 from the python logging library: logging.DEBUG, logging.INFO,
23 logging.WARNING, logging.ERROR and logging.CRITICAL.
24
25 * `msg` should be a string that can contain different formatting
26 placeholders. This string, formatted with the provided `args`, is going
27 to be the log message for that action.
28
29 * `args` should be a tuple or dict with the formatting placeholders for
30 `msg`. The final log message is computed as output['msg'] %
31 output['args'].
32 """
33
34 def crawled(self, request, response, spider):
35 request_flags = ' %s' % str(request.flags) if request.flags else ''
36 response_flags = ' %s' % str(response.flags) if response.flags else ''
37 return {
38 'level': logging.DEBUG,
39 'msg': CRAWLEDMSG,
40 'args': {
41 'status': response.status,
42 'request': request,
43 'request_flags' : request_flags,
44 'referer': referer_str(request),
45 'response_flags': response_flags,
46 }
47 }
48
49 def scraped(self, item, response, spider):
50 if isinstance(response, Failure):
51 src = response.getErrorMessage()
52 else:
53 src = response
54 return {
55 'level': logging.DEBUG,
56 'msg': SCRAPEDMSG,
57 'args': {
58 'src': src,
59 'item': item,
60 }
61 }
62
63 def dropped(self, item, exception, response, spider):
64 return {
65 'level': logging.WARNING,
66 'msg': DROPPEDMSG,
67 'args': {
68 'exception': exception,
69 'item': item,
70 }
71 }
72
73 @classmethod
74 def from_crawler(cls, crawler):
75 return cls()
76
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scrapy/logformatter.py b/scrapy/logformatter.py
--- a/scrapy/logformatter.py
+++ b/scrapy/logformatter.py
@@ -43,6 +43,8 @@
                 'request_flags' : request_flags,
                 'referer': referer_str(request),
                 'response_flags': response_flags,
+                # backward compatibility with Scrapy logformatter below 1.4 version
+                'flags': response_flags
             }
         }
 
| {"golden_diff": "diff --git a/scrapy/logformatter.py b/scrapy/logformatter.py\n--- a/scrapy/logformatter.py\n+++ b/scrapy/logformatter.py\n@@ -43,6 +43,8 @@\n 'request_flags' : request_flags,\n 'referer': referer_str(request),\n 'response_flags': response_flags,\n+ # backward compatibility with Scrapy logformatter below 1.4 version\n+ 'flags': response_flags\n }\n }\n", "issue": "After adding request flags subclasses of logformatter that rely on 'flags' format string are broken\n#2082 added flags to request but it also renamed formatting string key from flags to response_flags/request_flags\r\n```\r\nCRAWLEDMSG = u\"Crawled (%(status)s) %(request)s (referer: %(referer)s)%(flags)s\"\r\n +CRAWLEDMSG = u\"Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s\" \r\n```\r\n\r\nScrapy allows you to override logformatter and this is what I have in my project. I have logformatter looking rouhgly like this\r\n\r\n\r\n```python\r\n# dirbot/logf.py\r\nfrom scrapy.logformatter import LogFormatter\r\n\r\n\r\nclass CustomLogFormatter(LogFormatter):\r\n def crawled(self, request, response, spider):\r\n kwargs = super(CustomLogFormatter, self).crawled(\r\n request, response, spider)\r\n kwargs['msg'] = (\r\n u\"Crawled (%(status)s) %(request)s \"\r\n u\"(referer: %(referer)s, latency: %(latency).2f s)%(flags)s\"\r\n )\r\n kwargs['args']['latency'] = response.meta.get('download_latency', 0)\r\n return kwargs\r\n```\r\n\r\nnow if you enable it in settings `LOG_FORMATTER = 'dirbot.logf.CustomLogFormatter'\r\n` and try to run it with recent master you'll get KeyError\r\n\r\n```\r\n2017-03-13 14:15:26 [scrapy.extensions.telnet] DEBUG: Telnet console listening on 127.0.0.1:6023\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python2.7/logging/__init__.py\", line 851, in emit\r\n msg = self.format(record)\r\n File \"/usr/lib/python2.7/logging/__init__.py\", line 724, in format\r\n return fmt.format(record)\r\n File \"/usr/lib/python2.7/logging/__init__.py\", line 464, in format\r\n record.message = record.getMessage()\r\n File \"/usr/lib/python2.7/logging/__init__.py\", line 328, in getMessage\r\n msg = msg % self.args\r\nKeyError: u'flags'\r\nLogged from file engine.py, line 238\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python2.7/logging/__init__.py\", line 851, in emit\r\n msg = self.format(record)\r\n File \"/usr/lib/python2.7/logging/__init__.py\", line 724, in format\r\n return fmt.format(record)\r\n File \"/usr/lib/python2.7/logging/__init__.py\", line 464, in format\r\n record.message = record.getMessage()\r\n File \"/usr/lib/python2.7/logging/__init__.py\", line 328, in getMessage\r\n msg = msg % self.args\r\nKeyError: u'flags'\r\nLogged from file engine.py, line 238\r\n2017-03-13 14:15:27 [scrapy.core.scraper] DEBUG: Scraped from <200 http://www.dmoz.org/Computers/Programming/Languages/Python/Resources/>\r\n```\r\n\r\nSo this change that renamed `flags` to `response_flags/request_flags` seems backward incompatible. 
\n", "before_files": [{"content": "import os\nimport logging\n\nfrom twisted.python.failure import Failure\n\nfrom scrapy.utils.request import referer_str\n\nSCRAPEDMSG = u\"Scraped from %(src)s\" + os.linesep + \"%(item)s\"\nDROPPEDMSG = u\"Dropped: %(exception)s\" + os.linesep + \"%(item)s\"\nCRAWLEDMSG = u\"Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s\"\n\n\nclass LogFormatter(object):\n \"\"\"Class for generating log messages for different actions.\n\n All methods must return a dictionary listing the parameters `level`, `msg`\n and `args` which are going to be used for constructing the log message when\n calling logging.log.\n\n Dictionary keys for the method outputs:\n * `level` should be the log level for that action, you can use those\n from the python logging library: logging.DEBUG, logging.INFO,\n logging.WARNING, logging.ERROR and logging.CRITICAL.\n\n * `msg` should be a string that can contain different formatting\n placeholders. This string, formatted with the provided `args`, is going\n to be the log message for that action.\n\n * `args` should be a tuple or dict with the formatting placeholders for\n `msg`. The final log message is computed as output['msg'] %\n output['args'].\n \"\"\"\n\n def crawled(self, request, response, spider):\n request_flags = ' %s' % str(request.flags) if request.flags else ''\n response_flags = ' %s' % str(response.flags) if response.flags else ''\n return {\n 'level': logging.DEBUG,\n 'msg': CRAWLEDMSG,\n 'args': {\n 'status': response.status,\n 'request': request,\n 'request_flags' : request_flags,\n 'referer': referer_str(request),\n 'response_flags': response_flags,\n }\n }\n\n def scraped(self, item, response, spider):\n if isinstance(response, Failure):\n src = response.getErrorMessage()\n else:\n src = response\n return {\n 'level': logging.DEBUG,\n 'msg': SCRAPEDMSG,\n 'args': {\n 'src': src,\n 'item': item,\n }\n }\n\n def dropped(self, item, exception, response, spider):\n return {\n 'level': logging.WARNING,\n 'msg': DROPPEDMSG,\n 'args': {\n 'exception': exception,\n 'item': item,\n }\n }\n\n @classmethod\n def from_crawler(cls, crawler):\n return cls()\n", "path": "scrapy/logformatter.py"}], "after_files": [{"content": "import os\nimport logging\n\nfrom twisted.python.failure import Failure\n\nfrom scrapy.utils.request import referer_str\n\nSCRAPEDMSG = u\"Scraped from %(src)s\" + os.linesep + \"%(item)s\"\nDROPPEDMSG = u\"Dropped: %(exception)s\" + os.linesep + \"%(item)s\"\nCRAWLEDMSG = u\"Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s\"\n\n\nclass LogFormatter(object):\n \"\"\"Class for generating log messages for different actions.\n\n All methods must return a dictionary listing the parameters `level`, `msg`\n and `args` which are going to be used for constructing the log message when\n calling logging.log.\n\n Dictionary keys for the method outputs:\n * `level` should be the log level for that action, you can use those\n from the python logging library: logging.DEBUG, logging.INFO,\n logging.WARNING, logging.ERROR and logging.CRITICAL.\n\n * `msg` should be a string that can contain different formatting\n placeholders. This string, formatted with the provided `args`, is going\n to be the log message for that action.\n\n * `args` should be a tuple or dict with the formatting placeholders for\n `msg`. 
The final log message is computed as output['msg'] %\n output['args'].\n \"\"\"\n\n def crawled(self, request, response, spider):\n request_flags = ' %s' % str(request.flags) if request.flags else ''\n response_flags = ' %s' % str(response.flags) if response.flags else ''\n return {\n 'level': logging.DEBUG,\n 'msg': CRAWLEDMSG,\n 'args': {\n 'status': response.status,\n 'request': request,\n 'request_flags' : request_flags,\n 'referer': referer_str(request),\n 'response_flags': response_flags,\n # backward compatibility with Scrapy logformatter below 1.4 version\n 'flags': response_flags\n }\n }\n\n def scraped(self, item, response, spider):\n if isinstance(response, Failure):\n src = response.getErrorMessage()\n else:\n src = response\n return {\n 'level': logging.DEBUG,\n 'msg': SCRAPEDMSG,\n 'args': {\n 'src': src,\n 'item': item,\n }\n }\n\n def dropped(self, item, exception, response, spider):\n return {\n 'level': logging.WARNING,\n 'msg': DROPPEDMSG,\n 'args': {\n 'exception': exception,\n 'item': item,\n }\n }\n\n @classmethod\n def from_crawler(cls, crawler):\n return cls()\n", "path": "scrapy/logformatter.py"}]} | 1,697 | 99 |
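To see why the one-line `'flags'` alias in the patch above restores the broken subclass, it helps to look at the formatting step in isolation. The sketch below is illustrative, with hand-filled values rather than real Scrapy request/response objects:

```python
# With the patch, the args dict carries both naming schemes, so the new default
# message and a legacy '%(flags)s' message both format without a KeyError.
args = {
    'status': 200,
    'request': '<GET http://example.com>',
    'request_flags': '',
    'referer': 'None',
    'response_flags': " ['cached']",
    'flags': " ['cached']",  # backward-compatibility alias added by the patch
}

new_msg = u"Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s"
old_msg = u"Crawled (%(status)s) %(request)s (referer: %(referer)s)%(flags)s"

print(new_msg % args)  # worked before and after the patch
print(old_msg % args)  # raised KeyError: 'flags' before the patch
```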
gh_patches_debug_15426 | rasdani/github-patches | git_diff | airctic__icevision-734 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Can't save a full model using torch.save (at least with faster-RCNN)
It is not possible to save a full model using the default settings of `torch.save` (see stack trace below). This is because `remove_internal_model_transforms` uses inner functions in its implementation, and the default pickle module does not support inner functions.
Workaround: use the `dill` module instead, which does support inner functions.
Suggested fix: It does not look as if the internal functions are necessary. If they were moved to standard module-level functions, then the default pickle module should work.
`torch.save(model, 'mod.pth', pickle_module=pickle)` causes an error.
`torch.save(model, 'mod.pth', pickle_module=dill)` is a workaround.
**To Reproduce**
`torch.save(model, 'mod1-full.pth', pickle_module=pickle)`
results in:
```python
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-12-50f3761f4f3c> in <module>
----> 1 torch.save(model, 'mod1-full.pth', pickle_module=pickle)
~/anaconda3/envs/dlm/lib/python3.8/site-packages/torch/serialization.py in save(obj, f, pickle_module, pickle_protocol, _use_new_zipfile_serialization)
370 if _use_new_zipfile_serialization:
371 with _open_zipfile_writer(opened_file) as opened_zipfile:
--> 372 _save(obj, opened_zipfile, pickle_module, pickle_protocol)
373 return
374 _legacy_save(obj, opened_file, pickle_module, pickle_protocol)
~/anaconda3/envs/dlm/lib/python3.8/site-packages/torch/serialization.py in _save(obj, zip_file, pickle_module, pickle_protocol)
474 pickler = pickle_module.Pickler(data_buf, protocol=pickle_protocol)
475 pickler.persistent_id = persistent_id
--> 476 pickler.dump(obj)
477 data_value = data_buf.getvalue()
478 zip_file.write_record('data.pkl', data_value, len(data_value))
AttributeError: Can't pickle local object 'remove_internal_model_transforms.<locals>.noop_normalize'
```
Relevant definition:
```
def remove_internal_model_transforms(model: GeneralizedRCNN):
    def noop_normalize(image: Tensor) -> Tensor:
        return image

    def noop_resize(
        image: Tensor, target: Optional[Dict[str, Tensor]]
    ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:
        return image, target

    model.transform.normalize = noop_normalize
    model.transform.resize = noop_resize
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `icevision/models/torchvision/utils.py`
Content:
```
1 __all__ = [
2 "remove_internal_model_transforms",
3 "patch_rcnn_param_groups",
4 "patch_retinanet_param_groups",
5 ]
6
7 from icevision.imports import *
8 from icevision.utils import *
9 from torchvision.models.detection.generalized_rcnn import GeneralizedRCNN
10
11
12 def remove_internal_model_transforms(model: GeneralizedRCNN):
13 def noop_normalize(image: Tensor) -> Tensor:
14 return image
15
16 def noop_resize(
17 image: Tensor, target: Optional[Dict[str, Tensor]]
18 ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:
19 return image, target
20
21 model.transform.normalize = noop_normalize
22 model.transform.resize = noop_resize
23
24
25 def patch_param_groups(
26 model: nn.Module,
27 head_layers: List[nn.Module],
28 backbone_param_groups: List[List[nn.Parameter]],
29 ):
30 def param_groups(model: nn.Module) -> List[List[nn.Parameter]]:
31 head_param_groups = [list(layer.parameters()) for layer in head_layers]
32
33 _param_groups = backbone_param_groups + head_param_groups
34 check_all_model_params_in_groups2(model, _param_groups)
35
36 return _param_groups
37
38 model.param_groups = MethodType(param_groups, model)
39
40
41 def patch_rcnn_param_groups(model: nn.Module):
42 return patch_param_groups(
43 model=model,
44 head_layers=[model.rpn, model.roi_heads],
45 backbone_param_groups=model.backbone.param_groups(),
46 )
47
48
49 def patch_retinanet_param_groups(model: nn.Module):
50 return patch_param_groups(
51 model=model,
52 head_layers=[model.head],
53 backbone_param_groups=model.backbone.param_groups(),
54 )
55
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/icevision/models/torchvision/utils.py b/icevision/models/torchvision/utils.py
--- a/icevision/models/torchvision/utils.py
+++ b/icevision/models/torchvision/utils.py
@@ -9,17 +9,19 @@
 from torchvision.models.detection.generalized_rcnn import GeneralizedRCNN
 
 
-def remove_internal_model_transforms(model: GeneralizedRCNN):
-    def noop_normalize(image: Tensor) -> Tensor:
-        return image
+def _noop_normalize(image: Tensor) -> Tensor:
+    return image
+
 
-    def noop_resize(
-        image: Tensor, target: Optional[Dict[str, Tensor]]
-    ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:
-        return image, target
+def _noop_resize(
+    image: Tensor, target: Optional[Dict[str, Tensor]]
+) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:
+    return image, target
 
-    model.transform.normalize = noop_normalize
-    model.transform.resize = noop_resize
+
+def remove_internal_model_transforms(model: GeneralizedRCNN):
+    model.transform.normalize = _noop_normalize
+    model.transform.resize = _noop_resize
 
 
 def patch_param_groups(
def patch_param_groups(
| {"golden_diff": "diff --git a/icevision/models/torchvision/utils.py b/icevision/models/torchvision/utils.py\n--- a/icevision/models/torchvision/utils.py\n+++ b/icevision/models/torchvision/utils.py\n@@ -9,17 +9,19 @@\n from torchvision.models.detection.generalized_rcnn import GeneralizedRCNN\n \n \n-def remove_internal_model_transforms(model: GeneralizedRCNN):\n- def noop_normalize(image: Tensor) -> Tensor:\n- return image\n+def _noop_normalize(image: Tensor) -> Tensor:\n+ return image\n+\n \n- def noop_resize(\n- image: Tensor, target: Optional[Dict[str, Tensor]]\n- ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:\n- return image, target\n+def _noop_resize(\n+ image: Tensor, target: Optional[Dict[str, Tensor]]\n+) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:\n+ return image, target\n \n- model.transform.normalize = noop_normalize\n- model.transform.resize = noop_resize\n+\n+def remove_internal_model_transforms(model: GeneralizedRCNN):\n+ model.transform.normalize = _noop_normalize\n+ model.transform.resize = _noop_resize\n \n \n def patch_param_groups(\n", "issue": "Can't save a full model using torch.save (at least with faster-RCNN)\nIt is not possible to save a full model using default settings of `torch.save` (see stack trace below). This is because of the implementation of `remove_internal_model_transforms`, which uses inner functions in its implementation. The default pickle module does not support inner functions.\r\n\r\nWorkaround: use the `dill` module instead, which does support inner functions.\r\n\r\nSuggested fix: It does not look as if the internal functions are necessary. If there were moved to standard functions, then the default pickle module should work.\r\n`torch.save(model, 'mod.pth', pickle_module=pickle)` causes an error.\r\n\r\n`torch.save(model, 'mod.pth', pickle_module=dill)` is a workaround.\r\n\r\n**To Reproduce**\r\n\r\n`torch.save(model, 'mod1-full.pth', pickle_module=pickle)`\r\nresults in:\r\n\r\n```python\r\n---------------------------------------------------------------------------\r\nAttributeError Traceback (most recent call last)\r\n<ipython-input-12-50f3761f4f3c> in <module>\r\n----> 1 torch.save(model, 'mod1-full.pth', pickle_module=pickle)\r\n\r\n~/anaconda3/envs/dlm/lib/python3.8/site-packages/torch/serialization.py in save(obj, f, pickle_module, pickle_protocol, _use_new_zipfile_serialization)\r\n 370 if _use_new_zipfile_serialization:\r\n 371 with _open_zipfile_writer(opened_file) as opened_zipfile:\r\n--> 372 _save(obj, opened_zipfile, pickle_module, pickle_protocol)\r\n 373 return\r\n 374 _legacy_save(obj, opened_file, pickle_module, pickle_protocol)\r\n\r\n~/anaconda3/envs/dlm/lib/python3.8/site-packages/torch/serialization.py in _save(obj, zip_file, pickle_module, pickle_protocol)\r\n 474 pickler = pickle_module.Pickler(data_buf, protocol=pickle_protocol)\r\n 475 pickler.persistent_id = persistent_id\r\n--> 476 pickler.dump(obj)\r\n 477 data_value = data_buf.getvalue()\r\n 478 zip_file.write_record('data.pkl', data_value, len(data_value))\r\n\r\nAttributeError: Can't pickle local object 'remove_internal_model_transforms.<locals>.noop_normalize'\r\n```\r\n\r\nRelevant definition:\r\n```\r\ndef remove_internal_model_transforms(model: GeneralizedRCNN):\r\n def noop_normalize(image: Tensor) -> Tensor:\r\n return image\r\n\r\n def noop_resize(\r\n image: Tensor, target: Optional[Dict[str, Tensor]]\r\n ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:\r\n return image, target\r\n\r\n model.transform.normalize = noop_normalize\r\n 
model.transform.resize = noop_resize\r\n```\r\n\r\n\n", "before_files": [{"content": "__all__ = [\n \"remove_internal_model_transforms\",\n \"patch_rcnn_param_groups\",\n \"patch_retinanet_param_groups\",\n]\n\nfrom icevision.imports import *\nfrom icevision.utils import *\nfrom torchvision.models.detection.generalized_rcnn import GeneralizedRCNN\n\n\ndef remove_internal_model_transforms(model: GeneralizedRCNN):\n def noop_normalize(image: Tensor) -> Tensor:\n return image\n\n def noop_resize(\n image: Tensor, target: Optional[Dict[str, Tensor]]\n ) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:\n return image, target\n\n model.transform.normalize = noop_normalize\n model.transform.resize = noop_resize\n\n\ndef patch_param_groups(\n model: nn.Module,\n head_layers: List[nn.Module],\n backbone_param_groups: List[List[nn.Parameter]],\n):\n def param_groups(model: nn.Module) -> List[List[nn.Parameter]]:\n head_param_groups = [list(layer.parameters()) for layer in head_layers]\n\n _param_groups = backbone_param_groups + head_param_groups\n check_all_model_params_in_groups2(model, _param_groups)\n\n return _param_groups\n\n model.param_groups = MethodType(param_groups, model)\n\n\ndef patch_rcnn_param_groups(model: nn.Module):\n return patch_param_groups(\n model=model,\n head_layers=[model.rpn, model.roi_heads],\n backbone_param_groups=model.backbone.param_groups(),\n )\n\n\ndef patch_retinanet_param_groups(model: nn.Module):\n return patch_param_groups(\n model=model,\n head_layers=[model.head],\n backbone_param_groups=model.backbone.param_groups(),\n )\n", "path": "icevision/models/torchvision/utils.py"}], "after_files": [{"content": "__all__ = [\n \"remove_internal_model_transforms\",\n \"patch_rcnn_param_groups\",\n \"patch_retinanet_param_groups\",\n]\n\nfrom icevision.imports import *\nfrom icevision.utils import *\nfrom torchvision.models.detection.generalized_rcnn import GeneralizedRCNN\n\n\ndef _noop_normalize(image: Tensor) -> Tensor:\n return image\n\n\ndef _noop_resize(\n image: Tensor, target: Optional[Dict[str, Tensor]]\n) -> Tuple[Tensor, Optional[Dict[str, Tensor]]]:\n return image, target\n\n\ndef remove_internal_model_transforms(model: GeneralizedRCNN):\n model.transform.normalize = _noop_normalize\n model.transform.resize = _noop_resize\n\n\ndef patch_param_groups(\n model: nn.Module,\n head_layers: List[nn.Module],\n backbone_param_groups: List[List[nn.Parameter]],\n):\n def param_groups(model: nn.Module) -> List[List[nn.Parameter]]:\n head_param_groups = [list(layer.parameters()) for layer in head_layers]\n\n _param_groups = backbone_param_groups + head_param_groups\n check_all_model_params_in_groups2(model, _param_groups)\n\n return _param_groups\n\n model.param_groups = MethodType(param_groups, model)\n\n\ndef patch_rcnn_param_groups(model: nn.Module):\n return patch_param_groups(\n model=model,\n head_layers=[model.rpn, model.roi_heads],\n backbone_param_groups=model.backbone.param_groups(),\n )\n\n\ndef patch_retinanet_param_groups(model: nn.Module):\n return patch_param_groups(\n model=model,\n head_layers=[model.head],\n backbone_param_groups=model.backbone.param_groups(),\n )\n", "path": "icevision/models/torchvision/utils.py"}]} | 1,316 | 270 |
gh_patches_debug_40489 | rasdani/github-patches | git_diff | mindsdb__lightwood-979 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug in SHAP analysis block
Trying out this module, the following error arises with the cloud tutorial for home rentals:
```python
File "/tmp/e74830c1ef5914dfafe52537b8b33cd0479265f508bfd6c616614666305205822.py", line 392, in predict
insights, global_insights = explain(data=data,encoded_data=encoded_data,predictions=df,ts_analysis=None,problem_definition=self.problem_definition,stat_analysis=self.statistical_analysis,runtime_analysis=self.runtime_analyzer,target_name=self.target,target_dtype=self.dtype_dict[self.target],explainer_blocks=self.analysis_blocks,pred_args=self.pred_args)
File "/MindsDB/lightwood/lightwood/analysis/explain.py", line 81, in explain
row_insights, global_insights = block.explain(row_insights, global_insights, **kwargs)
File "/MindsDB/lightwood/lightwood/analysis/helpers/shap.py", line 79, in explain
shap_values = shap_explainer.shap_values(ns.data, silent=True)
File "/MindsDB/nenv/lib/python3.8/site-packages/shap/explainers/_kernel.py", line 186, in shap_values
explanations.append(self.explain(data, **kwargs))
File "/MindsDB/nenv/lib/python3.8/site-packages/shap/explainers/_kernel.py", line 207, in explain
match_instance_to_data(instance, self.data)
File "/MindsDB/nenv/lib/python3.8/site-packages/shap/utils/_legacy.py", line 87, in match_instance_to_data
instance.group_display_values = [instance.x[0, group[0]] if len(group) == 1 else "" for group in data.groups]
File "/MindsDB/nenv/lib/python3.8/site-packages/shap/utils/_legacy.py", line 87, in <listcomp>
instance.group_display_values = [instance.x[0, group[0]] if len(group) == 1 else "" for group in data.groups]
IndexError: index 7 is out of bounds for axis 1 with size 7
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lightwood/analysis/helpers/shap.py`
Content:
```
1 import warnings
2 from types import SimpleNamespace
3 from typing import Dict, Optional, Tuple
4
5 import numpy as np
6 import pandas as pd
7 from lightwood.analysis.base import BaseAnalysisBlock
8 from lightwood.api import dtype
9 from lightwood.api.types import PredictionArguments
10 from lightwood.data.encoded_ds import EncodedDs
11 from lightwood.helpers.log import log
12 from sklearn.preprocessing import LabelEncoder
13
14 import shap
15
16
17 class ShapleyValues(BaseAnalysisBlock):
18 """
19 Analysis block that estimates column importance with SHAP (SHapley Additive exPlanations), a game theoretic approach
20 to explain the ouput of any machine learning model. SHAP assigns each feature an importance value for a particular
21 prediction.
22
23 Reference:
24 https://shap.readthedocs.io/en/stable/
25 https://proceedings.neurips.cc/paper/2017/file/8a20a8621978632d76c43dfd28b67767-Paper.pdf
26 """
27 label_encoder: LabelEncoder
28
29 def __init__(self, deps: Optional[Tuple] = ...):
30 super().__init__(deps=deps)
31 self.label_encoder = LabelEncoder()
32
33 def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:
34 log.info('Preparing to compute feature importance values with SHAP')
35 ns = SimpleNamespace(**kwargs)
36
37 output_dtype = ns.dtype_dict[ns.target]
38 train_data: EncodedDs = ns.train_data
39
40 if output_dtype in (dtype.integer, dtype.float, dtype.quantity):
41 pass
42 elif output_dtype in (dtype.binary, dtype.categorical, dtype.tags):
43 self.label_encoder.fit(train_data.data_frame[ns.target].values)
44 else:
45 log.warning(f'ShapleyValues analyzers not supported for type: {output_dtype}')
46 return info
47
48 def model(x: np.ndarray) -> np.ndarray:
49 assert(isinstance(x, np.ndarray))
50 df = pd.DataFrame(data=x, columns=train_data.data_frame.columns)
51 ds = EncodedDs(encoders=train_data.encoders, data_frame=df, target=train_data.target)
52
53 decoded_predictions = ns.predictor(ds=ds, args=PredictionArguments())
54 if output_dtype in (dtype.integer, dtype.float, dtype.quantity):
55 encoded_predictions = decoded_predictions['prediction'].values
56 elif output_dtype in (dtype.binary, dtype.categorical, dtype.tags):
57 encoded_predictions = self.label_encoder.transform(decoded_predictions['prediction'].values)
58
59 return encoded_predictions
60
61 info['shap_explainer'] = shap.KernelExplainer(model=model, data=train_data.data_frame)
62
63 return info
64
65 def explain(self,
66 row_insights: pd.DataFrame,
67 global_insights: Dict[str, object],
68 **kwargs
69 ) -> Tuple[pd.DataFrame, Dict[str, object]]:
70 log.info('Computing feature importance values with Kernel SHAP method')
71 ns = SimpleNamespace(**kwargs)
72
73 shap_explainer = ns.analysis.get('shap_explainer', None)
74 if shap_explainer is None:
75 return row_insights, global_insights
76
77 with warnings.catch_warnings():
78 warnings.filterwarnings("ignore", category=DeprecationWarning)
79 shap_values = shap_explainer.shap_values(ns.data, silent=True)
80
81 shap_values_df = pd.DataFrame(shap_values).rename(
82 mapper=lambda i: f"feature_{i}_impact", axis='columns')
83
84 if kwargs.get('target_dtype', None) in (dtype.binary, dtype.categorical, dtype.tags):
85 predictions = self.label_encoder.transform(row_insights['prediction'])
86 else:
87 predictions = row_insights['prediction']
88
89 base_response = (predictions - shap_values_df.sum(axis='columns')).mean()
90 global_insights['base_response'] = base_response
91
92 row_insights = pd.concat([row_insights, shap_values_df], axis='columns')
93
94 return row_insights, global_insights
95
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lightwood/analysis/helpers/shap.py b/lightwood/analysis/helpers/shap.py
--- a/lightwood/analysis/helpers/shap.py
+++ b/lightwood/analysis/helpers/shap.py
@@ -29,6 +29,8 @@
     def __init__(self, deps: Optional[Tuple] = ...):
         super().__init__(deps=deps)
         self.label_encoder = LabelEncoder()
+        self.columns = []
+        self.target = None
 
     def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:
         log.info('Preparing to compute feature importance values with SHAP')
@@ -45,9 +47,13 @@
             log.warning(f'ShapleyValues analyzers not supported for type: {output_dtype}')
             return info
 
+        self.target = ns.target
+        self.columns = list(set(ns.dtype_dict.keys()) - {self.target})
+        input_df = train_data.data_frame[self.columns]
+
         def model(x: np.ndarray) -> np.ndarray:
             assert(isinstance(x, np.ndarray))
-            df = pd.DataFrame(data=x, columns=train_data.data_frame.columns)
+            df = pd.DataFrame(data=x, columns=self.columns)
             ds = EncodedDs(encoders=train_data.encoders, data_frame=df, target=train_data.target)
 
             decoded_predictions = ns.predictor(ds=ds, args=PredictionArguments())
@@ -58,7 +64,7 @@
 
             return encoded_predictions
 
-        info['shap_explainer'] = shap.KernelExplainer(model=model, data=train_data.data_frame)
+        info['shap_explainer'] = shap.KernelExplainer(model=model, data=input_df)
 
         return info
 
@@ -76,10 +82,10 @@
 
         with warnings.catch_warnings():
             warnings.filterwarnings("ignore", category=DeprecationWarning)
-            shap_values = shap_explainer.shap_values(ns.data, silent=True)
+            shap_values = shap_explainer.shap_values(ns.data[self.columns], silent=True)
 
         shap_values_df = pd.DataFrame(shap_values).rename(
-            mapper=lambda i: f"feature_{i}_impact", axis='columns')
+            mapper=lambda i: f"shap_contribution_{self.columns[i]}", axis='columns')
 
         if kwargs.get('target_dtype', None) in (dtype.binary, dtype.categorical, dtype.tags):
             predictions = self.label_encoder.transform(row_insights['prediction'])
@@ -87,8 +93,9 @@
             predictions = row_insights['prediction']
 
         base_response = (predictions - shap_values_df.sum(axis='columns')).mean()
-        global_insights['base_response'] = base_response
 
         row_insights = pd.concat([row_insights, shap_values_df], axis='columns')
+        row_insights['shap_base_response'] = base_response
+        row_insights['shap_final_response'] = predictions
 
         return row_insights, global_insights
| {"golden_diff": "diff --git a/lightwood/analysis/helpers/shap.py b/lightwood/analysis/helpers/shap.py\n--- a/lightwood/analysis/helpers/shap.py\n+++ b/lightwood/analysis/helpers/shap.py\n@@ -29,6 +29,8 @@\n def __init__(self, deps: Optional[Tuple] = ...):\n super().__init__(deps=deps)\n self.label_encoder = LabelEncoder()\n+ self.columns = []\n+ self.target = None\n \n def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n log.info('Preparing to compute feature importance values with SHAP')\n@@ -45,9 +47,13 @@\n log.warning(f'ShapleyValues analyzers not supported for type: {output_dtype}')\n return info\n \n+ self.target = ns.target\n+ self.columns = list(set(ns.dtype_dict.keys()) - {self.target})\n+ input_df = train_data.data_frame[self.columns]\n+\n def model(x: np.ndarray) -> np.ndarray:\n assert(isinstance(x, np.ndarray))\n- df = pd.DataFrame(data=x, columns=train_data.data_frame.columns)\n+ df = pd.DataFrame(data=x, columns=self.columns)\n ds = EncodedDs(encoders=train_data.encoders, data_frame=df, target=train_data.target)\n \n decoded_predictions = ns.predictor(ds=ds, args=PredictionArguments())\n@@ -58,7 +64,7 @@\n \n return encoded_predictions\n \n- info['shap_explainer'] = shap.KernelExplainer(model=model, data=train_data.data_frame)\n+ info['shap_explainer'] = shap.KernelExplainer(model=model, data=input_df)\n \n return info\n \n@@ -76,10 +82,10 @@\n \n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n- shap_values = shap_explainer.shap_values(ns.data, silent=True)\n+ shap_values = shap_explainer.shap_values(ns.data[self.columns], silent=True)\n \n shap_values_df = pd.DataFrame(shap_values).rename(\n- mapper=lambda i: f\"feature_{i}_impact\", axis='columns')\n+ mapper=lambda i: f\"shap_contribution_{self.columns[i]}\", axis='columns')\n \n if kwargs.get('target_dtype', None) in (dtype.binary, dtype.categorical, dtype.tags):\n predictions = self.label_encoder.transform(row_insights['prediction'])\n@@ -87,8 +93,9 @@\n predictions = row_insights['prediction']\n \n base_response = (predictions - shap_values_df.sum(axis='columns')).mean()\n- global_insights['base_response'] = base_response\n \n row_insights = pd.concat([row_insights, shap_values_df], axis='columns')\n+ row_insights['shap_base_response'] = base_response\n+ row_insights['shap_final_response'] = predictions\n \n return row_insights, global_insights\n", "issue": "Bug in SHAP analysis block\nTrying out this module, the following error arises with the cloud tutorial for home rentals:\r\n\r\n```python\r\n File \"/tmp/e74830c1ef5914dfafe52537b8b33cd0479265f508bfd6c616614666305205822.py\", line 392, in predict\r\n insights, global_insights = explain(data=data,encoded_data=encoded_data,predictions=df,ts_analysis=None,problem_definition=self.problem_definition,stat_analysis=self.statistical_analysis,runtime_analysis=self.runtime_analyzer,target_name=self.target,target_dtype=self.dtype_dict[self.target],explainer_blocks=self.analysis_blocks,pred_args=self.pred_args)\r\n File \"/MindsDB/lightwood/lightwood/analysis/explain.py\", line 81, in explain\r\n row_insights, global_insights = block.explain(row_insights, global_insights, **kwargs)\r\n File \"/MindsDB/lightwood/lightwood/analysis/helpers/shap.py\", line 79, in explain\r\n shap_values = shap_explainer.shap_values(ns.data, silent=True)\r\n File \"/MindsDB/nenv/lib/python3.8/site-packages/shap/explainers/_kernel.py\", line 186, in shap_values\r\n explanations.append(self.explain(data, **kwargs))\r\n 
File \"/MindsDB/nenv/lib/python3.8/site-packages/shap/explainers/_kernel.py\", line 207, in explain\r\n match_instance_to_data(instance, self.data)\r\n File \"/MindsDB/nenv/lib/python3.8/site-packages/shap/utils/_legacy.py\", line 87, in match_instance_to_data\r\n instance.group_display_values = [instance.x[0, group[0]] if len(group) == 1 else \"\" for group in data.groups]\r\n File \"/MindsDB/nenv/lib/python3.8/site-packages/shap/utils/_legacy.py\", line 87, in <listcomp>\r\n instance.group_display_values = [instance.x[0, group[0]] if len(group) == 1 else \"\" for group in data.groups]\r\nIndexError: index 7 is out of bounds for axis 1 with size 7\r\n\r\n```\n", "before_files": [{"content": "import warnings\nfrom types import SimpleNamespace\nfrom typing import Dict, Optional, Tuple\n\nimport numpy as np\nimport pandas as pd\nfrom lightwood.analysis.base import BaseAnalysisBlock\nfrom lightwood.api import dtype\nfrom lightwood.api.types import PredictionArguments\nfrom lightwood.data.encoded_ds import EncodedDs\nfrom lightwood.helpers.log import log\nfrom sklearn.preprocessing import LabelEncoder\n\nimport shap\n\n\nclass ShapleyValues(BaseAnalysisBlock):\n \"\"\"\n Analysis block that estimates column importance with SHAP (SHapley Additive exPlanations), a game theoretic approach\n to explain the ouput of any machine learning model. SHAP assigns each feature an importance value for a particular\n prediction.\n\n Reference:\n https://shap.readthedocs.io/en/stable/\n https://proceedings.neurips.cc/paper/2017/file/8a20a8621978632d76c43dfd28b67767-Paper.pdf\n \"\"\"\n label_encoder: LabelEncoder\n\n def __init__(self, deps: Optional[Tuple] = ...):\n super().__init__(deps=deps)\n self.label_encoder = LabelEncoder()\n\n def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n log.info('Preparing to compute feature importance values with SHAP')\n ns = SimpleNamespace(**kwargs)\n\n output_dtype = ns.dtype_dict[ns.target]\n train_data: EncodedDs = ns.train_data\n\n if output_dtype in (dtype.integer, dtype.float, dtype.quantity):\n pass\n elif output_dtype in (dtype.binary, dtype.categorical, dtype.tags):\n self.label_encoder.fit(train_data.data_frame[ns.target].values)\n else:\n log.warning(f'ShapleyValues analyzers not supported for type: {output_dtype}')\n return info\n\n def model(x: np.ndarray) -> np.ndarray:\n assert(isinstance(x, np.ndarray))\n df = pd.DataFrame(data=x, columns=train_data.data_frame.columns)\n ds = EncodedDs(encoders=train_data.encoders, data_frame=df, target=train_data.target)\n\n decoded_predictions = ns.predictor(ds=ds, args=PredictionArguments())\n if output_dtype in (dtype.integer, dtype.float, dtype.quantity):\n encoded_predictions = decoded_predictions['prediction'].values\n elif output_dtype in (dtype.binary, dtype.categorical, dtype.tags):\n encoded_predictions = self.label_encoder.transform(decoded_predictions['prediction'].values)\n\n return encoded_predictions\n\n info['shap_explainer'] = shap.KernelExplainer(model=model, data=train_data.data_frame)\n\n return info\n\n def explain(self,\n row_insights: pd.DataFrame,\n global_insights: Dict[str, object],\n **kwargs\n ) -> Tuple[pd.DataFrame, Dict[str, object]]:\n log.info('Computing feature importance values with Kernel SHAP method')\n ns = SimpleNamespace(**kwargs)\n\n shap_explainer = ns.analysis.get('shap_explainer', None)\n if shap_explainer is None:\n return row_insights, global_insights\n\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", 
category=DeprecationWarning)\n shap_values = shap_explainer.shap_values(ns.data, silent=True)\n\n shap_values_df = pd.DataFrame(shap_values).rename(\n mapper=lambda i: f\"feature_{i}_impact\", axis='columns')\n\n if kwargs.get('target_dtype', None) in (dtype.binary, dtype.categorical, dtype.tags):\n predictions = self.label_encoder.transform(row_insights['prediction'])\n else:\n predictions = row_insights['prediction']\n\n base_response = (predictions - shap_values_df.sum(axis='columns')).mean()\n global_insights['base_response'] = base_response\n\n row_insights = pd.concat([row_insights, shap_values_df], axis='columns')\n\n return row_insights, global_insights\n", "path": "lightwood/analysis/helpers/shap.py"}], "after_files": [{"content": "import warnings\nfrom types import SimpleNamespace\nfrom typing import Dict, Optional, Tuple\n\nimport numpy as np\nimport pandas as pd\nfrom lightwood.analysis.base import BaseAnalysisBlock\nfrom lightwood.api import dtype\nfrom lightwood.api.types import PredictionArguments\nfrom lightwood.data.encoded_ds import EncodedDs\nfrom lightwood.helpers.log import log\nfrom sklearn.preprocessing import LabelEncoder\n\nimport shap\n\n\nclass ShapleyValues(BaseAnalysisBlock):\n \"\"\"\n Analysis block that estimates column importance with SHAP (SHapley Additive exPlanations), a game theoretic approach\n to explain the ouput of any machine learning model. SHAP assigns each feature an importance value for a particular\n prediction.\n\n Reference:\n https://shap.readthedocs.io/en/stable/\n https://proceedings.neurips.cc/paper/2017/file/8a20a8621978632d76c43dfd28b67767-Paper.pdf\n \"\"\"\n label_encoder: LabelEncoder\n\n def __init__(self, deps: Optional[Tuple] = ...):\n super().__init__(deps=deps)\n self.label_encoder = LabelEncoder()\n self.columns = []\n self.target = None\n\n def analyze(self, info: Dict[str, object], **kwargs) -> Dict[str, object]:\n log.info('Preparing to compute feature importance values with SHAP')\n ns = SimpleNamespace(**kwargs)\n\n output_dtype = ns.dtype_dict[ns.target]\n train_data: EncodedDs = ns.train_data\n\n if output_dtype in (dtype.integer, dtype.float, dtype.quantity):\n pass\n elif output_dtype in (dtype.binary, dtype.categorical, dtype.tags):\n self.label_encoder.fit(train_data.data_frame[ns.target].values)\n else:\n log.warning(f'ShapleyValues analyzers not supported for type: {output_dtype}')\n return info\n\n self.target = ns.target\n self.columns = list(set(ns.dtype_dict.keys()) - {self.target})\n input_df = train_data.data_frame[self.columns]\n\n def model(x: np.ndarray) -> np.ndarray:\n assert(isinstance(x, np.ndarray))\n df = pd.DataFrame(data=x, columns=self.columns)\n ds = EncodedDs(encoders=train_data.encoders, data_frame=df, target=train_data.target)\n\n decoded_predictions = ns.predictor(ds=ds, args=PredictionArguments())\n if output_dtype in (dtype.integer, dtype.float, dtype.quantity):\n encoded_predictions = decoded_predictions['prediction'].values\n elif output_dtype in (dtype.binary, dtype.categorical, dtype.tags):\n encoded_predictions = self.label_encoder.transform(decoded_predictions['prediction'].values)\n\n return encoded_predictions\n\n info['shap_explainer'] = shap.KernelExplainer(model=model, data=input_df)\n\n return info\n\n def explain(self,\n row_insights: pd.DataFrame,\n global_insights: Dict[str, object],\n **kwargs\n ) -> Tuple[pd.DataFrame, Dict[str, object]]:\n log.info('Computing feature importance values with Kernel SHAP method')\n ns = SimpleNamespace(**kwargs)\n\n shap_explainer = 
ns.analysis.get('shap_explainer', None)\n if shap_explainer is None:\n return row_insights, global_insights\n\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n shap_values = shap_explainer.shap_values(ns.data[self.columns], silent=True)\n\n shap_values_df = pd.DataFrame(shap_values).rename(\n mapper=lambda i: f\"shap_contribution_{self.columns[i]}\", axis='columns')\n\n if kwargs.get('target_dtype', None) in (dtype.binary, dtype.categorical, dtype.tags):\n predictions = self.label_encoder.transform(row_insights['prediction'])\n else:\n predictions = row_insights['prediction']\n\n base_response = (predictions - shap_values_df.sum(axis='columns')).mean()\n\n row_insights = pd.concat([row_insights, shap_values_df], axis='columns')\n row_insights['shap_base_response'] = base_response\n row_insights['shap_final_response'] = predictions\n\n return row_insights, global_insights\n", "path": "lightwood/analysis/helpers/shap.py"}]} | 1,807 | 653 |
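The SHAP patch above comes down to one invariant: the background frame handed to `shap.KernelExplainer` and the frame later passed to `shap_values` must carry exactly the same feature columns, with the target excluded. A hedged sketch of that invariant follows; `predict_fn` and the column names are hypothetical, not lightwood's API:

```python
import pandas as pd
import shap


def build_explainer(train_df: pd.DataFrame, target: str, predict_fn):
    feature_cols = [c for c in train_df.columns if c != target]

    def model(x):
        # Rebuild a frame with the same columns the explainer was fit on.
        return predict_fn(pd.DataFrame(x, columns=feature_cols))

    explainer = shap.KernelExplainer(model=model, data=train_df[feature_cols])
    return explainer, feature_cols


# At explain time, select the identical column list again, e.g.:
# explainer, cols = build_explainer(df, "rental_price", predict_fn)
# shap_values = explainer.shap_values(new_df[cols], silent=True)
```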
gh_patches_debug_3901 | rasdani/github-patches | git_diff | carpentries__amy-646 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
API: don't return todos with unknown start
This breaks the timeline.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `api/views.py`
Content:
```
1 import datetime
2
3 from django.db.models import Q
4 from rest_framework.generics import ListAPIView
5 from rest_framework.metadata import SimpleMetadata
6 from rest_framework.permissions import (
7 IsAuthenticatedOrReadOnly, IsAuthenticated
8 )
9 from rest_framework.response import Response
10 from rest_framework.reverse import reverse
11 from rest_framework.views import APIView
12
13 from workshops.models import Badge, Airport, Event, TodoItem, Tag
14 from workshops.util import get_members, default_membership_cutoff
15
16 from .serializers import (
17 PersonNameEmailSerializer,
18 ExportBadgesSerializer,
19 ExportInstructorLocationsSerializer,
20 EventSerializer,
21 TodoSerializer,
22 )
23
24
25 class QueryMetadata(SimpleMetadata):
26 """Additionally include info about query parameters."""
27
28 def determine_metadata(self, request, view):
29 data = super().determine_metadata(request, view)
30
31 try:
32 data['query_params'] = view.get_query_params_description()
33 except AttributeError:
34 pass
35
36 return data
37
38
39 class ApiRoot(APIView):
40 def get(self, request, format=None):
41 return Response({
42 'export-badges': reverse('api:export-badges', request=request,
43 format=format),
44 'export-instructors': reverse('api:export-instructors',
45 request=request, format=format),
46 'export-members': reverse('api:export-members', request=request,
47 format=format),
48 'events-published': reverse('api:events-published',
49 request=request, format=format),
50 'user-todos': reverse('api:user-todos',
51 request=request, format=format),
52 })
53
54
55 class ExportBadgesView(ListAPIView):
56 """List all badges and people who have them."""
57 permission_classes = (IsAuthenticatedOrReadOnly, )
58 paginator = None # disable pagination
59
60 queryset = Badge.objects.prefetch_related('person_set')
61 serializer_class = ExportBadgesSerializer
62
63
64 class ExportInstructorLocationsView(ListAPIView):
65 """List all airports and instructors located near them."""
66 permission_classes = (IsAuthenticatedOrReadOnly, )
67 paginator = None # disable pagination
68
69 queryset = Airport.objects.exclude(person=None) \
70 .prefetch_related('person_set')
71 serializer_class = ExportInstructorLocationsSerializer
72
73
74 class ExportMembersView(ListAPIView):
75 """Show everyone who qualifies as an SCF member."""
76 permission_classes = (IsAuthenticatedOrReadOnly, )
77 paginator = None # disable pagination
78
79 serializer_class = PersonNameEmailSerializer
80
81 def get_queryset(self):
82 earliest_default, latest_default = default_membership_cutoff()
83
84 earliest = self.request.query_params.get('earliest', None)
85 if earliest is not None:
86 try:
87 earliest = datetime.datetime.strptime(earliest, '%Y-%m-%d') \
88 .date()
89 except ValueError:
90 earliest = earliest_default
91 else:
92 earliest = earliest_default
93
94 latest = self.request.query_params.get('latest', None)
95 if latest is not None:
96 try:
97 latest = datetime.datetime.strptime(latest, '%Y-%m-%d').date()
98 except ValueError:
99 latest = latest_default
100 else:
101 latest = latest_default
102
103 return get_members(earliest, latest)
104
105 def get_query_params_description(self):
106 return {
107 'earliest': 'Date of earliest workshop someone taught at.'
108 ' Defaults to -2*365 days from current date.',
109 'latest': 'Date of latest workshop someone taught at.'
110 ' Defaults to current date.',
111 }
112
113
114 class PublishedEvents(ListAPIView):
115 """List published events."""
116
117 # only events that have both a starting date and a URL
118 permission_classes = (IsAuthenticatedOrReadOnly, )
119 paginator = None # disable pagination
120
121 serializer_class = EventSerializer
122
123 metadata_class = QueryMetadata
124
125 def get_queryset(self):
126 """Optionally restrict the returned event set to events hosted by
127 specific host or administered by specific admin."""
128 queryset = Event.objects.published_events()
129
130 administrator = self.request.query_params.get('administrator', None)
131 if administrator is not None:
132 queryset = queryset.filter(administrator__pk=administrator)
133
134 host = self.request.query_params.get('host', None)
135 if host is not None:
136 queryset = queryset.filter(host__pk=host)
137
138 tags = self.request.query_params.getlist('tag', None)
139 if tags:
140 tags = Tag.objects.filter(name__in=tags)
141 for tag in tags:
142 queryset = queryset.filter(tags=tag)
143
144 return queryset
145
146 def get_query_params_description(self):
147 return {
148 'administrator': 'ID of the organization responsible for admin '
149 'work on events.',
150 'host': 'ID of the organization hosting the event.',
151 'tag': "Events' tag(s). You can use this parameter multiple "
152 "times.",
153 }
154
155
156 class UserTodoItems(ListAPIView):
157 permission_classes = (IsAuthenticated, )
158 paginator = None
159 serializer_class = TodoSerializer
160
161 def get_queryset(self):
162 """Return current TODOs for currently logged in user."""
163 return TodoItem.objects.user(self.request.user) \
164 .incomplete() \
165 .select_related('event')
166
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/api/views.py b/api/views.py
--- a/api/views.py
+++ b/api/views.py
@@ -162,4 +162,5 @@
"""Return current TODOs for currently logged in user."""
return TodoItem.objects.user(self.request.user) \
.incomplete() \
+ .exclude(due=None) \
.select_related('event')
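The one new clause, `.exclude(due=None)`, is what drops todos whose `due` date is unset; in Django's ORM it compiles to SQL `WHERE due IS NOT NULL`. A short sketch of the equivalent spellings, assuming only that `TodoItem` has a nullable `due` field (which the patch implies); note this needs the project's Django settings to actually run:

```python
from workshops.models import TodoItem  # import path used by the view above


def todos_with_known_start(user):
    # user() and incomplete() are the custom queryset methods the view chains.
    qs = TodoItem.objects.user(user).incomplete()
    # Two equivalent ways to drop rows whose date is unset; both render
    # as SQL "WHERE due IS NOT NULL":
    return qs.exclude(due=None)            # the spelling the patch uses
    # return qs.filter(due__isnull=False)  # same result, different idiom
```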
| {"golden_diff": "diff --git a/api/views.py b/api/views.py\n--- a/api/views.py\n+++ b/api/views.py\n@@ -162,4 +162,5 @@\n \"\"\"Return current TODOs for currently logged in user.\"\"\"\n return TodoItem.objects.user(self.request.user) \\\n .incomplete() \\\n+ .exclude(due=None) \\\n .select_related('event')\n", "issue": "API: don't return todos with unknown start\nThis breaks the timeline.\n\n", "before_files": [{"content": "import datetime\n\nfrom django.db.models import Q\nfrom rest_framework.generics import ListAPIView\nfrom rest_framework.metadata import SimpleMetadata\nfrom rest_framework.permissions import (\n IsAuthenticatedOrReadOnly, IsAuthenticated\n)\nfrom rest_framework.response import Response\nfrom rest_framework.reverse import reverse\nfrom rest_framework.views import APIView\n\nfrom workshops.models import Badge, Airport, Event, TodoItem, Tag\nfrom workshops.util import get_members, default_membership_cutoff\n\nfrom .serializers import (\n PersonNameEmailSerializer,\n ExportBadgesSerializer,\n ExportInstructorLocationsSerializer,\n EventSerializer,\n TodoSerializer,\n)\n\n\nclass QueryMetadata(SimpleMetadata):\n \"\"\"Additionally include info about query parameters.\"\"\"\n\n def determine_metadata(self, request, view):\n data = super().determine_metadata(request, view)\n\n try:\n data['query_params'] = view.get_query_params_description()\n except AttributeError:\n pass\n\n return data\n\n\nclass ApiRoot(APIView):\n def get(self, request, format=None):\n return Response({\n 'export-badges': reverse('api:export-badges', request=request,\n format=format),\n 'export-instructors': reverse('api:export-instructors',\n request=request, format=format),\n 'export-members': reverse('api:export-members', request=request,\n format=format),\n 'events-published': reverse('api:events-published',\n request=request, format=format),\n 'user-todos': reverse('api:user-todos',\n request=request, format=format),\n })\n\n\nclass ExportBadgesView(ListAPIView):\n \"\"\"List all badges and people who have them.\"\"\"\n permission_classes = (IsAuthenticatedOrReadOnly, )\n paginator = None # disable pagination\n\n queryset = Badge.objects.prefetch_related('person_set')\n serializer_class = ExportBadgesSerializer\n\n\nclass ExportInstructorLocationsView(ListAPIView):\n \"\"\"List all airports and instructors located near them.\"\"\"\n permission_classes = (IsAuthenticatedOrReadOnly, )\n paginator = None # disable pagination\n\n queryset = Airport.objects.exclude(person=None) \\\n .prefetch_related('person_set')\n serializer_class = ExportInstructorLocationsSerializer\n\n\nclass ExportMembersView(ListAPIView):\n \"\"\"Show everyone who qualifies as an SCF member.\"\"\"\n permission_classes = (IsAuthenticatedOrReadOnly, )\n paginator = None # disable pagination\n\n serializer_class = PersonNameEmailSerializer\n\n def get_queryset(self):\n earliest_default, latest_default = default_membership_cutoff()\n\n earliest = self.request.query_params.get('earliest', None)\n if earliest is not None:\n try:\n earliest = datetime.datetime.strptime(earliest, '%Y-%m-%d') \\\n .date()\n except ValueError:\n earliest = earliest_default\n else:\n earliest = earliest_default\n\n latest = self.request.query_params.get('latest', None)\n if latest is not None:\n try:\n latest = datetime.datetime.strptime(latest, '%Y-%m-%d').date()\n except ValueError:\n latest = latest_default\n else:\n latest = latest_default\n\n return get_members(earliest, latest)\n\n def get_query_params_description(self):\n return {\n 'earliest': 'Date 
of earliest workshop someone taught at.'\n ' Defaults to -2*365 days from current date.',\n 'latest': 'Date of latest workshop someone taught at.'\n ' Defaults to current date.',\n }\n\n\nclass PublishedEvents(ListAPIView):\n \"\"\"List published events.\"\"\"\n\n # only events that have both a starting date and a URL\n permission_classes = (IsAuthenticatedOrReadOnly, )\n paginator = None # disable pagination\n\n serializer_class = EventSerializer\n\n metadata_class = QueryMetadata\n\n def get_queryset(self):\n \"\"\"Optionally restrict the returned event set to events hosted by\n specific host or administered by specific admin.\"\"\"\n queryset = Event.objects.published_events()\n\n administrator = self.request.query_params.get('administrator', None)\n if administrator is not None:\n queryset = queryset.filter(administrator__pk=administrator)\n\n host = self.request.query_params.get('host', None)\n if host is not None:\n queryset = queryset.filter(host__pk=host)\n\n tags = self.request.query_params.getlist('tag', None)\n if tags:\n tags = Tag.objects.filter(name__in=tags)\n for tag in tags:\n queryset = queryset.filter(tags=tag)\n\n return queryset\n\n def get_query_params_description(self):\n return {\n 'administrator': 'ID of the organization responsible for admin '\n 'work on events.',\n 'host': 'ID of the organization hosting the event.',\n 'tag': \"Events' tag(s). You can use this parameter multiple \"\n \"times.\",\n }\n\n\nclass UserTodoItems(ListAPIView):\n permission_classes = (IsAuthenticated, )\n paginator = None\n serializer_class = TodoSerializer\n\n def get_queryset(self):\n \"\"\"Return current TODOs for currently logged in user.\"\"\"\n return TodoItem.objects.user(self.request.user) \\\n .incomplete() \\\n .select_related('event')\n", "path": "api/views.py"}], "after_files": [{"content": "import datetime\n\nfrom django.db.models import Q\nfrom rest_framework.generics import ListAPIView\nfrom rest_framework.metadata import SimpleMetadata\nfrom rest_framework.permissions import (\n IsAuthenticatedOrReadOnly, IsAuthenticated\n)\nfrom rest_framework.response import Response\nfrom rest_framework.reverse import reverse\nfrom rest_framework.views import APIView\n\nfrom workshops.models import Badge, Airport, Event, TodoItem, Tag\nfrom workshops.util import get_members, default_membership_cutoff\n\nfrom .serializers import (\n PersonNameEmailSerializer,\n ExportBadgesSerializer,\n ExportInstructorLocationsSerializer,\n EventSerializer,\n TodoSerializer,\n)\n\n\nclass QueryMetadata(SimpleMetadata):\n \"\"\"Additionally include info about query parameters.\"\"\"\n\n def determine_metadata(self, request, view):\n data = super().determine_metadata(request, view)\n\n try:\n data['query_params'] = view.get_query_params_description()\n except AttributeError:\n pass\n\n return data\n\n\nclass ApiRoot(APIView):\n def get(self, request, format=None):\n return Response({\n 'export-badges': reverse('api:export-badges', request=request,\n format=format),\n 'export-instructors': reverse('api:export-instructors',\n request=request, format=format),\n 'export-members': reverse('api:export-members', request=request,\n format=format),\n 'events-published': reverse('api:events-published',\n request=request, format=format),\n 'user-todos': reverse('api:user-todos',\n request=request, format=format),\n })\n\n\nclass ExportBadgesView(ListAPIView):\n \"\"\"List all badges and people who have them.\"\"\"\n permission_classes = (IsAuthenticatedOrReadOnly, )\n paginator = None # disable pagination\n\n 
queryset = Badge.objects.prefetch_related('person_set')\n serializer_class = ExportBadgesSerializer\n\n\nclass ExportInstructorLocationsView(ListAPIView):\n \"\"\"List all airports and instructors located near them.\"\"\"\n permission_classes = (IsAuthenticatedOrReadOnly, )\n paginator = None # disable pagination\n\n queryset = Airport.objects.exclude(person=None) \\\n .prefetch_related('person_set')\n serializer_class = ExportInstructorLocationsSerializer\n\n\nclass ExportMembersView(ListAPIView):\n \"\"\"Show everyone who qualifies as an SCF member.\"\"\"\n permission_classes = (IsAuthenticatedOrReadOnly, )\n paginator = None # disable pagination\n\n serializer_class = PersonNameEmailSerializer\n\n def get_queryset(self):\n earliest_default, latest_default = default_membership_cutoff()\n\n earliest = self.request.query_params.get('earliest', None)\n if earliest is not None:\n try:\n earliest = datetime.datetime.strptime(earliest, '%Y-%m-%d') \\\n .date()\n except ValueError:\n earliest = earliest_default\n else:\n earliest = earliest_default\n\n latest = self.request.query_params.get('latest', None)\n if latest is not None:\n try:\n latest = datetime.datetime.strptime(latest, '%Y-%m-%d').date()\n except ValueError:\n latest = latest_default\n else:\n latest = latest_default\n\n return get_members(earliest, latest)\n\n def get_query_params_description(self):\n return {\n 'earliest': 'Date of earliest workshop someone taught at.'\n ' Defaults to -2*365 days from current date.',\n 'latest': 'Date of latest workshop someone taught at.'\n ' Defaults to current date.',\n }\n\n\nclass PublishedEvents(ListAPIView):\n \"\"\"List published events.\"\"\"\n\n # only events that have both a starting date and a URL\n permission_classes = (IsAuthenticatedOrReadOnly, )\n paginator = None # disable pagination\n\n serializer_class = EventSerializer\n\n metadata_class = QueryMetadata\n\n def get_queryset(self):\n \"\"\"Optionally restrict the returned event set to events hosted by\n specific host or administered by specific admin.\"\"\"\n queryset = Event.objects.published_events()\n\n administrator = self.request.query_params.get('administrator', None)\n if administrator is not None:\n queryset = queryset.filter(administrator__pk=administrator)\n\n host = self.request.query_params.get('host', None)\n if host is not None:\n queryset = queryset.filter(host__pk=host)\n\n tags = self.request.query_params.getlist('tag', None)\n if tags:\n tags = Tag.objects.filter(name__in=tags)\n for tag in tags:\n queryset = queryset.filter(tags=tag)\n\n return queryset\n\n def get_query_params_description(self):\n return {\n 'administrator': 'ID of the organization responsible for admin '\n 'work on events.',\n 'host': 'ID of the organization hosting the event.',\n 'tag': \"Events' tag(s). You can use this parameter multiple \"\n \"times.\",\n }\n\n\nclass UserTodoItems(ListAPIView):\n permission_classes = (IsAuthenticated, )\n paginator = None\n serializer_class = TodoSerializer\n\n def get_queryset(self):\n \"\"\"Return current TODOs for currently logged in user.\"\"\"\n return TodoItem.objects.user(self.request.user) \\\n .incomplete() \\\n .exclude(due=None) \\\n .select_related('event')\n", "path": "api/views.py"}]} | 1,754 | 83 |
gh_patches_debug_10225 | rasdani/github-patches | git_diff | wagtail__wagtail-822 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Unicode content in rich text causes error
Steps to reproduce:
1. Using the wagtail interface, create a new instance of a page derived model which has a rich text field
2. In the rich text field, include unicode characters such as: `©` or `’`
3. Publish the page
The page will be published fine, and probably rendered normally through a template.
4. Return to the wagtail admin, and edit the newly created page
  The rich text field is not rendered on the editing page. Inspecting the HTML shows that wagtail attempts to render the field (its heading is present), but the field itself never appears.
If you attempt to publish this new page, the form will submit a page with no content in the rich text field.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wagtail/utils/widgets.py`
Content:
```
1 from django.forms.widgets import Widget
2 from django.utils.safestring import mark_safe
3
4
5 class WidgetWithScript(Widget):
6 def render(self, name, value, attrs=None):
7 widget = super(WidgetWithScript, self).render(name, value, attrs)
8
9 final_attrs = self.build_attrs(attrs, name=name)
10 id_ = final_attrs.get('id', None)
11 if 'id_' is None:
12 return widget
13
14 js = self.render_js_init(id_, name, value)
15 out = '{0}<script>{1}</script>'.format(widget, js)
16 return mark_safe(out)
17
18 def render_js_init(self, id_, name, value):
19 return ''
20
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/wagtail/utils/widgets.py b/wagtail/utils/widgets.py
--- a/wagtail/utils/widgets.py
+++ b/wagtail/utils/widgets.py
@@ -1,3 +1,5 @@
+from __future__ import absolute_import, unicode_literals
+
from django.forms.widgets import Widget
from django.utils.safestring import mark_safe
@@ -8,7 +10,7 @@
final_attrs = self.build_attrs(attrs, name=name)
id_ = final_attrs.get('id', None)
- if 'id_' is None:
+ if id_ is None:
return widget
js = self.render_js_init(id_, name, value)
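The second hunk is the functional fix: `'id_' is None` compares the string literal `'id_'` to `None`, which is always false, so the early return was dead code. The `unicode_literals` import in the first hunk makes the `'{0}<script>{1}</script>'` template a unicode string on Python 2, so interpolating widget HTML that contains characters like `©` no longer raises `UnicodeEncodeError`, which is plausibly why the field silently failed to render. A self-contained sketch of both pitfalls:

```python
from __future__ import unicode_literals  # a no-op on Python 3, crucial on 2

attrs = {'class': 'richtext'}  # a widget rendered without an id attribute
id_ = attrs.get('id', None)

# Broken guard: a non-empty string literal is never None, so this was
# always False (CPython >= 3.8 even emits a SyntaxWarning for it):
#     if 'id_' is None: ...
# Fixed guard, comparing the variable itself:
assert id_ is None

# Unicode half: with unicode_literals the template below is unicode, so
# on Python 2 formatting in markup containing u'\u00a9' works instead of
# raising UnicodeEncodeError from a byte-string template.
out = '{0}<script>{1}</script>'.format('<input value="\u00a9">', 'init();')
print(out)
```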
| {"golden_diff": "diff --git a/wagtail/utils/widgets.py b/wagtail/utils/widgets.py\n--- a/wagtail/utils/widgets.py\n+++ b/wagtail/utils/widgets.py\n@@ -1,3 +1,5 @@\n+from __future__ import absolute_import, unicode_literals\n+\n from django.forms.widgets import Widget\n from django.utils.safestring import mark_safe\n \n@@ -8,7 +10,7 @@\n \n final_attrs = self.build_attrs(attrs, name=name)\n id_ = final_attrs.get('id', None)\n- if 'id_' is None:\n+ if id_ is None:\n return widget\n \n js = self.render_js_init(id_, name, value)\n", "issue": "Unicode content in rich text causes error\nSteps to reproduce:\n1. Using the wagtail interface, create a new instance of a page derived model which has a rich text field\n2. In the rich text field, include unicode characters such as: `\u00a9` or `\u2019`\n3. Publish the page\n \n The page will be published fine, and probably rendered normally through a template.\n4. Return to the wagtail admin, and edit the newly created page\n \n The rich text field is not rendered in the editing page. Inspecting the html shows that wagtail is attempting to render the field, its heading is there. But the field is not rendered.\n \n If you attempt to publish this new page, the form will submit a page with no content in the rich text field.\n\n", "before_files": [{"content": "from django.forms.widgets import Widget\nfrom django.utils.safestring import mark_safe\n\n\nclass WidgetWithScript(Widget):\n def render(self, name, value, attrs=None):\n widget = super(WidgetWithScript, self).render(name, value, attrs)\n\n final_attrs = self.build_attrs(attrs, name=name)\n id_ = final_attrs.get('id', None)\n if 'id_' is None:\n return widget\n\n js = self.render_js_init(id_, name, value)\n out = '{0}<script>{1}</script>'.format(widget, js)\n return mark_safe(out)\n\n def render_js_init(self, id_, name, value):\n return ''\n", "path": "wagtail/utils/widgets.py"}], "after_files": [{"content": "from __future__ import absolute_import, unicode_literals\n\nfrom django.forms.widgets import Widget\nfrom django.utils.safestring import mark_safe\n\n\nclass WidgetWithScript(Widget):\n def render(self, name, value, attrs=None):\n widget = super(WidgetWithScript, self).render(name, value, attrs)\n\n final_attrs = self.build_attrs(attrs, name=name)\n id_ = final_attrs.get('id', None)\n if id_ is None:\n return widget\n\n js = self.render_js_init(id_, name, value)\n out = '{0}<script>{1}</script>'.format(widget, js)\n return mark_safe(out)\n\n def render_js_init(self, id_, name, value):\n return ''\n", "path": "wagtail/utils/widgets.py"}]} | 600 | 146 |
gh_patches_debug_4403 | rasdani/github-patches | git_diff | learningequality__kolibri-5037 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Facing error while kolibri 0.12.0 deb file installation
### Observed behavior
After running the command below, it shows an error:
**sudo dpkg -i kolibri_0.12.0b4-0ubuntu1_all.deb**
The file was downloaded from https://github.com/learningequality/kolibri/releases.

### Context
Kolibri version : Kolibri 0.12.0
Operating system : Ubuntu 14.04
### Screenshots:


--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kolibri/__init__.py`
Content:
```
1 """
2 CAUTION! Keep everything here at at minimum. Do not import stuff.
3 This module is imported in setup.py, so you cannot for instance
4 import a dependency.
5 """
6 from __future__ import absolute_import
7 from __future__ import print_function
8 from __future__ import unicode_literals
9
10 from .utils import env
11 from .utils.version import get_version
12
13 # Setup the environment before loading anything else from the application
14 env.set_env()
15
16 #: This may not be the exact version as it's subject to modification with
17 #: get_version() - use ``kolibri.__version__`` for the exact version string.
18 VERSION = (0, 12, 0, 'alpha', 0)
19
20 __author__ = 'Learning Equality'
21 __email__ = '[email protected]'
22 __version__ = str(get_version(VERSION))
23
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kolibri/__init__.py b/kolibri/__init__.py
--- a/kolibri/__init__.py
+++ b/kolibri/__init__.py
@@ -15,7 +15,7 @@
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
-VERSION = (0, 12, 0, 'alpha', 0)
+VERSION = (0, 12, 0, 'beta', 0)
__author__ = 'Learning Equality'
__email__ = '[email protected]'
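The one-line change matters because `kolibri.__version__` is derived from this tuple via `get_version()`, so a tuple still marked `'alpha'` produces a version string that disagrees with the `0.12.0b4` (beta) Debian package being installed. A rough sketch of the Django-style tuple-to-string mapping such helpers typically implement; the real one lives in `kolibri.utils.version` and may add dev/git metadata:

```python
def get_version(version):
    # Illustrative stand-in for kolibri.utils.version.get_version.
    major, minor, patch, release, serial = version
    base = '{}.{}.{}'.format(major, minor, patch)
    if release == 'final':
        return base
    # PEP 440 pre-release letters, Django-style.
    return base + {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}[release] + str(serial)


assert get_version((0, 12, 0, 'alpha', 0)) == '0.12.0a0'
assert get_version((0, 12, 0, 'beta', 0)) == '0.12.0b0'  # matches 0.12.0bN debs
```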
| {"golden_diff": "diff --git a/kolibri/__init__.py b/kolibri/__init__.py\n--- a/kolibri/__init__.py\n+++ b/kolibri/__init__.py\n@@ -15,7 +15,7 @@\n \n #: This may not be the exact version as it's subject to modification with\n #: get_version() - use ``kolibri.__version__`` for the exact version string.\n-VERSION = (0, 12, 0, 'alpha', 0)\n+VERSION = (0, 12, 0, 'beta', 0)\n \n __author__ = 'Learning Equality'\n __email__ = '[email protected]'\n", "issue": "Facing error while kolibri 0.12.0 deb file installation\n### Observed behavior\r\nAfter running below command it shows error:\r\n**sudo dpkg -i kolibri_0.12.0b4-0ubuntu1_all.deb**\r\n\r\nFile downloaded from https://github.com/learningequality/kolibri/releases.\r\n\r\n\r\n\r\n### Context\r\nKolibri version : Kolibri 0.12.0\r\nOperating system : Ubuntu 14.04\r\n\r\n### Screenshots:\r\n\r\n\r\n\n", "before_files": [{"content": "\"\"\"\nCAUTION! Keep everything here at at minimum. Do not import stuff.\nThis module is imported in setup.py, so you cannot for instance\nimport a dependency.\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nfrom .utils import env\nfrom .utils.version import get_version\n\n# Setup the environment before loading anything else from the application\nenv.set_env()\n\n#: This may not be the exact version as it's subject to modification with\n#: get_version() - use ``kolibri.__version__`` for the exact version string.\nVERSION = (0, 12, 0, 'alpha', 0)\n\n__author__ = 'Learning Equality'\n__email__ = '[email protected]'\n__version__ = str(get_version(VERSION))\n", "path": "kolibri/__init__.py"}], "after_files": [{"content": "\"\"\"\nCAUTION! Keep everything here at at minimum. Do not import stuff.\nThis module is imported in setup.py, so you cannot for instance\nimport a dependency.\n\"\"\"\nfrom __future__ import absolute_import\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nfrom .utils import env\nfrom .utils.version import get_version\n\n# Setup the environment before loading anything else from the application\nenv.set_env()\n\n#: This may not be the exact version as it's subject to modification with\n#: get_version() - use ``kolibri.__version__`` for the exact version string.\nVERSION = (0, 12, 0, 'beta', 0)\n\n__author__ = 'Learning Equality'\n__email__ = '[email protected]'\n__version__ = str(get_version(VERSION))\n", "path": "kolibri/__init__.py"}]} | 766 | 146 |
gh_patches_debug_21066 | rasdani/github-patches | git_diff | googleapis__google-auth-library-python-1428 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
deprecation notice for 3.7 modifies global state (user warning filters) as import side-effect
It is impossible to filter Python37DeprecationWarning after PR https://github.com/googleapis/google-auth-library-python/pull/1371.
Custom libraries should not configure warning filters, because that is the user project's global state. Most of the time you cannot modify the import order and insert new warning filters after your library has modified them.
#### Environment details
- OS: Ubuntu 22.04.3 LTS linux 5.15.0-89-generic
- Python version: 3.7.17
- pip version: 23.3.1
- `google-auth` version: 2.24.0
#### Steps to reproduce
1. install google-auth into your python3.7 project
2. configure filterwarning rule `ignore::DeprecationWarning` in pytest.ini
3. use google.auth or google.oauth2 somewhere in your project
4. run pytest
 5. get a Python37DeprecationWarning that you cannot filter
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `google/oauth2/__init__.py`
Content:
```
1 # Copyright 2016 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Google OAuth 2.0 Library for Python."""
16
17 import sys
18 import warnings
19
20
21 class Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER
22 """
23 Deprecation warning raised when Python 3.7 runtime is detected.
24 Python 3.7 support will be dropped after January 1, 2024. See
25 https://cloud.google.com/python/docs/python37-sunset/ for more information.
26 """
27
28 pass
29
30
31 # Checks if the current runtime is Python 3.7.
32 if sys.version_info.major == 3 and sys.version_info.minor == 7: # pragma: NO COVER
33 message = (
34 "After January 1, 2024, new releases of this library will drop support "
35 "for Python 3.7. More details about Python 3.7 support "
36 "can be found at https://cloud.google.com/python/docs/python37-sunset/"
37 )
38 # Configure the Python37DeprecationWarning warning so that it is only emitted once.
39 warnings.simplefilter("once", Python37DeprecationWarning)
40 warnings.warn(message, Python37DeprecationWarning)
41
```
Path: `google/auth/__init__.py`
Content:
```
1 # Copyright 2016 Google LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Google Auth Library for Python."""
16
17 import logging
18 import sys
19 import warnings
20
21 from google.auth import version as google_auth_version
22 from google.auth._default import (
23 default,
24 load_credentials_from_dict,
25 load_credentials_from_file,
26 )
27
28
29 __version__ = google_auth_version.__version__
30
31
32 __all__ = ["default", "load_credentials_from_file", "load_credentials_from_dict"]
33
34
35 class Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER
36 """
37 Deprecation warning raised when Python 3.7 runtime is detected.
38 Python 3.7 support will be dropped after January 1, 2024. See
39 https://cloud.google.com/python/docs/python37-sunset/ for more information.
40 """
41
42 pass
43
44
45 # Checks if the current runtime is Python 3.7.
46 if sys.version_info.major == 3 and sys.version_info.minor == 7: # pragma: NO COVER
47 message = (
48 "After January 1, 2024, new releases of this library will drop support "
49 "for Python 3.7. More details about Python 3.7 support "
50 "can be found at https://cloud.google.com/python/docs/python37-sunset/"
51 )
52
53 # Configure the Python37DeprecationWarning warning so that it is only emitted once.
54 warnings.simplefilter("once", Python37DeprecationWarning)
55 warnings.warn(message, Python37DeprecationWarning)
56
57 # Set default logging handler to avoid "No handler found" warnings.
58 logging.getLogger(__name__).addHandler(logging.NullHandler())
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/google/auth/__init__.py b/google/auth/__init__.py
--- a/google/auth/__init__.py
+++ b/google/auth/__init__.py
@@ -49,9 +49,6 @@
"for Python 3.7. More details about Python 3.7 support "
"can be found at https://cloud.google.com/python/docs/python37-sunset/"
)
-
- # Configure the Python37DeprecationWarning warning so that it is only emitted once.
- warnings.simplefilter("once", Python37DeprecationWarning)
warnings.warn(message, Python37DeprecationWarning)
# Set default logging handler to avoid "No handler found" warnings.
diff --git a/google/oauth2/__init__.py b/google/oauth2/__init__.py
--- a/google/oauth2/__init__.py
+++ b/google/oauth2/__init__.py
@@ -35,6 +35,4 @@
"for Python 3.7. More details about Python 3.7 support "
"can be found at https://cloud.google.com/python/docs/python37-sunset/"
)
- # Configure the Python37DeprecationWarning warning so that it is only emitted once.
- warnings.simplefilter("once", Python37DeprecationWarning)
warnings.warn(message, Python37DeprecationWarning)
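Dropping the `simplefilter` call is the substantive part of both hunks: `warnings.simplefilter()` prepends an entry to the process-wide filter list, and filters are matched front to back, so a library running it at import time outranks whatever the application (or pytest's `filterwarnings = ignore::DeprecationWarning` ini option) configured earlier. A standalone demonstration of the override, independent of google-auth:

```python
import warnings


class Python37DeprecationWarning(DeprecationWarning):
    pass


with warnings.catch_warnings(record=True) as caught:
    # The application (or pytest.ini) asks for silence first...
    warnings.filterwarnings('ignore', category=DeprecationWarning)
    # ...then a library import prepends its own, higher-priority rule:
    warnings.simplefilter('once', Python37DeprecationWarning)

    warnings.warn('sunsetting 3.7', Python37DeprecationWarning)

# The user's 'ignore' lost: the warning was emitted anyway.
assert len(caught) == 1
assert caught[0].category is Python37DeprecationWarning
```

With the patch, only `warnings.warn()` remains, so a plain `ignore::DeprecationWarning` filter set by the application suppresses it as expected.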
| {"golden_diff": "diff --git a/google/auth/__init__.py b/google/auth/__init__.py\n--- a/google/auth/__init__.py\n+++ b/google/auth/__init__.py\n@@ -49,9 +49,6 @@\n \"for Python 3.7. More details about Python 3.7 support \"\n \"can be found at https://cloud.google.com/python/docs/python37-sunset/\"\n )\n-\n- # Configure the Python37DeprecationWarning warning so that it is only emitted once.\n- warnings.simplefilter(\"once\", Python37DeprecationWarning)\n warnings.warn(message, Python37DeprecationWarning)\n \n # Set default logging handler to avoid \"No handler found\" warnings.\ndiff --git a/google/oauth2/__init__.py b/google/oauth2/__init__.py\n--- a/google/oauth2/__init__.py\n+++ b/google/oauth2/__init__.py\n@@ -35,6 +35,4 @@\n \"for Python 3.7. More details about Python 3.7 support \"\n \"can be found at https://cloud.google.com/python/docs/python37-sunset/\"\n )\n- # Configure the Python37DeprecationWarning warning so that it is only emitted once.\n- warnings.simplefilter(\"once\", Python37DeprecationWarning)\n warnings.warn(message, Python37DeprecationWarning)\n", "issue": "deprecation notice for 3.7 modifies global state (user warning filters) as import side-effect\nIt is impossible to filter Python37DeprecationWarning after PR https://github.com/googleapis/google-auth-library-python/pull/1371.\r\n\r\nCustom libraries should not configure warning filters, because it is user project's global state. Most of the times you cannot modify import order and insert new warning filters after your library modifies them.\r\n\r\n#### Environment details\r\n\r\n - OS: Ubuntu 22.04.3 LTS linux 5.15.0-89-generic\r\n - Python version: 3.7.17\r\n - pip version: 23.3.1\r\n - `google-auth` version: 2.24.0\r\n\r\n#### Steps to reproduce\r\n\r\n 1. install google-auth into your python3.7 project\r\n 2. configure filterwarning rule `ignore::DeprecationWarning` in pytest.ini\r\n 3. use google.auth or google.oauth2 somewhere in your project\r\n 4. run pytest\r\n 5. get Python37DeprecationWarning that you cannot filter \n", "before_files": [{"content": "# Copyright 2016 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Google OAuth 2.0 Library for Python.\"\"\"\n\nimport sys\nimport warnings\n\n\nclass Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER\n \"\"\"\n Deprecation warning raised when Python 3.7 runtime is detected.\n Python 3.7 support will be dropped after January 1, 2024. See\n https://cloud.google.com/python/docs/python37-sunset/ for more information.\n \"\"\"\n\n pass\n\n\n# Checks if the current runtime is Python 3.7.\nif sys.version_info.major == 3 and sys.version_info.minor == 7: # pragma: NO COVER\n message = (\n \"After January 1, 2024, new releases of this library will drop support \"\n \"for Python 3.7. 
More details about Python 3.7 support \"\n \"can be found at https://cloud.google.com/python/docs/python37-sunset/\"\n )\n # Configure the Python37DeprecationWarning warning so that it is only emitted once.\n warnings.simplefilter(\"once\", Python37DeprecationWarning)\n warnings.warn(message, Python37DeprecationWarning)\n", "path": "google/oauth2/__init__.py"}, {"content": "# Copyright 2016 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Google Auth Library for Python.\"\"\"\n\nimport logging\nimport sys\nimport warnings\n\nfrom google.auth import version as google_auth_version\nfrom google.auth._default import (\n default,\n load_credentials_from_dict,\n load_credentials_from_file,\n)\n\n\n__version__ = google_auth_version.__version__\n\n\n__all__ = [\"default\", \"load_credentials_from_file\", \"load_credentials_from_dict\"]\n\n\nclass Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER\n \"\"\"\n Deprecation warning raised when Python 3.7 runtime is detected.\n Python 3.7 support will be dropped after January 1, 2024. See\n https://cloud.google.com/python/docs/python37-sunset/ for more information.\n \"\"\"\n\n pass\n\n\n# Checks if the current runtime is Python 3.7.\nif sys.version_info.major == 3 and sys.version_info.minor == 7: # pragma: NO COVER\n message = (\n \"After January 1, 2024, new releases of this library will drop support \"\n \"for Python 3.7. More details about Python 3.7 support \"\n \"can be found at https://cloud.google.com/python/docs/python37-sunset/\"\n )\n\n # Configure the Python37DeprecationWarning warning so that it is only emitted once.\n warnings.simplefilter(\"once\", Python37DeprecationWarning)\n warnings.warn(message, Python37DeprecationWarning)\n\n# Set default logging handler to avoid \"No handler found\" warnings.\nlogging.getLogger(__name__).addHandler(logging.NullHandler())\n", "path": "google/auth/__init__.py"}], "after_files": [{"content": "# Copyright 2016 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Google OAuth 2.0 Library for Python.\"\"\"\n\nimport sys\nimport warnings\n\n\nclass Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER\n \"\"\"\n Deprecation warning raised when Python 3.7 runtime is detected.\n Python 3.7 support will be dropped after January 1, 2024. 
See\n https://cloud.google.com/python/docs/python37-sunset/ for more information.\n \"\"\"\n\n pass\n\n\n# Checks if the current runtime is Python 3.7.\nif sys.version_info.major == 3 and sys.version_info.minor == 7: # pragma: NO COVER\n message = (\n \"After January 1, 2024, new releases of this library will drop support \"\n \"for Python 3.7. More details about Python 3.7 support \"\n \"can be found at https://cloud.google.com/python/docs/python37-sunset/\"\n )\n warnings.warn(message, Python37DeprecationWarning)\n", "path": "google/oauth2/__init__.py"}, {"content": "# Copyright 2016 Google LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Google Auth Library for Python.\"\"\"\n\nimport logging\nimport sys\nimport warnings\n\nfrom google.auth import version as google_auth_version\nfrom google.auth._default import (\n default,\n load_credentials_from_dict,\n load_credentials_from_file,\n)\n\n\n__version__ = google_auth_version.__version__\n\n\n__all__ = [\"default\", \"load_credentials_from_file\", \"load_credentials_from_dict\"]\n\n\nclass Python37DeprecationWarning(DeprecationWarning): # pragma: NO COVER\n \"\"\"\n Deprecation warning raised when Python 3.7 runtime is detected.\n Python 3.7 support will be dropped after January 1, 2024. See\n https://cloud.google.com/python/docs/python37-sunset/ for more information.\n \"\"\"\n\n pass\n\n\n# Checks if the current runtime is Python 3.7.\nif sys.version_info.major == 3 and sys.version_info.minor == 7: # pragma: NO COVER\n message = (\n \"After January 1, 2024, new releases of this library will drop support \"\n \"for Python 3.7. More details about Python 3.7 support \"\n \"can be found at https://cloud.google.com/python/docs/python37-sunset/\"\n )\n warnings.warn(message, Python37DeprecationWarning)\n\n# Set default logging handler to avoid \"No handler found\" warnings.\nlogging.getLogger(__name__).addHandler(logging.NullHandler())\n", "path": "google/auth/__init__.py"}]} | 1,578 | 296 |
gh_patches_debug_7882 | rasdani/github-patches | git_diff | numpy__numpy-15189 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
TST: Add the first test using hypothesis
This pull request adds the first test that uses hypothesis and hence brings in hypothesis as an additional test dependency.
@mattip Could you take a look at this please?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `numpy/conftest.py`
Content:
```
1 """
2 Pytest configuration and fixtures for the Numpy test suite.
3 """
4 import os
5
6 import pytest
7 import numpy
8
9 from numpy.core._multiarray_tests import get_fpu_mode
10
11
12 _old_fpu_mode = None
13 _collect_results = {}
14
15
16 def pytest_configure(config):
17 config.addinivalue_line("markers",
18 "valgrind_error: Tests that are known to error under valgrind.")
19 config.addinivalue_line("markers",
20 "leaks_references: Tests that are known to leak references.")
21 config.addinivalue_line("markers",
22 "slow: Tests that are very slow.")
23
24
25 def pytest_addoption(parser):
26 parser.addoption("--available-memory", action="store", default=None,
27 help=("Set amount of memory available for running the "
28 "test suite. This can result to tests requiring "
29 "especially large amounts of memory to be skipped. "
30 "Equivalent to setting environment variable "
31 "NPY_AVAILABLE_MEM. Default: determined"
32 "automatically."))
33
34
35 def pytest_sessionstart(session):
36 available_mem = session.config.getoption('available_memory')
37 if available_mem is not None:
38 os.environ['NPY_AVAILABLE_MEM'] = available_mem
39
40
41 #FIXME when yield tests are gone.
42 @pytest.hookimpl()
43 def pytest_itemcollected(item):
44 """
45 Check FPU precision mode was not changed during test collection.
46
47 The clumsy way we do it here is mainly necessary because numpy
48 still uses yield tests, which can execute code at test collection
49 time.
50 """
51 global _old_fpu_mode
52
53 mode = get_fpu_mode()
54
55 if _old_fpu_mode is None:
56 _old_fpu_mode = mode
57 elif mode != _old_fpu_mode:
58 _collect_results[item] = (_old_fpu_mode, mode)
59 _old_fpu_mode = mode
60
61
62 @pytest.fixture(scope="function", autouse=True)
63 def check_fpu_mode(request):
64 """
65 Check FPU precision mode was not changed during the test.
66 """
67 old_mode = get_fpu_mode()
68 yield
69 new_mode = get_fpu_mode()
70
71 if old_mode != new_mode:
72 raise AssertionError("FPU precision mode changed from {0:#x} to {1:#x}"
73 " during the test".format(old_mode, new_mode))
74
75 collect_result = _collect_results.get(request.node)
76 if collect_result is not None:
77 old_mode, new_mode = collect_result
78 raise AssertionError("FPU precision mode changed from {0:#x} to {1:#x}"
79 " when collecting the test".format(old_mode,
80 new_mode))
81
82
83 @pytest.fixture(autouse=True)
84 def add_np(doctest_namespace):
85 doctest_namespace['np'] = numpy
86
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/numpy/conftest.py b/numpy/conftest.py
--- a/numpy/conftest.py
+++ b/numpy/conftest.py
@@ -3,6 +3,7 @@
"""
import os
+import hypothesis
import pytest
import numpy
@@ -12,6 +13,12 @@
_old_fpu_mode = None
_collect_results = {}
+# See https://hypothesis.readthedocs.io/en/latest/settings.html
+hypothesis.settings.register_profile(
+ name="numpy-profile", deadline=None, print_blob=True,
+)
+hypothesis.settings.load_profile("numpy-profile")
+
def pytest_configure(config):
config.addinivalue_line("markers",
| {"golden_diff": "diff --git a/numpy/conftest.py b/numpy/conftest.py\n--- a/numpy/conftest.py\n+++ b/numpy/conftest.py\n@@ -3,6 +3,7 @@\n \"\"\"\n import os\n \n+import hypothesis\n import pytest\n import numpy\n \n@@ -12,6 +13,12 @@\n _old_fpu_mode = None\n _collect_results = {}\n \n+# See https://hypothesis.readthedocs.io/en/latest/settings.html\n+hypothesis.settings.register_profile(\n+ name=\"numpy-profile\", deadline=None, print_blob=True,\n+)\n+hypothesis.settings.load_profile(\"numpy-profile\")\n+\n \n def pytest_configure(config):\n config.addinivalue_line(\"markers\",\n", "issue": "TST: Add the first test using hypothesis\nThis pull request adds the first test that uses hypothesis and hence brings in hypothesis as an additional test dependency.\r\n\r\n@mattip Could you take a look at this please?\r\n\n", "before_files": [{"content": "\"\"\"\nPytest configuration and fixtures for the Numpy test suite.\n\"\"\"\nimport os\n\nimport pytest\nimport numpy\n\nfrom numpy.core._multiarray_tests import get_fpu_mode\n\n\n_old_fpu_mode = None\n_collect_results = {}\n\n\ndef pytest_configure(config):\n config.addinivalue_line(\"markers\",\n \"valgrind_error: Tests that are known to error under valgrind.\")\n config.addinivalue_line(\"markers\",\n \"leaks_references: Tests that are known to leak references.\")\n config.addinivalue_line(\"markers\",\n \"slow: Tests that are very slow.\")\n\n\ndef pytest_addoption(parser):\n parser.addoption(\"--available-memory\", action=\"store\", default=None,\n help=(\"Set amount of memory available for running the \"\n \"test suite. This can result to tests requiring \"\n \"especially large amounts of memory to be skipped. \"\n \"Equivalent to setting environment variable \"\n \"NPY_AVAILABLE_MEM. Default: determined\"\n \"automatically.\"))\n\n\ndef pytest_sessionstart(session):\n available_mem = session.config.getoption('available_memory')\n if available_mem is not None:\n os.environ['NPY_AVAILABLE_MEM'] = available_mem\n\n\n#FIXME when yield tests are gone.\[email protected]()\ndef pytest_itemcollected(item):\n \"\"\"\n Check FPU precision mode was not changed during test collection.\n\n The clumsy way we do it here is mainly necessary because numpy\n still uses yield tests, which can execute code at test collection\n time.\n \"\"\"\n global _old_fpu_mode\n\n mode = get_fpu_mode()\n\n if _old_fpu_mode is None:\n _old_fpu_mode = mode\n elif mode != _old_fpu_mode:\n _collect_results[item] = (_old_fpu_mode, mode)\n _old_fpu_mode = mode\n\n\[email protected](scope=\"function\", autouse=True)\ndef check_fpu_mode(request):\n \"\"\"\n Check FPU precision mode was not changed during the test.\n \"\"\"\n old_mode = get_fpu_mode()\n yield\n new_mode = get_fpu_mode()\n\n if old_mode != new_mode:\n raise AssertionError(\"FPU precision mode changed from {0:#x} to {1:#x}\"\n \" during the test\".format(old_mode, new_mode))\n\n collect_result = _collect_results.get(request.node)\n if collect_result is not None:\n old_mode, new_mode = collect_result\n raise AssertionError(\"FPU precision mode changed from {0:#x} to {1:#x}\"\n \" when collecting the test\".format(old_mode,\n new_mode))\n\n\[email protected](autouse=True)\ndef add_np(doctest_namespace):\n doctest_namespace['np'] = numpy\n", "path": "numpy/conftest.py"}], "after_files": [{"content": "\"\"\"\nPytest configuration and fixtures for the Numpy test suite.\n\"\"\"\nimport os\n\nimport hypothesis\nimport pytest\nimport numpy\n\nfrom numpy.core._multiarray_tests import get_fpu_mode\n\n\n_old_fpu_mode = 
None\n_collect_results = {}\n\n# See https://hypothesis.readthedocs.io/en/latest/settings.html\nhypothesis.settings.register_profile(\n name=\"numpy-profile\", deadline=None, print_blob=True,\n)\nhypothesis.settings.load_profile(\"numpy-profile\")\n\n\ndef pytest_configure(config):\n config.addinivalue_line(\"markers\",\n \"valgrind_error: Tests that are known to error under valgrind.\")\n config.addinivalue_line(\"markers\",\n \"leaks_references: Tests that are known to leak references.\")\n config.addinivalue_line(\"markers\",\n \"slow: Tests that are very slow.\")\n\n\ndef pytest_addoption(parser):\n parser.addoption(\"--available-memory\", action=\"store\", default=None,\n help=(\"Set amount of memory available for running the \"\n \"test suite. This can result to tests requiring \"\n \"especially large amounts of memory to be skipped. \"\n \"Equivalent to setting environment variable \"\n \"NPY_AVAILABLE_MEM. Default: determined\"\n \"automatically.\"))\n\n\ndef pytest_sessionstart(session):\n available_mem = session.config.getoption('available_memory')\n if available_mem is not None:\n os.environ['NPY_AVAILABLE_MEM'] = available_mem\n\n\n#FIXME when yield tests are gone.\[email protected]()\ndef pytest_itemcollected(item):\n \"\"\"\n Check FPU precision mode was not changed during test collection.\n\n The clumsy way we do it here is mainly necessary because numpy\n still uses yield tests, which can execute code at test collection\n time.\n \"\"\"\n global _old_fpu_mode\n\n mode = get_fpu_mode()\n\n if _old_fpu_mode is None:\n _old_fpu_mode = mode\n elif mode != _old_fpu_mode:\n _collect_results[item] = (_old_fpu_mode, mode)\n _old_fpu_mode = mode\n\n\[email protected](scope=\"function\", autouse=True)\ndef check_fpu_mode(request):\n \"\"\"\n Check FPU precision mode was not changed during the test.\n \"\"\"\n old_mode = get_fpu_mode()\n yield\n new_mode = get_fpu_mode()\n\n if old_mode != new_mode:\n raise AssertionError(\"FPU precision mode changed from {0:#x} to {1:#x}\"\n \" during the test\".format(old_mode, new_mode))\n\n collect_result = _collect_results.get(request.node)\n if collect_result is not None:\n old_mode, new_mode = collect_result\n raise AssertionError(\"FPU precision mode changed from {0:#x} to {1:#x}\"\n \" when collecting the test\".format(old_mode,\n new_mode))\n\n\[email protected](autouse=True)\ndef add_np(doctest_namespace):\n doctest_namespace['np'] = numpy\n", "path": "numpy/conftest.py"}]} | 1,062 | 153 |
gh_patches_debug_9956 | rasdani/github-patches | git_diff | iterative__dvc-8823 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
dvc repro: Tuples in parameters are always marked as modified
# Bug Report
<!--
## Issue name
Issue names must follow the pattern `command: description` where the command is the dvc command that you are trying to run. The description should describe the consequence of the bug.
Example: `repro: doesn't detect input changes`
-->
## Description
<!--
A clear and concise description of what the bug is.
-->
When tuples are used in parameters, dvc always marks them as modified.
```sh
dvc status
```
outputs
```yaml
train_model:
changed deps:
model/parameters.py:
modified: NGRAM_VECTORIZER_RANGE
```
regardless of whether `NGRAM_VECTORIZER_RANGE` has been changed
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dvc/dependency/param.py`
Content:
```
1 import logging
2 import os
3 import typing
4 from collections import defaultdict
5 from typing import Dict, cast
6
7 import dpath.util
8 from voluptuous import Any
9
10 from dvc.exceptions import DvcException
11 from dvc.utils.serialize import ParseError, load_path
12 from dvc_data.hashfile.hash_info import HashInfo
13
14 from .base import Dependency
15
16 logger = logging.getLogger(__name__)
17
18
19 class MissingParamsError(DvcException):
20 pass
21
22
23 class MissingParamsFile(DvcException):
24 pass
25
26
27 class ParamsIsADirectoryError(DvcException):
28 pass
29
30
31 class BadParamFileError(DvcException):
32 pass
33
34
35 class ParamsDependency(Dependency):
36 PARAM_PARAMS = "params"
37 PARAM_SCHEMA = {PARAM_PARAMS: Any(dict, list, None)}
38 DEFAULT_PARAMS_FILE = "params.yaml"
39
40 def __init__(self, stage, path, params=None, repo=None):
41 self.params = list(params) if params else []
42 hash_info = HashInfo()
43 if isinstance(params, dict):
44 hash_info = HashInfo(
45 self.PARAM_PARAMS,
46 params, # type: ignore[arg-type]
47 )
48 repo = repo or stage.repo
49 path = path or os.path.join(repo.root_dir, self.DEFAULT_PARAMS_FILE)
50 super().__init__(stage, path, repo=repo)
51 self.hash_info = hash_info
52
53 def dumpd(self, **kwargs):
54 ret = super().dumpd()
55 if not self.hash_info:
56 ret[self.PARAM_PARAMS] = self.params or {}
57 return ret
58
59 def fill_values(self, values=None):
60 """Load params values dynamically."""
61 if values is None:
62 return
63
64 info = {}
65 if not self.params:
66 info.update(values)
67 for param in self.params:
68 if param in values:
69 info[param] = values[param]
70 self.hash_info = HashInfo(
71 self.PARAM_PARAMS,
72 info, # type: ignore[arg-type]
73 )
74
75 def read_params(
76 self, flatten: bool = True, **kwargs: typing.Any
77 ) -> Dict[str, typing.Any]:
78 try:
79 config = self.read_file()
80 except MissingParamsFile:
81 config = {}
82
83 if not self.params:
84 return config
85
86 ret = {}
87 if flatten:
88 for param in self.params:
89 try:
90 ret[param] = dpath.util.get(config, param, separator=".")
91 except KeyError:
92 continue
93 return ret
94
95 from dpath.util import merge
96
97 for param in self.params:
98 merge(
99 ret,
100 dpath.util.search(config, param, separator="."),
101 separator=".",
102 )
103 return ret
104
105 def workspace_status(self):
106 if not self.exists:
107 return {str(self): "deleted"}
108 if self.hash_info.value is None:
109 return {str(self): "new"}
110
111 from funcy import ldistinct
112
113 status: Dict[str, Any] = defaultdict(dict)
114 info = cast(dict, self.hash_info.value) if self.hash_info else {}
115 actual = self.read_params()
116
117 # NOTE: we want to preserve the order of params as specified in the
118 # status. In case of tracking the whole file, the order is top-level
119 # keys in the file and then the keys in the `info` from `dvc.lock`
120 # (which are alphabetically sorted).
121 params = self.params or ldistinct([*actual.keys(), *info.keys()])
122 for param in params:
123 if param not in actual:
124 st = "deleted"
125 elif param not in info:
126 st = "new"
127 elif actual[param] != info[param]:
128 st = "modified"
129 else:
130 assert actual[param] == info[param]
131 continue
132
133 status[str(self)][param] = st
134
135 return status
136
137 def status(self):
138 return self.workspace_status()
139
140 def validate_filepath(self):
141 if not self.exists:
142 raise MissingParamsFile(f"Parameters file '{self}' does not exist")
143 if self.isdir():
144 raise ParamsIsADirectoryError(
145 f"'{self}' is a directory, expected a parameters file"
146 )
147
148 def read_file(self):
149 self.validate_filepath()
150 try:
151 return load_path(self.fs_path, self.repo.fs)
152 except ParseError as exc:
153 raise BadParamFileError(
154 f"Unable to read parameters from '{self}'"
155 ) from exc
156
157 def get_hash(self):
158 info = self.read_params()
159
160 missing_params = set(self.params) - set(info.keys())
161 if missing_params:
162 raise MissingParamsError(
163 "Parameters '{}' are missing from '{}'.".format(
164 ", ".join(missing_params), self
165 )
166 )
167
168 return HashInfo(self.PARAM_PARAMS, info) # type: ignore[arg-type]
169
170 def save(self):
171 if not self.exists:
172 raise self.DoesNotExistError(self)
173
174 if not self.isfile() and not self.isdir():
175 raise self.IsNotFileOrDirError(self)
176
177 self.ignore()
178 self.hash_info = self.get_hash()
179
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dvc/dependency/param.py b/dvc/dependency/param.py
--- a/dvc/dependency/param.py
+++ b/dvc/dependency/param.py
@@ -125,9 +125,14 @@
elif param not in info:
st = "new"
elif actual[param] != info[param]:
- st = "modified"
+ if (
+ isinstance(actual[param], tuple)
+ and list(actual[param]) == info[param]
+ ):
+ continue
+ else:
+ st = "modified"
else:
- assert actual[param] == info[param]
continue
status[str(self)][param] = st
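The underlying mismatch is a serialization round trip: a tuple such as `NGRAM_VECTORIZER_RANGE = (1, 2)` read from `parameters.py` stays a tuple in `actual`, while the value recorded in `dvc.lock` comes back as a list, and in Python `(1, 2) != [1, 2]`, so the strict inequality reported the parameter as modified forever. A minimal reproduction of the trap and of the patched check:

```python
import json

actual = {"NGRAM_VECTORIZER_RANGE": (1, 2)}  # value as read from parameters.py
info = json.loads(json.dumps(actual))        # lock-file round trip: tuple -> list
# (dvc.lock is YAML rather than JSON, but tuples degrade to lists either way.)

param = "NGRAM_VECTORIZER_RANGE"
assert actual[param] != info[param]          # (1, 2) != [1, 2]: spurious "modified"

# The patched comparison treats a tuple as unchanged when its list form
# matches what the lock file recorded:
assert isinstance(actual[param], tuple) and list(actual[param]) == info[param]
```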
| {"golden_diff": "diff --git a/dvc/dependency/param.py b/dvc/dependency/param.py\n--- a/dvc/dependency/param.py\n+++ b/dvc/dependency/param.py\n@@ -125,9 +125,14 @@\n elif param not in info:\n st = \"new\"\n elif actual[param] != info[param]:\n- st = \"modified\"\n+ if (\n+ isinstance(actual[param], tuple)\n+ and list(actual[param]) == info[param]\n+ ):\n+ continue\n+ else:\n+ st = \"modified\"\n else:\n- assert actual[param] == info[param]\n continue\n \n status[str(self)][param] = st\n", "issue": "dvc repro: Tuples in parameters are always marked as modified\n# Bug Report\r\n\r\n<!--\r\n## Issue name\r\n\r\nIssue names must follow the pattern `command: description` where the command is the dvc command that you are trying to run. The description should describe the consequence of the bug.\r\n\r\nExample: `repro: doesn't detect input changes`\r\n-->\r\n\r\n## Description\r\n\r\n<!--\r\nA clear and concise description of what the bug is.\r\n-->\r\nWhen tuples are used in parameters, dvc always marks them as modified.\r\n\r\n```sh\r\ndvc status\r\n```\r\noutputs\r\n```yaml\r\ntrain_model:\r\n changed deps:\r\n model/parameters.py: \r\n modified: NGRAM_VECTORIZER_RANGE\r\n```\r\n\r\nregardless of whether `NGRAM_VECTORIZER_RANGE` has been changed\r\n\n", "before_files": [{"content": "import logging\nimport os\nimport typing\nfrom collections import defaultdict\nfrom typing import Dict, cast\n\nimport dpath.util\nfrom voluptuous import Any\n\nfrom dvc.exceptions import DvcException\nfrom dvc.utils.serialize import ParseError, load_path\nfrom dvc_data.hashfile.hash_info import HashInfo\n\nfrom .base import Dependency\n\nlogger = logging.getLogger(__name__)\n\n\nclass MissingParamsError(DvcException):\n pass\n\n\nclass MissingParamsFile(DvcException):\n pass\n\n\nclass ParamsIsADirectoryError(DvcException):\n pass\n\n\nclass BadParamFileError(DvcException):\n pass\n\n\nclass ParamsDependency(Dependency):\n PARAM_PARAMS = \"params\"\n PARAM_SCHEMA = {PARAM_PARAMS: Any(dict, list, None)}\n DEFAULT_PARAMS_FILE = \"params.yaml\"\n\n def __init__(self, stage, path, params=None, repo=None):\n self.params = list(params) if params else []\n hash_info = HashInfo()\n if isinstance(params, dict):\n hash_info = HashInfo(\n self.PARAM_PARAMS,\n params, # type: ignore[arg-type]\n )\n repo = repo or stage.repo\n path = path or os.path.join(repo.root_dir, self.DEFAULT_PARAMS_FILE)\n super().__init__(stage, path, repo=repo)\n self.hash_info = hash_info\n\n def dumpd(self, **kwargs):\n ret = super().dumpd()\n if not self.hash_info:\n ret[self.PARAM_PARAMS] = self.params or {}\n return ret\n\n def fill_values(self, values=None):\n \"\"\"Load params values dynamically.\"\"\"\n if values is None:\n return\n\n info = {}\n if not self.params:\n info.update(values)\n for param in self.params:\n if param in values:\n info[param] = values[param]\n self.hash_info = HashInfo(\n self.PARAM_PARAMS,\n info, # type: ignore[arg-type]\n )\n\n def read_params(\n self, flatten: bool = True, **kwargs: typing.Any\n ) -> Dict[str, typing.Any]:\n try:\n config = self.read_file()\n except MissingParamsFile:\n config = {}\n\n if not self.params:\n return config\n\n ret = {}\n if flatten:\n for param in self.params:\n try:\n ret[param] = dpath.util.get(config, param, separator=\".\")\n except KeyError:\n continue\n return ret\n\n from dpath.util import merge\n\n for param in self.params:\n merge(\n ret,\n dpath.util.search(config, param, separator=\".\"),\n separator=\".\",\n )\n return ret\n\n def workspace_status(self):\n if not 
self.exists:\n return {str(self): \"deleted\"}\n if self.hash_info.value is None:\n return {str(self): \"new\"}\n\n from funcy import ldistinct\n\n status: Dict[str, Any] = defaultdict(dict)\n info = cast(dict, self.hash_info.value) if self.hash_info else {}\n actual = self.read_params()\n\n # NOTE: we want to preserve the order of params as specified in the\n # status. In case of tracking the whole file, the order is top-level\n # keys in the file and then the keys in the `info` from `dvc.lock`\n # (which are alphabetically sorted).\n params = self.params or ldistinct([*actual.keys(), *info.keys()])\n for param in params:\n if param not in actual:\n st = \"deleted\"\n elif param not in info:\n st = \"new\"\n elif actual[param] != info[param]:\n st = \"modified\"\n else:\n assert actual[param] == info[param]\n continue\n\n status[str(self)][param] = st\n\n return status\n\n def status(self):\n return self.workspace_status()\n\n def validate_filepath(self):\n if not self.exists:\n raise MissingParamsFile(f\"Parameters file '{self}' does not exist\")\n if self.isdir():\n raise ParamsIsADirectoryError(\n f\"'{self}' is a directory, expected a parameters file\"\n )\n\n def read_file(self):\n self.validate_filepath()\n try:\n return load_path(self.fs_path, self.repo.fs)\n except ParseError as exc:\n raise BadParamFileError(\n f\"Unable to read parameters from '{self}'\"\n ) from exc\n\n def get_hash(self):\n info = self.read_params()\n\n missing_params = set(self.params) - set(info.keys())\n if missing_params:\n raise MissingParamsError(\n \"Parameters '{}' are missing from '{}'.\".format(\n \", \".join(missing_params), self\n )\n )\n\n return HashInfo(self.PARAM_PARAMS, info) # type: ignore[arg-type]\n\n def save(self):\n if not self.exists:\n raise self.DoesNotExistError(self)\n\n if not self.isfile() and not self.isdir():\n raise self.IsNotFileOrDirError(self)\n\n self.ignore()\n self.hash_info = self.get_hash()\n", "path": "dvc/dependency/param.py"}], "after_files": [{"content": "import logging\nimport os\nimport typing\nfrom collections import defaultdict\nfrom typing import Dict, cast\n\nimport dpath.util\nfrom voluptuous import Any\n\nfrom dvc.exceptions import DvcException\nfrom dvc.utils.serialize import ParseError, load_path\nfrom dvc_data.hashfile.hash_info import HashInfo\n\nfrom .base import Dependency\n\nlogger = logging.getLogger(__name__)\n\n\nclass MissingParamsError(DvcException):\n pass\n\n\nclass MissingParamsFile(DvcException):\n pass\n\n\nclass ParamsIsADirectoryError(DvcException):\n pass\n\n\nclass BadParamFileError(DvcException):\n pass\n\n\nclass ParamsDependency(Dependency):\n PARAM_PARAMS = \"params\"\n PARAM_SCHEMA = {PARAM_PARAMS: Any(dict, list, None)}\n DEFAULT_PARAMS_FILE = \"params.yaml\"\n\n def __init__(self, stage, path, params=None, repo=None):\n self.params = list(params) if params else []\n hash_info = HashInfo()\n if isinstance(params, dict):\n hash_info = HashInfo(\n self.PARAM_PARAMS,\n params, # type: ignore[arg-type]\n )\n repo = repo or stage.repo\n path = path or os.path.join(repo.root_dir, self.DEFAULT_PARAMS_FILE)\n super().__init__(stage, path, repo=repo)\n self.hash_info = hash_info\n\n def dumpd(self, **kwargs):\n ret = super().dumpd()\n if not self.hash_info:\n ret[self.PARAM_PARAMS] = self.params or {}\n return ret\n\n def fill_values(self, values=None):\n \"\"\"Load params values dynamically.\"\"\"\n if values is None:\n return\n\n info = {}\n if not self.params:\n info.update(values)\n for param in self.params:\n if param in values:\n 
info[param] = values[param]\n self.hash_info = HashInfo(\n self.PARAM_PARAMS,\n info, # type: ignore[arg-type]\n )\n\n def read_params(\n self, flatten: bool = True, **kwargs: typing.Any\n ) -> Dict[str, typing.Any]:\n try:\n config = self.read_file()\n except MissingParamsFile:\n config = {}\n\n if not self.params:\n return config\n\n ret = {}\n if flatten:\n for param in self.params:\n try:\n ret[param] = dpath.util.get(config, param, separator=\".\")\n except KeyError:\n continue\n return ret\n\n from dpath.util import merge\n\n for param in self.params:\n merge(\n ret,\n dpath.util.search(config, param, separator=\".\"),\n separator=\".\",\n )\n return ret\n\n def workspace_status(self):\n if not self.exists:\n return {str(self): \"deleted\"}\n if self.hash_info.value is None:\n return {str(self): \"new\"}\n\n from funcy import ldistinct\n\n status: Dict[str, Any] = defaultdict(dict)\n info = cast(dict, self.hash_info.value) if self.hash_info else {}\n actual = self.read_params()\n\n # NOTE: we want to preserve the order of params as specified in the\n # status. In case of tracking the whole file, the order is top-level\n # keys in the file and then the keys in the `info` from `dvc.lock`\n # (which are alphabetically sorted).\n params = self.params or ldistinct([*actual.keys(), *info.keys()])\n for param in params:\n if param not in actual:\n st = \"deleted\"\n elif param not in info:\n st = \"new\"\n elif actual[param] != info[param]:\n if (\n isinstance(actual[param], tuple)\n and list(actual[param]) == info[param]\n ):\n continue\n else:\n st = \"modified\"\n else:\n continue\n\n status[str(self)][param] = st\n\n return status\n\n def status(self):\n return self.workspace_status()\n\n def validate_filepath(self):\n if not self.exists:\n raise MissingParamsFile(f\"Parameters file '{self}' does not exist\")\n if self.isdir():\n raise ParamsIsADirectoryError(\n f\"'{self}' is a directory, expected a parameters file\"\n )\n\n def read_file(self):\n self.validate_filepath()\n try:\n return load_path(self.fs_path, self.repo.fs)\n except ParseError as exc:\n raise BadParamFileError(\n f\"Unable to read parameters from '{self}'\"\n ) from exc\n\n def get_hash(self):\n info = self.read_params()\n\n missing_params = set(self.params) - set(info.keys())\n if missing_params:\n raise MissingParamsError(\n \"Parameters '{}' are missing from '{}'.\".format(\n \", \".join(missing_params), self\n )\n )\n\n return HashInfo(self.PARAM_PARAMS, info) # type: ignore[arg-type]\n\n def save(self):\n if not self.exists:\n raise self.DoesNotExistError(self)\n\n if not self.isfile() and not self.isdir():\n raise self.IsNotFileOrDirError(self)\n\n self.ignore()\n self.hash_info = self.get_hash()\n", "path": "dvc/dependency/param.py"}]} | 1,941 | 151 |
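The essence of the dvc `param.py` patch in the record above is easier to see in isolation: values read back from `dvc.lock` are plain lists, while params defined in Python may be tuples, and Python never treats a tuple as equal to a list. The following standalone sketch (not dvc code; the variable names are illustrative) shows both the false positive and the patched comparison.

```python
# Illustrative only -- mimics the comparison in dvc's workspace_status().
actual = (1, 2)   # param as defined in Python, e.g. an n-gram range tuple
stored = [1, 2]   # same value after a round trip through YAML/JSON in dvc.lock

print(actual == stored)        # False: tuple vs list -> wrongly "modified"
print(list(actual) == stored)  # True: the patched check treats them as equal
```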
gh_patches_debug_5546 | rasdani/github-patches | git_diff | mitmproxy__mitmproxy-5695 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
mitmdump jumps to 100% CPU when parent process exits
#### Problem Description
It took me two days to get this to reproduce in isolation. I hope someone with Python skills can figure out what is happening here. Depending on what the root cause is, this might not even be related to my funny architecture.
I'm spawning `mitmdump` from Node.js. If the node process exits, mitmdump will be re-assigned to become a child of `systemd` (some unix wizardry). It will then immediately jump to 100% CPU and stay there. This _only_ happens when an addon is using at least one network event (go figure...). E.g. I'm using `client_connected` (works with `clientconnect` on v6 as well). If the addon is only using something like `running`, the bug does not occur. Even better: if the addon originally only has `running`, nothing bad happens. But if I then add a `client_connected` and save the file (and the addon is automatically reloaded), it will instantly jump to 100% CPU.
My guess is that it might be related to stdout and the switcheroo with the parent process? In my actual architecture the mitmdump process will poll the parent via gRPC every second and shut down if it's gone. But the 100% CPU prevents that.
Update: while trying to write down the exact steps it turns out this might only reproduce via a local venv and not if you download the binary. I'm not sure, it's confusing. I'm confused. But I have video proof, so I'm not completely insane.
#### Steps to reproduce the behavior:
index.js
```js
const path = require('path');
const { spawn } = require('child_process');
function handleStdOut(data) {
console.log(`mitmdump stdout: ${data}`);
}
function handleStdError(data) {
console.error(`mitmdump stderr: ${data}`);
}
function handleExit(code) {
console.log(`mitm process exited with code ${code}`);
}
const mitm = spawn(
// Adjust this path
'/home/alex/Projects/Bandsalat/src/forks/mitmproxy/venv/bin/mitmdump',
['--quiet', '--set', 'connection_strategy=lazy', '--scripts', 'addon.py'],
{
detached: true,
windowsHide: true,
env: {
PYTHONUNBUFFERED: '1',
},
}
);
console.log(mitm.spawnargs);
mitm.unref();
mitm.on('exit', handleExit);
mitm.stdout.on('data', handleStdOut);
mitm.stderr.on('data', handleStdError);
```
addon.py
```py
class MyAddon:
def running(self):
print('running')
def client_connected(self, client):
print('client_connected')
addons = [
MyAddon()
]
```
1. I'm on Ubuntu
2. Adjust index.js to point to your local mitmproxy git venv
3. Launch `node index.js` (Node 14 or 16 work both for me)
4. Now open Chromium with mitmproxy configured. You don't need to enter any URL, Chromium will phone home anyway.
5. Keep Chromium open and ctrl+c the node process
6. Observe your fan getting louder and `top` showing mitmdump at 100% CPU
https://user-images.githubusercontent.com/679144/124594746-740a7080-de60-11eb-9ffb-a5fc4b3ba24a.mp4
#### System Information
Happens with both v6 and HEAD.
```
Mitmproxy: 7.0.0.dev (+492, commit af27556)
Python: 3.8.10
OpenSSL: OpenSSL 1.1.1i 8 Dec 2020
Platform: Linux-5.8.0-59-generic-x86_64-with-glibc2.29
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mitmproxy/addons/termlog.py`
Content:
```
1 from __future__ import annotations
2 import asyncio
3 import logging
4 from typing import IO
5
6 import sys
7
8 from mitmproxy import ctx, log
9 from mitmproxy.utils import vt_codes
10
11
12 class TermLog:
13 def __init__(
14 self,
15 out: IO[str] | None = None
16 ):
17 self.logger = TermLogHandler(out)
18 self.logger.install()
19
20 def load(self, loader):
21 loader.add_option(
22 "termlog_verbosity", str, "info", "Log verbosity.", choices=log.LogLevels
23 )
24 self.logger.setLevel(logging.INFO)
25
26 def configure(self, updated):
27 if "termlog_verbosity" in updated:
28 self.logger.setLevel(ctx.options.termlog_verbosity.upper())
29
30 def done(self):
31 t = self._teardown()
32 try:
33 # try to delay teardown a bit.
34 asyncio.create_task(t)
35 except RuntimeError:
36 # no event loop, we're in a test.
37 asyncio.run(t)
38
39 async def _teardown(self):
40 self.logger.uninstall()
41
42
43 class TermLogHandler(log.MitmLogHandler):
44 def __init__(
45 self,
46 out: IO[str] | None = None
47 ):
48 super().__init__()
49 self.file: IO[str] = out or sys.stdout
50 self.has_vt_codes = vt_codes.ensure_supported(self.file)
51 self.formatter = log.MitmFormatter(self.has_vt_codes)
52
53 def emit(self, record: logging.LogRecord) -> None:
54 print(
55 self.format(record),
56 file=self.file
57 )
58
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mitmproxy/addons/termlog.py b/mitmproxy/addons/termlog.py
--- a/mitmproxy/addons/termlog.py
+++ b/mitmproxy/addons/termlog.py
@@ -51,7 +51,9 @@
self.formatter = log.MitmFormatter(self.has_vt_codes)
def emit(self, record: logging.LogRecord) -> None:
- print(
- self.format(record),
- file=self.file
- )
+ try:
+ print(self.format(record), file=self.file)
+ except OSError:
+ # We cannot print, exit immediately.
+ # See https://github.com/mitmproxy/mitmproxy/issues/4669
+ sys.exit(1)
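To make the termlog patch above concrete, here is a minimal standalone sketch (not mitmproxy code) of the failure mode: once the parent process that owned stdout goes away, every write raises `OSError`, and a log handler that keeps printing anyway just burns CPU on failed writes. Exiting on the first failed write, as the patch does, breaks that loop.

```python
import os
import sys

# Simulate the parent tearing down our stdout pipe/fd.
os.close(sys.stdout.fileno())

try:
    print("still alive?", flush=True)  # the flush forces the failing OS-level write
except OSError:
    sys.exit(1)  # mirrors the patch: we cannot print, shut down immediately
```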
| {"golden_diff": "diff --git a/mitmproxy/addons/termlog.py b/mitmproxy/addons/termlog.py\n--- a/mitmproxy/addons/termlog.py\n+++ b/mitmproxy/addons/termlog.py\n@@ -51,7 +51,9 @@\n self.formatter = log.MitmFormatter(self.has_vt_codes)\n \n def emit(self, record: logging.LogRecord) -> None:\n- print(\n- self.format(record),\n- file=self.file\n- )\n+ try:\n+ print(self.format(record), file=self.file)\n+ except OSError:\n+ # We cannot print, exit immediately.\n+ # See https://github.com/mitmproxy/mitmproxy/issues/4669\n+ sys.exit(1)\n", "issue": "mitmdump jumps to 100% CPU when parent process exits\n#### Problem Description\r\n\r\nIt took me two days to make this reproduce in isolation. I hope someone with Python skills can figure out what is happening here. Depending on what the root cause is this might not even be related to my funny architecture.\r\n\r\nI'm spawning `mitmdump` from Node.js. If the node process exits mitmdump will be re-assigned to become a child of `systemd` (some unix wizardry). It will then immediately jump to 100% CPU and stay there. This _only_ happens when an addon is using at least one network event (go figure...). E.g. I'm using `client_connected` (works with `clientconnect` on v6 as well). If the addon is only using sth. like `running` the bug does not occur. Even better: if the addon originally only has \"running\" nothing bad happens. But if I then add a `client_connected` and save the file (and the addon is automatically reloaded) it will instantly jump to 100% CPU.\r\n\r\nMy guess is that it might be related to stdout and the switcheroo with the parent process? In my actual architecture the mitmdump process will poll the parent via gRPC every second and shutdown if it's gone. But the 100% CPU prevents that.\r\n\r\nUpdate: while trying to write down the exact steps it turns out this might only reproduce via local venv and and not if you download the binary. I'm not sure, it's confusing. I'm confused. But I have video proof, so I'm not completely insane.\r\n\r\n#### Steps to reproduce the behavior:\r\n\r\nindex.js\r\n\r\n```js\r\nconst path = require('path');\r\nconst { spawn } = require('child_process');\r\n\r\nfunction handleStdOut(data) {\r\n console.log(`mitmdump stdout: ${data}`);\r\n}\r\n\r\nfunction handleStdError(data) {\r\n console.error(`mitmdump stderr: ${data}`);\r\n}\r\n\r\nfunction handleExit(code) {\r\n console.log(`mitm process exited with code ${code}`);\r\n}\r\n\r\nconst mitm = spawn(\r\n // Adjust this path\r\n '/home/alex/Projects/Bandsalat/src/forks/mitmproxy/venv/bin/mitmdump',\r\n ['--quiet', '--set', 'connection_strategy=lazy', '--scripts', 'addon.py'],\r\n {\r\n detached: true,\r\n windowsHide: true,\r\n env: {\r\n PYTHONUNBUFFERED: '1',\r\n },\r\n }\r\n);\r\n\r\nconsole.log(mitm.spawnargs);\r\n\r\nmitm.unref();\r\nmitm.on('exit', handleExit);\r\nmitm.stdout.on('data', handleStdOut);\r\nmitm.stderr.on('data', handleStdError);\r\n```\r\naddon.py\r\n\r\n```py\r\nclass MyAddon:\r\n def running(self):\r\n print('running')\r\n\r\n def client_connected(self, client):\r\n print('client_connected')\r\n\r\naddons = [\r\n MyAddon()\r\n]\r\n```\r\n\r\n1. I'm on Ubuntu\r\n2. Adjust index.js to point to your local mitmproxy git venv\r\n3. Launch `node index.js` (Node 14 or 16 work both for me)\r\n4. Now open Chromium with mitmproxy configured. You don't need to enter any URL, Chromium will phone home anyway.\r\n5. Keep Chromium open and ctrl+c the node process\r\n6. 
Observe your fan getting louder and `top` showing mitmdump at 100% CPU\r\n\r\nhttps://user-images.githubusercontent.com/679144/124594746-740a7080-de60-11eb-9ffb-a5fc4b3ba24a.mp4\r\n\r\n#### System Information\r\n\r\nHappens with both v6 and HEAD.\r\n\r\n```\r\nMitmproxy: 7.0.0.dev (+492, commit af27556)\r\nPython: 3.8.10\r\nOpenSSL: OpenSSL 1.1.1i 8 Dec 2020\r\nPlatform: Linux-5.8.0-59-generic-x86_64-with-glibc2.29\r\n```\r\n\n", "before_files": [{"content": "from __future__ import annotations\nimport asyncio\nimport logging\nfrom typing import IO\n\nimport sys\n\nfrom mitmproxy import ctx, log\nfrom mitmproxy.utils import vt_codes\n\n\nclass TermLog:\n def __init__(\n self,\n out: IO[str] | None = None\n ):\n self.logger = TermLogHandler(out)\n self.logger.install()\n\n def load(self, loader):\n loader.add_option(\n \"termlog_verbosity\", str, \"info\", \"Log verbosity.\", choices=log.LogLevels\n )\n self.logger.setLevel(logging.INFO)\n\n def configure(self, updated):\n if \"termlog_verbosity\" in updated:\n self.logger.setLevel(ctx.options.termlog_verbosity.upper())\n\n def done(self):\n t = self._teardown()\n try:\n # try to delay teardown a bit.\n asyncio.create_task(t)\n except RuntimeError:\n # no event loop, we're in a test.\n asyncio.run(t)\n\n async def _teardown(self):\n self.logger.uninstall()\n\n\nclass TermLogHandler(log.MitmLogHandler):\n def __init__(\n self,\n out: IO[str] | None = None\n ):\n super().__init__()\n self.file: IO[str] = out or sys.stdout\n self.has_vt_codes = vt_codes.ensure_supported(self.file)\n self.formatter = log.MitmFormatter(self.has_vt_codes)\n\n def emit(self, record: logging.LogRecord) -> None:\n print(\n self.format(record),\n file=self.file\n )\n", "path": "mitmproxy/addons/termlog.py"}], "after_files": [{"content": "from __future__ import annotations\nimport asyncio\nimport logging\nfrom typing import IO\n\nimport sys\n\nfrom mitmproxy import ctx, log\nfrom mitmproxy.utils import vt_codes\n\n\nclass TermLog:\n def __init__(\n self,\n out: IO[str] | None = None\n ):\n self.logger = TermLogHandler(out)\n self.logger.install()\n\n def load(self, loader):\n loader.add_option(\n \"termlog_verbosity\", str, \"info\", \"Log verbosity.\", choices=log.LogLevels\n )\n self.logger.setLevel(logging.INFO)\n\n def configure(self, updated):\n if \"termlog_verbosity\" in updated:\n self.logger.setLevel(ctx.options.termlog_verbosity.upper())\n\n def done(self):\n t = self._teardown()\n try:\n # try to delay teardown a bit.\n asyncio.create_task(t)\n except RuntimeError:\n # no event loop, we're in a test.\n asyncio.run(t)\n\n async def _teardown(self):\n self.logger.uninstall()\n\n\nclass TermLogHandler(log.MitmLogHandler):\n def __init__(\n self,\n out: IO[str] | None = None\n ):\n super().__init__()\n self.file: IO[str] = out or sys.stdout\n self.has_vt_codes = vt_codes.ensure_supported(self.file)\n self.formatter = log.MitmFormatter(self.has_vt_codes)\n\n def emit(self, record: logging.LogRecord) -> None:\n try:\n print(self.format(record), file=self.file)\n except OSError:\n # We cannot print, exit immediately.\n # See https://github.com/mitmproxy/mitmproxy/issues/4669\n sys.exit(1)\n", "path": "mitmproxy/addons/termlog.py"}]} | 1,587 | 167 |
gh_patches_debug_27961 | rasdani/github-patches | git_diff | sunpy__sunpy-6926 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add a "How do I..." page to our documentation
e.g. this page from the xarray docs: http://xarray.pydata.org/en/stable/howdoi.html
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/acquiring_data/searching_multiple_wavelengths.py`
Content:
```
1 """
2 ==============================================
3 Searching for multiple wavelengths with Fido
4 ==============================================
5
6 This example shows how you can search for several wavelengths of AIA data with Fido.
7 """
8 from astropy import units as u
9
10 from sunpy.net import Fido
11 from sunpy.net import attrs as a
12
13 ###############################################################################
14 # Here we are demonstrating how you can search for specific wavelengths of
15 # AIA data using `Fido <sunpy.net.fido_factory.UnifiedDownloaderFactory>`
16 # and the `sunpy.net.attrs.AttrOr` function.
17 # For example, you may only want a single wavelength, say 171 Angstrom:
18
19 aia_search = Fido.search(a.Time("2022-02-20 00:00", "2022-02-20 00:01"),
20 a.Instrument("AIA"),
21 a.Wavelength(171*u.angstrom))
22
23 print(aia_search)
24
25 ###############################################################################
26 # But say you actually want to search for several wavelengths, rather than just one.
27 # You could use the "|" operator, or instead you can use the `sunpy.net.attrs.AttrOr`
28 # function.
29
30 wavelengths = [94, 131, 171, 193, 211]*u.angstrom
31 aia_search = Fido.search(a.Time("2022-02-20 00:00", "2022-02-20 00:01"),
32 a.Instrument("AIA"),
33 a.AttrOr([a.Wavelength(wav) for wav in wavelengths]))
34
35 print(aia_search)
36
37 # This returns several searches for each of the wavelengths, which can be indexed.
38 # Here the first index is that of 94 angstrom.
39 print(aia_search[0])
40
41 ###############################################################################
42 # You can then pass the `Fido <sunpy.net.fido_factory.UnifiedDownloaderFactory>`
43 # result to :meth:`Fido.fetch <sunpy.net.fido_factory.UnifiedDownloaderFactory.fetch>`
44 # to download the data, i.e., ``Fido.fetch(aia_search)``.
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/acquiring_data/searching_multiple_wavelengths.py b/examples/acquiring_data/searching_multiple_wavelengths.py
deleted file mode 100644
--- a/examples/acquiring_data/searching_multiple_wavelengths.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
-==============================================
-Searching for multiple wavelengths with Fido
-==============================================
-
-This example shows how you can search for several wavelengths of AIA data with Fido.
-"""
-from astropy import units as u
-
-from sunpy.net import Fido
-from sunpy.net import attrs as a
-
-###############################################################################
-# Here we are demonstrating how you can search for specific wavelengths of
-# AIA data using `Fido <sunpy.net.fido_factory.UnifiedDownloaderFactory>`
-# and the `sunpy.net.attrs.AttrOr` function.
-# For example, you may only want a single wavelength, say 171 Angstrom:
-
-aia_search = Fido.search(a.Time("2022-02-20 00:00", "2022-02-20 00:01"),
- a.Instrument("AIA"),
- a.Wavelength(171*u.angstrom))
-
-print(aia_search)
-
-###############################################################################
-# But say you actually want to search for several wavelengths, rather than just one.
-# You could use the "|" operator, or instead you can use the `sunpy.net.attrs.AttrOr`
-# function.
-
-wavelengths = [94, 131, 171, 193, 211]*u.angstrom
-aia_search = Fido.search(a.Time("2022-02-20 00:00", "2022-02-20 00:01"),
- a.Instrument("AIA"),
- a.AttrOr([a.Wavelength(wav) for wav in wavelengths]))
-
-print(aia_search)
-
-# This returns several searches for each of the wavelengths, which can be indexed.
-# Here the first index is that of 94 angstrom.
-print(aia_search[0])
-
-###############################################################################
-# You can then pass the `Fido <sunpy.net.fido_factory.UnifiedDownloaderFactory>`
-# result to :meth:`Fido.fetch <sunpy.net.fido_factory.UnifiedDownloaderFactory.fetch>`
-# to download the data, i.e., ``Fido.fetch(aia_search)``.
| {"golden_diff": "diff --git a/examples/acquiring_data/searching_multiple_wavelengths.py b/examples/acquiring_data/searching_multiple_wavelengths.py\ndeleted file mode 100644\n--- a/examples/acquiring_data/searching_multiple_wavelengths.py\n+++ /dev/null\n@@ -1,44 +0,0 @@\n-\"\"\"\n-==============================================\n-Searching for multiple wavelengths with Fido\n-==============================================\n-\n-This example shows how you can search for several wavelengths of AIA data with Fido.\n-\"\"\"\n-from astropy import units as u\n-\n-from sunpy.net import Fido\n-from sunpy.net import attrs as a\n-\n-###############################################################################\n-# Here we are demonstrating how you can search for specific wavelengths of\n-# AIA data using `Fido <sunpy.net.fido_factory.UnifiedDownloaderFactory>`\n-# and the `sunpy.net.attrs.AttrOr` function.\n-# For example, you may only want a single wavelength, say 171 Angstrom:\n-\n-aia_search = Fido.search(a.Time(\"2022-02-20 00:00\", \"2022-02-20 00:01\"),\n- a.Instrument(\"AIA\"),\n- a.Wavelength(171*u.angstrom))\n-\n-print(aia_search)\n-\n-###############################################################################\n-# But say you actually want to search for several wavelengths, rather than just one.\n-# You could use the \"|\" operator, or instead you can use the `sunpy.net.attrs.AttrOr`\n-# function.\n-\n-wavelengths = [94, 131, 171, 193, 211]*u.angstrom\n-aia_search = Fido.search(a.Time(\"2022-02-20 00:00\", \"2022-02-20 00:01\"),\n- a.Instrument(\"AIA\"),\n- a.AttrOr([a.Wavelength(wav) for wav in wavelengths]))\n-\n-print(aia_search)\n-\n-# This returns several searches for each of the wavelengths, which can be indexed.\n-# Here the first index is that of 94 angstrom.\n-print(aia_search[0])\n-\n-###############################################################################\n-# You can then pass the `Fido <sunpy.net.fido_factory.UnifiedDownloaderFactory>`\n-# result to :meth:`Fido.fetch <sunpy.net.fido_factory.UnifiedDownloaderFactory.fetch>`\n-# to download the data, i.e., ``Fido.fetch(aia_search)``.\n", "issue": "Add a \"How do I...\" page to our documentation\n<!--\r\nWe know asking good questions takes effort, and we appreciate your time.\r\nThank you.\r\n\r\nPlease be aware that everyone has to follow our code of conduct:\r\nhttps://sunpy.org/coc\r\n\r\nThese comments are hidden when you submit this github issue.\r\n\r\nPlease have a search on our GitHub repository to see if a similar issue has already been posted.\r\nIf a similar issue is closed, have a quick look to see if you are satisfied by the resolution.\r\nIf not please go ahead and open an issue!\r\n-->\r\n\r\n\r\n<!--\r\nProvide a general description of the feature you would like.\r\nIf you prefer, you can also suggest a draft design or API.\r\n-->\r\n\r\ne.g. 
this page from the xarray docs: http://xarray.pydata.org/en/stable/howdoi.html\r\n\n", "before_files": [{"content": "\"\"\"\n==============================================\nSearching for multiple wavelengths with Fido\n==============================================\n\nThis example shows how you can search for several wavelengths of AIA data with Fido.\n\"\"\"\nfrom astropy import units as u\n\nfrom sunpy.net import Fido\nfrom sunpy.net import attrs as a\n\n###############################################################################\n# Here we are demonstrating how you can search for specific wavelengths of\n# AIA data using `Fido <sunpy.net.fido_factory.UnifiedDownloaderFactory>`\n# and the `sunpy.net.attrs.AttrOr` function.\n# For example, you may only want a single wavelength, say 171 Angstrom:\n\naia_search = Fido.search(a.Time(\"2022-02-20 00:00\", \"2022-02-20 00:01\"),\n a.Instrument(\"AIA\"),\n a.Wavelength(171*u.angstrom))\n\nprint(aia_search)\n\n###############################################################################\n# But say you actually want to search for several wavelengths, rather than just one.\n# You could use the \"|\" operator, or instead you can use the `sunpy.net.attrs.AttrOr`\n# function.\n\nwavelengths = [94, 131, 171, 193, 211]*u.angstrom\naia_search = Fido.search(a.Time(\"2022-02-20 00:00\", \"2022-02-20 00:01\"),\n a.Instrument(\"AIA\"),\n a.AttrOr([a.Wavelength(wav) for wav in wavelengths]))\n\nprint(aia_search)\n\n# This returns several searches for each of the wavelengths, which can be indexed.\n# Here the first index is that of 94 angstrom.\nprint(aia_search[0])\n\n###############################################################################\n# You can then pass the `Fido <sunpy.net.fido_factory.UnifiedDownloaderFactory>`\n# result to :meth:`Fido.fetch <sunpy.net.fido_factory.UnifiedDownloaderFactory.fetch>`\n# to download the data, i.e., ``Fido.fetch(aia_search)``.\n", "path": "examples/acquiring_data/searching_multiple_wavelengths.py"}], "after_files": [{"content": null, "path": "examples/acquiring_data/searching_multiple_wavelengths.py"}]} | 974 | 551 |
gh_patches_debug_1057 | rasdani/github-patches | git_diff | StackStorm__st2-5091 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
St2Stream service broken when using SSL with mongodb
## SUMMARY
This issue is an extension to #4832, however this time it is the st2stream service. I have looked at the code and can see the same monkey patch code hasn't been applied to the st2stream app.
### STACKSTORM VERSION
Paste the output of ``st2 --version``: 3.3.0
##### OS, environment, install method
Docker compose with the split services, and the mongo db references commented out so that an external db can be used: https://github.com/StackStorm/st2-docker/blob/master/docker-compose.yml
All other services connect correctly to the mongodb.net test instance, with the exception of st2stream.
## Steps to reproduce the problem
Use the docker compose yaml at https://github.com/StackStorm/st2-docker/blob/master/docker-compose.yml, comment out the mongo container and its references, and adjust files/st2-docker.conf to point to an external DB with SSL = True enabled.
docker-compose up
## Expected Results
What did you expect to happen when running the steps above?
st2stream to operate correctly
## Actual Results
What happened? What output did you get?
2020-11-16 05:48:55,053 WARNING [-] Retry on ConnectionError - Cannot connect to database default :
maximum recursion depth exceeded
Adding the monkey patch code to the st2stream app resolves the issue (manually injected into the container to test).
file: `st2stream/cmd/api.py`
Code:
```py
from st2common.util.monkey_patch import monkey_patch
monkey_patch()
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `st2stream/st2stream/cmd/api.py`
Content:
```
1 # Copyright 2020 The StackStorm Authors.
2 # Copyright 2019 Extreme Networks, Inc.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import os
17 import sys
18
19 import eventlet
20 from oslo_config import cfg
21 from eventlet import wsgi
22
23 from st2common import log as logging
24 from st2common.service_setup import setup as common_setup
25 from st2common.service_setup import teardown as common_teardown
26 from st2common.stream.listener import get_listener_if_set
27 from st2common.util.wsgi import shutdown_server_kill_pending_requests
28 from st2stream.signal_handlers import register_stream_signal_handlers
29 from st2stream import config
30 config.register_opts()
31 from st2stream import app
32
33 __all__ = [
34 'main'
35 ]
36
37
38 eventlet.monkey_patch(
39 os=True,
40 select=True,
41 socket=True,
42 thread=False if '--use-debugger' in sys.argv else True,
43 time=True)
44
45 LOG = logging.getLogger(__name__)
46
47 # How much time to give to the request in progress to finish in seconds before killing them
48 WSGI_SERVER_REQUEST_SHUTDOWN_TIME = 2
49
50
51 def _setup():
52 capabilities = {
53 'name': 'stream',
54 'listen_host': cfg.CONF.stream.host,
55 'listen_port': cfg.CONF.stream.port,
56 'type': 'active'
57 }
58 common_setup(service='stream', config=config, setup_db=True, register_mq_exchanges=True,
59 register_signal_handlers=True, register_internal_trigger_types=False,
60 run_migrations=False, service_registry=True, capabilities=capabilities)
61
62
63 def _run_server():
64 host = cfg.CONF.stream.host
65 port = cfg.CONF.stream.port
66
67 LOG.info('(PID=%s) ST2 Stream API is serving on http://%s:%s.', os.getpid(), host, port)
68
69 max_pool_size = eventlet.wsgi.DEFAULT_MAX_SIMULTANEOUS_REQUESTS
70 worker_pool = eventlet.GreenPool(max_pool_size)
71 sock = eventlet.listen((host, port))
72
73 def queue_shutdown(signal_number, stack_frame):
74 eventlet.spawn_n(shutdown_server_kill_pending_requests, sock=sock,
75 worker_pool=worker_pool, wait_time=WSGI_SERVER_REQUEST_SHUTDOWN_TIME)
76
77 # We register a custom SIGINT handler which allows us to kill long running active requests.
78 # Note: Eventually we will support draining (waiting for short-running requests), but we
79 # will still want to kill long running stream requests.
80 register_stream_signal_handlers(handler_func=queue_shutdown)
81
82 wsgi.server(sock, app.setup_app(), custom_pool=worker_pool)
83 return 0
84
85
86 def _teardown():
87 common_teardown()
88
89
90 def main():
91 try:
92 _setup()
93 return _run_server()
94 except SystemExit as exit_code:
95 sys.exit(exit_code)
96 except KeyboardInterrupt:
97 listener = get_listener_if_set(name='stream')
98
99 if listener:
100 listener.shutdown()
101 except Exception:
102 LOG.exception('(PID=%s) ST2 Stream API quit due to exception.', os.getpid())
103 return 1
104 finally:
105 _teardown()
106
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/st2stream/st2stream/cmd/api.py b/st2stream/st2stream/cmd/api.py
--- a/st2stream/st2stream/cmd/api.py
+++ b/st2stream/st2stream/cmd/api.py
@@ -13,6 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from st2common.util.monkey_patch import monkey_patch
+monkey_patch()
+
import os
import sys
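The st2stream patch above works because of import ordering, which the diff alone does not make obvious: eventlet can only make `socket`/`ssl` cooperative if it patches them before any other module imports and caches the unpatched versions. A stripped-down illustration (not st2 code) of the required ordering:

```python
# Patch first, at the very top of the entry point ...
import eventlet
eventlet.monkey_patch(os=True, select=True, socket=True, thread=True, time=True)

# ... and only then import anything that opens SSL connections (e.g. a
# MongoDB client). Importing such modules *above* the patch is what led to
# the "maximum recursion depth exceeded" connection retries in the issue.
import ssl  # noqa: E402  -- deliberately imported after patching
```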
| {"golden_diff": "diff --git a/st2stream/st2stream/cmd/api.py b/st2stream/st2stream/cmd/api.py\n--- a/st2stream/st2stream/cmd/api.py\n+++ b/st2stream/st2stream/cmd/api.py\n@@ -13,6 +13,9 @@\n # See the License for the specific language governing permissions and\n # limitations under the License.\n \n+from st2common.util.monkey_patch import monkey_patch\n+monkey_patch()\n+\n import os\n import sys\n", "issue": "St2Stream service broken when using SSL with mongodb\n## SUMMARY\r\n\r\nThis issue is an extension to #4832 however this time it is the st2stream service, I have looked that the code and can see the same monkey patch code hasn't been applied to the st2stream app\r\n\r\n### STACKSTORM VERSION\r\n\r\nPaste the output of ``st2 --version``: 3.3.0\r\n\r\n##### OS, environment, install method\r\n\r\nDocker compose with the split services and mongo db references commented out so that an external db can be used https://github.com/StackStorm/st2-docker/blob/master/docker-compose.yml\r\n\r\nAll other services correctly connected to mongodb.net test instance with the exception of st2stream.\r\n\r\n## Steps to reproduce the problem\r\n\r\nuse docker yaml at https://github.com/StackStorm/st2-docker/blob/master/docker-compose.yml, comment out mongo container and references, adjust files/st2-docker.conf to point to external DB with SSL = True enabled.\r\ndocker-compose up\r\n\r\n## Expected Results\r\n\r\nWhat did you expect to happen when running the steps above?\r\n\r\nst2stream to operate correctly\r\n\r\n## Actual Results\r\n\r\nWhat happened? What output did you get?\r\n\r\n2020-11-16 05:48:55,053 WARNING [-] Retry on ConnectionError - Cannot connect to database default :\r\nmaximum recursion depth exceeded\r\n\r\n\r\n\r\nAdding monkey patch code to st2stream app resolves the issue (manually injected into container to test).\r\n\r\nfile: st2stream/cmd/api.py\r\nCode: \r\nfrom st2common.util.monkey_patch import monkey_patch\r\nmonkey_patch()\r\n\r\n\n", "before_files": [{"content": "# Copyright 2020 The StackStorm Authors.\n# Copyright 2019 Extreme Networks, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport sys\n\nimport eventlet\nfrom oslo_config import cfg\nfrom eventlet import wsgi\n\nfrom st2common import log as logging\nfrom st2common.service_setup import setup as common_setup\nfrom st2common.service_setup import teardown as common_teardown\nfrom st2common.stream.listener import get_listener_if_set\nfrom st2common.util.wsgi import shutdown_server_kill_pending_requests\nfrom st2stream.signal_handlers import register_stream_signal_handlers\nfrom st2stream import config\nconfig.register_opts()\nfrom st2stream import app\n\n__all__ = [\n 'main'\n]\n\n\neventlet.monkey_patch(\n os=True,\n select=True,\n socket=True,\n thread=False if '--use-debugger' in sys.argv else True,\n time=True)\n\nLOG = logging.getLogger(__name__)\n\n# How much time to give to the request in progress to finish in seconds before killing them\nWSGI_SERVER_REQUEST_SHUTDOWN_TIME = 2\n\n\ndef 
_setup():\n capabilities = {\n 'name': 'stream',\n 'listen_host': cfg.CONF.stream.host,\n 'listen_port': cfg.CONF.stream.port,\n 'type': 'active'\n }\n common_setup(service='stream', config=config, setup_db=True, register_mq_exchanges=True,\n register_signal_handlers=True, register_internal_trigger_types=False,\n run_migrations=False, service_registry=True, capabilities=capabilities)\n\n\ndef _run_server():\n host = cfg.CONF.stream.host\n port = cfg.CONF.stream.port\n\n LOG.info('(PID=%s) ST2 Stream API is serving on http://%s:%s.', os.getpid(), host, port)\n\n max_pool_size = eventlet.wsgi.DEFAULT_MAX_SIMULTANEOUS_REQUESTS\n worker_pool = eventlet.GreenPool(max_pool_size)\n sock = eventlet.listen((host, port))\n\n def queue_shutdown(signal_number, stack_frame):\n eventlet.spawn_n(shutdown_server_kill_pending_requests, sock=sock,\n worker_pool=worker_pool, wait_time=WSGI_SERVER_REQUEST_SHUTDOWN_TIME)\n\n # We register a custom SIGINT handler which allows us to kill long running active requests.\n # Note: Eventually we will support draining (waiting for short-running requests), but we\n # will still want to kill long running stream requests.\n register_stream_signal_handlers(handler_func=queue_shutdown)\n\n wsgi.server(sock, app.setup_app(), custom_pool=worker_pool)\n return 0\n\n\ndef _teardown():\n common_teardown()\n\n\ndef main():\n try:\n _setup()\n return _run_server()\n except SystemExit as exit_code:\n sys.exit(exit_code)\n except KeyboardInterrupt:\n listener = get_listener_if_set(name='stream')\n\n if listener:\n listener.shutdown()\n except Exception:\n LOG.exception('(PID=%s) ST2 Stream API quit due to exception.', os.getpid())\n return 1\n finally:\n _teardown()\n", "path": "st2stream/st2stream/cmd/api.py"}], "after_files": [{"content": "# Copyright 2020 The StackStorm Authors.\n# Copyright 2019 Extreme Networks, Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom st2common.util.monkey_patch import monkey_patch\nmonkey_patch()\n\nimport os\nimport sys\n\nimport eventlet\nfrom oslo_config import cfg\nfrom eventlet import wsgi\n\nfrom st2common import log as logging\nfrom st2common.service_setup import setup as common_setup\nfrom st2common.service_setup import teardown as common_teardown\nfrom st2common.stream.listener import get_listener_if_set\nfrom st2common.util.wsgi import shutdown_server_kill_pending_requests\nfrom st2stream.signal_handlers import register_stream_signal_handlers\nfrom st2stream import config\nconfig.register_opts()\nfrom st2stream import app\n\n__all__ = [\n 'main'\n]\n\n\neventlet.monkey_patch(\n os=True,\n select=True,\n socket=True,\n thread=False if '--use-debugger' in sys.argv else True,\n time=True)\n\nLOG = logging.getLogger(__name__)\n\n# How much time to give to the request in progress to finish in seconds before killing them\nWSGI_SERVER_REQUEST_SHUTDOWN_TIME = 2\n\n\ndef _setup():\n capabilities = {\n 'name': 'stream',\n 'listen_host': cfg.CONF.stream.host,\n 'listen_port': cfg.CONF.stream.port,\n 'type': 'active'\n }\n 
common_setup(service='stream', config=config, setup_db=True, register_mq_exchanges=True,\n register_signal_handlers=True, register_internal_trigger_types=False,\n run_migrations=False, service_registry=True, capabilities=capabilities)\n\n\ndef _run_server():\n host = cfg.CONF.stream.host\n port = cfg.CONF.stream.port\n\n LOG.info('(PID=%s) ST2 Stream API is serving on http://%s:%s.', os.getpid(), host, port)\n\n max_pool_size = eventlet.wsgi.DEFAULT_MAX_SIMULTANEOUS_REQUESTS\n worker_pool = eventlet.GreenPool(max_pool_size)\n sock = eventlet.listen((host, port))\n\n def queue_shutdown(signal_number, stack_frame):\n eventlet.spawn_n(shutdown_server_kill_pending_requests, sock=sock,\n worker_pool=worker_pool, wait_time=WSGI_SERVER_REQUEST_SHUTDOWN_TIME)\n\n # We register a custom SIGINT handler which allows us to kill long running active requests.\n # Note: Eventually we will support draining (waiting for short-running requests), but we\n # will still want to kill long running stream requests.\n register_stream_signal_handlers(handler_func=queue_shutdown)\n\n wsgi.server(sock, app.setup_app(), custom_pool=worker_pool)\n return 0\n\n\ndef _teardown():\n common_teardown()\n\n\ndef main():\n try:\n _setup()\n return _run_server()\n except SystemExit as exit_code:\n sys.exit(exit_code)\n except KeyboardInterrupt:\n listener = get_listener_if_set(name='stream')\n\n if listener:\n listener.shutdown()\n except Exception:\n LOG.exception('(PID=%s) ST2 Stream API quit due to exception.', os.getpid())\n return 1\n finally:\n _teardown()\n", "path": "st2stream/st2stream/cmd/api.py"}]} | 1,590 | 103 |
gh_patches_debug_8235 | rasdani/github-patches | git_diff | googleapis__google-api-python-client-1824 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
google-api-python-client 2.6.0, 2.7.0, 2.8.0, 2.9.0 cannot work with google-auth 1.18.0 and earlier
These versions of `google-api-python-client` require `google-auth>=1.16.0,<2dev`. However, some of the code cannot work with `google-auth` 1.18.0 and earlier because `google.auth.load_credentials_from_file()` and `google.auth.default()` do not accept the `quota_project_id` parameter. I think it would be better to increase the minimum version of `google-auth` for these `google-api-python-client` versions.
Here are the details:
#### Environment details
- OS type and version: Ubuntu 20.04
- Python version: 3.9.7
- pip version: 21.2.4
- `google-api-python-client` version: 2.6.0, 2.7.0, 2.8.0, 2.9.0
#### Steps to reproduce
Create a clean Python environment and install relevant packages
```
conda create -n test python=3.9
conda activate test
pip install --no-cache-dir google-auth==1.18.0 google-api-core==1.21.0 google-api-python-client==2.9.0
```
#### Code example
```
(test) heh@ubuntu:~$ python
Python 3.9.7 (default, Sep 16 2021, 13:09:58)
[GCC 7.5.0] :: Anaconda, Inc. on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> import googleapiclient._auth as _auth
>>> _auth.credentials_from_file("a")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/heh/anaconda3/envs/test/lib/python3.9/site-packages/googleapiclient/_auth.py", line 44, in credentials_from_file
credentials, _ = google.auth.load_credentials_from_file(filename, scopes=scopes, quota_project_id=quota_project_id)
TypeError: load_credentials_from_file() got an unexpected keyword argument 'quota_project_id'
>>> _auth.default_credentials("a")
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/heh/anaconda3/envs/test/lib/python3.9/site-packages/googleapiclient/_auth.py", line 54, in default_credentials
credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id)
TypeError: default() got an unexpected keyword argument 'quota_project_id'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2014 Google Inc. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Setup script for Google API Python client.
16
17 Also installs included versions of third party libraries, if those libraries
18 are not already installed.
19 """
20 from __future__ import print_function
21
22 import sys
23
24 if sys.version_info < (3, 6):
25 print("google-api-python-client requires python3 version >= 3.6.", file=sys.stderr)
26 sys.exit(1)
27
28 import io
29 import os
30
31 from setuptools import setup
32
33 packages = ["apiclient", "googleapiclient", "googleapiclient/discovery_cache"]
34
35 install_requires = [
36 "httplib2>=0.15.0,<1dev",
37 # NOTE: Maintainers, please do not require google-auth>=2.x.x
38 # Until this issue is closed
39 # https://github.com/googleapis/google-cloud-python/issues/10566
40 "google-auth>=1.16.0,<3.0.0dev",
41 "google-auth-httplib2>=0.1.0",
42 # NOTE: Maintainers, please do not require google-api-core>=2.x.x
43 # Until this issue is closed
44 # https://github.com/googleapis/google-cloud-python/issues/10566
45 "google-api-core >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0",
46 "uritemplate>=3.0.1,<5",
47 ]
48
49 package_root = os.path.abspath(os.path.dirname(__file__))
50
51 readme_filename = os.path.join(package_root, "README.md")
52 with io.open(readme_filename, encoding="utf-8") as readme_file:
53 readme = readme_file.read()
54
55 package_root = os.path.abspath(os.path.dirname(__file__))
56
57 version = {}
58 with open(os.path.join(package_root, "googleapiclient/version.py")) as fp:
59 exec(fp.read(), version)
60 version = version["__version__"]
61
62 setup(
63 name="google-api-python-client",
64 version=version,
65 description="Google API Client Library for Python",
66 long_description=readme,
67 long_description_content_type="text/markdown",
68 author="Google LLC",
69 author_email="[email protected]",
70 url="https://github.com/googleapis/google-api-python-client/",
71 install_requires=install_requires,
72 python_requires=">=3.6",
73 packages=packages,
74 package_data={"googleapiclient": ["discovery_cache/documents/*.json"]},
75 license="Apache 2.0",
76 keywords="google api client",
77 classifiers=[
78 "Programming Language :: Python :: 3",
79 "Programming Language :: Python :: 3.6",
80 "Programming Language :: Python :: 3.7",
81 "Programming Language :: Python :: 3.8",
82 "Programming Language :: Python :: 3.9",
83 "Programming Language :: Python :: 3.10",
84 "Development Status :: 5 - Production/Stable",
85 "Intended Audience :: Developers",
86 "License :: OSI Approved :: Apache Software License",
87 "Operating System :: OS Independent",
88 "Topic :: Internet :: WWW/HTTP",
89 ],
90 )
91
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,7 @@
# NOTE: Maintainers, please do not require google-auth>=2.x.x
# Until this issue is closed
# https://github.com/googleapis/google-cloud-python/issues/10566
- "google-auth>=1.16.0,<3.0.0dev",
+ "google-auth>=1.19.0,<3.0.0dev",
"google-auth-httplib2>=0.1.0",
# NOTE: Maintainers, please do not require google-api-core>=2.x.x
# Until this issue is closed
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -37,7 +37,7 @@\n # NOTE: Maintainers, please do not require google-auth>=2.x.x\n # Until this issue is closed\n # https://github.com/googleapis/google-cloud-python/issues/10566\n- \"google-auth>=1.16.0,<3.0.0dev\",\n+ \"google-auth>=1.19.0,<3.0.0dev\",\n \"google-auth-httplib2>=0.1.0\",\n # NOTE: Maintainers, please do not require google-api-core>=2.x.x\n # Until this issue is closed\n", "issue": "google-api-python-client 2.6.0, 2.7.0, 2.8.0, 2.9.0 cannot work with google-auth 1.18.0 and earlier\nThese versions of `google-api-python-client` requires `google-auth>=1.16.0,<2dev`. However, some of the code cannot work with `google-auth` 1.18.0 and earlier because`google.auth.load_credentials_from_file()` and `google.auth.default()` do not accept the `quota_project_id` parameter. I think it will be better to increase the minimum version of `google-auth` for these `google-api-python-client` versions.\r\n\r\nHere are the details:\r\n\r\n#### Environment details\r\n\r\n - OS type and version: Ubuntu 20.04\r\n - Python version: 3.9.7\r\n - pip version: 21.2.4\r\n - `google-api-python-client` version: 2.6.0, 2.7.0, 2.8.0, 2.9.0\r\n\r\n#### Steps to reproduce\r\n\r\nCreate a clean Python environment and install relevant packages\r\n \r\n```\r\nconda create -n test python=3.9\r\nconda activate test\r\npip install --no-cache-dir google-auth==1.18.0 google-api-core==1.21.0 google-api-python-client==2.9.0\r\n```\r\n\r\n#### Code example\r\n\r\n```\r\n(test) heh@ubuntu:~$ python\r\nPython 3.9.7 (default, Sep 16 2021, 13:09:58)\r\n[GCC 7.5.0] :: Anaconda, Inc. on linux\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> import googleapiclient._auth as _auth\r\n>>> _auth.credentials_from_file(\"a\")\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"/home/heh/anaconda3/envs/test/lib/python3.9/site-packages/googleapiclient/_auth.py\", line 44, in credentials_from_file\r\n credentials, _ = google.auth.load_credentials_from_file(filename, scopes=scopes, quota_project_id=quota_project_id)\r\nTypeError: load_credentials_from_file() got an unexpected keyword argument 'quota_project_id'\r\n>>> _auth.default_credentials(\"a\")\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"/home/heh/anaconda3/envs/test/lib/python3.9/site-packages/googleapiclient/_auth.py\", line 54, in default_credentials\r\n credentials, _ = google.auth.default(scopes=scopes, quota_project_id=quota_project_id)\r\nTypeError: default() got an unexpected keyword argument 'quota_project_id'\r\n```\r\n\n", "before_files": [{"content": "# Copyright 2014 Google Inc. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Setup script for Google API Python client.\n\nAlso installs included versions of third party libraries, if those libraries\nare not already installed.\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\n\nif sys.version_info < (3, 6):\n print(\"google-api-python-client requires python3 version >= 3.6.\", file=sys.stderr)\n sys.exit(1)\n\nimport io\nimport os\n\nfrom setuptools import setup\n\npackages = [\"apiclient\", \"googleapiclient\", \"googleapiclient/discovery_cache\"]\n\ninstall_requires = [\n \"httplib2>=0.15.0,<1dev\",\n # NOTE: Maintainers, please do not require google-auth>=2.x.x\n # Until this issue is closed\n # https://github.com/googleapis/google-cloud-python/issues/10566\n \"google-auth>=1.16.0,<3.0.0dev\",\n \"google-auth-httplib2>=0.1.0\",\n # NOTE: Maintainers, please do not require google-api-core>=2.x.x\n # Until this issue is closed\n # https://github.com/googleapis/google-cloud-python/issues/10566\n \"google-api-core >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0\",\n \"uritemplate>=3.0.1,<5\",\n]\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.md\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nversion = {}\nwith open(os.path.join(package_root, \"googleapiclient/version.py\")) as fp:\n exec(fp.read(), version)\nversion = version[\"__version__\"]\n\nsetup(\n name=\"google-api-python-client\",\n version=version,\n description=\"Google API Client Library for Python\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n url=\"https://github.com/googleapis/google-api-python-client/\",\n install_requires=install_requires,\n python_requires=\">=3.6\",\n packages=packages,\n package_data={\"googleapiclient\": [\"discovery_cache/documents/*.json\"]},\n license=\"Apache 2.0\",\n keywords=\"google api client\",\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2014 Google Inc. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Setup script for Google API Python client.\n\nAlso installs included versions of third party libraries, if those libraries\nare not already installed.\n\"\"\"\nfrom __future__ import print_function\n\nimport sys\n\nif sys.version_info < (3, 6):\n print(\"google-api-python-client requires python3 version >= 3.6.\", file=sys.stderr)\n sys.exit(1)\n\nimport io\nimport os\n\nfrom setuptools import setup\n\npackages = [\"apiclient\", \"googleapiclient\", \"googleapiclient/discovery_cache\"]\n\ninstall_requires = [\n \"httplib2>=0.15.0,<1dev\",\n # NOTE: Maintainers, please do not require google-auth>=2.x.x\n # Until this issue is closed\n # https://github.com/googleapis/google-cloud-python/issues/10566\n \"google-auth>=1.19.0,<3.0.0dev\",\n \"google-auth-httplib2>=0.1.0\",\n # NOTE: Maintainers, please do not require google-api-core>=2.x.x\n # Until this issue is closed\n # https://github.com/googleapis/google-cloud-python/issues/10566\n \"google-api-core >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0\",\n \"uritemplate>=3.0.1,<5\",\n]\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nreadme_filename = os.path.join(package_root, \"README.md\")\nwith io.open(readme_filename, encoding=\"utf-8\") as readme_file:\n readme = readme_file.read()\n\npackage_root = os.path.abspath(os.path.dirname(__file__))\n\nversion = {}\nwith open(os.path.join(package_root, \"googleapiclient/version.py\")) as fp:\n exec(fp.read(), version)\nversion = version[\"__version__\"]\n\nsetup(\n name=\"google-api-python-client\",\n version=version,\n description=\"Google API Client Library for Python\",\n long_description=readme,\n long_description_content_type=\"text/markdown\",\n author=\"Google LLC\",\n author_email=\"[email protected]\",\n url=\"https://github.com/googleapis/google-api-python-client/\",\n install_requires=install_requires,\n python_requires=\">=3.6\",\n packages=packages,\n package_data={\"googleapiclient\": [\"discovery_cache/documents/*.json\"]},\n license=\"Apache 2.0\",\n keywords=\"google api client\",\n classifiers=[\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: 3.10\",\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: Apache Software License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n ],\n)\n", "path": "setup.py"}]} | 1,852 | 159 |
gh_patches_debug_11914 | rasdani/github-patches | git_diff | pytorch__ignite-2984 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix warning in fast_neural_style example
Here is another good first issue to improve the ignite project. Currently, we have a warning on this line: https://github.com/pytorch/ignite/blob/master/examples/fast_neural_style/vgg.py#L10 (fast neural style example)
/opt/hostedtoolcache/Python/3.9.17/x64/lib/python3.9/site-packages/torchvision/models/_utils.py:208: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and may be removed in the future, please use 'weights' instead.
warnings.warn(
/opt/hostedtoolcache/Python/3.9.17/x64/lib/python3.9/site-packages/torchvision/models/_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=VGG16_Weights.IMAGENET1K_V1`. You can also use `weights=VGG16_Weights.DEFAULT` to get the most up-to-date weights.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/fast_neural_style/vgg.py`
Content:
```
1 from collections import namedtuple
2
3 import torch
4 from torchvision import models
5
6
7 class Vgg16(torch.nn.Module):
8 def __init__(self, requires_grad=False):
9 super(Vgg16, self).__init__()
10 vgg_pretrained_features = models.vgg16(pretrained=True).features
11 self.slice1 = torch.nn.Sequential()
12 self.slice2 = torch.nn.Sequential()
13 self.slice3 = torch.nn.Sequential()
14 self.slice4 = torch.nn.Sequential()
15 for x in range(4):
16 self.slice1.add_module(str(x), vgg_pretrained_features[x])
17 for x in range(4, 9):
18 self.slice2.add_module(str(x), vgg_pretrained_features[x])
19 for x in range(9, 16):
20 self.slice3.add_module(str(x), vgg_pretrained_features[x])
21 for x in range(16, 23):
22 self.slice4.add_module(str(x), vgg_pretrained_features[x])
23 if not requires_grad:
24 for param in self.parameters():
25 param.requires_grad = False
26
27 def forward(self, X):
28 h = self.slice1(X)
29 h_relu1_2 = h
30 h = self.slice2(h)
31 h_relu2_2 = h
32 h = self.slice3(h)
33 h_relu3_3 = h
34 h = self.slice4(h)
35 h_relu4_3 = h
36 vgg_outputs = namedtuple("VggOutputs", ["relu1_2", "relu2_2", "relu3_3", "relu4_3"])
37 out = vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_3, h_relu4_3)
38 return out
39
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/fast_neural_style/vgg.py b/examples/fast_neural_style/vgg.py
--- a/examples/fast_neural_style/vgg.py
+++ b/examples/fast_neural_style/vgg.py
@@ -2,12 +2,13 @@
import torch
from torchvision import models
+from torchvision.models.vgg import VGG16_Weights
class Vgg16(torch.nn.Module):
def __init__(self, requires_grad=False):
super(Vgg16, self).__init__()
- vgg_pretrained_features = models.vgg16(pretrained=True).features
+ vgg_pretrained_features = models.vgg16(weights=VGG16_Weights.IMAGENET1K_V1).features
self.slice1 = torch.nn.Sequential()
self.slice2 = torch.nn.Sequential()
self.slice3 = torch.nn.Sequential()
| {"golden_diff": "diff --git a/examples/fast_neural_style/vgg.py b/examples/fast_neural_style/vgg.py\n--- a/examples/fast_neural_style/vgg.py\n+++ b/examples/fast_neural_style/vgg.py\n@@ -2,12 +2,13 @@\n \n import torch\n from torchvision import models\n+from torchvision.models.vgg import VGG16_Weights\n \n \n class Vgg16(torch.nn.Module):\n def __init__(self, requires_grad=False):\n super(Vgg16, self).__init__()\n- vgg_pretrained_features = models.vgg16(pretrained=True).features\n+ vgg_pretrained_features = models.vgg16(weights=VGG16_Weights.IMAGENET1K_V1).features\n self.slice1 = torch.nn.Sequential()\n self.slice2 = torch.nn.Sequential()\n self.slice3 = torch.nn.Sequential()\n", "issue": "Fix warning in fast_neural_style example\nHere is another good first issue to improve the ignite project. Currently, we have a warning on this line: https://github.com/pytorch/ignite/blob/master/examples/fast_neural_style/vgg.py#L10 (fast neural style example)\r\n /opt/hostedtoolcache/Python/3.9.17/x64/lib/python3.9/site-packages/torchvision/models/_utils.py:208: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and may be removed in the future, please use 'weights' instead.\r\n warnings.warn(\r\n/opt/hostedtoolcache/Python/3.9.17/x64/lib/python3.9/site-packages/torchvision/models/_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and may be removed in the future. The current behavior is equivalent to passing `weights=VGG16_Weights.IMAGENET1K_V1`. You can also use `weights=VGG16_Weights.DEFAULT` to get the most up-to-date weights.\n", "before_files": [{"content": "from collections import namedtuple\n\nimport torch\nfrom torchvision import models\n\n\nclass Vgg16(torch.nn.Module):\n def __init__(self, requires_grad=False):\n super(Vgg16, self).__init__()\n vgg_pretrained_features = models.vgg16(pretrained=True).features\n self.slice1 = torch.nn.Sequential()\n self.slice2 = torch.nn.Sequential()\n self.slice3 = torch.nn.Sequential()\n self.slice4 = torch.nn.Sequential()\n for x in range(4):\n self.slice1.add_module(str(x), vgg_pretrained_features[x])\n for x in range(4, 9):\n self.slice2.add_module(str(x), vgg_pretrained_features[x])\n for x in range(9, 16):\n self.slice3.add_module(str(x), vgg_pretrained_features[x])\n for x in range(16, 23):\n self.slice4.add_module(str(x), vgg_pretrained_features[x])\n if not requires_grad:\n for param in self.parameters():\n param.requires_grad = False\n\n def forward(self, X):\n h = self.slice1(X)\n h_relu1_2 = h\n h = self.slice2(h)\n h_relu2_2 = h\n h = self.slice3(h)\n h_relu3_3 = h\n h = self.slice4(h)\n h_relu4_3 = h\n vgg_outputs = namedtuple(\"VggOutputs\", [\"relu1_2\", \"relu2_2\", \"relu3_3\", \"relu4_3\"])\n out = vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_3, h_relu4_3)\n return out\n", "path": "examples/fast_neural_style/vgg.py"}], "after_files": [{"content": "from collections import namedtuple\n\nimport torch\nfrom torchvision import models\nfrom torchvision.models.vgg import VGG16_Weights\n\n\nclass Vgg16(torch.nn.Module):\n def __init__(self, requires_grad=False):\n super(Vgg16, self).__init__()\n vgg_pretrained_features = models.vgg16(weights=VGG16_Weights.IMAGENET1K_V1).features\n self.slice1 = torch.nn.Sequential()\n self.slice2 = torch.nn.Sequential()\n self.slice3 = torch.nn.Sequential()\n self.slice4 = torch.nn.Sequential()\n for x in range(4):\n self.slice1.add_module(str(x), vgg_pretrained_features[x])\n for x in range(4, 9):\n self.slice2.add_module(str(x), 
vgg_pretrained_features[x])\n for x in range(9, 16):\n self.slice3.add_module(str(x), vgg_pretrained_features[x])\n for x in range(16, 23):\n self.slice4.add_module(str(x), vgg_pretrained_features[x])\n if not requires_grad:\n for param in self.parameters():\n param.requires_grad = False\n\n def forward(self, X):\n h = self.slice1(X)\n h_relu1_2 = h\n h = self.slice2(h)\n h_relu2_2 = h\n h = self.slice3(h)\n h_relu3_3 = h\n h = self.slice4(h)\n h_relu4_3 = h\n vgg_outputs = namedtuple(\"VggOutputs\", [\"relu1_2\", \"relu2_2\", \"relu3_3\", \"relu4_3\"])\n out = vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_3, h_relu4_3)\n return out\n", "path": "examples/fast_neural_style/vgg.py"}]} | 955 | 189 |
gh_patches_debug_4532 | rasdani/github-patches | git_diff | huggingface__dataset-viewer-2789 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Truncate all the logs
We sometimes have very big logs (one row > 5MB). It's not useful at all and triggers warnings from infra. When we setup the logs configuration, we could try to set a maximum length
https://github.com/huggingface/dataset-viewer/blob/95527c2f1f0b8f077ed9ec74d3c75e45dbc1d00a/libs/libcommon/src/libcommon/log.py#L7-L9
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `libs/libcommon/src/libcommon/log.py`
Content:
```
1 # SPDX-License-Identifier: Apache-2.0
2 # Copyright 2022 The HuggingFace Authors.
3
4 import logging
5
6
7 def init_logging(level: int = logging.INFO) -> None:
8 logging.basicConfig(level=level, format="%(levelname)s: %(asctime)s - %(name)s - %(message)s")
9 logging.debug(f"Log level set to: {logging.getLevelName(logging.getLogger().getEffectiveLevel())}")
10
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/libs/libcommon/src/libcommon/log.py b/libs/libcommon/src/libcommon/log.py
--- a/libs/libcommon/src/libcommon/log.py
+++ b/libs/libcommon/src/libcommon/log.py
@@ -5,5 +5,5 @@
def init_logging(level: int = logging.INFO) -> None:
- logging.basicConfig(level=level, format="%(levelname)s: %(asctime)s - %(name)s - %(message)s")
+ logging.basicConfig(level=level, format="%(levelname)s: %(asctime)s - %(name)s - %(message).5000s")
logging.debug(f"Log level set to: {logging.getLevelName(logging.getLogger().getEffectiveLevel())}")
| {"golden_diff": "diff --git a/libs/libcommon/src/libcommon/log.py b/libs/libcommon/src/libcommon/log.py\n--- a/libs/libcommon/src/libcommon/log.py\n+++ b/libs/libcommon/src/libcommon/log.py\n@@ -5,5 +5,5 @@\n \n \n def init_logging(level: int = logging.INFO) -> None:\n- logging.basicConfig(level=level, format=\"%(levelname)s: %(asctime)s - %(name)s - %(message)s\")\n+ logging.basicConfig(level=level, format=\"%(levelname)s: %(asctime)s - %(name)s - %(message).5000s\")\n logging.debug(f\"Log level set to: {logging.getLevelName(logging.getLogger().getEffectiveLevel())}\")\n", "issue": "Truncate all the logs\nWe sometimes have very big logs (one row > 5MB). It's not useful at all and triggers warnings from infra. When we setup the logs configuration, we could try to set a maximum length\r\n\r\nhttps://github.com/huggingface/dataset-viewer/blob/95527c2f1f0b8f077ed9ec74d3c75e45dbc1d00a/libs/libcommon/src/libcommon/log.py#L7-L9\r\n\r\n\n", "before_files": [{"content": "# SPDX-License-Identifier: Apache-2.0\n# Copyright 2022 The HuggingFace Authors.\n\nimport logging\n\n\ndef init_logging(level: int = logging.INFO) -> None:\n logging.basicConfig(level=level, format=\"%(levelname)s: %(asctime)s - %(name)s - %(message)s\")\n logging.debug(f\"Log level set to: {logging.getLevelName(logging.getLogger().getEffectiveLevel())}\")\n", "path": "libs/libcommon/src/libcommon/log.py"}], "after_files": [{"content": "# SPDX-License-Identifier: Apache-2.0\n# Copyright 2022 The HuggingFace Authors.\n\nimport logging\n\n\ndef init_logging(level: int = logging.INFO) -> None:\n logging.basicConfig(level=level, format=\"%(levelname)s: %(asctime)s - %(name)s - %(message).5000s\")\n logging.debug(f\"Log level set to: {logging.getLevelName(logging.getLogger().getEffectiveLevel())}\")\n", "path": "libs/libcommon/src/libcommon/log.py"}]} | 477 | 148 |
gh_patches_debug_23578 | rasdani/github-patches | git_diff | Flexget__Flexget-2271 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Kodi API has been changed in v18 (Leia) such that HTTP POST is required
<!---
Before opening an issue, verify:
- Is this a feature request? Post it on https://feathub.com/Flexget/Flexget
- Did you recently upgrade? Look at the Change Log and Upgrade Actions to make sure that you don't need to make any changes to your config https://flexget.com/ChangeLog https://flexget.com/UpgradeActions
- Are you running FlexGet as a daemon? Stop it completely and then start it again https://flexget.com/CLI/daemon
- Did you search to see if the issue already exists? https://github.com/Flexget/Flexget/issues
- Did you fill out the issue template as completely as possible?
The issue template is here because it helps to ensure you submitted all the necessary information the first time, and allows us to more quickly review issues. Please fill it out correctly and do not ignore it, no matter how irrelevant you think it may be. Thanks in advance for your help with this!
--->
### Expected behaviour:
<!---
Please don't just say "it doesn't crash" or "it works". Explain what the expected result is.
--->
Updates should work
### Actual behaviour:
Error message: `JSONRPC failed. Error -32099: Bad client permission`
### Steps to reproduce:
- Step 1: Call a kodi library scan from a task
#### Config:
```
kodi_library:
action: scan
category: video
url: http://192.168.1.214
port: 80
```
### Details
The kodi API has been changed in v18 Leia and up. In the old API, all requests were HTTP GET (even API calls that update/mutate state). They've finally updated the API to require HTTP POST for updates, but they've completely failed to update the API version or even provide sensible error messages.
https://forum.kodi.tv/showthread.php?tid=324598
https://discuss.flexget.com/t/kodi-plugin-not-working-on-kodi-18/4196
**NOTE**: I no longer use Kodi, so I'm simply creating an issue based on a forum post to keep track of the issue in case other users begin to experience it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `flexget/plugins/services/kodi_library.py`
Content:
```
1 from __future__ import unicode_literals, division, absolute_import
2 from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
3
4 import logging
5 import json
6
7 from flexget import plugin
8 from flexget.event import event
9 from flexget.utils.requests import RequestException
10
11 log = logging.getLogger('kodi_library')
12
13 JSON_URI = '/jsonrpc'
14
15
16 class KodiLibrary(object):
17 schema = {
18 'type': 'object',
19 'properties': {
20 'action': {'type': 'string', 'enum': ['clean', 'scan']},
21 'category': {'type': 'string', 'enum': ['audio', 'video']},
22 'url': {'type': 'string', 'format': 'url'},
23 'port': {'type': 'integer', 'default': 8080},
24 'username': {'type': 'string'},
25 'password': {'type': 'string'},
26 'only_on_accepted': {'type': 'boolean', 'default': True}
27 },
28 'required': ['url', 'action', 'category'],
29 'additionalProperties': False,
30 }
31
32 @plugin.priority(-255)
33 def on_task_exit(self, task, config):
34 if task.accepted or not config['only_on_accepted']:
35 # make the url without trailing slash
36 base_url = config['url'][:-1] if config['url'].endswith('/') else config['url']
37 base_url += ':{0}'.format(config['port'])
38
39 url = base_url + JSON_URI
40 # create the params
41 json_params = {"id": 1, "jsonrpc": "2.0",
42 'method': '{category}Library.{action}'.format(category=config['category'].title(),
43 action=config['action'].title())}
44 params = {'request': json.dumps(json_params)}
45 log.debug('Sending request params %s', params)
46
47 try:
48 r = task.requests.get(url, params=params, auth=(config.get('username'), config.get('password'))).json()
49 if r.get('result') == 'OK':
50 log.info('Successfully sent a %s request for the %s library', config['action'], config['category'])
51 else:
52 if r.get('error'):
53 log.error('Kodi JSONRPC failed. Error %s: %s', r['error']['code'], r['error']['message'])
54 else:
55 # this should never happen as Kodi say they follow the JSON-RPC 2.0 spec
56 log.debug('Received error response %s', json.dumps(r))
57 log.error('Kodi JSONRPC failed with unrecognized message: %s', json.dumps(r))
58 except RequestException as e:
59 raise plugin.PluginError('Failed to send request to Kodi: %s' % e.args[0])
60 else:
61 log.info('No entries were accepted. No request is sent.')
62
63
64 @event('plugin.register')
65 def register_plugin():
66 plugin.register(KodiLibrary, 'kodi_library', api_ver=2)
67
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/flexget/plugins/services/kodi_library.py b/flexget/plugins/services/kodi_library.py
--- a/flexget/plugins/services/kodi_library.py
+++ b/flexget/plugins/services/kodi_library.py
@@ -38,14 +38,13 @@
url = base_url + JSON_URI
# create the params
- json_params = {"id": 1, "jsonrpc": "2.0",
- 'method': '{category}Library.{action}'.format(category=config['category'].title(),
- action=config['action'].title())}
- params = {'request': json.dumps(json_params)}
+ params = {"id": 1, "jsonrpc": "2.0",
+ 'method': '{category}Library.{action}'.format(category=config['category'].title(),
+ action=config['action'].title())}
log.debug('Sending request params %s', params)
try:
- r = task.requests.get(url, params=params, auth=(config.get('username'), config.get('password'))).json()
+ r = task.requests.post(url, json=params, auth=(config.get('username'), config.get('password'))).json()
if r.get('result') == 'OK':
log.info('Successfully sent a %s request for the %s library', config['action'], config['category'])
else:
| {"golden_diff": "diff --git a/flexget/plugins/services/kodi_library.py b/flexget/plugins/services/kodi_library.py\n--- a/flexget/plugins/services/kodi_library.py\n+++ b/flexget/plugins/services/kodi_library.py\n@@ -38,14 +38,13 @@\n \n url = base_url + JSON_URI\n # create the params\n- json_params = {\"id\": 1, \"jsonrpc\": \"2.0\",\n- 'method': '{category}Library.{action}'.format(category=config['category'].title(),\n- action=config['action'].title())}\n- params = {'request': json.dumps(json_params)}\n+ params = {\"id\": 1, \"jsonrpc\": \"2.0\",\n+ 'method': '{category}Library.{action}'.format(category=config['category'].title(),\n+ action=config['action'].title())}\n log.debug('Sending request params %s', params)\n \n try:\n- r = task.requests.get(url, params=params, auth=(config.get('username'), config.get('password'))).json()\n+ r = task.requests.post(url, json=params, auth=(config.get('username'), config.get('password'))).json()\n if r.get('result') == 'OK':\n log.info('Successfully sent a %s request for the %s library', config['action'], config['category'])\n else:\n", "issue": "Kodi API has been changed in v18 (Leia) such that HTTP POST is required\n<!---\r\nBefore opening an issue, verify:\r\n\r\n- Is this a feature request? Post it on https://feathub.com/Flexget/Flexget\r\n- Did you recently upgrade? Look at the Change Log and Upgrade Actions to make sure that you don't need to make any changes to your config https://flexget.com/ChangeLog https://flexget.com/UpgradeActions\r\n- Are you running FlexGet as a daemon? Stop it completely and then start it again https://flexget.com/CLI/daemon\r\n- Did you search to see if the issue already exists? https://github.com/Flexget/Flexget/issues\r\n- Did you fill out the issue template as completely as possible?\r\n\r\nThe issue template is here because it helps to ensure you submitted all the necessary information the first time, and allows us to more quickly review issues. Please fill it out correctly and do not ignore it, no matter how irrelevant you think it may be. Thanks in advance for your help with this!\r\n--->\r\n### Expected behaviour:\r\n<!---\r\nPlease don't just say \"it doesn't crash\" or \"it works\". Explain what the expected result is.\r\n--->\r\nUpdates should work\r\n### Actual behaviour:\r\nError message: `JSONRPC failed. Error -32099: Bad client permission`\r\n### Steps to reproduce:\r\n- Step 1: Call a kodi library scan from a task\r\n\r\n#### Config:\r\n```\r\nkodi_library:\r\n action: scan\r\n category: video\r\n url: http://192.168.1.214\r\n port: 80\r\n```\r\n\r\n### Details\r\nThe kodi API has been changed in v18 Leia and up. In the old API, all requests were HTTP GET (even API calls that update/mutate state). 
They've finally updated the API to require HTTP POST for updates, but they've completely failed to update the API version or even provide sensible error messages.\r\n\r\nhttps://forum.kodi.tv/showthread.php?tid=324598\r\nhttps://discuss.flexget.com/t/kodi-plugin-not-working-on-kodi-18/4196\r\n\r\n**NOTE**: I no longer use Kodi, so I'm simply creating an issue based on a forum post to keep track of the issue in case other users begin to experience it.\n", "before_files": [{"content": "from __future__ import unicode_literals, division, absolute_import\nfrom builtins import * # noqa pylint: disable=unused-import, redefined-builtin\n\nimport logging\nimport json\n\nfrom flexget import plugin\nfrom flexget.event import event\nfrom flexget.utils.requests import RequestException\n\nlog = logging.getLogger('kodi_library')\n\nJSON_URI = '/jsonrpc'\n\n\nclass KodiLibrary(object):\n schema = {\n 'type': 'object',\n 'properties': {\n 'action': {'type': 'string', 'enum': ['clean', 'scan']},\n 'category': {'type': 'string', 'enum': ['audio', 'video']},\n 'url': {'type': 'string', 'format': 'url'},\n 'port': {'type': 'integer', 'default': 8080},\n 'username': {'type': 'string'},\n 'password': {'type': 'string'},\n 'only_on_accepted': {'type': 'boolean', 'default': True}\n },\n 'required': ['url', 'action', 'category'],\n 'additionalProperties': False,\n }\n\n @plugin.priority(-255)\n def on_task_exit(self, task, config):\n if task.accepted or not config['only_on_accepted']:\n # make the url without trailing slash\n base_url = config['url'][:-1] if config['url'].endswith('/') else config['url']\n base_url += ':{0}'.format(config['port'])\n\n url = base_url + JSON_URI\n # create the params\n json_params = {\"id\": 1, \"jsonrpc\": \"2.0\",\n 'method': '{category}Library.{action}'.format(category=config['category'].title(),\n action=config['action'].title())}\n params = {'request': json.dumps(json_params)}\n log.debug('Sending request params %s', params)\n\n try:\n r = task.requests.get(url, params=params, auth=(config.get('username'), config.get('password'))).json()\n if r.get('result') == 'OK':\n log.info('Successfully sent a %s request for the %s library', config['action'], config['category'])\n else:\n if r.get('error'):\n log.error('Kodi JSONRPC failed. Error %s: %s', r['error']['code'], r['error']['message'])\n else:\n # this should never happen as Kodi say they follow the JSON-RPC 2.0 spec\n log.debug('Received error response %s', json.dumps(r))\n log.error('Kodi JSONRPC failed with unrecognized message: %s', json.dumps(r))\n except RequestException as e:\n raise plugin.PluginError('Failed to send request to Kodi: %s' % e.args[0])\n else:\n log.info('No entries were accepted. 
No request is sent.')\n\n\n@event('plugin.register')\ndef register_plugin():\n plugin.register(KodiLibrary, 'kodi_library', api_ver=2)\n", "path": "flexget/plugins/services/kodi_library.py"}], "after_files": [{"content": "from __future__ import unicode_literals, division, absolute_import\nfrom builtins import * # noqa pylint: disable=unused-import, redefined-builtin\n\nimport logging\nimport json\n\nfrom flexget import plugin\nfrom flexget.event import event\nfrom flexget.utils.requests import RequestException\n\nlog = logging.getLogger('kodi_library')\n\nJSON_URI = '/jsonrpc'\n\n\nclass KodiLibrary(object):\n schema = {\n 'type': 'object',\n 'properties': {\n 'action': {'type': 'string', 'enum': ['clean', 'scan']},\n 'category': {'type': 'string', 'enum': ['audio', 'video']},\n 'url': {'type': 'string', 'format': 'url'},\n 'port': {'type': 'integer', 'default': 8080},\n 'username': {'type': 'string'},\n 'password': {'type': 'string'},\n 'only_on_accepted': {'type': 'boolean', 'default': True}\n },\n 'required': ['url', 'action', 'category'],\n 'additionalProperties': False,\n }\n\n @plugin.priority(-255)\n def on_task_exit(self, task, config):\n if task.accepted or not config['only_on_accepted']:\n # make the url without trailing slash\n base_url = config['url'][:-1] if config['url'].endswith('/') else config['url']\n base_url += ':{0}'.format(config['port'])\n\n url = base_url + JSON_URI\n # create the params\n params = {\"id\": 1, \"jsonrpc\": \"2.0\",\n 'method': '{category}Library.{action}'.format(category=config['category'].title(),\n action=config['action'].title())}\n log.debug('Sending request params %s', params)\n\n try:\n r = task.requests.post(url, json=params, auth=(config.get('username'), config.get('password'))).json()\n if r.get('result') == 'OK':\n log.info('Successfully sent a %s request for the %s library', config['action'], config['category'])\n else:\n if r.get('error'):\n log.error('Kodi JSONRPC failed. Error %s: %s', r['error']['code'], r['error']['message'])\n else:\n # this should never happen as Kodi say they follow the JSON-RPC 2.0 spec\n log.debug('Received error response %s', json.dumps(r))\n log.error('Kodi JSONRPC failed with unrecognized message: %s', json.dumps(r))\n except RequestException as e:\n raise plugin.PluginError('Failed to send request to Kodi: %s' % e.args[0])\n else:\n log.info('No entries were accepted. No request is sent.')\n\n\n@event('plugin.register')\ndef register_plugin():\n plugin.register(KodiLibrary, 'kodi_library', api_ver=2)\n", "path": "flexget/plugins/services/kodi_library.py"}]} | 1,531 | 297 |
gh_patches_debug_17345 | rasdani/github-patches | git_diff | RedHatInsights__insights-core-3074 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
The smt combiner is raising IndexError exceptions in production.
The CpuTopology combiner is throwing a large number of the exception IndexError('list index out of range',) in production.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `insights/combiners/smt.py`
Content:
```
1 """
2 Simultaneous Multithreading (SMT) combiner
3 ==========================================
4
5 Combiner for Simultaneous Multithreading (SMT). It uses the results of the following parsers:
6 :class:`insights.parsers.smt.CpuCoreOnline`,
7 :class:`insights.parsers.smt.CpuSiblings`.
8 """
9
10 from insights.core.plugins import combiner
11 from insights.parsers.smt import CpuCoreOnline, CpuSiblings
12
13
14 @combiner(CpuCoreOnline, CpuSiblings)
15 class CpuTopology(object):
16 """
17 Class for collecting the online/siblings status for all CPU cores.
18
19 Sample output of the ``CpuCoreOnline`` parser is::
20
21 [[Core 0: Online], [Core 1: Online], [Core 2: Online], [Core 3: Online]]
22
23 Sample output of the ``CpuSiblings`` parser is::
24
25 [[Core 0 Siblings: [0, 2]], [Core 1 Siblings: [1, 3]], [Core 2 Siblings: [0, 2]], [Core 3 Siblings: [1, 3]]]
26
27 Attributes:
28 cores (list of dictionaries): List of all cores.
29 all_solitary (bool): True, if hyperthreading is not used.
30
31 Examples:
32 >>> type(cpu_topology)
33 <class 'insights.combiners.smt.CpuTopology'>
34 >>> cpu_topology.cores == [{'online': True, 'siblings': [0, 2]}, {'online': True, 'siblings': [1, 3]}, {'online': True, 'siblings': [0, 2]}, {'online': True, 'siblings': [1, 3]}]
35 True
36 >>> cpu_topology.all_solitary
37 False
38 """
39
40 def __init__(self, cpu_online, cpu_siblings):
41 self.cores = []
42
43 max_cpu_core_id = max([core.core_id for core in cpu_online])
44 for n in range(max_cpu_core_id + 1):
45 online = [core for core in cpu_online if core.core_id == n]
46 online = online[0].on
47 siblings = [sibling for sibling in cpu_siblings if sibling.core_id == n]
48 if len(siblings) != 0:
49 siblings = siblings[0].siblings
50
51 one_core = {"online": online, "siblings": siblings}
52 self.cores.append(one_core)
53
54 self.all_solitary = all([len(core["siblings"]) <= 1 for core in self.cores])
55
56 def online(self, core_id):
57 """
58 Returns bool value obtained from "online" file for given core_id.
59 """
60 if core_id >= len(self.cores) or core_id < 0:
61 return None
62 return self.cores[core_id]["online"]
63
64 def siblings(self, core_id):
65 """
66 Returns list of siblings for given core_id.
67 """
68 if core_id >= len(self.cores) or core_id < 0:
69 return None
70 return self.cores[core_id]["siblings"]
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/insights/combiners/smt.py b/insights/combiners/smt.py
--- a/insights/combiners/smt.py
+++ b/insights/combiners/smt.py
@@ -43,7 +43,13 @@
max_cpu_core_id = max([core.core_id for core in cpu_online])
for n in range(max_cpu_core_id + 1):
online = [core for core in cpu_online if core.core_id == n]
- online = online[0].on
+ # On some boxes cpu0 doesn't have the online file, since technically cpu0 will always
+ # be online. So check if online returns anything before trying to access online[0].
+ # If it returns nothing and n is 0 set online to True.
+ if online:
+ online = online[0].on
+ elif not online and n == 0:
+ online = True
siblings = [sibling for sibling in cpu_siblings if sibling.core_id == n]
if len(siblings) != 0:
siblings = siblings[0].siblings
| {"golden_diff": "diff --git a/insights/combiners/smt.py b/insights/combiners/smt.py\n--- a/insights/combiners/smt.py\n+++ b/insights/combiners/smt.py\n@@ -43,7 +43,13 @@\n max_cpu_core_id = max([core.core_id for core in cpu_online])\n for n in range(max_cpu_core_id + 1):\n online = [core for core in cpu_online if core.core_id == n]\n- online = online[0].on\n+ # On some boxes cpu0 doesn't have the online file, since technically cpu0 will always\n+ # be online. So check if online returns anything before trying to access online[0].\n+ # If it returns nothing and n is 0 set online to True.\n+ if online:\n+ online = online[0].on\n+ elif not online and n == 0:\n+ online = True\n siblings = [sibling for sibling in cpu_siblings if sibling.core_id == n]\n if len(siblings) != 0:\n siblings = siblings[0].siblings\n", "issue": "The smt combiner is raising IndexError exceptions in production.\nThe CpuTopology combiner is throwing a large number of the exception IndexError('list index out of range',) in production.\n", "before_files": [{"content": "\"\"\"\nSimultaneous Multithreading (SMT) combiner\n==========================================\n\nCombiner for Simultaneous Multithreading (SMT). It uses the results of the following parsers:\n:class:`insights.parsers.smt.CpuCoreOnline`,\n:class:`insights.parsers.smt.CpuSiblings`.\n\"\"\"\n\nfrom insights.core.plugins import combiner\nfrom insights.parsers.smt import CpuCoreOnline, CpuSiblings\n\n\n@combiner(CpuCoreOnline, CpuSiblings)\nclass CpuTopology(object):\n \"\"\"\n Class for collecting the online/siblings status for all CPU cores.\n\n Sample output of the ``CpuCoreOnline`` parser is::\n\n [[Core 0: Online], [Core 1: Online], [Core 2: Online], [Core 3: Online]]\n\n Sample output of the ``CpuSiblings`` parser is::\n\n [[Core 0 Siblings: [0, 2]], [Core 1 Siblings: [1, 3]], [Core 2 Siblings: [0, 2]], [Core 3 Siblings: [1, 3]]]\n\n Attributes:\n cores (list of dictionaries): List of all cores.\n all_solitary (bool): True, if hyperthreading is not used.\n\n Examples:\n >>> type(cpu_topology)\n <class 'insights.combiners.smt.CpuTopology'>\n >>> cpu_topology.cores == [{'online': True, 'siblings': [0, 2]}, {'online': True, 'siblings': [1, 3]}, {'online': True, 'siblings': [0, 2]}, {'online': True, 'siblings': [1, 3]}]\n True\n >>> cpu_topology.all_solitary\n False\n \"\"\"\n\n def __init__(self, cpu_online, cpu_siblings):\n self.cores = []\n\n max_cpu_core_id = max([core.core_id for core in cpu_online])\n for n in range(max_cpu_core_id + 1):\n online = [core for core in cpu_online if core.core_id == n]\n online = online[0].on\n siblings = [sibling for sibling in cpu_siblings if sibling.core_id == n]\n if len(siblings) != 0:\n siblings = siblings[0].siblings\n\n one_core = {\"online\": online, \"siblings\": siblings}\n self.cores.append(one_core)\n\n self.all_solitary = all([len(core[\"siblings\"]) <= 1 for core in self.cores])\n\n def online(self, core_id):\n \"\"\"\n Returns bool value obtained from \"online\" file for given core_id.\n \"\"\"\n if core_id >= len(self.cores) or core_id < 0:\n return None\n return self.cores[core_id][\"online\"]\n\n def siblings(self, core_id):\n \"\"\"\n Returns list of siblings for given core_id.\n \"\"\"\n if core_id >= len(self.cores) or core_id < 0:\n return None\n return self.cores[core_id][\"siblings\"]\n", "path": "insights/combiners/smt.py"}], "after_files": [{"content": "\"\"\"\nSimultaneous Multithreading (SMT) combiner\n==========================================\n\nCombiner for Simultaneous Multithreading (SMT). 
It uses the results of the following parsers:\n:class:`insights.parsers.smt.CpuCoreOnline`,\n:class:`insights.parsers.smt.CpuSiblings`.\n\"\"\"\n\nfrom insights.core.plugins import combiner\nfrom insights.parsers.smt import CpuCoreOnline, CpuSiblings\n\n\n@combiner(CpuCoreOnline, CpuSiblings)\nclass CpuTopology(object):\n \"\"\"\n Class for collecting the online/siblings status for all CPU cores.\n\n Sample output of the ``CpuCoreOnline`` parser is::\n\n [[Core 0: Online], [Core 1: Online], [Core 2: Online], [Core 3: Online]]\n\n Sample output of the ``CpuSiblings`` parser is::\n\n [[Core 0 Siblings: [0, 2]], [Core 1 Siblings: [1, 3]], [Core 2 Siblings: [0, 2]], [Core 3 Siblings: [1, 3]]]\n\n Attributes:\n cores (list of dictionaries): List of all cores.\n all_solitary (bool): True, if hyperthreading is not used.\n\n Examples:\n >>> type(cpu_topology)\n <class 'insights.combiners.smt.CpuTopology'>\n >>> cpu_topology.cores == [{'online': True, 'siblings': [0, 2]}, {'online': True, 'siblings': [1, 3]}, {'online': True, 'siblings': [0, 2]}, {'online': True, 'siblings': [1, 3]}]\n True\n >>> cpu_topology.all_solitary\n False\n \"\"\"\n\n def __init__(self, cpu_online, cpu_siblings):\n self.cores = []\n\n max_cpu_core_id = max([core.core_id for core in cpu_online])\n for n in range(max_cpu_core_id + 1):\n online = [core for core in cpu_online if core.core_id == n]\n # On some boxes cpu0 doesn't have the online file, since technically cpu0 will always\n # be online. So check if online returns anything before trying to access online[0].\n # If it returns nothing and n is 0 set online to True.\n if online:\n online = online[0].on\n elif not online and n == 0:\n online = True\n siblings = [sibling for sibling in cpu_siblings if sibling.core_id == n]\n if len(siblings) != 0:\n siblings = siblings[0].siblings\n\n one_core = {\"online\": online, \"siblings\": siblings}\n self.cores.append(one_core)\n\n self.all_solitary = all([len(core[\"siblings\"]) <= 1 for core in self.cores])\n\n def online(self, core_id):\n \"\"\"\n Returns bool value obtained from \"online\" file for given core_id.\n \"\"\"\n if core_id >= len(self.cores) or core_id < 0:\n return None\n return self.cores[core_id][\"online\"]\n\n def siblings(self, core_id):\n \"\"\"\n Returns list of siblings for given core_id.\n \"\"\"\n if core_id >= len(self.cores) or core_id < 0:\n return None\n return self.cores[core_id][\"siblings\"]\n", "path": "insights/combiners/smt.py"}]} | 1,094 | 247 |
gh_patches_debug_39470 | rasdani/github-patches | git_diff | microsoft__onnxscript-120 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Set up linters for the project
It helps if we set up linters early in the development process (less big PRs for fixes in the future). We may consider: mypy, pylint, black, isort, pydocstyle, flake8, bandit and xdoctest.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # -------------------------------------------------------------------------
2 # Copyright (c) Microsoft Corporation. All rights reserved.
3 # Licensed under the MIT License.
4 # --------------------------------------------------------------------------
5
6 # -*- coding: utf-8 -*-
7
8 from distutils.core import setup
9 from setuptools import find_packages
10 import os
11 this = os.path.dirname(__file__)
12
13 with open(os.path.join(this, "requirements.txt"), "r") as f:
14 requirements = [_ for _ in [_.strip("\r\n ")
15 for _ in f.readlines()] if _ is not None]
16
17 packages = find_packages()
18 assert packages
19
20 # read version from the package file.
21 version_str = '1.0.0'
22 with (open(os.path.join(this, 'onnxscript/__init__.py'), "r")) as f:
23 line = [_ for _ in [_.strip("\r\n ")
24 for _ in f.readlines()] if _.startswith("__version__")]
25 if len(line) > 0:
26 version_str = line[0].split('=')[1].strip('" ')
27
28 README = os.path.join(os.getcwd(), "README.md")
29 with open(README) as f:
30 long_description = f.read()
31 start_pos = long_description.find('## Contributing')
32 if start_pos >= 0:
33 long_description = long_description[:start_pos]
34
35 setup(
36 name='onnx-script',
37 version=version_str,
38 description="Authoring ONNX functions in Python",
39 long_description=long_description,
40 long_description_content_type='text/markdown',
41 license='Apache License v2.0',
42 author='Microsoft Corporation',
43 author_email='[email protected]',
44 url='https://github.com/onnx/onnx-script',
45 packages=packages,
46 include_package_data=True,
47 install_requires=requirements,
48 classifiers=[
49 'Development Status :: 4 - Beta',
50 'Environment :: Console',
51 'Intended Audience :: Developers',
52 'Operating System :: MacOS :: MacOS X',
53 'Operating System :: Microsoft :: Windows',
54 'Programming Language :: Python',
55 'Programming Language :: Python :: 3.7',
56 'Programming Language :: Python :: 3.8',
57 'Programming Language :: Python :: 3.9',
58 'License :: OSI Approved :: Apache Software License'],
59 )
60
```
Path: `onnxscript/__init__.py`
Content:
```
1 # -------------------------------------------------------------------------
2 # Copyright (c) Microsoft Corporation. All rights reserved.
3 # Licensed under the MIT License.
4 # --------------------------------------------------------------------------
5
6 __version__ = '0.1'
7
8 from .main import script, export_onnx_lib, OnnxFunction
9 from .backend.onnx_export import export2python as proto2python
10
11 __all__ = [script, export_onnx_lib, OnnxFunction, proto2python]
12
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/onnxscript/__init__.py b/onnxscript/__init__.py
--- a/onnxscript/__init__.py
+++ b/onnxscript/__init__.py
@@ -3,9 +3,22 @@
# Licensed under the MIT License.
# --------------------------------------------------------------------------
-__version__ = '0.1'
-
+import sys
from .main import script, export_onnx_lib, OnnxFunction
from .backend.onnx_export import export2python as proto2python
-__all__ = [script, export_onnx_lib, OnnxFunction, proto2python]
+if sys.version_info[0:2] >= (3, 8):
+ import importlib.metadata as importlib_metadata
+else:
+ # TODO: Remove this when Python 3.7 is deprecated
+ import importlib_metadata
+
+try:
+ # TODO: should we algin the folder name with package name?
+ # It's onnxscript and onnx-script now. That way, we can use __package__ here.
+ __version__ = importlib_metadata.version("onnx-script")
+except importlib_metadata.PackageNotFoundError:
+ __version__ = None
+
+
+__all__ = ["script", "export_onnx_lib", "OnnxFunction", "proto2python"]
diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -5,55 +5,26 @@
# -*- coding: utf-8 -*-
-from distutils.core import setup
-from setuptools import find_packages
import os
-this = os.path.dirname(__file__)
+import setuptools
-with open(os.path.join(this, "requirements.txt"), "r") as f:
- requirements = [_ for _ in [_.strip("\r\n ")
- for _ in f.readlines()] if _ is not None]
+this = os.path.dirname(__file__)
-packages = find_packages()
+packages = setuptools.find_packages()
assert packages
-# read version from the package file.
-version_str = '1.0.0'
-with (open(os.path.join(this, 'onnxscript/__init__.py'), "r")) as f:
- line = [_ for _ in [_.strip("\r\n ")
- for _ in f.readlines()] if _.startswith("__version__")]
- if len(line) > 0:
- version_str = line[0].split('=')[1].strip('" ')
-
README = os.path.join(os.getcwd(), "README.md")
-with open(README) as f:
+with open(README, encoding="utf-8") as f:
long_description = f.read()
start_pos = long_description.find('## Contributing')
if start_pos >= 0:
long_description = long_description[:start_pos]
-setup(
- name='onnx-script',
- version=version_str,
- description="Authoring ONNX functions in Python",
+setuptools.setup(
long_description=long_description,
long_description_content_type='text/markdown',
- license='Apache License v2.0',
- author='Microsoft Corporation',
- author_email='[email protected]',
url='https://github.com/onnx/onnx-script',
packages=packages,
include_package_data=True,
- install_requires=requirements,
- classifiers=[
- 'Development Status :: 4 - Beta',
- 'Environment :: Console',
- 'Intended Audience :: Developers',
- 'Operating System :: MacOS :: MacOS X',
- 'Operating System :: Microsoft :: Windows',
- 'Programming Language :: Python',
- 'Programming Language :: Python :: 3.7',
- 'Programming Language :: Python :: 3.8',
- 'Programming Language :: Python :: 3.9',
- 'License :: OSI Approved :: Apache Software License'],
+ package_data={"onnx-script": ["py.typed"], "onnx": ["py.typed"],},
)
| {"golden_diff": "diff --git a/onnxscript/__init__.py b/onnxscript/__init__.py\n--- a/onnxscript/__init__.py\n+++ b/onnxscript/__init__.py\n@@ -3,9 +3,22 @@\n # Licensed under the MIT License.\n # --------------------------------------------------------------------------\n \n-__version__ = '0.1'\n-\n+import sys\n from .main import script, export_onnx_lib, OnnxFunction\n from .backend.onnx_export import export2python as proto2python\n \n-__all__ = [script, export_onnx_lib, OnnxFunction, proto2python]\n+if sys.version_info[0:2] >= (3, 8):\n+ import importlib.metadata as importlib_metadata\n+else:\n+ # TODO: Remove this when Python 3.7 is deprecated\n+ import importlib_metadata\n+\n+try:\n+ # TODO: should we algin the folder name with package name?\n+ # It's onnxscript and onnx-script now. That way, we can use __package__ here.\n+ __version__ = importlib_metadata.version(\"onnx-script\")\n+except importlib_metadata.PackageNotFoundError:\n+ __version__ = None\n+\n+\n+__all__ = [\"script\", \"export_onnx_lib\", \"OnnxFunction\", \"proto2python\"]\ndiff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -5,55 +5,26 @@\n \n # -*- coding: utf-8 -*-\n \n-from distutils.core import setup\n-from setuptools import find_packages\n import os\n-this = os.path.dirname(__file__)\n+import setuptools\n \n-with open(os.path.join(this, \"requirements.txt\"), \"r\") as f:\n- requirements = [_ for _ in [_.strip(\"\\r\\n \")\n- for _ in f.readlines()] if _ is not None]\n+this = os.path.dirname(__file__)\n \n-packages = find_packages()\n+packages = setuptools.find_packages()\n assert packages\n \n-# read version from the package file.\n-version_str = '1.0.0'\n-with (open(os.path.join(this, 'onnxscript/__init__.py'), \"r\")) as f:\n- line = [_ for _ in [_.strip(\"\\r\\n \")\n- for _ in f.readlines()] if _.startswith(\"__version__\")]\n- if len(line) > 0:\n- version_str = line[0].split('=')[1].strip('\" ')\n-\n README = os.path.join(os.getcwd(), \"README.md\")\n-with open(README) as f:\n+with open(README, encoding=\"utf-8\") as f:\n long_description = f.read()\n start_pos = long_description.find('## Contributing')\n if start_pos >= 0:\n long_description = long_description[:start_pos]\n \n-setup(\n- name='onnx-script',\n- version=version_str,\n- description=\"Authoring ONNX functions in Python\",\n+setuptools.setup(\n long_description=long_description,\n long_description_content_type='text/markdown',\n- license='Apache License v2.0',\n- author='Microsoft Corporation',\n- author_email='[email protected]',\n url='https://github.com/onnx/onnx-script',\n packages=packages,\n include_package_data=True,\n- install_requires=requirements,\n- classifiers=[\n- 'Development Status :: 4 - Beta',\n- 'Environment :: Console',\n- 'Intended Audience :: Developers',\n- 'Operating System :: MacOS :: MacOS X',\n- 'Operating System :: Microsoft :: Windows',\n- 'Programming Language :: Python',\n- 'Programming Language :: Python :: 3.7',\n- 'Programming Language :: Python :: 3.8',\n- 'Programming Language :: Python :: 3.9',\n- 'License :: OSI Approved :: Apache Software License'],\n+ package_data={\"onnx-script\": [\"py.typed\"], \"onnx\": [\"py.typed\"],},\n )\n", "issue": "Set up linters for the project\nIt helps if we set up linters early in the development process (less big PRs for fixes in the future). 
We may consider: mypy, pylint, black, isort, pydocstyle, flake8, bandit and xdoctest.\n", "before_files": [{"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n# --------------------------------------------------------------------------\n\n# -*- coding: utf-8 -*-\n\nfrom distutils.core import setup\nfrom setuptools import find_packages\nimport os\nthis = os.path.dirname(__file__)\n\nwith open(os.path.join(this, \"requirements.txt\"), \"r\") as f:\n requirements = [_ for _ in [_.strip(\"\\r\\n \")\n for _ in f.readlines()] if _ is not None]\n\npackages = find_packages()\nassert packages\n\n# read version from the package file.\nversion_str = '1.0.0'\nwith (open(os.path.join(this, 'onnxscript/__init__.py'), \"r\")) as f:\n line = [_ for _ in [_.strip(\"\\r\\n \")\n for _ in f.readlines()] if _.startswith(\"__version__\")]\n if len(line) > 0:\n version_str = line[0].split('=')[1].strip('\" ')\n\nREADME = os.path.join(os.getcwd(), \"README.md\")\nwith open(README) as f:\n long_description = f.read()\n start_pos = long_description.find('## Contributing')\n if start_pos >= 0:\n long_description = long_description[:start_pos]\n\nsetup(\n name='onnx-script',\n version=version_str,\n description=\"Authoring ONNX functions in Python\",\n long_description=long_description,\n long_description_content_type='text/markdown',\n license='Apache License v2.0',\n author='Microsoft Corporation',\n author_email='[email protected]',\n url='https://github.com/onnx/onnx-script',\n packages=packages,\n include_package_data=True,\n install_requires=requirements,\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'Operating System :: MacOS :: MacOS X',\n 'Operating System :: Microsoft :: Windows',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: 3.9',\n 'License :: OSI Approved :: Apache Software License'],\n)\n", "path": "setup.py"}, {"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n# --------------------------------------------------------------------------\n\n__version__ = '0.1'\n\nfrom .main import script, export_onnx_lib, OnnxFunction\nfrom .backend.onnx_export import export2python as proto2python\n\n__all__ = [script, export_onnx_lib, OnnxFunction, proto2python]\n", "path": "onnxscript/__init__.py"}], "after_files": [{"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. 
All rights reserved.\n# Licensed under the MIT License.\n# --------------------------------------------------------------------------\n\n# -*- coding: utf-8 -*-\n\nimport os\nimport setuptools\n\nthis = os.path.dirname(__file__)\n\npackages = setuptools.find_packages()\nassert packages\n\nREADME = os.path.join(os.getcwd(), \"README.md\")\nwith open(README, encoding=\"utf-8\") as f:\n long_description = f.read()\n start_pos = long_description.find('## Contributing')\n if start_pos >= 0:\n long_description = long_description[:start_pos]\n\nsetuptools.setup(\n long_description=long_description,\n long_description_content_type='text/markdown',\n url='https://github.com/onnx/onnx-script',\n packages=packages,\n include_package_data=True,\n package_data={\"onnx-script\": [\"py.typed\"], \"onnx\": [\"py.typed\"],},\n)\n", "path": "setup.py"}, {"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License.\n# --------------------------------------------------------------------------\n\nimport sys\nfrom .main import script, export_onnx_lib, OnnxFunction\nfrom .backend.onnx_export import export2python as proto2python\n\nif sys.version_info[0:2] >= (3, 8):\n import importlib.metadata as importlib_metadata\nelse:\n # TODO: Remove this when Python 3.7 is deprecated\n import importlib_metadata\n\ntry:\n # TODO: should we algin the folder name with package name?\n # It's onnxscript and onnx-script now. That way, we can use __package__ here.\n __version__ = importlib_metadata.version(\"onnx-script\")\nexcept importlib_metadata.PackageNotFoundError:\n __version__ = None\n\n\n__all__ = [\"script\", \"export_onnx_lib\", \"OnnxFunction\", \"proto2python\"]\n", "path": "onnxscript/__init__.py"}]} | 1,011 | 844 |
gh_patches_debug_1350 | rasdani/github-patches | git_diff | fossasia__open-event-server-7659 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Preset roles deletion is allowed
**Describe the bug**
Currently the preset roles like "organizer, coorganizer etc" should not be deleted from the db. But right now it is possible to delete these entries.
**To Reproduce**
Steps to reproduce the behavior:
1. Hit the delete endpoint for role
2. Choose any of the ids pointing to any of the 7 preset roles
3. You can find deletion to be successful
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Stacktrace**
<!-- If applicable, add stacktrace to help explain your problem. -->
**Additional details (please complete the following information):**
- OS: [e.g. MacOS, Ubuntu, CentOS]
- Python Version [e.g. `3.5`, `3.6`]
- `HEAD` Commit hash [e.g. `4629c62`]
**Additional context**
<!-- Add any other context about the problem here. -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/api/roles.py`
Content:
```
1 from flask_rest_jsonapi import ResourceDetail, ResourceList
2
3 from app.api.bootstrap import api
4 from app.api.helpers.db import safe_query_kwargs
5 from app.api.helpers.errors import UnprocessableEntityError
6 from app.api.schema.roles import RoleSchema
7 from app.models import db
8 from app.models.role import Role
9 from app.models.role_invite import RoleInvite
10 from app.models.users_events_role import UsersEventsRoles
11
12
13 class RoleList(ResourceList):
14 """
15 List and create role
16 """
17
18 decorators = (api.has_permission('is_admin', methods="POST"),)
19 schema = RoleSchema
20 data_layer = {'session': db.session, 'model': Role}
21
22
23 class RoleDetail(ResourceDetail):
24 """
25 Role detail by id
26 """
27
28 def before_get_object(self, view_kwargs):
29 """
30 before get method to get the resource id for fetching details
31 :param view_kwargs:
32 :return:
33 """
34 if view_kwargs.get('role_invite_id') is not None:
35 role_invite = safe_query_kwargs(RoleInvite, view_kwargs, 'role_invite_id')
36 if role_invite.role_id is not None:
37 view_kwargs['id'] = role_invite.role_id
38 else:
39 view_kwargs['id'] = None
40
41 if view_kwargs.get('users_events_roles_id') is not None:
42 users_events_role = safe_query_kwargs(
43 UsersEventsRoles,
44 view_kwargs,
45 'users_events_roles_id',
46 )
47
48 if users_events_role.role_id is not None:
49 view_kwargs['id'] = users_events_role.role_id
50 else:
51 view_kwargs['id'] = None
52
53 def before_update_object(self, role, data, view_kwargs):
54 """
55 Method to edit object
56 :param role:
57 :param data:
58 :param view_kwargs:
59 :return:
60 """
61 if data.get('name'):
62 if data['name'] in [
63 'owner',
64 'organizer',
65 'coorganizer',
66 'registrar',
67 'moderator',
68 'attendee',
69 'track_organizer',
70 ]:
71 raise UnprocessableEntityError(
72 {'data': 'name'}, "The given name cannot be updated"
73 )
74
75 def before_delete_object(self, obj, kwargs):
76 """
77 method to check proper resource name before deleting
78 :param obj:
79 :param kwargs:
80 :return:
81 """
82 if obj.name in [
83 'owner',
84 'organizer',
85 'coorganizer',
86 'registrar',
87 'moderator',
88 'attendee',
89 'track_organizer',
90 ]:
91 raise UnprocessableEntityError(
92 {'data': 'name'}, "The resource with given name cannot be deleted"
93 )
94
95 decorators = (api.has_permission('is_admin', methods="PATCH,DELETE"),)
96 schema = RoleSchema
97 data_layer = {
98 'session': db.session,
99 'model': Role,
100 'methods': {'before_get_object': before_get_object},
101 }
102
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/app/api/roles.py b/app/api/roles.py
--- a/app/api/roles.py
+++ b/app/api/roles.py
@@ -97,5 +97,8 @@
data_layer = {
'session': db.session,
'model': Role,
- 'methods': {'before_get_object': before_get_object},
+ 'methods': {
+ 'before_get_object': before_get_object,
+ 'before_delete_object': before_delete_object,
+ },
}
| {"golden_diff": "diff --git a/app/api/roles.py b/app/api/roles.py\n--- a/app/api/roles.py\n+++ b/app/api/roles.py\n@@ -97,5 +97,8 @@\n data_layer = {\n 'session': db.session,\n 'model': Role,\n- 'methods': {'before_get_object': before_get_object},\n+ 'methods': {\n+ 'before_get_object': before_get_object,\n+ 'before_delete_object': before_delete_object,\n+ },\n }\n", "issue": "Preset roles deletion is allowed\n**Describe the bug**\r\nCurrently the preset roles like \"organizer, coorganizer etc\" should not be deleted from the db. But right now it is possible to delete these entries.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Hit the delete endpoint for role \r\n2. Choose any of the ids pointing to any of the 7 preset roles\r\n3. You can find deletion to be successful\r\n\r\n**Expected behavior**\r\n<!-- A clear and concise description of what you expected to happen. -->\r\n\r\n**Stacktrace**\r\n<!-- If applicable, add stacktrace to help explain your problem. -->\r\n\r\n**Additional details (please complete the following information):**\r\n - OS: [e.g. MacOS, Ubuntu, CentOS]\r\n - Python Version [e.g. `3.5`, `3.6`]\r\n - `HEAD` Commit hash [e.g. `4629c62`]\r\n\r\n**Additional context**\r\n<!-- Add any other context about the problem here. -->\r\n\n", "before_files": [{"content": "from flask_rest_jsonapi import ResourceDetail, ResourceList\n\nfrom app.api.bootstrap import api\nfrom app.api.helpers.db import safe_query_kwargs\nfrom app.api.helpers.errors import UnprocessableEntityError\nfrom app.api.schema.roles import RoleSchema\nfrom app.models import db\nfrom app.models.role import Role\nfrom app.models.role_invite import RoleInvite\nfrom app.models.users_events_role import UsersEventsRoles\n\n\nclass RoleList(ResourceList):\n \"\"\"\n List and create role\n \"\"\"\n\n decorators = (api.has_permission('is_admin', methods=\"POST\"),)\n schema = RoleSchema\n data_layer = {'session': db.session, 'model': Role}\n\n\nclass RoleDetail(ResourceDetail):\n \"\"\"\n Role detail by id\n \"\"\"\n\n def before_get_object(self, view_kwargs):\n \"\"\"\n before get method to get the resource id for fetching details\n :param view_kwargs:\n :return:\n \"\"\"\n if view_kwargs.get('role_invite_id') is not None:\n role_invite = safe_query_kwargs(RoleInvite, view_kwargs, 'role_invite_id')\n if role_invite.role_id is not None:\n view_kwargs['id'] = role_invite.role_id\n else:\n view_kwargs['id'] = None\n\n if view_kwargs.get('users_events_roles_id') is not None:\n users_events_role = safe_query_kwargs(\n UsersEventsRoles,\n view_kwargs,\n 'users_events_roles_id',\n )\n\n if users_events_role.role_id is not None:\n view_kwargs['id'] = users_events_role.role_id\n else:\n view_kwargs['id'] = None\n\n def before_update_object(self, role, data, view_kwargs):\n \"\"\"\n Method to edit object\n :param role:\n :param data:\n :param view_kwargs:\n :return:\n \"\"\"\n if data.get('name'):\n if data['name'] in [\n 'owner',\n 'organizer',\n 'coorganizer',\n 'registrar',\n 'moderator',\n 'attendee',\n 'track_organizer',\n ]:\n raise UnprocessableEntityError(\n {'data': 'name'}, \"The given name cannot be updated\"\n )\n\n def before_delete_object(self, obj, kwargs):\n \"\"\"\n method to check proper resource name before deleting\n :param obj:\n :param kwargs:\n :return:\n \"\"\"\n if obj.name in [\n 'owner',\n 'organizer',\n 'coorganizer',\n 'registrar',\n 'moderator',\n 'attendee',\n 'track_organizer',\n ]:\n raise UnprocessableEntityError(\n {'data': 'name'}, \"The resource with given name cannot 
be deleted\"\n )\n\n decorators = (api.has_permission('is_admin', methods=\"PATCH,DELETE\"),)\n schema = RoleSchema\n data_layer = {\n 'session': db.session,\n 'model': Role,\n 'methods': {'before_get_object': before_get_object},\n }\n", "path": "app/api/roles.py"}], "after_files": [{"content": "from flask_rest_jsonapi import ResourceDetail, ResourceList\n\nfrom app.api.bootstrap import api\nfrom app.api.helpers.db import safe_query_kwargs\nfrom app.api.helpers.errors import UnprocessableEntityError\nfrom app.api.schema.roles import RoleSchema\nfrom app.models import db\nfrom app.models.role import Role\nfrom app.models.role_invite import RoleInvite\nfrom app.models.users_events_role import UsersEventsRoles\n\n\nclass RoleList(ResourceList):\n \"\"\"\n List and create role\n \"\"\"\n\n decorators = (api.has_permission('is_admin', methods=\"POST\"),)\n schema = RoleSchema\n data_layer = {'session': db.session, 'model': Role}\n\n\nclass RoleDetail(ResourceDetail):\n \"\"\"\n Role detail by id\n \"\"\"\n\n def before_get_object(self, view_kwargs):\n \"\"\"\n before get method to get the resource id for fetching details\n :param view_kwargs:\n :return:\n \"\"\"\n if view_kwargs.get('role_invite_id') is not None:\n role_invite = safe_query_kwargs(RoleInvite, view_kwargs, 'role_invite_id')\n if role_invite.role_id is not None:\n view_kwargs['id'] = role_invite.role_id\n else:\n view_kwargs['id'] = None\n\n if view_kwargs.get('users_events_roles_id') is not None:\n users_events_role = safe_query_kwargs(\n UsersEventsRoles,\n view_kwargs,\n 'users_events_roles_id',\n )\n\n if users_events_role.role_id is not None:\n view_kwargs['id'] = users_events_role.role_id\n else:\n view_kwargs['id'] = None\n\n def before_update_object(self, role, data, view_kwargs):\n \"\"\"\n Method to edit object\n :param role:\n :param data:\n :param view_kwargs:\n :return:\n \"\"\"\n if data.get('name'):\n if data['name'] in [\n 'owner',\n 'organizer',\n 'coorganizer',\n 'registrar',\n 'moderator',\n 'attendee',\n 'track_organizer',\n ]:\n raise UnprocessableEntityError(\n {'data': 'name'}, \"The given name cannot be updated\"\n )\n\n def before_delete_object(self, obj, kwargs):\n \"\"\"\n method to check proper resource name before deleting\n :param obj:\n :param kwargs:\n :return:\n \"\"\"\n if obj.name in [\n 'owner',\n 'organizer',\n 'coorganizer',\n 'registrar',\n 'moderator',\n 'attendee',\n 'track_organizer',\n ]:\n raise UnprocessableEntityError(\n {'data': 'name'}, \"The resource with given name cannot be deleted\"\n )\n\n decorators = (api.has_permission('is_admin', methods=\"PATCH,DELETE\"),)\n schema = RoleSchema\n data_layer = {\n 'session': db.session,\n 'model': Role,\n 'methods': {\n 'before_get_object': before_get_object,\n 'before_delete_object': before_delete_object,\n },\n }\n", "path": "app/api/roles.py"}]} | 1,299 | 109 |
gh_patches_debug_7221 | rasdani/github-patches | git_diff | StackStorm__st2-3038 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cache filters API response for web UI
As the number of actions, rules, etc has grown in our StackStorm cluster, the UI is becoming slower. In particular, the filters view (which we rely on heavily with this many executions) is very slow to become visible and usable. It might help to cache this `/api/v1/executions/views/filters` API response and asynchronously reload this value outside of the request thread, since this is slowly-changing data.
Our typical workflow is to
1. load the main execution page
2. wait for the filters to appear
3. apply some set of filters
4. wait for the next page to load
Here's a waterfall showing a 20s load time for the filters response. This is pretty common for us now.

For reference, we have 572 rules, 1200 actions, 143 triggers, 19 trigger types, and 600k+ executions.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `st2common/st2common/models/db/execution.py`
Content:
```
1 # Licensed to the StackStorm, Inc ('StackStorm') under one or more
2 # contributor license agreements. See the NOTICE file distributed with
3 # this work for additional information regarding copyright ownership.
4 # The ASF licenses this file to You under the Apache License, Version 2.0
5 # (the "License"); you may not use this file except in compliance with
6 # the License. You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 import copy
17
18 import mongoengine as me
19
20 from st2common import log as logging
21 from st2common.models.db import stormbase
22 from st2common.fields import ComplexDateTimeField
23 from st2common.util import date as date_utils
24 from st2common.util.secrets import get_secret_parameters
25 from st2common.util.secrets import mask_secret_parameters
26 from st2common.constants.types import ResourceType
27
28 __all__ = [
29 'ActionExecutionDB'
30 ]
31
32
33 LOG = logging.getLogger(__name__)
34
35
36 class ActionExecutionDB(stormbase.StormFoundationDB):
37 RESOURCE_TYPE = ResourceType.EXECUTION
38 UID_FIELDS = ['id']
39
40 trigger = stormbase.EscapedDictField()
41 trigger_type = stormbase.EscapedDictField()
42 trigger_instance = stormbase.EscapedDictField()
43 rule = stormbase.EscapedDictField()
44 action = stormbase.EscapedDictField(required=True)
45 runner = stormbase.EscapedDictField(required=True)
46 # Only the diff between the liveaction type and what is replicated
47 # in the ActionExecutionDB object.
48 liveaction = stormbase.EscapedDictField(required=True)
49 status = me.StringField(
50 required=True,
51 help_text='The current status of the liveaction.')
52 start_timestamp = ComplexDateTimeField(
53 default=date_utils.get_datetime_utc_now,
54 help_text='The timestamp when the liveaction was created.')
55 end_timestamp = ComplexDateTimeField(
56 help_text='The timestamp when the liveaction has finished.')
57 parameters = stormbase.EscapedDynamicField(
58 default={},
59 help_text='The key-value pairs passed as to the action runner & action.')
60 result = stormbase.EscapedDynamicField(
61 default={},
62 help_text='Action defined result.')
63 context = me.DictField(
64 default={},
65 help_text='Contextual information on the action execution.')
66 parent = me.StringField()
67 children = me.ListField(field=me.StringField())
68 log = me.ListField(field=me.DictField())
69 # Do not use URLField for web_url. If host doesn't have FQDN set, URLField validation blows.
70 web_url = me.StringField(required=False)
71
72 meta = {
73 'indexes': [
74 {'fields': ['rule.ref']},
75 {'fields': ['action.ref']},
76 {'fields': ['liveaction.id']},
77 {'fields': ['start_timestamp']},
78 {'fields': ['end_timestamp']},
79 {'fields': ['status']},
80 {'fields': ['parent']},
81 {'fields': ['-start_timestamp', 'action.ref', 'status']}
82 ]
83 }
84
85 def get_uid(self):
86 # TODO Construct od from non id field:
87 uid = [self.RESOURCE_TYPE, str(self.id)]
88 return ':'.join(uid)
89
90 def mask_secrets(self, value):
91 result = copy.deepcopy(value)
92
93 execution_parameters = value['parameters']
94 parameters = {}
95 # pylint: disable=no-member
96 parameters.update(value.get('action', {}).get('parameters', {}))
97 parameters.update(value.get('runner', {}).get('runner_parameters', {}))
98
99 secret_parameters = get_secret_parameters(parameters=parameters)
100 result['parameters'] = mask_secret_parameters(parameters=execution_parameters,
101 secret_parameters=secret_parameters)
102 return result
103
104 def get_masked_parameters(self):
105 """
106 Retrieve parameters with the secrets masked.
107
108 :rtype: ``dict``
109 """
110 serializable_dict = self.to_serializable_dict(mask_secrets=True)
111 return serializable_dict['parameters']
112
113
114 MODELS = [ActionExecutionDB]
115
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/st2common/st2common/models/db/execution.py b/st2common/st2common/models/db/execution.py
--- a/st2common/st2common/models/db/execution.py
+++ b/st2common/st2common/models/db/execution.py
@@ -78,6 +78,11 @@
{'fields': ['end_timestamp']},
{'fields': ['status']},
{'fields': ['parent']},
+ {'fields': ['rule.name']},
+ {'fields': ['runner.name']},
+ {'fields': ['trigger.name']},
+ {'fields': ['trigger_type.name']},
+ {'fields': ['context.user']},
{'fields': ['-start_timestamp', 'action.ref', 'status']}
]
}
| {"golden_diff": "diff --git a/st2common/st2common/models/db/execution.py b/st2common/st2common/models/db/execution.py\n--- a/st2common/st2common/models/db/execution.py\n+++ b/st2common/st2common/models/db/execution.py\n@@ -78,6 +78,11 @@\n {'fields': ['end_timestamp']},\n {'fields': ['status']},\n {'fields': ['parent']},\n+ {'fields': ['rule.name']},\n+ {'fields': ['runner.name']},\n+ {'fields': ['trigger.name']},\n+ {'fields': ['trigger_type.name']},\n+ {'fields': ['context.user']},\n {'fields': ['-start_timestamp', 'action.ref', 'status']}\n ]\n }\n", "issue": "Cache filters API response for web UI\nAs the number of actions, rules, etc has grown in our StackStorm cluster, the UI is becoming slower. In particular, the filters view (which we rely on heavily with this many executions) is very slow to become visible and usable. It might help to cache this `/api/v1/executions/views/filters` API response and asynchronously reload this value outside of the request thread, since this is slowly-changing data.\n\nOur typical workflow is to\n1. load the main execution page\n2. wait for the filters to appear\n3. apply some set of filters\n4. wait for the next page to load\n\nHere's a waterfall showing a 20s load time for the filters response. This is pretty common for us now.\n\n\nFor reference, we have 572 rules, 1200 actions, 143 triggers, 19 trigger types, and 600k+ executions.\n\n", "before_files": [{"content": "# Licensed to the StackStorm, Inc ('StackStorm') under one or more\n# contributor license agreements. See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License. 
You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport copy\n\nimport mongoengine as me\n\nfrom st2common import log as logging\nfrom st2common.models.db import stormbase\nfrom st2common.fields import ComplexDateTimeField\nfrom st2common.util import date as date_utils\nfrom st2common.util.secrets import get_secret_parameters\nfrom st2common.util.secrets import mask_secret_parameters\nfrom st2common.constants.types import ResourceType\n\n__all__ = [\n 'ActionExecutionDB'\n]\n\n\nLOG = logging.getLogger(__name__)\n\n\nclass ActionExecutionDB(stormbase.StormFoundationDB):\n RESOURCE_TYPE = ResourceType.EXECUTION\n UID_FIELDS = ['id']\n\n trigger = stormbase.EscapedDictField()\n trigger_type = stormbase.EscapedDictField()\n trigger_instance = stormbase.EscapedDictField()\n rule = stormbase.EscapedDictField()\n action = stormbase.EscapedDictField(required=True)\n runner = stormbase.EscapedDictField(required=True)\n # Only the diff between the liveaction type and what is replicated\n # in the ActionExecutionDB object.\n liveaction = stormbase.EscapedDictField(required=True)\n status = me.StringField(\n required=True,\n help_text='The current status of the liveaction.')\n start_timestamp = ComplexDateTimeField(\n default=date_utils.get_datetime_utc_now,\n help_text='The timestamp when the liveaction was created.')\n end_timestamp = ComplexDateTimeField(\n help_text='The timestamp when the liveaction has finished.')\n parameters = stormbase.EscapedDynamicField(\n default={},\n help_text='The key-value pairs passed as to the action runner & action.')\n result = stormbase.EscapedDynamicField(\n default={},\n help_text='Action defined result.')\n context = me.DictField(\n default={},\n help_text='Contextual information on the action execution.')\n parent = me.StringField()\n children = me.ListField(field=me.StringField())\n log = me.ListField(field=me.DictField())\n # Do not use URLField for web_url. 
If host doesn't have FQDN set, URLField validation blows.\n web_url = me.StringField(required=False)\n\n meta = {\n 'indexes': [\n {'fields': ['rule.ref']},\n {'fields': ['action.ref']},\n {'fields': ['liveaction.id']},\n {'fields': ['start_timestamp']},\n {'fields': ['end_timestamp']},\n {'fields': ['status']},\n {'fields': ['parent']},\n {'fields': ['-start_timestamp', 'action.ref', 'status']}\n ]\n }\n\n def get_uid(self):\n # TODO Construct od from non id field:\n uid = [self.RESOURCE_TYPE, str(self.id)]\n return ':'.join(uid)\n\n def mask_secrets(self, value):\n result = copy.deepcopy(value)\n\n execution_parameters = value['parameters']\n parameters = {}\n # pylint: disable=no-member\n parameters.update(value.get('action', {}).get('parameters', {}))\n parameters.update(value.get('runner', {}).get('runner_parameters', {}))\n\n secret_parameters = get_secret_parameters(parameters=parameters)\n result['parameters'] = mask_secret_parameters(parameters=execution_parameters,\n secret_parameters=secret_parameters)\n return result\n\n def get_masked_parameters(self):\n \"\"\"\n Retrieve parameters with the secrets masked.\n\n :rtype: ``dict``\n \"\"\"\n serializable_dict = self.to_serializable_dict(mask_secrets=True)\n return serializable_dict['parameters']\n\n\nMODELS = [ActionExecutionDB]\n", "path": "st2common/st2common/models/db/execution.py"}], "after_files": [{"content": "# Licensed to the StackStorm, Inc ('StackStorm') under one or more\n# contributor license agreements. See the NOTICE file distributed with\n# this work for additional information regarding copyright ownership.\n# The ASF licenses this file to You under the Apache License, Version 2.0\n# (the \"License\"); you may not use this file except in compliance with\n# the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport copy\n\nimport mongoengine as me\n\nfrom st2common import log as logging\nfrom st2common.models.db import stormbase\nfrom st2common.fields import ComplexDateTimeField\nfrom st2common.util import date as date_utils\nfrom st2common.util.secrets import get_secret_parameters\nfrom st2common.util.secrets import mask_secret_parameters\nfrom st2common.constants.types import ResourceType\n\n__all__ = [\n 'ActionExecutionDB'\n]\n\n\nLOG = logging.getLogger(__name__)\n\n\nclass ActionExecutionDB(stormbase.StormFoundationDB):\n RESOURCE_TYPE = ResourceType.EXECUTION\n UID_FIELDS = ['id']\n\n trigger = stormbase.EscapedDictField()\n trigger_type = stormbase.EscapedDictField()\n trigger_instance = stormbase.EscapedDictField()\n rule = stormbase.EscapedDictField()\n action = stormbase.EscapedDictField(required=True)\n runner = stormbase.EscapedDictField(required=True)\n # Only the diff between the liveaction type and what is replicated\n # in the ActionExecutionDB object.\n liveaction = stormbase.EscapedDictField(required=True)\n status = me.StringField(\n required=True,\n help_text='The current status of the liveaction.')\n start_timestamp = ComplexDateTimeField(\n default=date_utils.get_datetime_utc_now,\n help_text='The timestamp when the liveaction was created.')\n end_timestamp = ComplexDateTimeField(\n help_text='The timestamp when the 
liveaction has finished.')\n parameters = stormbase.EscapedDynamicField(\n default={},\n help_text='The key-value pairs passed as to the action runner & action.')\n result = stormbase.EscapedDynamicField(\n default={},\n help_text='Action defined result.')\n context = me.DictField(\n default={},\n help_text='Contextual information on the action execution.')\n parent = me.StringField()\n children = me.ListField(field=me.StringField())\n log = me.ListField(field=me.DictField())\n # Do not use URLField for web_url. If host doesn't have FQDN set, URLField validation blows.\n web_url = me.StringField(required=False)\n\n meta = {\n 'indexes': [\n {'fields': ['rule.ref']},\n {'fields': ['action.ref']},\n {'fields': ['liveaction.id']},\n {'fields': ['start_timestamp']},\n {'fields': ['end_timestamp']},\n {'fields': ['status']},\n {'fields': ['parent']},\n {'fields': ['rule.name']},\n {'fields': ['runner.name']},\n {'fields': ['trigger.name']},\n {'fields': ['trigger_type.name']},\n {'fields': ['context.user']},\n {'fields': ['-start_timestamp', 'action.ref', 'status']}\n ]\n }\n\n def get_uid(self):\n # TODO Construct od from non id field:\n uid = [self.RESOURCE_TYPE, str(self.id)]\n return ':'.join(uid)\n\n def mask_secrets(self, value):\n result = copy.deepcopy(value)\n\n execution_parameters = value['parameters']\n parameters = {}\n # pylint: disable=no-member\n parameters.update(value.get('action', {}).get('parameters', {}))\n parameters.update(value.get('runner', {}).get('runner_parameters', {}))\n\n secret_parameters = get_secret_parameters(parameters=parameters)\n result['parameters'] = mask_secret_parameters(parameters=execution_parameters,\n secret_parameters=secret_parameters)\n return result\n\n def get_masked_parameters(self):\n \"\"\"\n Retrieve parameters with the secrets masked.\n\n :rtype: ``dict``\n \"\"\"\n serializable_dict = self.to_serializable_dict(mask_secrets=True)\n return serializable_dict['parameters']\n\n\nMODELS = [ActionExecutionDB]\n", "path": "st2common/st2common/models/db/execution.py"}]} | 1,707 | 156 |
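The patch is index-only: the filters endpoint effectively runs `distinct()` over a handful of dotted fields, and without single-field indexes each of those queries scans the whole 600k-document collection. A self-contained sketch with a toy model (not st2's real one; assumes a local MongoDB and an illustrative database name):

```python
import mongoengine as me


class Execution(me.Document):
    rule = me.DictField()
    trigger = me.DictField()
    context = me.DictField()
    meta = {
        "indexes": [  # same shape as the entries added in the golden diff
            {"fields": ["rule.name"]},
            {"fields": ["trigger.name"]},
            {"fields": ["context.user"]},
        ]
    }


me.connect("filters_demo")  # illustrative connection, not st2's config
# Each distinct() can now be answered from an index instead of a full scan.
print(Execution.objects.distinct("rule.name"))
print(Execution.objects.distinct("context.user"))
```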
gh_patches_debug_946 | rasdani/github-patches | git_diff | xonsh__xonsh-2332 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
xoreutils: echo fails with KeyError: 'help'
Any `echo` invocation fails:
```shell
$ $XONSH_SHOW_TRACEBACK = True
$ echo
xonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 13061, in run
r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 12896, in proxy_four
return f(args, stdin, stdout, stderr)
File "/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py", line 9, in echo
if opts['help']:
KeyError: 'help'
$ echo foo
xonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 13061, in run
r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 12896, in proxy_four
return f(args, stdin, stdout, stderr)
File "/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py", line 9, in echo
if opts['help']:
KeyError: 'help'
$ echo "foo"
xonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 13061, in run
r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)
File "/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py", line 12896, in proxy_four
return f(args, stdin, stdout, stderr)
File "/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py", line 9, in echo
if opts['help']:
KeyError: 'help'
```
Obviously, the problem is that `help` is looked up but missing: http://xon.sh/_modules/xonsh/xoreutils/echo.html#echo
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `xonsh/xoreutils/echo.py`
Content:
```
1 """Implements a simple echo command for xonsh."""
2
3
4 def echo(args, stdin, stdout, stderr):
5 """A simple echo command."""
6 opts = _echo_parse_args(args)
7 if opts is None:
8 return
9 if opts['help']:
10 print(ECHO_HELP, file=stdout)
11 return 0
12 ender = opts['end']
13 args = map(str, args)
14 if opts['escapes']:
15 args = map(lambda x: x.encode().decode('unicode_escape'), args)
16 print(*args, end=ender, file=stdout)
17
18
19 def _echo_parse_args(args):
20 out = {'escapes': False, 'end': '\n'}
21 if '-e' in args:
22 args.remove('-e')
23 out['escapes'] = True
24 if '-E' in args:
25 args.remove('-E')
26 out['escapes'] = False
27 if '-n' in args:
28 args.remove('-n')
29 out['end'] = ''
30 if '-h' in args or '--help' in args:
31 out['help'] = True
32 return out
33
34
35 ECHO_HELP = """Usage: echo [OPTIONS]... [STRING]...
36 Echo the STRING(s) to standard output.
37
38 -n do not include the trailing newline
39 -e enable interpretation of backslash escapes
40 -E disable interpretation of backslash escapes (default)
41 -h --help display this message and exit
42
43 This version of echo was written in Python for the xonsh project: http://xon.sh
44 Based on echo from GNU coreutils: http://www.gnu.org/software/coreutils/"""
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/xonsh/xoreutils/echo.py b/xonsh/xoreutils/echo.py
--- a/xonsh/xoreutils/echo.py
+++ b/xonsh/xoreutils/echo.py
@@ -17,7 +17,7 @@
def _echo_parse_args(args):
- out = {'escapes': False, 'end': '\n'}
+ out = {'escapes': False, 'end': '\n', 'help': False}
if '-e' in args:
args.remove('-e')
out['escapes'] = True
| {"golden_diff": "diff --git a/xonsh/xoreutils/echo.py b/xonsh/xoreutils/echo.py\n--- a/xonsh/xoreutils/echo.py\n+++ b/xonsh/xoreutils/echo.py\n@@ -17,7 +17,7 @@\n \n \n def _echo_parse_args(args):\n- out = {'escapes': False, 'end': '\\n'}\n+ out = {'escapes': False, 'end': '\\n', 'help': False}\n if '-e' in args:\n args.remove('-e')\n out['escapes'] = True\n", "issue": "xoreutils: echo fails with KeyError: 'help'\nAny `echo` invocation fails:\r\n\r\n```shell\r\n$ $XONSH_SHOW_TRACEBACK = True\r\n$ echo\r\nxonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 13061, in run\r\n r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 12896, in proxy_four\r\n return f(args, stdin, stdout, stderr)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py\", line 9, in echo\r\n if opts['help']:\r\nKeyError: 'help'\r\n$ echo foo\r\nxonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 13061, in run\r\n r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 12896, in proxy_four\r\n return f(args, stdin, stdout, stderr)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py\", line 9, in echo\r\n if opts['help']:\r\nKeyError: 'help'\r\n$ echo \"foo\"\r\nxonsh: To log full traceback to a file set: $XONSH_TRACEBACK_LOGFILE = <filename>\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 13061, in run\r\n r = self.f(self.args, sp_stdin, sp_stdout, sp_stderr, spec)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/__amalgam__.py\", line 12896, in proxy_four\r\n return f(args, stdin, stdout, stderr)\r\n File \"/usr/local/lib/python3.6/site-packages/xonsh/xoreutils/echo.py\", line 9, in echo\r\n if opts['help']:\r\nKeyError: 'help'\r\n```\r\n\r\nObviously, the problem is that `help` is looked up but missing: http://xon.sh/_modules/xonsh/xoreutils/echo.html#echo\n", "before_files": [{"content": "\"\"\"Implements a simple echo command for xonsh.\"\"\"\n\n\ndef echo(args, stdin, stdout, stderr):\n \"\"\"A simple echo command.\"\"\"\n opts = _echo_parse_args(args)\n if opts is None:\n return\n if opts['help']:\n print(ECHO_HELP, file=stdout)\n return 0\n ender = opts['end']\n args = map(str, args)\n if opts['escapes']:\n args = map(lambda x: x.encode().decode('unicode_escape'), args)\n print(*args, end=ender, file=stdout)\n\n\ndef _echo_parse_args(args):\n out = {'escapes': False, 'end': '\\n'}\n if '-e' in args:\n args.remove('-e')\n out['escapes'] = True\n if '-E' in args:\n args.remove('-E')\n out['escapes'] = False\n if '-n' in args:\n args.remove('-n')\n out['end'] = ''\n if '-h' in args or '--help' in args:\n out['help'] = True\n return out\n\n\nECHO_HELP = \"\"\"Usage: echo [OPTIONS]... 
[STRING]...\nEcho the STRING(s) to standard output.\n\n -n do not include the trailing newline\n -e enable interpretation of backslash escapes\n -E disable interpretation of backslash escapes (default)\n -h --help display this message and exit\n\nThis version of echo was written in Python for the xonsh project: http://xon.sh\nBased on echo from GNU coreutils: http://www.gnu.org/software/coreutils/\"\"\"\n", "path": "xonsh/xoreutils/echo.py"}], "after_files": [{"content": "\"\"\"Implements a simple echo command for xonsh.\"\"\"\n\n\ndef echo(args, stdin, stdout, stderr):\n \"\"\"A simple echo command.\"\"\"\n opts = _echo_parse_args(args)\n if opts is None:\n return\n if opts['help']:\n print(ECHO_HELP, file=stdout)\n return 0\n ender = opts['end']\n args = map(str, args)\n if opts['escapes']:\n args = map(lambda x: x.encode().decode('unicode_escape'), args)\n print(*args, end=ender, file=stdout)\n\n\ndef _echo_parse_args(args):\n out = {'escapes': False, 'end': '\\n', 'help': False}\n if '-e' in args:\n args.remove('-e')\n out['escapes'] = True\n if '-E' in args:\n args.remove('-E')\n out['escapes'] = False\n if '-n' in args:\n args.remove('-n')\n out['end'] = ''\n if '-h' in args or '--help' in args:\n out['help'] = True\n return out\n\n\nECHO_HELP = \"\"\"Usage: echo [OPTIONS]... [STRING]...\nEcho the STRING(s) to standard output.\n\n -n do not include the trailing newline\n -e enable interpretation of backslash escapes\n -E disable interpretation of backslash escapes (default)\n -h --help display this message and exit\n\nThis version of echo was written in Python for the xonsh project: http://xon.sh\nBased on echo from GNU coreutils: http://www.gnu.org/software/coreutils/\"\"\"\n", "path": "xonsh/xoreutils/echo.py"}]} | 1,293 | 128 |
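The traceback and the one-line fix boil down to a dict that only sometimes contains the key it is later read with. A minimal reproduction:

```python
def parse_buggy(args):
    out = {"escapes": False, "end": "\n"}  # no 'help' default
    if "-h" in args or "--help" in args:
        out["help"] = True
    return out


def parse_fixed(args):
    out = {"escapes": False, "end": "\n", "help": False}  # the patch's default
    if "-h" in args or "--help" in args:
        out["help"] = True
    return out


try:
    parse_buggy([])["help"]
except KeyError as exc:
    print("KeyError:", exc)    # reproduces every plain `echo` invocation
print(parse_fixed([])["help"])  # False, so echo() proceeds normally
```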
gh_patches_debug_56453 | rasdani/github-patches | git_diff | netket__netket-506 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ExactSampler is not resetting at construction time
ExactSampler should call reset() at construction time 
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `netket/sampler/exact_sampler.py`
Content:
```
1 import numpy as _np
2 from .abstract_sampler import AbstractSampler
3 from ..machine.density_matrix import AbstractDensityMatrix
4 from ..hilbert import DoubledHilbert
5 import netket.random
6
7
8 class ExactSampler(AbstractSampler):
9 r"""
10 This sampler generates i.i.d. samples from $$|\Psi(s)|^2$$.
11 In order to perform exact sampling, $$|\Psi(s)|^2$$ is precomputed on all
12 the possible values of the quantum numbers $$s$$. This sampler has thus an
13 exponential cost with the number of degrees of freedom, and cannot be used
14 for large systems, where Metropolis-based sampling are instead a viable
15 option.
16 """
17
18 def __init__(self, machine, sample_size=16):
19 r"""
20 Constructs a new ``ExactSampler`` given a machine.
21
22 Args:
23 machine: A machine $$\Psi(s)$$ used for the sampling.
24 The probability distribution being sampled
25 from is $$F(\Psi(s))$$, where the function
26 $$F(X)$$, is arbitrary, by default $$F(X)=|X|^2$$.
27
28 sample_size: The number of independent samples to be generated at each invocation of __next__.
29 """
30 super().__init__(machine, sample_size)
31 if isinstance(machine, AbstractDensityMatrix):
32 self.hilbert = DoubledHilbert(machine.hilbert)
33 else:
34 self.hilbert = machine.hilbert
35 self._machine_pow = 2.0
36
37 def reset(self, init_random=False):
38 self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow
39 self._prob /= self._prob.sum()
40
41 def __next__(self):
42 numbers = netket.random.choice(
43 self._prob.size, size=self.sample_shape[0], replace=True, p=self._prob
44 )
45 return self.hilbert.numbers_to_states(numbers)
46
47 def generate_samples(self, n_samples, init_random=False, samples=None):
48
49 if samples is None:
50 samples = _np.zeros((n_samples, self.sample_shape[0], self.sample_shape[1]))
51
52 numbers = netket.random.choice(
53 self._prob.size,
54 size=self.sample_shape[0] * n_samples,
55 replace=True,
56 p=self._prob,
57 )
58 samples[:] = self.hilbert.numbers_to_states(numbers).reshape(samples.shape)
59
60 return samples
61
62 @property
63 def machine_pow(self):
64 return self._machine_pow
65
66 @machine_pow.setter
67 def machine_pow(self, m_power):
68 self._machine_pow = m_power
69 self.reset()
70
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/netket/sampler/exact_sampler.py b/netket/sampler/exact_sampler.py
--- a/netket/sampler/exact_sampler.py
+++ b/netket/sampler/exact_sampler.py
@@ -33,6 +33,7 @@
else:
self.hilbert = machine.hilbert
self._machine_pow = 2.0
+ self.reset()
def reset(self, init_random=False):
self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow
| {"golden_diff": "diff --git a/netket/sampler/exact_sampler.py b/netket/sampler/exact_sampler.py\n--- a/netket/sampler/exact_sampler.py\n+++ b/netket/sampler/exact_sampler.py\n@@ -33,6 +33,7 @@\n else:\n self.hilbert = machine.hilbert\n self._machine_pow = 2.0\n+ self.reset()\n \n def reset(self, init_random=False):\n self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow\n", "issue": "ExactSampler is not resetting at construction time\nExact Sampler should call Reset at construction time \r\n\n", "before_files": [{"content": "import numpy as _np\nfrom .abstract_sampler import AbstractSampler\nfrom ..machine.density_matrix import AbstractDensityMatrix\nfrom ..hilbert import DoubledHilbert\nimport netket.random\n\n\nclass ExactSampler(AbstractSampler):\n r\"\"\"\n This sampler generates i.i.d. samples from $$|\\Psi(s)|^2$$.\n In order to perform exact sampling, $$|\\Psi(s)|^2$$ is precomputed an all\n the possible values of the quantum numbers $$s$$. This sampler has thus an\n exponential cost with the number of degrees of freedom, and cannot be used\n for large systems, where Metropolis-based sampling are instead a viable\n option.\n \"\"\"\n\n def __init__(self, machine, sample_size=16):\n r\"\"\"\n Constructs a new ``ExactSampler`` given a machine.\n\n Args:\n machine: A machine $$\\Psi(s)$$ used for the sampling.\n The probability distribution being sampled\n from is $$F(\\Psi(s))$$, where the function\n $$F(X)$$, is arbitrary, by default $$F(X)=|X|^2$$.\n\n sample_size: The number of independent samples to be generated at each invocation of __next__.\n \"\"\"\n super().__init__(machine, sample_size)\n if isinstance(machine, AbstractDensityMatrix):\n self.hilbert = DoubledHilbert(machine.hilbert)\n else:\n self.hilbert = machine.hilbert\n self._machine_pow = 2.0\n\n def reset(self, init_random=False):\n self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow\n self._prob /= self._prob.sum()\n\n def __next__(self):\n numbers = netket.random.choice(\n self._prob.size, size=self.sample_shape[0], replace=True, p=self._prob\n )\n return self.hilbert.numbers_to_states(numbers)\n\n def generate_samples(self, n_samples, init_random=False, samples=None):\n\n if samples is None:\n samples = _np.zeros((n_samples, self.sample_shape[0], self.sample_shape[1]))\n\n numbers = netket.random.choice(\n self._prob.size,\n size=self.sample_shape[0] * n_samples,\n replace=True,\n p=self._prob,\n )\n samples[:] = self.hilbert.numbers_to_states(numbers).reshape(samples.shape)\n\n return samples\n\n @property\n def machine_pow(self):\n return self._machine_pow\n\n @machine_pow.setter\n def machine_pow(self, m_power):\n self._machine_pow = m_power\n self.reset()\n", "path": "netket/sampler/exact_sampler.py"}], "after_files": [{"content": "import numpy as _np\nfrom .abstract_sampler import AbstractSampler\nfrom ..machine.density_matrix import AbstractDensityMatrix\nfrom ..hilbert import DoubledHilbert\nimport netket.random\n\n\nclass ExactSampler(AbstractSampler):\n r\"\"\"\n This sampler generates i.i.d. samples from $$|\\Psi(s)|^2$$.\n In order to perform exact sampling, $$|\\Psi(s)|^2$$ is precomputed an all\n the possible values of the quantum numbers $$s$$. 
This sampler has thus an\n exponential cost with the number of degrees of freedom, and cannot be used\n for large systems, where Metropolis-based sampling are instead a viable\n option.\n \"\"\"\n\n def __init__(self, machine, sample_size=16):\n r\"\"\"\n Constructs a new ``ExactSampler`` given a machine.\n\n Args:\n machine: A machine $$\\Psi(s)$$ used for the sampling.\n The probability distribution being sampled\n from is $$F(\\Psi(s))$$, where the function\n $$F(X)$$, is arbitrary, by default $$F(X)=|X|^2$$.\n\n sample_size: The number of independent samples to be generated at each invocation of __next__.\n \"\"\"\n super().__init__(machine, sample_size)\n if isinstance(machine, AbstractDensityMatrix):\n self.hilbert = DoubledHilbert(machine.hilbert)\n else:\n self.hilbert = machine.hilbert\n self._machine_pow = 2.0\n self.reset()\n\n def reset(self, init_random=False):\n self._prob = _np.absolute(self.machine.to_array()) ** self.machine_pow\n self._prob /= self._prob.sum()\n\n def __next__(self):\n numbers = netket.random.choice(\n self._prob.size, size=self.sample_shape[0], replace=True, p=self._prob\n )\n return self.hilbert.numbers_to_states(numbers)\n\n def generate_samples(self, n_samples, init_random=False, samples=None):\n\n if samples is None:\n samples = _np.zeros((n_samples, self.sample_shape[0], self.sample_shape[1]))\n\n numbers = netket.random.choice(\n self._prob.size,\n size=self.sample_shape[0] * n_samples,\n replace=True,\n p=self._prob,\n )\n samples[:] = self.hilbert.numbers_to_states(numbers).reshape(samples.shape)\n\n return samples\n\n @property\n def machine_pow(self):\n return self._machine_pow\n\n @machine_pow.setter\n def machine_pow(self, m_power):\n self._machine_pow = m_power\n self.reset()\n", "path": "netket/sampler/exact_sampler.py"}]} | 982 | 114 |
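The one-line fix works because every sampling path reads `self._prob`, which only exists after `reset()` has run. A stripped-down illustration of the ordering bug:

```python
import numpy as np


class ToyExactSampler:
    def __init__(self, weights, call_reset=False):
        self._weights = np.asarray(weights, dtype=float)
        if call_reset:
            self.reset()  # the call the patch adds to __init__

    def reset(self):
        self._prob = self._weights / self._weights.sum()

    def sample(self, n):
        return np.random.choice(self._prob.size, size=n, p=self._prob)


try:
    ToyExactSampler([1.0, 3.0]).sample(4)
except AttributeError as exc:
    print("without reset():", exc)  # fails straight after construction
print("with reset():", ToyExactSampler([1.0, 3.0], call_reset=True).sample(4))
```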
gh_patches_debug_1487 | rasdani/github-patches | git_diff | huggingface__diffusers-1149 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Flax] 🚨 0.7.0 not working 🚨
### Describe the bug

### Reproduction
_No response_
### Logs
_No response_
### System Info
TPU v3-8
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/diffusers/models/embeddings_flax.py`
Content:
```
1 # Copyright 2022 The HuggingFace Team. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 import math
15
16 import flax.linen as nn
17 import jax.numpy as jnp
18
19
20 def get_sinusoidal_embeddings(
21 timesteps: jnp.ndarray,
22 embedding_dim: int,
23 freq_shift: float = 1,
24 min_timescale: float = 1,
25 max_timescale: float = 1.0e4,
26 flip_sin_to_cos: bool = False,
27 scale: float = 1.0,
28 ) -> jnp.ndarray:
29 """Returns the positional encoding (same as Tensor2Tensor).
30 Args:
31 timesteps: a 1-D Tensor of N indices, one per batch element.
32 These may be fractional.
33 embedding_dim: The number of output channels.
34 min_timescale: The smallest time unit (should probably be 0.0).
35 max_timescale: The largest time unit.
36 Returns:
37 a Tensor of timing signals [N, num_channels]
38 """
39 assert timesteps.ndim == 1, "Timesteps should be a 1d-array"
40 assert embedding_dim % 2 == 0, f"Embedding dimension {embedding_dim} should be even"
41 num_timescales = float(embedding_dim // 2)
42 log_timescale_increment = math.log(max_timescale / min_timescale) / (num_timescales - freq_shift)
43 inv_timescales = min_timescale * jnp.exp(jnp.arange(num_timescales, dtype=jnp.float32) * -log_timescale_increment)
44 emb = jnp.expand_dims(timesteps, 1) * jnp.expand_dims(inv_timescales, 0)
45
46 # scale embeddings
47 scaled_time = scale * emb
48
49 if flip_sin_to_cos:
50 signal = jnp.concatenate([jnp.cos(scaled_time), jnp.sin(scaled_time)], axis=1)
51 else:
52 signal = jnp.concatenate([jnp.sin(scaled_time), jnp.cos(scaled_time)], axis=1)
53 signal = jnp.reshape(signal, [jnp.shape(timesteps)[0], embedding_dim])
54 return signal
55
56
57 class FlaxTimestepEmbedding(nn.Module):
58 r"""
59 Time step Embedding Module. Learns embeddings for input time steps.
60
61 Args:
62 time_embed_dim (`int`, *optional*, defaults to `32`):
63 Time step embedding dimension
64 dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):
65 Parameters `dtype`
66 """
67 time_embed_dim: int = 32
68 dtype: jnp.dtype = jnp.float32
69
70 @nn.compact
71 def __call__(self, temb):
72 temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name="linear_1")(temb)
73 temb = nn.silu(temb)
74 temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name="linear_2")(temb)
75 return temb
76
77
78 class FlaxTimesteps(nn.Module):
79 r"""
80 Wrapper Module for sinusoidal Time step Embeddings as described in https://arxiv.org/abs/2006.11239
81
82 Args:
83 dim (`int`, *optional*, defaults to `32`):
84 Time step embedding dimension
85 """
86 dim: int = 32
87 freq_shift: float = 1
88
89 @nn.compact
90 def __call__(self, timesteps):
91 return get_sinusoidal_embeddings(timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift)
92
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/diffusers/models/embeddings_flax.py b/src/diffusers/models/embeddings_flax.py
--- a/src/diffusers/models/embeddings_flax.py
+++ b/src/diffusers/models/embeddings_flax.py
@@ -88,4 +88,6 @@
@nn.compact
def __call__(self, timesteps):
- return get_sinusoidal_embeddings(timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift)
+ return get_sinusoidal_embeddings(
+ timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift, flip_sin_to_cos=True
+ )
| {"golden_diff": "diff --git a/src/diffusers/models/embeddings_flax.py b/src/diffusers/models/embeddings_flax.py\n--- a/src/diffusers/models/embeddings_flax.py\n+++ b/src/diffusers/models/embeddings_flax.py\n@@ -88,4 +88,6 @@\n \n @nn.compact\n def __call__(self, timesteps):\n- return get_sinusoidal_embeddings(timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift)\n+ return get_sinusoidal_embeddings(\n+ timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift, flip_sin_to_cos=True\n+ )\n", "issue": "[Flax] \ud83d\udea8 0.7.0 not working \ud83d\udea8\n### Describe the bug\n\n\r\n\n\n### Reproduction\n\n_No response_\n\n### Logs\n\n_No response_\n\n### System Info\n\nTPU v3-8\n", "before_files": [{"content": "# Copyright 2022 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\n\ndef get_sinusoidal_embeddings(\n timesteps: jnp.ndarray,\n embedding_dim: int,\n freq_shift: float = 1,\n min_timescale: float = 1,\n max_timescale: float = 1.0e4,\n flip_sin_to_cos: bool = False,\n scale: float = 1.0,\n) -> jnp.ndarray:\n \"\"\"Returns the positional encoding (same as Tensor2Tensor).\n Args:\n timesteps: a 1-D Tensor of N indices, one per batch element.\n These may be fractional.\n embedding_dim: The number of output channels.\n min_timescale: The smallest time unit (should probably be 0.0).\n max_timescale: The largest time unit.\n Returns:\n a Tensor of timing signals [N, num_channels]\n \"\"\"\n assert timesteps.ndim == 1, \"Timesteps should be a 1d-array\"\n assert embedding_dim % 2 == 0, f\"Embedding dimension {embedding_dim} should be even\"\n num_timescales = float(embedding_dim // 2)\n log_timescale_increment = math.log(max_timescale / min_timescale) / (num_timescales - freq_shift)\n inv_timescales = min_timescale * jnp.exp(jnp.arange(num_timescales, dtype=jnp.float32) * -log_timescale_increment)\n emb = jnp.expand_dims(timesteps, 1) * jnp.expand_dims(inv_timescales, 0)\n\n # scale embeddings\n scaled_time = scale * emb\n\n if flip_sin_to_cos:\n signal = jnp.concatenate([jnp.cos(scaled_time), jnp.sin(scaled_time)], axis=1)\n else:\n signal = jnp.concatenate([jnp.sin(scaled_time), jnp.cos(scaled_time)], axis=1)\n signal = jnp.reshape(signal, [jnp.shape(timesteps)[0], embedding_dim])\n return signal\n\n\nclass FlaxTimestepEmbedding(nn.Module):\n r\"\"\"\n Time step Embedding Module. 
Learns embeddings for input time steps.\n\n Args:\n time_embed_dim (`int`, *optional*, defaults to `32`):\n Time step embedding dimension\n dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n Parameters `dtype`\n \"\"\"\n time_embed_dim: int = 32\n dtype: jnp.dtype = jnp.float32\n\n @nn.compact\n def __call__(self, temb):\n temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name=\"linear_1\")(temb)\n temb = nn.silu(temb)\n temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name=\"linear_2\")(temb)\n return temb\n\n\nclass FlaxTimesteps(nn.Module):\n r\"\"\"\n Wrapper Module for sinusoidal Time step Embeddings as described in https://arxiv.org/abs/2006.11239\n\n Args:\n dim (`int`, *optional*, defaults to `32`):\n Time step embedding dimension\n \"\"\"\n dim: int = 32\n freq_shift: float = 1\n\n @nn.compact\n def __call__(self, timesteps):\n return get_sinusoidal_embeddings(timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift)\n", "path": "src/diffusers/models/embeddings_flax.py"}], "after_files": [{"content": "# Copyright 2022 The HuggingFace Team. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport math\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\n\ndef get_sinusoidal_embeddings(\n timesteps: jnp.ndarray,\n embedding_dim: int,\n freq_shift: float = 1,\n min_timescale: float = 1,\n max_timescale: float = 1.0e4,\n flip_sin_to_cos: bool = False,\n scale: float = 1.0,\n) -> jnp.ndarray:\n \"\"\"Returns the positional encoding (same as Tensor2Tensor).\n Args:\n timesteps: a 1-D Tensor of N indices, one per batch element.\n These may be fractional.\n embedding_dim: The number of output channels.\n min_timescale: The smallest time unit (should probably be 0.0).\n max_timescale: The largest time unit.\n Returns:\n a Tensor of timing signals [N, num_channels]\n \"\"\"\n assert timesteps.ndim == 1, \"Timesteps should be a 1d-array\"\n assert embedding_dim % 2 == 0, f\"Embedding dimension {embedding_dim} should be even\"\n num_timescales = float(embedding_dim // 2)\n log_timescale_increment = math.log(max_timescale / min_timescale) / (num_timescales - freq_shift)\n inv_timescales = min_timescale * jnp.exp(jnp.arange(num_timescales, dtype=jnp.float32) * -log_timescale_increment)\n emb = jnp.expand_dims(timesteps, 1) * jnp.expand_dims(inv_timescales, 0)\n\n # scale embeddings\n scaled_time = scale * emb\n\n if flip_sin_to_cos:\n signal = jnp.concatenate([jnp.cos(scaled_time), jnp.sin(scaled_time)], axis=1)\n else:\n signal = jnp.concatenate([jnp.sin(scaled_time), jnp.cos(scaled_time)], axis=1)\n signal = jnp.reshape(signal, [jnp.shape(timesteps)[0], embedding_dim])\n return signal\n\n\nclass FlaxTimestepEmbedding(nn.Module):\n r\"\"\"\n Time step Embedding Module. 
Learns embeddings for input time steps.\n\n Args:\n time_embed_dim (`int`, *optional*, defaults to `32`):\n Time step embedding dimension\n dtype (:obj:`jnp.dtype`, *optional*, defaults to jnp.float32):\n Parameters `dtype`\n \"\"\"\n time_embed_dim: int = 32\n dtype: jnp.dtype = jnp.float32\n\n @nn.compact\n def __call__(self, temb):\n temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name=\"linear_1\")(temb)\n temb = nn.silu(temb)\n temb = nn.Dense(self.time_embed_dim, dtype=self.dtype, name=\"linear_2\")(temb)\n return temb\n\n\nclass FlaxTimesteps(nn.Module):\n r\"\"\"\n Wrapper Module for sinusoidal Time step Embeddings as described in https://arxiv.org/abs/2006.11239\n\n Args:\n dim (`int`, *optional*, defaults to `32`):\n Time step embedding dimension\n \"\"\"\n dim: int = 32\n freq_shift: float = 1\n\n @nn.compact\n def __call__(self, timesteps):\n return get_sinusoidal_embeddings(\n timesteps, embedding_dim=self.dim, freq_shift=self.freq_shift, flip_sin_to_cos=True\n )\n", "path": "src/diffusers/models/embeddings_flax.py"}]} | 1,458 | 137 |
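The whole fix is the `flip_sin_to_cos=True` argument, which swaps which half of the embedding comes first; presumably the ported pretrained weights expect the cosine-first layout. A NumPy sketch of the same math (JAX-free for portability, with `min_timescale` fixed at 1 as in the original):

```python
import math
import numpy as np


def sinusoidal(timesteps, dim, flip_sin_to_cos, freq_shift=1.0, max_ts=1.0e4):
    half = dim // 2
    inv_timescales = np.exp(-math.log(max_ts) * np.arange(half) / (half - freq_shift))
    emb = timesteps[:, None] * inv_timescales[None, :]
    parts = (np.cos(emb), np.sin(emb)) if flip_sin_to_cos else (np.sin(emb), np.cos(emb))
    return np.concatenate(parts, axis=1)


t = np.array([0.0, 1.0])
print(sinusoidal(t, 4, flip_sin_to_cos=False)[0])  # sin half first: [0, 0, 1, 1]
print(sinusoidal(t, 4, flip_sin_to_cos=True)[0])   # cos half first: [1, 1, 0, 0]
```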
gh_patches_debug_48383 | rasdani/github-patches | git_diff | DDMAL__CantusDB-900 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
we need to re-add a restart policy to docker-compose.yml
A recent change to docker-compose.yml removed the `restart: always` policy we added to our containers a couple of weeks ago. We should reinstate this.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django/cantusdb_project/main_app/widgets.py`
Content:
```
1 from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput
2 from django.utils.safestring import mark_safe
3
4 class TextInputWidget(TextInput):
5 def __init__(self):
6 self.attrs = {"class": "form-control form-control-sm"}
7
8
9 class SelectWidget(Select):
10 """
11 not used, this widget does work, but we cannot order the choices by name
12 """
13
14 def __init__(self):
15 attrs = {"class": "form-control custom-select custom-select-sm"}
16 super().__init__(attrs=attrs)
17 # super().choices = choices
18 # self.choices = super().choices
19
20
21 class TextAreaWidget(Textarea):
22 def __init__(self):
23 self.attrs = {"class": "form-control", "rows": "3"}
24
25
26 class VolpianoAreaWidget(Textarea):
27 def __init__(self):
28 self.attrs = {
29 "class": "form-control",
30 "rows": "1.5",
31 "style": "font-family: Volpiano; font-size: xx-large",
32 }
33
34
35 class VolpianoInputWidget(TextInput):
36 def __init__(self):
37 self.attrs = {
38 "class": "form-control form-control-sm",
39 "style": "font-family: Volpiano; font-size: xx-large",
40 }
41
42
43 class CheckboxWidget(CheckboxInput):
44 pass
45
46
47 class AdminTextAreaWidget(Textarea):
48 def __init__(self):
49 self.attrs = {"class": "form-control", "rows": 10, "cols": 75}
50
51 def render(self, name, value, attrs=None, renderer=None):
52 return super().render(name, value, attrs=self.attrs) + mark_safe(
53 '<span style="color: red; font-weight: bold;"> * </span>'
54 )
55
56
57 class AdminTextInputWidget(TextInputWidget):
58 def render(self, name, value, attrs=None, renderer=None):
59 return super().render(name, value) + mark_safe(
60 '<span style="color: red; font-weight: bold;"> * </span>'
61 )
62
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django/cantusdb_project/main_app/widgets.py b/django/cantusdb_project/main_app/widgets.py
--- a/django/cantusdb_project/main_app/widgets.py
+++ b/django/cantusdb_project/main_app/widgets.py
@@ -1,6 +1,7 @@
from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput
from django.utils.safestring import mark_safe
+
class TextInputWidget(TextInput):
def __init__(self):
self.attrs = {"class": "form-control form-control-sm"}
| {"golden_diff": "diff --git a/django/cantusdb_project/main_app/widgets.py b/django/cantusdb_project/main_app/widgets.py\n--- a/django/cantusdb_project/main_app/widgets.py\n+++ b/django/cantusdb_project/main_app/widgets.py\n@@ -1,6 +1,7 @@\n from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput\n from django.utils.safestring import mark_safe\n \n+\n class TextInputWidget(TextInput):\n def __init__(self):\n self.attrs = {\"class\": \"form-control form-control-sm\"}\n", "issue": "we need to re-add a restart policy to docker-compose.yml\nA recent change to docker-compose.yml removed the `restart: always` policy we added to our containers a couple of weeks ago. We should re-instate this.\n", "before_files": [{"content": "from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput\nfrom django.utils.safestring import mark_safe\n\nclass TextInputWidget(TextInput):\n def __init__(self):\n self.attrs = {\"class\": \"form-control form-control-sm\"}\n\n\nclass SelectWidget(Select):\n \"\"\"\n not used, this widget does work, but we cannot order the choices by name\n \"\"\"\n\n def __init__(self):\n attrs = {\"class\": \"form-control custom-select custom-select-sm\"}\n super().__init__(attrs=attrs)\n # super().choices = choices\n # self.choices = super().choices\n\n\nclass TextAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\"class\": \"form-control\", \"rows\": \"3\"}\n\n\nclass VolpianoAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\n \"class\": \"form-control\",\n \"rows\": \"1.5\",\n \"style\": \"font-family: Volpiano; font-size: xx-large\",\n }\n\n\nclass VolpianoInputWidget(TextInput):\n def __init__(self):\n self.attrs = {\n \"class\": \"form-control form-control-sm\",\n \"style\": \"font-family: Volpiano; font-size: xx-large\",\n }\n\n\nclass CheckboxWidget(CheckboxInput):\n pass\n\n\nclass AdminTextAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\"class\": \"form-control\", \"rows\": 10, \"cols\": 75}\n\n def render(self, name, value, attrs=None, renderer=None):\n return super().render(name, value, attrs=self.attrs) + mark_safe(\n '<span style=\"color: red; font-weight: bold;\"> * </span>'\n )\n\n\nclass AdminTextInputWidget(TextInputWidget):\n def render(self, name, value, attrs=None, renderer=None):\n return super().render(name, value) + mark_safe(\n '<span style=\"color: red; font-weight: bold;\"> * </span>'\n )\n", "path": "django/cantusdb_project/main_app/widgets.py"}], "after_files": [{"content": "from django.forms.widgets import TextInput, Select, Textarea, CheckboxInput\nfrom django.utils.safestring import mark_safe\n\n\nclass TextInputWidget(TextInput):\n def __init__(self):\n self.attrs = {\"class\": \"form-control form-control-sm\"}\n\n\nclass SelectWidget(Select):\n \"\"\"\n not used, this widget does work, but we cannot order the choices by name\n \"\"\"\n\n def __init__(self):\n attrs = {\"class\": \"form-control custom-select custom-select-sm\"}\n super().__init__(attrs=attrs)\n # super().choices = choices\n # self.choices = super().choices\n\n\nclass TextAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\"class\": \"form-control\", \"rows\": \"3\"}\n\n\nclass VolpianoAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\n \"class\": \"form-control\",\n \"rows\": \"1.5\",\n \"style\": \"font-family: Volpiano; font-size: xx-large\",\n }\n\n\nclass VolpianoInputWidget(TextInput):\n def __init__(self):\n self.attrs = {\n \"class\": \"form-control form-control-sm\",\n \"style\": 
\"font-family: Volpiano; font-size: xx-large\",\n }\n\n\nclass CheckboxWidget(CheckboxInput):\n pass\n\n\nclass AdminTextAreaWidget(Textarea):\n def __init__(self):\n self.attrs = {\"class\": \"form-control\", \"rows\": 10, \"cols\": 75}\n\n def render(self, name, value, attrs=None, renderer=None):\n return super().render(name, value, attrs=self.attrs) + mark_safe(\n '<span style=\"color: red; font-weight: bold;\"> * </span>'\n )\n\n\nclass AdminTextInputWidget(TextInputWidget):\n def render(self, name, value, attrs=None, renderer=None):\n return super().render(name, value) + mark_safe(\n '<span style=\"color: red; font-weight: bold;\"> * </span>'\n )\n", "path": "django/cantusdb_project/main_app/widgets.py"}]} | 874 | 119 |
gh_patches_debug_568 | rasdani/github-patches | git_diff | pex-tool__pex-836 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.0
On the docket:
The prime motivator:
+ [x] Pex does not download foreign abi3 wheels correctly #823
Changes to support the above as well as others:
+ [x] Fix pex resolving for foreign platforms. #835
+ [x] Use pypa/packaging. #831
+ [x] Upgrade vendored setuptools to 42.0.2. #832
+ [x] De-vendor pex just once per version. #833
+ [x] Support VCS urls for vendoring. #834
+ [x] Support python 3.8 in CI. #829
+ [x] Fix pex resolution to respect --ignore-errors. #828
+ [x] Kill `pkg_resources` finders monkey-patching. #827
+ [x] Use flit to distribute pex. #826
+ [x] Cleanup extras_require. #825
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = '2.0.3'
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = '2.0.3'
+__version__ = '2.1.0'
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = '2.0.3'\n+__version__ = '2.1.0'\n", "issue": "Release 2.1.0\nOn the docket:\r\n\r\nThe prime motivator:\r\n+ [x] Pex does not download foreign abi3 wheels correctly #823\r\n\r\nChanges to support the above as well as others:\r\n+ [x] Fix pex resolving for foreign platforms. #835 \r\n+ [x] Use pypa/packaging. #831\r\n+ [x] Upgrade vendored setuptools to 42.0.2. #832\r\n+ [x] De-vendor pex just once per version. #833\r\n+ [x] Support VCS urls for vendoring. #834\r\n+ [x] Support python 3.8 in CI. #829\r\n+ [x] Fix pex resolution to respect --ignore-errors. #828\r\n+ [x] Kill `pkg_resources` finders monkey-patching. #827\r\n+ [x] Use flit to distribute pex. #826\r\n+ [x] Cleanup extras_require. #825\r\n\r\n\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.0.3'\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.1.0'\n", "path": "pex/version.py"}]} | 534 | 94 |
gh_patches_debug_16389 | rasdani/github-patches | git_diff | facebookresearch__hydra-1968 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Feature Request] submitit: add `stderr_to_stdout` as supported by submitit
# 🚀 Feature Request
The `stderr_to_stdout` option was added to submitit itself (not the plugin) [last year](https://github.com/facebookincubator/submitit/pull/1611), but it cannot currently be set from the submitit plugin.
We should add support for passing this `stderr_to_stdout` via the plugin as well.
cc @Jasha10
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `plugins/hydra_submitit_launcher/setup.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 # type: ignore
3 from pathlib import Path
4
5 from read_version import read_version
6 from setuptools import find_namespace_packages, setup
7
8 setup(
9 name="hydra-submitit-launcher",
10 version=read_version("hydra_plugins/hydra_submitit_launcher", "__init__.py"),
11 author="Jeremy Rapin, Jieru Hu, Omry Yadan",
12 author_email="[email protected], [email protected], [email protected]",
13 description="Submitit Launcher for Hydra apps",
14 long_description=(Path(__file__).parent / "README.md").read_text(),
15 long_description_content_type="text/markdown",
16 url="https://github.com/facebookincubator/submitit",
17 packages=find_namespace_packages(include=["hydra_plugins.*"]),
18 classifiers=[
19 "License :: OSI Approved :: MIT License",
20 "Programming Language :: Python :: 3.7",
21 "Programming Language :: Python :: 3.8",
22 "Programming Language :: Python :: 3.9",
23 "Operating System :: MacOS",
24 "Operating System :: POSIX :: Linux",
25 "Development Status :: 4 - Beta",
26 ],
27 install_requires=[
28 "hydra-core>=1.1.0.dev7",
29 "submitit>=1.0.0",
30 ],
31 include_package_data=True,
32 )
33
```
Path: `plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 from dataclasses import dataclass, field
3 from typing import Any, Dict, List, Optional
4
5 from hydra.core.config_store import ConfigStore
6
7
8 @dataclass
9 class BaseQueueConf:
10 """Configuration shared by all executors"""
11
12 submitit_folder: str = "${hydra.sweep.dir}/.submitit/%j"
13
14 # maximum time for the job in minutes
15 timeout_min: int = 60
16 # number of cpus to use for each task
17 cpus_per_task: Optional[int] = None
18 # number of gpus to use on each node
19 gpus_per_node: Optional[int] = None
20 # number of tasks to spawn on each node
21 tasks_per_node: int = 1
22 # memory to reserve for the job on each node (in GB)
23 mem_gb: Optional[int] = None
24 # number of nodes to use for the job
25 nodes: int = 1
26 # name of the job
27 name: str = "${hydra.job.name}"
28
29
30 @dataclass
31 class SlurmQueueConf(BaseQueueConf):
32 """Slurm configuration overrides and specific parameters"""
33
34 _target_: str = (
35 "hydra_plugins.hydra_submitit_launcher.submitit_launcher.SlurmLauncher"
36 )
37
38 # Params are used to configure sbatch, for more info check:
39 # https://github.com/facebookincubator/submitit/blob/master/submitit/slurm/slurm.py
40
41 # Following parameters are slurm specific
42 # More information: https://slurm.schedmd.com/sbatch.html
43 #
44 # slurm partition to use on the cluster
45 partition: Optional[str] = None
46 qos: Optional[str] = None
47 comment: Optional[str] = None
48 constraint: Optional[str] = None
49 exclude: Optional[str] = None
50 gres: Optional[str] = None
51 cpus_per_gpu: Optional[int] = None
52 gpus_per_task: Optional[int] = None
53 mem_per_gpu: Optional[str] = None
54 mem_per_cpu: Optional[str] = None
55
56 # Following parameters are submitit specifics
57 #
58 # USR1 signal delay before timeout
59 signal_delay_s: int = 120
60 # Maximum number of retries on job timeout.
61 # Change this only after you confirmed your code can handle re-submission
62 # by properly resuming from the latest stored checkpoint.
63 # check the following for more info on slurm_max_num_timeout
64 # https://github.com/facebookincubator/submitit/blob/master/docs/checkpointing.md
65 max_num_timeout: int = 0
66 # Useful to add parameters which are not currently available in the plugin.
67 # Eg: {"mail-user": "[email protected]", "mail-type": "BEGIN"}
68 additional_parameters: Dict[str, Any] = field(default_factory=dict)
69 # Maximum number of jobs running in parallel
70 array_parallelism: int = 256
71 # A list of commands to run in sbatch befure running srun
72 setup: Optional[List[str]] = None
73
74
75 @dataclass
76 class LocalQueueConf(BaseQueueConf):
77 _target_: str = (
78 "hydra_plugins.hydra_submitit_launcher.submitit_launcher.LocalLauncher"
79 )
80
81
82 # finally, register two different choices:
83 ConfigStore.instance().store(
84 group="hydra/launcher",
85 name="submitit_local",
86 node=LocalQueueConf(),
87 provider="submitit_launcher",
88 )
89
90
91 ConfigStore.instance().store(
92 group="hydra/launcher",
93 name="submitit_slurm",
94 node=SlurmQueueConf(),
95 provider="submitit_launcher",
96 )
97
```
Path: `plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2
3 __version__ = "1.2.0dev1"
4
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py b/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py
--- a/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py
+++ b/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py
@@ -1,3 +1,3 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
-__version__ = "1.2.0dev1"
+__version__ = "1.2.0dev2"
diff --git a/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py b/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py
--- a/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py
+++ b/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py
@@ -25,6 +25,8 @@
nodes: int = 1
# name of the job
name: str = "${hydra.job.name}"
+ # redirect stderr to stdout
+ stderr_to_stdout: bool = False
@dataclass
diff --git a/plugins/hydra_submitit_launcher/setup.py b/plugins/hydra_submitit_launcher/setup.py
--- a/plugins/hydra_submitit_launcher/setup.py
+++ b/plugins/hydra_submitit_launcher/setup.py
@@ -26,7 +26,7 @@
],
install_requires=[
"hydra-core>=1.1.0.dev7",
- "submitit>=1.0.0",
+ "submitit>=1.3.3",
],
include_package_data=True,
)
| {"golden_diff": "diff --git a/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py b/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py\n--- a/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py\n+++ b/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py\n@@ -1,3 +1,3 @@\n # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n \n-__version__ = \"1.2.0dev1\"\n+__version__ = \"1.2.0dev2\"\ndiff --git a/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py b/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py\n--- a/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py\n+++ b/plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py\n@@ -25,6 +25,8 @@\n nodes: int = 1\n # name of the job\n name: str = \"${hydra.job.name}\"\n+ # redirect stderr to stdout\n+ stderr_to_stdout: bool = False\n \n \n @dataclass\ndiff --git a/plugins/hydra_submitit_launcher/setup.py b/plugins/hydra_submitit_launcher/setup.py\n--- a/plugins/hydra_submitit_launcher/setup.py\n+++ b/plugins/hydra_submitit_launcher/setup.py\n@@ -26,7 +26,7 @@\n ],\n install_requires=[\n \"hydra-core>=1.1.0.dev7\",\n- \"submitit>=1.0.0\",\n+ \"submitit>=1.3.3\",\n ],\n include_package_data=True,\n )\n", "issue": "[Feature Request] submitit: add `stderr_to_stdout` ass supported by submitit\n# \ud83d\ude80 Feature Request\r\n\r\n`stderr_to_stdout` option was added in submitit (not the plugin) [last year](https://github.com/facebookincubator/submitit/pull/1611) but cannot currently be given from submitit plugin. \r\n\r\nWe should add support for passing this `stderr_to_stdout` via the plugin as well.\r\n\r\ncc @Jasha10 \n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# type: ignore\nfrom pathlib import Path\n\nfrom read_version import read_version\nfrom setuptools import find_namespace_packages, setup\n\nsetup(\n name=\"hydra-submitit-launcher\",\n version=read_version(\"hydra_plugins/hydra_submitit_launcher\", \"__init__.py\"),\n author=\"Jeremy Rapin, Jieru Hu, Omry Yadan\",\n author_email=\"[email protected], [email protected], [email protected]\",\n description=\"Submitit Launcher for Hydra apps\",\n long_description=(Path(__file__).parent / \"README.md\").read_text(),\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/facebookincubator/submitit\",\n packages=find_namespace_packages(include=[\"hydra_plugins.*\"]),\n classifiers=[\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX :: Linux\",\n \"Development Status :: 4 - Beta\",\n ],\n install_requires=[\n \"hydra-core>=1.1.0.dev7\",\n \"submitit>=1.0.0\",\n ],\n include_package_data=True,\n)\n", "path": "plugins/hydra_submitit_launcher/setup.py"}, {"content": "# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved\nfrom dataclasses import dataclass, field\nfrom typing import Any, Dict, List, Optional\n\nfrom hydra.core.config_store import ConfigStore\n\n\n@dataclass\nclass BaseQueueConf:\n \"\"\"Configuration shared by all executors\"\"\"\n\n submitit_folder: str = \"${hydra.sweep.dir}/.submitit/%j\"\n\n # maximum time for the job in minutes\n timeout_min: int = 60\n # number of cpus to use for each task\n cpus_per_task: Optional[int] = None\n # number of gpus to use on each node\n gpus_per_node: Optional[int] = None\n # number of tasks to spawn on each node\n tasks_per_node: int = 1\n # memory to reserve for the job on each node (in GB)\n mem_gb: Optional[int] = None\n # number of nodes to use for the job\n nodes: int = 1\n # name of the job\n name: str = \"${hydra.job.name}\"\n\n\n@dataclass\nclass SlurmQueueConf(BaseQueueConf):\n \"\"\"Slurm configuration overrides and specific parameters\"\"\"\n\n _target_: str = (\n \"hydra_plugins.hydra_submitit_launcher.submitit_launcher.SlurmLauncher\"\n )\n\n # Params are used to configure sbatch, for more info check:\n # https://github.com/facebookincubator/submitit/blob/master/submitit/slurm/slurm.py\n\n # Following parameters are slurm specific\n # More information: https://slurm.schedmd.com/sbatch.html\n #\n # slurm partition to use on the cluster\n partition: Optional[str] = None\n qos: Optional[str] = None\n comment: Optional[str] = None\n constraint: Optional[str] = None\n exclude: Optional[str] = None\n gres: Optional[str] = None\n cpus_per_gpu: Optional[int] = None\n gpus_per_task: Optional[int] = None\n mem_per_gpu: Optional[str] = None\n mem_per_cpu: Optional[str] = None\n\n # Following parameters are submitit specifics\n #\n # USR1 signal delay before timeout\n signal_delay_s: int = 120\n # Maximum number of retries on job timeout.\n # Change this only after you confirmed your code can handle re-submission\n # by properly resuming from the latest stored checkpoint.\n # check the following for more info on slurm_max_num_timeout\n # https://github.com/facebookincubator/submitit/blob/master/docs/checkpointing.md\n max_num_timeout: int = 0\n # Useful to add parameters which are not currently available in the plugin.\n # Eg: {\"mail-user\": \"[email protected]\", \"mail-type\": \"BEGIN\"}\n additional_parameters: Dict[str, Any] = field(default_factory=dict)\n # Maximum number of jobs running in parallel\n array_parallelism: int = 256\n # A list of commands to run in sbatch befure running srun\n setup: Optional[List[str]] = None\n\n\n@dataclass\nclass LocalQueueConf(BaseQueueConf):\n _target_: str = (\n \"hydra_plugins.hydra_submitit_launcher.submitit_launcher.LocalLauncher\"\n )\n\n\n# finally, register two different choices:\nConfigStore.instance().store(\n group=\"hydra/launcher\",\n name=\"submitit_local\",\n node=LocalQueueConf(),\n provider=\"submitit_launcher\",\n)\n\n\nConfigStore.instance().store(\n group=\"hydra/launcher\",\n name=\"submitit_slurm\",\n node=SlurmQueueConf(),\n provider=\"submitit_launcher\",\n)\n", "path": "plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py"}, {"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n__version__ = \"1.2.0dev1\"\n", "path": "plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py"}], "after_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved\n# type: ignore\nfrom pathlib import Path\n\nfrom read_version import read_version\nfrom setuptools import find_namespace_packages, setup\n\nsetup(\n name=\"hydra-submitit-launcher\",\n version=read_version(\"hydra_plugins/hydra_submitit_launcher\", \"__init__.py\"),\n author=\"Jeremy Rapin, Jieru Hu, Omry Yadan\",\n author_email=\"[email protected], [email protected], [email protected]\",\n description=\"Submitit Launcher for Hydra apps\",\n long_description=(Path(__file__).parent / \"README.md\").read_text(),\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/facebookincubator/submitit\",\n packages=find_namespace_packages(include=[\"hydra_plugins.*\"]),\n classifiers=[\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX :: Linux\",\n \"Development Status :: 4 - Beta\",\n ],\n install_requires=[\n \"hydra-core>=1.1.0.dev7\",\n \"submitit>=1.3.3\",\n ],\n include_package_data=True,\n)\n", "path": "plugins/hydra_submitit_launcher/setup.py"}, {"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom dataclasses import dataclass, field\nfrom typing import Any, Dict, List, Optional\n\nfrom hydra.core.config_store import ConfigStore\n\n\n@dataclass\nclass BaseQueueConf:\n \"\"\"Configuration shared by all executors\"\"\"\n\n submitit_folder: str = \"${hydra.sweep.dir}/.submitit/%j\"\n\n # maximum time for the job in minutes\n timeout_min: int = 60\n # number of cpus to use for each task\n cpus_per_task: Optional[int] = None\n # number of gpus to use on each node\n gpus_per_node: Optional[int] = None\n # number of tasks to spawn on each node\n tasks_per_node: int = 1\n # memory to reserve for the job on each node (in GB)\n mem_gb: Optional[int] = None\n # number of nodes to use for the job\n nodes: int = 1\n # name of the job\n name: str = \"${hydra.job.name}\"\n # redirect stderr to stdout\n stderr_to_stdout: bool = False\n\n\n@dataclass\nclass SlurmQueueConf(BaseQueueConf):\n \"\"\"Slurm configuration overrides and specific parameters\"\"\"\n\n _target_: str = (\n \"hydra_plugins.hydra_submitit_launcher.submitit_launcher.SlurmLauncher\"\n )\n\n # Params are used to configure sbatch, for more info check:\n # https://github.com/facebookincubator/submitit/blob/master/submitit/slurm/slurm.py\n\n # Following parameters are slurm specific\n # More information: https://slurm.schedmd.com/sbatch.html\n #\n # slurm partition to use on the cluster\n partition: Optional[str] = None\n qos: Optional[str] = None\n comment: Optional[str] = None\n constraint: Optional[str] = None\n exclude: Optional[str] = None\n gres: Optional[str] = None\n cpus_per_gpu: Optional[int] = None\n gpus_per_task: Optional[int] = None\n mem_per_gpu: Optional[str] = None\n mem_per_cpu: Optional[str] = None\n\n # Following parameters are submitit specifics\n #\n # USR1 signal delay before timeout\n signal_delay_s: int = 120\n # Maximum number of retries on job timeout.\n # Change this only after you confirmed your code can handle re-submission\n # by properly resuming from the latest stored checkpoint.\n # check the following for more info on slurm_max_num_timeout\n # https://github.com/facebookincubator/submitit/blob/master/docs/checkpointing.md\n max_num_timeout: int = 0\n # Useful to add parameters which are not currently available in the plugin.\n # Eg: 
{\"mail-user\": \"[email protected]\", \"mail-type\": \"BEGIN\"}\n additional_parameters: Dict[str, Any] = field(default_factory=dict)\n # Maximum number of jobs running in parallel\n array_parallelism: int = 256\n # A list of commands to run in sbatch befure running srun\n setup: Optional[List[str]] = None\n\n\n@dataclass\nclass LocalQueueConf(BaseQueueConf):\n _target_: str = (\n \"hydra_plugins.hydra_submitit_launcher.submitit_launcher.LocalLauncher\"\n )\n\n\n# finally, register two different choices:\nConfigStore.instance().store(\n group=\"hydra/launcher\",\n name=\"submitit_local\",\n node=LocalQueueConf(),\n provider=\"submitit_launcher\",\n)\n\n\nConfigStore.instance().store(\n group=\"hydra/launcher\",\n name=\"submitit_slurm\",\n node=SlurmQueueConf(),\n provider=\"submitit_launcher\",\n)\n", "path": "plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/config.py"}, {"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n__version__ = \"1.2.0dev2\"\n", "path": "plugins/hydra_submitit_launcher/hydra_plugins/hydra_submitit_launcher/__init__.py"}]} | 1,822 | 413 |
gh_patches_debug_13695 | rasdani/github-patches | git_diff | projectmesa__mesa-373 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Allow fixed seed for replication
Figure out how to best handle running a model with a fixed seed, to ensure that it will produce the same result, and implement that.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mesa/model.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 """
3 The model class for Mesa framework.
4
5 Core Objects: Model
6
7 """
8 import datetime as dt
9 import random
10
11
12 class Model:
13 """ Base class for models. """
14 def __init__(self, seed=None):
15 """ Create a new model. Overload this method with the actual code to
16 start the model.
17
18 Args:
19 seed: seed for the random number generator
20
21 Attributes:
22 schedule: schedule object
23 running: a bool indicating if the model should continue running
24
25 """
26 if seed is None:
27 self.seed = dt.datetime.now()
28 else:
29 self.seed = seed
30 random.seed(seed)
31 self.running = True
32 self.schedule = None
33
34 def run_model(self):
35 """ Run the model until the end condition is reached. Overload as
36 needed.
37
38 """
39 while self.running:
40 self.step()
41
42 def step(self):
43 """ A single step. Fill in here. """
44 pass
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mesa/model.py b/mesa/model.py
--- a/mesa/model.py
+++ b/mesa/model.py
@@ -7,6 +7,7 @@
"""
import datetime as dt
import random
+import numpy
class Model:
@@ -23,11 +24,14 @@
running: a bool indicating if the model should continue running
"""
+ # seed both the numpy and Python random number generators
if seed is None:
self.seed = dt.datetime.now()
else:
self.seed = seed
random.seed(seed)
+ numpy.random.seed(seed)
+
self.running = True
self.schedule = None
| {"golden_diff": "diff --git a/mesa/model.py b/mesa/model.py\n--- a/mesa/model.py\n+++ b/mesa/model.py\n@@ -7,6 +7,7 @@\n \"\"\"\n import datetime as dt\n import random\n+import numpy\n \n \n class Model:\n@@ -23,11 +24,14 @@\n running: a bool indicating if the model should continue running\n \n \"\"\"\n+ # seed both the numpy and Python random number generators\n if seed is None:\n self.seed = dt.datetime.now()\n else:\n self.seed = seed\n random.seed(seed)\n+ numpy.random.seed(seed)\n+\n self.running = True\n self.schedule = None\n", "issue": "Allow fixed seed for replication\nFigure out how to best handle running a model with a fixed seed, to ensure that it will produce the same result. and implement that.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nThe model class for Mesa framework.\n\nCore Objects: Model\n\n\"\"\"\nimport datetime as dt\nimport random\n\n\nclass Model:\n \"\"\" Base class for models. \"\"\"\n def __init__(self, seed=None):\n \"\"\" Create a new model. Overload this method with the actual code to\n start the model.\n\n Args:\n seed: seed for the random number generator\n\n Attributes:\n schedule: schedule object\n running: a bool indicating if the model should continue running\n\n \"\"\"\n if seed is None:\n self.seed = dt.datetime.now()\n else:\n self.seed = seed\n random.seed(seed)\n self.running = True\n self.schedule = None\n\n def run_model(self):\n \"\"\" Run the model until the end condition is reached. Overload as\n needed.\n\n \"\"\"\n while self.running:\n self.step()\n\n def step(self):\n \"\"\" A single step. Fill in here. \"\"\"\n pass\n", "path": "mesa/model.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nThe model class for Mesa framework.\n\nCore Objects: Model\n\n\"\"\"\nimport datetime as dt\nimport random\nimport numpy\n\n\nclass Model:\n \"\"\" Base class for models. \"\"\"\n def __init__(self, seed=None):\n \"\"\" Create a new model. Overload this method with the actual code to\n start the model.\n\n Args:\n seed: seed for the random number generator\n\n Attributes:\n schedule: schedule object\n running: a bool indicating if the model should continue running\n\n \"\"\"\n # seed both the numpy and Python random number generators\n if seed is None:\n self.seed = dt.datetime.now()\n else:\n self.seed = seed\n random.seed(seed)\n numpy.random.seed(seed)\n\n self.running = True\n self.schedule = None\n\n def run_model(self):\n \"\"\" Run the model until the end condition is reached. Overload as\n needed.\n\n \"\"\"\n while self.running:\n self.step()\n\n def step(self):\n \"\"\" A single step. Fill in here. \"\"\"\n pass\n", "path": "mesa/model.py"}]} | 588 | 145 |
gh_patches_debug_29583 | rasdani/github-patches | git_diff | Parsl__parsl-2301 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Remove IPyParallel executor
**Is your feature request related to a problem? Please describe.**
The parsl ipp code isn't really maintained any more. As I try to tidy up some stuff in the test suite I find myself spending more time than I'd like to debugging what is happening inside the IPP parsl executor, while at the same time the folklore is "don't use IPP".
**Describe the solution you'd like**
I'd like to remove all IPP tests from the test suite now. This will inevitably lead to the ipp code rotting fast, and so in addition the IPP executor should probably be removed in its entirety.
**Describe alternatives you've considered**
An alternative is to put developer effort into maintaining IPP as a supported executor.
**Additional context**
The folklore on slack seems to be "don't use IPP". This feature request turns that folklore into reality.
I invite comment in support or against from @kylechard @yadudoc @annawoodard especially as regards how this would affect existing users who have not moved to htex (if any?).
see #1328
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parsl/executors/ipp_controller.py`
Content:
```
1 class Controller():
2 """This stub exists to issue a more helpful warning about the IPyParallel
3 executor being removed from parsl some time after v0.9.
4
5 It can eventually be removed entirely - perhaps after v0.10
6 """
7 def __init__(self, *args, **kwargs):
8 raise RuntimeError("The IPyParallel executor has been removed from parsl")
9
```
Path: `parsl/executors/__init__.py`
Content:
```
1 from parsl.executors.threads import ThreadPoolExecutor
2 from parsl.executors.ipp import IPyParallelExecutor
3 from parsl.executors.workqueue.executor import WorkQueueExecutor
4 from parsl.executors.high_throughput.executor import HighThroughputExecutor
5 from parsl.executors.extreme_scale.executor import ExtremeScaleExecutor
6 from parsl.executors.low_latency.executor import LowLatencyExecutor
7 from parsl.executors.flux.executor import FluxExecutor
8
9 __all__ = ['IPyParallelExecutor',
10 'ThreadPoolExecutor',
11 'HighThroughputExecutor',
12 'ExtremeScaleExecutor',
13 'LowLatencyExecutor',
14 'WorkQueueExecutor',
15 'FluxExecutor']
16
```
Path: `parsl/executors/ipp.py`
Content:
```
1 from parsl.executors.base import ParslExecutor
2
3
4 class IPyParallelExecutor(ParslExecutor):
5 """This stub exists to issue a more helpful warning about the IPyParallel
6 executor being removed from parsl some time after v0.9.
7
8 It can eventually be removed entirely - perhaps after v0.10
9 """
10
11 def __new__(*args, **kwargs):
12 raise RuntimeError("The IPyParallel executor has been removed from parsl")
13
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/parsl/executors/__init__.py b/parsl/executors/__init__.py
--- a/parsl/executors/__init__.py
+++ b/parsl/executors/__init__.py
@@ -1,13 +1,11 @@
from parsl.executors.threads import ThreadPoolExecutor
-from parsl.executors.ipp import IPyParallelExecutor
from parsl.executors.workqueue.executor import WorkQueueExecutor
from parsl.executors.high_throughput.executor import HighThroughputExecutor
from parsl.executors.extreme_scale.executor import ExtremeScaleExecutor
from parsl.executors.low_latency.executor import LowLatencyExecutor
from parsl.executors.flux.executor import FluxExecutor
-__all__ = ['IPyParallelExecutor',
- 'ThreadPoolExecutor',
+__all__ = ['ThreadPoolExecutor',
'HighThroughputExecutor',
'ExtremeScaleExecutor',
'LowLatencyExecutor',
diff --git a/parsl/executors/ipp.py b/parsl/executors/ipp.py
deleted file mode 100644
--- a/parsl/executors/ipp.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from parsl.executors.base import ParslExecutor
-
-
-class IPyParallelExecutor(ParslExecutor):
- """This stub exists to issue a more helpful warning about the IPyParallel
- executor being removed from parsl some time after v0.9.
-
- It can eventually be removed entirely - perhaps after v0.10
- """
-
- def __new__(*args, **kwargs):
- raise RuntimeError("The IPyParallel executor has been removed from parsl")
diff --git a/parsl/executors/ipp_controller.py b/parsl/executors/ipp_controller.py
deleted file mode 100644
--- a/parsl/executors/ipp_controller.py
+++ /dev/null
@@ -1,8 +0,0 @@
-class Controller():
- """This stub exists to issue a more helpful warning about the IPyParallel
- executor being removed from parsl some time after v0.9.
-
- It can eventually be removed entirely - perhaps after v0.10
- """
- def __init__(self, *args, **kwargs):
- raise RuntimeError("The IPyParallel executor has been removed from parsl")
| {"golden_diff": "diff --git a/parsl/executors/__init__.py b/parsl/executors/__init__.py\n--- a/parsl/executors/__init__.py\n+++ b/parsl/executors/__init__.py\n@@ -1,13 +1,11 @@\n from parsl.executors.threads import ThreadPoolExecutor\n-from parsl.executors.ipp import IPyParallelExecutor\n from parsl.executors.workqueue.executor import WorkQueueExecutor\n from parsl.executors.high_throughput.executor import HighThroughputExecutor\n from parsl.executors.extreme_scale.executor import ExtremeScaleExecutor\n from parsl.executors.low_latency.executor import LowLatencyExecutor\n from parsl.executors.flux.executor import FluxExecutor\n \n-__all__ = ['IPyParallelExecutor',\n- 'ThreadPoolExecutor',\n+__all__ = ['ThreadPoolExecutor',\n 'HighThroughputExecutor',\n 'ExtremeScaleExecutor',\n 'LowLatencyExecutor',\ndiff --git a/parsl/executors/ipp.py b/parsl/executors/ipp.py\ndeleted file mode 100644\n--- a/parsl/executors/ipp.py\n+++ /dev/null\n@@ -1,12 +0,0 @@\n-from parsl.executors.base import ParslExecutor\n-\n-\n-class IPyParallelExecutor(ParslExecutor):\n- \"\"\"This stub exists to issue a more helpful warning about the IPyParallel\n- executor being removed from parsl some time after v0.9.\n-\n- It can eventually be removed entirely - perhaps after v0.10\n- \"\"\"\n-\n- def __new__(*args, **kwargs):\n- raise RuntimeError(\"The IPyParallel executor has been removed from parsl\")\ndiff --git a/parsl/executors/ipp_controller.py b/parsl/executors/ipp_controller.py\ndeleted file mode 100644\n--- a/parsl/executors/ipp_controller.py\n+++ /dev/null\n@@ -1,8 +0,0 @@\n-class Controller():\n- \"\"\"This stub exists to issue a more helpful warning about the IPyParallel\n- executor being removed from parsl some time after v0.9.\n-\n- It can eventually be removed entirely - perhaps after v0.10\n- \"\"\"\n- def __init__(self, *args, **kwargs):\n- raise RuntimeError(\"The IPyParallel executor has been removed from parsl\")\n", "issue": "Remove IPyParallel executor\n**Is your feature request related to a problem? Please describe.**\r\nThe parsl ipp code isn't really maintained any more. As I try to tidy up some stuff in the test suite I find myself spending more time than I'd like to debugging what is happening inside the IPP parsl executor, while at the same time the folklore is \"don't use IPP\".\r\n\r\n**Describe the solution you'd like**\r\nI'd like to remove all IPP tests from the test suite now. This will inevitably lead to the ipp code rotting fast, and so in addition the IPP executor should probably be removed in its entirely.\r\n\r\n**Describe alternatives you've considered**\r\nAn alternative is to put developer effort into maintaining IPP as a supported executor.\r\n\r\n**Additional context**\r\nThe folklore on slack seems to be \"don't use IPP\". 
This feature request turns that folklore into reality.\r\n\r\nI invite comment in support or against from @kylechard @yadudoc @annawoodard especially as regards how this would affect existing users who have not moved to htex (if any?).\r\n\r\nsee #1328 \n", "before_files": [{"content": "class Controller():\n \"\"\"This stub exists to issue a more helpful warning about the IPyParallel\n executor being removed from parsl some time after v0.9.\n\n It can eventually be removed entirely - perhaps after v0.10\n \"\"\"\n def __init__(self, *args, **kwargs):\n raise RuntimeError(\"The IPyParallel executor has been removed from parsl\")\n", "path": "parsl/executors/ipp_controller.py"}, {"content": "from parsl.executors.threads import ThreadPoolExecutor\nfrom parsl.executors.ipp import IPyParallelExecutor\nfrom parsl.executors.workqueue.executor import WorkQueueExecutor\nfrom parsl.executors.high_throughput.executor import HighThroughputExecutor\nfrom parsl.executors.extreme_scale.executor import ExtremeScaleExecutor\nfrom parsl.executors.low_latency.executor import LowLatencyExecutor\nfrom parsl.executors.flux.executor import FluxExecutor\n\n__all__ = ['IPyParallelExecutor',\n 'ThreadPoolExecutor',\n 'HighThroughputExecutor',\n 'ExtremeScaleExecutor',\n 'LowLatencyExecutor',\n 'WorkQueueExecutor',\n 'FluxExecutor']\n", "path": "parsl/executors/__init__.py"}, {"content": "from parsl.executors.base import ParslExecutor\n\n\nclass IPyParallelExecutor(ParslExecutor):\n \"\"\"This stub exists to issue a more helpful warning about the IPyParallel\n executor being removed from parsl some time after v0.9.\n\n It can eventually be removed entirely - perhaps after v0.10\n \"\"\"\n\n def __new__(*args, **kwargs):\n raise RuntimeError(\"The IPyParallel executor has been removed from parsl\")\n", "path": "parsl/executors/ipp.py"}], "after_files": [{"content": null, "path": "parsl/executors/ipp_controller.py"}, {"content": "from parsl.executors.threads import ThreadPoolExecutor\nfrom parsl.executors.workqueue.executor import WorkQueueExecutor\nfrom parsl.executors.high_throughput.executor import HighThroughputExecutor\nfrom parsl.executors.extreme_scale.executor import ExtremeScaleExecutor\nfrom parsl.executors.low_latency.executor import LowLatencyExecutor\nfrom parsl.executors.flux.executor import FluxExecutor\n\n__all__ = ['ThreadPoolExecutor',\n 'HighThroughputExecutor',\n 'ExtremeScaleExecutor',\n 'LowLatencyExecutor',\n 'WorkQueueExecutor',\n 'FluxExecutor']\n", "path": "parsl/executors/__init__.py"}, {"content": null, "path": "parsl/executors/ipp.py"}]} | 904 | 516 |
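For anyone still on `IPyParallelExecutor`, the stubs above turn construction into a hard `RuntimeError`, so the practical migration is to `HighThroughputExecutor`. A minimal sketch follows; `parsl.load`, `Config`, `HighThroughputExecutor`, and `python_app` are real parsl APIs, but the specific argument values are assumptions for illustration:

```python
import parsl
from parsl.config import Config
from parsl.executors import HighThroughputExecutor

# Replace the old IPP config with an HTEX-backed one.
parsl.load(Config(executors=[HighThroughputExecutor(label="htex_local", max_workers=2)]))

@parsl.python_app
def double(x):
    return 2 * x

print(double(21).result())  # 42
```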
gh_patches_debug_15651 | rasdani/github-patches | git_diff | explosion__spaCy-1389 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Lemmatizer exceptions: `lemma_strings` get sorted anyway
Lemmatization exceptions have been working inconsistently, even when they are added directly in `corpora/en/wordnet/dict/verb.exc`
The minimal test case of `coping` at https://github.com/explosion/spaCy/issues/389 reveals that, at
https://github.com/explosion/spaCy/blob/master/spacy/lemmatizer.py#L94, the lemmatizer's list of potential forms (where the thing in lemmatizer.exceptions is item [0]) is cast to a `set` (and so loses ordering); then in https://github.com/explosion/spaCy/blob/master/spacy/morphology.pyx#L149, that `set` gets sorted. So lemmatizer exceptions only work if they also come first alphabetically!
I've implemented the fix for this, but I need this issue in order to submit the PR!
One question, though, for @honnibal: this can be fixed either as I did it locally -- return the whole list, with exceptions and then anything that comes back from the lemmatizer's `rules` -- or skip the rules altogether if we used an exception. I think it's more useful downstream if we keep all lemma candidates, even if we're not using them in the default pipeline. But it also seems only destructive to do `sorted(set())` on them!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `spacy/lemmatizer.py`
Content:
```
1 # coding: utf8
2 from __future__ import unicode_literals
3
4 from .symbols import POS, NOUN, VERB, ADJ, PUNCT
5 from .symbols import VerbForm_inf, VerbForm_none, Number_sing, Degree_pos
6
7
8 class Lemmatizer(object):
9 @classmethod
10 def load(cls, path, index=None, exc=None, rules=None):
11 return cls(index or {}, exc or {}, rules or {})
12
13 def __init__(self, index, exceptions, rules):
14 self.index = index
15 self.exc = exceptions
16 self.rules = rules
17
18 def __call__(self, string, univ_pos, morphology=None):
19 if univ_pos == NOUN:
20 univ_pos = 'noun'
21 elif univ_pos == VERB:
22 univ_pos = 'verb'
23 elif univ_pos == ADJ:
24 univ_pos = 'adj'
25 elif univ_pos == PUNCT:
26 univ_pos = 'punct'
27 # See Issue #435 for example of where this logic is requied.
28 if self.is_base_form(univ_pos, morphology):
29 return set([string.lower()])
30 lemmas = lemmatize(string, self.index.get(univ_pos, {}),
31 self.exc.get(univ_pos, {}),
32 self.rules.get(univ_pos, []))
33 return lemmas
34
35 def is_base_form(self, univ_pos, morphology=None):
36 """
37 Check whether we're dealing with an uninflected paradigm, so we can
38 avoid lemmatization entirely.
39 """
40 morphology = {} if morphology is None else morphology
41 others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')]
42 true_morph_key = morphology.get('morph', 0)
43 if univ_pos == 'noun' and morphology.get('Number') == 'sing':
44 return True
45 elif univ_pos == 'verb' and morphology.get('VerbForm') == 'inf':
46 return True
47 elif univ_pos == 'adj' and morphology.get('Degree') == 'pos':
48 return True
49 elif VerbForm_inf in morphology:
50 return True
51 elif VerbForm_none in morphology:
52 return True
53 elif Number_sing in morphology:
54 return True
55 elif Degree_pos in morphology:
56 return True
57 else:
58 return False
59
60 def noun(self, string, morphology=None):
61 return self(string, 'noun', morphology)
62
63 def verb(self, string, morphology=None):
64 return self(string, 'verb', morphology)
65
66 def adj(self, string, morphology=None):
67 return self(string, 'adj', morphology)
68
69 def punct(self, string, morphology=None):
70 return self(string, 'punct', morphology)
71
72
73 def lemmatize(string, index, exceptions, rules):
74 string = string.lower()
75 forms = []
76 # TODO: Is this correct? See discussion in Issue #435.
77 #if string in index:
78 # forms.append(string)
79 forms.extend(exceptions.get(string, []))
80 oov_forms = []
81 for old, new in rules:
82 if string.endswith(old):
83 form = string[:len(string) - len(old)] + new
84 if not form:
85 pass
86 elif form in index or not form.isalpha():
87 forms.append(form)
88 else:
89 oov_forms.append(form)
90 if not forms:
91 forms.extend(oov_forms)
92 if not forms:
93 forms.append(string)
94 return set(forms)
95
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/spacy/lemmatizer.py b/spacy/lemmatizer.py
--- a/spacy/lemmatizer.py
+++ b/spacy/lemmatizer.py
@@ -78,15 +78,16 @@
# forms.append(string)
forms.extend(exceptions.get(string, []))
oov_forms = []
- for old, new in rules:
- if string.endswith(old):
- form = string[:len(string) - len(old)] + new
- if not form:
- pass
- elif form in index or not form.isalpha():
- forms.append(form)
- else:
- oov_forms.append(form)
+ if not forms:
+ for old, new in rules:
+ if string.endswith(old):
+ form = string[:len(string) - len(old)] + new
+ if not form:
+ pass
+ elif form in index or not form.isalpha():
+ forms.append(form)
+ else:
+ oov_forms.append(form)
if not forms:
forms.extend(oov_forms)
if not forms:
| {"golden_diff": "diff --git a/spacy/lemmatizer.py b/spacy/lemmatizer.py\n--- a/spacy/lemmatizer.py\n+++ b/spacy/lemmatizer.py\n@@ -78,15 +78,16 @@\n # forms.append(string)\n forms.extend(exceptions.get(string, []))\n oov_forms = []\n- for old, new in rules:\n- if string.endswith(old):\n- form = string[:len(string) - len(old)] + new\n- if not form:\n- pass\n- elif form in index or not form.isalpha():\n- forms.append(form)\n- else:\n- oov_forms.append(form)\n+ if not forms:\n+ for old, new in rules:\n+ if string.endswith(old):\n+ form = string[:len(string) - len(old)] + new\n+ if not form:\n+ pass\n+ elif form in index or not form.isalpha():\n+ forms.append(form)\n+ else:\n+ oov_forms.append(form)\n if not forms:\n forms.extend(oov_forms)\n if not forms:\n", "issue": "Lemmatizer exceptions: `lemma_strings` get sorted anyway\nLemmatization exceptions have been working inconsistently, even when they are added directly in `corpora/en/wordnet/dict/verb.exc`\r\n\r\nThe minimal test case of `coping` at https://github.com/explosion/spaCy/issues/389 reveals that, at \r\nhttps://github.com/explosion/spaCy/blob/master/spacy/lemmatizer.py#L94, the lemmatizer's list of potential forms (where the thing in lemmatizer.exceptions is item [0]) is cast to a `set` (and so loses ordering); then in https://github.com/explosion/spaCy/blob/master/spacy/morphology.pyx#L149, that `set` gets sorted. So lemmatizer exceptions only work if they also come first alphabetically!\r\n\r\nI've implemented the fix for this, but I need this issue in order to submit the PR!\r\n\r\nOne question, though, for @honnibal: this can be fixed either as I did it locally -- return the whole list, with exceptions and then anything that comes back from the lemmatizer's `rules` -- or skip the rules altogether if we used an exception. I think it's more useful downstream if we keep all lemma candidates, even if we're not using them in the default pipeline. 
But it also seems only destructive to do `sorted(set())` on them!\n", "before_files": [{"content": "# coding: utf8\nfrom __future__ import unicode_literals\n\nfrom .symbols import POS, NOUN, VERB, ADJ, PUNCT\nfrom .symbols import VerbForm_inf, VerbForm_none, Number_sing, Degree_pos\n\n\nclass Lemmatizer(object):\n @classmethod\n def load(cls, path, index=None, exc=None, rules=None):\n return cls(index or {}, exc or {}, rules or {})\n\n def __init__(self, index, exceptions, rules):\n self.index = index\n self.exc = exceptions\n self.rules = rules\n\n def __call__(self, string, univ_pos, morphology=None):\n if univ_pos == NOUN:\n univ_pos = 'noun'\n elif univ_pos == VERB:\n univ_pos = 'verb'\n elif univ_pos == ADJ:\n univ_pos = 'adj'\n elif univ_pos == PUNCT:\n univ_pos = 'punct'\n # See Issue #435 for example of where this logic is requied.\n if self.is_base_form(univ_pos, morphology):\n return set([string.lower()])\n lemmas = lemmatize(string, self.index.get(univ_pos, {}),\n self.exc.get(univ_pos, {}),\n self.rules.get(univ_pos, []))\n return lemmas\n\n def is_base_form(self, univ_pos, morphology=None):\n \"\"\"\n Check whether we're dealing with an uninflected paradigm, so we can\n avoid lemmatization entirely.\n \"\"\"\n morphology = {} if morphology is None else morphology\n others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')]\n true_morph_key = morphology.get('morph', 0)\n if univ_pos == 'noun' and morphology.get('Number') == 'sing':\n return True\n elif univ_pos == 'verb' and morphology.get('VerbForm') == 'inf':\n return True\n elif univ_pos == 'adj' and morphology.get('Degree') == 'pos':\n return True\n elif VerbForm_inf in morphology:\n return True\n elif VerbForm_none in morphology:\n return True\n elif Number_sing in morphology:\n return True\n elif Degree_pos in morphology:\n return True\n else:\n return False\n\n def noun(self, string, morphology=None):\n return self(string, 'noun', morphology)\n\n def verb(self, string, morphology=None):\n return self(string, 'verb', morphology)\n\n def adj(self, string, morphology=None):\n return self(string, 'adj', morphology)\n\n def punct(self, string, morphology=None):\n return self(string, 'punct', morphology)\n\n\ndef lemmatize(string, index, exceptions, rules):\n string = string.lower()\n forms = []\n # TODO: Is this correct? 
See discussion in Issue #435.\n #if string in index:\n # forms.append(string)\n forms.extend(exceptions.get(string, []))\n oov_forms = []\n for old, new in rules:\n if string.endswith(old):\n form = string[:len(string) - len(old)] + new\n if not form:\n pass\n elif form in index or not form.isalpha():\n forms.append(form)\n else:\n oov_forms.append(form)\n if not forms:\n forms.extend(oov_forms)\n if not forms:\n forms.append(string)\n return set(forms)\n", "path": "spacy/lemmatizer.py"}], "after_files": [{"content": "# coding: utf8\nfrom __future__ import unicode_literals\n\nfrom .symbols import POS, NOUN, VERB, ADJ, PUNCT\nfrom .symbols import VerbForm_inf, VerbForm_none, Number_sing, Degree_pos\n\n\nclass Lemmatizer(object):\n @classmethod\n def load(cls, path, index=None, exc=None, rules=None):\n return cls(index or {}, exc or {}, rules or {})\n\n def __init__(self, index, exceptions, rules):\n self.index = index\n self.exc = exceptions\n self.rules = rules\n\n def __call__(self, string, univ_pos, morphology=None):\n if univ_pos == NOUN:\n univ_pos = 'noun'\n elif univ_pos == VERB:\n univ_pos = 'verb'\n elif univ_pos == ADJ:\n univ_pos = 'adj'\n elif univ_pos == PUNCT:\n univ_pos = 'punct'\n # See Issue #435 for example of where this logic is requied.\n if self.is_base_form(univ_pos, morphology):\n return set([string.lower()])\n lemmas = lemmatize(string, self.index.get(univ_pos, {}),\n self.exc.get(univ_pos, {}),\n self.rules.get(univ_pos, []))\n return lemmas\n\n def is_base_form(self, univ_pos, morphology=None):\n \"\"\"\n Check whether we're dealing with an uninflected paradigm, so we can\n avoid lemmatization entirely.\n \"\"\"\n morphology = {} if morphology is None else morphology\n others = [key for key in morphology if key not in (POS, 'number', 'pos', 'verbform')]\n true_morph_key = morphology.get('morph', 0)\n if univ_pos == 'noun' and morphology.get('Number') == 'sing':\n return True\n elif univ_pos == 'verb' and morphology.get('VerbForm') == 'inf':\n return True\n elif univ_pos == 'adj' and morphology.get('Degree') == 'pos':\n return True\n elif VerbForm_inf in morphology:\n return True\n elif VerbForm_none in morphology:\n return True\n elif Number_sing in morphology:\n return True\n elif Degree_pos in morphology:\n return True\n else:\n return False\n\n def noun(self, string, morphology=None):\n return self(string, 'noun', morphology)\n\n def verb(self, string, morphology=None):\n return self(string, 'verb', morphology)\n\n def adj(self, string, morphology=None):\n return self(string, 'adj', morphology)\n\n def punct(self, string, morphology=None):\n return self(string, 'punct', morphology)\n\n\ndef lemmatize(string, index, exceptions, rules):\n string = string.lower()\n forms = []\n # TODO: Is this correct? See discussion in Issue #435.\n #if string in index:\n # forms.append(string)\n forms.extend(exceptions.get(string, []))\n oov_forms = []\n if not forms:\n for old, new in rules:\n if string.endswith(old):\n form = string[:len(string) - len(old)] + new\n if not form:\n pass\n elif form in index or not form.isalpha():\n forms.append(form)\n else:\n oov_forms.append(form)\n if not forms:\n forms.extend(oov_forms)\n if not forms:\n forms.append(string)\n return set(forms)\n", "path": "spacy/lemmatizer.py"}]} | 1,504 | 244 |
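A toy reproduction of the ordering bug described above, in standalone Python rather than spaCy's actual pipeline: when the exception lemma and a rule-generated form both survive, `sorted(set(...))[0]` picks the alphabetically first form, so the exception only wins by luck. Short-circuiting the rules when an exception exists, as the golden diff does, removes that race. The `axes`/`axis`/`axe` data is invented for the demo:

```python
def lemmatize(string, index, exceptions, rules):
    """Toy version of spacy.lemmatizer.lemmatize with the patched control flow."""
    forms = list(exceptions.get(string, []))
    if not forms:  # the fix: suffix rules only run when no exception matched
        for old, new in rules:
            if string.endswith(old):
                candidate = string[: len(string) - len(old)] + new
                if candidate in index:
                    forms.append(candidate)
    return forms or [string]

index = {"axis", "axe"}
exceptions = {"axes": ["axis"]}   # the lemma the exception file wants
rules = [("s", "")]               # a suffix rule that also yields "axe"

print(sorted({"axis", "axe"})[0])                      # 'axe': exception loses the sort
print(lemmatize("axes", index, exceptions, rules)[0])  # 'axis': exception wins
```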
gh_patches_debug_5114 | rasdani/github-patches | git_diff | pre-commit__pre-commit-177 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Stashed changes lost if hook fails
I've run into this particular (in my eyes, critical) bug.
If I want to do a partial commit, e.g. I have 2 files but I only add 1 file to the staging area and the staged file will cause a hook to fail, I lose the changes in the 2nd file because pre-commit fails to reroll the patch it stashed before running.
Here's my terminal log and the steps to reproduce:
## Version
$ pre-commit -V
pre-commit 0.3.0
## Commands to reproduce
```
$ cat unstaged.py
"""I am unstaged"""
$ echo "'''I am unstaged, but I have changes'''" > unstaged.py
$ echo "x = 'This is the loooooooooooooooooooooooooooooooooooongest liiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiine eveeeeeeeeer'" > foo.py
$ git status
On branch master
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
(use "git checkout -- <file>..." to discard changes in working directory)
modified: unstaged.py
modified: foo.py
no changes added to commit (use "git add" and/or "git commit -a")
$ git add foo.py
$ git commit -m "Adding a long line"
[WARNING] Unstaged files detected.
[INFO] Stashing unstaged files to /home/k/.pre-commit/patch1412683352.
Flake8...............................................Failed
hookid: flake8
foo.py:1:80: E501 line too long (112 > 79 characters)
[WARNING] Stashed changes conflicted with hook auto-fixes... Rolling back fixes...
An unexpected error has occurred: CalledProcessError: Command: [u'git', u'apply', u'/home/k/.pre-commit/patch1412683352']
Return code: 128
Expected return code: 0
Output: (u'', u'fatal: unrecognized input\n')
Check the log at ~/.pre-commit/pre-commit.log
$ git status
On branch master
Changes to be committed:
(use "git reset HEAD <file>..." to unstage)
modified: foo.py
$ echo "x = 'This is a shorter line, its better'" > foo.py
$ git status
On branch master
Changes to be committed:
(use "git reset HEAD <file>..." to unstage)
modified: foo.py
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
(use "git checkout -- <file>..." to discard changes in working directory)
modified: foo.py
$ git add foo.py
$ git commit -m "Fixed the error I got from the flake8 hook"
Flake8...............................................Passed
[master 78568e8] Fixed the error I got from the flake8 hook
1 file changed, 1 insertion(+), 1 deletion(-)
$ git status
On branch master
nothing to commit, working directory clean
$ cat unstaged.py
"""I am unstaged"""
```
## Log
```
$ cat ~/.pre-commit/pre-commit.log
Traceback (most recent call last):
File "/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/error_handler.py", line 34, in error_handler
yield
File "/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/main.py", line 108, in main
return run(runner, args)
File "/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/commands/run.py", line 151, in run
return _run_hooks(runner, args, write=write, environ=environ)
File "/usr/lib/python2.7/contextlib.py", line 24, in __exit__
self.gen.next()
File "/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/staged_files_only.py", line 55, in staged_files_only
cmd_runner.run(['git', 'apply', patch_filename])
File "/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py", line 82, in run
returncode, replaced_cmd, retcode, output=(stdout, stderr),
CalledProcessError: Command: [u'git', u'apply', u'/home/k/.pre-commit/patch1412683352']
Return code: 128
Expected return code: 0
Output: (u'', u'fatal: unrecognized input\n')
```
## .pre-commit-config.yaml
```
$ cat .pre-commit-config.yaml
- repo: git@github.com:pre-commit/pre-commit-hooks
sha: 6343700aa063fe30acc319d2dc84353a35a3d6d0
hooks:
- id: flake8
args: ['--ignore=E712,F821']
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pre_commit/staged_files_only.py`
Content:
```
1 from __future__ import unicode_literals
2
3 import contextlib
4 import io
5 import logging
6 import time
7
8 from pre_commit.util import CalledProcessError
9
10
11 logger = logging.getLogger('pre_commit')
12
13
14 @contextlib.contextmanager
15 def staged_files_only(cmd_runner):
16 """Clear any unstaged changes from the git working directory inside this
17 context.
18
19 Args:
20 cmd_runner - PrefixedCommandRunner
21 """
22 # Determine if there are unstaged files
23 retcode, diff_stdout_binary, _ = cmd_runner.run(
24 ['git', 'diff', '--ignore-submodules', '--binary', '--exit-code'],
25 retcode=None,
26 encoding=None,
27 )
28 if retcode and diff_stdout_binary.strip():
29 patch_filename = cmd_runner.path('patch{0}'.format(int(time.time())))
30 logger.warning('Unstaged files detected.')
31 logger.info(
32 'Stashing unstaged files to {0}.'.format(patch_filename),
33 )
34 # Save the current unstaged changes as a patch
35 with io.open(patch_filename, 'wb') as patch_file:
36 patch_file.write(diff_stdout_binary)
37
38 # Clear the working directory of unstaged changes
39 cmd_runner.run(['git', 'checkout', '--', '.'])
40 try:
41 yield
42 finally:
43 # Try to apply the patch we saved
44 try:
45 cmd_runner.run(['git', 'apply', patch_filename])
46 except CalledProcessError:
47 logger.warning(
48 'Stashed changes conflicted with hook auto-fixes... '
49 'Rolling back fixes...'
50 )
51 # We failed to apply the patch, presumably due to fixes made
52 # by hooks.
53 # Roll back the changes made by hooks.
54 cmd_runner.run(['git', 'checkout', '--', '.'])
55 cmd_runner.run(['git', 'apply', patch_filename])
56 logger.info('Restored changes from {0}.'.format(patch_filename))
57 else:
58 # There weren't any staged files so we don't need to do anything
59 # special
60 yield
61
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pre_commit/staged_files_only.py b/pre_commit/staged_files_only.py
--- a/pre_commit/staged_files_only.py
+++ b/pre_commit/staged_files_only.py
@@ -21,7 +21,10 @@
"""
# Determine if there are unstaged files
retcode, diff_stdout_binary, _ = cmd_runner.run(
- ['git', 'diff', '--ignore-submodules', '--binary', '--exit-code'],
+ [
+ 'git', 'diff', '--ignore-submodules', '--binary', '--exit-code',
+ '--no-color',
+ ],
retcode=None,
encoding=None,
)
| {"golden_diff": "diff --git a/pre_commit/staged_files_only.py b/pre_commit/staged_files_only.py\n--- a/pre_commit/staged_files_only.py\n+++ b/pre_commit/staged_files_only.py\n@@ -21,7 +21,10 @@\n \"\"\"\n # Determine if there are unstaged files\n retcode, diff_stdout_binary, _ = cmd_runner.run(\n- ['git', 'diff', '--ignore-submodules', '--binary', '--exit-code'],\n+ [\n+ 'git', 'diff', '--ignore-submodules', '--binary', '--exit-code',\n+ '--no-color',\n+ ],\n retcode=None,\n encoding=None,\n )\n", "issue": "Stashed changes lost if hook fails\nI've run into this particular (in my eyes, critical) bug.\nIf I want to do a partial commit, e.g. I have 2 files but I only add 1 file to the staging area and the staged file will cause a hook to fail, I loose the changes in the 2nd file because pre-commit fails to reroll the patch it stashed before running.\n\nHere's my terminal log and the steps to reproduce:\n## Version\n\n$ pre-commit -V\npre-commit 0.3.0\n## Commands to reproduce\n\n```\n$ cat unstaged.py\n\"\"\"I am unstaged\"\"\"\n$ echo \"'''I am unstaged, but I have changes'''\" > unstaged.py\n$ echo \"x = 'This is the loooooooooooooooooooooooooooooooooooongest liiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiine eveeeeeeeeer'\" > foo.py \n$ git status\nOn branch master\nChanges not staged for commit:\n (use \"git add <file>...\" to update what will be committed)\n (use \"git checkout -- <file>...\" to discard changes in working directory)\n\n modified: unstaged.py\n modified: foo.py\n\nno changes added to commit (use \"git add\" and/or \"git commit -a\")\n$ git add foo.py \n$ git commit -m \"Adding a long line\"\n[WARNING] Unstaged files detected.\n[INFO] Stashing unstaged files to /home/k/.pre-commit/patch1412683352.\nFlake8...............................................Failed\nhookid: flake8\n\nfoo.py:1:80: E501 line too long (112 > 79 characters)\n\n[WARNING] Stashed changes conflicted with hook auto-fixes... 
Rolling back fixes...\nAn unexpected error has occurred: CalledProcessError: Command: [u'git', u'apply', u'/home/k/.pre-commit/patch1412683352']\nReturn code: 128\nExpected return code: 0\nOutput: (u'', u'fatal: unrecognized input\\n')\n\nCheck the log at ~/.pre-commit/pre-commit.log\n$ git status\nOn branch master\nChanges to be committed:\n (use \"git reset HEAD <file>...\" to unstage)\n\n modified: foo.py\n\n$ echo \"x = 'This is a shorter line, its better'\" > foo.py\n$ git status\nOn branch master\nChanges to be committed:\n (use \"git reset HEAD <file>...\" to unstage)\n\n modified: foo.py\n\nChanges not staged for commit:\n (use \"git add <file>...\" to update what will be committed)\n (use \"git checkout -- <file>...\" to discard changes in working directory)\n\n modified: foo.py\n\n$ git add foo.py \n$ git commit -m \"Fixed the error I got from the flake8 hook\"\nFlake8...............................................Passed\n[master 78568e8] Fixed the error I got from the flake8 hook\n 1 file changed, 1 insertion(+), 1 deletion(-)\n$ git status\nOn branch master\nnothing to commit, working directory clean\n$ cat unstaged.py\n\"\"\"I am unstaged\"\"\"\n```\n## Log\n\n```\n$ cat ~/.pre-commit/pre-commit.log\nTraceback (most recent call last):\n File \"/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/error_handler.py\", line 34, in error_handler\n yield\n File \"/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/main.py\", line 108, in main\n return run(runner, args)\n File \"/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 151, in run\n return _run_hooks(runner, args, write=write, environ=environ)\n File \"/usr/lib/python2.7/contextlib.py\", line 24, in __exit__\n self.gen.next()\n File \"/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/staged_files_only.py\", line 55, in staged_files_only\n cmd_runner.run(['git', 'apply', patch_filename])\n File \"/home/k/.virtualenvs/pre-commit-test/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py\", line 82, in run\n returncode, replaced_cmd, retcode, output=(stdout, stderr),\nCalledProcessError: Command: [u'git', u'apply', u'/home/k/.pre-commit/patch1412683352']\nReturn code: 128\nExpected return code: 0\nOutput: (u'', u'fatal: unrecognized input\\n')\n```\n## .pre-commit-config.yaml\n\n```\n$ cat .pre-commit-config.yaml \n- repo: [email protected]:pre-commit/pre-commit-hooks\n sha: 6343700aa063fe30acc319d2dc84353a35a3d6d0\n hooks:\n - id: flake8\n args: ['--ignore=E712,F821']\n```\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport contextlib\nimport io\nimport logging\nimport time\n\nfrom pre_commit.util import CalledProcessError\n\n\nlogger = logging.getLogger('pre_commit')\n\n\[email protected]\ndef staged_files_only(cmd_runner):\n \"\"\"Clear any unstaged changes from the git working directory inside this\n context.\n\n Args:\n cmd_runner - PrefixedCommandRunner\n \"\"\"\n # Determine if there are unstaged files\n retcode, diff_stdout_binary, _ = cmd_runner.run(\n ['git', 'diff', '--ignore-submodules', '--binary', '--exit-code'],\n retcode=None,\n encoding=None,\n )\n if retcode and diff_stdout_binary.strip():\n patch_filename = cmd_runner.path('patch{0}'.format(int(time.time())))\n logger.warning('Unstaged files detected.')\n logger.info(\n 'Stashing unstaged files to {0}.'.format(patch_filename),\n )\n # Save the current unstaged 
changes as a patch\n with io.open(patch_filename, 'wb') as patch_file:\n patch_file.write(diff_stdout_binary)\n\n # Clear the working directory of unstaged changes\n cmd_runner.run(['git', 'checkout', '--', '.'])\n try:\n yield\n finally:\n # Try to apply the patch we saved\n try:\n cmd_runner.run(['git', 'apply', patch_filename])\n except CalledProcessError:\n logger.warning(\n 'Stashed changes conflicted with hook auto-fixes... '\n 'Rolling back fixes...'\n )\n # We failed to apply the patch, presumably due to fixes made\n # by hooks.\n # Roll back the changes made by hooks.\n cmd_runner.run(['git', 'checkout', '--', '.'])\n cmd_runner.run(['git', 'apply', patch_filename])\n logger.info('Restored changes from {0}.'.format(patch_filename))\n else:\n # There weren't any staged files so we don't need to do anything\n # special\n yield\n", "path": "pre_commit/staged_files_only.py"}], "after_files": [{"content": "from __future__ import unicode_literals\n\nimport contextlib\nimport io\nimport logging\nimport time\n\nfrom pre_commit.util import CalledProcessError\n\n\nlogger = logging.getLogger('pre_commit')\n\n\[email protected]\ndef staged_files_only(cmd_runner):\n \"\"\"Clear any unstaged changes from the git working directory inside this\n context.\n\n Args:\n cmd_runner - PrefixedCommandRunner\n \"\"\"\n # Determine if there are unstaged files\n retcode, diff_stdout_binary, _ = cmd_runner.run(\n [\n 'git', 'diff', '--ignore-submodules', '--binary', '--exit-code',\n '--no-color',\n ],\n retcode=None,\n encoding=None,\n )\n if retcode and diff_stdout_binary.strip():\n patch_filename = cmd_runner.path('patch{0}'.format(int(time.time())))\n logger.warning('Unstaged files detected.')\n logger.info(\n 'Stashing unstaged files to {0}.'.format(patch_filename),\n )\n # Save the current unstaged changes as a patch\n with io.open(patch_filename, 'wb') as patch_file:\n patch_file.write(diff_stdout_binary)\n\n # Clear the working directory of unstaged changes\n cmd_runner.run(['git', 'checkout', '--', '.'])\n try:\n yield\n finally:\n # Try to apply the patch we saved\n try:\n cmd_runner.run(['git', 'apply', patch_filename])\n except CalledProcessError:\n logger.warning(\n 'Stashed changes conflicted with hook auto-fixes... '\n 'Rolling back fixes...'\n )\n # We failed to apply the patch, presumably due to fixes made\n # by hooks.\n # Roll back the changes made by hooks.\n cmd_runner.run(['git', 'checkout', '--', '.'])\n cmd_runner.run(['git', 'apply', patch_filename])\n logger.info('Restored changes from {0}.'.format(patch_filename))\n else:\n # There weren't any staged files so we don't need to do anything\n # special\n yield\n", "path": "pre_commit/staged_files_only.py"}]} | 1,965 | 142 |
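A note on why the patch above works: when `color.diff` or `color.ui` is set to `always`, `git diff` emits ANSI escape sequences, and `git apply` rejects such a patch with exactly the `fatal: unrecognized input` seen in the issue's log. Forcing `--no-color` keeps the stashed patch byte-clean regardless of the user's git configuration. Below is a minimal sketch of the corrected capture step; the helper name and the use of `subprocess` are illustrative, not the project's actual code:

```python
import subprocess

def save_unstaged_patch(patch_path):
    # --no-color is the crux of the fix: with color.diff=always the
    # captured patch would contain ANSI escapes that `git apply`
    # cannot parse when the stash is restored.
    diff = subprocess.run(
        ["git", "diff", "--ignore-submodules", "--binary",
         "--exit-code", "--no-color"],
        capture_output=True,
    )
    if diff.returncode and diff.stdout.strip():
        with open(patch_path, "wb") as f:
            f.write(diff.stdout)  # byte-exact patch, safe to re-apply
        return True
    return False
```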
gh_patches_debug_1628 | rasdani/github-patches | git_diff | apache__tvm-12178 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Exercise TVM under minimal configuration in CI
We have seen a couple bugs due to microTVM being presumed-ON in config.cmake. Namely, you get python errors importing TVM right now when USE_MICRO is OFF. We should have a regression test that verifies basic functionality with everything (or nearly everything) OFF.
Context: apache/tvm#9617
And another micro-related issue of the same kind, which I don't have handy right now.
cc @gigiblender
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ci/jenkins/generate.py`
Content:
```
1 #!/usr/bin/env python3
2 # Licensed to the Apache Software Foundation (ASF) under one
3 # or more contributor license agreements. See the NOTICE file
4 # distributed with this work for additional information
5 # regarding copyright ownership. The ASF licenses this file
6 # to you under the Apache License, Version 2.0 (the
7 # "License"); you may not use this file except in compliance
8 # with the License. You may obtain a copy of the License at
9 #
10 # http://www.apache.org/licenses/LICENSE-2.0
11 #
12 # Unless required by applicable law or agreed to in writing,
13 # software distributed under the License is distributed on an
14 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15 # KIND, either express or implied. See the License for the
16 # specific language governing permissions and limitations
17 # under the License.
18 import jinja2
19 import argparse
20 import difflib
21 import re
22 import datetime
23 import textwrap
24
25 from pathlib import Path
26
27
28 REPO_ROOT = Path(__file__).resolve().parent.parent.parent
29 JENKINSFILE_TEMPLATE = REPO_ROOT / "ci" / "jenkins" / "Jenkinsfile.j2"
30 JENKINSFILE = REPO_ROOT / "Jenkinsfile"
31
32
33 data = {
34 "images": [
35 {
36 "name": "ci_arm",
37 "platform": "ARM",
38 },
39 {
40 "name": "ci_cpu",
41 "platform": "CPU",
42 },
43 {
44 "name": "ci_gpu",
45 "platform": "CPU",
46 },
47 {
48 "name": "ci_hexagon",
49 "platform": "CPU",
50 },
51 {
52 "name": "ci_i386",
53 "platform": "CPU",
54 },
55 {
56 "name": "ci_lint",
57 "platform": "CPU",
58 },
59 {
60 "name": "ci_cortexm",
61 "platform": "CPU",
62 },
63 {
64 "name": "ci_wasm",
65 "platform": "CPU",
66 },
67 ]
68 }
69
70
71 def lines_without_generated_tag(content):
72 return [
73 line for line in content.splitlines(keepends=True) if not line.startswith("// Generated at")
74 ]
75
76
77 if __name__ == "__main__":
78 help = "Regenerate Jenkinsfile from template"
79 parser = argparse.ArgumentParser(description=help)
80 parser.add_argument("--check", action="store_true", help="just verify the output didn't change")
81 args = parser.parse_args()
82
83 with open(JENKINSFILE) as f:
84 content = f.read()
85
86 data["generated_time"] = datetime.datetime.now().isoformat()
87
88 environment = jinja2.Environment(
89 loader=jinja2.FileSystemLoader(REPO_ROOT),
90 undefined=jinja2.StrictUndefined,
91 lstrip_blocks=True,
92 trim_blocks=True,
93 keep_trailing_newline=True,
94 )
95 template = environment.get_template(str(JENKINSFILE_TEMPLATE.relative_to(REPO_ROOT)))
96 new_content = template.render(**data)
97
98 diff = "".join(
99 difflib.unified_diff(
100 lines_without_generated_tag(content), lines_without_generated_tag(new_content)
101 )
102 )
103 if args.check:
104 if not diff:
105 print("Success, the newly generated Jenkinsfile matched the one on disk")
106 exit(0)
107 else:
108 print(
109 textwrap.dedent(
110 """
111 Newly generated Jenkinsfile did not match the one on disk! If you have made
112 edits to the Jenkinsfile, move them to 'jenkins/Jenkinsfile.j2' and
113 regenerate the Jenkinsfile from the template with
114
115 python3 -m pip install -r jenkins/requirements.txt
116 python3 jenkins/generate.py
117
118 Diffed changes:
119 """
120 ).strip()
121 )
122 print(diff)
123 exit(1)
124 else:
125 with open(JENKINSFILE, "w") as f:
126 f.write(new_content)
127 if not diff:
128 print(f"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, no changes made")
129 else:
130 print(f"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, changes:")
131 print(diff)
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ci/jenkins/generate.py b/ci/jenkins/generate.py
--- a/ci/jenkins/generate.py
+++ b/ci/jenkins/generate.py
@@ -40,6 +40,10 @@
"name": "ci_cpu",
"platform": "CPU",
},
+ {
+ "name": "ci_minimal",
+ "platform": "CPU",
+ },
{
"name": "ci_gpu",
"platform": "CPU",
| {"golden_diff": "diff --git a/ci/jenkins/generate.py b/ci/jenkins/generate.py\n--- a/ci/jenkins/generate.py\n+++ b/ci/jenkins/generate.py\n@@ -40,6 +40,10 @@\n \"name\": \"ci_cpu\",\n \"platform\": \"CPU\",\n },\n+ {\n+ \"name\": \"ci_minimal\",\n+ \"platform\": \"CPU\",\n+ },\n {\n \"name\": \"ci_gpu\",\n \"platform\": \"CPU\",\n", "issue": "Exercise TVM under minimal configuration in CI\nWe have seen a couple bugs due to microTVM being presumed-ON in config.cmake. Namely, you get python errors importing TVM right now when USE_MICRO is OFF. We should have a regression test that verifies basic functionality with everything (or nearly everything) OFF.\r\n\r\nContext: apache/tvm#9617\r\nAnd another micro-related issue of the same kind, which i don't have handy right now.\r\n\r\ncc @gigiblender \n", "before_files": [{"content": "#!/usr/bin/env python3\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\nimport jinja2\nimport argparse\nimport difflib\nimport re\nimport datetime\nimport textwrap\n\nfrom pathlib import Path\n\n\nREPO_ROOT = Path(__file__).resolve().parent.parent.parent\nJENKINSFILE_TEMPLATE = REPO_ROOT / \"ci\" / \"jenkins\" / \"Jenkinsfile.j2\"\nJENKINSFILE = REPO_ROOT / \"Jenkinsfile\"\n\n\ndata = {\n \"images\": [\n {\n \"name\": \"ci_arm\",\n \"platform\": \"ARM\",\n },\n {\n \"name\": \"ci_cpu\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_gpu\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_hexagon\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_i386\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_lint\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_cortexm\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_wasm\",\n \"platform\": \"CPU\",\n },\n ]\n}\n\n\ndef lines_without_generated_tag(content):\n return [\n line for line in content.splitlines(keepends=True) if not line.startswith(\"// Generated at\")\n ]\n\n\nif __name__ == \"__main__\":\n help = \"Regenerate Jenkinsfile from template\"\n parser = argparse.ArgumentParser(description=help)\n parser.add_argument(\"--check\", action=\"store_true\", help=\"just verify the output didn't change\")\n args = parser.parse_args()\n\n with open(JENKINSFILE) as f:\n content = f.read()\n\n data[\"generated_time\"] = datetime.datetime.now().isoformat()\n\n environment = jinja2.Environment(\n loader=jinja2.FileSystemLoader(REPO_ROOT),\n undefined=jinja2.StrictUndefined,\n lstrip_blocks=True,\n trim_blocks=True,\n keep_trailing_newline=True,\n )\n template = environment.get_template(str(JENKINSFILE_TEMPLATE.relative_to(REPO_ROOT)))\n new_content = template.render(**data)\n\n diff = \"\".join(\n difflib.unified_diff(\n lines_without_generated_tag(content), lines_without_generated_tag(new_content)\n )\n )\n if args.check:\n if not diff:\n print(\"Success, the newly 
generated Jenkinsfile matched the one on disk\")\n exit(0)\n else:\n print(\n textwrap.dedent(\n \"\"\"\n Newly generated Jenkinsfile did not match the one on disk! If you have made\n edits to the Jenkinsfile, move them to 'jenkins/Jenkinsfile.j2' and\n regenerate the Jenkinsfile from the template with\n\n python3 -m pip install -r jenkins/requirements.txt\n python3 jenkins/generate.py\n\n Diffed changes:\n \"\"\"\n ).strip()\n )\n print(diff)\n exit(1)\n else:\n with open(JENKINSFILE, \"w\") as f:\n f.write(new_content)\n if not diff:\n print(f\"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, no changes made\")\n else:\n print(f\"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, changes:\")\n print(diff)\n", "path": "ci/jenkins/generate.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\nimport jinja2\nimport argparse\nimport difflib\nimport re\nimport datetime\nimport textwrap\n\nfrom pathlib import Path\n\n\nREPO_ROOT = Path(__file__).resolve().parent.parent.parent\nJENKINSFILE_TEMPLATE = REPO_ROOT / \"ci\" / \"jenkins\" / \"Jenkinsfile.j2\"\nJENKINSFILE = REPO_ROOT / \"Jenkinsfile\"\n\n\ndata = {\n \"images\": [\n {\n \"name\": \"ci_arm\",\n \"platform\": \"ARM\",\n },\n {\n \"name\": \"ci_cpu\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_minimal\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_gpu\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_hexagon\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_i386\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_lint\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_cortexm\",\n \"platform\": \"CPU\",\n },\n {\n \"name\": \"ci_wasm\",\n \"platform\": \"CPU\",\n },\n ]\n}\n\n\ndef lines_without_generated_tag(content):\n return [\n line for line in content.splitlines(keepends=True) if not line.startswith(\"// Generated at\")\n ]\n\n\nif __name__ == \"__main__\":\n help = \"Regenerate Jenkinsfile from template\"\n parser = argparse.ArgumentParser(description=help)\n parser.add_argument(\"--check\", action=\"store_true\", help=\"just verify the output didn't change\")\n args = parser.parse_args()\n\n with open(JENKINSFILE) as f:\n content = f.read()\n\n data[\"generated_time\"] = datetime.datetime.now().isoformat()\n\n environment = jinja2.Environment(\n loader=jinja2.FileSystemLoader(REPO_ROOT),\n undefined=jinja2.StrictUndefined,\n lstrip_blocks=True,\n trim_blocks=True,\n keep_trailing_newline=True,\n )\n template = environment.get_template(str(JENKINSFILE_TEMPLATE.relative_to(REPO_ROOT)))\n new_content = template.render(**data)\n\n diff = \"\".join(\n difflib.unified_diff(\n lines_without_generated_tag(content), lines_without_generated_tag(new_content)\n )\n )\n if args.check:\n if not diff:\n print(\"Success, 
the newly generated Jenkinsfile matched the one on disk\")\n exit(0)\n else:\n print(\n textwrap.dedent(\n \"\"\"\n Newly generated Jenkinsfile did not match the one on disk! If you have made\n edits to the Jenkinsfile, move them to 'jenkins/Jenkinsfile.j2' and\n regenerate the Jenkinsfile from the template with\n\n python3 -m pip install -r jenkins/requirements.txt\n python3 jenkins/generate.py\n\n Diffed changes:\n \"\"\"\n ).strip()\n )\n print(diff)\n exit(1)\n else:\n with open(JENKINSFILE, \"w\") as f:\n f.write(new_content)\n if not diff:\n print(f\"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, no changes made\")\n else:\n print(f\"Wrote output to {JENKINSFILE.relative_to(REPO_ROOT)}, changes:\")\n print(diff)\n", "path": "ci/jenkins/generate.py"}]} | 1,556 | 111 |
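The fix above is purely data-driven: `generate.py` renders `Jenkinsfile.j2` from the `images` list, so registering a `ci_minimal` entry is enough for the template to emit a CI stage that exercises TVM with (nearly) everything switched off in config.cmake. A reduced, runnable illustration of that expansion; the template string below is a stand-in, not the real `Jenkinsfile.j2`:

```python
import jinja2

# Each dict becomes one generated stage; the patch simply adds ci_minimal.
images = [
    {"name": "ci_cpu", "platform": "CPU"},
    {"name": "ci_minimal", "platform": "CPU"},  # new minimal-config image
]

env = jinja2.Environment(undefined=jinja2.StrictUndefined)
template = env.from_string(
    "{% for image in images %}stage: {{ image.name }} ({{ image.platform }})\n"
    "{% endfor %}"
)
print(template.render(images=images))
```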
gh_patches_debug_19824 | rasdani/github-patches | git_diff | liqd__a4-meinberlin-899 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make private project setting more clear
Currently the checkbox is kind of hidden and it is not instantly clear what is meant by "Access to Project". The setting should be more obvious and its consequences more clear.
Opin has another way of showing the setting which we could/should adopt.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `meinberlin/apps/dashboard2/forms.py`
Content:
```
1 from django import forms
2 from django.contrib.auth import get_user_model
3 from django.forms import inlineformset_factory
4 from django.utils.translation import ugettext_lazy as _
5
6 from adhocracy4.categories import models as category_models
7 from adhocracy4.forms.fields import DateTimeField
8 from adhocracy4.maps import models as map_models
9 from adhocracy4.modules import models as module_models
10 from adhocracy4.phases import models as phase_models
11 from adhocracy4.projects import models as project_models
12 from meinberlin.apps.maps.widgets import MapChoosePolygonWithPresetWidget
13
14 from . import signals
15 from .components.forms import ModuleDashboardForm
16 from .components.forms import ModuleDashboardFormSet
17 from .components.forms import ProjectDashboardForm
18
19 User = get_user_model()
20
21
22 class ProjectCreateForm(forms.ModelForm):
23
24 class Meta:
25 model = project_models.Project
26 fields = ['name', 'description', 'image', 'image_copyright']
27
28 def __init__(self, organisation, creator,
29 *args, **kwargs):
30 super().__init__(*args, **kwargs)
31 self.organisation = organisation
32 self.creator = creator
33
34 def save(self, commit=True):
35 project = super().save(commit=False)
36
37 project.organisation = self.organisation
38 project.creator = self.creator
39
40 if commit:
41 project.save()
42 if hasattr(self, 'save_m2m'):
43 self.save_m2m()
44
45 return project
46
47
48 class ProjectBasicForm(ProjectDashboardForm):
49
50 class Meta:
51 model = project_models.Project
52 fields = ['name', 'description', 'image', 'image_copyright',
53 'tile_image', 'tile_image_copyright',
54 'is_archived', 'is_public']
55 required_for_project_publish = ['name', 'description']
56
57 def __init__(self, *args, **kwargs):
58 super().__init__(*args, **kwargs)
59 instance = kwargs.get('instance', None)
60 self._project_was_archived = instance and instance.is_archived
61
62 def save(self, commit=True):
63 project = super().save(commit)
64 if not self._project_was_archived and project.is_archived:
65 signals.project_archived.send(sender=None, project=project)
66 return project
67
68
69 class ProjectInformationForm(ProjectDashboardForm):
70
71 class Meta:
72 model = project_models.Project
73 fields = ['information']
74 required_for_project_publish = ['information']
75
76
77 class ProjectResultForm(ProjectDashboardForm):
78
79 class Meta:
80 model = project_models.Project
81 fields = ['result']
82 required_for_project_publish = []
83
84
85 class ModuleBasicForm(ModuleDashboardForm):
86
87 class Meta:
88 model = module_models.Module
89 fields = ['name', 'description']
90 required_for_project_publish = '__all__'
91
92
93 class PhaseForm(forms.ModelForm):
94 end_date = DateTimeField(
95 time_format='%H:%M',
96 required=False,
97 require_all_fields=False,
98 label=(_('End date'), _('End time'))
99 )
100 start_date = DateTimeField(
101 time_format='%H:%M',
102 required=False,
103 require_all_fields=False,
104 label=(_('Start date'), _('Start time'))
105 )
106
107 class Meta:
108 model = phase_models.Phase
109 fields = ['name', 'description', 'start_date', 'end_date',
110 'type', # required for get_phase_name in the tpl
111 ]
112 required_for_project_publish = ['name', 'description', 'start_date',
113 'end_date']
114 widgets = {
115 'type': forms.HiddenInput(),
116 'weight': forms.HiddenInput()
117 }
118
119
120 PhaseFormSet = inlineformset_factory(module_models.Module,
121 phase_models.Phase,
122 form=PhaseForm,
123 formset=ModuleDashboardFormSet,
124 extra=0,
125 can_delete=False,
126 )
127
128
129 class AreaSettingsForm(ModuleDashboardForm):
130
131 def __init__(self, *args, **kwargs):
132 self.module = kwargs['instance']
133 kwargs['instance'] = self.module.settings_instance
134 super().__init__(*args, **kwargs)
135
136 def save(self, commit=True):
137 super().save(commit)
138 return self.module
139
140 def get_project(self):
141 return self.module.project
142
143 class Meta:
144 model = map_models.AreaSettings
145 fields = ['polygon']
146 required_for_project_publish = ['polygon']
147 # widgets = map_models.AreaSettings.widgets()
148 widgets = {'polygon': MapChoosePolygonWithPresetWidget}
149
150
151 class CategoryForm(forms.ModelForm):
152 name = forms.CharField(widget=forms.TextInput(attrs={
153 'placeholder': _('Category')}
154 ))
155
156 @property
157 def media(self):
158 media = super().media
159 media.add_js(['js/formset.js'])
160 return media
161
162 class Meta:
163 model = category_models.Category
164 fields = ['name']
165
166
167 CategoryFormSet = inlineformset_factory(module_models.Module,
168 category_models.Category,
169 form=CategoryForm,
170 formset=ModuleDashboardFormSet,
171 extra=0,
172 )
173
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/meinberlin/apps/dashboard2/forms.py b/meinberlin/apps/dashboard2/forms.py
--- a/meinberlin/apps/dashboard2/forms.py
+++ b/meinberlin/apps/dashboard2/forms.py
@@ -1,5 +1,6 @@
from django import forms
from django.contrib.auth import get_user_model
+from django.forms import RadioSelect
from django.forms import inlineformset_factory
from django.utils.translation import ugettext_lazy as _
@@ -53,6 +54,14 @@
'tile_image', 'tile_image_copyright',
'is_archived', 'is_public']
required_for_project_publish = ['name', 'description']
+ widgets = {
+ 'is_public': RadioSelect(
+ choices=[
+ (True, _('All users can participate (public).')),
+ (False, _('Only invited users can participate (private).'))
+ ]
+ ),
+ }
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
| {"golden_diff": "diff --git a/meinberlin/apps/dashboard2/forms.py b/meinberlin/apps/dashboard2/forms.py\n--- a/meinberlin/apps/dashboard2/forms.py\n+++ b/meinberlin/apps/dashboard2/forms.py\n@@ -1,5 +1,6 @@\n from django import forms\n from django.contrib.auth import get_user_model\n+from django.forms import RadioSelect\n from django.forms import inlineformset_factory\n from django.utils.translation import ugettext_lazy as _\n \n@@ -53,6 +54,14 @@\n 'tile_image', 'tile_image_copyright',\n 'is_archived', 'is_public']\n required_for_project_publish = ['name', 'description']\n+ widgets = {\n+ 'is_public': RadioSelect(\n+ choices=[\n+ (True, _('All users can participate (public).')),\n+ (False, _('Only invited users can participate (private).'))\n+ ]\n+ ),\n+ }\n \n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n", "issue": "Make private project setting more clear\nCurrently the checkbox is kind of hidden and it is not instantly clear what is meant by \"Access to Project\". The setting should be more obvious and it's consequences more clear.\r\n\r\nOpin has another way of showing the setting which we could/should adopt\n", "before_files": [{"content": "from django import forms\nfrom django.contrib.auth import get_user_model\nfrom django.forms import inlineformset_factory\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom adhocracy4.categories import models as category_models\nfrom adhocracy4.forms.fields import DateTimeField\nfrom adhocracy4.maps import models as map_models\nfrom adhocracy4.modules import models as module_models\nfrom adhocracy4.phases import models as phase_models\nfrom adhocracy4.projects import models as project_models\nfrom meinberlin.apps.maps.widgets import MapChoosePolygonWithPresetWidget\n\nfrom . 
import signals\nfrom .components.forms import ModuleDashboardForm\nfrom .components.forms import ModuleDashboardFormSet\nfrom .components.forms import ProjectDashboardForm\n\nUser = get_user_model()\n\n\nclass ProjectCreateForm(forms.ModelForm):\n\n class Meta:\n model = project_models.Project\n fields = ['name', 'description', 'image', 'image_copyright']\n\n def __init__(self, organisation, creator,\n *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.organisation = organisation\n self.creator = creator\n\n def save(self, commit=True):\n project = super().save(commit=False)\n\n project.organisation = self.organisation\n project.creator = self.creator\n\n if commit:\n project.save()\n if hasattr(self, 'save_m2m'):\n self.save_m2m()\n\n return project\n\n\nclass ProjectBasicForm(ProjectDashboardForm):\n\n class Meta:\n model = project_models.Project\n fields = ['name', 'description', 'image', 'image_copyright',\n 'tile_image', 'tile_image_copyright',\n 'is_archived', 'is_public']\n required_for_project_publish = ['name', 'description']\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n instance = kwargs.get('instance', None)\n self._project_was_archived = instance and instance.is_archived\n\n def save(self, commit=True):\n project = super().save(commit)\n if not self._project_was_archived and project.is_archived:\n signals.project_archived.send(sender=None, project=project)\n return project\n\n\nclass ProjectInformationForm(ProjectDashboardForm):\n\n class Meta:\n model = project_models.Project\n fields = ['information']\n required_for_project_publish = ['information']\n\n\nclass ProjectResultForm(ProjectDashboardForm):\n\n class Meta:\n model = project_models.Project\n fields = ['result']\n required_for_project_publish = []\n\n\nclass ModuleBasicForm(ModuleDashboardForm):\n\n class Meta:\n model = module_models.Module\n fields = ['name', 'description']\n required_for_project_publish = '__all__'\n\n\nclass PhaseForm(forms.ModelForm):\n end_date = DateTimeField(\n time_format='%H:%M',\n required=False,\n require_all_fields=False,\n label=(_('End date'), _('End time'))\n )\n start_date = DateTimeField(\n time_format='%H:%M',\n required=False,\n require_all_fields=False,\n label=(_('Start date'), _('Start time'))\n )\n\n class Meta:\n model = phase_models.Phase\n fields = ['name', 'description', 'start_date', 'end_date',\n 'type', # required for get_phase_name in the tpl\n ]\n required_for_project_publish = ['name', 'description', 'start_date',\n 'end_date']\n widgets = {\n 'type': forms.HiddenInput(),\n 'weight': forms.HiddenInput()\n }\n\n\nPhaseFormSet = inlineformset_factory(module_models.Module,\n phase_models.Phase,\n form=PhaseForm,\n formset=ModuleDashboardFormSet,\n extra=0,\n can_delete=False,\n )\n\n\nclass AreaSettingsForm(ModuleDashboardForm):\n\n def __init__(self, *args, **kwargs):\n self.module = kwargs['instance']\n kwargs['instance'] = self.module.settings_instance\n super().__init__(*args, **kwargs)\n\n def save(self, commit=True):\n super().save(commit)\n return self.module\n\n def get_project(self):\n return self.module.project\n\n class Meta:\n model = map_models.AreaSettings\n fields = ['polygon']\n required_for_project_publish = ['polygon']\n # widgets = map_models.AreaSettings.widgets()\n widgets = {'polygon': MapChoosePolygonWithPresetWidget}\n\n\nclass CategoryForm(forms.ModelForm):\n name = forms.CharField(widget=forms.TextInput(attrs={\n 'placeholder': _('Category')}\n ))\n\n @property\n def media(self):\n media = 
super().media\n media.add_js(['js/formset.js'])\n return media\n\n class Meta:\n model = category_models.Category\n fields = ['name']\n\n\nCategoryFormSet = inlineformset_factory(module_models.Module,\n category_models.Category,\n form=CategoryForm,\n formset=ModuleDashboardFormSet,\n extra=0,\n )\n", "path": "meinberlin/apps/dashboard2/forms.py"}], "after_files": [{"content": "from django import forms\nfrom django.contrib.auth import get_user_model\nfrom django.forms import RadioSelect\nfrom django.forms import inlineformset_factory\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom adhocracy4.categories import models as category_models\nfrom adhocracy4.forms.fields import DateTimeField\nfrom adhocracy4.maps import models as map_models\nfrom adhocracy4.modules import models as module_models\nfrom adhocracy4.phases import models as phase_models\nfrom adhocracy4.projects import models as project_models\nfrom meinberlin.apps.maps.widgets import MapChoosePolygonWithPresetWidget\n\nfrom . import signals\nfrom .components.forms import ModuleDashboardForm\nfrom .components.forms import ModuleDashboardFormSet\nfrom .components.forms import ProjectDashboardForm\n\nUser = get_user_model()\n\n\nclass ProjectCreateForm(forms.ModelForm):\n\n class Meta:\n model = project_models.Project\n fields = ['name', 'description', 'image', 'image_copyright']\n\n def __init__(self, organisation, creator,\n *args, **kwargs):\n super().__init__(*args, **kwargs)\n self.organisation = organisation\n self.creator = creator\n\n def save(self, commit=True):\n project = super().save(commit=False)\n\n project.organisation = self.organisation\n project.creator = self.creator\n\n if commit:\n project.save()\n if hasattr(self, 'save_m2m'):\n self.save_m2m()\n\n return project\n\n\nclass ProjectBasicForm(ProjectDashboardForm):\n\n class Meta:\n model = project_models.Project\n fields = ['name', 'description', 'image', 'image_copyright',\n 'tile_image', 'tile_image_copyright',\n 'is_archived', 'is_public']\n required_for_project_publish = ['name', 'description']\n widgets = {\n 'is_public': RadioSelect(\n choices=[\n (True, _('All users can participate (public).')),\n (False, _('Only invited users can participate (private).'))\n ]\n ),\n }\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n instance = kwargs.get('instance', None)\n self._project_was_archived = instance and instance.is_archived\n\n def save(self, commit=True):\n project = super().save(commit)\n if not self._project_was_archived and project.is_archived:\n signals.project_archived.send(sender=None, project=project)\n return project\n\n\nclass ProjectInformationForm(ProjectDashboardForm):\n\n class Meta:\n model = project_models.Project\n fields = ['information']\n required_for_project_publish = ['information']\n\n\nclass ProjectResultForm(ProjectDashboardForm):\n\n class Meta:\n model = project_models.Project\n fields = ['result']\n required_for_project_publish = []\n\n\nclass ModuleBasicForm(ModuleDashboardForm):\n\n class Meta:\n model = module_models.Module\n fields = ['name', 'description']\n required_for_project_publish = '__all__'\n\n\nclass PhaseForm(forms.ModelForm):\n end_date = DateTimeField(\n time_format='%H:%M',\n required=False,\n require_all_fields=False,\n label=(_('End date'), _('End time'))\n )\n start_date = DateTimeField(\n time_format='%H:%M',\n required=False,\n require_all_fields=False,\n label=(_('Start date'), _('Start time'))\n )\n\n class Meta:\n model = phase_models.Phase\n fields = ['name', 
'description', 'start_date', 'end_date',\n 'type', # required for get_phase_name in the tpl\n ]\n required_for_project_publish = ['name', 'description', 'start_date',\n 'end_date']\n widgets = {\n 'type': forms.HiddenInput(),\n 'weight': forms.HiddenInput()\n }\n\n\nPhaseFormSet = inlineformset_factory(module_models.Module,\n phase_models.Phase,\n form=PhaseForm,\n formset=ModuleDashboardFormSet,\n extra=0,\n can_delete=False,\n )\n\n\nclass AreaSettingsForm(ModuleDashboardForm):\n\n def __init__(self, *args, **kwargs):\n self.module = kwargs['instance']\n kwargs['instance'] = self.module.settings_instance\n super().__init__(*args, **kwargs)\n\n def save(self, commit=True):\n super().save(commit)\n return self.module\n\n def get_project(self):\n return self.module.project\n\n class Meta:\n model = map_models.AreaSettings\n fields = ['polygon']\n required_for_project_publish = ['polygon']\n # widgets = map_models.AreaSettings.widgets()\n widgets = {'polygon': MapChoosePolygonWithPresetWidget}\n\n\nclass CategoryForm(forms.ModelForm):\n name = forms.CharField(widget=forms.TextInput(attrs={\n 'placeholder': _('Category')}\n ))\n\n @property\n def media(self):\n media = super().media\n media.add_js(['js/formset.js'])\n return media\n\n class Meta:\n model = category_models.Category\n fields = ['name']\n\n\nCategoryFormSet = inlineformset_factory(module_models.Module,\n category_models.Category,\n form=CategoryForm,\n formset=ModuleDashboardFormSet,\n extra=0,\n )\n", "path": "meinberlin/apps/dashboard2/forms.py"}]} | 1,801 | 226 |
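The patch replaces the default checkbox for `is_public` with a `RadioSelect` whose two options spell out the consequence of each choice, which is what the issue asked for. A standalone sketch of the same widget override on a plain form; it assumes a configured Django project and is not the project's actual `ProjectBasicForm`:

```python
from django import forms

class AccessForm(forms.Form):
    # Rendering a BooleanField as two labelled radio buttons instead of
    # a bare checkbox makes the public/private consequence explicit.
    is_public = forms.BooleanField(
        required=False,
        widget=forms.RadioSelect(
            choices=[
                (True, "All users can participate (public)."),
                (False, "Only invited users can participate (private)."),
            ]
        ),
    )
```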
gh_patches_debug_26793 | rasdani/github-patches | git_diff | pulp__pulpcore-5196 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Overwriting existing packages in backend storage can lead to caching issues
If an existing package is re-added to pulp, the default behavior will overwrite the existing file in backing storage. This is typically fine.
- If using Azure Blobstore, the timestamp of the blob is updated (Last-Modified time and ETag).
- Conversely, some CDN's (notably Azure Front Door) use Last-Modified Time as a signal that a file in origin has updated.
- This can lead to poor cache behavior, and in some cases, incomplete downloads as the CDN attempts to resolve disparate content.
- If we set `AZURE_OVERWRITE_FILES` to `false` this partially mitigates the issue (Last-Modified/ETag are unmodified). However, this results in duplicate copies written to storage (with a suffix to differentiate from the original).
- We should have an option that does "nothing" if the uploaded file already exists (don't overwrite, and don't write a new copy).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pulpcore/plugin/viewsets/content.py`
Content:
```
1 from drf_spectacular.utils import extend_schema
2
3 from django.db import DatabaseError
4 from django.db.utils import IntegrityError
5
6 from pulpcore.app import tasks
7 from pulpcore.plugin.serializers import (
8 ArtifactSerializer,
9 AsyncOperationResponseSerializer,
10 )
11 from pulpcore.plugin.models import Artifact, PulpTemporaryFile
12 from pulpcore.plugin.tasking import dispatch
13 from pulpcore.plugin.viewsets import (
14 ContentViewSet,
15 OperationPostponedResponse,
16 )
17
18
19 class DefaultDeferredContextMixin:
20 """A mixin that provides a method for retrieving the default deferred context."""
21
22 def get_deferred_context(self, request):
23 """
24 Supply context for deferred validation.
25
26 When overwriting this method, it must return a dict, that is JSON serializable by
27 and does _not_ contain 'request' as a key.
28 """
29 return {}
30
31
32 class NoArtifactContentViewSet(DefaultDeferredContextMixin, ContentViewSet):
33 """A ViewSet for content creation that does not require a file to be uploaded."""
34
35 @extend_schema(
36 description="Trigger an asynchronous task to create content,"
37 "optionally create new repository version.",
38 responses={202: AsyncOperationResponseSerializer},
39 )
40 def create(self, request):
41 """Create a content unit."""
42 serializer = self.get_serializer(data=request.data)
43 serializer.is_valid(raise_exception=True)
44
45 exclusive_resources = [
46 item for item in (serializer.validated_data.get(key) for key in ("repository",)) if item
47 ]
48
49 task = dispatch(
50 tasks.base.general_create,
51 exclusive_resources=exclusive_resources,
52 args=(self.queryset.model._meta.app_label, serializer.__class__.__name__),
53 kwargs={
54 "data": {k: v for k, v in request.data.items()},
55 "context": self.get_deferred_context(request),
56 },
57 )
58 return OperationPostponedResponse(task, request)
59
60
61 class NoArtifactContentUploadViewSet(DefaultDeferredContextMixin, ContentViewSet):
62 """A ViewSet for uploads that do not require to store an uploaded content as an Artifact."""
63
64 @extend_schema(
65 description="Trigger an asynchronous task to create content,"
66 "optionally create new repository version.",
67 responses={202: AsyncOperationResponseSerializer},
68 )
69 def create(self, request):
70 """Create a content unit."""
71 serializer = self.get_serializer(data=request.data)
72 serializer.is_valid(raise_exception=True)
73
74 task_payload = {k: v for k, v in request.data.items()}
75
76 file_content = task_payload.pop("file", None)
77 temp_file = PulpTemporaryFile.init_and_validate(file_content)
78 temp_file.save()
79
80 exclusive_resources = [
81 item for item in (serializer.validated_data.get(key) for key in ("repository",)) if item
82 ]
83
84 app_label = self.queryset.model._meta.app_label
85 context = self.get_deferred_context(request)
86 context["pulp_temp_file_pk"] = str(temp_file.pk)
87 task = dispatch(
88 tasks.base.general_create,
89 exclusive_resources=exclusive_resources,
90 args=(app_label, serializer.__class__.__name__),
91 kwargs={"data": task_payload, "context": context},
92 )
93 return OperationPostponedResponse(task, request)
94
95
96 class SingleArtifactContentUploadViewSet(DefaultDeferredContextMixin, ContentViewSet):
97 """A ViewSet which can be used to store an uploaded content as an Artifact."""
98
99 @extend_schema(
100 description="Trigger an asynchronous task to create content,"
101 "optionally create new repository version.",
102 responses={202: AsyncOperationResponseSerializer},
103 )
104 def create(self, request):
105 """Create a content unit."""
106 serializer = self.get_serializer(data=request.data)
107 serializer.is_valid(raise_exception=True)
108
109 task_payload = self.init_content_data(serializer, request)
110
111 exclusive_resources = [
112 item
113 for item in (serializer.validated_data.get(key) for key in ("upload", "repository"))
114 if item
115 ]
116
117 app_label = self.queryset.model._meta.app_label
118 task = dispatch(
119 tasks.base.general_create,
120 exclusive_resources=exclusive_resources,
121 args=(app_label, serializer.__class__.__name__),
122 kwargs={
123 "data": task_payload,
124 "context": self.get_deferred_context(request),
125 },
126 )
127 return OperationPostponedResponse(task, request)
128
129 def init_content_data(self, serializer, request):
130 """Initialize the reference to an Artifact along with relevant task's payload data."""
131 task_payload = {k: v for k, v in request.data.items()}
132 if "file" in task_payload:
133 # in the upload code path make sure, the artifact exists, and the 'file'
134 # parameter is replaced by 'artifact'
135 artifact = Artifact.init_and_validate(task_payload.pop("file"))
136 try:
137 artifact.save()
138 except IntegrityError:
139 # if artifact already exists, let's use it
140 try:
141 artifact = Artifact.objects.get(
142 sha256=artifact.sha256, pulp_domain=request.pulp_domain
143 )
144 artifact.touch()
145 except (Artifact.DoesNotExist, DatabaseError):
146 # the artifact has since been removed from when we first attempted to save it
147 artifact.save()
148
149 task_payload["artifact"] = ArtifactSerializer(
150 artifact, context={"request": request}
151 ).data["pulp_href"]
152 elif "artifact" in serializer.validated_data:
153 serializer.validated_data["artifact"].touch()
154 # In case of a provided upload object, there is no artifact to touch yet.
155
156 return task_payload
157
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pulpcore/plugin/viewsets/content.py b/pulpcore/plugin/viewsets/content.py
--- a/pulpcore/plugin/viewsets/content.py
+++ b/pulpcore/plugin/viewsets/content.py
@@ -133,18 +133,20 @@
# in the upload code path make sure, the artifact exists, and the 'file'
# parameter is replaced by 'artifact'
artifact = Artifact.init_and_validate(task_payload.pop("file"))
+ # if artifact already exists, let's use it
try:
- artifact.save()
- except IntegrityError:
- # if artifact already exists, let's use it
+ artifact = Artifact.objects.get(
+ sha256=artifact.sha256, pulp_domain=request.pulp_domain
+ )
+ artifact.touch()
+ except (Artifact.DoesNotExist, DatabaseError):
try:
+ artifact.save()
+ except IntegrityError:
artifact = Artifact.objects.get(
sha256=artifact.sha256, pulp_domain=request.pulp_domain
)
artifact.touch()
- except (Artifact.DoesNotExist, DatabaseError):
- # the artifact has since been removed from when we first attempted to save it
- artifact.save()
task_payload["artifact"] = ArtifactSerializer(
artifact, context={"request": request}
| {"golden_diff": "diff --git a/pulpcore/plugin/viewsets/content.py b/pulpcore/plugin/viewsets/content.py\n--- a/pulpcore/plugin/viewsets/content.py\n+++ b/pulpcore/plugin/viewsets/content.py\n@@ -133,18 +133,20 @@\n # in the upload code path make sure, the artifact exists, and the 'file'\n # parameter is replaced by 'artifact'\n artifact = Artifact.init_and_validate(task_payload.pop(\"file\"))\n+ # if artifact already exists, let's use it\n try:\n- artifact.save()\n- except IntegrityError:\n- # if artifact already exists, let's use it\n+ artifact = Artifact.objects.get(\n+ sha256=artifact.sha256, pulp_domain=request.pulp_domain\n+ )\n+ artifact.touch()\n+ except (Artifact.DoesNotExist, DatabaseError):\n try:\n+ artifact.save()\n+ except IntegrityError:\n artifact = Artifact.objects.get(\n sha256=artifact.sha256, pulp_domain=request.pulp_domain\n )\n artifact.touch()\n- except (Artifact.DoesNotExist, DatabaseError):\n- # the artifact has since been removed from when we first attempted to save it\n- artifact.save()\n \n task_payload[\"artifact\"] = ArtifactSerializer(\n artifact, context={\"request\": request}\n", "issue": "Overwriting existing packages in backend storage can lead to caching issues\nIf an existing package is re-added to pulp, the default behavior will overwrite the existing file in backing storage. This is typically fine.\r\n- If using Azure Blobstore, the timestamp of the blob is updated (Last-Modified time and ETag).\r\n- Conversely, some CDN's (notably Azure Front Door) use Last-Modified Time as a signal that a file in origin has updated.\r\n- This can lead to poor cache behavior, and in some cases, incomplete downloads as the CDN attempts to resolve disparate content.\r\n- If we set `AZURE_OVERWRITE_FILES` to `false` this partially mitigates the issue (Last-Modified/ETag are unmodified). 
gh_patches_debug_977 | rasdani/github-patches | git_diff | medtagger__MedTagger-442 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Getting random scan for non-existing task key results in 500
## Current Behavior
Providing a non-existing task key results in a 500 HTTP code.
## Expected Behavior
The backend should handle this situation appropriately and return a 404 HTTP code.
## Steps to Reproduce the Problem
 1. Perform a GET `scans/random?task=<task_key>` and provide a non-existing key.
--- END ISSUE ---
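For context, a minimal sketch of why the 500 occurs (illustrative only; the in-memory engine and the stripped-down `Task` model below are stand-ins, not MedTagger code): SQLAlchemy's `Query.one()` raises `NoResultFound` when zero rows match, which bubbles up as an unhandled server error, while `Query.first()` returns `None` and lets the caller answer with a 404.
```python
# Illustrative repro sketch (assumes SQLAlchemy 1.4+); not part of the report.
from sqlalchemy import Column, String, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.exc import NoResultFound

Base = declarative_base()

class Task(Base):  # minimal stand-in for the real Task model
    __tablename__ = 'tasks'
    key = Column(String, primary_key=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    try:
        session.query(Task).filter(Task.key == 'missing').one()
    except NoResultFound:
        print('one() raised NoResultFound -> unhandled -> HTTP 500')
    task = session.query(Task).filter(Task.key == 'missing').first()
    print(task)  # None, which a view can translate into HTTP 404
```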
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `backend/medtagger/repositories/tasks.py`
Content:
```
1 """Module responsible for definition of TaskRepository."""
2 from typing import List
3
4 from medtagger.database import db_session
5 from medtagger.database.models import Task, LabelTag, Dataset
6 from medtagger.exceptions import InternalErrorException
7
8
9 def get_all_tasks(include_disabled: bool = False) -> List[Task]:
10 """Fetch all tasks from database ordered by key."""
11 query = Task.query
12 if not include_disabled:
13 query = query.filter(~Task.disabled)
14 return query.order_by(Task.key).all()
15
16
17 def get_task_by_key(key: str) -> Task:
18 """Fetch Task from database.
19
20 :param key: key for a Task
21 :return: Task object
22 """
23 with db_session() as session:
24 task = session.query(Task).filter(Task.key == key).one()
25 return task
26
27
28 def add_task(key: str, name: str, image_path: str, datasets_keys: List[str], tags: List[LabelTag]) -> Task:
29 """Add new Task to the database.
30
31 :param key: key that will identify such Task
32 :param name: name that will be used in the Use Interface for such Task
33 :param image_path: path to the image that represents such Task (used in User Interface)
34 :param datasets_keys: Keys of Datasets that Task takes Scans from
35 :param tags: Label Tags that will be created and assigned to Task
36 :return: Task object
37 """
38 with db_session() as session:
39 task = Task(key, name, image_path)
40 datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore
41 task.datasets = datasets
42 task.available_tags = tags
43 session.add(task)
44 return task
45
46
47 def assign_label_tag(tag: LabelTag, task_key: str) -> None:
48 """Assign existing Label Tag to Task.
49
50 :param tag: tag that should be assigned to Task
51 :param task_key: key that will identify such Task
52 """
53 with db_session():
54 task = Task.query.filter(Task.key == task_key).one()
55 task.available_tags.append(tag)
56 task.save()
57
58
59 def unassign_label_tag(tag: LabelTag, task_key: str) -> None:
60 """Unassign Label Tag from Task.
61
62 :param tag: tag that should be unassigned from Task
63 :param task_key: key that will identify such Task
64 """
65 with db_session():
66 task = Task.query.filter(Task.key == task_key).one()
67 task.available_tags.remove(tag)
68 task.save()
69
70
71 def update(task_key: str, name: str = None, image_path: str = None, datasets_keys: List[str] = None) -> Task:
72 """Update Datasets where this Task will be available.
73
74 :param task_key: key that will identify such Task
75 :param name: (optional) new name for such Task
76 :param image_path: (optional) new path to the image which shows on the UI
77 :param datasets_keys: (optional) keys of Datasets which should have this Task
78 """
79 with db_session():
80 task = Task.query.filter(Task.key == task_key).one()
81 if name:
82 task.name = name
83 if image_path:
84 task.image_path = image_path
85 if datasets_keys:
86 datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore
87 task.datasets = datasets
88 return task
89
90
91 def disable(task_key: str) -> None:
92 """Disable existing Task."""
93 disabling_query = Task.query.filter(Task.key == task_key)
94 updated = disabling_query.update({'disabled': True}, synchronize_session='fetch')
95 if not updated:
96 raise InternalErrorException(f'Task "{task_key}" was not disabled due to unknown database error.')
97
98
99 def enable(task_key: str) -> None:
100 """Enable existing Task."""
101 enabling_query = Task.query.filter(Task.key == task_key)
102 updated = enabling_query.update({'disabled': False}, synchronize_session='fetch')
103 if not updated:
104 raise InternalErrorException(f'Task "{task_key}" was not enabled due to unknown database error.')
105
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/backend/medtagger/repositories/tasks.py b/backend/medtagger/repositories/tasks.py
--- a/backend/medtagger/repositories/tasks.py
+++ b/backend/medtagger/repositories/tasks.py
@@ -21,7 +21,7 @@
:return: Task object
"""
with db_session() as session:
- task = session.query(Task).filter(Task.key == key).one()
+ task = session.query(Task).filter(Task.key == key).first()
return task
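With `.first()` the repository returns `None` for an unknown key, so the calling view is expected to translate that into a 404. A hedged sketch of such a caller (the Flask-style `abort` helper and the endpoint shape are assumptions; the patch itself only changes the repository):
```python
# Illustrative caller-side handling; names other than get_task_by_key are assumed.
from flask import abort

def random_scan_for_task(task_key: str):
    task = get_task_by_key(task_key)  # repository function patched above
    if task is None:
        abort(404, f'Task "{task_key}" does not exist.')
    return task
```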
| {"golden_diff": "diff --git a/backend/medtagger/repositories/tasks.py b/backend/medtagger/repositories/tasks.py\n--- a/backend/medtagger/repositories/tasks.py\n+++ b/backend/medtagger/repositories/tasks.py\n@@ -21,7 +21,7 @@\n :return: Task object\n \"\"\"\n with db_session() as session:\n- task = session.query(Task).filter(Task.key == key).one()\n+ task = session.query(Task).filter(Task.key == key).first()\n return task\n", "issue": "Getting random scan for non-existing task key results in 500\n## Current Behavior\r\n\r\nProviding non existing task key results in 500 HTTP code.\r\n\r\n## Expected Behavior\r\n\r\nBackend should handle this situation appropriate and return 404 HTTP code.\r\n\r\n## Steps to Reproduce the Problem\r\n\r\n 1. Perform a GET `scans/random?task=<task_key>` and provide non existing key.\r\n\n", "before_files": [{"content": "\"\"\"Module responsible for definition of TaskRepository.\"\"\"\nfrom typing import List\n\nfrom medtagger.database import db_session\nfrom medtagger.database.models import Task, LabelTag, Dataset\nfrom medtagger.exceptions import InternalErrorException\n\n\ndef get_all_tasks(include_disabled: bool = False) -> List[Task]:\n \"\"\"Fetch all tasks from database ordered by key.\"\"\"\n query = Task.query\n if not include_disabled:\n query = query.filter(~Task.disabled)\n return query.order_by(Task.key).all()\n\n\ndef get_task_by_key(key: str) -> Task:\n \"\"\"Fetch Task from database.\n\n :param key: key for a Task\n :return: Task object\n \"\"\"\n with db_session() as session:\n task = session.query(Task).filter(Task.key == key).one()\n return task\n\n\ndef add_task(key: str, name: str, image_path: str, datasets_keys: List[str], tags: List[LabelTag]) -> Task:\n \"\"\"Add new Task to the database.\n\n :param key: key that will identify such Task\n :param name: name that will be used in the Use Interface for such Task\n :param image_path: path to the image that represents such Task (used in User Interface)\n :param datasets_keys: Keys of Datasets that Task takes Scans from\n :param tags: Label Tags that will be created and assigned to Task\n :return: Task object\n \"\"\"\n with db_session() as session:\n task = Task(key, name, image_path)\n datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore\n task.datasets = datasets\n task.available_tags = tags\n session.add(task)\n return task\n\n\ndef assign_label_tag(tag: LabelTag, task_key: str) -> None:\n \"\"\"Assign existing Label Tag to Task.\n\n :param tag: tag that should be assigned to Task\n :param task_key: key that will identify such Task\n \"\"\"\n with db_session():\n task = Task.query.filter(Task.key == task_key).one()\n task.available_tags.append(tag)\n task.save()\n\n\ndef unassign_label_tag(tag: LabelTag, task_key: str) -> None:\n \"\"\"Unassign Label Tag from Task.\n\n :param tag: tag that should be unassigned from Task\n :param task_key: key that will identify such Task\n \"\"\"\n with db_session():\n task = Task.query.filter(Task.key == task_key).one()\n task.available_tags.remove(tag)\n task.save()\n\n\ndef update(task_key: str, name: str = None, image_path: str = None, datasets_keys: List[str] = None) -> Task:\n \"\"\"Update Datasets where this Task will be available.\n\n :param task_key: key that will identify such Task\n :param name: (optional) new name for such Task\n :param image_path: (optional) new path to the image which shows on the UI\n :param datasets_keys: (optional) keys of Datasets which should have this Task\n \"\"\"\n with db_session():\n 
task = Task.query.filter(Task.key == task_key).one()\n if name:\n task.name = name\n if image_path:\n task.image_path = image_path\n if datasets_keys:\n datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore\n task.datasets = datasets\n return task\n\n\ndef disable(task_key: str) -> None:\n \"\"\"Disable existing Task.\"\"\"\n disabling_query = Task.query.filter(Task.key == task_key)\n updated = disabling_query.update({'disabled': True}, synchronize_session='fetch')\n if not updated:\n raise InternalErrorException(f'Task \"{task_key}\" was not disabled due to unknown database error.')\n\n\ndef enable(task_key: str) -> None:\n \"\"\"Enable existing Task.\"\"\"\n enabling_query = Task.query.filter(Task.key == task_key)\n updated = enabling_query.update({'disabled': False}, synchronize_session='fetch')\n if not updated:\n raise InternalErrorException(f'Task \"{task_key}\" was not enabled due to unknown database error.')\n", "path": "backend/medtagger/repositories/tasks.py"}], "after_files": [{"content": "\"\"\"Module responsible for definition of TaskRepository.\"\"\"\nfrom typing import List\n\nfrom medtagger.database import db_session\nfrom medtagger.database.models import Task, LabelTag, Dataset\nfrom medtagger.exceptions import InternalErrorException\n\n\ndef get_all_tasks(include_disabled: bool = False) -> List[Task]:\n \"\"\"Fetch all tasks from database ordered by key.\"\"\"\n query = Task.query\n if not include_disabled:\n query = query.filter(~Task.disabled)\n return query.order_by(Task.key).all()\n\n\ndef get_task_by_key(key: str) -> Task:\n \"\"\"Fetch Task from database.\n\n :param key: key for a Task\n :return: Task object\n \"\"\"\n with db_session() as session:\n task = session.query(Task).filter(Task.key == key).first()\n return task\n\n\ndef add_task(key: str, name: str, image_path: str, datasets_keys: List[str], tags: List[LabelTag]) -> Task:\n \"\"\"Add new Task to the database.\n\n :param key: key that will identify such Task\n :param name: name that will be used in the Use Interface for such Task\n :param image_path: path to the image that represents such Task (used in User Interface)\n :param datasets_keys: Keys of Datasets that Task takes Scans from\n :param tags: Label Tags that will be created and assigned to Task\n :return: Task object\n \"\"\"\n with db_session() as session:\n task = Task(key, name, image_path)\n datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore\n task.datasets = datasets\n task.available_tags = tags\n session.add(task)\n return task\n\n\ndef assign_label_tag(tag: LabelTag, task_key: str) -> None:\n \"\"\"Assign existing Label Tag to Task.\n\n :param tag: tag that should be assigned to Task\n :param task_key: key that will identify such Task\n \"\"\"\n with db_session():\n task = Task.query.filter(Task.key == task_key).one()\n task.available_tags.append(tag)\n task.save()\n\n\ndef unassign_label_tag(tag: LabelTag, task_key: str) -> None:\n \"\"\"Unassign Label Tag from Task.\n\n :param tag: tag that should be unassigned from Task\n :param task_key: key that will identify such Task\n \"\"\"\n with db_session():\n task = Task.query.filter(Task.key == task_key).one()\n task.available_tags.remove(tag)\n task.save()\n\n\ndef update(task_key: str, name: str = None, image_path: str = None, datasets_keys: List[str] = None) -> Task:\n \"\"\"Update Datasets where this Task will be available.\n\n :param task_key: key that will identify such Task\n :param name: (optional) new name for such Task\n 
:param image_path: (optional) new path to the image which shows on the UI\n :param datasets_keys: (optional) keys of Datasets which should have this Task\n \"\"\"\n with db_session():\n task = Task.query.filter(Task.key == task_key).one()\n if name:\n task.name = name\n if image_path:\n task.image_path = image_path\n if datasets_keys:\n datasets = Dataset.query.filter(Dataset.key.in_(datasets_keys)).all() # type: ignore\n task.datasets = datasets\n return task\n\n\ndef disable(task_key: str) -> None:\n \"\"\"Disable existing Task.\"\"\"\n disabling_query = Task.query.filter(Task.key == task_key)\n updated = disabling_query.update({'disabled': True}, synchronize_session='fetch')\n if not updated:\n raise InternalErrorException(f'Task \"{task_key}\" was not disabled due to unknown database error.')\n\n\ndef enable(task_key: str) -> None:\n \"\"\"Enable existing Task.\"\"\"\n enabling_query = Task.query.filter(Task.key == task_key)\n updated = enabling_query.update({'disabled': False}, synchronize_session='fetch')\n if not updated:\n raise InternalErrorException(f'Task \"{task_key}\" was not enabled due to unknown database error.')\n", "path": "backend/medtagger/repositories/tasks.py"}]} | 1,444 | 113 |
gh_patches_debug_471 | rasdani/github-patches | git_diff | pytorch__rl-402 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] Loggers registration
## Describe the bug
The loggers package's [`__init__.py`](https://github.com/facebookresearch/rl/blob/main/torchrl/trainers/loggers/__init__.py) does not re-export the implemented loggers. However, the [docstring from the trainer](https://github.com/facebookresearch/rl/blob/806733f27dfa9a878b75b079de9f18df83f54c2d/torchrl/trainers/helpers/trainers.py#L109) assumes that this kind of import works.
--- END ISSUE ---
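A minimal sketch of the mismatch (illustrative; assumes a pre-fix torchrl checkout is importable): only the abstract `Logger` is re-exported, so the package-level import suggested by the trainer docstring fails.
```python
# Works before and after the fix: Logger is re-exported in __init__.py.
from torchrl.trainers.loggers import Logger

# Fails before the fix with ImportError, because the concrete loggers live in
# submodules that __init__.py never re-exports.
try:
    from torchrl.trainers.loggers import TensorboardLogger
except ImportError:
    # Pre-fix workaround: import from the implementing submodule directly.
    from torchrl.trainers.loggers.tensorboard import TensorboardLogger
```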
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torchrl/trainers/loggers/__init__.py`
Content:
```
1 # Copyright (c) Meta Platforms, Inc. and affiliates.
2 #
3 # This source code is licensed under the MIT license found in the
4 # LICENSE file in the root directory of this source tree.
5
6 from .common import Logger
7
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/torchrl/trainers/loggers/__init__.py b/torchrl/trainers/loggers/__init__.py
--- a/torchrl/trainers/loggers/__init__.py
+++ b/torchrl/trainers/loggers/__init__.py
@@ -4,3 +4,6 @@
# LICENSE file in the root directory of this source tree.
from .common import Logger
+from .csv import CSVLogger
+from .tensorboard import TensorboardLogger
+from .wandb import WandbLogger
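After this change, the package-level imports that the trainer docstring relies on resolve, e.g. (the constructor arguments below are assumptions based on typical logger APIs; the patch itself only adds the re-exports):
```python
from torchrl.trainers.loggers import CSVLogger, TensorboardLogger, WandbLogger

# Hypothetical usage; exact signatures are not shown in the patch.
logger = CSVLogger(exp_name='demo-experiment', log_dir='./logs')
```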
| {"golden_diff": "diff --git a/torchrl/trainers/loggers/__init__.py b/torchrl/trainers/loggers/__init__.py\n--- a/torchrl/trainers/loggers/__init__.py\n+++ b/torchrl/trainers/loggers/__init__.py\n@@ -4,3 +4,6 @@\n # LICENSE file in the root directory of this source tree.\n \n from .common import Logger\n+from .csv import CSVLogger\n+from .tensorboard import TensorboardLogger\n+from .wandb import WandbLogger\n", "issue": "[BUG] Loggers registration\n## Describe the bug\r\n\r\nThe [`__init__.py`](https://github.com/facebookresearch/rl/blob/main/torchrl/trainers/loggers/__init__.py) from loggers does not reference the implemented loggers. However, the [docstring from the trainer](https://github.com/facebookresearch/rl/blob/806733f27dfa9a878b75b079de9f18df83f54c2d/torchrl/trainers/helpers/trainers.py#L109) assumes that this kind of import can be run.\n", "before_files": [{"content": "# Copyright (c) Meta Platforms, Inc. and affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nfrom .common import Logger\n", "path": "torchrl/trainers/loggers/__init__.py"}], "after_files": [{"content": "# Copyright (c) Meta Platforms, Inc. and affiliates.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n\nfrom .common import Logger\nfrom .csv import CSVLogger\nfrom .tensorboard import TensorboardLogger\nfrom .wandb import WandbLogger\n", "path": "torchrl/trainers/loggers/__init__.py"}]} | 451 | 113 |
gh_patches_debug_16425 | rasdani/github-patches | git_diff | pantsbuild__pants-15979 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`./pants run` crashes when `remote_cache_eager_fetch = false`
**Describe the bug**
After setting `remote_cache_eager_fetch = false`, a CI step that `./pants run`s a `pex_binary` has started (intermittently) failing with errors like:
```
Exception: Was not present in either the local or remote store: Digest { hash: Fingerprint<46683dec8706b7ac7c4f6011f68b4b8c10ad423ae8ba57745a6f5e01ba5b2f7b>, size_bytes: 11827 }
```
**Pants version**
`PANTS_SHA=5d8a328d72209863986c8959b20305505bc068ba`
**OS**
Linux
**Additional info**
Some BuildSense links where we've seen the failure:
* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_29_46_827_37a3a5f2d8e440cca373a7fb4a5d3b51/
* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_31_09_277_3793f53b54914135885f2ac951faf210/
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/python/pants/base/exceptions.py`
Content:
```
1 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 from __future__ import annotations
5
6
7 class TargetDefinitionException(Exception):
8 """Indicates an invalid target definition.
9
10 :API: public
11 """
12
13 def __init__(self, target, msg):
14 """
15 :param target: the target in question
16 :param string msg: a description of the target misconfiguration
17 """
18 super().__init__(f"Invalid target {target}: {msg}")
19
20
21 class BuildConfigurationError(Exception):
22 """Indicates an error in a pants installation's configuration."""
23
24
25 class BackendConfigurationError(BuildConfigurationError):
26 """Indicates a plugin backend with a missing or malformed register module."""
27
28
29 class MappingError(Exception):
30 """Indicates an error mapping addressable objects."""
31
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/python/pants/base/exceptions.py b/src/python/pants/base/exceptions.py
--- a/src/python/pants/base/exceptions.py
+++ b/src/python/pants/base/exceptions.py
@@ -3,6 +3,11 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from pants.engine.internals.native_engine import PyFailure
+
class TargetDefinitionException(Exception):
"""Indicates an invalid target definition.
@@ -28,3 +33,15 @@
class MappingError(Exception):
"""Indicates an error mapping addressable objects."""
+
+
+class NativeEngineFailure(Exception):
+ """A wrapper around a `Failure` instance.
+
+ TODO: This type is defined in Python because pyo3 doesn't support declaring Exceptions with
+ additional fields. See https://github.com/PyO3/pyo3/issues/295
+ """
+
+ def __init__(self, msg: str, failure: PyFailure) -> None:
+ super().__init__(msg)
+ self.failure = failure
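A hedged sketch of how the new wrapper could be used; the raising site and the stand-in `failure` value are assumptions for illustration, since the patch only defines the exception type:
```python
# Illustrative only: surfacing a structured engine failure to Python callers.
from pants.base.exceptions import NativeEngineFailure

def handle_engine_result(failure) -> None:
    # `failure` would be a PyFailure produced by the Rust engine (assumed).
    raise NativeEngineFailure('native engine reported a failure', failure)

try:
    handle_engine_result(object())  # placeholder stand-in for a PyFailure
except NativeEngineFailure as exc:
    print(exc, exc.failure)  # the wrapped failure object stays accessible
```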
| {"golden_diff": "diff --git a/src/python/pants/base/exceptions.py b/src/python/pants/base/exceptions.py\n--- a/src/python/pants/base/exceptions.py\n+++ b/src/python/pants/base/exceptions.py\n@@ -3,6 +3,11 @@\n \n from __future__ import annotations\n \n+from typing import TYPE_CHECKING\n+\n+if TYPE_CHECKING:\n+ from pants.engine.internals.native_engine import PyFailure\n+\n \n class TargetDefinitionException(Exception):\n \"\"\"Indicates an invalid target definition.\n@@ -28,3 +33,15 @@\n \n class MappingError(Exception):\n \"\"\"Indicates an error mapping addressable objects.\"\"\"\n+\n+\n+class NativeEngineFailure(Exception):\n+ \"\"\"A wrapper around a `Failure` instance.\n+\n+ TODO: This type is defined in Python because pyo3 doesn't support declaring Exceptions with\n+ additional fields. See https://github.com/PyO3/pyo3/issues/295\n+ \"\"\"\n+\n+ def __init__(self, msg: str, failure: PyFailure) -> None:\n+ super().__init__(msg)\n+ self.failure = failure\n", "issue": "`./pants run` crashes when `remote_cache_eager_fetch = false`\n**Describe the bug**\r\n\r\nAfter setting `remote_cache_eager_fetch = false`, a CI step that `./pants run`s a `pex_binary` has started (intermittently) failing with errors like:\r\n```\r\n Exception: Was not present in either the local or remote store: Digest { hash: Fingerprint<46683dec8706b7ac7c4f6011f68b4b8c10ad423ae8ba57745a6f5e01ba5b2f7b>, size_bytes: 11827 }\r\n```\r\n\r\n**Pants version**\r\n\r\n`PANTS_SHA=5d8a328d72209863986c8959b20305505bc068ba`\r\n\r\n**OS**\r\n\r\nLinux\r\n\r\n**Additional info**\r\n\r\nSome BuildSense links where we've seen the failure:\r\n* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_29_46_827_37a3a5f2d8e440cca373a7fb4a5d3b51/\r\n* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_31_09_277_3793f53b54914135885f2ac951faf210/\r\n* https://app.toolchain.com/organizations/color/repos/color/builds/pants_run_2022_06_27_19_29_46_827_37a3a5f2d8e440cca373a7fb4a5d3b51/\r\n\n", "before_files": [{"content": "# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\nfrom __future__ import annotations\n\n\nclass TargetDefinitionException(Exception):\n \"\"\"Indicates an invalid target definition.\n\n :API: public\n \"\"\"\n\n def __init__(self, target, msg):\n \"\"\"\n :param target: the target in question\n :param string msg: a description of the target misconfiguration\n \"\"\"\n super().__init__(f\"Invalid target {target}: {msg}\")\n\n\nclass BuildConfigurationError(Exception):\n \"\"\"Indicates an error in a pants installation's configuration.\"\"\"\n\n\nclass BackendConfigurationError(BuildConfigurationError):\n \"\"\"Indicates a plugin backend with a missing or malformed register module.\"\"\"\n\n\nclass MappingError(Exception):\n \"\"\"Indicates an error mapping addressable objects.\"\"\"\n", "path": "src/python/pants/base/exceptions.py"}], "after_files": [{"content": "# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\nfrom __future__ import annotations\n\nfrom typing import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from pants.engine.internals.native_engine import PyFailure\n\n\nclass TargetDefinitionException(Exception):\n \"\"\"Indicates an invalid target definition.\n\n :API: public\n \"\"\"\n\n def __init__(self, target, msg):\n \"\"\"\n :param target: the target in question\n :param string msg: a 
description of the target misconfiguration\n \"\"\"\n super().__init__(f\"Invalid target {target}: {msg}\")\n\n\nclass BuildConfigurationError(Exception):\n \"\"\"Indicates an error in a pants installation's configuration.\"\"\"\n\n\nclass BackendConfigurationError(BuildConfigurationError):\n \"\"\"Indicates a plugin backend with a missing or malformed register module.\"\"\"\n\n\nclass MappingError(Exception):\n \"\"\"Indicates an error mapping addressable objects.\"\"\"\n\n\nclass NativeEngineFailure(Exception):\n \"\"\"A wrapper around a `Failure` instance.\n\n TODO: This type is defined in Python because pyo3 doesn't support declaring Exceptions with\n additional fields. See https://github.com/PyO3/pyo3/issues/295\n \"\"\"\n\n def __init__(self, msg: str, failure: PyFailure) -> None:\n super().__init__(msg)\n self.failure = failure\n", "path": "src/python/pants/base/exceptions.py"}]} | 942 | 243 |
gh_patches_debug_2986 | rasdani/github-patches | git_diff | coala__coala-3908 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fails to install and run py.test in a Docker environment.
<!-- Hello! If you're filing a bug, please include every step so as to help us reproduce it on our machines. If you're unsure about how to file an issue, use the issue template. If you need any help regarding usage of coala, check out the documentation or hit us up on chat. You can ignore or delete this text, it is commented and won't appear when the issue is submitted or previewed.
Chat: https://coala.io/chat
Issue Template: https://github.com/coala/coala/blob/master/CONTRIBUTING.rst#filing-issues
Documentation: https://docs.coala.io
-->
When I try to install with `python setup.py install`, it fails with this message.
`UnicodeEncodeError: 'ascii' codec can't encode character '\xfc' in position 15224: ordinal not in range(128)`
The same thing happens when I try to run the unit tests locally.
It needs to be fixed.
--- END ISSUE ---
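A minimal sketch of the failure mode (assumption: a bare Docker image where `LANG`/`LC_ALL` are unset, so the POSIX "C" locale is active): `locale.getlocale()` then typically reports `(None, None)`, and ASCII-oriented encoding chokes on the `'\xfc'` (`ü`) in the maintainer string that `setup.py` embeds.
```python
import locale

# In an unconfigured container the POSIX default locale is active, so:
print(locale.getlocale())  # typically (None, None) in the failing environment

# setup.py embeds 'Mischa Kr\xfcger'; encoding it as ASCII reproduces the error.
try:
    'Mischa Kr\xfcger'.encode('ascii')
except UnicodeEncodeError as exc:
    print(exc)  # 'ascii' codec can't encode character '\xfc' ...
```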
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python3
2
3 import datetime
4 import locale
5 import platform
6 import sys
7 from os import getenv
8 from subprocess import call
9
10 import setuptools.command.build_py
11 from setuptools import find_packages, setup
12 from setuptools.command.test import test as TestCommand
13
14 from coalib import VERSION, assert_supported_version, get_version
15 from coalib.misc.BuildManPage import BuildManPage
16
17 try:
18 locale.getlocale()
19 except (ValueError, UnicodeError):
20 locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
21
22
23 assert_supported_version()
24
25
26 class BuildPyCommand(setuptools.command.build_py.build_py):
27
28 def run(self):
29 if platform.system() != 'Windows':
30 self.run_command('build_manpage')
31 setuptools.command.build_py.build_py.run(self)
32
33
34 class PyTestCommand(TestCommand):
35
36 def run_tests(self):
37 # import here, cause outside the eggs aren't loaded
38 import pytest
39 errno = pytest.main([])
40 sys.exit(errno)
41
42
43 class BuildDocsCommand(setuptools.command.build_py.build_py):
44 apidoc_command = (
45 'sphinx-apidoc', '-f', '-o', 'docs', '--no-toc', 'coalib'
46 )
47 doc_command = ('make', '-C', 'docs', 'html', 'SPHINXOPTS=-W')
48
49 def run(self):
50 errOne = call(self.apidoc_command)
51 errTwo = call(self.doc_command)
52 sys.exit(errOne or errTwo)
53
54
55 # Generate API documentation only if we are running on readthedocs.io
56 on_rtd = getenv('READTHEDOCS', None) is not None
57 if on_rtd:
58 call(BuildDocsCommand.apidoc_command)
59 if 'dev' in VERSION:
60 current_version = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
61 call(['python3', '.misc/adjust_version_number.py', 'coalib/VERSION',
62 '-b {}'.format(current_version)])
63 VERSION = get_version()
64
65 with open('requirements.txt') as requirements:
66 required = requirements.read().splitlines()
67
68 with open('test-requirements.txt') as requirements:
69 test_required = requirements.read().splitlines()
70
71 with open('README.rst') as readme:
72 long_description = readme.read()
73
74
75 if __name__ == '__main__':
76 if platform.system() != 'Windows':
77 data_files = [('.', ['coala.1'])]
78 else:
79 data_files = [('.', [])]
80
81 setup(name='coala',
82 version=VERSION,
83 description='Linting and Fixing Code for All Languages',
84 author='The coala developers',
85 author_email='[email protected]',
86 maintainer='Lasse Schuirmann, Fabian Neuschmidt, Mischa Kr\xfcger'
87 if not on_rtd else 'L.S., F.N., M.K.',
88 maintainer_email=('[email protected], '
89 '[email protected], '
90 '[email protected]'),
91 url='http://coala.io/',
92 platforms='any',
93 packages=find_packages(exclude=['build.*', 'tests', 'tests.*']),
94 install_requires=required,
95 tests_require=test_required,
96 package_data={'coalib': ['default_coafile', 'VERSION',
97 'bearlib/languages/documentation/*.coalang']
98 },
99 license='AGPL-3.0',
100 data_files=data_files,
101 long_description=long_description,
102 entry_points={
103 'console_scripts': [
104 'coala = coalib.coala:main',
105 'coala-ci = coalib.coala_ci:main',
106 'coala-json = coalib.coala_json:main',
107 'coala-format = coalib.coala_format:main',
108 'coala-delete-orig = coalib.coala_delete_orig:main']},
109 # from http://pypi.python.org/pypi?%3Aaction=list_classifiers
110 classifiers=[
111 'Development Status :: 4 - Beta',
112
113 'Environment :: Console',
114 'Environment :: MacOS X',
115 'Environment :: Win32 (MS Windows)',
116 'Environment :: X11 Applications :: Gnome',
117
118 'Intended Audience :: Science/Research',
119 'Intended Audience :: Developers',
120
121 'License :: OSI Approved :: GNU Affero General Public License '
122 'v3 or later (AGPLv3+)',
123
124 'Operating System :: OS Independent',
125
126 'Programming Language :: Python :: Implementation :: CPython',
127 'Programming Language :: Python :: 3.4',
128 'Programming Language :: Python :: 3.5',
129 'Programming Language :: Python :: 3 :: Only',
130
131 'Topic :: Scientific/Engineering :: Information Analysis',
132 'Topic :: Software Development :: Quality Assurance',
133 'Topic :: Text Processing :: Linguistic'],
134 cmdclass={'build_manpage': BuildManPage,
135 'build_py': BuildPyCommand,
136 'docs': BuildDocsCommand,
137 'test': PyTestCommand})
138
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,10 @@
from coalib.misc.BuildManPage import BuildManPage
try:
- locale.getlocale()
+ lc = locale.getlocale()
+ pf = platform.system()
+ if pf != 'Windows' and lc == (None, None):
+ locale.setlocale(locale.LC_ALL, 'C.UTF-8')
except (ValueError, UnicodeError):
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -15,7 +15,10 @@\n from coalib.misc.BuildManPage import BuildManPage\n \n try:\n- locale.getlocale()\n+ lc = locale.getlocale()\n+ pf = platform.system()\n+ if pf != 'Windows' and lc == (None, None):\n+ locale.setlocale(locale.LC_ALL, 'C.UTF-8')\n except (ValueError, UnicodeError):\n locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')\n", "issue": "Fail to install and py.test on docker environment.\n<!-- Hello! If you're filing a bug, please include every step so as to help us reproduce it on our machines. If you're unsure about how to file an issue, use the issue template. If you need any help regarding usage of coala, check out the documentation or hit us up on chat. You can ignore or delete this text, it is commented and won't appear when the issue is submitted or previewed.\r\n\r\nChat: https://coala.io/chat\r\nIssue Template: https://github.com/coala/coala/blob/master/CONTRIBUTING.rst#filing-issues\r\nDocumentation: https://docs.coala.io\r\n-->\r\nWhen I try to install by `python setup.py install`, it is failed with this message.\r\n`UnicodeEncodeError: 'ascii' codec can't encode character '\\xfc' in position 15224: ordinal not in range(128)`\r\nAlso, the same happening when I try to run unit test on local.\r\nIt needs to be fixed.\r\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\nimport datetime\nimport locale\nimport platform\nimport sys\nfrom os import getenv\nfrom subprocess import call\n\nimport setuptools.command.build_py\nfrom setuptools import find_packages, setup\nfrom setuptools.command.test import test as TestCommand\n\nfrom coalib import VERSION, assert_supported_version, get_version\nfrom coalib.misc.BuildManPage import BuildManPage\n\ntry:\n locale.getlocale()\nexcept (ValueError, UnicodeError):\n locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')\n\n\nassert_supported_version()\n\n\nclass BuildPyCommand(setuptools.command.build_py.build_py):\n\n def run(self):\n if platform.system() != 'Windows':\n self.run_command('build_manpage')\n setuptools.command.build_py.build_py.run(self)\n\n\nclass PyTestCommand(TestCommand):\n\n def run_tests(self):\n # import here, cause outside the eggs aren't loaded\n import pytest\n errno = pytest.main([])\n sys.exit(errno)\n\n\nclass BuildDocsCommand(setuptools.command.build_py.build_py):\n apidoc_command = (\n 'sphinx-apidoc', '-f', '-o', 'docs', '--no-toc', 'coalib'\n )\n doc_command = ('make', '-C', 'docs', 'html', 'SPHINXOPTS=-W')\n\n def run(self):\n errOne = call(self.apidoc_command)\n errTwo = call(self.doc_command)\n sys.exit(errOne or errTwo)\n\n\n# Generate API documentation only if we are running on readthedocs.io\non_rtd = getenv('READTHEDOCS', None) is not None\nif on_rtd:\n call(BuildDocsCommand.apidoc_command)\n if 'dev' in VERSION:\n current_version = datetime.datetime.now().strftime('%Y%m%d%H%M%S')\n call(['python3', '.misc/adjust_version_number.py', 'coalib/VERSION',\n '-b {}'.format(current_version)])\n VERSION = get_version()\n\nwith open('requirements.txt') as requirements:\n required = requirements.read().splitlines()\n\nwith open('test-requirements.txt') as requirements:\n test_required = requirements.read().splitlines()\n\nwith open('README.rst') as readme:\n long_description = readme.read()\n\n\nif __name__ == '__main__':\n if platform.system() != 'Windows':\n data_files = [('.', ['coala.1'])]\n else:\n data_files = [('.', [])]\n\n setup(name='coala',\n version=VERSION,\n description='Linting and Fixing Code for All 
Languages',\n author='The coala developers',\n author_email='[email protected]',\n maintainer='Lasse Schuirmann, Fabian Neuschmidt, Mischa Kr\\xfcger'\n if not on_rtd else 'L.S., F.N., M.K.',\n maintainer_email=('[email protected], '\n '[email protected], '\n '[email protected]'),\n url='http://coala.io/',\n platforms='any',\n packages=find_packages(exclude=['build.*', 'tests', 'tests.*']),\n install_requires=required,\n tests_require=test_required,\n package_data={'coalib': ['default_coafile', 'VERSION',\n 'bearlib/languages/documentation/*.coalang']\n },\n license='AGPL-3.0',\n data_files=data_files,\n long_description=long_description,\n entry_points={\n 'console_scripts': [\n 'coala = coalib.coala:main',\n 'coala-ci = coalib.coala_ci:main',\n 'coala-json = coalib.coala_json:main',\n 'coala-format = coalib.coala_format:main',\n 'coala-delete-orig = coalib.coala_delete_orig:main']},\n # from http://pypi.python.org/pypi?%3Aaction=list_classifiers\n classifiers=[\n 'Development Status :: 4 - Beta',\n\n 'Environment :: Console',\n 'Environment :: MacOS X',\n 'Environment :: Win32 (MS Windows)',\n 'Environment :: X11 Applications :: Gnome',\n\n 'Intended Audience :: Science/Research',\n 'Intended Audience :: Developers',\n\n 'License :: OSI Approved :: GNU Affero General Public License '\n 'v3 or later (AGPLv3+)',\n\n 'Operating System :: OS Independent',\n\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3 :: Only',\n\n 'Topic :: Scientific/Engineering :: Information Analysis',\n 'Topic :: Software Development :: Quality Assurance',\n 'Topic :: Text Processing :: Linguistic'],\n cmdclass={'build_manpage': BuildManPage,\n 'build_py': BuildPyCommand,\n 'docs': BuildDocsCommand,\n 'test': PyTestCommand})\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\nimport datetime\nimport locale\nimport platform\nimport sys\nfrom os import getenv\nfrom subprocess import call\n\nimport setuptools.command.build_py\nfrom setuptools import find_packages, setup\nfrom setuptools.command.test import test as TestCommand\n\nfrom coalib import VERSION, assert_supported_version, get_version\nfrom coalib.misc.BuildManPage import BuildManPage\n\ntry:\n lc = locale.getlocale()\n pf = platform.system()\n if pf != 'Windows' and lc == (None, None):\n locale.setlocale(locale.LC_ALL, 'C.UTF-8')\nexcept (ValueError, UnicodeError):\n locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')\n\n\nassert_supported_version()\n\n\nclass BuildPyCommand(setuptools.command.build_py.build_py):\n\n def run(self):\n if platform.system() != 'Windows':\n self.run_command('build_manpage')\n setuptools.command.build_py.build_py.run(self)\n\n\nclass PyTestCommand(TestCommand):\n\n def run_tests(self):\n # import here, cause outside the eggs aren't loaded\n import pytest\n errno = pytest.main([])\n sys.exit(errno)\n\n\nclass BuildDocsCommand(setuptools.command.build_py.build_py):\n apidoc_command = (\n 'sphinx-apidoc', '-f', '-o', 'docs', '--no-toc', 'coalib'\n )\n doc_command = ('make', '-C', 'docs', 'html', 'SPHINXOPTS=-W')\n\n def run(self):\n errOne = call(self.apidoc_command)\n errTwo = call(self.doc_command)\n sys.exit(errOne or errTwo)\n\n\n# Generate API documentation only if we are running on readthedocs.io\non_rtd = getenv('READTHEDOCS', None) is not None\nif on_rtd:\n call(BuildDocsCommand.apidoc_command)\n if 'dev' in VERSION:\n current_version = 
datetime.datetime.now().strftime('%Y%m%d%H%M%S')\n call(['python3', '.misc/adjust_version_number.py', 'coalib/VERSION',\n '-b {}'.format(current_version)])\n VERSION = get_version()\n\nwith open('requirements.txt') as requirements:\n required = requirements.read().splitlines()\n\nwith open('test-requirements.txt') as requirements:\n test_required = requirements.read().splitlines()\n\nwith open('README.rst') as readme:\n long_description = readme.read()\n\n\nif __name__ == '__main__':\n if platform.system() != 'Windows':\n data_files = [('.', ['coala.1'])]\n else:\n data_files = [('.', [])]\n\n setup(name='coala',\n version=VERSION,\n description='Linting and Fixing Code for All Languages',\n author='The coala developers',\n author_email='[email protected]',\n maintainer='Lasse Schuirmann, Fabian Neuschmidt, Mischa Kr\\xfcger'\n if not on_rtd else 'L.S., F.N., M.K.',\n maintainer_email=('[email protected], '\n '[email protected], '\n '[email protected]'),\n url='http://coala.io/',\n platforms='any',\n packages=find_packages(exclude=['build.*', 'tests', 'tests.*']),\n install_requires=required,\n tests_require=test_required,\n package_data={'coalib': ['default_coafile', 'VERSION',\n 'bearlib/languages/documentation/*.coalang']\n },\n license='AGPL-3.0',\n data_files=data_files,\n long_description=long_description,\n entry_points={\n 'console_scripts': [\n 'coala = coalib.coala:main',\n 'coala-ci = coalib.coala_ci:main',\n 'coala-json = coalib.coala_json:main',\n 'coala-format = coalib.coala_format:main',\n 'coala-delete-orig = coalib.coala_delete_orig:main']},\n # from http://pypi.python.org/pypi?%3Aaction=list_classifiers\n classifiers=[\n 'Development Status :: 4 - Beta',\n\n 'Environment :: Console',\n 'Environment :: MacOS X',\n 'Environment :: Win32 (MS Windows)',\n 'Environment :: X11 Applications :: Gnome',\n\n 'Intended Audience :: Science/Research',\n 'Intended Audience :: Developers',\n\n 'License :: OSI Approved :: GNU Affero General Public License '\n 'v3 or later (AGPLv3+)',\n\n 'Operating System :: OS Independent',\n\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3 :: Only',\n\n 'Topic :: Scientific/Engineering :: Information Analysis',\n 'Topic :: Software Development :: Quality Assurance',\n 'Topic :: Text Processing :: Linguistic'],\n cmdclass={'build_manpage': BuildManPage,\n 'build_py': BuildPyCommand,\n 'docs': BuildDocsCommand,\n 'test': PyTestCommand})\n", "path": "setup.py"}]} | 1,853 | 124 |
gh_patches_debug_20763 | rasdani/github-patches | git_diff | nv-legate__cunumeric-278 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add some missing array methods
A few numpy array methods were discovered to be missing:
```
"loads",
"mafromtxt",
"matmul",
"ndfromtxt",
```
These should be copied over as necessary, and the comparison blocklist updated.
ref: https://github.com/nv-legate/cunumeric/pull/224#discussion_r830657282
--- END ISSUE ---
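A small sketch (illustrative; assumes both libraries are importable) for confirming which of the listed names exist in each namespace before updating the blocklist:
```python
import numpy
import cunumeric

for name in ('loads', 'mafromtxt', 'matmul', 'ndfromtxt'):
    print(f'{name}: numpy={hasattr(numpy, name)} cunumeric={hasattr(cunumeric, name)}')
```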
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/cunumeric/source/comparison/_comparison_generator.py`
Content:
```
1 import importlib
2
3 import numpy
4
5 blocklist = [
6 "abs",
7 "add_docstring",
8 "add_newdoc",
9 "add_newdoc_ufunc",
10 "alen",
11 "alltrue",
12 "bitwise_not",
13 "compare_chararrays",
14 "cumproduct",
15 "fastCopyAndTranspose",
16 "get_array_wrap",
17 "iterable",
18 "loads",
19 "mafromtxt",
20 "max",
21 "min",
22 "ndfromtxt",
23 "ndim",
24 "product",
25 "recfromcsv",
26 "recfromtxt",
27 "round",
28 "safe_eval",
29 "set_numeric_ops",
30 "size",
31 "sometrue",
32 "test",
33 ]
34
35
36 def check_ufunc(obj, n):
37 try:
38 return isinstance(getattr(obj, n), numpy.ufunc)
39 except: # noqa E722
40 return False
41
42
43 def _filter(obj, n, ufuncs=False):
44 is_ufunc = check_ufunc(obj, n)
45 if not ufuncs:
46 is_ufunc = not is_ufunc
47
48 try:
49 return (
50 n not in blocklist
51 and callable(getattr(obj, n)) # callable
52 and not isinstance(getattr(obj, n), type) # not class
53 and n[0].islower() # starts with lower char
54 and not n.startswith("__") # not special methods
55 and is_ufunc
56 )
57 except: # noqa: E722
58 return False
59
60
61 def _get_functions(obj, ufuncs=False):
62 return set([n for n in dir(obj) if (_filter(obj, n, ufuncs))])
63
64
65 def _import(mod, klass):
66 try:
67 obj = importlib.import_module(mod)
68 except ModuleNotFoundError:
69 return None, None
70
71 if klass:
72 obj = getattr(obj, klass)
73 return obj, ":meth:`{}.{}.{{}}`".format(mod, klass)
74 else:
75 # ufunc is not a function
76 return obj, ":obj:`{}.{{}}`".format(mod)
77
78
79 def _section(
80 header, mod_ext, other_lib, klass=None, exclude_mod=None, ufuncs=False
81 ):
82 base_mod = "numpy" + mod_ext
83 other_mod = other_lib + mod_ext
84
85 base_funcs = []
86 base_obj, base_fmt = _import(base_mod, klass)
87 base_funcs = _get_functions(base_obj, ufuncs)
88 lg_obj, lg_fmt = _import(other_mod, klass)
89
90 lg_funcs = []
91 for f in _get_functions(lg_obj):
92 obj = getattr(lg_obj, f)
93 if getattr(obj, "_cunumeric_implemented", False):
94 lg_funcs.append(f)
95 lg_funcs = set(lg_funcs)
96
97 if exclude_mod:
98 exclude_obj, _ = _import(exclude_mod, klass)
99 exclude_funcs = _get_functions(exclude_obj)
100 base_funcs -= exclude_funcs
101 lg_funcs -= exclude_funcs
102
103 buf = [
104 header,
105 "~" * len(header),
106 "",
107 ]
108
109 buf += [
110 ".. currentmodule:: cunumeric",
111 "",
112 ".. autosummary::",
113 " :toctree: generated/",
114 "",
115 ]
116
117 buf += [
118 ".. csv-table::",
119 " :header: NumPy, {}, single-GPU/CPU, multi-GPU/CPU".format(
120 other_mod
121 ),
122 "",
123 ]
124 for f in sorted(base_funcs):
125 base_cell = base_fmt.format(f)
126 lg_cell = r"\-"
127 single_gpu_cell = ""
128 multi_gpu_cell = ""
129 if f in lg_funcs:
130 lg_cell = lg_fmt.format(f)
131 obj = getattr(lg_obj, f)
132 if obj.__doc__ is not None and "Single GPU" in obj.__doc__:
133 multi_gpu_cell = "No"
134 single_gpu_cell = "Yes"
135 elif obj.__doc__ is not None and "Multiple GPUs" in obj.__doc__:
136 multi_gpu_cell = "Yes"
137 single_gpu_cell = "Yes"
138 if getattr(base_obj, f) is getattr(lg_obj, f):
139 lg_cell = "{} (*alias of* {})".format(lg_cell, base_cell)
140 line = " {}, {}, {}, {}".format(
141 base_cell, lg_cell, single_gpu_cell, multi_gpu_cell
142 )
143 buf.append(line)
144
145 buf += [
146 "",
147 ".. Summary:",
148 " Number of NumPy functions: {}".format(len(base_funcs)),
149 " Number of functions covered by "
150 f"{other_lib}: {len(lg_funcs & base_funcs)}",
151 ]
152 buf += [
153 "",
154 ]
155 return buf
156
157
158 def generate(other_lib):
159 buf = []
160 buf += [
161 "NumPy vs cuNumeric APIs",
162 "------------------------",
163 "",
164 ]
165 buf += _section("Module-Level", "", other_lib)
166 buf += _section("Ufuncs", "", other_lib, ufuncs=True)
167 buf += _section("Multi-Dimensional Array", "", other_lib, klass="ndarray")
168 buf += _section("Linear Algebra", ".linalg", other_lib)
169 buf += _section("Discrete Fourier Transform", ".fft", other_lib)
170 buf += _section("Random Sampling", ".random", other_lib)
171
172 return "\n".join(buf)
173
174
175 if __name__ == "__main__":
176 print(generate("cunumeric"))
177
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/cunumeric/source/comparison/_comparison_generator.py b/docs/cunumeric/source/comparison/_comparison_generator.py
--- a/docs/cunumeric/source/comparison/_comparison_generator.py
+++ b/docs/cunumeric/source/comparison/_comparison_generator.py
@@ -15,11 +15,8 @@
"fastCopyAndTranspose",
"get_array_wrap",
"iterable",
- "loads",
- "mafromtxt",
"max",
"min",
- "ndfromtxt",
"ndim",
"product",
"recfromcsv",
@@ -32,6 +29,13 @@
"test",
]
+# these do not have valid intersphinx references
+missing_numpy_refs = {
+ "loads",
+ "mafromtxt",
+ "ndfromtxt",
+}
+
def check_ufunc(obj, n):
try:
@@ -122,7 +126,10 @@
"",
]
for f in sorted(base_funcs):
- base_cell = base_fmt.format(f)
+ if f not in missing_numpy_refs:
+ base_cell = base_fmt.format(f)
+ else:
+ base_cell = f"``numpy.{f}``"
lg_cell = r"\-"
single_gpu_cell = ""
multi_gpu_cell = ""
| {"golden_diff": "diff --git a/docs/cunumeric/source/comparison/_comparison_generator.py b/docs/cunumeric/source/comparison/_comparison_generator.py\n--- a/docs/cunumeric/source/comparison/_comparison_generator.py\n+++ b/docs/cunumeric/source/comparison/_comparison_generator.py\n@@ -15,11 +15,8 @@\n \"fastCopyAndTranspose\",\n \"get_array_wrap\",\n \"iterable\",\n- \"loads\",\n- \"mafromtxt\",\n \"max\",\n \"min\",\n- \"ndfromtxt\",\n \"ndim\",\n \"product\",\n \"recfromcsv\",\n@@ -32,6 +29,13 @@\n \"test\",\n ]\n \n+# these do not have valid intersphinx references\n+missing_numpy_refs = {\n+ \"loads\",\n+ \"mafromtxt\",\n+ \"ndfromtxt\",\n+}\n+\n \n def check_ufunc(obj, n):\n try:\n@@ -122,7 +126,10 @@\n \"\",\n ]\n for f in sorted(base_funcs):\n- base_cell = base_fmt.format(f)\n+ if f not in missing_numpy_refs:\n+ base_cell = base_fmt.format(f)\n+ else:\n+ base_cell = f\"``numpy.{f}``\"\n lg_cell = r\"\\-\"\n single_gpu_cell = \"\"\n multi_gpu_cell = \"\"\n", "issue": "Add some missing array methods \nA few numpy array methods were discovered to be missing:\r\n```\r\n \"loads\",\r\n \"mafromtxt\",\r\n \"matmul\",\r\n \"ndfromtxt\",\r\n```\r\n\r\nThese should be copied over as necessary, and the comparison blocklist updated. \r\n\r\nref: https://github.com/nv-legate/cunumeric/pull/224#discussion_r830657282\r\n\n", "before_files": [{"content": "import importlib\n\nimport numpy\n\nblocklist = [\n \"abs\",\n \"add_docstring\",\n \"add_newdoc\",\n \"add_newdoc_ufunc\",\n \"alen\",\n \"alltrue\",\n \"bitwise_not\",\n \"compare_chararrays\",\n \"cumproduct\",\n \"fastCopyAndTranspose\",\n \"get_array_wrap\",\n \"iterable\",\n \"loads\",\n \"mafromtxt\",\n \"max\",\n \"min\",\n \"ndfromtxt\",\n \"ndim\",\n \"product\",\n \"recfromcsv\",\n \"recfromtxt\",\n \"round\",\n \"safe_eval\",\n \"set_numeric_ops\",\n \"size\",\n \"sometrue\",\n \"test\",\n]\n\n\ndef check_ufunc(obj, n):\n try:\n return isinstance(getattr(obj, n), numpy.ufunc)\n except: # noqa E722\n return False\n\n\ndef _filter(obj, n, ufuncs=False):\n is_ufunc = check_ufunc(obj, n)\n if not ufuncs:\n is_ufunc = not is_ufunc\n\n try:\n return (\n n not in blocklist\n and callable(getattr(obj, n)) # callable\n and not isinstance(getattr(obj, n), type) # not class\n and n[0].islower() # starts with lower char\n and not n.startswith(\"__\") # not special methods\n and is_ufunc\n )\n except: # noqa: E722\n return False\n\n\ndef _get_functions(obj, ufuncs=False):\n return set([n for n in dir(obj) if (_filter(obj, n, ufuncs))])\n\n\ndef _import(mod, klass):\n try:\n obj = importlib.import_module(mod)\n except ModuleNotFoundError:\n return None, None\n\n if klass:\n obj = getattr(obj, klass)\n return obj, \":meth:`{}.{}.{{}}`\".format(mod, klass)\n else:\n # ufunc is not a function\n return obj, \":obj:`{}.{{}}`\".format(mod)\n\n\ndef _section(\n header, mod_ext, other_lib, klass=None, exclude_mod=None, ufuncs=False\n):\n base_mod = \"numpy\" + mod_ext\n other_mod = other_lib + mod_ext\n\n base_funcs = []\n base_obj, base_fmt = _import(base_mod, klass)\n base_funcs = _get_functions(base_obj, ufuncs)\n lg_obj, lg_fmt = _import(other_mod, klass)\n\n lg_funcs = []\n for f in _get_functions(lg_obj):\n obj = getattr(lg_obj, f)\n if getattr(obj, \"_cunumeric_implemented\", False):\n lg_funcs.append(f)\n lg_funcs = set(lg_funcs)\n\n if exclude_mod:\n exclude_obj, _ = _import(exclude_mod, klass)\n exclude_funcs = _get_functions(exclude_obj)\n base_funcs -= exclude_funcs\n lg_funcs -= exclude_funcs\n\n buf = [\n header,\n \"~\" * len(header),\n 
\"\",\n ]\n\n buf += [\n \".. currentmodule:: cunumeric\",\n \"\",\n \".. autosummary::\",\n \" :toctree: generated/\",\n \"\",\n ]\n\n buf += [\n \".. csv-table::\",\n \" :header: NumPy, {}, single-GPU/CPU, multi-GPU/CPU\".format(\n other_mod\n ),\n \"\",\n ]\n for f in sorted(base_funcs):\n base_cell = base_fmt.format(f)\n lg_cell = r\"\\-\"\n single_gpu_cell = \"\"\n multi_gpu_cell = \"\"\n if f in lg_funcs:\n lg_cell = lg_fmt.format(f)\n obj = getattr(lg_obj, f)\n if obj.__doc__ is not None and \"Single GPU\" in obj.__doc__:\n multi_gpu_cell = \"No\"\n single_gpu_cell = \"Yes\"\n elif obj.__doc__ is not None and \"Multiple GPUs\" in obj.__doc__:\n multi_gpu_cell = \"Yes\"\n single_gpu_cell = \"Yes\"\n if getattr(base_obj, f) is getattr(lg_obj, f):\n lg_cell = \"{} (*alias of* {})\".format(lg_cell, base_cell)\n line = \" {}, {}, {}, {}\".format(\n base_cell, lg_cell, single_gpu_cell, multi_gpu_cell\n )\n buf.append(line)\n\n buf += [\n \"\",\n \".. Summary:\",\n \" Number of NumPy functions: {}\".format(len(base_funcs)),\n \" Number of functions covered by \"\n f\"{other_lib}: {len(lg_funcs & base_funcs)}\",\n ]\n buf += [\n \"\",\n ]\n return buf\n\n\ndef generate(other_lib):\n buf = []\n buf += [\n \"NumPy vs cuNumeric APIs\",\n \"------------------------\",\n \"\",\n ]\n buf += _section(\"Module-Level\", \"\", other_lib)\n buf += _section(\"Ufuncs\", \"\", other_lib, ufuncs=True)\n buf += _section(\"Multi-Dimensional Array\", \"\", other_lib, klass=\"ndarray\")\n buf += _section(\"Linear Algebra\", \".linalg\", other_lib)\n buf += _section(\"Discrete Fourier Transform\", \".fft\", other_lib)\n buf += _section(\"Random Sampling\", \".random\", other_lib)\n\n return \"\\n\".join(buf)\n\n\nif __name__ == \"__main__\":\n print(generate(\"cunumeric\"))\n", "path": "docs/cunumeric/source/comparison/_comparison_generator.py"}], "after_files": [{"content": "import importlib\n\nimport numpy\n\nblocklist = [\n \"abs\",\n \"add_docstring\",\n \"add_newdoc\",\n \"add_newdoc_ufunc\",\n \"alen\",\n \"alltrue\",\n \"bitwise_not\",\n \"compare_chararrays\",\n \"cumproduct\",\n \"fastCopyAndTranspose\",\n \"get_array_wrap\",\n \"iterable\",\n \"max\",\n \"min\",\n \"ndim\",\n \"product\",\n \"recfromcsv\",\n \"recfromtxt\",\n \"round\",\n \"safe_eval\",\n \"set_numeric_ops\",\n \"size\",\n \"sometrue\",\n \"test\",\n]\n\n# these do not have valid intersphinx references\nmissing_numpy_refs = {\n \"loads\",\n \"mafromtxt\",\n \"ndfromtxt\",\n}\n\n\ndef check_ufunc(obj, n):\n try:\n return isinstance(getattr(obj, n), numpy.ufunc)\n except: # noqa E722\n return False\n\n\ndef _filter(obj, n, ufuncs=False):\n is_ufunc = check_ufunc(obj, n)\n if not ufuncs:\n is_ufunc = not is_ufunc\n\n try:\n return (\n n not in blocklist\n and callable(getattr(obj, n)) # callable\n and not isinstance(getattr(obj, n), type) # not class\n and n[0].islower() # starts with lower char\n and not n.startswith(\"__\") # not special methods\n and is_ufunc\n )\n except: # noqa: E722\n return False\n\n\ndef _get_functions(obj, ufuncs=False):\n return set([n for n in dir(obj) if (_filter(obj, n, ufuncs))])\n\n\ndef _import(mod, klass):\n try:\n obj = importlib.import_module(mod)\n except ModuleNotFoundError:\n return None, None\n\n if klass:\n obj = getattr(obj, klass)\n return obj, \":meth:`{}.{}.{{}}`\".format(mod, klass)\n else:\n # ufunc is not a function\n return obj, \":obj:`{}.{{}}`\".format(mod)\n\n\ndef _section(\n header, mod_ext, other_lib, klass=None, exclude_mod=None, ufuncs=False\n):\n base_mod = \"numpy\" + mod_ext\n 
other_mod = other_lib + mod_ext\n\n base_funcs = []\n base_obj, base_fmt = _import(base_mod, klass)\n base_funcs = _get_functions(base_obj, ufuncs)\n lg_obj, lg_fmt = _import(other_mod, klass)\n\n lg_funcs = []\n for f in _get_functions(lg_obj):\n obj = getattr(lg_obj, f)\n if getattr(obj, \"_cunumeric_implemented\", False):\n lg_funcs.append(f)\n lg_funcs = set(lg_funcs)\n\n if exclude_mod:\n exclude_obj, _ = _import(exclude_mod, klass)\n exclude_funcs = _get_functions(exclude_obj)\n base_funcs -= exclude_funcs\n lg_funcs -= exclude_funcs\n\n buf = [\n header,\n \"~\" * len(header),\n \"\",\n ]\n\n buf += [\n \".. currentmodule:: cunumeric\",\n \"\",\n \".. autosummary::\",\n \" :toctree: generated/\",\n \"\",\n ]\n\n buf += [\n \".. csv-table::\",\n \" :header: NumPy, {}, single-GPU/CPU, multi-GPU/CPU\".format(\n other_mod\n ),\n \"\",\n ]\n for f in sorted(base_funcs):\n if f not in missing_numpy_refs:\n base_cell = base_fmt.format(f)\n else:\n base_cell = f\"``numpy.{f}``\"\n lg_cell = r\"\\-\"\n single_gpu_cell = \"\"\n multi_gpu_cell = \"\"\n if f in lg_funcs:\n lg_cell = lg_fmt.format(f)\n obj = getattr(lg_obj, f)\n if obj.__doc__ is not None and \"Single GPU\" in obj.__doc__:\n multi_gpu_cell = \"No\"\n single_gpu_cell = \"Yes\"\n elif obj.__doc__ is not None and \"Multiple GPUs\" in obj.__doc__:\n multi_gpu_cell = \"Yes\"\n single_gpu_cell = \"Yes\"\n if getattr(base_obj, f) is getattr(lg_obj, f):\n lg_cell = \"{} (*alias of* {})\".format(lg_cell, base_cell)\n line = \" {}, {}, {}, {}\".format(\n base_cell, lg_cell, single_gpu_cell, multi_gpu_cell\n )\n buf.append(line)\n\n buf += [\n \"\",\n \".. Summary:\",\n \" Number of NumPy functions: {}\".format(len(base_funcs)),\n \" Number of functions covered by \"\n f\"{other_lib}: {len(lg_funcs & base_funcs)}\",\n ]\n buf += [\n \"\",\n ]\n return buf\n\n\ndef generate(other_lib):\n buf = []\n buf += [\n \"NumPy vs cuNumeric APIs\",\n \"------------------------\",\n \"\",\n ]\n buf += _section(\"Module-Level\", \"\", other_lib)\n buf += _section(\"Ufuncs\", \"\", other_lib, ufuncs=True)\n buf += _section(\"Multi-Dimensional Array\", \"\", other_lib, klass=\"ndarray\")\n buf += _section(\"Linear Algebra\", \".linalg\", other_lib)\n buf += _section(\"Discrete Fourier Transform\", \".fft\", other_lib)\n buf += _section(\"Random Sampling\", \".random\", other_lib)\n\n return \"\\n\".join(buf)\n\n\nif __name__ == \"__main__\":\n print(generate(\"cunumeric\"))\n", "path": "docs/cunumeric/source/comparison/_comparison_generator.py"}]} | 1,970 | 294 |
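The hunk above routes the three names in `missing_numpy_refs` around the intersphinx cross-reference formatting. A minimal sketch of that branch, using the module-level `:obj:` format string that `_import` builds when no class is given (the assertions are illustrative, not from the source):

```python
# Sketch of the fixed cell-formatting branch from the golden diff above.
missing_numpy_refs = {"loads", "mafromtxt", "ndfromtxt"}
base_fmt = ":obj:`numpy.{}`"  # what _import() returns for module-level names

def base_cell(name: str) -> str:
    # Names with a valid intersphinx target become cross-references; the
    # three names above are emitted as plain literals instead.
    if name not in missing_numpy_refs:
        return base_fmt.format(name)
    return f"``numpy.{name}``"

assert base_cell("ndim") == ":obj:`numpy.ndim`"
assert base_cell("loads") == "``numpy.loads``"
```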
gh_patches_debug_2672 | rasdani/github-patches | git_diff | e-valuation__EvaP-1666 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make Typescript code Prettier
We should add automated formatting for our typescript files. I think https://prettier.io/ is pretty good, but the choice is open for discussion. The formatting should be done in `manage.py format` and be checked in CI.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `evap/evaluation/management/commands/format.py`
Content:
```
1 import subprocess # nosec
2
3 from django.core.management.base import BaseCommand
4
5
6 class Command(BaseCommand):
7 args = ""
8 help = "Runs the code formatter"
9 requires_migrations_checks = False
10
11 def handle(self, *args, **options):
12 subprocess.run(["black", "evap"], check=False) # nosec
13 subprocess.run(["isort", "."], check=False) # nosec
14
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/evap/evaluation/management/commands/format.py b/evap/evaluation/management/commands/format.py
--- a/evap/evaluation/management/commands/format.py
+++ b/evap/evaluation/management/commands/format.py
@@ -11,3 +11,4 @@
def handle(self, *args, **options):
subprocess.run(["black", "evap"], check=False) # nosec
subprocess.run(["isort", "."], check=False) # nosec
+ subprocess.run(["npx", "prettier", "--write", "evap/static/ts/src"], check=False) # nosec
| {"golden_diff": "diff --git a/evap/evaluation/management/commands/format.py b/evap/evaluation/management/commands/format.py\n--- a/evap/evaluation/management/commands/format.py\n+++ b/evap/evaluation/management/commands/format.py\n@@ -11,3 +11,4 @@\n def handle(self, *args, **options):\n subprocess.run([\"black\", \"evap\"], check=False) # nosec\n subprocess.run([\"isort\", \".\"], check=False) # nosec\n+ subprocess.run([\"npx\", \"prettier\", \"--write\", \"evap/static/ts/src\"], check=False) # nosec\n", "issue": "Make Typescript code Prettier\nWe should add automated formatting for our typescript files. I think https://prettier.io/ is pretty good, but the choice is open for discussion. The formatting should be done in `manage.py format` and be checked in CI.\n", "before_files": [{"content": "import subprocess # nosec\n\nfrom django.core.management.base import BaseCommand\n\n\nclass Command(BaseCommand):\n args = \"\"\n help = \"Runs the code formatter\"\n requires_migrations_checks = False\n\n def handle(self, *args, **options):\n subprocess.run([\"black\", \"evap\"], check=False) # nosec\n subprocess.run([\"isort\", \".\"], check=False) # nosec\n", "path": "evap/evaluation/management/commands/format.py"}], "after_files": [{"content": "import subprocess # nosec\n\nfrom django.core.management.base import BaseCommand\n\n\nclass Command(BaseCommand):\n args = \"\"\n help = \"Runs the code formatter\"\n requires_migrations_checks = False\n\n def handle(self, *args, **options):\n subprocess.run([\"black\", \"evap\"], check=False) # nosec\n subprocess.run([\"isort\", \".\"], check=False) # nosec\n subprocess.run([\"npx\", \"prettier\", \"--write\", \"evap/static/ts/src\"], check=False) # nosec\n", "path": "evap/evaluation/management/commands/format.py"}]} | 432 | 145 |
gh_patches_debug_29734 | rasdani/github-patches | git_diff | bridgecrewio__checkov-3007 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CKV_AZURE_116 fails with Terraform Azure provider >= v.2.97.0
**Describe the issue**
This issue is regarding CKV_AZURE_116.
Since v2.97.0 of the Azure Terraform provider, the Azure policies add-on is enabled by setting
```terraform
azure_policy_enabled = true
```
as a top-level property in a `azurerm_kubernetes_cluster` resource definition, instead of the following
```terraform
addon_profile {
azure_policy {
enabled = true
}
}
```
The check should be updated to reflect the change.
**Examples**
```terraform
resource "azurerm_kubernetes_cluster" "example" {
azure_policy_enabled = true
}
```
should pass the check for CKV_AZURE_116.
**Version:**
2.0.1160
**Additional context**
None
I will submit a PR to fix this.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py`
Content:
```
1 from checkov.common.models.enums import CheckCategories
2 from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck
3
4
5 class AKSUsesAzurePoliciesAddon(BaseResourceValueCheck):
6 def __init__(self):
7 name = "Ensure that AKS uses Azure Policies Add-on"
8 id = "CKV_AZURE_116"
9 supported_resources = ['azurerm_kubernetes_cluster']
10 categories = [CheckCategories.NETWORKING]
11 super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
12
13 def get_inspected_key(self):
14 return "addon_profile/[0]/azure_policy/[0]/enabled"
15
16
17 check = AKSUsesAzurePoliciesAddon()
18
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py b/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py
--- a/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py
+++ b/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py
@@ -1,8 +1,10 @@
-from checkov.common.models.enums import CheckCategories
-from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck
+from typing import Dict, List, Any
+from checkov.common.models.enums import CheckCategories, CheckResult
+from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
-class AKSUsesAzurePoliciesAddon(BaseResourceValueCheck):
+
+class AKSUsesAzurePoliciesAddon(BaseResourceCheck):
def __init__(self):
name = "Ensure that AKS uses Azure Policies Add-on"
id = "CKV_AZURE_116"
@@ -10,8 +12,20 @@
categories = [CheckCategories.NETWORKING]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
- def get_inspected_key(self):
- return "addon_profile/[0]/azure_policy/[0]/enabled"
+ def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:
+ # since Azure provider v2.97.0
+ azure_policy_enabled = conf.get("azure_policy_enabled", [None])[0]
+ if azure_policy_enabled:
+ self.evaluated_keys = ["azure_policy_enabled"]
+ return CheckResult.PASSED
+ # up to and including Azure provider v2.96.0
+ self.evaluated_keys = ["addon_profile/[0]/azure_policy/[0]/enabled"]
+ addon_profile = conf.get("addon_profile", [None])[0]
+ if addon_profile and isinstance(addon_profile, dict):
+ azure_policy = addon_profile.get("azure_policy", [None])[0]
+ if azure_policy and isinstance(azure_policy, dict) and azure_policy.get("enabled", [None])[0]:
+ return CheckResult.PASSED
+ return CheckResult.FAILED
check = AKSUsesAzurePoliciesAddon()
| {"golden_diff": "diff --git a/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py b/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py\n--- a/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py\n+++ b/checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py\n@@ -1,8 +1,10 @@\n-from checkov.common.models.enums import CheckCategories\n-from checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n+from typing import Dict, List, Any\n \n+from checkov.common.models.enums import CheckCategories, CheckResult\n+from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n \n-class AKSUsesAzurePoliciesAddon(BaseResourceValueCheck):\n+\n+class AKSUsesAzurePoliciesAddon(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure that AKS uses Azure Policies Add-on\"\n id = \"CKV_AZURE_116\"\n@@ -10,8 +12,20 @@\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n \n- def get_inspected_key(self):\n- return \"addon_profile/[0]/azure_policy/[0]/enabled\"\n+ def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:\n+ # since Azure provider v2.97.0\n+ azure_policy_enabled = conf.get(\"azure_policy_enabled\", [None])[0]\n+ if azure_policy_enabled:\n+ self.evaluated_keys = [\"azure_policy_enabled\"]\n+ return CheckResult.PASSED\n+ # up to and including Azure provider v2.96.0\n+ self.evaluated_keys = [\"addon_profile/[0]/azure_policy/[0]/enabled\"]\n+ addon_profile = conf.get(\"addon_profile\", [None])[0]\n+ if addon_profile and isinstance(addon_profile, dict):\n+ azure_policy = addon_profile.get(\"azure_policy\", [None])[0]\n+ if azure_policy and isinstance(azure_policy, dict) and azure_policy.get(\"enabled\", [None])[0]:\n+ return CheckResult.PASSED\n+ return CheckResult.FAILED\n \n \n check = AKSUsesAzurePoliciesAddon()\n", "issue": "CKV_AZURE_116 fails with Terraform Azure provider >= v.2.97.0\n**Describe the issue**\r\nThis issue is regarding CKV_AZURE_116.\r\nSince v2.97.0 of the Azure Terraform provider, the Azure policies add-on is enabled by setting\r\n\r\n```terraform\r\nazure_policy_enabled = true\r\n```\r\n\r\nas a top-level property in a `azurerm_kubernetes_cluster` resource definition, instead of the following\r\n\r\n```terraform\r\naddon_profile {\r\n azure_policy {\r\n enabled = true\r\n }\r\n}\r\n```\r\nThe check should be updated to reflect the change.\r\n\r\n**Examples**\r\n```terraform\r\nresource \"azurerm_kubernetes_cluster\" \"example\" {\r\n azure_policy_enabled = true\r\n}\r\n```\r\nshould pass the check for CKV_AZURE_116.\r\n\r\n**Version:**\r\n2.0.1160\r\n\r\n**Additional context**\r\nNone\r\n\r\nI will submit a PR to fix this.\n", "before_files": [{"content": "from checkov.common.models.enums import CheckCategories\nfrom checkov.terraform.checks.resource.base_resource_value_check import BaseResourceValueCheck\n\n\nclass AKSUsesAzurePoliciesAddon(BaseResourceValueCheck):\n def __init__(self):\n name = \"Ensure that AKS uses Azure Policies Add-on\"\n id = \"CKV_AZURE_116\"\n supported_resources = ['azurerm_kubernetes_cluster']\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def get_inspected_key(self):\n return \"addon_profile/[0]/azure_policy/[0]/enabled\"\n\n\ncheck = AKSUsesAzurePoliciesAddon()\n", "path": 
"checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py"}], "after_files": [{"content": "from typing import Dict, List, Any\n\nfrom checkov.common.models.enums import CheckCategories, CheckResult\nfrom checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck\n\n\nclass AKSUsesAzurePoliciesAddon(BaseResourceCheck):\n def __init__(self):\n name = \"Ensure that AKS uses Azure Policies Add-on\"\n id = \"CKV_AZURE_116\"\n supported_resources = ['azurerm_kubernetes_cluster']\n categories = [CheckCategories.NETWORKING]\n super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)\n\n def scan_resource_conf(self, conf: Dict[str, List[Any]]) -> CheckResult:\n # since Azure provider v2.97.0\n azure_policy_enabled = conf.get(\"azure_policy_enabled\", [None])[0]\n if azure_policy_enabled:\n self.evaluated_keys = [\"azure_policy_enabled\"]\n return CheckResult.PASSED\n # up to and including Azure provider v2.96.0\n self.evaluated_keys = [\"addon_profile/[0]/azure_policy/[0]/enabled\"]\n addon_profile = conf.get(\"addon_profile\", [None])[0]\n if addon_profile and isinstance(addon_profile, dict):\n azure_policy = addon_profile.get(\"azure_policy\", [None])[0]\n if azure_policy and isinstance(azure_policy, dict) and azure_policy.get(\"enabled\", [None])[0]:\n return CheckResult.PASSED\n return CheckResult.FAILED\n\n\ncheck = AKSUsesAzurePoliciesAddon()\n", "path": "checkov/terraform/checks/resource/azure/AKSUsesAzurePoliciesAddon.py"}]} | 660 | 509 |
gh_patches_debug_15245 | rasdani/github-patches | git_diff | kornia__kornia-2232 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Cleanup on setup metadata
_Originally posted by @johnnv1 in https://github.com/kornia/kornia/pull/2225#discussion_r1117693700_
-----
right now I think we have some dead lines in the setup, for example:
- https://github.com/kornia/kornia/blob/master/setup.py#L16
- https://github.com/kornia/kornia/blob/master/setup.cfg#L57
- This isn't used anymore https://github.com/kornia/kornia/tree/master/packaging
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kornia/__init__.py`
Content:
```
1 # NOTE: kornia filters and geometry must go first since are the core of the library
2 # and by changing the import order you might get into a circular dependencies issue.
3 from . import filters
4 from . import geometry
5 from . import grad_estimator
6
7 # import the other modules for convenience
8 from . import augmentation, color, contrib, core, enhance, feature, io, losses, metrics, morphology, tracking, utils, x
9
10 # NOTE: we are going to expose to top level very few things
11 from kornia.constants import pi
12 from kornia.testing import xla_is_available
13 from kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image
14
15 # Version variable
16 import sys
17
18 if sys.version_info >= (3, 8): # pragma: >=3.8 cover
19 import importlib.metadata as importlib_metadata
20 else: # pragma: <3.8 cover
21 import importlib_metadata
22
23 __version__ = importlib_metadata.version('kornia')
24
```
Path: `kornia/utils/_compat.py`
Content:
```
1 from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar
2
3 import torch
4 from torch import Tensor
5
6 from packaging import version
7
8
9 def torch_version() -> str:
10 """Parse the `torch.__version__` variable and removes +cu*/cpu."""
11 return torch.__version__.split('+')[0]
12
13
14 def torch_version_lt(major: int, minor: int, patch: int) -> bool:
15 _version = version.parse(torch_version())
16 return _version < version.parse(f"{major}.{minor}.{patch}")
17
18
19 def torch_version_le(major: int, minor: int, patch: int) -> bool:
20 _version = version.parse(torch_version())
21 return _version <= version.parse(f"{major}.{minor}.{patch}")
22
23
24 def torch_version_ge(major: int, minor: int, patch: Optional[int] = None) -> bool:
25 _version = version.parse(torch_version())
26 if patch is None:
27 return _version >= version.parse(f"{major}.{minor}")
28 else:
29 return _version >= version.parse(f"{major}.{minor}.{patch}")
30
31
32 if TYPE_CHECKING:
33 # TODO: remove this branch when kornia relies on torch >= 1.10.0
34 def torch_meshgrid(tensors: List[Tensor], indexing: Optional[str] = None) -> Tuple[Tensor, ...]:
35 ...
36
37 else:
38 if torch_version_ge(1, 10, 0):
39
40 def torch_meshgrid(tensors: List[Tensor], indexing: str):
41 return torch.meshgrid(tensors, indexing=indexing)
42
43 else:
44 # TODO: remove this branch when kornia relies on torch >= 1.10.0
45 def torch_meshgrid(tensors: List[Tensor], indexing: str):
46 return torch.meshgrid(tensors)
47
48
49 if TYPE_CHECKING:
50 # TODO: remove this branch when kornia relies on torch >= 1.10.0
51 _T = TypeVar('_T')
52 torch_inference_mode: Callable[..., ContextManager[_T]]
53 else:
54 if torch_version_ge(1, 10, 0):
55 torch_inference_mode = torch.inference_mode
56 else:
57 # TODO: remove this branch when kornia relies on torch >= 1.10.0
58 torch_inference_mode = torch.no_grad
59
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kornia/__init__.py b/kornia/__init__.py
--- a/kornia/__init__.py
+++ b/kornia/__init__.py
@@ -13,11 +13,4 @@
from kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image
# Version variable
-import sys
-
-if sys.version_info >= (3, 8): # pragma: >=3.8 cover
- import importlib.metadata as importlib_metadata
-else: # pragma: <3.8 cover
- import importlib_metadata
-
-__version__ = importlib_metadata.version('kornia')
+__version__ = "0.6.11.dev"
diff --git a/kornia/utils/_compat.py b/kornia/utils/_compat.py
--- a/kornia/utils/_compat.py
+++ b/kornia/utils/_compat.py
@@ -1,9 +1,8 @@
from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar
import torch
-from torch import Tensor
-
from packaging import version
+from torch import Tensor
def torch_version() -> str:
| {"golden_diff": "diff --git a/kornia/__init__.py b/kornia/__init__.py\n--- a/kornia/__init__.py\n+++ b/kornia/__init__.py\n@@ -13,11 +13,4 @@\n from kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image\n \n # Version variable\n-import sys\n-\n-if sys.version_info >= (3, 8): # pragma: >=3.8 cover\n- import importlib.metadata as importlib_metadata\n-else: # pragma: <3.8 cover\n- import importlib_metadata\n-\n-__version__ = importlib_metadata.version('kornia')\n+__version__ = \"0.6.11.dev\"\ndiff --git a/kornia/utils/_compat.py b/kornia/utils/_compat.py\n--- a/kornia/utils/_compat.py\n+++ b/kornia/utils/_compat.py\n@@ -1,9 +1,8 @@\n from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar\n \n import torch\n-from torch import Tensor\n-\n from packaging import version\n+from torch import Tensor\n \n \n def torch_version() -> str:\n", "issue": "Cleanup on setup metadata\n_Originally posted by @johnnv1 in https://github.com/kornia/kornia/pull/2225#discussion_r1117693700_\r\n\r\n\r\n-----\r\nright now I think we have some deadlines in the setup, for example:\r\n- https://github.com/kornia/kornia/blob/master/setup.py#L16\r\n- https://github.com/kornia/kornia/blob/master/setup.cfg#L57\r\n- This isn't used anymore https://github.com/kornia/kornia/tree/master/packaging \n", "before_files": [{"content": "# NOTE: kornia filters and geometry must go first since are the core of the library\n# and by changing the import order you might get into a circular dependencies issue.\nfrom . import filters\nfrom . import geometry\nfrom . import grad_estimator\n\n# import the other modules for convenience\nfrom . import augmentation, color, contrib, core, enhance, feature, io, losses, metrics, morphology, tracking, utils, x\n\n# NOTE: we are going to expose to top level very few things\nfrom kornia.constants import pi\nfrom kornia.testing import xla_is_available\nfrom kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image\n\n# Version variable\nimport sys\n\nif sys.version_info >= (3, 8): # pragma: >=3.8 cover\n import importlib.metadata as importlib_metadata\nelse: # pragma: <3.8 cover\n import importlib_metadata\n\n__version__ = importlib_metadata.version('kornia')\n", "path": "kornia/__init__.py"}, {"content": "from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar\n\nimport torch\nfrom torch import Tensor\n\nfrom packaging import version\n\n\ndef torch_version() -> str:\n \"\"\"Parse the `torch.__version__` variable and removes +cu*/cpu.\"\"\"\n return torch.__version__.split('+')[0]\n\n\ndef torch_version_lt(major: int, minor: int, patch: int) -> bool:\n _version = version.parse(torch_version())\n return _version < version.parse(f\"{major}.{minor}.{patch}\")\n\n\ndef torch_version_le(major: int, minor: int, patch: int) -> bool:\n _version = version.parse(torch_version())\n return _version <= version.parse(f\"{major}.{minor}.{patch}\")\n\n\ndef torch_version_ge(major: int, minor: int, patch: Optional[int] = None) -> bool:\n _version = version.parse(torch_version())\n if patch is None:\n return _version >= version.parse(f\"{major}.{minor}\")\n else:\n return _version >= version.parse(f\"{major}.{minor}.{patch}\")\n\n\nif TYPE_CHECKING:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n def torch_meshgrid(tensors: List[Tensor], indexing: Optional[str] = None) -> Tuple[Tensor, ...]:\n ...\n\nelse:\n if torch_version_ge(1, 10, 0):\n\n def 
torch_meshgrid(tensors: List[Tensor], indexing: str):\n return torch.meshgrid(tensors, indexing=indexing)\n\n else:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n def torch_meshgrid(tensors: List[Tensor], indexing: str):\n return torch.meshgrid(tensors)\n\n\nif TYPE_CHECKING:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n _T = TypeVar('_T')\n torch_inference_mode: Callable[..., ContextManager[_T]]\nelse:\n if torch_version_ge(1, 10, 0):\n torch_inference_mode = torch.inference_mode\n else:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n torch_inference_mode = torch.no_grad\n", "path": "kornia/utils/_compat.py"}], "after_files": [{"content": "# NOTE: kornia filters and geometry must go first since are the core of the library\n# and by changing the import order you might get into a circular dependencies issue.\nfrom . import filters\nfrom . import geometry\nfrom . import grad_estimator\n\n# import the other modules for convenience\nfrom . import augmentation, color, contrib, core, enhance, feature, io, losses, metrics, morphology, tracking, utils, x\n\n# NOTE: we are going to expose to top level very few things\nfrom kornia.constants import pi\nfrom kornia.testing import xla_is_available\nfrom kornia.utils import eye_like, vec_like, create_meshgrid, image_to_tensor, tensor_to_image\n\n# Version variable\n__version__ = \"0.6.11.dev\"\n", "path": "kornia/__init__.py"}, {"content": "from typing import TYPE_CHECKING, Callable, ContextManager, List, Optional, Tuple, TypeVar\n\nimport torch\nfrom packaging import version\nfrom torch import Tensor\n\n\ndef torch_version() -> str:\n \"\"\"Parse the `torch.__version__` variable and removes +cu*/cpu.\"\"\"\n return torch.__version__.split('+')[0]\n\n\ndef torch_version_lt(major: int, minor: int, patch: int) -> bool:\n _version = version.parse(torch_version())\n return _version < version.parse(f\"{major}.{minor}.{patch}\")\n\n\ndef torch_version_le(major: int, minor: int, patch: int) -> bool:\n _version = version.parse(torch_version())\n return _version <= version.parse(f\"{major}.{minor}.{patch}\")\n\n\ndef torch_version_ge(major: int, minor: int, patch: Optional[int] = None) -> bool:\n _version = version.parse(torch_version())\n if patch is None:\n return _version >= version.parse(f\"{major}.{minor}\")\n else:\n return _version >= version.parse(f\"{major}.{minor}.{patch}\")\n\n\nif TYPE_CHECKING:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n def torch_meshgrid(tensors: List[Tensor], indexing: Optional[str] = None) -> Tuple[Tensor, ...]:\n ...\n\nelse:\n if torch_version_ge(1, 10, 0):\n\n def torch_meshgrid(tensors: List[Tensor], indexing: str):\n return torch.meshgrid(tensors, indexing=indexing)\n\n else:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n def torch_meshgrid(tensors: List[Tensor], indexing: str):\n return torch.meshgrid(tensors)\n\n\nif TYPE_CHECKING:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n _T = TypeVar('_T')\n torch_inference_mode: Callable[..., ContextManager[_T]]\nelse:\n if torch_version_ge(1, 10, 0):\n torch_inference_mode = torch.inference_mode\n else:\n # TODO: remove this branch when kornia relies on torch >= 1.10.0\n torch_inference_mode = torch.no_grad\n", "path": "kornia/utils/_compat.py"}]} | 1,284 | 265 |
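The version lookup moves from runtime package metadata to a plain constant, which also drops the `importlib_metadata` backport for Python < 3.8. Shown side by side below; the string `"0.6.11.dev"` is taken from the diff, not chosen here:

```python
# Before: resolved at import time from the installed distribution's metadata.
#   import importlib.metadata as importlib_metadata   # 3.8+; backport otherwise
#   __version__ = importlib_metadata.version('kornia')
#
# After: a hardcoded module-level constant, so the value no longer depends on
# how (or whether) the package was installed.
__version__ = "0.6.11.dev"
```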
gh_patches_debug_20585 | rasdani/github-patches | git_diff | e-valuation__EvaP-794 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Deal with update.sh
update.sh is out of date. We can either
- remove it
- update it
- replace it with something else
having a script would be pretty cool to document what needs to be done when updating a production server.
maybe this can go into a management command.
an idea for additional automation would be a git post-commit-hook that checks out the release branch and updates everything when someone pushes to the production server via ssh. Logs of the update could be sent via email to the admins.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `evap/evaluation/management/commands/refresh_results_cache.py`
Content:
```
1 from django.core.management.base import BaseCommand
2 from django.core.cache import cache
3
4 from evap.evaluation.models import Course
5 from evap.evaluation.tools import calculate_results
6
7
8 class Command(BaseCommand):
9 args = ''
10 help = 'Clears the cache and pre-warms it with the results of all courses'
11
12 def handle(self, *args, **options):
13 self.stdout.write("Clearing cache...")
14 cache.clear()
15
16 self.stdout.write("Calculating results for all courses...")
17 for course in Course.objects.all():
18 calculate_results(course)
19
20 self.stdout.write("Done with updating cache.")
21
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/evap/evaluation/management/commands/refresh_results_cache.py b/evap/evaluation/management/commands/refresh_results_cache.py
--- a/evap/evaluation/management/commands/refresh_results_cache.py
+++ b/evap/evaluation/management/commands/refresh_results_cache.py
@@ -1,4 +1,5 @@
from django.core.management.base import BaseCommand
+from django.core.serializers.base import ProgressBar
from django.core.cache import cache
from evap.evaluation.models import Course
@@ -12,9 +13,15 @@
def handle(self, *args, **options):
self.stdout.write("Clearing cache...")
cache.clear()
+ total_count = Course.objects.count()
self.stdout.write("Calculating results for all courses...")
- for course in Course.objects.all():
+
+ self.stdout.ending = None
+ progress_bar = ProgressBar(self.stdout, total_count)
+
+ for counter, course in enumerate(Course.objects.all()):
+ progress_bar.update(counter + 1)
calculate_results(course)
- self.stdout.write("Done with updating cache.")
+ self.stdout.write("Done with updating cache.\n")
| {"golden_diff": "diff --git a/evap/evaluation/management/commands/refresh_results_cache.py b/evap/evaluation/management/commands/refresh_results_cache.py\n--- a/evap/evaluation/management/commands/refresh_results_cache.py\n+++ b/evap/evaluation/management/commands/refresh_results_cache.py\n@@ -1,4 +1,5 @@\n from django.core.management.base import BaseCommand\n+from django.core.serializers.base import ProgressBar\n from django.core.cache import cache\n \n from evap.evaluation.models import Course\n@@ -12,9 +13,15 @@\n def handle(self, *args, **options):\n self.stdout.write(\"Clearing cache...\")\n cache.clear()\n+ total_count = Course.objects.count()\n \n self.stdout.write(\"Calculating results for all courses...\")\n- for course in Course.objects.all():\n+\n+ self.stdout.ending = None\n+ progress_bar = ProgressBar(self.stdout, total_count)\n+\n+ for counter, course in enumerate(Course.objects.all()):\n+ progress_bar.update(counter + 1)\n calculate_results(course)\n \n- self.stdout.write(\"Done with updating cache.\")\n+ self.stdout.write(\"Done with updating cache.\\n\")\n", "issue": "Deal with update.sh\nupdate.sh is out of date. We can either\n- remove it\n- update it\n- replace it with something else\n\nhaving a script would be pretty cool to document what needs to be done when updating a production server.\n\nmaybe this can go into a management command.\n\nan idea for additional automation would be a git post-commit-hook that checks out the release branch and updates everything when someone pushes to the production server via ssh. logs of the update could be sent via email to the admins.\n\nDeal with update.sh\nupdate.sh is out of date. We can either\n- remove it\n- update it\n- replace it with something else\n\nhaving a script would be pretty cool to document what needs to be done when updating a production server.\n\nmaybe this can go into a management command.\n\nan idea for additional automation would be a git post-commit-hook that checks out the release branch and updates everything when someone pushes to the production server via ssh. 
logs of the update could be sent via email to the admins.\n\n", "before_files": [{"content": "from django.core.management.base import BaseCommand\nfrom django.core.cache import cache\n\nfrom evap.evaluation.models import Course\nfrom evap.evaluation.tools import calculate_results\n\n\nclass Command(BaseCommand):\n args = ''\n help = 'Clears the cache and pre-warms it with the results of all courses'\n\n def handle(self, *args, **options):\n self.stdout.write(\"Clearing cache...\")\n cache.clear()\n\n self.stdout.write(\"Calculating results for all courses...\")\n for course in Course.objects.all():\n calculate_results(course)\n\n self.stdout.write(\"Done with updating cache.\")\n", "path": "evap/evaluation/management/commands/refresh_results_cache.py"}], "after_files": [{"content": "from django.core.management.base import BaseCommand\nfrom django.core.serializers.base import ProgressBar\nfrom django.core.cache import cache\n\nfrom evap.evaluation.models import Course\nfrom evap.evaluation.tools import calculate_results\n\n\nclass Command(BaseCommand):\n args = ''\n help = 'Clears the cache and pre-warms it with the results of all courses'\n\n def handle(self, *args, **options):\n self.stdout.write(\"Clearing cache...\")\n cache.clear()\n total_count = Course.objects.count()\n\n self.stdout.write(\"Calculating results for all courses...\")\n\n self.stdout.ending = None\n progress_bar = ProgressBar(self.stdout, total_count)\n\n for counter, course in enumerate(Course.objects.all()):\n progress_bar.update(counter + 1)\n calculate_results(course)\n\n self.stdout.write(\"Done with updating cache.\\n\")\n", "path": "evap/evaluation/management/commands/refresh_results_cache.py"}]} | 637 | 257 |
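The patch keeps the loop but wraps it in Django's serializer `ProgressBar`, which takes an output stream plus a total count and is driven with a 1-based counter. A stand-alone sketch, assuming Django is importable; the course list and the commented-out call stand in for the real ORM objects:

```python
import sys

from django.core.serializers.base import ProgressBar  # same class the patch imports

def warm_cache(courses, stdout=sys.stdout):
    # The real command also sets self.stdout.ending = None so Django's command
    # wrapper doesn't append a newline after every progress update.
    progress_bar = ProgressBar(stdout, len(courses))
    for counter, course in enumerate(courses):
        progress_bar.update(counter + 1)  # ProgressBar counts from 1
        # calculate_results(course) would run here in the real command

warm_cache(["course-a", "course-b", "course-c"])
```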
gh_patches_debug_3099 | rasdani/github-patches | git_diff | MongoEngine__mongoengine-2431 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Version restriction on pillow
Do we still need to restrict pillow to less than 7.0.0? This looks to have been implemented because of Python 2 support, which mongoengine dropped with version 0.20.0
https://github.com/MongoEngine/mongoengine/blob/277b827d4dab4630145bc747fdab0df48a045273/setup.py#L118
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import os
2 import sys
3
4 from pkg_resources import normalize_path
5 from setuptools import find_packages, setup
6 from setuptools.command.test import test as TestCommand
7
8 # Hack to silence atexit traceback in newer python versions
9 try:
10 import multiprocessing
11 except ImportError:
12 pass
13
14 DESCRIPTION = "MongoEngine is a Python Object-Document Mapper for working with MongoDB."
15
16 try:
17 with open("README.rst") as fin:
18 LONG_DESCRIPTION = fin.read()
19 except Exception:
20 LONG_DESCRIPTION = None
21
22
23 def get_version(version_tuple):
24 """Return the version tuple as a string, e.g. for (0, 10, 7),
25 return '0.10.7'.
26 """
27 return ".".join(map(str, version_tuple))
28
29
30 class PyTest(TestCommand):
31 """Will force pytest to search for tests inside the build directory
32 for 2to3 converted code (used by tox), instead of the current directory.
33 Required as long as we need 2to3
34
35 Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations
36 Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html
37 """
38
39 # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands
40 # Allows to provide pytest command argument through the test runner command `python setup.py test`
41 # e.g: `python setup.py test -a "-k=test"`
42 # This only works for 1 argument though
43 user_options = [("pytest-args=", "a", "Arguments to pass to py.test")]
44
45 def initialize_options(self):
46 TestCommand.initialize_options(self)
47 self.pytest_args = ""
48
49 def finalize_options(self):
50 TestCommand.finalize_options(self)
51 self.test_args = ["tests"]
52 self.test_suite = True
53
54 def run_tests(self):
55 # import here, cause outside the eggs aren't loaded
56 from pkg_resources import _namespace_packages
57 import pytest
58
59 # Purge modules under test from sys.modules. The test loader will
60 # re-import them from the build location. Required when 2to3 is used
61 # with namespace packages.
62 if sys.version_info >= (3,) and getattr(self.distribution, "use_2to3", False):
63 module = self.test_args[-1].split(".")[0]
64 if module in _namespace_packages:
65 del_modules = []
66 if module in sys.modules:
67 del_modules.append(module)
68 module += "."
69 for name in sys.modules:
70 if name.startswith(module):
71 del_modules.append(name)
72 map(sys.modules.__delitem__, del_modules)
73
74 # Run on the build directory for 2to3-built code
75 # This will prevent the old 2.x code from being found
76 # by py.test discovery mechanism, that apparently
77 # ignores sys.path..
78 ei_cmd = self.get_finalized_command("egg_info")
79 self.test_args = [normalize_path(ei_cmd.egg_base)]
80
81 cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])
82 errno = pytest.main(cmd_args)
83 sys.exit(errno)
84
85
86 # Dirty hack to get version number from monogengine/__init__.py - we can't
87 # import it as it depends on PyMongo and PyMongo isn't installed until this
88 # file is read
89 init = os.path.join(os.path.dirname(__file__), "mongoengine", "__init__.py")
90 version_line = list(filter(lambda l: l.startswith("VERSION"), open(init)))[0]
91
92 VERSION = get_version(eval(version_line.split("=")[-1]))
93
94 CLASSIFIERS = [
95 "Development Status :: 5 - Production/Stable",
96 "Intended Audience :: Developers",
97 "License :: OSI Approved :: MIT License",
98 "Operating System :: OS Independent",
99 "Programming Language :: Python",
100 "Programming Language :: Python :: 3",
101 "Programming Language :: Python :: 3.5",
102 "Programming Language :: Python :: 3.6",
103 "Programming Language :: Python :: 3.7",
104 "Programming Language :: Python :: 3.8",
105 "Programming Language :: Python :: Implementation :: CPython",
106 "Programming Language :: Python :: Implementation :: PyPy",
107 "Topic :: Database",
108 "Topic :: Software Development :: Libraries :: Python Modules",
109 ]
110
111 extra_opts = {
112 "packages": find_packages(exclude=["tests", "tests.*"]),
113 "tests_require": [
114 "pytest<5.0",
115 "pytest-cov",
116 "coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls
117 "blinker",
118 "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support
119 ],
120 }
121
122 if "test" in sys.argv:
123 extra_opts["packages"] = find_packages()
124 extra_opts["package_data"] = {
125 "tests": ["fields/mongoengine.png", "fields/mongodb_leaf.png"]
126 }
127
128 setup(
129 name="mongoengine",
130 version=VERSION,
131 author="Harry Marr",
132 author_email="[email protected]",
133 maintainer="Stefan Wojcik",
134 maintainer_email="[email protected]",
135 url="http://mongoengine.org/",
136 download_url="https://github.com/MongoEngine/mongoengine/tarball/master",
137 license="MIT",
138 include_package_data=True,
139 description=DESCRIPTION,
140 long_description=LONG_DESCRIPTION,
141 platforms=["any"],
142 classifiers=CLASSIFIERS,
143 python_requires=">=3.5",
144 install_requires=["pymongo>=3.4, <4.0"],
145 cmdclass={"test": PyTest},
146 **extra_opts
147 )
148
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -115,7 +115,7 @@
"pytest-cov",
"coverage<5.0", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls
"blinker",
- "Pillow>=2.0.0, <7.0.0", # 7.0.0 dropped Python2 support
+ "Pillow>=7.0.0",
],
}
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -115,7 +115,7 @@\n \"pytest-cov\",\n \"coverage<5.0\", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls\n \"blinker\",\n- \"Pillow>=2.0.0, <7.0.0\", # 7.0.0 dropped Python2 support\n+ \"Pillow>=7.0.0\",\n ],\n }\n", "issue": "Version restriction on pillow\nDo we still need to restrict pillow to less then 7.0.0? This looks to have been implemented because of python2 support, which mongoengine dropped with version 0.20.0\r\n\r\nhttps://github.com/MongoEngine/mongoengine/blob/277b827d4dab4630145bc747fdab0df48a045273/setup.py#L118\n", "before_files": [{"content": "import os\nimport sys\n\nfrom pkg_resources import normalize_path\nfrom setuptools import find_packages, setup\nfrom setuptools.command.test import test as TestCommand\n\n# Hack to silence atexit traceback in newer python versions\ntry:\n import multiprocessing\nexcept ImportError:\n pass\n\nDESCRIPTION = \"MongoEngine is a Python Object-Document Mapper for working with MongoDB.\"\n\ntry:\n with open(\"README.rst\") as fin:\n LONG_DESCRIPTION = fin.read()\nexcept Exception:\n LONG_DESCRIPTION = None\n\n\ndef get_version(version_tuple):\n \"\"\"Return the version tuple as a string, e.g. for (0, 10, 7),\n return '0.10.7'.\n \"\"\"\n return \".\".join(map(str, version_tuple))\n\n\nclass PyTest(TestCommand):\n \"\"\"Will force pytest to search for tests inside the build directory\n for 2to3 converted code (used by tox), instead of the current directory.\n Required as long as we need 2to3\n\n Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations\n Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html\n \"\"\"\n\n # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands\n # Allows to provide pytest command argument through the test runner command `python setup.py test`\n # e.g: `python setup.py test -a \"-k=test\"`\n # This only works for 1 argument though\n user_options = [(\"pytest-args=\", \"a\", \"Arguments to pass to py.test\")]\n\n def initialize_options(self):\n TestCommand.initialize_options(self)\n self.pytest_args = \"\"\n\n def finalize_options(self):\n TestCommand.finalize_options(self)\n self.test_args = [\"tests\"]\n self.test_suite = True\n\n def run_tests(self):\n # import here, cause outside the eggs aren't loaded\n from pkg_resources import _namespace_packages\n import pytest\n\n # Purge modules under test from sys.modules. The test loader will\n # re-import them from the build location. 
Required when 2to3 is used\n # with namespace packages.\n if sys.version_info >= (3,) and getattr(self.distribution, \"use_2to3\", False):\n module = self.test_args[-1].split(\".\")[0]\n if module in _namespace_packages:\n del_modules = []\n if module in sys.modules:\n del_modules.append(module)\n module += \".\"\n for name in sys.modules:\n if name.startswith(module):\n del_modules.append(name)\n map(sys.modules.__delitem__, del_modules)\n\n # Run on the build directory for 2to3-built code\n # This will prevent the old 2.x code from being found\n # by py.test discovery mechanism, that apparently\n # ignores sys.path..\n ei_cmd = self.get_finalized_command(\"egg_info\")\n self.test_args = [normalize_path(ei_cmd.egg_base)]\n\n cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])\n errno = pytest.main(cmd_args)\n sys.exit(errno)\n\n\n# Dirty hack to get version number from monogengine/__init__.py - we can't\n# import it as it depends on PyMongo and PyMongo isn't installed until this\n# file is read\ninit = os.path.join(os.path.dirname(__file__), \"mongoengine\", \"__init__.py\")\nversion_line = list(filter(lambda l: l.startswith(\"VERSION\"), open(init)))[0]\n\nVERSION = get_version(eval(version_line.split(\"=\")[-1]))\n\nCLASSIFIERS = [\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Topic :: Database\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n]\n\nextra_opts = {\n \"packages\": find_packages(exclude=[\"tests\", \"tests.*\"]),\n \"tests_require\": [\n \"pytest<5.0\",\n \"pytest-cov\",\n \"coverage<5.0\", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls\n \"blinker\",\n \"Pillow>=2.0.0, <7.0.0\", # 7.0.0 dropped Python2 support\n ],\n}\n\nif \"test\" in sys.argv:\n extra_opts[\"packages\"] = find_packages()\n extra_opts[\"package_data\"] = {\n \"tests\": [\"fields/mongoengine.png\", \"fields/mongodb_leaf.png\"]\n }\n\nsetup(\n name=\"mongoengine\",\n version=VERSION,\n author=\"Harry Marr\",\n author_email=\"[email protected]\",\n maintainer=\"Stefan Wojcik\",\n maintainer_email=\"[email protected]\",\n url=\"http://mongoengine.org/\",\n download_url=\"https://github.com/MongoEngine/mongoengine/tarball/master\",\n license=\"MIT\",\n include_package_data=True,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n platforms=[\"any\"],\n classifiers=CLASSIFIERS,\n python_requires=\">=3.5\",\n install_requires=[\"pymongo>=3.4, <4.0\"],\n cmdclass={\"test\": PyTest},\n **extra_opts\n)\n", "path": "setup.py"}], "after_files": [{"content": "import os\nimport sys\n\nfrom pkg_resources import normalize_path\nfrom setuptools import find_packages, setup\nfrom setuptools.command.test import test as TestCommand\n\n# Hack to silence atexit traceback in newer python versions\ntry:\n import multiprocessing\nexcept ImportError:\n pass\n\nDESCRIPTION = \"MongoEngine is a Python Object-Document Mapper for working with MongoDB.\"\n\ntry:\n with open(\"README.rst\") as 
fin:\n LONG_DESCRIPTION = fin.read()\nexcept Exception:\n LONG_DESCRIPTION = None\n\n\ndef get_version(version_tuple):\n \"\"\"Return the version tuple as a string, e.g. for (0, 10, 7),\n return '0.10.7'.\n \"\"\"\n return \".\".join(map(str, version_tuple))\n\n\nclass PyTest(TestCommand):\n \"\"\"Will force pytest to search for tests inside the build directory\n for 2to3 converted code (used by tox), instead of the current directory.\n Required as long as we need 2to3\n\n Known Limitation: https://tox.readthedocs.io/en/latest/example/pytest.html#known-issues-and-limitations\n Source: https://www.hackzine.org/python-testing-with-pytest-and-2to3-plus-tox-and-travis-ci.html\n \"\"\"\n\n # https://pytest.readthedocs.io/en/2.7.3/goodpractises.html#integration-with-setuptools-test-commands\n # Allows to provide pytest command argument through the test runner command `python setup.py test`\n # e.g: `python setup.py test -a \"-k=test\"`\n # This only works for 1 argument though\n user_options = [(\"pytest-args=\", \"a\", \"Arguments to pass to py.test\")]\n\n def initialize_options(self):\n TestCommand.initialize_options(self)\n self.pytest_args = \"\"\n\n def finalize_options(self):\n TestCommand.finalize_options(self)\n self.test_args = [\"tests\"]\n self.test_suite = True\n\n def run_tests(self):\n # import here, cause outside the eggs aren't loaded\n from pkg_resources import _namespace_packages\n import pytest\n\n # Purge modules under test from sys.modules. The test loader will\n # re-import them from the build location. Required when 2to3 is used\n # with namespace packages.\n if sys.version_info >= (3,) and getattr(self.distribution, \"use_2to3\", False):\n module = self.test_args[-1].split(\".\")[0]\n if module in _namespace_packages:\n del_modules = []\n if module in sys.modules:\n del_modules.append(module)\n module += \".\"\n for name in sys.modules:\n if name.startswith(module):\n del_modules.append(name)\n map(sys.modules.__delitem__, del_modules)\n\n # Run on the build directory for 2to3-built code\n # This will prevent the old 2.x code from being found\n # by py.test discovery mechanism, that apparently\n # ignores sys.path..\n ei_cmd = self.get_finalized_command(\"egg_info\")\n self.test_args = [normalize_path(ei_cmd.egg_base)]\n\n cmd_args = self.test_args + ([self.pytest_args] if self.pytest_args else [])\n errno = pytest.main(cmd_args)\n sys.exit(errno)\n\n\n# Dirty hack to get version number from monogengine/__init__.py - we can't\n# import it as it depends on PyMongo and PyMongo isn't installed until this\n# file is read\ninit = os.path.join(os.path.dirname(__file__), \"mongoengine\", \"__init__.py\")\nversion_line = list(filter(lambda l: l.startswith(\"VERSION\"), open(init)))[0]\n\nVERSION = get_version(eval(version_line.split(\"=\")[-1]))\n\nCLASSIFIERS = [\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Topic :: Database\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n]\n\nextra_opts = {\n \"packages\": 
find_packages(exclude=[\"tests\", \"tests.*\"]),\n \"tests_require\": [\n \"pytest<5.0\",\n \"pytest-cov\",\n \"coverage<5.0\", # recent coverage switched to sqlite format for the .coverage file which isn't handled properly by coveralls\n \"blinker\",\n \"Pillow>=7.0.0\",\n ],\n}\n\nif \"test\" in sys.argv:\n extra_opts[\"packages\"] = find_packages()\n extra_opts[\"package_data\"] = {\n \"tests\": [\"fields/mongoengine.png\", \"fields/mongodb_leaf.png\"]\n }\n\nsetup(\n name=\"mongoengine\",\n version=VERSION,\n author=\"Harry Marr\",\n author_email=\"[email protected]\",\n maintainer=\"Stefan Wojcik\",\n maintainer_email=\"[email protected]\",\n url=\"http://mongoengine.org/\",\n download_url=\"https://github.com/MongoEngine/mongoengine/tarball/master\",\n license=\"MIT\",\n include_package_data=True,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n platforms=[\"any\"],\n classifiers=CLASSIFIERS,\n python_requires=\">=3.5\",\n install_requires=[\"pymongo>=3.4, <4.0\"],\n cmdclass={\"test\": PyTest},\n **extra_opts\n)\n", "path": "setup.py"}]} | 1,969 | 122 |
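The substantive change is a single pin in `tests_require`; with Python 2 support gone from mongoengine 0.20.0, the `<7.0.0` cap on Pillow no longer serves a purpose. The changed list in isolation:

```python
# tests_require after the patch; only the Pillow line changes.
tests_require = [
    "pytest<5.0",
    "pytest-cov",
    # recent coverage switched to sqlite format for the .coverage file,
    # which isn't handled properly by coveralls
    "coverage<5.0",
    "blinker",
    "Pillow>=7.0.0",  # was "Pillow>=2.0.0, <7.0.0" (7.0.0 dropped Python 2)
]
```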
gh_patches_debug_14107 | rasdani/github-patches | git_diff | spack__spack-17427 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Errors untarring source tarballs in Docker
Some source tarballs are created in a way that tries to preserve user and group permissions.
Yes, it's bad practice to create a release source tarball with these properties -- but I suspect it's easy for this to go undetected until you run in a security-constrained Linux environment.
Cython is one example I hit when trying to build in a Docker container using ubuntu:latest:
```
/bin/tar: Cython-0.25.2/.gitrev: Cannot change ownership to uid 96258, gid 5000: Invalid argument
.... (many many more errors)
/bin/tar: Cython-0.25.2/USAGE.txt: Cannot change ownership to uid 96258, gid 5000: Invalid argument
/bin/tar: Cython-0.25.2: Cannot change ownership to uid 96258, gid 5000: Invalid argument
/bin/tar: Exiting with failure status due to previous errors
```
The tar errors cause Spack to stop. I am not sure if there is any way to defensively avoid this, but if it's possible it would be a good enhancement to Spack.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lib/spack/spack/util/compression.py`
Content:
```
1 # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
2 # Spack Project Developers. See the top-level COPYRIGHT file for details.
3 #
4 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
5
6 import re
7 import os
8 from itertools import product
9 from spack.util.executable import which
10
11 # Supported archive extensions.
12 PRE_EXTS = ["tar", "TAR"]
13 EXTS = ["gz", "bz2", "xz", "Z"]
14 NOTAR_EXTS = ["zip", "tgz", "tbz2", "txz"]
15
16 # Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
17 ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(
18 PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS
19
20
21 def allowed_archive(path):
22 return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
23
24
25 def decompressor_for(path, extension=None):
26 """Get the appropriate decompressor for a path."""
27 if ((extension and re.match(r'\.?zip$', extension)) or
28 path.endswith('.zip')):
29 unzip = which('unzip', required=True)
30 unzip.add_default_arg('-q')
31 return unzip
32 if extension and re.match(r'gz', extension):
33 gunzip = which('gunzip', required=True)
34 return gunzip
35 if extension and re.match(r'bz2', extension):
36 bunzip2 = which('bunzip2', required=True)
37 return bunzip2
38 tar = which('tar', required=True)
39 tar.add_default_arg('-xf')
40 return tar
41
42
43 def strip_extension(path):
44 """Get the part of a path that does not include its compressed
45 type extension."""
46 for type in ALLOWED_ARCHIVE_TYPES:
47 suffix = r'\.%s$' % type
48 if re.search(suffix, path):
49 return re.sub(suffix, "", path)
50 return path
51
52
53 def extension(path):
54 """Get the archive extension for a path."""
55 if path is None:
56 raise ValueError("Can't call extension() on None")
57
58 # Strip sourceforge suffix.
59 if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path):
60 path = os.path.dirname(path)
61
62 for t in ALLOWED_ARCHIVE_TYPES:
63 suffix = r'\.%s$' % t
64 if re.search(suffix, path):
65 return t
66 return None
67
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -14,7 +14,7 @@
NOTAR_EXTS = ["zip", "tgz", "tbz2", "txz"]
# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
-ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(
+ALLOWED_ARCHIVE_TYPES = [".".join(ext) for ext in product(
PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS
@@ -36,7 +36,7 @@
bunzip2 = which('bunzip2', required=True)
return bunzip2
tar = which('tar', required=True)
- tar.add_default_arg('-xf')
+ tar.add_default_arg('-oxf')
return tar
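For context on the fix: with GNU tar, the extraction flag `-o` is shorthand for `--no-same-owner`. A minimal standalone sketch (not Spack code; assumes GNU tar is available on `PATH`):

```python
# Minimal sketch, not Spack code: extract a tarball without trying to
# restore the uid/gid recorded in the archive.
import subprocess

def extract(tarball, dest="."):
    # "-o" (GNU tar's --no-same-owner on extraction) makes extracted
    # files owned by the extracting user, avoiding the "Cannot change
    # ownership to uid ..., gid ..." failures quoted in the issue.
    subprocess.check_call(["tar", "-oxf", tarball, "-C", dest])

# Example usage (assumes the tarball exists locally):
# extract("Cython-0.25.2.tar.gz")
```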
| {"golden_diff": "diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py\n--- a/lib/spack/spack/util/compression.py\n+++ b/lib/spack/spack/util/compression.py\n@@ -14,7 +14,7 @@\n NOTAR_EXTS = [\"zip\", \"tgz\", \"tbz2\", \"txz\"]\n \n # Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz\n-ALLOWED_ARCHIVE_TYPES = [\".\".join(l) for l in product(\n+ALLOWED_ARCHIVE_TYPES = [\".\".join(ext) for ext in product(\n PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS\n \n \n@@ -36,7 +36,7 @@\n bunzip2 = which('bunzip2', required=True)\n return bunzip2\n tar = which('tar', required=True)\n- tar.add_default_arg('-xf')\n+ tar.add_default_arg('-oxf')\n return tar\n", "issue": "Errors untaring source tarballs Docker\nSome source tarballs are created in a way that tries to preserve user and group perms. \r\nYes, it's a bad to create a source tarball for release with these properties -- but I suspect it's easy to go undetected until you run in a security constrained linux.\r\n\r\nCython is one example example I hit when trying to build in a Docker container using ubuntu:latest\r\n\r\n```\r\n/bin/tar: Cython-0.25.2/.gitrev: Cannot change ownership to uid 96258, gid 5000: Invalid argument\r\n.... (many many more errors)\r\n/bin/tar: Cython-0.25.2/USAGE.txt: Cannot change ownership to uid 96258, gid 5000: Invalid argument\r\n/bin/tar: Cython-0.25.2: Cannot change ownership to uid 96258, gid 5000: Invalid argument\r\n/bin/tar: Exiting with failure status due to previous errors\r\n```\r\nThe tar errors cause spack to stop. I am not sure if there is anyway to defensively avoid this, but if its possible it would be a good enhancement to spack. \r\n\n", "before_files": [{"content": "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. 
See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nimport re\nimport os\nfrom itertools import product\nfrom spack.util.executable import which\n\n# Supported archive extensions.\nPRE_EXTS = [\"tar\", \"TAR\"]\nEXTS = [\"gz\", \"bz2\", \"xz\", \"Z\"]\nNOTAR_EXTS = [\"zip\", \"tgz\", \"tbz2\", \"txz\"]\n\n# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz\nALLOWED_ARCHIVE_TYPES = [\".\".join(l) for l in product(\n PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS\n\n\ndef allowed_archive(path):\n return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)\n\n\ndef decompressor_for(path, extension=None):\n \"\"\"Get the appropriate decompressor for a path.\"\"\"\n if ((extension and re.match(r'\\.?zip$', extension)) or\n path.endswith('.zip')):\n unzip = which('unzip', required=True)\n unzip.add_default_arg('-q')\n return unzip\n if extension and re.match(r'gz', extension):\n gunzip = which('gunzip', required=True)\n return gunzip\n if extension and re.match(r'bz2', extension):\n bunzip2 = which('bunzip2', required=True)\n return bunzip2\n tar = which('tar', required=True)\n tar.add_default_arg('-xf')\n return tar\n\n\ndef strip_extension(path):\n \"\"\"Get the part of a path that does not include its compressed\n type extension.\"\"\"\n for type in ALLOWED_ARCHIVE_TYPES:\n suffix = r'\\.%s$' % type\n if re.search(suffix, path):\n return re.sub(suffix, \"\", path)\n return path\n\n\ndef extension(path):\n \"\"\"Get the archive extension for a path.\"\"\"\n if path is None:\n raise ValueError(\"Can't call extension() on None\")\n\n # Strip sourceforge suffix.\n if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path):\n path = os.path.dirname(path)\n\n for t in ALLOWED_ARCHIVE_TYPES:\n suffix = r'\\.%s$' % t\n if re.search(suffix, path):\n return t\n return None\n", "path": "lib/spack/spack/util/compression.py"}], "after_files": [{"content": "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other\n# Spack Project Developers. 
See the top-level COPYRIGHT file for details.\n#\n# SPDX-License-Identifier: (Apache-2.0 OR MIT)\n\nimport re\nimport os\nfrom itertools import product\nfrom spack.util.executable import which\n\n# Supported archive extensions.\nPRE_EXTS = [\"tar\", \"TAR\"]\nEXTS = [\"gz\", \"bz2\", \"xz\", \"Z\"]\nNOTAR_EXTS = [\"zip\", \"tgz\", \"tbz2\", \"txz\"]\n\n# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz\nALLOWED_ARCHIVE_TYPES = [\".\".join(ext) for ext in product(\n PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS\n\n\ndef allowed_archive(path):\n return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)\n\n\ndef decompressor_for(path, extension=None):\n \"\"\"Get the appropriate decompressor for a path.\"\"\"\n if ((extension and re.match(r'\\.?zip$', extension)) or\n path.endswith('.zip')):\n unzip = which('unzip', required=True)\n unzip.add_default_arg('-q')\n return unzip\n if extension and re.match(r'gz', extension):\n gunzip = which('gunzip', required=True)\n return gunzip\n if extension and re.match(r'bz2', extension):\n bunzip2 = which('bunzip2', required=True)\n return bunzip2\n tar = which('tar', required=True)\n tar.add_default_arg('-oxf')\n return tar\n\n\ndef strip_extension(path):\n \"\"\"Get the part of a path that does not include its compressed\n type extension.\"\"\"\n for type in ALLOWED_ARCHIVE_TYPES:\n suffix = r'\\.%s$' % type\n if re.search(suffix, path):\n return re.sub(suffix, \"\", path)\n return path\n\n\ndef extension(path):\n \"\"\"Get the archive extension for a path.\"\"\"\n if path is None:\n raise ValueError(\"Can't call extension() on None\")\n\n # Strip sourceforge suffix.\n if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path):\n path = os.path.dirname(path)\n\n for t in ALLOWED_ARCHIVE_TYPES:\n suffix = r'\\.%s$' % t\n if re.search(suffix, path):\n return t\n return None\n", "path": "lib/spack/spack/util/compression.py"}]} | 1,194 | 227 |
gh_patches_debug_23113 | rasdani/github-patches | git_diff | sopel-irc__sopel-1848 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
xkcd outputs link even if triggered by one
The `xkcd` plugin shouldn't output a link to the comic if it was triggered by a link.
See `wikipedia`, for example. This is an anti-cycling measure in case multiple bots handling the same link(s) are present in the channel.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sopel/modules/xkcd.py`
Content:
```
1 # coding=utf-8
2 """
3 xkcd.py - Sopel xkcd Module
4 Copyright 2010, Michael Yanovich (yanovich.net), and Morgan Goose
5 Copyright 2012, Lior Ramati
6 Copyright 2013, Elsie Powell (embolalia.com)
7 Licensed under the Eiffel Forum License 2.
8
9 https://sopel.chat
10 """
11 from __future__ import unicode_literals, absolute_import, print_function, division
12
13 import random
14 import re
15
16 import requests
17
18 from sopel.modules.search import bing_search
19 from sopel.module import commands, url
20
21
22 ignored_sites = [
23 # For searching the web
24 'almamater.xkcd.com',
25 'blog.xkcd.com',
26 'blag.xkcd.com',
27 'forums.xkcd.com',
28 'fora.xkcd.com',
29 'forums3.xkcd.com',
30 'store.xkcd.com',
31 'wiki.xkcd.com',
32 'what-if.xkcd.com',
33 ]
34 sites_query = ' site:xkcd.com -site:' + ' -site:'.join(ignored_sites)
35
36
37 def get_info(number=None):
38 if number:
39 url = 'https://xkcd.com/{}/info.0.json'.format(number)
40 else:
41 url = 'https://xkcd.com/info.0.json'
42 data = requests.get(url).json()
43 data['url'] = 'https://xkcd.com/' + str(data['num'])
44 return data
45
46
47 def web_search(query):
48 url = bing_search(query + sites_query)
49 if not url:
50 return None
51 match = re.match(r'(?:https?://)?xkcd.com/(\d+)/?', url)
52 if match:
53 return match.group(1)
54
55
56 @commands('xkcd')
57 def xkcd(bot, trigger):
58 """.xkcd - Finds an xkcd comic strip.
59
60 Takes one of 3 inputs:
61
62 * If no input is provided it will return a random comic
63 * If numeric input is provided it will return that comic, or the
64 nth-latest comic if the number is non-positive
65 * If non-numeric input is provided it will return the first search result
66 for those keywords on the xkcd.com site
67 """
68 # get latest comic for rand function and numeric input
69 latest = get_info()
70 max_int = latest['num']
71
72 # if no input is given (pre - lior's edits code)
73 if not trigger.group(2): # get rand comic
74 random.seed()
75 requested = get_info(random.randint(1, max_int + 1))
76 else:
77 query = trigger.group(2).strip()
78
79 numbered = re.match(r"^(#|\+|-)?(\d+)$", query)
80 if numbered:
81 query = int(numbered.group(2))
82 if numbered.group(1) == "-":
83 query = -query
84 return numbered_result(bot, query, latest)
85 else:
86 # Non-number: search the web.
87 if (query.lower() == "latest" or query.lower() == "newest"):
88 requested = latest
89 else:
90 number = web_search(query)
91 if not number:
92 bot.say('Could not find any comics for that query.')
93 return
94 requested = get_info(number)
95
96 say_result(bot, requested)
97
98
99 def numbered_result(bot, query, latest):
100 max_int = latest['num']
101 if query > max_int:
102 bot.say(("Sorry, comic #{} hasn't been posted yet. "
103 "The last comic was #{}").format(query, max_int))
104 return
105 elif query <= -max_int:
106 bot.say(("Sorry, but there were only {} comics "
107 "released yet so far").format(max_int))
108 return
109 elif abs(query) == 0:
110 requested = latest
111 elif query == 404 or max_int + query == 404:
112 bot.say("404 - Not Found") # don't error on that one
113 return
114 elif query > 0:
115 requested = get_info(query)
116 else:
117 # Negative: go back that many from current
118 requested = get_info(max_int + query)
119
120 say_result(bot, requested)
121
122
123 def say_result(bot, result):
124 message = '{} | {} | Alt-text: {}'.format(result['url'], result['title'],
125 result['alt'])
126 bot.say(message)
127
128
129 @url(r'xkcd.com/(\d+)')
130 def get_url(bot, trigger, match):
131 latest = get_info()
132 numbered_result(bot, int(match.group(1)), latest)
133
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sopel/modules/xkcd.py b/sopel/modules/xkcd.py
--- a/sopel/modules/xkcd.py
+++ b/sopel/modules/xkcd.py
@@ -96,7 +96,7 @@
say_result(bot, requested)
-def numbered_result(bot, query, latest):
+def numbered_result(bot, query, latest, commanded=True):
max_int = latest['num']
if query > max_int:
bot.say(("Sorry, comic #{} hasn't been posted yet. "
@@ -117,16 +117,18 @@
# Negative: go back that many from current
requested = get_info(max_int + query)
- say_result(bot, requested)
+ say_result(bot, requested, commanded)
-def say_result(bot, result):
- message = '{} | {} | Alt-text: {}'.format(result['url'], result['title'],
- result['alt'])
+def say_result(bot, result, commanded=True):
+ message = '{}{} | Alt-text: {}'.format(
+ result['url'] + ' | ' if commanded else '',
+ result['title'], result['alt']
+ )
bot.say(message)
@url(r'xkcd.com/(\d+)')
def get_url(bot, trigger, match):
latest = get_info()
- numbered_result(bot, int(match.group(1)), latest)
+ numbered_result(bot, int(match.group(1)), latest, commanded=False)
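A standalone sketch of the patched formatting logic (hypothetical helper that mirrors `say_result()`; the dict fields are the ones the plugin reads):

```python
def format_result(result, commanded=True):
    # The comic URL is prepended only when the lookup came from an
    # explicit .xkcd command; link-triggered lookups omit it -- the
    # anti-cycling measure described in the issue.
    return "{}{} | Alt-text: {}".format(
        result["url"] + " | " if commanded else "",
        result["title"],
        result["alt"],
    )

comic = {"url": "https://xkcd.com/353", "title": "Python",
         "alt": "example alt text"}
print(format_result(comic))                   # URL included
print(format_result(comic, commanded=False))  # URL suppressed
```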
| {"golden_diff": "diff --git a/sopel/modules/xkcd.py b/sopel/modules/xkcd.py\n--- a/sopel/modules/xkcd.py\n+++ b/sopel/modules/xkcd.py\n@@ -96,7 +96,7 @@\n say_result(bot, requested)\n \n \n-def numbered_result(bot, query, latest):\n+def numbered_result(bot, query, latest, commanded=True):\n max_int = latest['num']\n if query > max_int:\n bot.say((\"Sorry, comic #{} hasn't been posted yet. \"\n@@ -117,16 +117,18 @@\n # Negative: go back that many from current\n requested = get_info(max_int + query)\n \n- say_result(bot, requested)\n+ say_result(bot, requested, commanded)\n \n \n-def say_result(bot, result):\n- message = '{} | {} | Alt-text: {}'.format(result['url'], result['title'],\n- result['alt'])\n+def say_result(bot, result, commanded=True):\n+ message = '{}{} | Alt-text: {}'.format(\n+ result['url'] + ' | ' if commanded else '',\n+ result['title'], result['alt']\n+ )\n bot.say(message)\n \n \n @url(r'xkcd.com/(\\d+)')\n def get_url(bot, trigger, match):\n latest = get_info()\n- numbered_result(bot, int(match.group(1)), latest)\n+ numbered_result(bot, int(match.group(1)), latest, commanded=False)\n", "issue": "xkcd outputs link even if triggered by one\nThe `xkcd` plugin shouldn't output a link to the comic if it was triggered by a link.\r\n\r\nSee `wikipedia`, for example. This is an anti-cycling measure in case multiple bots handling the same link(s) are present in the channel.\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"\nxkcd.py - Sopel xkcd Module\nCopyright 2010, Michael Yanovich (yanovich.net), and Morgan Goose\nCopyright 2012, Lior Ramati\nCopyright 2013, Elsie Powell (embolalia.com)\nLicensed under the Eiffel Forum License 2.\n\nhttps://sopel.chat\n\"\"\"\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport random\nimport re\n\nimport requests\n\nfrom sopel.modules.search import bing_search\nfrom sopel.module import commands, url\n\n\nignored_sites = [\n # For searching the web\n 'almamater.xkcd.com',\n 'blog.xkcd.com',\n 'blag.xkcd.com',\n 'forums.xkcd.com',\n 'fora.xkcd.com',\n 'forums3.xkcd.com',\n 'store.xkcd.com',\n 'wiki.xkcd.com',\n 'what-if.xkcd.com',\n]\nsites_query = ' site:xkcd.com -site:' + ' -site:'.join(ignored_sites)\n\n\ndef get_info(number=None):\n if number:\n url = 'https://xkcd.com/{}/info.0.json'.format(number)\n else:\n url = 'https://xkcd.com/info.0.json'\n data = requests.get(url).json()\n data['url'] = 'https://xkcd.com/' + str(data['num'])\n return data\n\n\ndef web_search(query):\n url = bing_search(query + sites_query)\n if not url:\n return None\n match = re.match(r'(?:https?://)?xkcd.com/(\\d+)/?', url)\n if match:\n return match.group(1)\n\n\n@commands('xkcd')\ndef xkcd(bot, trigger):\n \"\"\".xkcd - Finds an xkcd comic strip.\n\n Takes one of 3 inputs:\n\n * If no input is provided it will return a random comic\n * If numeric input is provided it will return that comic, or the\n nth-latest comic if the number is non-positive\n * If non-numeric input is provided it will return the first search result\n for those keywords on the xkcd.com site\n \"\"\"\n # get latest comic for rand function and numeric input\n latest = get_info()\n max_int = latest['num']\n\n # if no input is given (pre - lior's edits code)\n if not trigger.group(2): # get rand comic\n random.seed()\n requested = get_info(random.randint(1, max_int + 1))\n else:\n query = trigger.group(2).strip()\n\n numbered = re.match(r\"^(#|\\+|-)?(\\d+)$\", query)\n if numbered:\n query = int(numbered.group(2))\n if numbered.group(1) == 
\"-\":\n query = -query\n return numbered_result(bot, query, latest)\n else:\n # Non-number: search the web.\n if (query.lower() == \"latest\" or query.lower() == \"newest\"):\n requested = latest\n else:\n number = web_search(query)\n if not number:\n bot.say('Could not find any comics for that query.')\n return\n requested = get_info(number)\n\n say_result(bot, requested)\n\n\ndef numbered_result(bot, query, latest):\n max_int = latest['num']\n if query > max_int:\n bot.say((\"Sorry, comic #{} hasn't been posted yet. \"\n \"The last comic was #{}\").format(query, max_int))\n return\n elif query <= -max_int:\n bot.say((\"Sorry, but there were only {} comics \"\n \"released yet so far\").format(max_int))\n return\n elif abs(query) == 0:\n requested = latest\n elif query == 404 or max_int + query == 404:\n bot.say(\"404 - Not Found\") # don't error on that one\n return\n elif query > 0:\n requested = get_info(query)\n else:\n # Negative: go back that many from current\n requested = get_info(max_int + query)\n\n say_result(bot, requested)\n\n\ndef say_result(bot, result):\n message = '{} | {} | Alt-text: {}'.format(result['url'], result['title'],\n result['alt'])\n bot.say(message)\n\n\n@url(r'xkcd.com/(\\d+)')\ndef get_url(bot, trigger, match):\n latest = get_info()\n numbered_result(bot, int(match.group(1)), latest)\n", "path": "sopel/modules/xkcd.py"}], "after_files": [{"content": "# coding=utf-8\n\"\"\"\nxkcd.py - Sopel xkcd Module\nCopyright 2010, Michael Yanovich (yanovich.net), and Morgan Goose\nCopyright 2012, Lior Ramati\nCopyright 2013, Elsie Powell (embolalia.com)\nLicensed under the Eiffel Forum License 2.\n\nhttps://sopel.chat\n\"\"\"\nfrom __future__ import unicode_literals, absolute_import, print_function, division\n\nimport random\nimport re\n\nimport requests\n\nfrom sopel.modules.search import bing_search\nfrom sopel.module import commands, url\n\n\nignored_sites = [\n # For searching the web\n 'almamater.xkcd.com',\n 'blog.xkcd.com',\n 'blag.xkcd.com',\n 'forums.xkcd.com',\n 'fora.xkcd.com',\n 'forums3.xkcd.com',\n 'store.xkcd.com',\n 'wiki.xkcd.com',\n 'what-if.xkcd.com',\n]\nsites_query = ' site:xkcd.com -site:' + ' -site:'.join(ignored_sites)\n\n\ndef get_info(number=None):\n if number:\n url = 'https://xkcd.com/{}/info.0.json'.format(number)\n else:\n url = 'https://xkcd.com/info.0.json'\n data = requests.get(url).json()\n data['url'] = 'https://xkcd.com/' + str(data['num'])\n return data\n\n\ndef web_search(query):\n url = bing_search(query + sites_query)\n if not url:\n return None\n match = re.match(r'(?:https?://)?xkcd.com/(\\d+)/?', url)\n if match:\n return match.group(1)\n\n\n@commands('xkcd')\ndef xkcd(bot, trigger):\n \"\"\".xkcd - Finds an xkcd comic strip.\n\n Takes one of 3 inputs:\n\n * If no input is provided it will return a random comic\n * If numeric input is provided it will return that comic, or the\n nth-latest comic if the number is non-positive\n * If non-numeric input is provided it will return the first search result\n for those keywords on the xkcd.com site\n \"\"\"\n # get latest comic for rand function and numeric input\n latest = get_info()\n max_int = latest['num']\n\n # if no input is given (pre - lior's edits code)\n if not trigger.group(2): # get rand comic\n random.seed()\n requested = get_info(random.randint(1, max_int + 1))\n else:\n query = trigger.group(2).strip()\n\n numbered = re.match(r\"^(#|\\+|-)?(\\d+)$\", query)\n if numbered:\n query = int(numbered.group(2))\n if numbered.group(1) == \"-\":\n query = -query\n return 
numbered_result(bot, query, latest)\n else:\n # Non-number: search the web.\n if (query.lower() == \"latest\" or query.lower() == \"newest\"):\n requested = latest\n else:\n number = web_search(query)\n if not number:\n bot.say('Could not find any comics for that query.')\n return\n requested = get_info(number)\n\n say_result(bot, requested)\n\n\ndef numbered_result(bot, query, latest, commanded=True):\n max_int = latest['num']\n if query > max_int:\n bot.say((\"Sorry, comic #{} hasn't been posted yet. \"\n \"The last comic was #{}\").format(query, max_int))\n return\n elif query <= -max_int:\n bot.say((\"Sorry, but there were only {} comics \"\n \"released yet so far\").format(max_int))\n return\n elif abs(query) == 0:\n requested = latest\n elif query == 404 or max_int + query == 404:\n bot.say(\"404 - Not Found\") # don't error on that one\n return\n elif query > 0:\n requested = get_info(query)\n else:\n # Negative: go back that many from current\n requested = get_info(max_int + query)\n\n say_result(bot, requested, commanded)\n\n\ndef say_result(bot, result, commanded=True):\n message = '{}{} | Alt-text: {}'.format(\n result['url'] + ' | ' if commanded else '',\n result['title'], result['alt']\n )\n bot.say(message)\n\n\n@url(r'xkcd.com/(\\d+)')\ndef get_url(bot, trigger, match):\n latest = get_info()\n numbered_result(bot, int(match.group(1)), latest, commanded=False)\n", "path": "sopel/modules/xkcd.py"}]} | 1,636 | 324 |
gh_patches_debug_1564 | rasdani/github-patches | git_diff | python-trio__trio-1041 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
trio.Cancelled missing readable str()
Trying to print a `trio.Cancelled` exception, e.g. with `f'{exc}'`, yields an empty string. I usually resort to `type(exc)` when I need to see what's going on.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `trio/_core/_exceptions.py`
Content:
```
1 import attr
2
3
4 class TrioInternalError(Exception):
5 """Raised by :func:`run` if we encounter a bug in trio, or (possibly) a
6 misuse of one of the low-level :mod:`trio.hazmat` APIs.
7
8 This should never happen! If you get this error, please file a bug.
9
10 Unfortunately, if you get this error it also means that all bets are off –
11 trio doesn't know what is going on and its normal invariants may be void.
12 (For example, we might have "lost track" of a task. Or lost track of all
13 tasks.) Again, though, this shouldn't happen.
14
15 """
16 pass
17
18
19 class RunFinishedError(RuntimeError):
20 """Raised by ``run_in_trio_thread`` and similar functions if the
21 corresponding call to :func:`trio.run` has already finished.
22
23 """
24 pass
25
26
27 class WouldBlock(Exception):
28 """Raised by ``X_nowait`` functions if ``X`` would block.
29
30 """
31 pass
32
33
34 class Cancelled(BaseException):
35 """Raised by blocking calls if the surrounding scope has been cancelled.
36
37 You should let this exception propagate, to be caught by the relevant
38 cancel scope. To remind you of this, it inherits from :exc:`BaseException`
39 instead of :exc:`Exception`, just like :exc:`KeyboardInterrupt` and
40 :exc:`SystemExit` do. This means that if you write something like::
41
42 try:
43 ...
44 except Exception:
45 ...
46
47 then this *won't* catch a :exc:`Cancelled` exception.
48
49 You cannot raise :exc:`Cancelled` yourself. Attempting to do so
50 will produce a :exc:`RuntimeError`. Use :meth:`cancel_scope.cancel()
51 <trio.CancelScope.cancel>` instead.
52
53 .. note::
54
55 In the US it's also common to see this word spelled "canceled", with
56 only one "l". This is a `recent
57 <https://books.google.com/ngrams/graph?content=canceled%2Ccancelled&year_start=1800&year_end=2000&corpus=5&smoothing=3&direct_url=t1%3B%2Ccanceled%3B%2Cc0%3B.t1%3B%2Ccancelled%3B%2Cc0>`__
58 and `US-specific
59 <https://books.google.com/ngrams/graph?content=canceled%2Ccancelled&year_start=1800&year_end=2000&corpus=18&smoothing=3&share=&direct_url=t1%3B%2Ccanceled%3B%2Cc0%3B.t1%3B%2Ccancelled%3B%2Cc0>`__
60 innovation, and even in the US both forms are still commonly used. So
61 for consistency with the rest of the world and with "cancellation"
62 (which always has two "l"s), trio uses the two "l" spelling
63 everywhere.
64
65 """
66 __marker = object()
67
68 def __init__(self, _marker=None):
69 if _marker is not self.__marker:
70 raise RuntimeError(
71 'Cancelled should not be raised directly. Use the cancel() '
72 'method on your cancel scope.'
73 )
74 super().__init__()
75
76 @classmethod
77 def _init(cls):
78 """A private constructor so that a user-created instance of Cancelled
79 can raise an appropriate error. see `issue #342
80 <https://github.com/python-trio/trio/issues/342>`__.
81 """
82 return cls(_marker=cls.__marker)
83
84
85 class BusyResourceError(Exception):
86 """Raised when a task attempts to use a resource that some other task is
87 already using, and this would lead to bugs and nonsense.
88
89 For example, if two tasks try to send data through the same socket at the
90 same time, trio will raise :class:`BusyResourceError` instead of letting
91 the data get scrambled.
92
93 """
94
95
96 class ClosedResourceError(Exception):
97 """Raised when attempting to use a resource after it has been closed.
98
99 Note that "closed" here means that *your* code closed the resource,
100 generally by calling a method with a name like ``close`` or ``aclose``, or
101 by exiting a context manager. If a problem arises elsewhere – for example,
102 because of a network failure, or because a remote peer closed their end of
103 a connection – then that should be indicated by a different exception
104 class, like :exc:`BrokenResourceError` or an :exc:`OSError` subclass.
105
106 """
107
108
109 class BrokenResourceError(Exception):
110 """Raised when an attempt to use a resource fails due to external
111 circumstances.
112
113 For example, you might get this if you try to send data on a stream where
114 the remote side has already closed the connection.
115
116 You *don't* get this error if *you* closed the resource – in that case you
117 get :class:`ClosedResourceError`.
118
119 This exception's ``__cause__`` attribute will often contain more
120 information about the underlying error.
121
122 """
123
124
125 class EndOfChannel(Exception):
126 """Raised when trying to receive from a :class:`trio.abc.ReceiveChannel`
127 that has no more data to receive.
128
129 This is analogous to an "end-of-file" condition, but for channels.
130
131 """
132
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/trio/_core/_exceptions.py b/trio/_core/_exceptions.py
--- a/trio/_core/_exceptions.py
+++ b/trio/_core/_exceptions.py
@@ -73,6 +73,9 @@
)
super().__init__()
+ def __str__(self):
+ return "Cancelled"
+
@classmethod
def _init(cls):
"""A private constructor so that a user-created instance of Cancelled
| {"golden_diff": "diff --git a/trio/_core/_exceptions.py b/trio/_core/_exceptions.py\n--- a/trio/_core/_exceptions.py\n+++ b/trio/_core/_exceptions.py\n@@ -73,6 +73,9 @@\n )\n super().__init__()\n \n+ def __str__(self):\n+ return \"Cancelled\"\n+\n @classmethod\n def _init(cls):\n \"\"\"A private constructor so that a user-created instance of Cancelled\n", "issue": "trio.Cancelled missing readable str()\nTrying to print a trio.Cancelled exception e.g. with `f'{exc}'` yields an empty string. I usually resort to `type(exc)` when I need to see what's going on.\n", "before_files": [{"content": "import attr\n\n\nclass TrioInternalError(Exception):\n \"\"\"Raised by :func:`run` if we encounter a bug in trio, or (possibly) a\n misuse of one of the low-level :mod:`trio.hazmat` APIs.\n\n This should never happen! If you get this error, please file a bug.\n\n Unfortunately, if you get this error it also means that all bets are off \u2013\n trio doesn't know what is going on and its normal invariants may be void.\n (For example, we might have \"lost track\" of a task. Or lost track of all\n tasks.) Again, though, this shouldn't happen.\n\n \"\"\"\n pass\n\n\nclass RunFinishedError(RuntimeError):\n \"\"\"Raised by ``run_in_trio_thread`` and similar functions if the\n corresponding call to :func:`trio.run` has already finished.\n\n \"\"\"\n pass\n\n\nclass WouldBlock(Exception):\n \"\"\"Raised by ``X_nowait`` functions if ``X`` would block.\n\n \"\"\"\n pass\n\n\nclass Cancelled(BaseException):\n \"\"\"Raised by blocking calls if the surrounding scope has been cancelled.\n\n You should let this exception propagate, to be caught by the relevant\n cancel scope. To remind you of this, it inherits from :exc:`BaseException`\n instead of :exc:`Exception`, just like :exc:`KeyboardInterrupt` and\n :exc:`SystemExit` do. This means that if you write something like::\n\n try:\n ...\n except Exception:\n ...\n\n then this *won't* catch a :exc:`Cancelled` exception.\n\n You cannot raise :exc:`Cancelled` yourself. Attempting to do so\n will produce a :exc:`RuntimeError`. Use :meth:`cancel_scope.cancel()\n <trio.CancelScope.cancel>` instead.\n\n .. note::\n\n In the US it's also common to see this word spelled \"canceled\", with\n only one \"l\". This is a `recent\n <https://books.google.com/ngrams/graph?content=canceled%2Ccancelled&year_start=1800&year_end=2000&corpus=5&smoothing=3&direct_url=t1%3B%2Ccanceled%3B%2Cc0%3B.t1%3B%2Ccancelled%3B%2Cc0>`__\n and `US-specific\n <https://books.google.com/ngrams/graph?content=canceled%2Ccancelled&year_start=1800&year_end=2000&corpus=18&smoothing=3&share=&direct_url=t1%3B%2Ccanceled%3B%2Cc0%3B.t1%3B%2Ccancelled%3B%2Cc0>`__\n innovation, and even in the US both forms are still commonly used. So\n for consistency with the rest of the world and with \"cancellation\"\n (which always has two \"l\"s), trio uses the two \"l\" spelling\n everywhere.\n\n \"\"\"\n __marker = object()\n\n def __init__(self, _marker=None):\n if _marker is not self.__marker:\n raise RuntimeError(\n 'Cancelled should not be raised directly. Use the cancel() '\n 'method on your cancel scope.'\n )\n super().__init__()\n\n @classmethod\n def _init(cls):\n \"\"\"A private constructor so that a user-created instance of Cancelled\n can raise an appropriate error. 
see `issue #342\n <https://github.com/python-trio/trio/issues/342>`__.\n \"\"\"\n return cls(_marker=cls.__marker)\n\n\nclass BusyResourceError(Exception):\n \"\"\"Raised when a task attempts to use a resource that some other task is\n already using, and this would lead to bugs and nonsense.\n\n For example, if two tasks try to send data through the same socket at the\n same time, trio will raise :class:`BusyResourceError` instead of letting\n the data get scrambled.\n\n \"\"\"\n\n\nclass ClosedResourceError(Exception):\n \"\"\"Raised when attempting to use a resource after it has been closed.\n\n Note that \"closed\" here means that *your* code closed the resource,\n generally by calling a method with a name like ``close`` or ``aclose``, or\n by exiting a context manager. If a problem arises elsewhere \u2013 for example,\n because of a network failure, or because a remote peer closed their end of\n a connection \u2013 then that should be indicated by a different exception\n class, like :exc:`BrokenResourceError` or an :exc:`OSError` subclass.\n\n \"\"\"\n\n\nclass BrokenResourceError(Exception):\n \"\"\"Raised when an attempt to use a resource fails due to external\n circumstances.\n\n For example, you might get this if you try to send data on a stream where\n the remote side has already closed the connection.\n\n You *don't* get this error if *you* closed the resource \u2013 in that case you\n get :class:`ClosedResourceError`.\n\n This exception's ``__cause__`` attribute will often contain more\n information about the underlying error.\n\n \"\"\"\n\n\nclass EndOfChannel(Exception):\n \"\"\"Raised when trying to receive from a :class:`trio.abc.ReceiveChannel`\n that has no more data to receive.\n\n This is analogous to an \"end-of-file\" condition, but for channels.\n\n \"\"\"\n", "path": "trio/_core/_exceptions.py"}], "after_files": [{"content": "import attr\n\n\nclass TrioInternalError(Exception):\n \"\"\"Raised by :func:`run` if we encounter a bug in trio, or (possibly) a\n misuse of one of the low-level :mod:`trio.hazmat` APIs.\n\n This should never happen! If you get this error, please file a bug.\n\n Unfortunately, if you get this error it also means that all bets are off \u2013\n trio doesn't know what is going on and its normal invariants may be void.\n (For example, we might have \"lost track\" of a task. Or lost track of all\n tasks.) Again, though, this shouldn't happen.\n\n \"\"\"\n pass\n\n\nclass RunFinishedError(RuntimeError):\n \"\"\"Raised by ``run_in_trio_thread`` and similar functions if the\n corresponding call to :func:`trio.run` has already finished.\n\n \"\"\"\n pass\n\n\nclass WouldBlock(Exception):\n \"\"\"Raised by ``X_nowait`` functions if ``X`` would block.\n\n \"\"\"\n pass\n\n\nclass Cancelled(BaseException):\n \"\"\"Raised by blocking calls if the surrounding scope has been cancelled.\n\n You should let this exception propagate, to be caught by the relevant\n cancel scope. To remind you of this, it inherits from :exc:`BaseException`\n instead of :exc:`Exception`, just like :exc:`KeyboardInterrupt` and\n :exc:`SystemExit` do. This means that if you write something like::\n\n try:\n ...\n except Exception:\n ...\n\n then this *won't* catch a :exc:`Cancelled` exception.\n\n You cannot raise :exc:`Cancelled` yourself. Attempting to do so\n will produce a :exc:`RuntimeError`. Use :meth:`cancel_scope.cancel()\n <trio.CancelScope.cancel>` instead.\n\n .. note::\n\n In the US it's also common to see this word spelled \"canceled\", with\n only one \"l\". 
This is a `recent\n <https://books.google.com/ngrams/graph?content=canceled%2Ccancelled&year_start=1800&year_end=2000&corpus=5&smoothing=3&direct_url=t1%3B%2Ccanceled%3B%2Cc0%3B.t1%3B%2Ccancelled%3B%2Cc0>`__\n and `US-specific\n <https://books.google.com/ngrams/graph?content=canceled%2Ccancelled&year_start=1800&year_end=2000&corpus=18&smoothing=3&share=&direct_url=t1%3B%2Ccanceled%3B%2Cc0%3B.t1%3B%2Ccancelled%3B%2Cc0>`__\n innovation, and even in the US both forms are still commonly used. So\n for consistency with the rest of the world and with \"cancellation\"\n (which always has two \"l\"s), trio uses the two \"l\" spelling\n everywhere.\n\n \"\"\"\n __marker = object()\n\n def __init__(self, _marker=None):\n if _marker is not self.__marker:\n raise RuntimeError(\n 'Cancelled should not be raised directly. Use the cancel() '\n 'method on your cancel scope.'\n )\n super().__init__()\n\n def __str__(self):\n return \"Cancelled\"\n\n @classmethod\n def _init(cls):\n \"\"\"A private constructor so that a user-created instance of Cancelled\n can raise an appropriate error. see `issue #342\n <https://github.com/python-trio/trio/issues/342>`__.\n \"\"\"\n return cls(_marker=cls.__marker)\n\n\nclass BusyResourceError(Exception):\n \"\"\"Raised when a task attempts to use a resource that some other task is\n already using, and this would lead to bugs and nonsense.\n\n For example, if two tasks try to send data through the same socket at the\n same time, trio will raise :class:`BusyResourceError` instead of letting\n the data get scrambled.\n\n \"\"\"\n\n\nclass ClosedResourceError(Exception):\n \"\"\"Raised when attempting to use a resource after it has been closed.\n\n Note that \"closed\" here means that *your* code closed the resource,\n generally by calling a method with a name like ``close`` or ``aclose``, or\n by exiting a context manager. If a problem arises elsewhere \u2013 for example,\n because of a network failure, or because a remote peer closed their end of\n a connection \u2013 then that should be indicated by a different exception\n class, like :exc:`BrokenResourceError` or an :exc:`OSError` subclass.\n\n \"\"\"\n\n\nclass BrokenResourceError(Exception):\n \"\"\"Raised when an attempt to use a resource fails due to external\n circumstances.\n\n For example, you might get this if you try to send data on a stream where\n the remote side has already closed the connection.\n\n You *don't* get this error if *you* closed the resource \u2013 in that case you\n get :class:`ClosedResourceError`.\n\n This exception's ``__cause__`` attribute will often contain more\n information about the underlying error.\n\n \"\"\"\n\n\nclass EndOfChannel(Exception):\n \"\"\"Raised when trying to receive from a :class:`trio.abc.ReceiveChannel`\n that has no more data to receive.\n\n This is analogous to an \"end-of-file\" condition, but for channels.\n\n \"\"\"\n", "path": "trio/_core/_exceptions.py"}]} | 1,783 | 100 |
gh_patches_debug_14423 | rasdani/github-patches | git_diff | translate__pootle-4087 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Elasticsearch 1.7.0 breaks Pootle if ES not set up
With `elasticsearch==1.7.0`, calling `es.ping()` against a server that is not set up raises `ConnectionError`.
This breaks the default install here:
https://github.com/translate/pootle/blob/master/pootle/core/search/backends/elasticsearch.py#L29
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/core/search/backends/elasticsearch.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright (C) Pootle contributors.
5 #
6 # This file is a part of the Pootle project. It is distributed under the GPL3
7 # or later license. See the LICENSE file for a copy of the license and the
8 # AUTHORS file for copyright and authorship information.
9
10 from __future__ import absolute_import
11
12 __all__ = ('ElasticSearchBackend',)
13
14 try:
15 from elasticsearch import Elasticsearch
16 except:
17 Elasticsearch = None
18
19 from ..base import SearchBackend
20
21
22 class ElasticSearchBackend(SearchBackend):
23 def __init__(self, config_name):
24 super(ElasticSearchBackend, self).__init__(config_name)
25 self._es = self._get_es_server()
26 self._create_index_if_missing()
27
28 def _server_setup_and_alive(self):
29 return self._es is not None and self._es.ping()
30
31 def _get_es_server(self):
32 if self._settings is None or Elasticsearch is None:
33 return None
34 return Elasticsearch([
35 {'host': self._settings['HOST'],
36 'port': self._settings['PORT']},
37 ])
38
39 def _create_index_if_missing(self):
40 if self._server_setup_and_alive():
41 if not self._es.indices.exists(self._settings['INDEX_NAME']):
42 self._es.indices.create(self._settings['INDEX_NAME'])
43
44 def _is_valuable_hit(self, unit, hit):
45 return str(unit.id) != hit['_id']
46
47 def search(self, unit):
48 if not self._server_setup_and_alive():
49 return []
50
51 counter = {}
52 res = []
53 language = unit.store.translation_project.language.code
54 es_res = self._es.search(
55 index=self._settings['INDEX_NAME'],
56 doc_type=language,
57 body={
58 "query": {
59 "match": {
60 "source": {
61 "query": unit.source,
62 "fuzziness": self._settings['MIN_SCORE'],
63 }
64 }
65 }
66 }
67 )
68
69 for hit in es_res['hits']['hits']:
70 if self._is_valuable_hit(unit, hit):
71 translation_pair = hit['_source']['source'] + hit['_source']['target']
72 if translation_pair not in counter:
73 counter[translation_pair] = 1
74 res.append({
75 'unit_id': hit['_id'],
76 'source': hit['_source']['source'],
77 'target': hit['_source']['target'],
78 'project': hit['_source']['project'],
79 'path': hit['_source']['path'],
80 'username': hit['_source']['username'],
81 'fullname': hit['_source']['fullname'],
82 'email_md5': hit['_source']['email_md5'],
83 })
84 else:
85 counter[translation_pair] += 1
86
87 for item in res:
88 item['count'] = counter[item['source']+item['target']]
89
90 return res
91
92 def update(self, language, obj):
93 if self._server_setup_and_alive():
94 self._es.index(
95 index=self._settings['INDEX_NAME'],
96 doc_type=language,
97 body=obj,
98 id=obj['id']
99 )
100
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pootle/core/search/backends/elasticsearch.py b/pootle/core/search/backends/elasticsearch.py
--- a/pootle/core/search/backends/elasticsearch.py
+++ b/pootle/core/search/backends/elasticsearch.py
@@ -13,6 +13,7 @@
try:
from elasticsearch import Elasticsearch
+ from elasticsearch.exceptions import ConnectionError
except:
Elasticsearch = None
@@ -26,7 +27,12 @@
self._create_index_if_missing()
def _server_setup_and_alive(self):
- return self._es is not None and self._es.ping()
+ if self._es is None:
+ return False
+ try:
+ return self._es.ping()
+ except ConnectionError:
+ return False
def _get_es_server(self):
if self._settings is None or Elasticsearch is None:
| {"golden_diff": "diff --git a/pootle/core/search/backends/elasticsearch.py b/pootle/core/search/backends/elasticsearch.py\n--- a/pootle/core/search/backends/elasticsearch.py\n+++ b/pootle/core/search/backends/elasticsearch.py\n@@ -13,6 +13,7 @@\n \n try:\n from elasticsearch import Elasticsearch\n+ from elasticsearch.exceptions import ConnectionError\n except:\n Elasticsearch = None\n \n@@ -26,7 +27,12 @@\n self._create_index_if_missing()\n \n def _server_setup_and_alive(self):\n- return self._es is not None and self._es.ping()\n+ if self._es is None:\n+ return False\n+ try:\n+ return self._es.ping()\n+ except ConnectionError:\n+ return False\n \n def _get_es_server(self):\n if self._settings is None or Elasticsearch is None:\n", "issue": "Elasticsearch 1.7.0 breaks Pootle if ES not set up\nWith `elasticsearch==1.7.0` doing `es.ping()` on a not-setup server raises `ConnectionError`.\n\nThis breaks the default install here:\n\nhttps://github.com/translate/pootle/blob/master/pootle/core/search/backends/elasticsearch.py#L29\n\nElasticsearch 1.7.0 breaks Pootle if ES not set up\nWith `elasticsearch==1.7.0` doing `es.ping()` on a not-setup server raises `ConnectionError`.\n\nThis breaks the default install here:\n\nhttps://github.com/translate/pootle/blob/master/pootle/core/search/backends/elasticsearch.py#L29\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom __future__ import absolute_import\n\n__all__ = ('ElasticSearchBackend',)\n\ntry:\n from elasticsearch import Elasticsearch\nexcept:\n Elasticsearch = None\n\nfrom ..base import SearchBackend\n\n\nclass ElasticSearchBackend(SearchBackend):\n def __init__(self, config_name):\n super(ElasticSearchBackend, self).__init__(config_name)\n self._es = self._get_es_server()\n self._create_index_if_missing()\n\n def _server_setup_and_alive(self):\n return self._es is not None and self._es.ping()\n\n def _get_es_server(self):\n if self._settings is None or Elasticsearch is None:\n return None\n return Elasticsearch([\n {'host': self._settings['HOST'],\n 'port': self._settings['PORT']},\n ])\n\n def _create_index_if_missing(self):\n if self._server_setup_and_alive():\n if not self._es.indices.exists(self._settings['INDEX_NAME']):\n self._es.indices.create(self._settings['INDEX_NAME'])\n\n def _is_valuable_hit(self, unit, hit):\n return str(unit.id) != hit['_id']\n\n def search(self, unit):\n if not self._server_setup_and_alive():\n return []\n\n counter = {}\n res = []\n language = unit.store.translation_project.language.code\n es_res = self._es.search(\n index=self._settings['INDEX_NAME'],\n doc_type=language,\n body={\n \"query\": {\n \"match\": {\n \"source\": {\n \"query\": unit.source,\n \"fuzziness\": self._settings['MIN_SCORE'],\n }\n }\n }\n }\n )\n\n for hit in es_res['hits']['hits']:\n if self._is_valuable_hit(unit, hit):\n translation_pair = hit['_source']['source'] + hit['_source']['target']\n if translation_pair not in counter:\n counter[translation_pair] = 1\n res.append({\n 'unit_id': hit['_id'],\n 'source': hit['_source']['source'],\n 'target': hit['_source']['target'],\n 'project': hit['_source']['project'],\n 'path': hit['_source']['path'],\n 'username': hit['_source']['username'],\n 'fullname': hit['_source']['fullname'],\n 'email_md5': 
hit['_source']['email_md5'],\n })\n else:\n counter[translation_pair] += 1\n\n for item in res:\n item['count'] = counter[item['source']+item['target']]\n\n return res\n\n def update(self, language, obj):\n if self._server_setup_and_alive():\n self._es.index(\n index=self._settings['INDEX_NAME'],\n doc_type=language,\n body=obj,\n id=obj['id']\n )\n", "path": "pootle/core/search/backends/elasticsearch.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nfrom __future__ import absolute_import\n\n__all__ = ('ElasticSearchBackend',)\n\ntry:\n from elasticsearch import Elasticsearch\n from elasticsearch.exceptions import ConnectionError\nexcept:\n Elasticsearch = None\n\nfrom ..base import SearchBackend\n\n\nclass ElasticSearchBackend(SearchBackend):\n def __init__(self, config_name):\n super(ElasticSearchBackend, self).__init__(config_name)\n self._es = self._get_es_server()\n self._create_index_if_missing()\n\n def _server_setup_and_alive(self):\n if self._es is None:\n return False\n try:\n return self._es.ping()\n except ConnectionError:\n return False\n\n def _get_es_server(self):\n if self._settings is None or Elasticsearch is None:\n return None\n return Elasticsearch([\n {'host': self._settings['HOST'],\n 'port': self._settings['PORT']},\n ])\n\n def _create_index_if_missing(self):\n if self._server_setup_and_alive():\n if not self._es.indices.exists(self._settings['INDEX_NAME']):\n self._es.indices.create(self._settings['INDEX_NAME'])\n\n def _is_valuable_hit(self, unit, hit):\n return str(unit.id) != hit['_id']\n\n def search(self, unit):\n if not self._server_setup_and_alive():\n return []\n\n counter = {}\n res = []\n language = unit.store.translation_project.language.code\n es_res = self._es.search(\n index=self._settings['INDEX_NAME'],\n doc_type=language,\n body={\n \"query\": {\n \"match\": {\n \"source\": {\n \"query\": unit.source,\n \"fuzziness\": self._settings['MIN_SCORE'],\n }\n }\n }\n }\n )\n\n for hit in es_res['hits']['hits']:\n if self._is_valuable_hit(unit, hit):\n translation_pair = hit['_source']['source'] + hit['_source']['target']\n if translation_pair not in counter:\n counter[translation_pair] = 1\n res.append({\n 'unit_id': hit['_id'],\n 'source': hit['_source']['source'],\n 'target': hit['_source']['target'],\n 'project': hit['_source']['project'],\n 'path': hit['_source']['path'],\n 'username': hit['_source']['username'],\n 'fullname': hit['_source']['fullname'],\n 'email_md5': hit['_source']['email_md5'],\n })\n else:\n counter[translation_pair] += 1\n\n for item in res:\n item['count'] = counter[item['source']+item['target']]\n\n return res\n\n def update(self, language, obj):\n if self._server_setup_and_alive():\n self._es.index(\n index=self._settings['INDEX_NAME'],\n doc_type=language,\n body=obj,\n id=obj['id']\n )\n", "path": "pootle/core/search/backends/elasticsearch.py"}]} | 1,284 | 194 |
gh_patches_debug_199 | rasdani/github-patches | git_diff | twisted__twisted-1695 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 22.2.0
|[<img alt="adiroiban's avatar" src="https://avatars.githubusercontent.com/u/204609?s=50" width="50" height="50">](https://github.com/adiroiban)| @adiroiban reported|
|-|-|
|Trac ID|trac#10306|
|Type|enhancement|
|Created|2022-02-08 14:05:11Z|
<details><summary>Searchable metadata</summary>
```
trac-id__10306 10306
type__enhancement enhancement
reporter__adiroiban adiroiban
priority__normal normal
milestone__None None
branch__
branch_author__
status__closed closed
resolution__fixed fixed
component__core core
keywords__None None
time__1644329111193403 1644329111193403
changetime__1646513115841857 1646513115841857
version__None None
owner__None None
```
</details>
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/twisted/_version.py`
Content:
```
1 """
2 Provides Twisted version information.
3 """
4
5 # This file is auto-generated! Do not edit!
6 # Use `python -m incremental.update Twisted` to change this file.
7
8 from incremental import Version
9
10 __version__ = Version("Twisted", 22, 1, 0, post=0)
11 __all__ = ["__version__"]
12
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/twisted/_version.py b/src/twisted/_version.py
--- a/src/twisted/_version.py
+++ b/src/twisted/_version.py
@@ -7,5 +7,5 @@
from incremental import Version
-__version__ = Version("Twisted", 22, 1, 0, post=0)
+__version__ = Version("Twisted", 22, 2, 0, post=0)
__all__ = ["__version__"]
| {"golden_diff": "diff --git a/src/twisted/_version.py b/src/twisted/_version.py\n--- a/src/twisted/_version.py\n+++ b/src/twisted/_version.py\n@@ -7,5 +7,5 @@\n \n from incremental import Version\n \n-__version__ = Version(\"Twisted\", 22, 1, 0, post=0)\n+__version__ = Version(\"Twisted\", 22, 2, 0, post=0)\n __all__ = [\"__version__\"]\n", "issue": "Release 22.2.0\n|[<img alt=\"adiroiban's avatar\" src=\"https://avatars.githubusercontent.com/u/204609?s=50\" width=\"50\" height=\"50\">](https://github.com/adiroiban)| @adiroiban reported|\n|-|-|\n|Trac ID|trac#10306|\n|Type|enhancement|\n|Created|2022-02-08 14:05:11Z|\n\n\n\n<details><summary>Searchable metadata</summary>\n\n```\ntrac-id__10306 10306\ntype__enhancement enhancement\nreporter__adiroiban adiroiban\npriority__normal normal\nmilestone__None None\nbranch__ \nbranch_author__ \nstatus__closed closed\nresolution__fixed fixed\ncomponent__core core\nkeywords__None None\ntime__1644329111193403 1644329111193403\nchangetime__1646513115841857 1646513115841857\nversion__None None\nowner__None None\n\n```\n</details>\n\n", "before_files": [{"content": "\"\"\"\nProvides Twisted version information.\n\"\"\"\n\n# This file is auto-generated! Do not edit!\n# Use `python -m incremental.update Twisted` to change this file.\n\nfrom incremental import Version\n\n__version__ = Version(\"Twisted\", 22, 1, 0, post=0)\n__all__ = [\"__version__\"]\n", "path": "src/twisted/_version.py"}], "after_files": [{"content": "\"\"\"\nProvides Twisted version information.\n\"\"\"\n\n# This file is auto-generated! Do not edit!\n# Use `python -m incremental.update Twisted` to change this file.\n\nfrom incremental import Version\n\n__version__ = Version(\"Twisted\", 22, 2, 0, post=0)\n__all__ = [\"__version__\"]\n", "path": "src/twisted/_version.py"}]} | 637 | 113 |