| column | dtype | lengths / values |
| --- | --- | --- |
| problem_id | string | lengths 18-22 |
| source | string | 1 distinct value |
| task_type | string | 1 distinct value |
| in_source_id | string | lengths 13-58 |
| prompt | string | lengths 1.71k-18.9k |
| golden_diff | string | lengths 145-5.13k |
| verification_info | string | lengths 465-23.6k |
| num_tokens_prompt | int64 | 556-4.1k |
| num_tokens_diff | int64 | 47-1.02k |
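Each row pairs a prompt (an issue statement plus a partial code base) with a reference patch, and `verification_info` repeats that material in machine-readable form (`golden_diff`, `issue`, `before_files`). A minimal sketch of how rows with this schema could be verified is below; the `rows.jsonl` file name is a stand-in for wherever the rows are stored, and the only check performed is that the reference diff applies cleanly to a scratch checkout of `before_files`.

```python
# Sketch: check that each row's golden_diff applies to its before_files snapshot.
# The rows.jsonl path is a placeholder, not part of the dataset itself.
import json
import subprocess
import tempfile
from pathlib import Path


def golden_diff_applies(row: dict) -> bool:
    info = json.loads(row["verification_info"])
    with tempfile.TemporaryDirectory() as tmp:
        repo = Path(tmp)
        subprocess.run(["git", "init", "-q", str(repo)], check=True)
        # Materialise the pre-patch files.
        for f in info["before_files"]:
            target = repo / f["path"]
            target.parent.mkdir(parents=True, exist_ok=True)
            target.write_text(f["content"])
        # --check validates the patch without modifying the working tree.
        result = subprocess.run(
            ["git", "apply", "--check", "-"],
            cwd=repo, input=info["golden_diff"], text=True,
        )
    return result.returncode == 0


with open("rows.jsonl") as fh:
    for line in fh:
        row = json.loads(line)
        print(row["problem_id"], golden_diff_applies(row))
```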
problem_id: gh_patches_debug_37088 | source: rasdani/github-patches | task_type: git_diff | in_source_id: pre-commit__pre-commit-321 | prompt:
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Does not work within submodules
I'm getting:
```
An unexpected error has occurred: NotADirectoryError: [Errno 20] Not a directory: '/home/quentin/chef-repo/cookbooks/ssmtp-cookbook/.git/hooks/pre-commit'
```
chef-repo is my primary repository and ssmtp-cookbook a git submodule of that.
**ssmtp-cookbook/.git file contents:**
```
gitdir: ../../.git/modules/cookbooks/ssmtp-cookbook
```
</issue>
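For context on why a hard-coded `<repo>/.git/hooks/...` path fails here: in a submodule, `.git` is a plain file holding the `gitdir:` pointer shown above, not a directory, so the hooks directory lives under whatever that pointer (equivalently, `git rev-parse --git-dir`) resolves to. A rough illustration of resolving it, not code taken from pre-commit:

```python
# Illustrative only: find the real git directory, which is not "<repo>/.git"
# when the working tree is a submodule with a gitdir pointer file.
import os
import subprocess


def resolve_git_dir(repo_root):
    out = subprocess.check_output(
        ["git", "rev-parse", "--git-dir"], cwd=repo_root
    ).decode().strip()
    # rev-parse may return a relative path; normalise it against the repo root.
    return os.path.normpath(os.path.join(repo_root, out))


# For the report above this would resolve to something like
# /home/quentin/chef-repo/.git/modules/cookbooks/ssmtp-cookbook,
# whose hooks/ subdirectory is a real directory, unlike ssmtp-cookbook/.git/hooks.
```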
<code>
[start of pre_commit/git.py]
1 from __future__ import unicode_literals
2
3 import functools
4 import logging
5 import os
6 import os.path
7 import re
8
9 from pre_commit.errors import FatalError
10 from pre_commit.util import CalledProcessError
11 from pre_commit.util import cmd_output
12 from pre_commit.util import memoize_by_cwd
13
14
15 logger = logging.getLogger('pre_commit')
16
17
18 def get_root():
19 try:
20 return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()
21 except CalledProcessError:
22 raise FatalError(
23 'Called from outside of the gits. Please cd to a git repository.'
24 )
25
26
27 def is_in_merge_conflict():
28 return (
29 os.path.exists(os.path.join('.git', 'MERGE_MSG')) and
30 os.path.exists(os.path.join('.git', 'MERGE_HEAD'))
31 )
32
33
34 def parse_merge_msg_for_conflicts(merge_msg):
35 # Conflicted files start with tabs
36 return [
37 line.lstrip('#').strip()
38 for line in merge_msg.splitlines()
39 # '#\t' for git 2.4.1
40 if line.startswith(('\t', '#\t'))
41 ]
42
43
44 @memoize_by_cwd
45 def get_conflicted_files():
46 logger.info('Checking merge-conflict files only.')
47 # Need to get the conflicted files from the MERGE_MSG because they could
48 # have resolved the conflict by choosing one side or the other
49 merge_msg = open(os.path.join('.git', 'MERGE_MSG')).read()
50 merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)
51
52 # This will get the rest of the changes made after the merge.
53 # If they resolved the merge conflict by choosing a mesh of both sides
54 # this will also include the conflicted files
55 tree_hash = cmd_output('git', 'write-tree')[1].strip()
56 merge_diff_filenames = cmd_output(
57 'git', 'diff', '-m', tree_hash, 'HEAD', 'MERGE_HEAD', '--name-only',
58 )[1].splitlines()
59 return set(merge_conflict_filenames) | set(merge_diff_filenames)
60
61
62 @memoize_by_cwd
63 def get_staged_files():
64 return cmd_output('git', 'diff', '--staged', '--name-only')[1].splitlines()
65
66
67 @memoize_by_cwd
68 def get_all_files():
69 return cmd_output('git', 'ls-files')[1].splitlines()
70
71
72 def get_files_matching(all_file_list_strategy):
73 @functools.wraps(all_file_list_strategy)
74 @memoize_by_cwd
75 def wrapper(include_expr, exclude_expr):
76 include_regex = re.compile(include_expr)
77 exclude_regex = re.compile(exclude_expr)
78 return set(
79 filename
80 for filename in all_file_list_strategy()
81 if (
82 include_regex.search(filename) and
83 not exclude_regex.search(filename) and
84 os.path.exists(filename)
85 )
86 )
87 return wrapper
88
89
90 get_staged_files_matching = get_files_matching(get_staged_files)
91 get_all_files_matching = get_files_matching(get_all_files)
92 get_conflicted_files_matching = get_files_matching(get_conflicted_files)
93
[end of pre_commit/git.py]
[start of pre_commit/runner.py]
1 from __future__ import unicode_literals
2
3 import os
4 import os.path
5
6 from cached_property import cached_property
7
8 import pre_commit.constants as C
9 from pre_commit import git
10 from pre_commit.clientlib.validate_config import load_config
11 from pre_commit.repository import Repository
12 from pre_commit.store import Store
13
14
15 class Runner(object):
16 """A `Runner` represents the execution context of the hooks. Notably the
17 repository under test.
18 """
19
20 def __init__(self, git_root):
21 self.git_root = git_root
22
23 @classmethod
24 def create(cls):
25 """Creates a PreCommitRunner by doing the following:
26 - Finds the root of the current git repository
27 - chdirs to that directory
28 """
29 root = git.get_root()
30 os.chdir(root)
31 return cls(root)
32
33 @cached_property
34 def config_file_path(self):
35 return os.path.join(self.git_root, C.CONFIG_FILE)
36
37 @cached_property
38 def repositories(self):
39 """Returns a tuple of the configured repositories."""
40 config = load_config(self.config_file_path)
41 repositories = tuple(Repository.create(x, self.store) for x in config)
42 for repository in repositories:
43 repository.require_installed()
44 return repositories
45
46 def get_hook_path(self, hook_type):
47 return os.path.join(self.git_root, '.git', 'hooks', hook_type)
48
49 @cached_property
50 def pre_commit_path(self):
51 return self.get_hook_path('pre-commit')
52
53 @cached_property
54 def pre_push_path(self):
55 return self.get_hook_path('pre-push')
56
57 @cached_property
58 def cmd_runner(self):
59 # TODO: remove this and inline runner.store.cmd_runner
60 return self.store.cmd_runner
61
62 @cached_property
63 def store(self):
64 return Store()
65
[end of pre_commit/runner.py]
[start of pre_commit/main.py]
1 from __future__ import unicode_literals
2
3 import argparse
4 import os
5 import sys
6
7 import pkg_resources
8
9 from pre_commit import color
10 from pre_commit import five
11 from pre_commit.commands.autoupdate import autoupdate
12 from pre_commit.commands.clean import clean
13 from pre_commit.commands.install_uninstall import install
14 from pre_commit.commands.install_uninstall import uninstall
15 from pre_commit.commands.run import run
16 from pre_commit.error_handler import error_handler
17 from pre_commit.runner import Runner
18
19
20 # https://github.com/pre-commit/pre-commit/issues/217
21 # On OSX, making a virtualenv using pyvenv at . causes `virtualenv` and `pip`
22 # to install packages to the wrong place. We don't want anything to deal with
23 # pyvenv
24 os.environ.pop('__PYVENV_LAUNCHER__', None)
25 # https://github.com/pre-commit/pre-commit/issues/300
26 # In git 2.6.3 (maybe others), git exports this while running pre-commit hooks
27 os.environ.pop('GIT_WORK_TREE', None)
28
29
30 def main(argv=None):
31 argv = argv if argv is not None else sys.argv[1:]
32 argv = [five.to_text(arg) for arg in argv]
33 parser = argparse.ArgumentParser()
34
35 # http://stackoverflow.com/a/8521644/812183
36 parser.add_argument(
37 '-V', '--version',
38 action='version',
39 version='%(prog)s {0}'.format(
40 pkg_resources.get_distribution('pre-commit').version
41 )
42 )
43
44 subparsers = parser.add_subparsers(dest='command')
45
46 install_parser = subparsers.add_parser(
47 'install', help='Install the pre-commit script.',
48 )
49 install_parser.add_argument(
50 '-f', '--overwrite', action='store_true',
51 help='Overwrite existing hooks / remove migration mode.',
52 )
53 install_parser.add_argument(
54 '--install-hooks', action='store_true',
55 help=(
56 'Whether to install hook environments for all environments '
57 'in the config file.'
58 ),
59 )
60 install_parser.add_argument(
61 '-t', '--hook-type', choices=('pre-commit', 'pre-push'),
62 default='pre-commit',
63 )
64
65 uninstall_parser = subparsers.add_parser(
66 'uninstall', help='Uninstall the pre-commit script.',
67 )
68 uninstall_parser.add_argument(
69 '-t', '--hook-type', choices=('pre-commit', 'pre-push'),
70 default='pre-commit',
71 )
72
73 subparsers.add_parser('clean', help='Clean out pre-commit files.')
74
75 subparsers.add_parser(
76 'autoupdate',
77 help="Auto-update pre-commit config to the latest repos' versions.",
78 )
79
80 run_parser = subparsers.add_parser('run', help='Run hooks.')
81 run_parser.add_argument('hook', nargs='?', help='A single hook-id to run')
82 run_parser.add_argument(
83 '--color', default='auto', type=color.use_color,
84 help='Whether to use color in output. Defaults to `auto`',
85 )
86 run_parser.add_argument(
87 '--no-stash', default=False, action='store_true',
88 help='Use this option to prevent auto stashing of unstaged files.',
89 )
90 run_parser.add_argument(
91 '--verbose', '-v', action='store_true', default=False,
92 )
93 run_parser.add_argument(
94 '--origin', '-o',
95 help='The origin branch\'s commit_id when using `git push`',
96 )
97 run_parser.add_argument(
98 '--source', '-s',
99 help='The remote branch\'s commit_id when using `git push`',
100 )
101 run_parser.add_argument(
102 '--allow-unstaged-config', default=False, action='store_true',
103 help='Allow an unstaged config to be present. Note that this will'
104 'be stashed before parsing unless --no-stash is specified'
105 )
106 run_parser.add_argument(
107 '--hook-stage', choices=('commit', 'push'), default='commit',
108 help='The stage during which the hook is fired e.g. commit or push',
109 )
110 run_mutex_group = run_parser.add_mutually_exclusive_group(required=False)
111 run_mutex_group.add_argument(
112 '--all-files', '-a', action='store_true', default=False,
113 help='Run on all the files in the repo. Implies --no-stash.',
114 )
115 run_mutex_group.add_argument(
116 '--files', nargs='*', help='Specific filenames to run hooks on.',
117 )
118
119 help = subparsers.add_parser(
120 'help', help='Show help for a specific command.'
121 )
122 help.add_argument('help_cmd', nargs='?', help='Command to show help for.')
123
124 # Argparse doesn't really provide a way to use a `default` subparser
125 if len(argv) == 0:
126 argv = ['run']
127 args = parser.parse_args(argv)
128
129 if args.command == 'help':
130 if args.help_cmd:
131 parser.parse_args([args.help_cmd, '--help'])
132 else:
133 parser.parse_args(['--help'])
134
135 with error_handler():
136 runner = Runner.create()
137
138 if args.command == 'install':
139 return install(
140 runner, overwrite=args.overwrite, hooks=args.install_hooks,
141 hook_type=args.hook_type,
142 )
143 elif args.command == 'uninstall':
144 return uninstall(runner, hook_type=args.hook_type)
145 elif args.command == 'clean':
146 return clean(runner)
147 elif args.command == 'autoupdate':
148 return autoupdate(runner)
149 elif args.command == 'run':
150 return run(runner, args)
151 else:
152 raise NotImplementedError(
153 'Command {0} not implemented.'.format(args.command)
154 )
155
156 raise AssertionError(
157 'Command {0} failed to exit with a returncode'.format(args.command)
158 )
159
160
161 if __name__ == '__main__':
162 exit(main())
163
[end of pre_commit/main.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | golden_diff:
diff --git a/pre_commit/git.py b/pre_commit/git.py
--- a/pre_commit/git.py
+++ b/pre_commit/git.py
@@ -24,10 +24,18 @@
)
+def get_git_dir(git_root):
+ return os.path.normpath(os.path.join(
+ git_root,
+ cmd_output('git', 'rev-parse', '--git-dir', cwd=git_root)[1].strip(),
+ ))
+
+
def is_in_merge_conflict():
+ git_dir = get_git_dir('.')
return (
- os.path.exists(os.path.join('.git', 'MERGE_MSG')) and
- os.path.exists(os.path.join('.git', 'MERGE_HEAD'))
+ os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and
+ os.path.exists(os.path.join(git_dir, 'MERGE_HEAD'))
)
@@ -46,7 +54,7 @@
logger.info('Checking merge-conflict files only.')
# Need to get the conflicted files from the MERGE_MSG because they could
# have resolved the conflict by choosing one side or the other
- merge_msg = open(os.path.join('.git', 'MERGE_MSG')).read()
+ merge_msg = open(os.path.join(get_git_dir('.'), 'MERGE_MSG')).read()
merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)
# This will get the rest of the changes made after the merge.
diff --git a/pre_commit/main.py b/pre_commit/main.py
--- a/pre_commit/main.py
+++ b/pre_commit/main.py
@@ -25,6 +25,13 @@
# https://github.com/pre-commit/pre-commit/issues/300
# In git 2.6.3 (maybe others), git exports this while running pre-commit hooks
os.environ.pop('GIT_WORK_TREE', None)
+# In git 1.9.1 (maybe others), git exports these while running pre-commit hooks
+# in submodules. In the general case this causes problems.
+# These are covered by test_install_in_submodule_and_run
+# Causes git clone to clone wrong thing
+os.environ.pop('GIT_DIR', None)
+# Causes 'error invalid object ...' during commit
+os.environ.pop('GIT_INDEX_FILE', None)
def main(argv=None):
diff --git a/pre_commit/runner.py b/pre_commit/runner.py
--- a/pre_commit/runner.py
+++ b/pre_commit/runner.py
@@ -30,6 +30,10 @@
os.chdir(root)
return cls(root)
+ @cached_property
+ def git_dir(self):
+ return git.get_git_dir(self.git_root)
+
@cached_property
def config_file_path(self):
return os.path.join(self.git_root, C.CONFIG_FILE)
@@ -44,7 +48,7 @@
return repositories
def get_hook_path(self, hook_type):
- return os.path.join(self.git_root, '.git', 'hooks', hook_type)
+ return os.path.join(self.git_dir, 'hooks', hook_type)
@cached_property
def pre_commit_path(self):
| {"golden_diff": "diff --git a/pre_commit/git.py b/pre_commit/git.py\n--- a/pre_commit/git.py\n+++ b/pre_commit/git.py\n@@ -24,10 +24,18 @@\n )\n \n \n+def get_git_dir(git_root):\n+ return os.path.normpath(os.path.join(\n+ git_root,\n+ cmd_output('git', 'rev-parse', '--git-dir', cwd=git_root)[1].strip(),\n+ ))\n+\n+\n def is_in_merge_conflict():\n+ git_dir = get_git_dir('.')\n return (\n- os.path.exists(os.path.join('.git', 'MERGE_MSG')) and\n- os.path.exists(os.path.join('.git', 'MERGE_HEAD'))\n+ os.path.exists(os.path.join(git_dir, 'MERGE_MSG')) and\n+ os.path.exists(os.path.join(git_dir, 'MERGE_HEAD'))\n )\n \n \n@@ -46,7 +54,7 @@\n logger.info('Checking merge-conflict files only.')\n # Need to get the conflicted files from the MERGE_MSG because they could\n # have resolved the conflict by choosing one side or the other\n- merge_msg = open(os.path.join('.git', 'MERGE_MSG')).read()\n+ merge_msg = open(os.path.join(get_git_dir('.'), 'MERGE_MSG')).read()\n merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)\n \n # This will get the rest of the changes made after the merge.\ndiff --git a/pre_commit/main.py b/pre_commit/main.py\n--- a/pre_commit/main.py\n+++ b/pre_commit/main.py\n@@ -25,6 +25,13 @@\n # https://github.com/pre-commit/pre-commit/issues/300\n # In git 2.6.3 (maybe others), git exports this while running pre-commit hooks\n os.environ.pop('GIT_WORK_TREE', None)\n+# In git 1.9.1 (maybe others), git exports these while running pre-commit hooks\n+# in submodules. In the general case this causes problems.\n+# These are covered by test_install_in_submodule_and_run\n+# Causes git clone to clone wrong thing\n+os.environ.pop('GIT_DIR', None)\n+# Causes 'error invalid object ...' during commit\n+os.environ.pop('GIT_INDEX_FILE', None)\n \n \n def main(argv=None):\ndiff --git a/pre_commit/runner.py b/pre_commit/runner.py\n--- a/pre_commit/runner.py\n+++ b/pre_commit/runner.py\n@@ -30,6 +30,10 @@\n os.chdir(root)\n return cls(root)\n \n+ @cached_property\n+ def git_dir(self):\n+ return git.get_git_dir(self.git_root)\n+\n @cached_property\n def config_file_path(self):\n return os.path.join(self.git_root, C.CONFIG_FILE)\n@@ -44,7 +48,7 @@\n return repositories\n \n def get_hook_path(self, hook_type):\n- return os.path.join(self.git_root, '.git', 'hooks', hook_type)\n+ return os.path.join(self.git_dir, 'hooks', hook_type)\n \n @cached_property\n def pre_commit_path(self):\n", "issue": "Does not work within submodules\nI'm getting: \n\n```\nAn unexpected error has occurred: NotADirectoryError: [Errno 20] Not a directory: '/home/quentin/chef-repo/cookbooks/ssmtp-cookbook/.git/hooks/pre-commit'\n```\n\nchef-repo is my primary repository and ssmtp-cookbook a git submodule of that. \n\n**ssmtp-cookbook/.git file contents:**\n\n```\ngitdir: ../../.git/modules/cookbooks/ssmtp-cookbook\n```\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport functools\nimport logging\nimport os\nimport os.path\nimport re\n\nfrom pre_commit.errors import FatalError\nfrom pre_commit.util import CalledProcessError\nfrom pre_commit.util import cmd_output\nfrom pre_commit.util import memoize_by_cwd\n\n\nlogger = logging.getLogger('pre_commit')\n\n\ndef get_root():\n try:\n return cmd_output('git', 'rev-parse', '--show-toplevel')[1].strip()\n except CalledProcessError:\n raise FatalError(\n 'Called from outside of the gits. 
Please cd to a git repository.'\n )\n\n\ndef is_in_merge_conflict():\n return (\n os.path.exists(os.path.join('.git', 'MERGE_MSG')) and\n os.path.exists(os.path.join('.git', 'MERGE_HEAD'))\n )\n\n\ndef parse_merge_msg_for_conflicts(merge_msg):\n # Conflicted files start with tabs\n return [\n line.lstrip('#').strip()\n for line in merge_msg.splitlines()\n # '#\\t' for git 2.4.1\n if line.startswith(('\\t', '#\\t'))\n ]\n\n\n@memoize_by_cwd\ndef get_conflicted_files():\n logger.info('Checking merge-conflict files only.')\n # Need to get the conflicted files from the MERGE_MSG because they could\n # have resolved the conflict by choosing one side or the other\n merge_msg = open(os.path.join('.git', 'MERGE_MSG')).read()\n merge_conflict_filenames = parse_merge_msg_for_conflicts(merge_msg)\n\n # This will get the rest of the changes made after the merge.\n # If they resolved the merge conflict by choosing a mesh of both sides\n # this will also include the conflicted files\n tree_hash = cmd_output('git', 'write-tree')[1].strip()\n merge_diff_filenames = cmd_output(\n 'git', 'diff', '-m', tree_hash, 'HEAD', 'MERGE_HEAD', '--name-only',\n )[1].splitlines()\n return set(merge_conflict_filenames) | set(merge_diff_filenames)\n\n\n@memoize_by_cwd\ndef get_staged_files():\n return cmd_output('git', 'diff', '--staged', '--name-only')[1].splitlines()\n\n\n@memoize_by_cwd\ndef get_all_files():\n return cmd_output('git', 'ls-files')[1].splitlines()\n\n\ndef get_files_matching(all_file_list_strategy):\n @functools.wraps(all_file_list_strategy)\n @memoize_by_cwd\n def wrapper(include_expr, exclude_expr):\n include_regex = re.compile(include_expr)\n exclude_regex = re.compile(exclude_expr)\n return set(\n filename\n for filename in all_file_list_strategy()\n if (\n include_regex.search(filename) and\n not exclude_regex.search(filename) and\n os.path.exists(filename)\n )\n )\n return wrapper\n\n\nget_staged_files_matching = get_files_matching(get_staged_files)\nget_all_files_matching = get_files_matching(get_all_files)\nget_conflicted_files_matching = get_files_matching(get_conflicted_files)\n", "path": "pre_commit/git.py"}, {"content": "from __future__ import unicode_literals\n\nimport os\nimport os.path\n\nfrom cached_property import cached_property\n\nimport pre_commit.constants as C\nfrom pre_commit import git\nfrom pre_commit.clientlib.validate_config import load_config\nfrom pre_commit.repository import Repository\nfrom pre_commit.store import Store\n\n\nclass Runner(object):\n \"\"\"A `Runner` represents the execution context of the hooks. 
Notably the\n repository under test.\n \"\"\"\n\n def __init__(self, git_root):\n self.git_root = git_root\n\n @classmethod\n def create(cls):\n \"\"\"Creates a PreCommitRunner by doing the following:\n - Finds the root of the current git repository\n - chdirs to that directory\n \"\"\"\n root = git.get_root()\n os.chdir(root)\n return cls(root)\n\n @cached_property\n def config_file_path(self):\n return os.path.join(self.git_root, C.CONFIG_FILE)\n\n @cached_property\n def repositories(self):\n \"\"\"Returns a tuple of the configured repositories.\"\"\"\n config = load_config(self.config_file_path)\n repositories = tuple(Repository.create(x, self.store) for x in config)\n for repository in repositories:\n repository.require_installed()\n return repositories\n\n def get_hook_path(self, hook_type):\n return os.path.join(self.git_root, '.git', 'hooks', hook_type)\n\n @cached_property\n def pre_commit_path(self):\n return self.get_hook_path('pre-commit')\n\n @cached_property\n def pre_push_path(self):\n return self.get_hook_path('pre-push')\n\n @cached_property\n def cmd_runner(self):\n # TODO: remove this and inline runner.store.cmd_runner\n return self.store.cmd_runner\n\n @cached_property\n def store(self):\n return Store()\n", "path": "pre_commit/runner.py"}, {"content": "from __future__ import unicode_literals\n\nimport argparse\nimport os\nimport sys\n\nimport pkg_resources\n\nfrom pre_commit import color\nfrom pre_commit import five\nfrom pre_commit.commands.autoupdate import autoupdate\nfrom pre_commit.commands.clean import clean\nfrom pre_commit.commands.install_uninstall import install\nfrom pre_commit.commands.install_uninstall import uninstall\nfrom pre_commit.commands.run import run\nfrom pre_commit.error_handler import error_handler\nfrom pre_commit.runner import Runner\n\n\n# https://github.com/pre-commit/pre-commit/issues/217\n# On OSX, making a virtualenv using pyvenv at . causes `virtualenv` and `pip`\n# to install packages to the wrong place. 
We don't want anything to deal with\n# pyvenv\nos.environ.pop('__PYVENV_LAUNCHER__', None)\n# https://github.com/pre-commit/pre-commit/issues/300\n# In git 2.6.3 (maybe others), git exports this while running pre-commit hooks\nos.environ.pop('GIT_WORK_TREE', None)\n\n\ndef main(argv=None):\n argv = argv if argv is not None else sys.argv[1:]\n argv = [five.to_text(arg) for arg in argv]\n parser = argparse.ArgumentParser()\n\n # http://stackoverflow.com/a/8521644/812183\n parser.add_argument(\n '-V', '--version',\n action='version',\n version='%(prog)s {0}'.format(\n pkg_resources.get_distribution('pre-commit').version\n )\n )\n\n subparsers = parser.add_subparsers(dest='command')\n\n install_parser = subparsers.add_parser(\n 'install', help='Install the pre-commit script.',\n )\n install_parser.add_argument(\n '-f', '--overwrite', action='store_true',\n help='Overwrite existing hooks / remove migration mode.',\n )\n install_parser.add_argument(\n '--install-hooks', action='store_true',\n help=(\n 'Whether to install hook environments for all environments '\n 'in the config file.'\n ),\n )\n install_parser.add_argument(\n '-t', '--hook-type', choices=('pre-commit', 'pre-push'),\n default='pre-commit',\n )\n\n uninstall_parser = subparsers.add_parser(\n 'uninstall', help='Uninstall the pre-commit script.',\n )\n uninstall_parser.add_argument(\n '-t', '--hook-type', choices=('pre-commit', 'pre-push'),\n default='pre-commit',\n )\n\n subparsers.add_parser('clean', help='Clean out pre-commit files.')\n\n subparsers.add_parser(\n 'autoupdate',\n help=\"Auto-update pre-commit config to the latest repos' versions.\",\n )\n\n run_parser = subparsers.add_parser('run', help='Run hooks.')\n run_parser.add_argument('hook', nargs='?', help='A single hook-id to run')\n run_parser.add_argument(\n '--color', default='auto', type=color.use_color,\n help='Whether to use color in output. Defaults to `auto`',\n )\n run_parser.add_argument(\n '--no-stash', default=False, action='store_true',\n help='Use this option to prevent auto stashing of unstaged files.',\n )\n run_parser.add_argument(\n '--verbose', '-v', action='store_true', default=False,\n )\n run_parser.add_argument(\n '--origin', '-o',\n help='The origin branch\\'s commit_id when using `git push`',\n )\n run_parser.add_argument(\n '--source', '-s',\n help='The remote branch\\'s commit_id when using `git push`',\n )\n run_parser.add_argument(\n '--allow-unstaged-config', default=False, action='store_true',\n help='Allow an unstaged config to be present. Note that this will'\n 'be stashed before parsing unless --no-stash is specified'\n )\n run_parser.add_argument(\n '--hook-stage', choices=('commit', 'push'), default='commit',\n help='The stage during which the hook is fired e.g. commit or push',\n )\n run_mutex_group = run_parser.add_mutually_exclusive_group(required=False)\n run_mutex_group.add_argument(\n '--all-files', '-a', action='store_true', default=False,\n help='Run on all the files in the repo. 
Implies --no-stash.',\n )\n run_mutex_group.add_argument(\n '--files', nargs='*', help='Specific filenames to run hooks on.',\n )\n\n help = subparsers.add_parser(\n 'help', help='Show help for a specific command.'\n )\n help.add_argument('help_cmd', nargs='?', help='Command to show help for.')\n\n # Argparse doesn't really provide a way to use a `default` subparser\n if len(argv) == 0:\n argv = ['run']\n args = parser.parse_args(argv)\n\n if args.command == 'help':\n if args.help_cmd:\n parser.parse_args([args.help_cmd, '--help'])\n else:\n parser.parse_args(['--help'])\n\n with error_handler():\n runner = Runner.create()\n\n if args.command == 'install':\n return install(\n runner, overwrite=args.overwrite, hooks=args.install_hooks,\n hook_type=args.hook_type,\n )\n elif args.command == 'uninstall':\n return uninstall(runner, hook_type=args.hook_type)\n elif args.command == 'clean':\n return clean(runner)\n elif args.command == 'autoupdate':\n return autoupdate(runner)\n elif args.command == 'run':\n return run(runner, args)\n else:\n raise NotImplementedError(\n 'Command {0} not implemented.'.format(args.command)\n )\n\n raise AssertionError(\n 'Command {0} failed to exit with a returncode'.format(args.command)\n )\n\n\nif __name__ == '__main__':\n exit(main())\n", "path": "pre_commit/main.py"}]} | 3,687 | 682 |
problem_id: gh_patches_debug_9704 | source: rasdani/github-patches | task_type: git_diff | in_source_id: Textualize__textual-441 | prompt:
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[textual][bug] CSS rule parsing fails when the name of the colour we pass contains a digit
So while this is working correctly:
```css
#my_widget {
background: dark_cyan;
}
```
...this fails:
```css
#my_widget {
background: turquoise4;
}
```
...with the following error:
```
• failed to parse color 'turquoise';
• failed to parse 'turquoise' as a color;
```
(maybe just a regex that doesn't take into account the fact that colour names can include numbers?)
</issue>
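The two error lines above suggest the value is being cut at the first digit, i.e. `turquoise4` is tokenized as `turquoise` before colour parsing ever sees the trailing `4`. The before/after patterns in the snippet below mirror the `TOKEN` regex change in the reference patch further down in this row; the snippet itself is only an illustration:

```python
# Illustration of the tokenization difference; the two patterns mirror the
# TOKEN regex before and after the patch shown later in this row.
import re

old_token = re.compile(r"[a-zA-Z_-]+")              # stops at the first digit
new_token = re.compile(r"[a-zA-Z][a-zA-Z0-9_-]*")   # digits allowed after the first letter

print(old_token.match("turquoise4").group())   # -> "turquoise" (the 4 is left behind)
print(new_token.match("turquoise4").group())   # -> "turquoise4"
print(old_token.match("dark_cyan").group())    # -> "dark_cyan" (why that name worked)
```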
<code>
[start of src/textual/css/tokenize.py]
1 from __future__ import annotations
2
3 import re
4 from typing import Iterable
5
6 from textual.css.tokenizer import Expect, Tokenizer, Token
7
8 COMMENT_START = r"\/\*"
9 SCALAR = r"\-?\d+\.?\d*(?:fr|%|w|h|vw|vh)"
10 DURATION = r"\d+\.?\d*(?:ms|s)"
11 NUMBER = r"\-?\d+\.?\d*"
12 COLOR = r"\#[0-9a-fA-F]{8}|\#[0-9a-fA-F]{6}|rgb\(\-?\d+\.?\d*,\-?\d+\.?\d*,\-?\d+\.?\d*\)|rgba\(\-?\d+\.?\d*,\-?\d+\.?\d*,\-?\d+\.?\d*,\-?\d+\.?\d*\)"
13 KEY_VALUE = r"[a-zA-Z_-][a-zA-Z0-9_-]*=[0-9a-zA-Z_\-\/]+"
14 TOKEN = "[a-zA-Z_-]+"
15 STRING = r"\".*?\""
16 VARIABLE_REF = r"\$[a-zA-Z0-9_\-]+"
17
18 # Values permitted in variable and rule declarations.
19 DECLARATION_VALUES = {
20 "scalar": SCALAR,
21 "duration": DURATION,
22 "number": NUMBER,
23 "color": COLOR,
24 "key_value": KEY_VALUE,
25 "token": TOKEN,
26 "string": STRING,
27 "variable_ref": VARIABLE_REF,
28 }
29
30 # The tokenizers "expectation" while at the root/highest level of scope
31 # in the CSS file. At this level we might expect to see selectors, comments,
32 # variable definitions etc.
33 expect_root_scope = Expect(
34 whitespace=r"\s+",
35 comment_start=COMMENT_START,
36 selector_start_id=r"\#[a-zA-Z_\-][a-zA-Z0-9_\-]*",
37 selector_start_class=r"\.[a-zA-Z_\-][a-zA-Z0-9_\-]*",
38 selector_start_universal=r"\*",
39 selector_start=r"[a-zA-Z_\-]+",
40 variable_name=rf"{VARIABLE_REF}:",
41 ).expect_eof(True)
42
43 # After a variable declaration e.g. "$warning-text: TOKENS;"
44 # for tokenizing variable value ------^~~~~~~^
45 expect_variable_name_continue = Expect(
46 variable_value_end=r"\n|;",
47 whitespace=r"\s+",
48 comment_start=COMMENT_START,
49 **DECLARATION_VALUES,
50 ).expect_eof(True)
51
52 expect_comment_end = Expect(
53 comment_end=re.escape("*/"),
54 )
55
56 # After we come across a selector in CSS e.g. ".my-class", we may
57 # find other selectors, pseudo-classes... e.g. ".my-class :hover"
58 expect_selector_continue = Expect(
59 whitespace=r"\s+",
60 comment_start=COMMENT_START,
61 pseudo_class=r"\:[a-zA-Z_-]+",
62 selector_id=r"\#[a-zA-Z_\-][a-zA-Z0-9_\-]*",
63 selector_class=r"\.[a-zA-Z_\-][a-zA-Z0-9_\-]*",
64 selector_universal=r"\*",
65 selector=r"[a-zA-Z_\-]+",
66 combinator_child=">",
67 new_selector=r",",
68 declaration_set_start=r"\{",
69 )
70
71 # A rule declaration e.g. "text: red;"
72 # ^---^
73 expect_declaration = Expect(
74 whitespace=r"\s+",
75 comment_start=COMMENT_START,
76 declaration_name=r"[a-zA-Z_\-]+\:",
77 declaration_set_end=r"\}",
78 )
79
80 expect_declaration_solo = Expect(
81 whitespace=r"\s+",
82 comment_start=COMMENT_START,
83 declaration_name=r"[a-zA-Z_\-]+\:",
84 declaration_set_end=r"\}",
85 ).expect_eof(True)
86
87 # The value(s)/content from a rule declaration e.g. "text: red;"
88 # ^---^
89 expect_declaration_content = Expect(
90 declaration_end=r";",
91 whitespace=r"\s+",
92 comment_start=COMMENT_START,
93 **DECLARATION_VALUES,
94 important=r"\!important",
95 comma=",",
96 declaration_set_end=r"\}",
97 )
98
99 expect_declaration_content_solo = Expect(
100 declaration_end=r";",
101 whitespace=r"\s+",
102 comment_start=COMMENT_START,
103 **DECLARATION_VALUES,
104 important=r"\!important",
105 comma=",",
106 declaration_set_end=r"\}",
107 ).expect_eof(True)
108
109
110 class TokenizerState:
111 """State machine for the tokenizer.
112
113 Attributes:
114 EXPECT: The initial expectation of the tokenizer. Since we start tokenizing
115 at the root scope, we might expect to see either a variable or selector, for example.
116 STATE_MAP: Maps token names to Expects, defines the sets of valid tokens
117 that we'd expect to see next, given the current token. For example, if
118 we've just processed a variable declaration name, we next expect to see
119 the value of that variable.
120 """
121
122 EXPECT = expect_root_scope
123 STATE_MAP = {
124 "variable_name": expect_variable_name_continue,
125 "variable_value_end": expect_root_scope,
126 "selector_start": expect_selector_continue,
127 "selector_start_id": expect_selector_continue,
128 "selector_start_class": expect_selector_continue,
129 "selector_start_universal": expect_selector_continue,
130 "selector_id": expect_selector_continue,
131 "selector_class": expect_selector_continue,
132 "selector_universal": expect_selector_continue,
133 "declaration_set_start": expect_declaration,
134 "declaration_name": expect_declaration_content,
135 "declaration_end": expect_declaration,
136 "declaration_set_end": expect_root_scope,
137 }
138
139 def __call__(self, code: str, path: str) -> Iterable[Token]:
140 tokenizer = Tokenizer(code, path=path)
141 expect = self.EXPECT
142 get_token = tokenizer.get_token
143 get_state = self.STATE_MAP.get
144 while True:
145 token = get_token(expect)
146 name = token.name
147 if name == "comment_start":
148 tokenizer.skip_to(expect_comment_end)
149 continue
150 elif name == "eof":
151 break
152 expect = get_state(name, expect)
153 yield token
154
155
156 class DeclarationTokenizerState(TokenizerState):
157 EXPECT = expect_declaration_solo
158 STATE_MAP = {
159 "declaration_name": expect_declaration_content,
160 "declaration_end": expect_declaration_solo,
161 }
162
163
164 class ValueTokenizerState(TokenizerState):
165 EXPECT = expect_declaration_content_solo
166
167
168 tokenize = TokenizerState()
169 tokenize_declarations = DeclarationTokenizerState()
170 tokenize_value = ValueTokenizerState()
171
172
173 def tokenize_values(values: dict[str, str]) -> dict[str, list[Token]]:
174 """Tokens the values in a dict of strings.
175
176 Args:
177 values (dict[str, str]): A mapping of CSS variable name on to a value, to be
178 added to the CSS context.
179
180 Returns:
181 dict[str, list[Token]]: A mapping of name on to a list of tokens,
182 """
183 value_tokens = {
184 name: list(tokenize_value(value, "__name__")) for name, value in values.items()
185 }
186 return value_tokens
187
188
189 if __name__ == "__main__":
190 from rich import print
191
192 css = """#something {
193
194 color: rgb(10,12,23)
195 }
196 """
197 # transition: offset 500 in_out_cubic;
198 tokens = tokenize(css, __name__)
199 print(list(tokens))
200
201 print(tokenize_values({"primary": "rgb(10,20,30)", "secondary": "#ff00ff"}))
202
[end of src/textual/css/tokenize.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | golden_diff:
diff --git a/src/textual/css/tokenize.py b/src/textual/css/tokenize.py
--- a/src/textual/css/tokenize.py
+++ b/src/textual/css/tokenize.py
@@ -11,7 +11,7 @@
NUMBER = r"\-?\d+\.?\d*"
COLOR = r"\#[0-9a-fA-F]{8}|\#[0-9a-fA-F]{6}|rgb\(\-?\d+\.?\d*,\-?\d+\.?\d*,\-?\d+\.?\d*\)|rgba\(\-?\d+\.?\d*,\-?\d+\.?\d*,\-?\d+\.?\d*,\-?\d+\.?\d*\)"
KEY_VALUE = r"[a-zA-Z_-][a-zA-Z0-9_-]*=[0-9a-zA-Z_\-\/]+"
-TOKEN = "[a-zA-Z_-]+"
+TOKEN = "[a-zA-Z][a-zA-Z0-9_-]*"
STRING = r"\".*?\""
VARIABLE_REF = r"\$[a-zA-Z0-9_\-]+"
| {"golden_diff": "diff --git a/src/textual/css/tokenize.py b/src/textual/css/tokenize.py\n--- a/src/textual/css/tokenize.py\n+++ b/src/textual/css/tokenize.py\n@@ -11,7 +11,7 @@\n NUMBER = r\"\\-?\\d+\\.?\\d*\"\n COLOR = r\"\\#[0-9a-fA-F]{8}|\\#[0-9a-fA-F]{6}|rgb\\(\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*\\)|rgba\\(\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*\\)\"\n KEY_VALUE = r\"[a-zA-Z_-][a-zA-Z0-9_-]*=[0-9a-zA-Z_\\-\\/]+\"\n-TOKEN = \"[a-zA-Z_-]+\"\n+TOKEN = \"[a-zA-Z][a-zA-Z0-9_-]*\"\n STRING = r\"\\\".*?\\\"\"\n VARIABLE_REF = r\"\\$[a-zA-Z0-9_\\-]+\"\n", "issue": "[textual][bug] CSS rule parsing fails when the name of the colour we pass contains a digit\nSo while this is working correctly:\r\n```css\r\n#my_widget {\r\n background: dark_cyan;\r\n}\r\n```\r\n\r\n...this fails:\r\n```css\r\n#my_widget {\r\n background: turquoise4;\r\n}\r\n```\r\n...with the following error:\r\n```\r\n \u2022 failed to parse color 'turquoise'; \r\n \u2022 failed to parse 'turquoise' as a color; \r\n```\r\n(maybe just a regex that doesn't take into account the fact that colour names can include numbers?)\n", "before_files": [{"content": "from __future__ import annotations\n\nimport re\nfrom typing import Iterable\n\nfrom textual.css.tokenizer import Expect, Tokenizer, Token\n\nCOMMENT_START = r\"\\/\\*\"\nSCALAR = r\"\\-?\\d+\\.?\\d*(?:fr|%|w|h|vw|vh)\"\nDURATION = r\"\\d+\\.?\\d*(?:ms|s)\"\nNUMBER = r\"\\-?\\d+\\.?\\d*\"\nCOLOR = r\"\\#[0-9a-fA-F]{8}|\\#[0-9a-fA-F]{6}|rgb\\(\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*\\)|rgba\\(\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*,\\-?\\d+\\.?\\d*\\)\"\nKEY_VALUE = r\"[a-zA-Z_-][a-zA-Z0-9_-]*=[0-9a-zA-Z_\\-\\/]+\"\nTOKEN = \"[a-zA-Z_-]+\"\nSTRING = r\"\\\".*?\\\"\"\nVARIABLE_REF = r\"\\$[a-zA-Z0-9_\\-]+\"\n\n# Values permitted in variable and rule declarations.\nDECLARATION_VALUES = {\n \"scalar\": SCALAR,\n \"duration\": DURATION,\n \"number\": NUMBER,\n \"color\": COLOR,\n \"key_value\": KEY_VALUE,\n \"token\": TOKEN,\n \"string\": STRING,\n \"variable_ref\": VARIABLE_REF,\n}\n\n# The tokenizers \"expectation\" while at the root/highest level of scope\n# in the CSS file. At this level we might expect to see selectors, comments,\n# variable definitions etc.\nexpect_root_scope = Expect(\n whitespace=r\"\\s+\",\n comment_start=COMMENT_START,\n selector_start_id=r\"\\#[a-zA-Z_\\-][a-zA-Z0-9_\\-]*\",\n selector_start_class=r\"\\.[a-zA-Z_\\-][a-zA-Z0-9_\\-]*\",\n selector_start_universal=r\"\\*\",\n selector_start=r\"[a-zA-Z_\\-]+\",\n variable_name=rf\"{VARIABLE_REF}:\",\n).expect_eof(True)\n\n# After a variable declaration e.g. \"$warning-text: TOKENS;\"\n# for tokenizing variable value ------^~~~~~~^\nexpect_variable_name_continue = Expect(\n variable_value_end=r\"\\n|;\",\n whitespace=r\"\\s+\",\n comment_start=COMMENT_START,\n **DECLARATION_VALUES,\n).expect_eof(True)\n\nexpect_comment_end = Expect(\n comment_end=re.escape(\"*/\"),\n)\n\n# After we come across a selector in CSS e.g. \".my-class\", we may\n# find other selectors, pseudo-classes... e.g. \".my-class :hover\"\nexpect_selector_continue = Expect(\n whitespace=r\"\\s+\",\n comment_start=COMMENT_START,\n pseudo_class=r\"\\:[a-zA-Z_-]+\",\n selector_id=r\"\\#[a-zA-Z_\\-][a-zA-Z0-9_\\-]*\",\n selector_class=r\"\\.[a-zA-Z_\\-][a-zA-Z0-9_\\-]*\",\n selector_universal=r\"\\*\",\n selector=r\"[a-zA-Z_\\-]+\",\n combinator_child=\">\",\n new_selector=r\",\",\n declaration_set_start=r\"\\{\",\n)\n\n# A rule declaration e.g. 
\"text: red;\"\n# ^---^\nexpect_declaration = Expect(\n whitespace=r\"\\s+\",\n comment_start=COMMENT_START,\n declaration_name=r\"[a-zA-Z_\\-]+\\:\",\n declaration_set_end=r\"\\}\",\n)\n\nexpect_declaration_solo = Expect(\n whitespace=r\"\\s+\",\n comment_start=COMMENT_START,\n declaration_name=r\"[a-zA-Z_\\-]+\\:\",\n declaration_set_end=r\"\\}\",\n).expect_eof(True)\n\n# The value(s)/content from a rule declaration e.g. \"text: red;\"\n# ^---^\nexpect_declaration_content = Expect(\n declaration_end=r\";\",\n whitespace=r\"\\s+\",\n comment_start=COMMENT_START,\n **DECLARATION_VALUES,\n important=r\"\\!important\",\n comma=\",\",\n declaration_set_end=r\"\\}\",\n)\n\nexpect_declaration_content_solo = Expect(\n declaration_end=r\";\",\n whitespace=r\"\\s+\",\n comment_start=COMMENT_START,\n **DECLARATION_VALUES,\n important=r\"\\!important\",\n comma=\",\",\n declaration_set_end=r\"\\}\",\n).expect_eof(True)\n\n\nclass TokenizerState:\n \"\"\"State machine for the tokenizer.\n\n Attributes:\n EXPECT: The initial expectation of the tokenizer. Since we start tokenizing\n at the root scope, we might expect to see either a variable or selector, for example.\n STATE_MAP: Maps token names to Expects, defines the sets of valid tokens\n that we'd expect to see next, given the current token. For example, if\n we've just processed a variable declaration name, we next expect to see\n the value of that variable.\n \"\"\"\n\n EXPECT = expect_root_scope\n STATE_MAP = {\n \"variable_name\": expect_variable_name_continue,\n \"variable_value_end\": expect_root_scope,\n \"selector_start\": expect_selector_continue,\n \"selector_start_id\": expect_selector_continue,\n \"selector_start_class\": expect_selector_continue,\n \"selector_start_universal\": expect_selector_continue,\n \"selector_id\": expect_selector_continue,\n \"selector_class\": expect_selector_continue,\n \"selector_universal\": expect_selector_continue,\n \"declaration_set_start\": expect_declaration,\n \"declaration_name\": expect_declaration_content,\n \"declaration_end\": expect_declaration,\n \"declaration_set_end\": expect_root_scope,\n }\n\n def __call__(self, code: str, path: str) -> Iterable[Token]:\n tokenizer = Tokenizer(code, path=path)\n expect = self.EXPECT\n get_token = tokenizer.get_token\n get_state = self.STATE_MAP.get\n while True:\n token = get_token(expect)\n name = token.name\n if name == \"comment_start\":\n tokenizer.skip_to(expect_comment_end)\n continue\n elif name == \"eof\":\n break\n expect = get_state(name, expect)\n yield token\n\n\nclass DeclarationTokenizerState(TokenizerState):\n EXPECT = expect_declaration_solo\n STATE_MAP = {\n \"declaration_name\": expect_declaration_content,\n \"declaration_end\": expect_declaration_solo,\n }\n\n\nclass ValueTokenizerState(TokenizerState):\n EXPECT = expect_declaration_content_solo\n\n\ntokenize = TokenizerState()\ntokenize_declarations = DeclarationTokenizerState()\ntokenize_value = ValueTokenizerState()\n\n\ndef tokenize_values(values: dict[str, str]) -> dict[str, list[Token]]:\n \"\"\"Tokens the values in a dict of strings.\n\n Args:\n values (dict[str, str]): A mapping of CSS variable name on to a value, to be\n added to the CSS context.\n\n Returns:\n dict[str, list[Token]]: A mapping of name on to a list of tokens,\n \"\"\"\n value_tokens = {\n name: list(tokenize_value(value, \"__name__\")) for name, value in values.items()\n }\n return value_tokens\n\n\nif __name__ == \"__main__\":\n from rich import print\n\n css = \"\"\"#something {\n\n color: rgb(10,12,23)\n }\n 
\"\"\"\n # transition: offset 500 in_out_cubic;\n tokens = tokenize(css, __name__)\n print(list(tokens))\n\n print(tokenize_values({\"primary\": \"rgb(10,20,30)\", \"secondary\": \"#ff00ff\"}))\n", "path": "src/textual/css/tokenize.py"}]} | 2,780 | 239 |
problem_id: gh_patches_debug_18348 | source: rasdani/github-patches | task_type: git_diff | in_source_id: elastic__apm-agent-python-1414 | prompt:
You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Sanic 21.9.2 breaks exception tracking
For yet unknown reasons, Sanic 21.9.2+ broke exception tracking. The changes between 21.9.1 and 21.9.2 are here:
https://github.com/sanic-org/sanic/compare/v21.9.1...v21.9.2
The test failures are here: https://apm-ci.elastic.co/blue/organizations/jenkins/apm-agent-python%2Fapm-agent-python-nightly-mbp%2Fmaster/detail/master/787/tests/
Example:
----------------------------------------------------- Captured log call ------------------------------------------------------INFO sanic.root:testing.py:82 http://127.0.0.1:50003/fallback-value-error
ERROR sanic.error:request.py:193 Exception occurred in one of response middleware handlers
Traceback (most recent call last):
File "/home/user/.local/lib/python3.10/site-packages/sanic_routing/router.py", line 79, in resolve
route, param_basket = self.find_route(
File "", line 24, in find_route
sanic_routing.exceptions.NotFound: Not Found
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/.local/lib/python3.10/site-packages/sanic/router.py", line 38, in _get
return self.resolve(
File "/home/user/.local/lib/python3.10/site-packages/sanic_routing/router.py", line 96, in resolve
raise self.exception(str(e), path=path)
sanic_routing.exceptions.NotFound: Not Found
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "handle_request", line 26, in handle_request
Any,
File "/home/user/.local/lib/python3.10/site-packages/sanic/router.py", line 66, in get
return self._get(path, method, host)
File "/home/user/.local/lib/python3.10/site-packages/sanic/router.py", line 44, in _get
raise NotFound("Requested URL {} not found".format(e.path))
sanic.exceptions.NotFound: Requested URL /fallback-value-error not found
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/user/.local/lib/python3.10/site-packages/sanic/request.py", line 187, in respond
response = await self.app._run_response_middleware(
File "_run_response_middleware", line 22, in _run_response_middleware
from ssl import Purpose, SSLContext, create_default_context
File "/app/elasticapm/contrib/sanic/__init__.py", line 279, in _instrument_response
await set_context(
File "/app/elasticapm/contrib/asyncio/traces.py", line 93, in set_context
data = await data()
File "/app/elasticapm/contrib/sanic/utils.py", line 121, in get_response_info
if config.capture_body and "octet-stream" not in response.content_type:
TypeError: argument of type 'NoneType' is not iterable
Checking for `response.content_type is not None` in `elasticapm/contrib/sanic/utils.py:121` doesn't resolve the issue.
@ahopkins do you happen to have an idea what could cause these failures?
</issue>
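The immediate `TypeError` at the bottom of that traceback is simply a substring test against `None`; the snippet below reproduces it and shows a defensive form of the check. This only explains the crash in the log, not the underlying regression, and the reference patch further down in this row takes a different route (filtering the keys copied from `request.app.config`).

```python
# Minimal reproduction of the TypeError reported in the traceback above.
content_type = None  # what the response carries in the failing case

try:
    "octet-stream" not in content_type
except TypeError as exc:
    print(exc)  # argument of type 'NoneType' is not iterable

# None-safe variant of the same check, shown only to illustrate the failure mode;
# it is not the fix that was ultimately applied.
capture = content_type is not None and "octet-stream" not in content_type
print(capture)  # False while content_type is None
```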
<code>
[start of elasticapm/contrib/sanic/utils.py]
1 # BSD 3-Clause License
2 #
3 # Copyright (c) 2012, the Sentry Team, see AUTHORS for more details
4 # Copyright (c) 2019, Elasticsearch BV
5 # All rights reserved.
6 #
7 # Redistribution and use in source and binary forms, with or without
8 # modification, are permitted provided that the following conditions are met:
9 #
10 # * Redistributions of source code must retain the above copyright notice, this
11 # list of conditions and the following disclaimer.
12 #
13 # * Redistributions in binary form must reproduce the above copyright notice,
14 # this list of conditions and the following disclaimer in the documentation
15 # and/or other materials provided with the distribution.
16 #
17 # * Neither the name of the copyright holder nor the names of its
18 # contributors may be used to endorse or promote products derived from
19 # this software without specific prior written permission.
20 #
21 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22 # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24 # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25 # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26 # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27 # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29 # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30
31 from typing import Dict
32
33 from sanic import Sanic
34 from sanic import __version__ as version
35 from sanic.cookies import CookieJar
36 from sanic.request import Request
37 from sanic.response import HTTPResponse
38
39 from elasticapm.base import Client
40 from elasticapm.conf import Config, constants
41 from elasticapm.contrib.sanic.sanic_types import EnvInfoType
42 from elasticapm.utils import compat, get_url_dict
43
44
45 class SanicAPMConfig(dict):
46 def __init__(self, app: Sanic):
47 super(SanicAPMConfig, self).__init__()
48 for _key, _v in app.config.items():
49 if _key.startswith("ELASTIC_APM_"):
50 self[_key.replace("ELASTIC_APM_", "")] = _v
51
52
53 def get_env(request: Request) -> EnvInfoType:
54 """
55 Extract Server Environment Information from the current Request's context
56 :param request: Inbound HTTP Request
57 :return: A tuple containing the attribute and it's corresponding value for the current Application ENV
58 """
59 for _attr in ("server_name", "server_port", "version"):
60 if hasattr(request, _attr):
61 yield _attr, getattr(request, _attr)
62
63
64 # noinspection PyBroadException
65 async def get_request_info(config: Config, request: Request) -> Dict[str, str]:
66 """
67 Generate a traceable context information from the inbound HTTP request
68
69 :param config: Application Configuration used to tune the way the data is captured
70 :param request: Inbound HTTP request
71 :return: A dictionary containing the context information of the ongoing transaction
72 """
73 env = dict(get_env(request=request))
74 env.update(dict(request.app.config))
75 result = {
76 "env": env,
77 "method": request.method,
78 "socket": {
79 "remote_address": _get_client_ip(request=request),
80 "encrypted": request.scheme in ["https", "wss"],
81 },
82 "cookies": request.cookies,
83 "http_version": request.version,
84 }
85 if config.capture_headers:
86 result["headers"] = dict(request.headers)
87
88 if request.method in constants.HTTP_WITH_BODY and config.capture_body:
89 if request.content_type.startswith("multipart") or "octet-stream" in request.content_type:
90 result["body"] = "[DISCARDED]"
91 try:
92 result["body"] = request.body.decode("utf-8")
93 except Exception:
94 pass
95
96 if "body" not in result:
97 result["body"] = "[REDACTED]"
98 result["url"] = get_url_dict(request.url)
99 return result
100
101
102 async def get_response_info(config: Config, response: HTTPResponse) -> Dict[str, str]:
103 """
104 Generate a traceable context information from the inbound HTTP Response
105
106 :param config: Application Configuration used to tune the way the data is captured
107 :param response: outbound HTTP Response
108 :return: A dictionary containing the context information of the ongoing transaction
109 """
110 result = {
111 "cookies": _transform_response_cookie(cookies=response.cookies),
112 "finished": True,
113 "headers_sent": True,
114 }
115 if isinstance(response.status, compat.integer_types):
116 result["status_code"] = response.status
117
118 if config.capture_headers:
119 result["headers"] = dict(response.headers)
120
121 if config.capture_body and "octet-stream" not in response.content_type:
122 result["body"] = response.body.decode("utf-8")
123 else:
124 result["body"] = "[REDACTED]"
125
126 return result
127
128
129 def _get_client_ip(request: Request) -> str:
130 """Extract Client IP Address Information"""
131 try:
132 return request.ip or request.socket[0] or request.remote_addr
133 except IndexError:
134 return request.remote_addr
135
136
137 def make_client(client_cls=Client, **defaults) -> Client:
138 if "framework_name" not in defaults:
139 defaults["framework_name"] = "sanic"
140 defaults["framework_version"] = version
141
142 return client_cls(**defaults)
143
144
145 def _transform_response_cookie(cookies: CookieJar) -> Dict[str, str]:
146 """Transform the Sanic's CookieJar instance into a Normal dictionary to build the context"""
147 return {k: {"value": v.value, "path": v["path"]} for k, v in cookies.items()}
148
[end of elasticapm/contrib/sanic/utils.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | golden_diff:
diff --git a/elasticapm/contrib/sanic/utils.py b/elasticapm/contrib/sanic/utils.py
--- a/elasticapm/contrib/sanic/utils.py
+++ b/elasticapm/contrib/sanic/utils.py
@@ -28,6 +28,7 @@
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+from string import ascii_uppercase
from typing import Dict
from sanic import Sanic
@@ -71,7 +72,8 @@
:return: A dictionary containing the context information of the ongoing transaction
"""
env = dict(get_env(request=request))
- env.update(dict(request.app.config))
+ app_config = {k: v for k, v in dict(request.app.config).items() if all(letter in ascii_uppercase for letter in k)}
+ env.update(app_config)
result = {
"env": env,
"method": request.method,
| {"golden_diff": "diff --git a/elasticapm/contrib/sanic/utils.py b/elasticapm/contrib/sanic/utils.py\n--- a/elasticapm/contrib/sanic/utils.py\n+++ b/elasticapm/contrib/sanic/utils.py\n@@ -28,6 +28,7 @@\n # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n \n+from string import ascii_uppercase\n from typing import Dict\n \n from sanic import Sanic\n@@ -71,7 +72,8 @@\n :return: A dictionary containing the context information of the ongoing transaction\n \"\"\"\n env = dict(get_env(request=request))\n- env.update(dict(request.app.config))\n+ app_config = {k: v for k, v in dict(request.app.config).items() if all(letter in ascii_uppercase for letter in k)}\n+ env.update(app_config)\n result = {\n \"env\": env,\n \"method\": request.method,\n", "issue": "Sanic 21.9.2 breaks exception tracking\nFor yet unknown reasons, Sanic 21.9.2+ broke exception tracking. The changes between 21.9.1 and 21.9.2 are here:\r\n\r\nhttps://github.com/sanic-org/sanic/compare/v21.9.1...v21.9.2\r\n\r\nThe test failures are here: https://apm-ci.elastic.co/blue/organizations/jenkins/apm-agent-python%2Fapm-agent-python-nightly-mbp%2Fmaster/detail/master/787/tests/\r\n\r\nExample:\r\n\r\n ----------------------------------------------------- Captured log call ------------------------------------------------------INFO sanic.root:testing.py:82 http://127.0.0.1:50003/fallback-value-error\r\n ERROR sanic.error:request.py:193 Exception occurred in one of response middleware handlers\r\n Traceback (most recent call last):\r\n File \"/home/user/.local/lib/python3.10/site-packages/sanic_routing/router.py\", line 79, in resolve\r\n route, param_basket = self.find_route(\r\n File \"\", line 24, in find_route\r\n sanic_routing.exceptions.NotFound: Not Found\r\n\r\n During handling of the above exception, another exception occurred:\r\n\r\n Traceback (most recent call last):\r\n File \"/home/user/.local/lib/python3.10/site-packages/sanic/router.py\", line 38, in _get\r\n return self.resolve(\r\n File \"/home/user/.local/lib/python3.10/site-packages/sanic_routing/router.py\", line 96, in resolve\r\n raise self.exception(str(e), path=path)\r\n sanic_routing.exceptions.NotFound: Not Found\r\n\r\n During handling of the above exception, another exception occurred:\r\n\r\n Traceback (most recent call last):\r\n File \"handle_request\", line 26, in handle_request\r\n Any,\r\n File \"/home/user/.local/lib/python3.10/site-packages/sanic/router.py\", line 66, in get\r\n return self._get(path, method, host)\r\n File \"/home/user/.local/lib/python3.10/site-packages/sanic/router.py\", line 44, in _get\r\n raise NotFound(\"Requested URL {} not found\".format(e.path))\r\n sanic.exceptions.NotFound: Requested URL /fallback-value-error not found\r\n\r\n During handling of the above exception, another exception occurred:\r\n\r\n Traceback (most recent call last):\r\n File \"/home/user/.local/lib/python3.10/site-packages/sanic/request.py\", line 187, in respond\r\n response = await self.app._run_response_middleware(\r\n File \"_run_response_middleware\", line 22, in _run_response_middleware\r\n from ssl import Purpose, SSLContext, create_default_context\r\n File \"/app/elasticapm/contrib/sanic/__init__.py\", line 279, in _instrument_response\r\n await set_context(\r\n File \"/app/elasticapm/contrib/asyncio/traces.py\", line 93, in set_context\r\n data = await data()\r\n File \"/app/elasticapm/contrib/sanic/utils.py\", line 121, in 
get_response_info\r\n if config.capture_body and \"octet-stream\" not in response.content_type:\r\n TypeError: argument of type 'NoneType' is not iterable\r\n\r\nChecking for `response.content_type is not None` in `elasticapm/contrib/sanic/utils.py:121` doesn't resolve the issue.\r\n\r\n@ahopkins do you happen to have an idea what could cause these failures?\n", "before_files": [{"content": "# BSD 3-Clause License\n#\n# Copyright (c) 2012, the Sentry Team, see AUTHORS for more details\n# Copyright (c) 2019, Elasticsearch BV\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n#\n# * Redistributions of source code must retain the above copyright notice, this\n# list of conditions and the following disclaimer.\n#\n# * Redistributions in binary form must reproduce the above copyright notice,\n# this list of conditions and the following disclaimer in the documentation\n# and/or other materials provided with the distribution.\n#\n# * Neither the name of the copyright holder nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\nfrom typing import Dict\n\nfrom sanic import Sanic\nfrom sanic import __version__ as version\nfrom sanic.cookies import CookieJar\nfrom sanic.request import Request\nfrom sanic.response import HTTPResponse\n\nfrom elasticapm.base import Client\nfrom elasticapm.conf import Config, constants\nfrom elasticapm.contrib.sanic.sanic_types import EnvInfoType\nfrom elasticapm.utils import compat, get_url_dict\n\n\nclass SanicAPMConfig(dict):\n def __init__(self, app: Sanic):\n super(SanicAPMConfig, self).__init__()\n for _key, _v in app.config.items():\n if _key.startswith(\"ELASTIC_APM_\"):\n self[_key.replace(\"ELASTIC_APM_\", \"\")] = _v\n\n\ndef get_env(request: Request) -> EnvInfoType:\n \"\"\"\n Extract Server Environment Information from the current Request's context\n :param request: Inbound HTTP Request\n :return: A tuple containing the attribute and it's corresponding value for the current Application ENV\n \"\"\"\n for _attr in (\"server_name\", \"server_port\", \"version\"):\n if hasattr(request, _attr):\n yield _attr, getattr(request, _attr)\n\n\n# noinspection PyBroadException\nasync def get_request_info(config: Config, request: Request) -> Dict[str, str]:\n \"\"\"\n Generate a traceable context information from the inbound HTTP request\n\n :param config: Application Configuration used to tune the way the data is captured\n :param request: Inbound HTTP request\n :return: A dictionary containing the context information of the ongoing transaction\n \"\"\"\n env = dict(get_env(request=request))\n env.update(dict(request.app.config))\n result = 
{\n \"env\": env,\n \"method\": request.method,\n \"socket\": {\n \"remote_address\": _get_client_ip(request=request),\n \"encrypted\": request.scheme in [\"https\", \"wss\"],\n },\n \"cookies\": request.cookies,\n \"http_version\": request.version,\n }\n if config.capture_headers:\n result[\"headers\"] = dict(request.headers)\n\n if request.method in constants.HTTP_WITH_BODY and config.capture_body:\n if request.content_type.startswith(\"multipart\") or \"octet-stream\" in request.content_type:\n result[\"body\"] = \"[DISCARDED]\"\n try:\n result[\"body\"] = request.body.decode(\"utf-8\")\n except Exception:\n pass\n\n if \"body\" not in result:\n result[\"body\"] = \"[REDACTED]\"\n result[\"url\"] = get_url_dict(request.url)\n return result\n\n\nasync def get_response_info(config: Config, response: HTTPResponse) -> Dict[str, str]:\n \"\"\"\n Generate a traceable context information from the inbound HTTP Response\n\n :param config: Application Configuration used to tune the way the data is captured\n :param response: outbound HTTP Response\n :return: A dictionary containing the context information of the ongoing transaction\n \"\"\"\n result = {\n \"cookies\": _transform_response_cookie(cookies=response.cookies),\n \"finished\": True,\n \"headers_sent\": True,\n }\n if isinstance(response.status, compat.integer_types):\n result[\"status_code\"] = response.status\n\n if config.capture_headers:\n result[\"headers\"] = dict(response.headers)\n\n if config.capture_body and \"octet-stream\" not in response.content_type:\n result[\"body\"] = response.body.decode(\"utf-8\")\n else:\n result[\"body\"] = \"[REDACTED]\"\n\n return result\n\n\ndef _get_client_ip(request: Request) -> str:\n \"\"\"Extract Client IP Address Information\"\"\"\n try:\n return request.ip or request.socket[0] or request.remote_addr\n except IndexError:\n return request.remote_addr\n\n\ndef make_client(client_cls=Client, **defaults) -> Client:\n if \"framework_name\" not in defaults:\n defaults[\"framework_name\"] = \"sanic\"\n defaults[\"framework_version\"] = version\n\n return client_cls(**defaults)\n\n\ndef _transform_response_cookie(cookies: CookieJar) -> Dict[str, str]:\n \"\"\"Transform the Sanic's CookieJar instance into a Normal dictionary to build the context\"\"\"\n return {k: {\"value\": v.value, \"path\": v[\"path\"]} for k, v in cookies.items()}\n", "path": "elasticapm/contrib/sanic/utils.py"}]} | 2,923 | 224 |
gh_patches_debug_27127 | rasdani/github-patches | git_diff | mindsdb__mindsdb-1011 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Endpoint to disable telemetry
Fairly self-explanatory: add an endpoint to the HTTP API that can be used to disable/enable the telemetry.
Endpoint to disable telemetry
Fairly self-explanatory: add an endpoint to the HTTP API that can be used to disable/enable the telemetry.
</issue>
<code>
[start of mindsdb/api/http/namespaces/util.py]
1 from flask import request
2 from flask_restx import Resource, abort
3 from flask import current_app as ca
4
5 from mindsdb.api.http.namespaces.configs.util import ns_conf
6 from mindsdb import __about__
7
8 @ns_conf.route('/ping')
9 class Ping(Resource):
10 @ns_conf.doc('get_ping')
11 def get(self):
12 '''Checks server avaliable'''
13 return {'status': 'ok'}
14
15 @ns_conf.route('/report_uuid')
16 class ReportUUID(Resource):
17 @ns_conf.doc('get_report_uuid')
18 def get(self):
19 metamodel_name = '___monitroing_metamodel___'
20 predictor = ca.mindsdb_native.create(metamodel_name)
21 return {
22 'report_uuid': predictor.report_uuid
23 }
24
[end of mindsdb/api/http/namespaces/util.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mindsdb/api/http/namespaces/util.py b/mindsdb/api/http/namespaces/util.py
--- a/mindsdb/api/http/namespaces/util.py
+++ b/mindsdb/api/http/namespaces/util.py
@@ -1,3 +1,4 @@
+import os
from flask import request
from flask_restx import Resource, abort
from flask import current_app as ca
@@ -5,6 +6,8 @@
from mindsdb.api.http.namespaces.configs.util import ns_conf
from mindsdb import __about__
+TELEMETRY_FILE = 'telemetry.lock'
+
@ns_conf.route('/ping')
class Ping(Resource):
@ns_conf.doc('get_ping')
@@ -21,3 +24,34 @@
return {
'report_uuid': predictor.report_uuid
}
+
+@ns_conf.route('/telemetry')
+class Telemetry(Resource):
+ @ns_conf.doc('get_telemetry_status')
+ def get(self):
+ status = "enabled" if is_telemetry_active() else "disabled"
+ return {"status": status}
+
+ @ns_conf.doc('set_telemetry')
+ def post(self):
+ data = request.json
+ action = data['action']
+ if str(action).lower() in ["true", "enable", "on"]:
+ enable_telemetry()
+ else:
+ disable_telemetry()
+
+
+def enable_telemetry():
+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)
+ if os.path.exists(path):
+ os.remove(path)
+
+def disable_telemetry():
+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)
+ with open(path, 'w') as _:
+ pass
+
+def is_telemetry_active():
+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)
+ return not os.path.exists(path)
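
The diff above gates telemetry on the presence of a `telemetry.lock` file in `storage_dir`. As a rough usage sketch (the base URL below is an assumption for illustration; the record does not state where this namespace is mounted or which host/port MindsDB serves HTTP on), a client could exercise the new route like this:

```python
import requests

# Hypothetical base URL: adjust host, port and prefix to your MindsDB HTTP setup.
BASE = "http://127.0.0.1:47334/api/util"

# GET handler added by the patch: reports "enabled" or "disabled".
print(requests.get(f"{BASE}/telemetry").json())

# A POST action outside {"true", "enable", "on"} creates telemetry.lock (disables).
requests.post(f"{BASE}/telemetry", json={"action": "disable"})

# A POST with "enable" removes the lock file again.
requests.post(f"{BASE}/telemetry", json={"action": "enable"})
```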
| {"golden_diff": "diff --git a/mindsdb/api/http/namespaces/util.py b/mindsdb/api/http/namespaces/util.py\n--- a/mindsdb/api/http/namespaces/util.py\n+++ b/mindsdb/api/http/namespaces/util.py\n@@ -1,3 +1,4 @@\n+import os\n from flask import request\n from flask_restx import Resource, abort\n from flask import current_app as ca\n@@ -5,6 +6,8 @@\n from mindsdb.api.http.namespaces.configs.util import ns_conf\n from mindsdb import __about__\n \n+TELEMETRY_FILE = 'telemetry.lock'\n+\n @ns_conf.route('/ping')\n class Ping(Resource):\n @ns_conf.doc('get_ping')\n@@ -21,3 +24,34 @@\n return {\n 'report_uuid': predictor.report_uuid\n }\n+\n+@ns_conf.route('/telemetry')\n+class Telemetry(Resource):\n+ @ns_conf.doc('get_telemetry_status')\n+ def get(self):\n+ status = \"enabled\" if is_telemetry_active() else \"disabled\"\n+ return {\"status\": status}\n+\n+ @ns_conf.doc('set_telemetry')\n+ def post(self):\n+ data = request.json\n+ action = data['action']\n+ if str(action).lower() in [\"true\", \"enable\", \"on\"]:\n+ enable_telemetry()\n+ else:\n+ disable_telemetry()\n+\n+\n+def enable_telemetry():\n+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n+ if os.path.exists(path):\n+ os.remove(path)\n+\n+def disable_telemetry():\n+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n+ with open(path, 'w') as _:\n+ pass\n+\n+def is_telemetry_active():\n+ path = os.path.join(ca.config_obj['storage_dir'], TELEMETRY_FILE)\n+ return not os.path.exists(path)\n", "issue": "Endpoint to disable telemtry\nfairly self explainatory, add and an endpoint to the HTTP API that can be used to disable/enable the telemtry.\r\n\nEndpoint to disable telemtry\nfairly self explainatory, add and an endpoint to the HTTP API that can be used to disable/enable the telemtry.\r\n\n", "before_files": [{"content": "from flask import request\nfrom flask_restx import Resource, abort\nfrom flask import current_app as ca\n\nfrom mindsdb.api.http.namespaces.configs.util import ns_conf\nfrom mindsdb import __about__\n\n@ns_conf.route('/ping')\nclass Ping(Resource):\n @ns_conf.doc('get_ping')\n def get(self):\n '''Checks server avaliable'''\n return {'status': 'ok'}\n\n@ns_conf.route('/report_uuid')\nclass ReportUUID(Resource):\n @ns_conf.doc('get_report_uuid')\n def get(self):\n metamodel_name = '___monitroing_metamodel___'\n predictor = ca.mindsdb_native.create(metamodel_name)\n return {\n 'report_uuid': predictor.report_uuid\n }\n", "path": "mindsdb/api/http/namespaces/util.py"}]} | 810 | 433 |
gh_patches_debug_67113 | rasdani/github-patches | git_diff | zestedesavoir__zds-site-5120 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
403 error after a search in the library
> Hello,
> 
> I hit a 403 error, "Vous n’avez pas les droits suffisants pour accéder à cette page." (you do not have sufficient rights to access this page), after running a search over the courses in the library.
Here is how it appeared:
> 
> - I arrived on the site logged out
> - I went to "Développement web" from the banner at the top of the site
> - From there I logged into my account (same tab, the regular button), which brought me back to the page
> - Then I searched for "PHP" in the search bar, which took me to [this link](https://zestedesavoir.com/rechercher/?q=PHP&models=content&from_library=on&category=informatique&subcategory=site-web)
> 
> The 403 error occurs when I tick 1 to 3 of the checkboxes (below the search bar) to filter the results and then click the "search" button in the bar again.
> 
> Here is [an example link](https://zestedesavoir.com/rechercher/?q=PHP&category=informatique&subcategory=site-web&from_library=on&models=) that triggers a 403 on my side.
> 
> Bye
Topic: https://zestedesavoir.com/forums/sujet/11609/erreur-403-apres-recherche-dans-la-bibliotheque/
*Sent from Zeste de Savoir*
</issue>
<code>
[start of zds/searchv2/forms.py]
1 import os
2 import random
3
4 from django import forms
5 from django.conf import settings
6 from django.utils.translation import ugettext_lazy as _
7
8 from crispy_forms.bootstrap import StrictButton
9 from crispy_forms.helper import FormHelper
10 from crispy_forms.layout import Layout, Field
11 from django.core.urlresolvers import reverse
12
13
14 class SearchForm(forms.Form):
15 q = forms.CharField(
16 label=_('Recherche'),
17 max_length=150,
18 required=False,
19 widget=forms.TextInput(
20 attrs={
21 'type': 'search',
22 'required': 'required'
23 }
24 )
25 )
26
27 choices = sorted(
28 [(k, v[0]) for k, v in settings.ZDS_APP['search']['search_groups'].items()],
29 key=lambda pair: pair[1]
30 )
31
32 models = forms.MultipleChoiceField(
33 label='',
34 widget=forms.CheckboxSelectMultiple,
35 required=False,
36 choices=choices
37 )
38
39 category = forms.CharField(widget=forms.HiddenInput, required=False)
40 subcategory = forms.CharField(widget=forms.HiddenInput, required=False)
41 from_library = forms.CharField(widget=forms.HiddenInput, required=False)
42
43 def __init__(self, *args, **kwargs):
44
45 super(SearchForm, self).__init__(*args, **kwargs)
46
47 self.helper = FormHelper()
48 self.helper.form_id = 'search-form'
49 self.helper.form_class = 'clearfix'
50 self.helper.form_method = 'get'
51 self.helper.form_action = reverse('search:query')
52
53 try:
54 with open(os.path.join(settings.BASE_DIR, 'suggestions.txt'), 'r') as suggestions_file:
55 suggestions = ', '.join(random.sample(suggestions_file.readlines(), 5)) + '…'
56 except OSError:
57 suggestions = _('Mathématiques, Droit, UDK, Langues, Python…')
58
59 self.fields['q'].widget.attrs['placeholder'] = suggestions
60
61 self.helper.layout = Layout(
62 Field('q'),
63 StrictButton('', type='submit', css_class='ico-after ico-search', title=_('Rechercher')),
64 Field('category'),
65 Field('subcategory'),
66 Field('from_library')
67 )
68
[end of zds/searchv2/forms.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/zds/searchv2/forms.py b/zds/searchv2/forms.py
--- a/zds/searchv2/forms.py
+++ b/zds/searchv2/forms.py
@@ -31,7 +31,12 @@
models = forms.MultipleChoiceField(
label='',
- widget=forms.CheckboxSelectMultiple,
+ widget=forms.CheckboxSelectMultiple(
+ attrs={
+ 'class': 'search-filters',
+ 'form': 'search-form'
+ }
+ ),
required=False,
choices=choices
)
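
The fix ties the checkboxes to the search form through the HTML `form` attribute: an input carrying `form="search-form"` is submitted with that form even when it is rendered outside the `<form>` element, which is presumably why the checked filters no longer produce the broken query string seen in the report. A minimal, self-contained sketch (field and form names here are illustrative, not taken from zds-site) of how widget `attrs` end up on every rendered checkbox:

```python
from django import forms

class DemoSearchForm(forms.Form):
    # Illustrative choices; the real form builds its choices from settings.
    models = forms.MultipleChoiceField(
        required=False,
        choices=[("content", "Contents"), ("topic", "Topics")],
        widget=forms.CheckboxSelectMultiple(
            attrs={"class": "search-filters", "form": "search-form"}
        ),
    )

# The attrs live on the widget and are emitted on every rendered checkbox,
# e.g. <input type="checkbox" ... class="search-filters" form="search-form">.
print(DemoSearchForm.base_fields["models"].widget.attrs)
```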
| {"golden_diff": "diff --git a/zds/searchv2/forms.py b/zds/searchv2/forms.py\n--- a/zds/searchv2/forms.py\n+++ b/zds/searchv2/forms.py\n@@ -31,7 +31,12 @@\n \n models = forms.MultipleChoiceField(\n label='',\n- widget=forms.CheckboxSelectMultiple,\n+ widget=forms.CheckboxSelectMultiple(\n+ attrs={\n+ 'class': 'search-filters',\n+ 'form': 'search-form'\n+ }\n+ ),\n required=False,\n choices=choices\n )\n", "issue": "Erreur 403 apr\u00e8s recherche dans la biblioth\u00e8que\n> Bonjour,\r\n> \r\n> Je tombe sur une erreur 403 \"Vous n\u2019avez pas les droits suffisants pour acc\u00e9der \u00e0 cette page.\" apr\u00e8s une recherche dans les cours pr\u00e9sents dans la biblioth\u00e8que.\r\nVoil\u00e0 comment elle est apparue :\r\n> \r\n> - Arriv\u00e9 sur le site d\u00e9connect\u00e9\r\n> - Je suis all\u00e9 dans \"D\u00e9veloppement web\" depuis le bandeau en haut du site\r\n> - A partir de l\u00e0 je me suis connect\u00e9 \u00e0 mon compte (m\u00eame onglet, bouton classique), ce qui m'a ramen\u00e9 sur la page\r\n> - Puis j'ai fait une recherche \"PHP\" dans la barre de rechercher, ce qui m'a amen\u00e9 sur [ce lien](https://zestedesavoir.com/rechercher/?q=PHP&models=content&from_library=on&category=informatique&subcategory=site-web)\r\n> \r\n> L'erreur 403 se produit lorsque je coche 1 \u00e0 3 cases (sous la barre de recherche), pour filtrer les r\u00e9sultats, et que je clique \u00e0 nouveau sur le bouton \"rechercher\" dans la barre.\r\n> \r\n> Voil\u00e0 [un exemple de lien](https://zestedesavoir.com/rechercher/?q=PHP&category=informatique&subcategory=site-web&from_library=on&models=) provoquant une erreur 403 de mon c\u00f4t\u00e9.\r\n> \r\n> Bye\r\n\r\nSujet : https://zestedesavoir.com/forums/sujet/11609/erreur-403-apres-recherche-dans-la-bibliotheque/\r\n*Envoy\u00e9 depuis Zeste de Savoir*\n", "before_files": [{"content": "import os\nimport random\n\nfrom django import forms\nfrom django.conf import settings\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom crispy_forms.bootstrap import StrictButton\nfrom crispy_forms.helper import FormHelper\nfrom crispy_forms.layout import Layout, Field\nfrom django.core.urlresolvers import reverse\n\n\nclass SearchForm(forms.Form):\n q = forms.CharField(\n label=_('Recherche'),\n max_length=150,\n required=False,\n widget=forms.TextInput(\n attrs={\n 'type': 'search',\n 'required': 'required'\n }\n )\n )\n\n choices = sorted(\n [(k, v[0]) for k, v in settings.ZDS_APP['search']['search_groups'].items()],\n key=lambda pair: pair[1]\n )\n\n models = forms.MultipleChoiceField(\n label='',\n widget=forms.CheckboxSelectMultiple,\n required=False,\n choices=choices\n )\n\n category = forms.CharField(widget=forms.HiddenInput, required=False)\n subcategory = forms.CharField(widget=forms.HiddenInput, required=False)\n from_library = forms.CharField(widget=forms.HiddenInput, required=False)\n\n def __init__(self, *args, **kwargs):\n\n super(SearchForm, self).__init__(*args, **kwargs)\n\n self.helper = FormHelper()\n self.helper.form_id = 'search-form'\n self.helper.form_class = 'clearfix'\n self.helper.form_method = 'get'\n self.helper.form_action = reverse('search:query')\n\n try:\n with open(os.path.join(settings.BASE_DIR, 'suggestions.txt'), 'r') as suggestions_file:\n suggestions = ', '.join(random.sample(suggestions_file.readlines(), 5)) + '\u2026'\n except OSError:\n suggestions = _('Math\u00e9matiques, Droit, UDK, Langues, Python\u2026')\n\n self.fields['q'].widget.attrs['placeholder'] = suggestions\n\n self.helper.layout = 
Layout(\n Field('q'),\n StrictButton('', type='submit', css_class='ico-after ico-search', title=_('Rechercher')),\n Field('category'),\n Field('subcategory'),\n Field('from_library')\n )\n", "path": "zds/searchv2/forms.py"}]} | 1,507 | 124 |
gh_patches_debug_34184 | rasdani/github-patches | git_diff | goauthentik__authentik-5163 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Outpost cannot be created in restricted namespaces
**Describe the bug**
Creating outposts in restricted namespaces does not work due to securityContext settings not getting copied over from main deployment.
**To Reproduce**
Steps to reproduce the behavior:
1. Create a restricted namespace:
* kubectl create ns test-authentik
 * kubectl label ns test-authentik pod-security.kubernetes.io/enforce=baseline --overwrite
2. Create a new Outpost, in configuration do:
kubernetes_namespace=test-authentik
**Expected behavior**
Outpost should be created successfully.
**Logs**
```bash
kubectl describe -n authentik replicasets.apps ak-outpost-default-ldap-outpost
...
Warning FailedCreate 6m51s replicaset-controller Error creating: pods "ak-outpost-default-ldap-outpost-7f4d464f8c-7g76k" is forbidden: violates PodSecurity "restricted:latest": allowPrivilegeEscalation != false (container "ldap" must set securityContext.allowPrivilegeEscalation=false), unrestricted capabilities (container "ldap" must set securityContext.capabilities.drop=["ALL"]), runAsNonRoot != true (pod or container "ldap" must set securityContext.runAsNonRoot=true), seccompProfile (pod or container "ldap" must set securityContext.seccompProfile.type to "RuntimeDefault" or "Localhost")
```
**Version and Deployment (please complete the following information):**
- authentik version: 23.3.1
- Deployment: helm
**Workaround**
Edit the new deployment to use the same securityContext settings as the main pod and container.
</issue>
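Each complaint in the admission error maps to a securityContext field. Below is a minimal sketch of the values the error message asks for, written with the `kubernetes` Python client that the controller code further down already uses; it illustrates the workaround and is not authentik's own code:

```python
from kubernetes.client import (
    V1Capabilities,
    V1PodSecurityContext,
    V1SeccompProfile,
    V1SecurityContext,
)

# Pod level: run as non-root and use the runtime's default seccomp profile.
pod_security_context = V1PodSecurityContext(
    run_as_non_root=True,
    seccomp_profile=V1SeccompProfile(type="RuntimeDefault"),
)

# Container level: forbid privilege escalation and drop all capabilities.
container_security_context = V1SecurityContext(
    allow_privilege_escalation=False,
    capabilities=V1Capabilities(drop=["ALL"]),
)
```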
<code>
[start of authentik/outposts/controllers/k8s/deployment.py]
1 """Kubernetes Deployment Reconciler"""
2 from typing import TYPE_CHECKING
3
4 from django.utils.text import slugify
5 from kubernetes.client import (
6 AppsV1Api,
7 V1Container,
8 V1ContainerPort,
9 V1Deployment,
10 V1DeploymentSpec,
11 V1EnvVar,
12 V1EnvVarSource,
13 V1LabelSelector,
14 V1ObjectMeta,
15 V1ObjectReference,
16 V1PodSpec,
17 V1PodTemplateSpec,
18 V1SecretKeySelector,
19 )
20
21 from authentik import __version__, get_full_version
22 from authentik.outposts.controllers.base import FIELD_MANAGER
23 from authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler
24 from authentik.outposts.controllers.k8s.triggers import NeedsUpdate
25 from authentik.outposts.controllers.k8s.utils import compare_ports
26 from authentik.outposts.models import Outpost
27
28 if TYPE_CHECKING:
29 from authentik.outposts.controllers.kubernetes import KubernetesController
30
31
32 class DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):
33 """Kubernetes Deployment Reconciler"""
34
35 outpost: Outpost
36
37 def __init__(self, controller: "KubernetesController") -> None:
38 super().__init__(controller)
39 self.api = AppsV1Api(controller.client)
40 self.outpost = self.controller.outpost
41
42 def reconcile(self, current: V1Deployment, reference: V1Deployment):
43 compare_ports(
44 current.spec.template.spec.containers[0].ports,
45 reference.spec.template.spec.containers[0].ports,
46 )
47 if current.spec.replicas != reference.spec.replicas:
48 raise NeedsUpdate()
49 if (
50 current.spec.template.spec.containers[0].image
51 != reference.spec.template.spec.containers[0].image
52 ):
53 raise NeedsUpdate()
54 super().reconcile(current, reference)
55
56 def get_pod_meta(self, **kwargs) -> dict[str, str]:
57 """Get common object metadata"""
58 kwargs.update(
59 {
60 "app.kubernetes.io/name": f"authentik-outpost-{self.outpost.type}",
61 "app.kubernetes.io/managed-by": "goauthentik.io",
62 "goauthentik.io/outpost-uuid": self.controller.outpost.uuid.hex,
63 "goauthentik.io/outpost-name": slugify(self.controller.outpost.name),
64 "goauthentik.io/outpost-type": str(self.controller.outpost.type),
65 }
66 )
67 return kwargs
68
69 def get_reference_object(self) -> V1Deployment:
70 """Get deployment object for outpost"""
71 # Generate V1ContainerPort objects
72 container_ports = []
73 for port in self.controller.deployment_ports:
74 container_ports.append(
75 V1ContainerPort(
76 container_port=port.inner_port or port.port,
77 name=port.name,
78 protocol=port.protocol.upper(),
79 )
80 )
81 meta = self.get_object_meta(name=self.name)
82 image_name = self.controller.get_container_image()
83 image_pull_secrets = self.outpost.config.kubernetes_image_pull_secrets
84 version = get_full_version()
85 return V1Deployment(
86 metadata=meta,
87 spec=V1DeploymentSpec(
88 replicas=self.outpost.config.kubernetes_replicas,
89 selector=V1LabelSelector(match_labels=self.get_pod_meta()),
90 template=V1PodTemplateSpec(
91 metadata=V1ObjectMeta(
92 labels=self.get_pod_meta(
93 **{
94 # Support istio-specific labels, but also use the standard k8s
95 # recommendations
96 "app.kubernetes.io/version": version,
97 "app": "authentik-outpost",
98 "version": version,
99 }
100 )
101 ),
102 spec=V1PodSpec(
103 image_pull_secrets=[
104 V1ObjectReference(name=secret) for secret in image_pull_secrets
105 ],
106 containers=[
107 V1Container(
108 name=str(self.outpost.type),
109 image=image_name,
110 ports=container_ports,
111 env=[
112 V1EnvVar(
113 name="AUTHENTIK_HOST",
114 value_from=V1EnvVarSource(
115 secret_key_ref=V1SecretKeySelector(
116 name=self.name,
117 key="authentik_host",
118 )
119 ),
120 ),
121 V1EnvVar(
122 name="AUTHENTIK_HOST_BROWSER",
123 value_from=V1EnvVarSource(
124 secret_key_ref=V1SecretKeySelector(
125 name=self.name,
126 key="authentik_host_browser",
127 )
128 ),
129 ),
130 V1EnvVar(
131 name="AUTHENTIK_TOKEN",
132 value_from=V1EnvVarSource(
133 secret_key_ref=V1SecretKeySelector(
134 name=self.name,
135 key="token",
136 )
137 ),
138 ),
139 V1EnvVar(
140 name="AUTHENTIK_INSECURE",
141 value_from=V1EnvVarSource(
142 secret_key_ref=V1SecretKeySelector(
143 name=self.name,
144 key="authentik_host_insecure",
145 )
146 ),
147 ),
148 ],
149 )
150 ],
151 ),
152 ),
153 ),
154 )
155
156 def create(self, reference: V1Deployment):
157 return self.api.create_namespaced_deployment(
158 self.namespace, reference, field_manager=FIELD_MANAGER
159 )
160
161 def delete(self, reference: V1Deployment):
162 return self.api.delete_namespaced_deployment(reference.metadata.name, self.namespace)
163
164 def retrieve(self) -> V1Deployment:
165 return self.api.read_namespaced_deployment(self.name, self.namespace)
166
167 def update(self, current: V1Deployment, reference: V1Deployment):
168 return self.api.patch_namespaced_deployment(
169 current.metadata.name,
170 self.namespace,
171 reference,
172 field_manager=FIELD_MANAGER,
173 )
174
[end of authentik/outposts/controllers/k8s/deployment.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/authentik/outposts/controllers/k8s/deployment.py b/authentik/outposts/controllers/k8s/deployment.py
--- a/authentik/outposts/controllers/k8s/deployment.py
+++ b/authentik/outposts/controllers/k8s/deployment.py
@@ -4,6 +4,7 @@
from django.utils.text import slugify
from kubernetes.client import (
AppsV1Api,
+ V1Capabilities,
V1Container,
V1ContainerPort,
V1Deployment,
@@ -13,9 +14,12 @@
V1LabelSelector,
V1ObjectMeta,
V1ObjectReference,
+ V1PodSecurityContext,
V1PodSpec,
V1PodTemplateSpec,
+ V1SeccompProfile,
V1SecretKeySelector,
+ V1SecurityContext,
)
from authentik import __version__, get_full_version
@@ -103,6 +107,12 @@
image_pull_secrets=[
V1ObjectReference(name=secret) for secret in image_pull_secrets
],
+ security_context=V1PodSecurityContext(
+ run_as_non_root=True,
+ seccomp_profile=V1SeccompProfile(
+ type="RuntimeDefault",
+ ),
+ ),
containers=[
V1Container(
name=str(self.outpost.type),
@@ -146,6 +156,12 @@
),
),
],
+ security_context=V1SecurityContext(
+ allow_privilege_escalation=False,
+ capabilities=V1Capabilities(
+ drop=["ALL"],
+ ),
+ ),
)
],
),
| {"golden_diff": "diff --git a/authentik/outposts/controllers/k8s/deployment.py b/authentik/outposts/controllers/k8s/deployment.py\n--- a/authentik/outposts/controllers/k8s/deployment.py\n+++ b/authentik/outposts/controllers/k8s/deployment.py\n@@ -4,6 +4,7 @@\n from django.utils.text import slugify\n from kubernetes.client import (\n AppsV1Api,\n+ V1Capabilities,\n V1Container,\n V1ContainerPort,\n V1Deployment,\n@@ -13,9 +14,12 @@\n V1LabelSelector,\n V1ObjectMeta,\n V1ObjectReference,\n+ V1PodSecurityContext,\n V1PodSpec,\n V1PodTemplateSpec,\n+ V1SeccompProfile,\n V1SecretKeySelector,\n+ V1SecurityContext,\n )\n \n from authentik import __version__, get_full_version\n@@ -103,6 +107,12 @@\n image_pull_secrets=[\n V1ObjectReference(name=secret) for secret in image_pull_secrets\n ],\n+ security_context=V1PodSecurityContext(\n+ run_as_non_root=True,\n+ seccomp_profile=V1SeccompProfile(\n+ type=\"RuntimeDefault\",\n+ ),\n+ ),\n containers=[\n V1Container(\n name=str(self.outpost.type),\n@@ -146,6 +156,12 @@\n ),\n ),\n ],\n+ security_context=V1SecurityContext(\n+ allow_privilege_escalation=False,\n+ capabilities=V1Capabilities(\n+ drop=[\"ALL\"],\n+ ),\n+ ),\n )\n ],\n ),\n", "issue": "Outpost cannot be created in restricted namespaces\n**Describe the bug**\r\nCreating outposts in restricted namespaces does not work due to securityContext settings not getting copied over from main deployment.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Create a restricted namespace:\r\n* kubectl create ns test-authentik\r\n* kubectl label ns test-authentikpod-security.kubernetes.io/enforce=baseline --overwrite\r\n2. Create a new Outpost, in configuration do:\r\nkubernetes_namespace=test-authentik\r\n\r\n**Expected behavior**\r\nOutpost should be created successfully.\r\n\r\n**Logs**\r\n```bash\r\nkubectl describe -n authentik replicasets.apps ak-outpost-default-ldap-outpost \r\n ...\r\n Warning FailedCreate 6m51s replicaset-controller Error creating: pods \"ak-outpost-default-ldap-outpost-7f4d464f8c-7g76k\" is forbidden: violates PodSecurity \"restricted:latest\": allowPrivilegeEscalation != false (container \"ldap\" must set securityContext.allowPrivilegeEscalation=false), unrestricted capabilities (container \"ldap\" must set securityContext.capabilities.drop=[\"ALL\"]), runAsNonRoot != true (pod or container \"ldap\" must set securityContext.runAsNonRoot=true), seccompProfile (pod or container \"ldap\" must set securityContext.seccompProfile.type to \"RuntimeDefault\" or \"Localhost\")\r\n ```\r\n\r\n**Version and Deployment (please complete the following information):**\r\n - authentik version: 23.3.1\r\n - Deployment: helm\r\n\r\n**Workaround**\r\nEdit deployment to use the same securityContext settings from main pod and container in new deployments.\r\n\n", "before_files": [{"content": "\"\"\"Kubernetes Deployment Reconciler\"\"\"\nfrom typing import TYPE_CHECKING\n\nfrom django.utils.text import slugify\nfrom kubernetes.client import (\n AppsV1Api,\n V1Container,\n V1ContainerPort,\n V1Deployment,\n V1DeploymentSpec,\n V1EnvVar,\n V1EnvVarSource,\n V1LabelSelector,\n V1ObjectMeta,\n V1ObjectReference,\n V1PodSpec,\n V1PodTemplateSpec,\n V1SecretKeySelector,\n)\n\nfrom authentik import __version__, get_full_version\nfrom authentik.outposts.controllers.base import FIELD_MANAGER\nfrom authentik.outposts.controllers.k8s.base import KubernetesObjectReconciler\nfrom authentik.outposts.controllers.k8s.triggers import NeedsUpdate\nfrom authentik.outposts.controllers.k8s.utils import 
compare_ports\nfrom authentik.outposts.models import Outpost\n\nif TYPE_CHECKING:\n from authentik.outposts.controllers.kubernetes import KubernetesController\n\n\nclass DeploymentReconciler(KubernetesObjectReconciler[V1Deployment]):\n \"\"\"Kubernetes Deployment Reconciler\"\"\"\n\n outpost: Outpost\n\n def __init__(self, controller: \"KubernetesController\") -> None:\n super().__init__(controller)\n self.api = AppsV1Api(controller.client)\n self.outpost = self.controller.outpost\n\n def reconcile(self, current: V1Deployment, reference: V1Deployment):\n compare_ports(\n current.spec.template.spec.containers[0].ports,\n reference.spec.template.spec.containers[0].ports,\n )\n if current.spec.replicas != reference.spec.replicas:\n raise NeedsUpdate()\n if (\n current.spec.template.spec.containers[0].image\n != reference.spec.template.spec.containers[0].image\n ):\n raise NeedsUpdate()\n super().reconcile(current, reference)\n\n def get_pod_meta(self, **kwargs) -> dict[str, str]:\n \"\"\"Get common object metadata\"\"\"\n kwargs.update(\n {\n \"app.kubernetes.io/name\": f\"authentik-outpost-{self.outpost.type}\",\n \"app.kubernetes.io/managed-by\": \"goauthentik.io\",\n \"goauthentik.io/outpost-uuid\": self.controller.outpost.uuid.hex,\n \"goauthentik.io/outpost-name\": slugify(self.controller.outpost.name),\n \"goauthentik.io/outpost-type\": str(self.controller.outpost.type),\n }\n )\n return kwargs\n\n def get_reference_object(self) -> V1Deployment:\n \"\"\"Get deployment object for outpost\"\"\"\n # Generate V1ContainerPort objects\n container_ports = []\n for port in self.controller.deployment_ports:\n container_ports.append(\n V1ContainerPort(\n container_port=port.inner_port or port.port,\n name=port.name,\n protocol=port.protocol.upper(),\n )\n )\n meta = self.get_object_meta(name=self.name)\n image_name = self.controller.get_container_image()\n image_pull_secrets = self.outpost.config.kubernetes_image_pull_secrets\n version = get_full_version()\n return V1Deployment(\n metadata=meta,\n spec=V1DeploymentSpec(\n replicas=self.outpost.config.kubernetes_replicas,\n selector=V1LabelSelector(match_labels=self.get_pod_meta()),\n template=V1PodTemplateSpec(\n metadata=V1ObjectMeta(\n labels=self.get_pod_meta(\n **{\n # Support istio-specific labels, but also use the standard k8s\n # recommendations\n \"app.kubernetes.io/version\": version,\n \"app\": \"authentik-outpost\",\n \"version\": version,\n }\n )\n ),\n spec=V1PodSpec(\n image_pull_secrets=[\n V1ObjectReference(name=secret) for secret in image_pull_secrets\n ],\n containers=[\n V1Container(\n name=str(self.outpost.type),\n image=image_name,\n ports=container_ports,\n env=[\n V1EnvVar(\n name=\"AUTHENTIK_HOST\",\n value_from=V1EnvVarSource(\n secret_key_ref=V1SecretKeySelector(\n name=self.name,\n key=\"authentik_host\",\n )\n ),\n ),\n V1EnvVar(\n name=\"AUTHENTIK_HOST_BROWSER\",\n value_from=V1EnvVarSource(\n secret_key_ref=V1SecretKeySelector(\n name=self.name,\n key=\"authentik_host_browser\",\n )\n ),\n ),\n V1EnvVar(\n name=\"AUTHENTIK_TOKEN\",\n value_from=V1EnvVarSource(\n secret_key_ref=V1SecretKeySelector(\n name=self.name,\n key=\"token\",\n )\n ),\n ),\n V1EnvVar(\n name=\"AUTHENTIK_INSECURE\",\n value_from=V1EnvVarSource(\n secret_key_ref=V1SecretKeySelector(\n name=self.name,\n key=\"authentik_host_insecure\",\n )\n ),\n ),\n ],\n )\n ],\n ),\n ),\n ),\n )\n\n def create(self, reference: V1Deployment):\n return self.api.create_namespaced_deployment(\n self.namespace, reference, field_manager=FIELD_MANAGER\n )\n\n def 
delete(self, reference: V1Deployment):\n return self.api.delete_namespaced_deployment(reference.metadata.name, self.namespace)\n\n def retrieve(self) -> V1Deployment:\n return self.api.read_namespaced_deployment(self.name, self.namespace)\n\n def update(self, current: V1Deployment, reference: V1Deployment):\n return self.api.patch_namespaced_deployment(\n current.metadata.name,\n self.namespace,\n reference,\n field_manager=FIELD_MANAGER,\n )\n", "path": "authentik/outposts/controllers/k8s/deployment.py"}]} | 2,541 | 370 |
gh_patches_debug_2761 | rasdani/github-patches | git_diff | napari__napari-1088 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ListModel.append does not check type
## 🐛 Bug
While working on layer groups, I found a strange lack of type checking when appending to a `ListModel` (which inherits from `TypedList`). [`ListModel.append`](https://github.com/napari/napari/blob/59ed366e9d492a2389c451468fd8b9f96508b4e2/napari/utils/list/_model.py#L59) jumps right over `TypedList.append`:
https://github.com/napari/napari/blob/59ed366e9d492a2389c451468fd8b9f96508b4e2/napari/utils/list/_model.py#L58-L60
... and if you try to add something that is not a `Layer` to a `LayerList`, it works fine up until it throws an error (unrelated to typing) in `components.layerlist._add`. Is that supposed to be `TypedList.append(self, obj)`, or was that intentional?
</issue>
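The skip happens because `super(TypedList, self)` starts the method lookup in the MRO *after* `TypedList`, so the type-checking override never runs. A stripped-down illustration with toy classes (not napari's real ones):

```python
class Base(list):
    def append(self, obj):
        print("Base.append")
        super().append(obj)

class Typed(Base):
    def append(self, obj):
        print("Typed.append: type check would happen here")
        super().append(obj)

class Model(Typed):
    def append(self, obj):
        # Mirrors the bug: lookup starts *after* Typed in the MRO,
        # so Typed.append (and its type check) is skipped.
        super(Typed, self).append(obj)

Model().append(42)  # prints only "Base.append"
```

The fix is to call the skipped class explicitly, i.e. `TypedList.append(self, obj)`, which is exactly what the reporter suggests.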
<code>
[start of napari/utils/list/_model.py]
1 from ...utils.event import EmitterGroup
2
3 from ._multi import MultiIndexList
4 from ._typed import TypedList
5
6
7 class ListModel(MultiIndexList, TypedList):
8 """List with events, tuple-indexing, typing, and filtering.
9
10 Parameters
11 ----------
12 basetype : type
13 Type of the elements in the list.
14 iterable : iterable, optional
15 Elements to initialize the list with.
16 lookup : dict of type : function(object, ``basetype``) -> bool
17 Functions that determine if an object is a reference to an
18 element of the list.
19
20 Attributes
21 ----------
22 events : vispy.util.event.EmitterGroup
23 Group of events for adding, removing, and reordering elements
24 within the list.
25 """
26
27 def __init__(self, basetype, iterable=(), lookup=None):
28 super().__init__(basetype, iterable, lookup)
29 self.events = EmitterGroup(
30 source=self,
31 auto_connect=True,
32 added=None,
33 removed=None,
34 reordered=None,
35 changed=None,
36 )
37 self.events.added.connect(self.events.changed)
38 self.events.removed.connect(self.events.changed)
39 self.events.reordered.connect(self.events.changed)
40
41 def __setitem__(self, query, values):
42 indices = tuple(self.__prsitem__(query))
43 new_indices = tuple(values)
44
45 if sorted(indices) != sorted(self.index(v) for v in new_indices):
46 raise TypeError(
47 'must be a reordering of indices; '
48 'setting of list items not allowed'
49 )
50
51 super().__setitem__(indices, new_indices)
52 self.events.reordered()
53
54 def insert(self, index, obj):
55 super().insert(index, obj)
56 self.events.added(item=obj, index=self.__locitem__(index))
57
58 def append(self, obj):
59 super(TypedList, self).append(obj)
60 self.events.added(item=obj, index=len(self) - 1)
61
62 def pop(self, key):
63 obj = super().pop(key)
64 self.events.removed(item=obj, index=key)
65 return obj
66
[end of napari/utils/list/_model.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/napari/utils/list/_model.py b/napari/utils/list/_model.py
--- a/napari/utils/list/_model.py
+++ b/napari/utils/list/_model.py
@@ -56,7 +56,7 @@
self.events.added(item=obj, index=self.__locitem__(index))
def append(self, obj):
- super(TypedList, self).append(obj)
+ TypedList.append(self, obj)
self.events.added(item=obj, index=len(self) - 1)
def pop(self, key):
| {"golden_diff": "diff --git a/napari/utils/list/_model.py b/napari/utils/list/_model.py\n--- a/napari/utils/list/_model.py\n+++ b/napari/utils/list/_model.py\n@@ -56,7 +56,7 @@\n self.events.added(item=obj, index=self.__locitem__(index))\n \n def append(self, obj):\n- super(TypedList, self).append(obj)\n+ TypedList.append(self, obj)\n self.events.added(item=obj, index=len(self) - 1)\n \n def pop(self, key):\n", "issue": "ListModel.append does not check type\n## \ud83d\udc1b Bug\r\nin working on layer groups, I found a strange lack of type checking when appending to a `ListModel` (which inherits from `TypedList`). [`ListModel.append`](https://github.com/napari/napari/blob/59ed366e9d492a2389c451468fd8b9f96508b4e2/napari/utils/list/_model.py#L59) jumps right over `TypedList.append`\r\nhttps://github.com/napari/napari/blob/59ed366e9d492a2389c451468fd8b9f96508b4e2/napari/utils/list/_model.py#L58-L60\r\n\r\n... and if you try to something that is not a `Layer` to a `LayerList`, it works fine up until throwing an error (unrelated to typing) in `components.layerlist._add`. Is that supposed to be `TypedList.append(self, obj)`? or was that intentional?\n", "before_files": [{"content": "from ...utils.event import EmitterGroup\n\nfrom ._multi import MultiIndexList\nfrom ._typed import TypedList\n\n\nclass ListModel(MultiIndexList, TypedList):\n \"\"\"List with events, tuple-indexing, typing, and filtering.\n\n Parameters\n ----------\n basetype : type\n Type of the elements in the list.\n iterable : iterable, optional\n Elements to initialize the list with.\n lookup : dict of type : function(object, ``basetype``) -> bool\n Functions that determine if an object is a reference to an\n element of the list.\n\n Attributes\n ----------\n events : vispy.util.event.EmitterGroup\n Group of events for adding, removing, and reordering elements\n within the list.\n \"\"\"\n\n def __init__(self, basetype, iterable=(), lookup=None):\n super().__init__(basetype, iterable, lookup)\n self.events = EmitterGroup(\n source=self,\n auto_connect=True,\n added=None,\n removed=None,\n reordered=None,\n changed=None,\n )\n self.events.added.connect(self.events.changed)\n self.events.removed.connect(self.events.changed)\n self.events.reordered.connect(self.events.changed)\n\n def __setitem__(self, query, values):\n indices = tuple(self.__prsitem__(query))\n new_indices = tuple(values)\n\n if sorted(indices) != sorted(self.index(v) for v in new_indices):\n raise TypeError(\n 'must be a reordering of indices; '\n 'setting of list items not allowed'\n )\n\n super().__setitem__(indices, new_indices)\n self.events.reordered()\n\n def insert(self, index, obj):\n super().insert(index, obj)\n self.events.added(item=obj, index=self.__locitem__(index))\n\n def append(self, obj):\n super(TypedList, self).append(obj)\n self.events.added(item=obj, index=len(self) - 1)\n\n def pop(self, key):\n obj = super().pop(key)\n self.events.removed(item=obj, index=key)\n return obj\n", "path": "napari/utils/list/_model.py"}]} | 1,363 | 125 |
gh_patches_debug_40266 | rasdani/github-patches | git_diff | ultrabug__py3status-1093 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
mpd_status creates a new socket connection every second
I'm not sure if this should be a bug report or a feature request, but I've been bitten by this, erm, _feature_: I use `mopidy` (mpd with spotify integration) and it gets confused when a request comes in from `mpd_status` while it is processing an mpd command block. 

This also keeps a lot of abandoned connections in `TIME_WAIT` state - 60 currently on my system. 

I've had a quick look at the code for this module, and the connection is established in `current_track()`. I do not know if it is possible, but the best approach (IMHO) would be to `connect()` once and keep the socket alive until `py3status` gets reloaded. 
mpd_status creates a new socket connection every second
I'm not sure if this should be a bug report or a feature request, but I've been bitten by this, erm, _feature_: I use `mopidy` (mpd with spotify integration) and it gets confused when a request comes in from `mpd_status` while it is processing an mpd command block. 

This also keeps a lot of abandoned connections in `TIME_WAIT` state - 60 currently on my system. 

I've had a quick look at the code for this module, and the connection is established in `current_track()`. I do not know if it is possible, but the best approach (IMHO) would be to `connect()` once and keep the socket alive until `py3status` gets reloaded. 
</issue>
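A rough sketch of the "connect once and reuse" idea suggested above, using `python-mpd2`'s `MPDClient` (an illustration of the approach, not the module's actual code; option names and error handling are simplified):

```python
from mpd import MPDClient, ConnectionError

class PersistentMPD:
    """Hold one MPD connection open instead of reconnecting on every poll."""

    def __init__(self, host="localhost", port=6600, password=None):
        self.host, self.port, self.password = host, port, password
        self._client = None

    def _connect(self):
        if self._client is None:
            self._client = MPDClient()
            self._client.connect(self.host, self.port)
            if self.password:
                self._client.password(self.password)
        return self._client

    def status(self):
        try:
            return self._connect().status()
        except ConnectionError:
            # The server dropped the connection (idle timeout, restart, ...):
            # forget the socket and reconnect on the next poll.
            self._client = None
            raise
```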
<code>
[start of py3status/modules/mpd_status.py]
1 # -*- coding: utf-8 -*-
2 """
3 Display song currently playing in mpd.
4
5 Configuration parameters:
6 cache_timeout: how often we refresh this module in seconds (default 2)
7 format: template string (see below)
8 (default '{state} [[[{artist}] - {title}]|[{file}]]')
9 hide_when_paused: hide the status if state is paused (default False)
10 hide_when_stopped: hide the status if state is stopped (default True)
11 host: mpd host (default 'localhost')
12 max_width: maximum status length (default 120)
13 password: mpd password (default None)
14 port: mpd port (default '6600')
15 state_pause: label to display for "paused" state (default '[pause]')
16 state_play: label to display for "playing" state (default '[play]')
17 state_stop: label to display for "stopped" state (default '[stop]')
18
19 Color options:
20 color_pause: Paused, default color_degraded
21 color_play: Playing, default color_good
22 color_stop: Stopped, default color_bad
23
24 Format placeholders:
25 {state} state (paused, playing. stopped) can be defined via `state_..`
26 configuration parameters
27 Refer to the mpc(1) manual page for the list of available placeholders to
28 be used in the format. Placeholders should use braces `{}` rather than
29 percent `%%` eg `{artist}`.
30 Every placeholder can also be prefixed with
31 `next_` to retrieve the data for the song following the one currently
32 playing.
33
34 Requires:
35 python-mpd2: (NOT python2-mpd2)
36 ```
37 # pip install python-mpd2
38 ```
39
40 Note: previously formats using %field% where allowed for this module, but
41 standard placeholders should be used.
42
43 Examples of `format`
44 ```
45 # Show state and (artist -) title, if no title fallback to file:
46 {state} [[[{artist} - ]{title}]|[{file}]]
47
48 # Show state, [duration], title (or file) and next song title (or file):
49 {state} \[{time}\] [{title}|{file}] → [{next_title}|{next_file}]
50 ```
51
52 @author shadowprince, zopieux
53 @license Eclipse Public License
54
55 SAMPLE OUTPUT
56 {'color': '#00ff00', 'full_text': '[play] Music For Programming - Idol Eyes'}
57
58 paused
59 {'color': '#ffff00', 'full_text': '[pause] Music For Programming - Idol Eyes'}
60
61 stopped
62 {'color': '#ff0000', 'full_text': '[stop] Music For Programming - Idol Eyes'}
63 """
64
65 import datetime
66 import re
67 import socket
68 from mpd import MPDClient, CommandError
69
70
71 def song_attr(song, attr):
72 def parse_mtime(date_str):
73 return datetime.datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%SZ')
74
75 if attr == 'time':
76 try:
77 duration = int(song['time'])
78 if duration > 0:
79 minutes, seconds = divmod(duration, 60)
80 return '{:d}:{:02d}'.format(minutes, seconds)
81 raise ValueError
82 except (KeyError, ValueError):
83 return ''
84 elif attr == 'position':
85 try:
86 return '{}'.format(int(song['pos']) + 1)
87 except (KeyError, ValueError):
88 return ''
89 elif attr == 'mtime':
90 return parse_mtime(song['last-modified']).strftime('%c')
91 elif attr == 'mdate':
92 return parse_mtime(song['last-modified']).strftime('%x')
93
94 return song.get(attr, '')
95
96
97 class Py3status:
98 """
99 """
100 # available configuration parameters
101 cache_timeout = 2
102 format = '{state} [[[{artist}] - {title}]|[{file}]]'
103 hide_when_paused = False
104 hide_when_stopped = True
105 host = 'localhost'
106 max_width = 120
107 password = None
108 port = '6600'
109 state_pause = '[pause]'
110 state_play = '[play]'
111 state_stop = '[stop]'
112
113 def post_config_hook(self):
114 # Convert from %placeholder% to {placeholder}
115 # This is not perfect but should be good enough
116 if not self.py3.get_placeholders_list(self.format) and '%' in self.format:
117 self.format = re.sub('%([a-z]+)%', r'{\1}', self.format)
118 self.py3.log('Old % style format DEPRECATED use { style format')
119
120 def _state_character(self, state):
121 if state == 'play':
122 return self.state_play
123 elif state == 'pause':
124 return self.state_pause
125 elif state == 'stop':
126 return self.state_stop
127 return '?'
128
129 def current_track(self):
130 try:
131 c = MPDClient()
132 c.connect(host=self.host, port=self.port)
133 if self.password:
134 c.password(self.password)
135
136 status = c.status()
137 song = int(status.get('song', 0))
138 next_song = int(status.get('nextsong', 0))
139
140 state = status.get('state')
141
142 if ((state == 'pause' and self.hide_when_paused) or
143 (state == 'stop' and self.hide_when_stopped)):
144 text = ''
145
146 else:
147 playlist_info = c.playlistinfo()
148 try:
149 song = playlist_info[song]
150 except IndexError:
151 song = {}
152 try:
153 next_song = playlist_info[next_song]
154 except IndexError:
155 next_song = {}
156
157 song['state'] = next_song['state'] \
158 = self._state_character(state)
159
160 def attr_getter(attr):
161 if attr.startswith('next_'):
162 return song_attr(next_song, attr[5:])
163 return song_attr(song, attr)
164
165 text = self.py3.safe_format(self.format, attr_getter=attr_getter)
166
167 except socket.error:
168 text = "Failed to connect to mpd!"
169 state = None
170 except CommandError:
171 text = "Failed to authenticate to mpd!"
172 state = None
173 c.disconnect()
174 else:
175 c.disconnect()
176
177 if len(text) > self.max_width:
178 text = u'{}...'.format(text[:self.max_width - 3])
179
180 response = {
181 'cached_until': self.py3.time_in(self.cache_timeout),
182 'full_text': text,
183 }
184
185 if state:
186 if state == 'play':
187 response['color'] = self.py3.COLOR_PLAY or self.py3.COLOR_GOOD
188 elif state == 'pause':
189 response['color'] = (self.py3.COLOR_PAUSE or
190 self.py3.COLOR_DEGRADED)
191 elif state == 'stop':
192 response['color'] = self.py3.COLOR_STOP or self.py3.COLOR_BAD
193
194 return response
195
196
197 if __name__ == "__main__":
198 """
199 Run module in test mode.
200 """
201 from py3status.module_test import module_test
202 module_test(Py3status)
203
[end of py3status/modules/mpd_status.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/py3status/modules/mpd_status.py b/py3status/modules/mpd_status.py
--- a/py3status/modules/mpd_status.py
+++ b/py3status/modules/mpd_status.py
@@ -65,7 +65,7 @@
import datetime
import re
import socket
-from mpd import MPDClient, CommandError
+from mpd import MPDClient, CommandError, ConnectionError
def song_attr(song, attr):
@@ -116,6 +116,27 @@
if not self.py3.get_placeholders_list(self.format) and '%' in self.format:
self.format = re.sub('%([a-z]+)%', r'{\1}', self.format)
self.py3.log('Old % style format DEPRECATED use { style format')
+ # class variables:
+ self.client = None
+
+ def _get_mpd(self, disconnect=False):
+ if disconnect:
+ try:
+ self.client.disconnect()
+ finally:
+ self.client = None
+ return
+
+ try:
+ if self.client is None:
+ self.client = MPDClient()
+ self.client.connect(host=self.host, port=self.port)
+ if self.password:
+ self.client.password(self.password)
+ return self.client
+ except (socket.error, ConnectionError, CommandError) as e:
+ self.client = None
+ raise e
def _state_character(self, state):
if state == 'play':
@@ -128,12 +149,7 @@
def current_track(self):
try:
- c = MPDClient()
- c.connect(host=self.host, port=self.port)
- if self.password:
- c.password(self.password)
-
- status = c.status()
+ status = self._get_mpd().status()
song = int(status.get('song', 0))
next_song = int(status.get('nextsong', 0))
@@ -144,7 +160,7 @@
text = ''
else:
- playlist_info = c.playlistinfo()
+ playlist_info = self._get_mpd().playlistinfo()
try:
song = playlist_info[song]
except IndexError:
@@ -167,12 +183,14 @@
except socket.error:
text = "Failed to connect to mpd!"
state = None
+ except ConnectionError:
+ text = "Error while connecting to mpd!"
+ state = None
+ self._get_mpd(disconnect=True)
except CommandError:
text = "Failed to authenticate to mpd!"
state = None
- c.disconnect()
- else:
- c.disconnect()
+ self._get_mpd(disconnect=True)
if len(text) > self.max_width:
text = u'{}...'.format(text[:self.max_width - 3])
@@ -193,6 +211,9 @@
return response
+ def kill(self):
+ self._get_mpd(disconnect=True)
+
if __name__ == "__main__":
"""
| {"golden_diff": "diff --git a/py3status/modules/mpd_status.py b/py3status/modules/mpd_status.py\n--- a/py3status/modules/mpd_status.py\n+++ b/py3status/modules/mpd_status.py\n@@ -65,7 +65,7 @@\n import datetime\n import re\n import socket\n-from mpd import MPDClient, CommandError\n+from mpd import MPDClient, CommandError, ConnectionError\n \n \n def song_attr(song, attr):\n@@ -116,6 +116,27 @@\n if not self.py3.get_placeholders_list(self.format) and '%' in self.format:\n self.format = re.sub('%([a-z]+)%', r'{\\1}', self.format)\n self.py3.log('Old % style format DEPRECATED use { style format')\n+ # class variables:\n+ self.client = None\n+\n+ def _get_mpd(self, disconnect=False):\n+ if disconnect:\n+ try:\n+ self.client.disconnect()\n+ finally:\n+ self.client = None\n+ return\n+\n+ try:\n+ if self.client is None:\n+ self.client = MPDClient()\n+ self.client.connect(host=self.host, port=self.port)\n+ if self.password:\n+ self.client.password(self.password)\n+ return self.client\n+ except (socket.error, ConnectionError, CommandError) as e:\n+ self.client = None\n+ raise e\n \n def _state_character(self, state):\n if state == 'play':\n@@ -128,12 +149,7 @@\n \n def current_track(self):\n try:\n- c = MPDClient()\n- c.connect(host=self.host, port=self.port)\n- if self.password:\n- c.password(self.password)\n-\n- status = c.status()\n+ status = self._get_mpd().status()\n song = int(status.get('song', 0))\n next_song = int(status.get('nextsong', 0))\n \n@@ -144,7 +160,7 @@\n text = ''\n \n else:\n- playlist_info = c.playlistinfo()\n+ playlist_info = self._get_mpd().playlistinfo()\n try:\n song = playlist_info[song]\n except IndexError:\n@@ -167,12 +183,14 @@\n except socket.error:\n text = \"Failed to connect to mpd!\"\n state = None\n+ except ConnectionError:\n+ text = \"Error while connecting to mpd!\"\n+ state = None\n+ self._get_mpd(disconnect=True)\n except CommandError:\n text = \"Failed to authenticate to mpd!\"\n state = None\n- c.disconnect()\n- else:\n- c.disconnect()\n+ self._get_mpd(disconnect=True)\n \n if len(text) > self.max_width:\n text = u'{}...'.format(text[:self.max_width - 3])\n@@ -193,6 +211,9 @@\n \n return response\n \n+ def kill(self):\n+ self._get_mpd(disconnect=True)\n+\n \n if __name__ == \"__main__\":\n \"\"\"\n", "issue": "mpd_status creates a new socket connection every second\nI'm not sure if this should be a bug report or a feature request, but I've been bitten by this, erm, _feature_: I use `mopidy` (mpd with spotify integration) and that gets confused when a request comes in from `mpd_status` while it processes a mpd-command-block. \r\n\r\nThis also keeps a lot of abandoned connections in `TIME_WAIT` state - 60 currently at my system. \r\n\r\nI've had a quick look at the code for this module, and the connection i s esatablished in `current_track()`. I do not know if it is possible, but the best apporach (IMHO) would be to `connect()` once and keep the socket alive until `py3status` gets reloaded. \nmpd_status creates a new socket connection every second\nI'm not sure if this should be a bug report or a feature request, but I've been bitten by this, erm, _feature_: I use `mopidy` (mpd with spotify integration) and that gets confused when a request comes in from `mpd_status` while it processes a mpd-command-block. \r\n\r\nThis also keeps a lot of abandoned connections in `TIME_WAIT` state - 60 currently at my system. \r\n\r\nI've had a quick look at the code for this module, and the connection i s esatablished in `current_track()`. 
I do not know if it is possible, but the best apporach (IMHO) would be to `connect()` once and keep the socket alive until `py3status` gets reloaded. \n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"\nDisplay song currently playing in mpd.\n\nConfiguration parameters:\n cache_timeout: how often we refresh this module in seconds (default 2)\n format: template string (see below)\n (default '{state} [[[{artist}] - {title}]|[{file}]]')\n hide_when_paused: hide the status if state is paused (default False)\n hide_when_stopped: hide the status if state is stopped (default True)\n host: mpd host (default 'localhost')\n max_width: maximum status length (default 120)\n password: mpd password (default None)\n port: mpd port (default '6600')\n state_pause: label to display for \"paused\" state (default '[pause]')\n state_play: label to display for \"playing\" state (default '[play]')\n state_stop: label to display for \"stopped\" state (default '[stop]')\n\nColor options:\n color_pause: Paused, default color_degraded\n color_play: Playing, default color_good\n color_stop: Stopped, default color_bad\n\nFormat placeholders:\n {state} state (paused, playing. stopped) can be defined via `state_..`\n configuration parameters\n Refer to the mpc(1) manual page for the list of available placeholders to\n be used in the format. Placeholders should use braces `{}` rather than\n percent `%%` eg `{artist}`.\n Every placeholder can also be prefixed with\n `next_` to retrieve the data for the song following the one currently\n playing.\n\nRequires:\n python-mpd2: (NOT python2-mpd2)\n```\n# pip install python-mpd2\n```\n\nNote: previously formats using %field% where allowed for this module, but\nstandard placeholders should be used.\n\nExamples of `format`\n```\n# Show state and (artist -) title, if no title fallback to file:\n{state} [[[{artist} - ]{title}]|[{file}]]\n\n# Show state, [duration], title (or file) and next song title (or file):\n{state} \\[{time}\\] [{title}|{file}] \u2192 [{next_title}|{next_file}]\n```\n\n@author shadowprince, zopieux\n@license Eclipse Public License\n\nSAMPLE OUTPUT\n{'color': '#00ff00', 'full_text': '[play] Music For Programming - Idol Eyes'}\n\npaused\n{'color': '#ffff00', 'full_text': '[pause] Music For Programming - Idol Eyes'}\n\nstopped\n{'color': '#ff0000', 'full_text': '[stop] Music For Programming - Idol Eyes'}\n\"\"\"\n\nimport datetime\nimport re\nimport socket\nfrom mpd import MPDClient, CommandError\n\n\ndef song_attr(song, attr):\n def parse_mtime(date_str):\n return datetime.datetime.strptime(date_str, '%Y-%m-%dT%H:%M:%SZ')\n\n if attr == 'time':\n try:\n duration = int(song['time'])\n if duration > 0:\n minutes, seconds = divmod(duration, 60)\n return '{:d}:{:02d}'.format(minutes, seconds)\n raise ValueError\n except (KeyError, ValueError):\n return ''\n elif attr == 'position':\n try:\n return '{}'.format(int(song['pos']) + 1)\n except (KeyError, ValueError):\n return ''\n elif attr == 'mtime':\n return parse_mtime(song['last-modified']).strftime('%c')\n elif attr == 'mdate':\n return parse_mtime(song['last-modified']).strftime('%x')\n\n return song.get(attr, '')\n\n\nclass Py3status:\n \"\"\"\n \"\"\"\n # available configuration parameters\n cache_timeout = 2\n format = '{state} [[[{artist}] - {title}]|[{file}]]'\n hide_when_paused = False\n hide_when_stopped = True\n host = 'localhost'\n max_width = 120\n password = None\n port = '6600'\n state_pause = '[pause]'\n state_play = '[play]'\n state_stop = '[stop]'\n\n def post_config_hook(self):\n # 
Convert from %placeholder% to {placeholder}\n # This is not perfect but should be good enough\n if not self.py3.get_placeholders_list(self.format) and '%' in self.format:\n self.format = re.sub('%([a-z]+)%', r'{\\1}', self.format)\n self.py3.log('Old % style format DEPRECATED use { style format')\n\n def _state_character(self, state):\n if state == 'play':\n return self.state_play\n elif state == 'pause':\n return self.state_pause\n elif state == 'stop':\n return self.state_stop\n return '?'\n\n def current_track(self):\n try:\n c = MPDClient()\n c.connect(host=self.host, port=self.port)\n if self.password:\n c.password(self.password)\n\n status = c.status()\n song = int(status.get('song', 0))\n next_song = int(status.get('nextsong', 0))\n\n state = status.get('state')\n\n if ((state == 'pause' and self.hide_when_paused) or\n (state == 'stop' and self.hide_when_stopped)):\n text = ''\n\n else:\n playlist_info = c.playlistinfo()\n try:\n song = playlist_info[song]\n except IndexError:\n song = {}\n try:\n next_song = playlist_info[next_song]\n except IndexError:\n next_song = {}\n\n song['state'] = next_song['state'] \\\n = self._state_character(state)\n\n def attr_getter(attr):\n if attr.startswith('next_'):\n return song_attr(next_song, attr[5:])\n return song_attr(song, attr)\n\n text = self.py3.safe_format(self.format, attr_getter=attr_getter)\n\n except socket.error:\n text = \"Failed to connect to mpd!\"\n state = None\n except CommandError:\n text = \"Failed to authenticate to mpd!\"\n state = None\n c.disconnect()\n else:\n c.disconnect()\n\n if len(text) > self.max_width:\n text = u'{}...'.format(text[:self.max_width - 3])\n\n response = {\n 'cached_until': self.py3.time_in(self.cache_timeout),\n 'full_text': text,\n }\n\n if state:\n if state == 'play':\n response['color'] = self.py3.COLOR_PLAY or self.py3.COLOR_GOOD\n elif state == 'pause':\n response['color'] = (self.py3.COLOR_PAUSE or\n self.py3.COLOR_DEGRADED)\n elif state == 'stop':\n response['color'] = self.py3.COLOR_STOP or self.py3.COLOR_BAD\n\n return response\n\n\nif __name__ == \"__main__\":\n \"\"\"\n Run module in test mode.\n \"\"\"\n from py3status.module_test import module_test\n module_test(Py3status)\n", "path": "py3status/modules/mpd_status.py"}]} | 2,917 | 686 |
gh_patches_debug_17308 | rasdani/github-patches | git_diff | pre-commit__pre-commit-315 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
:args seems to break with {} in list.
I am working on a repo with some hooks for my company: https://github.com/marick/pre-commit-hooks
There is a hook that works fine with this `.pre-commit-config.yaml`:
``` yaml
- repo: /Users/marick/src/pre-commit-hooks
sha: d6dee96f56bf9290f7ebb852c4252c50b8f6215d
stages: [commit, push]
hooks:
- id: prohibit-suspicious-patterns
args: ["AKIA[[:alnum]]", --]
```
However, if I change the first arg by adding `{1}`:
``` yaml
args: ["AKIA[[:alnum]]{1}", --]
```
... I get this:
```
prohibit suspicious patterns..................................................................
An unexpected error has occurred: IndexError: tuple index out of range
Check the log at ~/.pre-commit/pre-commit.log
```
The contents of `pre-commit.log`:
```
An unexpected error has occurred: IndexError: tuple index out of range
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/pre_commit/error_handler.py", line 36, in error_handler
yield
File "/usr/local/lib/python2.7/site-packages/pre_commit/main.py", line 150, in main
return run(runner, args)
File "/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py", line 212, in run
return _run_hooks(repo_hooks, args, write, environ)
File "/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py", line 136, in _run_hooks
retval |= _run_single_hook(hook, repo, args, write, skips)
File "/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py", line 89, in _run_single_hook
retcode, stdout, stderr = repo.run_hook(hook, filenames)
File "/usr/local/lib/python2.7/site-packages/pre_commit/repository.py", line 145, in run_hook
self.cmd_runner, hook, file_args,
File "/usr/local/lib/python2.7/site-packages/pre_commit/languages/script.py", line 23, in run_hook
encoding=None,
File "/usr/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py", line 40, in run
replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)
File "/usr/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py", line 11, in _replace_cmd
return [part.format(**kwargs) for part in cmd]
IndexError: tuple index out of range
```
</issue>
<code>
[start of pre_commit/prefixed_command_runner.py]
1 from __future__ import unicode_literals
2
3 import os
4 import os.path
5 import subprocess
6
7 from pre_commit.util import cmd_output
8
9
10 def _replace_cmd(cmd, **kwargs):
11 return [part.format(**kwargs) for part in cmd]
12
13
14 class PrefixedCommandRunner(object):
15 """A PrefixedCommandRunner allows you to run subprocess commands with
16 comand substitution.
17
18 For instance:
19 PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])
20
21 will run ['/tmp/foo/foo.sh', 'bar', 'baz']
22 """
23
24 def __init__(
25 self,
26 prefix_dir,
27 popen=subprocess.Popen,
28 makedirs=os.makedirs
29 ):
30 self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep
31 self.__popen = popen
32 self.__makedirs = makedirs
33
34 def _create_path_if_not_exists(self):
35 if not os.path.exists(self.prefix_dir):
36 self.__makedirs(self.prefix_dir)
37
38 def run(self, cmd, **kwargs):
39 self._create_path_if_not_exists()
40 replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)
41 return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
42
43 def path(self, *parts):
44 path = os.path.join(self.prefix_dir, *parts)
45 return os.path.normpath(path)
46
47 def exists(self, *parts):
48 return os.path.exists(self.path(*parts))
49
50 @classmethod
51 def from_command_runner(cls, command_runner, path_end):
52 """Constructs a new command runner from an existing one by appending
53 `path_end` to the command runner's prefix directory.
54 """
55 return cls(
56 command_runner.path(path_end),
57 popen=command_runner.__popen,
58 makedirs=command_runner.__makedirs,
59 )
60
[end of pre_commit/prefixed_command_runner.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pre_commit/prefixed_command_runner.py b/pre_commit/prefixed_command_runner.py
--- a/pre_commit/prefixed_command_runner.py
+++ b/pre_commit/prefixed_command_runner.py
@@ -7,10 +7,6 @@
from pre_commit.util import cmd_output
-def _replace_cmd(cmd, **kwargs):
- return [part.format(**kwargs) for part in cmd]
-
-
class PrefixedCommandRunner(object):
"""A PrefixedCommandRunner allows you to run subprocess commands with
comand substitution.
@@ -37,7 +33,9 @@
def run(self, cmd, **kwargs):
self._create_path_if_not_exists()
- replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)
+ replaced_cmd = [
+ part.replace('{prefix}', self.prefix_dir) for part in cmd
+ ]
return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)
def path(self, *parts):
| {"golden_diff": "diff --git a/pre_commit/prefixed_command_runner.py b/pre_commit/prefixed_command_runner.py\n--- a/pre_commit/prefixed_command_runner.py\n+++ b/pre_commit/prefixed_command_runner.py\n@@ -7,10 +7,6 @@\n from pre_commit.util import cmd_output\n \n \n-def _replace_cmd(cmd, **kwargs):\n- return [part.format(**kwargs) for part in cmd]\n-\n-\n class PrefixedCommandRunner(object):\n \"\"\"A PrefixedCommandRunner allows you to run subprocess commands with\n comand substitution.\n@@ -37,7 +33,9 @@\n \n def run(self, cmd, **kwargs):\n self._create_path_if_not_exists()\n- replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)\n+ replaced_cmd = [\n+ part.replace('{prefix}', self.prefix_dir) for part in cmd\n+ ]\n return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)\n \n def path(self, *parts):\n", "issue": ":args seems to break with {} in list.\nI am working on a repo with some hooks for my company: https://github.com/marick/pre-commit-hooks\n\nThere is a hook that works fine with this `.pre-commit-config.yaml`:\n\n``` yaml\n- repo: /Users/marick/src/pre-commit-hooks\n sha: d6dee96f56bf9290f7ebb852c4252c50b8f6215d\n stages: [commit, push]\n hooks:\n - id: prohibit-suspicious-patterns\n args: [\"AKIA[[:alnum]]\", --]\n```\n\nHowever, it I change the first arg by adding `{1}`:\n\n``` yaml\n args: [\"AKIA[[:alnum]]{1}\", --]\n```\n\n... I get this:\n\n```\nprohibit suspicious patterns..................................................................\nAn unexpected error has occurred: IndexError: tuple index out of range\nCheck the log at ~/.pre-commit/pre-commit.log\n```\n\nThe contents of `pre-commit.log`:\n\n```\nAn unexpected error has occurred: IndexError: tuple index out of range\nTraceback (most recent call last):\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/error_handler.py\", line 36, in error_handler\n yield\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/main.py\", line 150, in main\n return run(runner, args)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 212, in run\n return _run_hooks(repo_hooks, args, write, environ)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 136, in _run_hooks\n retval |= _run_single_hook(hook, repo, args, write, skips)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/commands/run.py\", line 89, in _run_single_hook\n retcode, stdout, stderr = repo.run_hook(hook, filenames)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/repository.py\", line 145, in run_hook\n self.cmd_runner, hook, file_args,\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/languages/script.py\", line 23, in run_hook\n encoding=None,\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py\", line 40, in run\n replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)\n File \"/usr/local/lib/python2.7/site-packages/pre_commit/prefixed_command_runner.py\", line 11, in _replace_cmd\n return [part.format(**kwargs) for part in cmd]\nIndexError: tuple index out of range\n\n```\n\n", "before_files": [{"content": "from __future__ import unicode_literals\n\nimport os\nimport os.path\nimport subprocess\n\nfrom pre_commit.util import cmd_output\n\n\ndef _replace_cmd(cmd, **kwargs):\n return [part.format(**kwargs) for part in cmd]\n\n\nclass PrefixedCommandRunner(object):\n \"\"\"A PrefixedCommandRunner allows you to run subprocess commands with\n comand substitution.\n\n For instance:\n 
PrefixedCommandRunner('/tmp/foo').run(['{prefix}foo.sh', 'bar', 'baz'])\n\n will run ['/tmp/foo/foo.sh', 'bar', 'baz']\n \"\"\"\n\n def __init__(\n self,\n prefix_dir,\n popen=subprocess.Popen,\n makedirs=os.makedirs\n ):\n self.prefix_dir = prefix_dir.rstrip(os.sep) + os.sep\n self.__popen = popen\n self.__makedirs = makedirs\n\n def _create_path_if_not_exists(self):\n if not os.path.exists(self.prefix_dir):\n self.__makedirs(self.prefix_dir)\n\n def run(self, cmd, **kwargs):\n self._create_path_if_not_exists()\n replaced_cmd = _replace_cmd(cmd, prefix=self.prefix_dir)\n return cmd_output(*replaced_cmd, __popen=self.__popen, **kwargs)\n\n def path(self, *parts):\n path = os.path.join(self.prefix_dir, *parts)\n return os.path.normpath(path)\n\n def exists(self, *parts):\n return os.path.exists(self.path(*parts))\n\n @classmethod\n def from_command_runner(cls, command_runner, path_end):\n \"\"\"Constructs a new command runner from an existing one by appending\n `path_end` to the command runner's prefix directory.\n \"\"\"\n return cls(\n command_runner.path(path_end),\n popen=command_runner.__popen,\n makedirs=command_runner.__makedirs,\n )\n", "path": "pre_commit/prefixed_command_runner.py"}]} | 1,671 | 217 |
gh_patches_debug_38600 | rasdani/github-patches | git_diff | electricitymaps__electricitymaps-contrib-1295 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Namibia parser should read out data timestamp
Current time in Namibia is 2 April 14:41 but the [Nampower dashboard](http://www.nampower.com.na/Scada.aspx) is stuck on 1 April 11:45:40.
Timestamp of the data is available in http://www.nampower.com.na/gis/images/File_Info.png
We could OCR that file. A quick attempt using the current tesseract methods and upscaling from 600x20 to 1200x40 seems to have worked alright; I got `SCADA File Dene: 01/04/2018 11:45:40` (it should have been "File Date" but otherwise correct). There is always the danger of OCR misreading, so maybe ignore the datapoint only if the timestamp can be read and is too far in the past.
Screenshot from our page:

</issue>
<code>
[start of parsers/NA.py]
1 #!/usr/bin/env python3
2
3 """Parser for Namibia."""
4
5 import arrow
6 from bs4 import BeautifulSoup
7 from .lib.validation import validate
8 from logging import getLogger
9 from PIL import Image
10 from pytesseract import image_to_string
11 import re
12 import requests
13
14
15 generation_link = 'http://www.nampower.com.na/gis/images/Gx.png'
16 exchanges_link = 'http://www.nampower.com.na/gis/images/Imports_Exports.png'
17
18 plant_mapping = {"Ruacana": "hydro",
19 "Van Eck": "coal",
20 "Paratus": "oil",
21 "Anixas": "oil",
22 "Solar": "solar",
23 "Wind": "wind"
24 }
25
26 exchange_mapping = {"NA->ZA": "ESKOM",
27 "NA->ZM": "ZESCO"
28 }
29
30
31 def get_text_from_image(link, expected_size, new_size, logger, session=None):
32 """
33 Gets image from link and checks expected size vs actual.
34 Converts to black & white and enlarges to improve OCR accuracy.
35 Performs OCR using tesseract and returns a str.
36 """
37
38 s = session or requests.Session()
39 img = Image.open(s.get(link, stream=True).raw)
40
41 if img.size != expected_size:
42 if (logger):
43 logger.warning("Check Namibia Scada dashboard for {} changes.".format(link),
44 extras={'key': 'NA'})
45 else:
46 print("Check Namibia Scada dashboard for {} changes.".format(link))
47
48 gray = img.convert('L')
49 gray_enlarged = gray.resize(new_size, Image.LANCZOS)
50 text = image_to_string(gray_enlarged, lang='eng')
51
52 return text
53
54
55 def data_processor(text):
56 """
57 Takes text produced from OCR and extracts production.
58 Returns a dictionary.
59 """
60
61 production = {}
62 for k in plant_mapping.keys():
63 pattern = re.escape(k) + r": (\d+\.\d\d)"
64 try:
65 val = re.search(pattern, text).group(1)
66 production[plant_mapping[k]] = production.get(plant_mapping[k], 0.0)+ float(val)
67 except (AttributeError, ValueError) as e:
68 production[plant_mapping[k]] = None
69
70 return production
71
72
73 def fetch_production(zone_key = 'NA', session=None, target_datetime=None, logger=getLogger(__name__)):
74 """
75 Requests the last known production mix (in MW) of a given country
76 Arguments:
77 zone_key (optional) -- used in case a parser is able to fetch multiple countries
78 session (optional) -- request session passed in order to re-use an existing session
79 Return:
80 A dictionary in the form:
81 {
82 'zoneKey': 'FR',
83 'datetime': '2017-01-01T00:00:00Z',
84 'production': {
85 'biomass': 0.0,
86 'coal': 0.0,
87 'gas': 0.0,
88 'hydro': 0.0,
89 'nuclear': null,
90 'oil': 0.0,
91 'solar': 0.0,
92 'wind': 0.0,
93 'geothermal': 0.0,
94 'unknown': 0.0
95 },
96 'storage': {
97 'hydro': -10.0,
98 },
99 'source': 'mysource.com'
100 }
101 """
102 if target_datetime:
103 raise NotImplementedError('This parser is not yet able to parse past dates')
104
105 raw_text = get_text_from_image(session=session, link=generation_link, \
106 expected_size=(400, 245), new_size=(1000,612), \
107 logger=logger)
108
109 production = data_processor(raw_text)
110
111 data = {
112 'zoneKey': zone_key,
113 'datetime': arrow.now('Africa/Windhoek').datetime,
114 'production': production,
115 'storage': {},
116 'source': 'nampower.com.na'
117 }
118
119 data = validate(data, required=['hydro'])
120
121 return data
122
123
124 def exchange_processor(text, exchange, logger):
125 """
126 Takes text produced from OCR and extracts exchange flow.
127 Returns a float or None.
128 """
129
130 utility = exchange_mapping[exchange]
131
132 try:
133 pattern = re.escape(utility) + r"([\D]*?)([-+]?\d+\.\d\d)"
134 val = re.search(pattern, text).group(2)
135 flow = float(val)
136 except (AttributeError, ValueError) as e:
137 logger.warning("""{} regex match failed on the following text.
138 {}""".format(exchange, text))
139 raise Exception("Exchange {} cannot be read.".format(exchange)) from e
140
141 return flow
142
143
144 def fetch_exchange(zone_key1, zone_key2, session=None, target_datetime=None, logger=getLogger(__name__)):
145 """Requests the last known power exchange (in MW) between two zones
146 Arguments:
147 zone_key1 -- the first country code
148 zone_key2 -- the second country code; order of the two codes in params doesn't matter
149 session (optional) -- request session passed in order to re-use an existing session
150 Return:
151 A dictionary in the form:
152 {
153 'sortedZoneKeys': 'DK->NO',
154 'datetime': '2017-01-01T00:00:00Z',
155 'netFlow': 0.0,
156 'source': 'mysource.com'
157 }
158 where net flow is from DK into NO
159 """
160 if target_datetime:
161 raise NotImplementedError('This parser is not yet able to parse past dates')
162
163 sorted_codes = "->".join(sorted([zone_key1, zone_key2]))
164
165 raw_text = get_text_from_image(session=session, link=exchanges_link,
166 expected_size=(400, 195), new_size=(1120, 546), \
167 logger=logger)
168
169 if sorted_codes == 'NA->ZA':
170 flow = exchange_processor(raw_text, 'NA->ZA', logger=logger)
171 elif sorted_codes == 'NA->ZM':
172 flow = exchange_processor(raw_text, 'NA->ZM', logger=logger)
173 else:
174 raise NotImplementedError('This exchange pair is not implemented')
175
176 #Import considered positive in data source.
177 if flow is not None:
178 flow = -1 * flow
179
180 exchange = {'sortedZoneKeys': sorted_codes,
181 'datetime': arrow.now('Africa/Windhoek').datetime,
182 'netFlow': flow,
183 'source': 'nampower.com.na'
184 }
185
186 return exchange
187
188
189 if __name__ == '__main__':
190 print('fetch_production() ->')
191 print(fetch_production())
192 print('fetch_exchange(NA, ZA)')
193 print(fetch_exchange('NA', 'ZA'))
194 print('fetch_exchange(NA, ZM)')
195 print(fetch_exchange('NA', 'ZM'))
196
[end of parsers/NA.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/parsers/NA.py b/parsers/NA.py
--- a/parsers/NA.py
+++ b/parsers/NA.py
@@ -12,6 +12,7 @@
import requests
+timestamp_link = 'http://www.nampower.com.na/gis/images/File_Info.png'
generation_link = 'http://www.nampower.com.na/gis/images/Gx.png'
exchanges_link = 'http://www.nampower.com.na/gis/images/Imports_Exports.png'
@@ -39,11 +40,8 @@
img = Image.open(s.get(link, stream=True).raw)
if img.size != expected_size:
- if (logger):
- logger.warning("Check Namibia Scada dashboard for {} changes.".format(link),
- extras={'key': 'NA'})
- else:
- print("Check Namibia Scada dashboard for {} changes.".format(link))
+ logger.warning("Check Namibia Scada dashboard for {} changes.".format(link),
+ extras={'key': 'NA'})
gray = img.convert('L')
gray_enlarged = gray.resize(new_size, Image.LANCZOS)
@@ -52,6 +50,35 @@
return text
+def check_timestamp(session=None, logger=None):
+ """
+ Sometimes the Scada Dashboard image stops updating for a while.
+ This function tries to ensure that only data younger than 1 hour
+ is accepted.
+ """
+
+ scada_info = get_text_from_image(session=session, link=timestamp_link,
+ expected_size=(600,20), new_size=(1200,40),
+ logger=logger)
+
+ timestamp = scada_info.split(':', 1)[1]
+
+ try:
+ scada_time = arrow.get(timestamp, ' DD/MM/YYYY HH:mm:ss')
+ except arrow.parser.ParserError as e:
+ logger.warning('Namibia scada timestamp cannot be read, got {}.'.format(timestamp))
+ # The OCR of the Scada dashboard is not very reliable, on failure safer to assume data is good.
+ return
+
+ data_time = scada_time.replace(tzinfo='Africa/Windhoek')
+ current_time = arrow.now('Africa/Windhoek')
+ diff = current_time - data_time
+
+ # Need to be sure we don't get old data if image stops updating.
+ if diff.seconds > 3600:
+ raise ValueError('Namibia scada data is too old to use, data is {} hours old.'.format(diff.seconds/3600))
+
+
def data_processor(text):
"""
Takes text produced from OCR and extracts production.
@@ -107,6 +134,7 @@
logger=logger)
production = data_processor(raw_text)
+ check_timestamp(session=session, logger=logger)
data = {
'zoneKey': zone_key,
@@ -177,6 +205,8 @@
if flow is not None:
flow = -1 * flow
+ check_timestamp(session=session, logger=logger)
+
exchange = {'sortedZoneKeys': sorted_codes,
'datetime': arrow.now('Africa/Windhoek').datetime,
'netFlow': flow,
| {"golden_diff": "diff --git a/parsers/NA.py b/parsers/NA.py\n--- a/parsers/NA.py\n+++ b/parsers/NA.py\n@@ -12,6 +12,7 @@\n import requests\n \n \n+timestamp_link = 'http://www.nampower.com.na/gis/images/File_Info.png'\n generation_link = 'http://www.nampower.com.na/gis/images/Gx.png'\n exchanges_link = 'http://www.nampower.com.na/gis/images/Imports_Exports.png'\n \n@@ -39,11 +40,8 @@\n img = Image.open(s.get(link, stream=True).raw)\n \n if img.size != expected_size:\n- if (logger):\n- logger.warning(\"Check Namibia Scada dashboard for {} changes.\".format(link),\n- extras={'key': 'NA'})\n- else:\n- print(\"Check Namibia Scada dashboard for {} changes.\".format(link))\n+ logger.warning(\"Check Namibia Scada dashboard for {} changes.\".format(link),\n+ extras={'key': 'NA'})\n \n gray = img.convert('L')\n gray_enlarged = gray.resize(new_size, Image.LANCZOS)\n@@ -52,6 +50,35 @@\n return text\n \n \n+def check_timestamp(session=None, logger=None):\n+ \"\"\"\n+ Sometimes the Scada Dashboard image stops updating for a while.\n+ This function tries to ensure that only data younger than 1 hour\n+ is accepted.\n+ \"\"\"\n+\n+ scada_info = get_text_from_image(session=session, link=timestamp_link,\n+ expected_size=(600,20), new_size=(1200,40),\n+ logger=logger)\n+\n+ timestamp = scada_info.split(':', 1)[1]\n+\n+ try:\n+ scada_time = arrow.get(timestamp, ' DD/MM/YYYY HH:mm:ss')\n+ except arrow.parser.ParserError as e:\n+ logger.warning('Namibia scada timestamp cannot be read, got {}.'.format(timestamp))\n+ # The OCR of the Scada dashboard is not very reliable, on failure safer to assume data is good.\n+ return\n+\n+ data_time = scada_time.replace(tzinfo='Africa/Windhoek')\n+ current_time = arrow.now('Africa/Windhoek')\n+ diff = current_time - data_time\n+\n+ # Need to be sure we don't get old data if image stops updating.\n+ if diff.seconds > 3600:\n+ raise ValueError('Namibia scada data is too old to use, data is {} hours old.'.format(diff.seconds/3600))\n+\n+\n def data_processor(text):\n \"\"\"\n Takes text produced from OCR and extracts production.\n@@ -107,6 +134,7 @@\n logger=logger)\n \n production = data_processor(raw_text)\n+ check_timestamp(session=session, logger=logger)\n \n data = {\n 'zoneKey': zone_key,\n@@ -177,6 +205,8 @@\n if flow is not None:\n flow = -1 * flow\n \n+ check_timestamp(session=session, logger=logger)\n+\n exchange = {'sortedZoneKeys': sorted_codes,\n 'datetime': arrow.now('Africa/Windhoek').datetime,\n 'netFlow': flow,\n", "issue": "Namibia parser should read out data timestamp\nCurrent time in Namibia is 2 April 14:41 but the [Nampower dashboard](http://www.nampower.com.na/Scada.aspx) is stuck on 1 April 11:45:40.\r\n\r\nTimestamp of the data is available in http://www.nampower.com.na/gis/images/File_Info.png\r\n\r\nWe could OCR that file. A quick attempt using the current tesseract methods and upscaling from 600x20 to 1200x40 seems to have worked alright, I got `SCADA File Dene: 01/04/2018 11:45:40` (it should have been \"File Date\" but otherwise correct). 
There is always the danger of OCR misreading, so maybe only ignore the datapoint if the timestamp can be read and is too far in the past.\r\n\r\nScreenshot from our page:\r\n\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"Parser for Namibia.\"\"\"\n\nimport arrow\nfrom bs4 import BeautifulSoup\nfrom .lib.validation import validate\nfrom logging import getLogger\nfrom PIL import Image\nfrom pytesseract import image_to_string\nimport re\nimport requests\n\n\ngeneration_link = 'http://www.nampower.com.na/gis/images/Gx.png'\nexchanges_link = 'http://www.nampower.com.na/gis/images/Imports_Exports.png'\n\nplant_mapping = {\"Ruacana\": \"hydro\",\n \"Van Eck\": \"coal\",\n \"Paratus\": \"oil\",\n \"Anixas\": \"oil\",\n \"Solar\": \"solar\",\n \"Wind\": \"wind\"\n }\n\nexchange_mapping = {\"NA->ZA\": \"ESKOM\",\n \"NA->ZM\": \"ZESCO\"\n }\n\n\ndef get_text_from_image(link, expected_size, new_size, logger, session=None):\n \"\"\"\n Gets image from link and checks expected size vs actual.\n Converts to black & white and enlarges to improve OCR accuracy.\n Performs OCR using tesseract and returns a str.\n \"\"\"\n\n s = session or requests.Session()\n img = Image.open(s.get(link, stream=True).raw)\n\n if img.size != expected_size:\n if (logger):\n logger.warning(\"Check Namibia Scada dashboard for {} changes.\".format(link),\n extras={'key': 'NA'})\n else:\n print(\"Check Namibia Scada dashboard for {} changes.\".format(link))\n\n gray = img.convert('L')\n gray_enlarged = gray.resize(new_size, Image.LANCZOS)\n text = image_to_string(gray_enlarged, lang='eng')\n\n return text\n\n\ndef data_processor(text):\n \"\"\"\n Takes text produced from OCR and extracts production.\n Returns a dictionary.\n \"\"\"\n\n production = {}\n for k in plant_mapping.keys():\n pattern = re.escape(k) + r\": (\\d+\\.\\d\\d)\"\n try:\n val = re.search(pattern, text).group(1)\n production[plant_mapping[k]] = production.get(plant_mapping[k], 0.0)+ float(val)\n except (AttributeError, ValueError) as e:\n production[plant_mapping[k]] = None\n\n return production\n\n\ndef fetch_production(zone_key = 'NA', session=None, target_datetime=None, logger=getLogger(__name__)):\n \"\"\"\n Requests the last known production mix (in MW) of a given country\n Arguments:\n zone_key (optional) -- used in case a parser is able to fetch multiple countries\n session (optional) -- request session passed in order to re-use an existing session\n Return:\n A dictionary in the form:\n {\n 'zoneKey': 'FR',\n 'datetime': '2017-01-01T00:00:00Z',\n 'production': {\n 'biomass': 0.0,\n 'coal': 0.0,\n 'gas': 0.0,\n 'hydro': 0.0,\n 'nuclear': null,\n 'oil': 0.0,\n 'solar': 0.0,\n 'wind': 0.0,\n 'geothermal': 0.0,\n 'unknown': 0.0\n },\n 'storage': {\n 'hydro': -10.0,\n },\n 'source': 'mysource.com'\n }\n \"\"\"\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n raw_text = get_text_from_image(session=session, link=generation_link, \\\n expected_size=(400, 245), new_size=(1000,612), \\\n logger=logger)\n\n production = data_processor(raw_text)\n\n data = {\n 'zoneKey': zone_key,\n 'datetime': arrow.now('Africa/Windhoek').datetime,\n 'production': production,\n 'storage': {},\n 'source': 'nampower.com.na'\n }\n\n data = validate(data, required=['hydro'])\n\n return data\n\n\ndef exchange_processor(text, exchange, logger):\n \"\"\"\n Takes text produced from OCR and extracts exchange flow.\n Returns a float or None.\n \"\"\"\n\n utility = exchange_mapping[exchange]\n\n try:\n pattern 
= re.escape(utility) + r\"([\\D]*?)([-+]?\\d+\\.\\d\\d)\"\n val = re.search(pattern, text).group(2)\n flow = float(val)\n except (AttributeError, ValueError) as e:\n logger.warning(\"\"\"{} regex match failed on the following text.\n {}\"\"\".format(exchange, text))\n raise Exception(\"Exchange {} cannot be read.\".format(exchange)) from e\n\n return flow\n\n\ndef fetch_exchange(zone_key1, zone_key2, session=None, target_datetime=None, logger=getLogger(__name__)):\n \"\"\"Requests the last known power exchange (in MW) between two zones\n Arguments:\n zone_key1 -- the first country code\n zone_key2 -- the second country code; order of the two codes in params doesn't matter\n session (optional) -- request session passed in order to re-use an existing session\n Return:\n A dictionary in the form:\n {\n 'sortedZoneKeys': 'DK->NO',\n 'datetime': '2017-01-01T00:00:00Z',\n 'netFlow': 0.0,\n 'source': 'mysource.com'\n }\n where net flow is from DK into NO\n \"\"\"\n if target_datetime:\n raise NotImplementedError('This parser is not yet able to parse past dates')\n\n sorted_codes = \"->\".join(sorted([zone_key1, zone_key2]))\n\n raw_text = get_text_from_image(session=session, link=exchanges_link,\n expected_size=(400, 195), new_size=(1120, 546), \\\n logger=logger)\n\n if sorted_codes == 'NA->ZA':\n flow = exchange_processor(raw_text, 'NA->ZA', logger=logger)\n elif sorted_codes == 'NA->ZM':\n flow = exchange_processor(raw_text, 'NA->ZM', logger=logger)\n else:\n raise NotImplementedError('This exchange pair is not implemented')\n\n #Import considered positive in data source.\n if flow is not None:\n flow = -1 * flow\n\n exchange = {'sortedZoneKeys': sorted_codes,\n 'datetime': arrow.now('Africa/Windhoek').datetime,\n 'netFlow': flow,\n 'source': 'nampower.com.na'\n }\n\n return exchange\n\n\nif __name__ == '__main__':\n print('fetch_production() ->')\n print(fetch_production())\n print('fetch_exchange(NA, ZA)')\n print(fetch_exchange('NA', 'ZA'))\n print('fetch_exchange(NA, ZM)')\n print(fetch_exchange('NA', 'ZM'))\n", "path": "parsers/NA.py"}]} | 2,805 | 719 |
gh_patches_debug_31061 | rasdani/github-patches | git_diff | conan-io__conan-center-index-2686 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[package] libunwind/1.3.1 library dependency order is wrong
### Package and Environment Details (include every applicable attribute)
* Package Name/Version: **libunwind/1.3.1**
* Operating System+version: **Linux Ubuntu 18.04**
* Compiler+version: **GCC 7**
* Conan version: **conan 1.21.1**
* Python version: **Python 2.7.17, 3.6.9**
### Conan profile (output of `conan profile show default` or `conan profile show <profile>` if custom profile is in use)
```
[settings]
os=Linux
os_build=Linux
arch=x86_64
arch_build=x86_64
compiler=gcc
compiler.version=7
compiler.libcxx=libstdc++1+
[options]
[build_requires]
[env]
```
But an **error occurs when static linking**, that is, shared=False.
### Steps to reproduce (Include if Applicable)
Soon I will write an example to reproduce.
But there is [an example about libunwind library dependency](https://github.com/daniel-thompson/libunwind-examples/blob/master/Makefile)
* current order : `unwind` `unwind-generic` `unwind-ptrace`
* right order : `unwind-ptrace` `unwind-generic` `unwind`
### Logs (Include/Attach if Applicable)
<details><summary>Click to expand log</summary>
```
Put your log output here
```
</details>
</issue>
<code>
[start of recipes/libunwind/all/conanfile.py]
1 from conans import ConanFile, AutoToolsBuildEnvironment, tools
2 from conans.errors import ConanInvalidConfiguration
3 import os
4 import glob
5
6
7 class LiunwindConan(ConanFile):
8 name = "libunwind"
9 description = "Manipulate the preserved state of each call-frame and resume the execution at any point."
10 topics = ("conan", "libunwind", "unwind", "debuggers", "exception-handling", "introspection", "setjmp")
11 url = "https://github.com/conan-io/conan-center-index"
12 homepage = "https://github.com/libunwind/libunwind"
13 license = "MIT"
14 settings = "os", "arch", "compiler", "build_type"
15 options = {"shared": [True, False], "fPIC": [True, False], "coredump": [True, False], "ptrace": [True, False], "setjmp": [True, False]}
16 default_options = {"shared": False, "fPIC": True, "coredump": True, "ptrace": True, "setjmp": True}
17 requires = "xz_utils/5.2.4"
18 _autotools = None
19
20 @property
21 def _source_subfolder(self):
22 return "source_subfolder"
23
24 def configure(self):
25 if self.settings.os not in ["Linux", "FreeBSD"]:
26 raise ConanInvalidConfiguration("libunwind is only supported on Linux and FreeBSD")
27 del self.settings.compiler.libcxx
28 del self.settings.compiler.cppstd
29
30 def source(self):
31 tools.get(**self.conan_data["sources"][self.version])
32 extracted_dir = self.name + "-" + self.version
33 os.rename(extracted_dir, self._source_subfolder)
34
35 def _configure_autotools(self):
36 if not self._autotools:
37 self._autotools = AutoToolsBuildEnvironment(self, win_bash=tools.os_info.is_windows)
38 args = [
39 "--enable-shared={}".format("yes" if self.options.shared else "no"),
40 "--enable-static={}".format("no" if self.options.shared else "yes"),
41 "--enable-coredump={}".format("yes" if self.options.coredump else "no"),
42 "--enable-ptrace={}".format("yes" if self.options.ptrace else "no"),
43 "--enable-setjmp={}".format("yes" if self.options.setjmp else "no"),
44 "--disable-tests",
45 "--disable-documentation"
46 ]
47 self._autotools.configure(configure_dir=self._source_subfolder, args=args)
48 return self._autotools
49
50 def build(self):
51 autotools = self._configure_autotools()
52 autotools.make()
53
54 def package(self):
55 self.copy(pattern="COPYING", dst="licenses", src=self._source_subfolder)
56 autotools = self._configure_autotools()
57 autotools.install()
58 tools.rmdir(os.path.join(self.package_folder, 'lib', 'pkgconfig'))
59 with tools.chdir(os.path.join(self.package_folder, "lib")):
60 for filename in glob.glob("*.la"):
61 os.unlink(filename)
62
63 def package_info(self):
64 self.cpp_info.libs = tools.collect_libs(self)
65 if self.settings.os == "Linux":
66 self.cpp_info.system_libs.append("pthread")
67
[end of recipes/libunwind/all/conanfile.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/recipes/libunwind/all/conanfile.py b/recipes/libunwind/all/conanfile.py
--- a/recipes/libunwind/all/conanfile.py
+++ b/recipes/libunwind/all/conanfile.py
@@ -24,6 +24,8 @@
def configure(self):
if self.settings.os not in ["Linux", "FreeBSD"]:
raise ConanInvalidConfiguration("libunwind is only supported on Linux and FreeBSD")
+ if self.options.shared:
+ del self.options.fPIC
del self.settings.compiler.libcxx
del self.settings.compiler.cppstd
@@ -61,6 +63,23 @@
os.unlink(filename)
def package_info(self):
- self.cpp_info.libs = tools.collect_libs(self)
+ self.cpp_info.components["unwind"].names["pkg_config"] = "libunwind"
+ self.cpp_info.components["unwind"].libs = ["unwind"]
+ self.cpp_info.components["unwind"].requires = ["xz_utils::xz_utils"]
if self.settings.os == "Linux":
- self.cpp_info.system_libs.append("pthread")
+ self.cpp_info.components["unwind"].system_libs.append("pthread")
+ self.cpp_info.components["generic"].names["pkg_config"] = "libunwind-generic"
+ self.cpp_info.components["generic"].libs = ["unwind-generic"]
+ self.cpp_info.components["generic"].requires = ["unwind"]
+ if self.options.ptrace:
+ self.cpp_info.components["ptrace"].names["pkg_config"] = "libunwind-ptrace"
+ self.cpp_info.components["ptrace"].libs = ["unwind-ptrace"]
+ self.cpp_info.components["ptrace"].requires = ["generic", "unwind"]
+ if self.options.setjmp:
+ self.cpp_info.components["setjmp"].names["pkg_config"] = "libunwind-setjmp"
+ self.cpp_info.components["setjmp"].libs = ["unwind-setjmp"]
+ self.cpp_info.components["setjmp"].requires = ["unwind"]
+ if self.options.coredump:
+ self.cpp_info.components["coredump"].names["pkg_config"] = "libunwind-coredump"
+ self.cpp_info.components["coredump"].libs = ["unwind-coredump"]
+ self.cpp_info.components["coredump"].requires = ["generic", "unwind"]
| {"golden_diff": "diff --git a/recipes/libunwind/all/conanfile.py b/recipes/libunwind/all/conanfile.py\n--- a/recipes/libunwind/all/conanfile.py\n+++ b/recipes/libunwind/all/conanfile.py\n@@ -24,6 +24,8 @@\n def configure(self):\n if self.settings.os not in [\"Linux\", \"FreeBSD\"]:\n raise ConanInvalidConfiguration(\"libunwind is only supported on Linux and FreeBSD\")\n+ if self.options.shared:\n+ del self.options.fPIC\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n \n@@ -61,6 +63,23 @@\n os.unlink(filename)\n \n def package_info(self):\n- self.cpp_info.libs = tools.collect_libs(self)\n+ self.cpp_info.components[\"unwind\"].names[\"pkg_config\"] = \"libunwind\"\n+ self.cpp_info.components[\"unwind\"].libs = [\"unwind\"]\n+ self.cpp_info.components[\"unwind\"].requires = [\"xz_utils::xz_utils\"]\n if self.settings.os == \"Linux\":\n- self.cpp_info.system_libs.append(\"pthread\")\n+ self.cpp_info.components[\"unwind\"].system_libs.append(\"pthread\")\n+ self.cpp_info.components[\"generic\"].names[\"pkg_config\"] = \"libunwind-generic\"\n+ self.cpp_info.components[\"generic\"].libs = [\"unwind-generic\"]\n+ self.cpp_info.components[\"generic\"].requires = [\"unwind\"]\n+ if self.options.ptrace:\n+ self.cpp_info.components[\"ptrace\"].names[\"pkg_config\"] = \"libunwind-ptrace\"\n+ self.cpp_info.components[\"ptrace\"].libs = [\"unwind-ptrace\"]\n+ self.cpp_info.components[\"ptrace\"].requires = [\"generic\", \"unwind\"]\n+ if self.options.setjmp:\n+ self.cpp_info.components[\"setjmp\"].names[\"pkg_config\"] = \"libunwind-setjmp\"\n+ self.cpp_info.components[\"setjmp\"].libs = [\"unwind-setjmp\"]\n+ self.cpp_info.components[\"setjmp\"].requires = [\"unwind\"]\n+ if self.options.coredump:\n+ self.cpp_info.components[\"coredump\"].names[\"pkg_config\"] = \"libunwind-coredump\"\n+ self.cpp_info.components[\"coredump\"].libs = [\"unwind-coredump\"]\n+ self.cpp_info.components[\"coredump\"].requires = [\"generic\", \"unwind\"]\n", "issue": "[package] libunwind/1.3.1 library dependency order is wrong\n### Package and Environment Details (include every applicable attribute)\r\n * Package Name/Version: **libunwind/1.3.1**\r\n * Operating System+version: **Linux Ubuntu 18.04**\r\n * Compiler+version: **GCC 7**\r\n * Conan version: **conan 1.21.1**\r\n * Python version: **Python 2.7.17, 3.6.9**\r\n\r\n\r\n### Conan profile (output of `conan profile show default` or `conan profile show <profile>` if custom profile is in use)\r\n```\r\n[settings]\r\nos=Linux\r\nos_build=Linux\r\narch=x86_64\r\narch_build=x86_64\r\ncompiler=gcc\r\ncompiler.version=7\r\ncompiler.libcxx=libstdc++1+\r\n[options]\r\n[build_requires]\r\n[env]\r\n```\r\n\r\nBut **error occurs when static linking**, that is shared=False.\r\n\r\n### Steps to reproduce (Include if Applicable)\r\n\r\nSoon I will write an example to reproduce.\r\n\r\nBut there is [an example about libunwind library dependency](https://github.com/daniel-thompson/libunwind-examples/blob/master/Makefile)\r\n\r\n* current order : `unwind` `unwind-generic` `unwind-ptrace`\r\n* right order : `unwind-ptrace` `unwind-generic` `unwind`\r\n\r\n### Logs (Include/Attach if Applicable)\r\n<details><summary>Click to expand log</summary>\r\n\r\n```\r\nPut your log output here\r\n```\r\n\r\n</details>\r\n\n", "before_files": [{"content": "from conans import ConanFile, AutoToolsBuildEnvironment, tools\nfrom conans.errors import ConanInvalidConfiguration\nimport os\nimport glob\n\n\nclass LiunwindConan(ConanFile):\n name = \"libunwind\"\n description = 
\"Manipulate the preserved state of each call-frame and resume the execution at any point.\"\n topics = (\"conan\", \"libunwind\", \"unwind\", \"debuggers\", \"exception-handling\", \"introspection\", \"setjmp\")\n url = \"https://github.com/conan-io/conan-center-index\"\n homepage = \"https://github.com/libunwind/libunwind\"\n license = \"MIT\"\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n options = {\"shared\": [True, False], \"fPIC\": [True, False], \"coredump\": [True, False], \"ptrace\": [True, False], \"setjmp\": [True, False]}\n default_options = {\"shared\": False, \"fPIC\": True, \"coredump\": True, \"ptrace\": True, \"setjmp\": True}\n requires = \"xz_utils/5.2.4\"\n _autotools = None\n\n @property\n def _source_subfolder(self):\n return \"source_subfolder\"\n\n def configure(self):\n if self.settings.os not in [\"Linux\", \"FreeBSD\"]:\n raise ConanInvalidConfiguration(\"libunwind is only supported on Linux and FreeBSD\")\n del self.settings.compiler.libcxx\n del self.settings.compiler.cppstd\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n extracted_dir = self.name + \"-\" + self.version\n os.rename(extracted_dir, self._source_subfolder)\n\n def _configure_autotools(self):\n if not self._autotools:\n self._autotools = AutoToolsBuildEnvironment(self, win_bash=tools.os_info.is_windows)\n args = [\n \"--enable-shared={}\".format(\"yes\" if self.options.shared else \"no\"),\n \"--enable-static={}\".format(\"no\" if self.options.shared else \"yes\"),\n \"--enable-coredump={}\".format(\"yes\" if self.options.coredump else \"no\"),\n \"--enable-ptrace={}\".format(\"yes\" if self.options.ptrace else \"no\"),\n \"--enable-setjmp={}\".format(\"yes\" if self.options.setjmp else \"no\"),\n \"--disable-tests\",\n \"--disable-documentation\"\n ]\n self._autotools.configure(configure_dir=self._source_subfolder, args=args)\n return self._autotools\n\n def build(self):\n autotools = self._configure_autotools()\n autotools.make()\n\n def package(self):\n self.copy(pattern=\"COPYING\", dst=\"licenses\", src=self._source_subfolder)\n autotools = self._configure_autotools()\n autotools.install()\n tools.rmdir(os.path.join(self.package_folder, 'lib', 'pkgconfig'))\n with tools.chdir(os.path.join(self.package_folder, \"lib\")):\n for filename in glob.glob(\"*.la\"):\n os.unlink(filename)\n\n def package_info(self):\n self.cpp_info.libs = tools.collect_libs(self)\n if self.settings.os == \"Linux\":\n self.cpp_info.system_libs.append(\"pthread\")\n", "path": "recipes/libunwind/all/conanfile.py"}]} | 1,728 | 519 |
gh_patches_debug_5977 | rasdani/github-patches | git_diff | bokeh__bokeh-4754 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Typo in categorical example plot title
File examples/plotting/file/categorical.py contains a typo in plot name.
</issue>
<code>
[start of examples/plotting/file/categorical.py]
1 from bokeh.layouts import row
2 from bokeh.plotting import figure, show, output_file
3
4 factors = ["a", "b", "c", "d", "e", "f", "g", "h"]
5 x = [50, 40, 65, 10, 25, 37, 80, 60]
6
7 dot = figure(title="Catgorical Dot Plot", tools="", toolbar_location=None,
8 y_range=factors, x_range=[0,100])
9
10 dot.segment(0, factors, x, factors, line_width=2, line_color="green", )
11 dot.circle(x, factors, size=15, fill_color="orange", line_color="green", line_width=3, )
12
13 factors = ["foo", "bar", "baz"]
14 x = ["foo", "foo", "foo", "bar", "bar", "bar", "baz", "baz", "baz"]
15 y = ["foo", "bar", "baz", "foo", "bar", "baz", "foo", "bar", "baz"]
16 colors = [
17 "#0B486B", "#79BD9A", "#CFF09E",
18 "#79BD9A", "#0B486B", "#79BD9A",
19 "#CFF09E", "#79BD9A", "#0B486B"
20 ]
21
22 hm = figure(title="Categorical Heatmap", tools="hover", toolbar_location=None,
23 x_range=factors, y_range=factors)
24
25 hm.rect(x, y, color=colors, width=1, height=1)
26
27 output_file("categorical.html", title="categorical.py example")
28
29 show(row(hm, dot, sizing_mode="scale_width")) # open a browser
30
[end of examples/plotting/file/categorical.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/examples/plotting/file/categorical.py b/examples/plotting/file/categorical.py
--- a/examples/plotting/file/categorical.py
+++ b/examples/plotting/file/categorical.py
@@ -4,7 +4,7 @@
factors = ["a", "b", "c", "d", "e", "f", "g", "h"]
x = [50, 40, 65, 10, 25, 37, 80, 60]
-dot = figure(title="Catgorical Dot Plot", tools="", toolbar_location=None,
+dot = figure(title="Categorical Dot Plot", tools="", toolbar_location=None,
y_range=factors, x_range=[0,100])
dot.segment(0, factors, x, factors, line_width=2, line_color="green", )
| {"golden_diff": "diff --git a/examples/plotting/file/categorical.py b/examples/plotting/file/categorical.py\n--- a/examples/plotting/file/categorical.py\n+++ b/examples/plotting/file/categorical.py\n@@ -4,7 +4,7 @@\n factors = [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\"]\n x = [50, 40, 65, 10, 25, 37, 80, 60]\n \n-dot = figure(title=\"Catgorical Dot Plot\", tools=\"\", toolbar_location=None,\n+dot = figure(title=\"Categorical Dot Plot\", tools=\"\", toolbar_location=None,\n y_range=factors, x_range=[0,100])\n \n dot.segment(0, factors, x, factors, line_width=2, line_color=\"green\", )\n", "issue": "Typo in categorical example plot title\nFile examples/plotting/file/categorical.py contains a typo in plot name.\n\n", "before_files": [{"content": "from bokeh.layouts import row\nfrom bokeh.plotting import figure, show, output_file\n\nfactors = [\"a\", \"b\", \"c\", \"d\", \"e\", \"f\", \"g\", \"h\"]\nx = [50, 40, 65, 10, 25, 37, 80, 60]\n\ndot = figure(title=\"Catgorical Dot Plot\", tools=\"\", toolbar_location=None,\n y_range=factors, x_range=[0,100])\n\ndot.segment(0, factors, x, factors, line_width=2, line_color=\"green\", )\ndot.circle(x, factors, size=15, fill_color=\"orange\", line_color=\"green\", line_width=3, )\n\nfactors = [\"foo\", \"bar\", \"baz\"]\nx = [\"foo\", \"foo\", \"foo\", \"bar\", \"bar\", \"bar\", \"baz\", \"baz\", \"baz\"]\ny = [\"foo\", \"bar\", \"baz\", \"foo\", \"bar\", \"baz\", \"foo\", \"bar\", \"baz\"]\ncolors = [\n \"#0B486B\", \"#79BD9A\", \"#CFF09E\",\n \"#79BD9A\", \"#0B486B\", \"#79BD9A\",\n \"#CFF09E\", \"#79BD9A\", \"#0B486B\"\n]\n\nhm = figure(title=\"Categorical Heatmap\", tools=\"hover\", toolbar_location=None,\n x_range=factors, y_range=factors)\n\nhm.rect(x, y, color=colors, width=1, height=1)\n\noutput_file(\"categorical.html\", title=\"categorical.py example\")\n\nshow(row(hm, dot, sizing_mode=\"scale_width\")) # open a browser\n", "path": "examples/plotting/file/categorical.py"}]} | 1,008 | 188 |
gh_patches_debug_32242 | rasdani/github-patches | git_diff | doccano__doccano-863 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[Bug report] Error on Django Admin search
How to reproduce the behaviour
---------

Press "Search".
```
backend_1 | Internal Server Error: /admin/api/document/
backend_1 | Traceback (most recent call last):
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/core/handlers/exception.py", line 34, in inner
backend_1 | response = get_response(request)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/core/handlers/base.py", line 115, in _get_response
backend_1 | response = self.process_exception_by_middleware(e, request)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/core/handlers/base.py", line 113, in _get_response
backend_1 | response = wrapped_callback(request, *callback_args, **callback_kwargs)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/contrib/admin/options.py", line 606, in wrapper
backend_1 | return self.admin_site.admin_view(view)(*args, **kwargs)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/utils/decorators.py", line 142, in _wrapped_view
backend_1 | response = view_func(request, *args, **kwargs)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/views/decorators/cache.py", line 44, in _wrapped_view_func
backend_1 | response = view_func(request, *args, **kwargs)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/contrib/admin/sites.py", line 223, in inner
backend_1 | return view(request, *args, **kwargs)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/utils/decorators.py", line 45, in _wrapper
backend_1 | return bound_method(*args, **kwargs)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/utils/decorators.py", line 142, in _wrapped_view
backend_1 | response = view_func(request, *args, **kwargs)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/contrib/admin/options.py", line 1685, in changelist_view
backend_1 | cl = self.get_changelist_instance(request)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/contrib/admin/options.py", line 744, in get_changelist_instance
backend_1 | sortable_by,
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/contrib/admin/views/main.py", line 81, in __init__
backend_1 | self.queryset = self.get_queryset(request)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/contrib/admin/views/main.py", line 439, in get_queryset
backend_1 | qs, search_use_distinct = self.model_admin.get_search_results(request, qs, self.query)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/contrib/admin/options.py", line 1023, in get_search_results
backend_1 | queryset = queryset.filter(reduce(operator.or_, or_queries))
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/db/models/query.py", line 892, in filter
backend_1 | return self._filter_or_exclude(False, *args, **kwargs)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/db/models/query.py", line 910, in _filter_or_exclude
backend_1 | clone.query.add_q(Q(*args, **kwargs))
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1290, in add_q
backend_1 | clause, _ = self._add_q(q_object, self.used_aliases)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1312, in _add_q
backend_1 | current_negated, allow_joins, split_subq, simple_col)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1318, in _add_q
backend_1 | split_subq=split_subq, simple_col=simple_col,
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1251, in build_filter
backend_1 | condition = self.build_lookup(lookups, col, value)
backend_1 | File "/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py", line 1107, in build_lookup
backend_1 | raise FieldError('Related Field got invalid lookup: {}'.format(lookup_name))
backend_1 | django.core.exceptions.FieldError: Related Field got invalid lookup: icontains
backend_1 | [29/Jun/2020 12:25:49] "GET /admin/api/document/?q=request HTTP/1.1" 500 160618
```
Is this like https://stackoverflow.com/questions/11754877/troubleshooting-related-field-has-invalid-lookup-icontains?
Your Environment
---------
<!-- Include details of your environment.-->
* Operating System: Calculate Linux 20.6
* Python Version Used: system Python version is 3.7.7
* When you install doccano: 4927a01f090b91d8e14e467f2fd40d8301612e72
* How did you install doccano (Heroku button etc): Docker Compose
</issue>
<code>
[start of app/api/admin.py]
1 from django.contrib import admin
2
3 from .models import Label, Document, Project
4 from .models import Role, RoleMapping
5 from .models import DocumentAnnotation, SequenceAnnotation, Seq2seqAnnotation
6 from .models import TextClassificationProject, SequenceLabelingProject, Seq2seqProject
7
8
9 class LabelAdmin(admin.ModelAdmin):
10 list_display = ('text', 'project', 'text_color', 'background_color')
11 ordering = ('project',)
12 search_fields = ('project',)
13
14
15 class DocumentAdmin(admin.ModelAdmin):
16 list_display = ('text', 'project', 'meta')
17 ordering = ('project',)
18 search_fields = ('project',)
19
20
21 class ProjectAdmin(admin.ModelAdmin):
22 list_display = ('name', 'description', 'project_type', 'randomize_document_order', 'collaborative_annotation')
23 ordering = ('project_type',)
24 search_fields = ('name',)
25
26
27 class SequenceAnnotationAdmin(admin.ModelAdmin):
28 list_display = ('document', 'label', 'start_offset', 'user')
29 ordering = ('document',)
30 search_fields = ('document',)
31
32
33 class DocumentAnnotationAdmin(admin.ModelAdmin):
34 list_display = ('document', 'label', 'user')
35 ordering = ('document',)
36 search_fields = ('document',)
37
38
39 class Seq2seqAnnotationAdmin(admin.ModelAdmin):
40 list_display = ('document', 'text', 'user')
41 ordering = ('document',)
42 search_fields = ('document',)
43
44
45 class RoleAdmin(admin.ModelAdmin):
46 list_display = ('name', 'description')
47 ordering = ('name',)
48 search_fields = ('name',)
49
50
51 class RoleMappingAdmin(admin.ModelAdmin):
52 list_display = ('user', 'role', 'project', )
53 ordering = ('user',)
54 search_fields = ('user',)
55
56
57 admin.site.register(DocumentAnnotation, DocumentAnnotationAdmin)
58 admin.site.register(SequenceAnnotation, SequenceAnnotationAdmin)
59 admin.site.register(Seq2seqAnnotation, Seq2seqAnnotationAdmin)
60 admin.site.register(Label, LabelAdmin)
61 admin.site.register(Document, DocumentAdmin)
62 admin.site.register(Project, ProjectAdmin)
63 admin.site.register(TextClassificationProject, ProjectAdmin)
64 admin.site.register(SequenceLabelingProject, ProjectAdmin)
65 admin.site.register(Seq2seqProject, ProjectAdmin)
66 admin.site.register(Role, RoleAdmin)
67 admin.site.register(RoleMapping, RoleMappingAdmin)
68
[end of app/api/admin.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/app/api/admin.py b/app/api/admin.py
--- a/app/api/admin.py
+++ b/app/api/admin.py
@@ -9,13 +9,13 @@
class LabelAdmin(admin.ModelAdmin):
list_display = ('text', 'project', 'text_color', 'background_color')
ordering = ('project',)
- search_fields = ('project',)
+ search_fields = ('text',)
class DocumentAdmin(admin.ModelAdmin):
list_display = ('text', 'project', 'meta')
ordering = ('project',)
- search_fields = ('project',)
+ search_fields = ('text',)
class ProjectAdmin(admin.ModelAdmin):
@@ -27,19 +27,19 @@
class SequenceAnnotationAdmin(admin.ModelAdmin):
list_display = ('document', 'label', 'start_offset', 'user')
ordering = ('document',)
- search_fields = ('document',)
+ search_fields = ('document__text',)
class DocumentAnnotationAdmin(admin.ModelAdmin):
list_display = ('document', 'label', 'user')
ordering = ('document',)
- search_fields = ('document',)
+ search_fields = ('document__text',)
class Seq2seqAnnotationAdmin(admin.ModelAdmin):
list_display = ('document', 'text', 'user')
ordering = ('document',)
- search_fields = ('document',)
+ search_fields = ('document__text',)
class RoleAdmin(admin.ModelAdmin):
@@ -51,7 +51,7 @@
class RoleMappingAdmin(admin.ModelAdmin):
list_display = ('user', 'role', 'project', )
ordering = ('user',)
- search_fields = ('user',)
+ search_fields = ('user__username',)
admin.site.register(DocumentAnnotation, DocumentAnnotationAdmin)
| {"golden_diff": "diff --git a/app/api/admin.py b/app/api/admin.py\n--- a/app/api/admin.py\n+++ b/app/api/admin.py\n@@ -9,13 +9,13 @@\n class LabelAdmin(admin.ModelAdmin):\n list_display = ('text', 'project', 'text_color', 'background_color')\n ordering = ('project',)\n- search_fields = ('project',)\n+ search_fields = ('text',)\n \n \n class DocumentAdmin(admin.ModelAdmin):\n list_display = ('text', 'project', 'meta')\n ordering = ('project',)\n- search_fields = ('project',)\n+ search_fields = ('text',)\n \n \n class ProjectAdmin(admin.ModelAdmin):\n@@ -27,19 +27,19 @@\n class SequenceAnnotationAdmin(admin.ModelAdmin):\n list_display = ('document', 'label', 'start_offset', 'user')\n ordering = ('document',)\n- search_fields = ('document',)\n+ search_fields = ('document__text',)\n \n \n class DocumentAnnotationAdmin(admin.ModelAdmin):\n list_display = ('document', 'label', 'user')\n ordering = ('document',)\n- search_fields = ('document',)\n+ search_fields = ('document__text',)\n \n \n class Seq2seqAnnotationAdmin(admin.ModelAdmin):\n list_display = ('document', 'text', 'user')\n ordering = ('document',)\n- search_fields = ('document',)\n+ search_fields = ('document__text',)\n \n \n class RoleAdmin(admin.ModelAdmin):\n@@ -51,7 +51,7 @@\n class RoleMappingAdmin(admin.ModelAdmin):\n list_display = ('user', 'role', 'project', )\n ordering = ('user',)\n- search_fields = ('user',)\n+ search_fields = ('user__username',)\n \n \n admin.site.register(DocumentAnnotation, DocumentAnnotationAdmin)\n", "issue": "[Bug report] Error on Django Admin search\nHow to reproduce the behaviour\r\n---------\r\n\r\n\r\nPress \"Search\".\r\n\r\n```\r\nbackend_1 | Internal Server Error: /admin/api/document/\r\nbackend_1 | Traceback (most recent call last):\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/core/handlers/exception.py\", line 34, in inner\r\nbackend_1 | response = get_response(request)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/core/handlers/base.py\", line 115, in _get_response\r\nbackend_1 | response = self.process_exception_by_middleware(e, request)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/core/handlers/base.py\", line 113, in _get_response\r\nbackend_1 | response = wrapped_callback(request, *callback_args, **callback_kwargs)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/contrib/admin/options.py\", line 606, in wrapper\r\nbackend_1 | return self.admin_site.admin_view(view)(*args, **kwargs)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/utils/decorators.py\", line 142, in _wrapped_view\r\nbackend_1 | response = view_func(request, *args, **kwargs)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/views/decorators/cache.py\", line 44, in _wrapped_view_func\r\nbackend_1 | response = view_func(request, *args, **kwargs)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/contrib/admin/sites.py\", line 223, in inner\r\nbackend_1 | return view(request, *args, **kwargs)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/utils/decorators.py\", line 45, in _wrapper\r\nbackend_1 | return bound_method(*args, **kwargs)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/utils/decorators.py\", line 142, in _wrapped_view\r\nbackend_1 | response = view_func(request, *args, **kwargs)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/contrib/admin/options.py\", line 1685, in changelist_view\r\nbackend_1 | cl = 
self.get_changelist_instance(request)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/contrib/admin/options.py\", line 744, in get_changelist_instance\r\nbackend_1 | sortable_by,\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/contrib/admin/views/main.py\", line 81, in __init__\r\nbackend_1 | self.queryset = self.get_queryset(request)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/contrib/admin/views/main.py\", line 439, in get_queryset\r\nbackend_1 | qs, search_use_distinct = self.model_admin.get_search_results(request, qs, self.query)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/contrib/admin/options.py\", line 1023, in get_search_results\r\nbackend_1 | queryset = queryset.filter(reduce(operator.or_, or_queries))\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/db/models/query.py\", line 892, in filter\r\nbackend_1 | return self._filter_or_exclude(False, *args, **kwargs)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/db/models/query.py\", line 910, in _filter_or_exclude\r\nbackend_1 | clone.query.add_q(Q(*args, **kwargs))\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1290, in add_q\r\nbackend_1 | clause, _ = self._add_q(q_object, self.used_aliases)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1312, in _add_q\r\nbackend_1 | current_negated, allow_joins, split_subq, simple_col)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1318, in _add_q\r\nbackend_1 | split_subq=split_subq, simple_col=simple_col,\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1251, in build_filter\r\nbackend_1 | condition = self.build_lookup(lookups, col, value)\r\nbackend_1 | File \"/src/venv/lib/python3.6/site-packages/django/db/models/sql/query.py\", line 1107, in build_lookup\r\nbackend_1 | raise FieldError('Related Field got invalid lookup: {}'.format(lookup_name))\r\nbackend_1 | django.core.exceptions.FieldError: Related Field got invalid lookup: icontains\r\nbackend_1 | [29/Jun/2020 12:25:49] \"GET /admin/api/document/?q=request HTTP/1.1\" 500 160618\r\n```\r\n\r\nIs this like https://stackoverflow.com/questions/11754877/troubleshooting-related-field-has-invalid-lookup-icontains?\r\n\r\nYour Environment\r\n---------\r\n<!-- Include details of your environment.-->\r\n* Operating System: Calculate Linux 20.6\r\n* Python Version Used: system Python version is 3.7.7\r\n* When you install doccano: 4927a01f090b91d8e14e467f2fd40d8301612e72\r\n* How did you install doccano (Heroku button etc): Docker Compose\r\n\n", "before_files": [{"content": "from django.contrib import admin\n\nfrom .models import Label, Document, Project\nfrom .models import Role, RoleMapping\nfrom .models import DocumentAnnotation, SequenceAnnotation, Seq2seqAnnotation\nfrom .models import TextClassificationProject, SequenceLabelingProject, Seq2seqProject\n\n\nclass LabelAdmin(admin.ModelAdmin):\n list_display = ('text', 'project', 'text_color', 'background_color')\n ordering = ('project',)\n search_fields = ('project',)\n\n\nclass DocumentAdmin(admin.ModelAdmin):\n list_display = ('text', 'project', 'meta')\n ordering = ('project',)\n search_fields = ('project',)\n\n\nclass ProjectAdmin(admin.ModelAdmin):\n list_display = ('name', 'description', 'project_type', 'randomize_document_order', 'collaborative_annotation')\n ordering = 
('project_type',)\n search_fields = ('name',)\n\n\nclass SequenceAnnotationAdmin(admin.ModelAdmin):\n list_display = ('document', 'label', 'start_offset', 'user')\n ordering = ('document',)\n search_fields = ('document',)\n\n\nclass DocumentAnnotationAdmin(admin.ModelAdmin):\n list_display = ('document', 'label', 'user')\n ordering = ('document',)\n search_fields = ('document',)\n\n\nclass Seq2seqAnnotationAdmin(admin.ModelAdmin):\n list_display = ('document', 'text', 'user')\n ordering = ('document',)\n search_fields = ('document',)\n\n\nclass RoleAdmin(admin.ModelAdmin):\n list_display = ('name', 'description')\n ordering = ('name',)\n search_fields = ('name',)\n\n\nclass RoleMappingAdmin(admin.ModelAdmin):\n list_display = ('user', 'role', 'project', )\n ordering = ('user',)\n search_fields = ('user',)\n\n\nadmin.site.register(DocumentAnnotation, DocumentAnnotationAdmin)\nadmin.site.register(SequenceAnnotation, SequenceAnnotationAdmin)\nadmin.site.register(Seq2seqAnnotation, Seq2seqAnnotationAdmin)\nadmin.site.register(Label, LabelAdmin)\nadmin.site.register(Document, DocumentAdmin)\nadmin.site.register(Project, ProjectAdmin)\nadmin.site.register(TextClassificationProject, ProjectAdmin)\nadmin.site.register(SequenceLabelingProject, ProjectAdmin)\nadmin.site.register(Seq2seqProject, ProjectAdmin)\nadmin.site.register(Role, RoleAdmin)\nadmin.site.register(RoleMapping, RoleMappingAdmin)\n", "path": "app/api/admin.py"}]} | 2,631 | 364 |
gh_patches_debug_26203 | rasdani/github-patches | git_diff | pyg-team__pytorch_geometric-6546 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Bipartite graph support for utils.dense_to_sparse
### 🚀 The feature, motivation and pitch
I have a nearly-dense bipartite graph (that is, most nodes in set A are connected to most nodes in set B), and so it is easiest for me to define the edge adjacency matrix as a dense, non-square matrix. However, the message passing class expects a sparse edge adjacency layout. The dense_to_sparse utility would seem to be ideal for this purpose, but it can only take square matrices (thus, is unhelpful for bipartite graphs).
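The mechanics of such support are small, since `torch.nonzero` already handles rectangular matrices; a bipartite-aware conversion mostly has to stop insisting on a square input. A rough sketch of the 2-D case, purely illustrative and not the library's implementation:

```python
import torch

# Dense bipartite adjacency: 2 nodes in set A (rows), 3 nodes in set B (columns).
adj = torch.tensor([[3., 0., 1.],
                    [0., 2., 0.]])

edge_index = adj.nonzero().t()                 # [2, num_edges]; row = A index, col = B index
edge_attr = adj[edge_index[0], edge_index[1]]  # the corresponding non-zero weights

print(edge_index)  # tensor([[0, 0, 1],
                   #         [0, 2, 1]])
print(edge_attr)   # tensor([3., 1., 2.])
```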
### Alternatives
A way to implicitly ask `propagate` to pass messages from every node in A to every node in B would be even better (storing fully connected graphs is very memory-inefficient), but I know that PyG is meant for sparser graph constructions, so this would likely be a feature that isn't used very much by other people.
### Additional context
_No response_
</issue>
<code>
[start of torch_geometric/utils/sparse.py]
1 from typing import Any, Optional, Tuple, Union
2
3 import torch
4 from torch import Tensor
5
6 from torch_geometric.typing import SparseTensor
7
8
9 def dense_to_sparse(adj: Tensor) -> Tuple[Tensor, Tensor]:
10 r"""Converts a dense adjacency matrix to a sparse adjacency matrix defined
11 by edge indices and edge attributes.
12
13 Args:
14 adj (Tensor): The dense adjacency matrix.
15 :rtype: (:class:`LongTensor`, :class:`Tensor`)
16
17 Examples:
18
19 >>> # Forr a single adjacency matrix
20 >>> adj = torch.tensor([[3, 1],
21 ... [2, 0]])
22 >>> dense_to_sparse(adj)
23 (tensor([[0, 0, 1],
24 [0, 1, 0]]),
25 tensor([3, 1, 2]))
26
27 >>> # For two adjacency matrixes
28 >>> adj = torch.tensor([[[3, 1],
29 ... [2, 0]],
30 ... [[0, 1],
31 ... [0, 2]]])
32 >>> dense_to_sparse(adj)
33 (tensor([[0, 0, 1, 2, 3],
34 [0, 1, 0, 3, 3]]),
35 tensor([3, 1, 2, 1, 2]))
36 """
37 assert adj.dim() >= 2 and adj.dim() <= 3
38 assert adj.size(-1) == adj.size(-2)
39
40 edge_index = adj.nonzero().t()
41
42 if edge_index.size(0) == 2:
43 edge_attr = adj[edge_index[0], edge_index[1]]
44 return edge_index, edge_attr
45 else:
46 edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]
47 batch = edge_index[0] * adj.size(-1)
48 row = batch + edge_index[1]
49 col = batch + edge_index[2]
50 return torch.stack([row, col], dim=0), edge_attr
51
52
53 def is_torch_sparse_tensor(src: Any) -> bool:
54 """Returns :obj:`True` if the input :obj:`src` is a
55 :class:`torch.sparse.Tensor` (in any sparse layout).
56
57 Args:
58 src (Any): The input object to be checked.
59 """
60 return isinstance(src, Tensor) and src.is_sparse
61
62
63 def is_sparse(src: Any) -> bool:
64 """Returns :obj:`True` if the input :obj:`src` is of type
65 :class:`torch.sparse.Tensor` (in any sparse layout) or of type
66 :class:`torch_sparse.SparseTensor`.
67
68 Args:
69 src (Any): The input object to be checked.
70 """
71 return is_torch_sparse_tensor(src) or isinstance(src, SparseTensor)
72
73
74 def to_torch_coo_tensor(
75 edge_index: Tensor,
76 edge_attr: Optional[Tensor] = None,
77 size: Optional[Union[int, Tuple[int, int]]] = None,
78 ) -> Tensor:
79 """Converts a sparse adjacency matrix defined by edge indices and edge
80 attributes to a :class:`torch.sparse.Tensor`.
81
82 Args:
83 edge_index (LongTensor): The edge indices.
84 edge_attr (Tensor, optional): The edge attributes.
85 (default: :obj:`None`)
86 size (int or (int, int), optional): The size of the sparse matrix.
87 If given as an integer, will create a quadratic sparse matrix.
88 If set to :obj:`None`, will infer a quadratic sparse matrix based
89 on :obj:`edge_index.max() + 1`. (default: :obj:`None`)
90
91 :rtype: :class:`torch.sparse.FloatTensor`
92
93 Example:
94
95 >>> edge_index = torch.tensor([[0, 1, 1, 2, 2, 3],
96 ... [1, 0, 2, 1, 3, 2]])
97 >>> to_torch_coo_tensor(edge_index)
98 tensor(indices=tensor([[0, 1, 1, 2, 2, 3],
99 [1, 0, 2, 1, 3, 2]]),
100 values=tensor([1., 1., 1., 1., 1., 1.]),
101 size=(4, 4), nnz=6, layout=torch.sparse_coo)
102
103 """
104 if size is None:
105 size = int(edge_index.max()) + 1
106 if not isinstance(size, (tuple, list)):
107 size = (size, size)
108
109 if edge_attr is None:
110 edge_attr = torch.ones(edge_index.size(1), device=edge_index.device)
111
112 size = tuple(size) + edge_attr.size()[1:]
113 out = torch.sparse_coo_tensor(edge_index, edge_attr, size,
114 device=edge_index.device)
115 out = out.coalesce()
116 return out
117
[end of torch_geometric/utils/sparse.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/torch_geometric/utils/sparse.py b/torch_geometric/utils/sparse.py
--- a/torch_geometric/utils/sparse.py
+++ b/torch_geometric/utils/sparse.py
@@ -11,7 +11,10 @@
by edge indices and edge attributes.
Args:
- adj (Tensor): The dense adjacency matrix.
+ adj (Tensor): The dense adjacency matrix of shape
+ :obj:`[num_nodes, num_nodes]` or
+ :obj:`[batch_size, num_nodes, num_nodes]`.
+
:rtype: (:class:`LongTensor`, :class:`Tensor`)
Examples:
@@ -34,8 +37,9 @@
[0, 1, 0, 3, 3]]),
tensor([3, 1, 2, 1, 2]))
"""
- assert adj.dim() >= 2 and adj.dim() <= 3
- assert adj.size(-1) == adj.size(-2)
+ if adj.dim() < 2 or adj.dim() > 3:
+ raise ValueError(f"Dense adjacency matrix 'adj' must be 2- or "
+ f"3-dimensional (got {adj.dim()} dimensions)")
edge_index = adj.nonzero().t()
@@ -44,9 +48,8 @@
return edge_index, edge_attr
else:
edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]
- batch = edge_index[0] * adj.size(-1)
- row = batch + edge_index[1]
- col = batch + edge_index[2]
+ row = edge_index[1] + adj.size(-2) * edge_index[0]
+ col = edge_index[2] + adj.size(-1) * edge_index[0]
return torch.stack([row, col], dim=0), edge_attr
| {"golden_diff": "diff --git a/torch_geometric/utils/sparse.py b/torch_geometric/utils/sparse.py\n--- a/torch_geometric/utils/sparse.py\n+++ b/torch_geometric/utils/sparse.py\n@@ -11,7 +11,10 @@\n by edge indices and edge attributes.\n \n Args:\n- adj (Tensor): The dense adjacency matrix.\n+ adj (Tensor): The dense adjacency matrix of shape\n+ :obj:`[num_nodes, num_nodes]` or\n+ :obj:`[batch_size, num_nodes, num_nodes]`.\n+\n :rtype: (:class:`LongTensor`, :class:`Tensor`)\n \n Examples:\n@@ -34,8 +37,9 @@\n [0, 1, 0, 3, 3]]),\n tensor([3, 1, 2, 1, 2]))\n \"\"\"\n- assert adj.dim() >= 2 and adj.dim() <= 3\n- assert adj.size(-1) == adj.size(-2)\n+ if adj.dim() < 2 or adj.dim() > 3:\n+ raise ValueError(f\"Dense adjacency matrix 'adj' must be 2- or \"\n+ f\"3-dimensional (got {adj.dim()} dimensions)\")\n \n edge_index = adj.nonzero().t()\n \n@@ -44,9 +48,8 @@\n return edge_index, edge_attr\n else:\n edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]\n- batch = edge_index[0] * adj.size(-1)\n- row = batch + edge_index[1]\n- col = batch + edge_index[2]\n+ row = edge_index[1] + adj.size(-2) * edge_index[0]\n+ col = edge_index[2] + adj.size(-1) * edge_index[0]\n return torch.stack([row, col], dim=0), edge_attr\n", "issue": "Bipartite graph support for utils.dense_to_sparse\n### \ud83d\ude80 The feature, motivation and pitch\n\nI have a nearly-dense bipartite graph (that is, most features in node set A are connected to most features in node set B), and so it is easiest for me to define the edge adjacency matrix as a dense, non-square matrix. However, the message passing class expects a sparse edge adjacency layout. The dense_to_sparse utility would seem to be ideal for this purpose, but it can only take square matrices (thus, is unhelpful for bipartite graphs).\n\n### Alternatives\n\nA way to implicitly request propagate to pass messages from every node in A to every node in B would be even better (storing fully connected graphs is very memory inefficient), but I know that pyg is meant for sparser graph constructions so this would likely be a feature that wasn't used very much by other people.\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "from typing import Any, Optional, Tuple, Union\n\nimport torch\nfrom torch import Tensor\n\nfrom torch_geometric.typing import SparseTensor\n\n\ndef dense_to_sparse(adj: Tensor) -> Tuple[Tensor, Tensor]:\n r\"\"\"Converts a dense adjacency matrix to a sparse adjacency matrix defined\n by edge indices and edge attributes.\n\n Args:\n adj (Tensor): The dense adjacency matrix.\n :rtype: (:class:`LongTensor`, :class:`Tensor`)\n\n Examples:\n\n >>> # Forr a single adjacency matrix\n >>> adj = torch.tensor([[3, 1],\n ... [2, 0]])\n >>> dense_to_sparse(adj)\n (tensor([[0, 0, 1],\n [0, 1, 0]]),\n tensor([3, 1, 2]))\n\n >>> # For two adjacency matrixes\n >>> adj = torch.tensor([[[3, 1],\n ... [2, 0]],\n ... [[0, 1],\n ... 
[0, 2]]])\n >>> dense_to_sparse(adj)\n (tensor([[0, 0, 1, 2, 3],\n [0, 1, 0, 3, 3]]),\n tensor([3, 1, 2, 1, 2]))\n \"\"\"\n assert adj.dim() >= 2 and adj.dim() <= 3\n assert adj.size(-1) == adj.size(-2)\n\n edge_index = adj.nonzero().t()\n\n if edge_index.size(0) == 2:\n edge_attr = adj[edge_index[0], edge_index[1]]\n return edge_index, edge_attr\n else:\n edge_attr = adj[edge_index[0], edge_index[1], edge_index[2]]\n batch = edge_index[0] * adj.size(-1)\n row = batch + edge_index[1]\n col = batch + edge_index[2]\n return torch.stack([row, col], dim=0), edge_attr\n\n\ndef is_torch_sparse_tensor(src: Any) -> bool:\n \"\"\"Returns :obj:`True` if the input :obj:`src` is a\n :class:`torch.sparse.Tensor` (in any sparse layout).\n\n Args:\n src (Any): The input object to be checked.\n \"\"\"\n return isinstance(src, Tensor) and src.is_sparse\n\n\ndef is_sparse(src: Any) -> bool:\n \"\"\"Returns :obj:`True` if the input :obj:`src` is of type\n :class:`torch.sparse.Tensor` (in any sparse layout) or of type\n :class:`torch_sparse.SparseTensor`.\n\n Args:\n src (Any): The input object to be checked.\n \"\"\"\n return is_torch_sparse_tensor(src) or isinstance(src, SparseTensor)\n\n\ndef to_torch_coo_tensor(\n edge_index: Tensor,\n edge_attr: Optional[Tensor] = None,\n size: Optional[Union[int, Tuple[int, int]]] = None,\n) -> Tensor:\n \"\"\"Converts a sparse adjacency matrix defined by edge indices and edge\n attributes to a :class:`torch.sparse.Tensor`.\n\n Args:\n edge_index (LongTensor): The edge indices.\n edge_attr (Tensor, optional): The edge attributes.\n (default: :obj:`None`)\n size (int or (int, int), optional): The size of the sparse matrix.\n If given as an integer, will create a quadratic sparse matrix.\n If set to :obj:`None`, will infer a quadratic sparse matrix based\n on :obj:`edge_index.max() + 1`. (default: :obj:`None`)\n\n :rtype: :class:`torch.sparse.FloatTensor`\n\n Example:\n\n >>> edge_index = torch.tensor([[0, 1, 1, 2, 2, 3],\n ... [1, 0, 2, 1, 3, 2]])\n >>> to_torch_coo_tensor(edge_index)\n tensor(indices=tensor([[0, 1, 1, 2, 2, 3],\n [1, 0, 2, 1, 3, 2]]),\n values=tensor([1., 1., 1., 1., 1., 1.]),\n size=(4, 4), nnz=6, layout=torch.sparse_coo)\n\n \"\"\"\n if size is None:\n size = int(edge_index.max()) + 1\n if not isinstance(size, (tuple, list)):\n size = (size, size)\n\n if edge_attr is None:\n edge_attr = torch.ones(edge_index.size(1), device=edge_index.device)\n\n size = tuple(size) + edge_attr.size()[1:]\n out = torch.sparse_coo_tensor(edge_index, edge_attr, size,\n device=edge_index.device)\n out = out.coalesce()\n return out\n", "path": "torch_geometric/utils/sparse.py"}]} | 2,041 | 423 |
gh_patches_debug_2242 | rasdani/github-patches | git_diff | python-poetry__poetry-1577 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
poetry v1.0.0b4 breaks on zip packages
- [x] I am on the [latest](https://github.com/sdispater/poetry/releases/latest) Poetry version.
- [x] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate.
- [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option).
- **OS version and name**: Windows 10 with a virtual environment for Python v3.7.4
- **Poetry version**: 1.0.0b4
- **Link of a [Gist](https://gist.github.com/) with the contents of your pyproject.toml file**: (empty project)
## Issue Summary
The newly refactored distribution loading mechanism from [PR 1549](https://github.com/sdispater/poetry/pull/1549/) fails when a zip-based package is present. The presenting problem is that the `zipp.Path` class is not compatible with the `__fspath__` protocol.
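The incompatibility can be reproduced in isolation. The sketch below assumes a `zipp` release whose `Path` class does not implement `__fspath__` (which is what the traceback points at), so `pathlib.Path` rejects it:

```python
import io
import pathlib
import zipfile

import zipp

# Build a tiny egg-like zip in memory and point zipp.Path at its metadata directory.
buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    zf.writestr("EGG-INFO/PKG-INFO", "Name: example")

zp = zipp.Path(zipfile.ZipFile(buf), "EGG-INFO/")

pathlib.Path(zp)  # TypeError: expected str, bytes or os.PathLike object, not Path
```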
## Issue Details
After updating to Poetry v1.0.0b4, I get this traceback
```
% poetry update -v
Using virtualenv: C:\Users\garyd\devel\video-storage\venv-new
[TypeError]
expected str, bytes or os.PathLike object, not Path
Traceback (most recent call last):
File "C:\Users\garyd\.poetry\lib\poetry\_vendor\py3.7\clikit\console_application.py", line 131, in run
status_code = command.handle(parsed_args, io)
File "C:\Users\garyd\.poetry\lib\poetry\_vendor\py3.7\clikit\api\command\command.py", line 120, in handle
status_code = self._do_handle(args, io)
File "C:\Users\garyd\.poetry\lib\poetry\_vendor\py3.7\clikit\api\command\command.py", line 171, in _do_handle
return getattr(handler, handler_method)(args, io, self)
File "C:\Users\garyd\.poetry\lib\poetry\_vendor\py3.7\cleo\commands\command.py", line 92, in wrap_handle
return self.handle()
File "C:\Users\garyd\.poetry\lib\poetry\console\commands\update.py", line 36, in handle
self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool
File "C:\Users\garyd\.poetry\lib\poetry\installation\installer.py", line 55, in __init__
installed = self._get_installed()
File "C:\Users\garyd\.poetry\lib\poetry\installation\installer.py", line 507, in _get_installed
return InstalledRepository.load(self._env)
File "C:\Users\garyd\.poetry\lib\poetry\repositories\installed_repository.py", line 30, in load
path = Path(distribution._path)
File "C:\Users\garyd\AppData\Local\Programs\Python\Python37\lib\pathlib.py", line 1010, in __new__
self = cls._from_parts(args, init=False)
```
When I run the broken part of the code in my console, I find that the broken distribution is:
* `type(dist)` == `<class 'importlib_metadata.PathDistribution'>`
* `type(dist._path)` == `<class 'zipp.Path'>`
* `dist._path` == `C:\Users\garyd\devel\video-storage\venv\lib\site-packages\setuptools-40.8.0-py3.7.egg/EGG-INFO/`
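Because `_path` is a `zipp.Path` rather than something `os.fspath()` accepts, coercing it through `str()` before building a `pathlib.Path` avoids the crash. A minimal sketch of that direction (one way to adapt the `Path(distribution._path)` call shown in the code below):

```python
from pathlib import Path


def distribution_path(distribution):
    # str() accepts both regular filesystem paths and zipp.Path objects,
    # so the conversion also works for zip-installed eggs.
    return Path(str(distribution._path))
```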
</issue>
<code>
[start of poetry/repositories/installed_repository.py]
1 from importlib_metadata import distributions
2 from poetry.packages import Package
3 from poetry.utils._compat import Path
4 from poetry.utils.env import Env
5
6 from .repository import Repository
7
8
9 class InstalledRepository(Repository):
10 @classmethod
11 def load(cls, env): # type: (Env) -> InstalledRepository
12 """
13 Load installed packages.
14
15 For now, it uses the pip "freeze" command.
16 """
17 repo = cls()
18
19 for distribution in sorted(
20 distributions(path=env.sys_path), key=lambda d: str(d._path),
21 ):
22 metadata = distribution.metadata
23 name = metadata["name"]
24 version = metadata["version"]
25 package = Package(name, version, version)
26 package.description = metadata.get("summary", "")
27
28 repo.add_package(package)
29
30 path = Path(distribution._path)
31 is_standard_package = True
32 try:
33 path.relative_to(env.site_packages)
34 except ValueError:
35 is_standard_package = False
36
37 if is_standard_package:
38 continue
39
40 src_path = env.path / "src"
41
42 # A VCS dependency should have been installed
43 # in the src directory. If not, it's a path dependency
44 try:
45 path.relative_to(src_path)
46
47 from poetry.vcs.git import Git
48
49 git = Git()
50 revision = git.rev_parse("HEAD", src_path / package.name).strip()
51 url = git.remote_url(src_path / package.name)
52
53 package.source_type = "git"
54 package.source_url = url
55 package.source_reference = revision
56 except ValueError:
57 package.source_type = "directory"
58 package.source_url = str(path.parent)
59
60 return repo
61
[end of poetry/repositories/installed_repository.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/poetry/repositories/installed_repository.py b/poetry/repositories/installed_repository.py
--- a/poetry/repositories/installed_repository.py
+++ b/poetry/repositories/installed_repository.py
@@ -27,7 +27,7 @@
repo.add_package(package)
- path = Path(distribution._path)
+ path = Path(str(distribution._path))
is_standard_package = True
try:
path.relative_to(env.site_packages)
| {"golden_diff": "diff --git a/poetry/repositories/installed_repository.py b/poetry/repositories/installed_repository.py\n--- a/poetry/repositories/installed_repository.py\n+++ b/poetry/repositories/installed_repository.py\n@@ -27,7 +27,7 @@\n \n repo.add_package(package)\n \n- path = Path(distribution._path)\n+ path = Path(str(distribution._path))\n is_standard_package = True\n try:\n path.relative_to(env.site_packages)\n", "issue": "poetry v1.0.0b4 breaks on zip packages\n<!-- Checked checkbox should look like this: [x] -->\r\n- [x] I am on the [latest](https://github.com/sdispater/poetry/releases/latest) Poetry version.\r\n- [x] I have searched the [issues](https://github.com/sdispater/poetry/issues) of this repo and believe that this is not a duplicate.\r\n- [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option).\r\n\r\n- **OS version and name**: Windows 10 with a virtual environment for Python v3.7.4\r\n- **Poetry version**: 1.0.0b4\r\n- **Link of a [Gist](https://gist.github.com/) with the contents of your pyproject.toml file**: (empty project)\r\n\r\n## Issue Summary\r\nThe newly refactored distribution loading mechanism from [PR 1549](https://github.com/sdispater/poetry/pull/1549/) fails when a zip-based package is present. The presenting problem is that the `zipp.Path` class is not compatible with the `__fspath__` protocol.\r\n\r\n## Issue Details\r\nAfter updating to Poetry v1.0.0b4, I get this traceback\r\n\r\n```\r\n% poetry update -v\r\nUsing virtualenv: C:\\Users\\garyd\\devel\\video-storage\\venv-new\r\n[TypeError]\r\nexpected str, bytes or os.PathLike object, not Path\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\_vendor\\py3.7\\clikit\\console_application.py\", line 131, in run\r\n status_code = command.handle(parsed_args, io)\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\_vendor\\py3.7\\clikit\\api\\command\\command.py\", line 120, in handle\r\n status_code = self._do_handle(args, io)\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\_vendor\\py3.7\\clikit\\api\\command\\command.py\", line 171, in _do_handle\r\n return getattr(handler, handler_method)(args, io, self)\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\_vendor\\py3.7\\cleo\\commands\\command.py\", line 92, in wrap_handle\r\n return self.handle()\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\console\\commands\\update.py\", line 36, in handle\r\n self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\installation\\installer.py\", line 55, in __init__\r\n installed = self._get_installed()\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\installation\\installer.py\", line 507, in _get_installed\r\n return InstalledRepository.load(self._env)\r\n File \"C:\\Users\\garyd\\.poetry\\lib\\poetry\\repositories\\installed_repository.py\", line 30, in load\r\n path = Path(distribution._path)\r\n File \"C:\\Users\\garyd\\AppData\\Local\\Programs\\Python\\Python37\\lib\\pathlib.py\", line 1010, in __new__\r\n self = cls._from_parts(args, init=False)\r\n```\r\n\r\nWhen I run the broken part of the code in my console, I find that the broken distribution is:\r\n\r\n* type == <class 'importlib_metadata.PathDistribution'>\r\n* type(dist._path) == <class 'zipp.Path'>\r\n* dist._path == C:\\Users\\garyd\\devel\\video-storage\\venv\\lib\\site-packages\\setuptools-40.8.0-py3.7.egg/EGG-INFO/\r\n\r\n\n", "before_files": [{"content": "from importlib_metadata 
import distributions\nfrom poetry.packages import Package\nfrom poetry.utils._compat import Path\nfrom poetry.utils.env import Env\n\nfrom .repository import Repository\n\n\nclass InstalledRepository(Repository):\n @classmethod\n def load(cls, env): # type: (Env) -> InstalledRepository\n \"\"\"\n Load installed packages.\n\n For now, it uses the pip \"freeze\" command.\n \"\"\"\n repo = cls()\n\n for distribution in sorted(\n distributions(path=env.sys_path), key=lambda d: str(d._path),\n ):\n metadata = distribution.metadata\n name = metadata[\"name\"]\n version = metadata[\"version\"]\n package = Package(name, version, version)\n package.description = metadata.get(\"summary\", \"\")\n\n repo.add_package(package)\n\n path = Path(distribution._path)\n is_standard_package = True\n try:\n path.relative_to(env.site_packages)\n except ValueError:\n is_standard_package = False\n\n if is_standard_package:\n continue\n\n src_path = env.path / \"src\"\n\n # A VCS dependency should have been installed\n # in the src directory. If not, it's a path dependency\n try:\n path.relative_to(src_path)\n\n from poetry.vcs.git import Git\n\n git = Git()\n revision = git.rev_parse(\"HEAD\", src_path / package.name).strip()\n url = git.remote_url(src_path / package.name)\n\n package.source_type = \"git\"\n package.source_url = url\n package.source_reference = revision\n except ValueError:\n package.source_type = \"directory\"\n package.source_url = str(path.parent)\n\n return repo\n", "path": "poetry/repositories/installed_repository.py"}]} | 1,890 | 107 |
gh_patches_debug_20658 | rasdani/github-patches | git_diff | svthalia__concrexit-3531 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Entry.DoesNotExist in RegistrationAdmin changeform
Sentry Issue: [CONCREXIT-Q5](https://thalia.sentry.io/issues/4058249887/?referrer=github_integration)
```
Entry.DoesNotExist: Entry matching query does not exist.
(5 additional frame(s) were not displayed)
...
File "registrations/admin.py", line 169, in changeform_view
obj = Entry.objects.get(id=object_id)
```
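The unguarded `Entry.objects.get(id=object_id)` raises whenever the change form is opened with a stale or non-entry id. Django's `ModelAdmin` already has a convention for this case; a sketch of it follows, with the import path and omitted admin options being assumptions rather than the project's actual code:

```python
from django.contrib import admin

from .models import Registration  # assumed import


@admin.register(Registration)
class RegistrationAdmin(admin.ModelAdmin):
    def changeform_view(self, request, object_id=None, form_url="", extra_context=None):
        obj = None
        if object_id is not None:
            # get_object() returns None for an unknown id instead of raising DoesNotExist.
            obj = self.get_object(request, object_id)
            if obj is None:
                # Standard admin behaviour: flash a message and redirect to the changelist.
                return self._get_obj_does_not_exist_redirect(request, self.opts, object_id)
        return super().changeform_view(request, object_id, form_url, extra_context)
```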
</issue>
<code>
[start of website/registrations/admin.py]
1 """Registers admin interfaces for the registrations module."""
2 from functools import partial
3
4 from django.contrib import admin
5 from django.forms import Field
6 from django.utils.translation import gettext_lazy as _
7
8 from payments.widgets import PaymentWidget
9 from registrations.services import (
10 accept_registration,
11 accept_renewal,
12 reject_registration,
13 reject_renewal,
14 )
15
16 from .forms import RegistrationAdminForm
17 from .models import Entry, Reference, Registration, Renewal
18
19
20 class ReferenceInline(admin.StackedInline):
21 model = Reference
22 extra = 0
23
24
25 @admin.register(Registration)
26 class RegistrationAdmin(admin.ModelAdmin):
27 """Manage the registrations."""
28
29 list_display = (
30 "name",
31 "email",
32 "status",
33 "membership_type",
34 "contribution",
35 "created_at",
36 "payment",
37 "no_references",
38 "reference_count",
39 )
40 list_filter = (
41 "status",
42 "programme",
43 "membership_type",
44 "no_references",
45 "payment__type",
46 "contribution",
47 )
48 inlines = (ReferenceInline,)
49 search_fields = (
50 "first_name",
51 "last_name",
52 "email",
53 "phone_number",
54 "student_number",
55 )
56 date_hierarchy = "created_at"
57 fieldsets = (
58 (
59 _("Application information"),
60 {
61 "fields": (
62 "created_at",
63 "updated_at",
64 "username",
65 "length",
66 "contribution",
67 "membership_type",
68 "status",
69 "payment",
70 "remarks",
71 )
72 },
73 ),
74 (
75 _("Personal information"),
76 {
77 "fields": (
78 "first_name",
79 "last_name",
80 "birthday",
81 "optin_birthday",
82 "email",
83 "optin_mailinglist",
84 "phone_number",
85 )
86 },
87 ),
88 (
89 _("Address"),
90 {
91 "fields": (
92 "address_street",
93 "address_street2",
94 "address_postal_code",
95 "address_city",
96 "address_country",
97 )
98 },
99 ),
100 (
101 _("Financial"),
102 {
103 "fields": (
104 "direct_debit",
105 "initials",
106 "iban",
107 "bic",
108 "signature",
109 )
110 },
111 ),
112 (
113 _("University information"),
114 {
115 "fields": (
116 "student_number",
117 "programme",
118 "starting_year",
119 )
120 },
121 ),
122 )
123
124 form = RegistrationAdminForm
125
126 actions = ["accept_registrations", "reject_registrations"]
127
128 def get_actions(self, request):
129 actions = super().get_actions(request)
130
131 if not request.user.has_perm("registrations.review_entries"):
132 if "accept_registrations" in actions:
133 del actions["accept_registrations"]
134 if "reject_registrations" in actions:
135 del actions["reject_registrations"]
136
137 return actions
138
139 @admin.action(description="Accept selected registrations")
140 def accept_registrations(self, request, queryset): # pragma: no cover
141 if queryset.exclude(status=Registration.STATUS_REVIEW).exists():
142 self.message_user(
143 request, "Only registrations in review can be accepted", "error"
144 )
145 return
146
147 count = 0
148 for registration in queryset:
149 try:
150 accept_registration(registration, actor=request.user)
151 count += 1
152 except ValueError as e:
153 self.message_user(
154 request, f"Error accepting {registration}: {e.message}", "error"
155 )
156
157 self.message_user(request, f"Accepted {count} registrations", "success")
158
159 @admin.action(description="Reject selected registrations")
160 def reject_registrations(self, request, queryset): # pragma: no cover
161 if queryset.exclude(status=Registration.STATUS_REVIEW).exists():
162 self.message_user(
163 request, "Only registrations in review can be rejected", "error"
164 )
165 return
166
167 count = queryset.count()
168 for registration in queryset:
169 reject_registration(registration, actor=request.user)
170
171 self.message_user(request, f"Rejected {count} registrations", "success")
172
173 def reference_count(self, obj):
174 return obj.reference_set.count()
175
176 reference_count.short_description = _("references")
177
178 def get_form(self, request, obj=None, **kwargs):
179 return super().get_form(
180 request,
181 obj,
182 formfield_callback=partial(
183 self.formfield_for_dbfield, request=request, obj=obj
184 ),
185 **kwargs,
186 )
187
188 def formfield_for_dbfield(self, db_field, request, obj=None, **kwargs):
189 field = super().formfield_for_dbfield(db_field, request, **kwargs)
190 if db_field.name == "payment":
191 return Field(
192 widget=PaymentWidget(obj=obj), initial=field.initial, required=False
193 )
194 return field
195
196 def changeform_view(self, request, object_id=None, form_url="", extra_context=None):
197 """Render the change formview.
198
199 Only allow when the entry has not been processed yet
200 """
201 obj = None
202 can_review = False
203 can_resend = False
204 can_revert = False
205 if object_id is not None and request.user.has_perm(
206 "registrations.review_entries"
207 ):
208 obj = Entry.objects.get(id=object_id)
209 can_review = obj.status == Entry.STATUS_REVIEW
210 can_revert = obj.status in [Entry.STATUS_ACCEPTED, Entry.STATUS_REJECTED]
211 try:
212 can_resend = obj.registration.status == Entry.STATUS_CONFIRM
213 except Registration.DoesNotExist:
214 pass
215 return super().changeform_view(
216 request,
217 object_id,
218 form_url,
219 {
220 "entry": obj,
221 "can_review": can_review,
222 "can_resend": can_resend,
223 "can_revert": can_revert,
224 },
225 )
226
227 def get_readonly_fields(self, request, obj=None):
228 if obj is None or obj.status not in (
229 Entry.STATUS_REJECTED,
230 Entry.STATUS_ACCEPTED,
231 Entry.STATUS_COMPLETED,
232 ):
233 return ["status", "created_at", "updated_at", "payment"]
234 return [
235 field.name
236 for field in self.model._meta.get_fields()
237 if field.name not in ["payment", "no_references"] and field.editable
238 ]
239
240 @staticmethod
241 def name(obj):
242 return obj.get_full_name()
243
244 def has_change_permission(self, request, obj=None):
245 """Completed registrations are read-only."""
246 return (
247 False
248 if obj and obj.status == Entry.STATUS_COMPLETED
249 else super().has_change_permission(request, obj)
250 )
251
252 def has_add_permission(self, request):
253 return False
254
255 def save_model(self, request, obj, form, change):
256 if obj.status not in (
257 Entry.STATUS_REJECTED,
258 Entry.STATUS_ACCEPTED,
259 Entry.STATUS_COMPLETED,
260 ):
261 super().save_model(request, obj, form, change)
262
263
264 @admin.register(Renewal)
265 class RenewalAdmin(RegistrationAdmin):
266 """Manage the renewals."""
267
268 list_display = (
269 "name",
270 "email",
271 "status",
272 "membership_type",
273 "contribution",
274 "created_at",
275 "payment",
276 "no_references",
277 "reference_count",
278 )
279 list_filter = (
280 "status",
281 "membership_type",
282 "no_references",
283 "payment__type",
284 "contribution",
285 )
286 search_fields = (
287 "member__first_name",
288 "member__last_name",
289 "member__email",
290 "member__profile__phone_number",
291 "member__profile__student_number",
292 )
293 date_hierarchy = "created_at"
294 fieldsets = (
295 (
296 _("Application information"),
297 {
298 "fields": (
299 "created_at",
300 "updated_at",
301 "length",
302 "contribution",
303 "membership_type",
304 "status",
305 "payment",
306 "remarks",
307 "member",
308 )
309 },
310 ),
311 )
312
313 actions = ["accept_renewals", "reject_renewals"]
314
315 def get_actions(self, request):
316 actions = super().get_actions(request)
317
318 if not request.user.has_perm("registrations.review_entries"):
319 if "accept_renewals" in actions: # pragma: no cover
320 del actions["accept_renewals"]
321 if "reject_renewals" in actions: # pragma: no cover
322 del actions["reject_renewals"]
323
324 return actions
325
326 @admin.action(description="Accept selected renewals")
327 def accept_renewals(self, request, queryset): # pragma: no cover
328 if queryset.exclude(status=Renewal.STATUS_REVIEW).exists():
329 self.message_user(
330 request, "Only renewals in review can be accepted", "error"
331 )
332 return
333
334 count = queryset.count()
335 for renewal in queryset:
336 accept_renewal(renewal, actor=request.user)
337 count += 1
338
339 self.message_user(request, f"Accepted {count} renewals", "success")
340
341 @admin.action(description="Reject selected renewals")
342 def reject_renewals(self, request, queryset): # pragma: no cover
343 if queryset.exclude(status=Renewal.STATUS_REVIEW).exists():
344 self.message_user(
345 request, "Only renewals in review can be rejected", "error"
346 )
347 return
348
349 count = queryset.count()
350 for renewal in queryset:
351 reject_renewal(renewal, actor=request.user)
352
353 self.message_user(request, f"Rejected {count} renewals", "success")
354
355 def get_readonly_fields(self, request, obj=None):
356 """Make all fields read-only and add member if needed."""
357 fields = super().get_readonly_fields(request, obj)
358 if "member" not in fields and obj is not None:
359 return fields + ["member"]
360 return fields
361
362 def has_add_permission(self, request):
363 return False
364
365 @staticmethod
366 def name(obj):
367 return obj.member.get_full_name()
368
369 name.short_description = _("name")
370
371 @staticmethod
372 def email(obj):
373 return obj.member.email
374
[end of website/registrations/admin.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/website/registrations/admin.py b/website/registrations/admin.py
--- a/website/registrations/admin.py
+++ b/website/registrations/admin.py
@@ -204,14 +204,18 @@
can_revert = False
if object_id is not None and request.user.has_perm(
"registrations.review_entries"
- ):
- obj = Entry.objects.get(id=object_id)
+ ): # pragma: no cover
+ obj = self.get_object(request, object_id)
+ if obj is None:
+ return self._get_obj_does_not_exist_redirect(
+ request, self.opts, object_id
+ )
can_review = obj.status == Entry.STATUS_REVIEW
can_revert = obj.status in [Entry.STATUS_ACCEPTED, Entry.STATUS_REJECTED]
- try:
- can_resend = obj.registration.status == Entry.STATUS_CONFIRM
- except Registration.DoesNotExist:
- pass
+ can_resend = obj.status == Entry.STATUS_CONFIRM and isinstance(
+ obj, Registration
+ )
+
return super().changeform_view(
request,
object_id,
| {"golden_diff": "diff --git a/website/registrations/admin.py b/website/registrations/admin.py\n--- a/website/registrations/admin.py\n+++ b/website/registrations/admin.py\n@@ -204,14 +204,18 @@\n can_revert = False\n if object_id is not None and request.user.has_perm(\n \"registrations.review_entries\"\n- ):\n- obj = Entry.objects.get(id=object_id)\n+ ): # pragma: no cover\n+ obj = self.get_object(request, object_id)\n+ if obj is None:\n+ return self._get_obj_does_not_exist_redirect(\n+ request, self.opts, object_id\n+ )\n can_review = obj.status == Entry.STATUS_REVIEW\n can_revert = obj.status in [Entry.STATUS_ACCEPTED, Entry.STATUS_REJECTED]\n- try:\n- can_resend = obj.registration.status == Entry.STATUS_CONFIRM\n- except Registration.DoesNotExist:\n- pass\n+ can_resend = obj.status == Entry.STATUS_CONFIRM and isinstance(\n+ obj, Registration\n+ )\n+\n return super().changeform_view(\n request,\n object_id,\n", "issue": "Entry.DoesNotExist in RegistrationAdmin changeform\nSentry Issue: [CONCREXIT-Q5](https://thalia.sentry.io/issues/4058249887/?referrer=github_integration)\n\n```\nEntry.DoesNotExist: Entry matching query does not exist.\n(5 additional frame(s) were not displayed)\n...\n File \"registrations/admin.py\", line 169, in changeform_view\n obj = Entry.objects.get(id=object_id)\n```\n", "before_files": [{"content": "\"\"\"Registers admin interfaces for the registrations module.\"\"\"\nfrom functools import partial\n\nfrom django.contrib import admin\nfrom django.forms import Field\nfrom django.utils.translation import gettext_lazy as _\n\nfrom payments.widgets import PaymentWidget\nfrom registrations.services import (\n accept_registration,\n accept_renewal,\n reject_registration,\n reject_renewal,\n)\n\nfrom .forms import RegistrationAdminForm\nfrom .models import Entry, Reference, Registration, Renewal\n\n\nclass ReferenceInline(admin.StackedInline):\n model = Reference\n extra = 0\n\n\[email protected](Registration)\nclass RegistrationAdmin(admin.ModelAdmin):\n \"\"\"Manage the registrations.\"\"\"\n\n list_display = (\n \"name\",\n \"email\",\n \"status\",\n \"membership_type\",\n \"contribution\",\n \"created_at\",\n \"payment\",\n \"no_references\",\n \"reference_count\",\n )\n list_filter = (\n \"status\",\n \"programme\",\n \"membership_type\",\n \"no_references\",\n \"payment__type\",\n \"contribution\",\n )\n inlines = (ReferenceInline,)\n search_fields = (\n \"first_name\",\n \"last_name\",\n \"email\",\n \"phone_number\",\n \"student_number\",\n )\n date_hierarchy = \"created_at\"\n fieldsets = (\n (\n _(\"Application information\"),\n {\n \"fields\": (\n \"created_at\",\n \"updated_at\",\n \"username\",\n \"length\",\n \"contribution\",\n \"membership_type\",\n \"status\",\n \"payment\",\n \"remarks\",\n )\n },\n ),\n (\n _(\"Personal information\"),\n {\n \"fields\": (\n \"first_name\",\n \"last_name\",\n \"birthday\",\n \"optin_birthday\",\n \"email\",\n \"optin_mailinglist\",\n \"phone_number\",\n )\n },\n ),\n (\n _(\"Address\"),\n {\n \"fields\": (\n \"address_street\",\n \"address_street2\",\n \"address_postal_code\",\n \"address_city\",\n \"address_country\",\n )\n },\n ),\n (\n _(\"Financial\"),\n {\n \"fields\": (\n \"direct_debit\",\n \"initials\",\n \"iban\",\n \"bic\",\n \"signature\",\n )\n },\n ),\n (\n _(\"University information\"),\n {\n \"fields\": (\n \"student_number\",\n \"programme\",\n \"starting_year\",\n )\n },\n ),\n )\n\n form = RegistrationAdminForm\n\n actions = [\"accept_registrations\", \"reject_registrations\"]\n\n def 
get_actions(self, request):\n actions = super().get_actions(request)\n\n if not request.user.has_perm(\"registrations.review_entries\"):\n if \"accept_registrations\" in actions:\n del actions[\"accept_registrations\"]\n if \"reject_registrations\" in actions:\n del actions[\"reject_registrations\"]\n\n return actions\n\n @admin.action(description=\"Accept selected registrations\")\n def accept_registrations(self, request, queryset): # pragma: no cover\n if queryset.exclude(status=Registration.STATUS_REVIEW).exists():\n self.message_user(\n request, \"Only registrations in review can be accepted\", \"error\"\n )\n return\n\n count = 0\n for registration in queryset:\n try:\n accept_registration(registration, actor=request.user)\n count += 1\n except ValueError as e:\n self.message_user(\n request, f\"Error accepting {registration}: {e.message}\", \"error\"\n )\n\n self.message_user(request, f\"Accepted {count} registrations\", \"success\")\n\n @admin.action(description=\"Reject selected registrations\")\n def reject_registrations(self, request, queryset): # pragma: no cover\n if queryset.exclude(status=Registration.STATUS_REVIEW).exists():\n self.message_user(\n request, \"Only registrations in review can be rejected\", \"error\"\n )\n return\n\n count = queryset.count()\n for registration in queryset:\n reject_registration(registration, actor=request.user)\n\n self.message_user(request, f\"Rejected {count} registrations\", \"success\")\n\n def reference_count(self, obj):\n return obj.reference_set.count()\n\n reference_count.short_description = _(\"references\")\n\n def get_form(self, request, obj=None, **kwargs):\n return super().get_form(\n request,\n obj,\n formfield_callback=partial(\n self.formfield_for_dbfield, request=request, obj=obj\n ),\n **kwargs,\n )\n\n def formfield_for_dbfield(self, db_field, request, obj=None, **kwargs):\n field = super().formfield_for_dbfield(db_field, request, **kwargs)\n if db_field.name == \"payment\":\n return Field(\n widget=PaymentWidget(obj=obj), initial=field.initial, required=False\n )\n return field\n\n def changeform_view(self, request, object_id=None, form_url=\"\", extra_context=None):\n \"\"\"Render the change formview.\n\n Only allow when the entry has not been processed yet\n \"\"\"\n obj = None\n can_review = False\n can_resend = False\n can_revert = False\n if object_id is not None and request.user.has_perm(\n \"registrations.review_entries\"\n ):\n obj = Entry.objects.get(id=object_id)\n can_review = obj.status == Entry.STATUS_REVIEW\n can_revert = obj.status in [Entry.STATUS_ACCEPTED, Entry.STATUS_REJECTED]\n try:\n can_resend = obj.registration.status == Entry.STATUS_CONFIRM\n except Registration.DoesNotExist:\n pass\n return super().changeform_view(\n request,\n object_id,\n form_url,\n {\n \"entry\": obj,\n \"can_review\": can_review,\n \"can_resend\": can_resend,\n \"can_revert\": can_revert,\n },\n )\n\n def get_readonly_fields(self, request, obj=None):\n if obj is None or obj.status not in (\n Entry.STATUS_REJECTED,\n Entry.STATUS_ACCEPTED,\n Entry.STATUS_COMPLETED,\n ):\n return [\"status\", \"created_at\", \"updated_at\", \"payment\"]\n return [\n field.name\n for field in self.model._meta.get_fields()\n if field.name not in [\"payment\", \"no_references\"] and field.editable\n ]\n\n @staticmethod\n def name(obj):\n return obj.get_full_name()\n\n def has_change_permission(self, request, obj=None):\n \"\"\"Completed registrations are read-only.\"\"\"\n return (\n False\n if obj and obj.status == Entry.STATUS_COMPLETED\n else 
super().has_change_permission(request, obj)\n )\n\n def has_add_permission(self, request):\n return False\n\n def save_model(self, request, obj, form, change):\n if obj.status not in (\n Entry.STATUS_REJECTED,\n Entry.STATUS_ACCEPTED,\n Entry.STATUS_COMPLETED,\n ):\n super().save_model(request, obj, form, change)\n\n\[email protected](Renewal)\nclass RenewalAdmin(RegistrationAdmin):\n \"\"\"Manage the renewals.\"\"\"\n\n list_display = (\n \"name\",\n \"email\",\n \"status\",\n \"membership_type\",\n \"contribution\",\n \"created_at\",\n \"payment\",\n \"no_references\",\n \"reference_count\",\n )\n list_filter = (\n \"status\",\n \"membership_type\",\n \"no_references\",\n \"payment__type\",\n \"contribution\",\n )\n search_fields = (\n \"member__first_name\",\n \"member__last_name\",\n \"member__email\",\n \"member__profile__phone_number\",\n \"member__profile__student_number\",\n )\n date_hierarchy = \"created_at\"\n fieldsets = (\n (\n _(\"Application information\"),\n {\n \"fields\": (\n \"created_at\",\n \"updated_at\",\n \"length\",\n \"contribution\",\n \"membership_type\",\n \"status\",\n \"payment\",\n \"remarks\",\n \"member\",\n )\n },\n ),\n )\n\n actions = [\"accept_renewals\", \"reject_renewals\"]\n\n def get_actions(self, request):\n actions = super().get_actions(request)\n\n if not request.user.has_perm(\"registrations.review_entries\"):\n if \"accept_renewals\" in actions: # pragma: no cover\n del actions[\"accept_renewals\"]\n if \"reject_renewals\" in actions: # pragma: no cover\n del actions[\"reject_renewals\"]\n\n return actions\n\n @admin.action(description=\"Accept selected renewals\")\n def accept_renewals(self, request, queryset): # pragma: no cover\n if queryset.exclude(status=Renewal.STATUS_REVIEW).exists():\n self.message_user(\n request, \"Only renewals in review can be accepted\", \"error\"\n )\n return\n\n count = queryset.count()\n for renewal in queryset:\n accept_renewal(renewal, actor=request.user)\n count += 1\n\n self.message_user(request, f\"Accepted {count} renewals\", \"success\")\n\n @admin.action(description=\"Reject selected renewals\")\n def reject_renewals(self, request, queryset): # pragma: no cover\n if queryset.exclude(status=Renewal.STATUS_REVIEW).exists():\n self.message_user(\n request, \"Only renewals in review can be rejected\", \"error\"\n )\n return\n\n count = queryset.count()\n for renewal in queryset:\n reject_renewal(renewal, actor=request.user)\n\n self.message_user(request, f\"Rejected {count} renewals\", \"success\")\n\n def get_readonly_fields(self, request, obj=None):\n \"\"\"Make all fields read-only and add member if needed.\"\"\"\n fields = super().get_readonly_fields(request, obj)\n if \"member\" not in fields and obj is not None:\n return fields + [\"member\"]\n return fields\n\n def has_add_permission(self, request):\n return False\n\n @staticmethod\n def name(obj):\n return obj.member.get_full_name()\n\n name.short_description = _(\"name\")\n\n @staticmethod\n def email(obj):\n return obj.member.email\n", "path": "website/registrations/admin.py"}]} | 3,826 | 251 |
gh_patches_debug_21740 | rasdani/github-patches | git_diff | mathesar-foundation__mathesar-503 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
New CSV imports use autogenerated column names.
## Description
<!-- A clear and concise description of what the bug is. -->
According to #459, newly imported CSVs are supposed to use the first row as headers by default. However, newly uploaded CSVs are showing autogenerated column names.
## Expected behavior
<!-- A clear and concise description of what you expected to happen. -->
The first row of the CSV should be used as header names by default.
## To Reproduce
<!-- How can we recreate this bug? Please try to provide a Minimal, Complete, and Verifiable (http://stackoverflow.com/help/mcve) example if code-related. -->
Upload a CSV from the Mathesar UI. Column names will be of the form `column_0`, etc.
</issue>
<code>
[start of mathesar/serializers.py]
1 from django.urls import reverse
2 from rest_framework import serializers
3
4 from mathesar.models import Table, Schema, DataFile, Database
5
6
7 class NestedTableSerializer(serializers.HyperlinkedModelSerializer):
8 url = serializers.SerializerMethodField()
9
10 class Meta:
11 model = Table
12 fields = ['id', 'name', 'url']
13
14 def get_url(self, obj):
15 request = self.context['request']
16 return request.build_absolute_uri(reverse('table-detail', kwargs={'pk': obj.pk}))
17
18
19 class ModelNameField(serializers.CharField):
20 """
21 De-serializes the request field as a string, but serializes the response field as
22 `model.name`. Required to support passing and returing a model name from the
23 endpoint, while also storing the model as a related field.
24 """
25 def to_representation(self, value):
26 return value.name
27
28
29 class SchemaSerializer(serializers.HyperlinkedModelSerializer):
30 tables = NestedTableSerializer(many=True, read_only=True)
31 name = serializers.CharField()
32 database = ModelNameField(max_length=128)
33
34 class Meta:
35 model = Schema
36 fields = ['id', 'name', 'tables', 'database', 'has_dependencies']
37
38
39 class SimpleColumnSerializer(serializers.Serializer):
40 name = serializers.CharField()
41 type = serializers.CharField()
42
43
44 class ColumnSerializer(SimpleColumnSerializer):
45 index = serializers.IntegerField(source='column_index', read_only=True)
46 nullable = serializers.BooleanField(default=True)
47 primary_key = serializers.BooleanField(default=False)
48 valid_target_types = serializers.ListField(read_only=True)
49
50
51 class TableSerializer(serializers.ModelSerializer):
52 columns = SimpleColumnSerializer(many=True, read_only=True, source='sa_columns')
53 records = serializers.SerializerMethodField()
54 name = serializers.CharField()
55 data_files = serializers.PrimaryKeyRelatedField(required=False, many=True, queryset=DataFile.objects.all())
56
57 class Meta:
58 model = Table
59 fields = ['id', 'name', 'schema', 'created_at', 'updated_at',
60 'columns', 'records', 'data_files', 'has_dependencies']
61
62 def get_records(self, obj):
63 if isinstance(obj, Table):
64 # Only get records if we are serializing an existing table
65 request = self.context['request']
66 return request.build_absolute_uri(reverse('table-record-list', kwargs={'table_pk': obj.pk}))
67 else:
68 return None
69
70
71 class RecordSerializer(serializers.BaseSerializer):
72 def to_representation(self, instance):
73 return instance._asdict()
74
75
76 class RecordListParameterSerializer(serializers.Serializer):
77 filters = serializers.JSONField(required=False, default=[])
78 order_by = serializers.JSONField(required=False, default=[])
79 group_count_by = serializers.JSONField(required=False, default=[])
80
81
82 class DatabaseSerializer(serializers.ModelSerializer):
83 supported_types = serializers.ListField(child=serializers.CharField())
84
85 class Meta:
86 model = Database
87 fields = ['id', 'name', 'deleted', 'supported_types']
88 read_only_fields = ['id', 'name', 'deleted', 'supported_types']
89
90
91 class DataFileSerializer(serializers.ModelSerializer):
92 user = serializers.PrimaryKeyRelatedField(
93 default=serializers.CurrentUserDefault(), read_only=True
94 )
95
96 class Meta:
97 model = DataFile
98 fields = [
99 'id', 'file', 'table_imported_to', 'user', 'header', 'delimiter', 'escapechar', 'quotechar'
100 ]
101 extra_kwargs = {'delimiter': {'trim_whitespace': False},
102 'escapechar': {'trim_whitespace': False},
103 'quotechar': {'trim_whitespace': False},
104 'header': {'required': True}}
105 # We only currently support importing to a new table, so setting a table via API is invalid.
106 # User should be set automatically, not submitted via the API.
107 read_only_fields = ['table_imported_to']
108
109 def save(self, **kwargs):
110 """
111 Set user to current user while saving the data file.
112 """
113 current_user = self.fields['user'].get_default()
114 if current_user.is_authenticated:
115 kwargs['user'] = current_user
116 return super().save(**kwargs)
117
[end of mathesar/serializers.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mathesar/serializers.py b/mathesar/serializers.py
--- a/mathesar/serializers.py
+++ b/mathesar/serializers.py
@@ -92,6 +92,7 @@
user = serializers.PrimaryKeyRelatedField(
default=serializers.CurrentUserDefault(), read_only=True
)
+ header = serializers.BooleanField(default=True)
class Meta:
model = DataFile
@@ -100,8 +101,8 @@
]
extra_kwargs = {'delimiter': {'trim_whitespace': False},
'escapechar': {'trim_whitespace': False},
- 'quotechar': {'trim_whitespace': False},
- 'header': {'required': True}}
+ 'quotechar': {'trim_whitespace': False}
+ }
# We only currently support importing to a new table, so setting a table via API is invalid.
# User should be set automatically, not submitted via the API.
read_only_fields = ['table_imported_to']
| {"golden_diff": "diff --git a/mathesar/serializers.py b/mathesar/serializers.py\n--- a/mathesar/serializers.py\n+++ b/mathesar/serializers.py\n@@ -92,6 +92,7 @@\n user = serializers.PrimaryKeyRelatedField(\n default=serializers.CurrentUserDefault(), read_only=True\n )\n+ header = serializers.BooleanField(default=True)\n \n class Meta:\n model = DataFile\n@@ -100,8 +101,8 @@\n ]\n extra_kwargs = {'delimiter': {'trim_whitespace': False},\n 'escapechar': {'trim_whitespace': False},\n- 'quotechar': {'trim_whitespace': False},\n- 'header': {'required': True}}\n+ 'quotechar': {'trim_whitespace': False}\n+ }\n # We only currently support importing to a new table, so setting a table via API is invalid.\n # User should be set automatically, not submitted via the API.\n read_only_fields = ['table_imported_to']\n", "issue": "New CSV imports use autogenerated column names.\n## Description\r\n<!-- A clear and concise description of what the bug is. -->\r\nAccording to #459, newly imported CSVs are supposed to use the first row as headers by default. However, newly uploaded CSVs are showing autogenerated column names.\r\n\r\n## Expected behavior\r\n<!-- A clear and concise description of what you expected to happen. -->\r\nThe first row of the CSV should be used as header names by default.\r\n\r\n## To Reproduce\r\n<!-- How can we recreate this bug? Please try to provide a Minimal, Complete, and Verifiable (http://stackoverflow.com/help/mcve) example if code-related. -->\r\nUpload a CSV from the Mathesar UI. Column names will be of the form `column_0`, etc.\n", "before_files": [{"content": "from django.urls import reverse\nfrom rest_framework import serializers\n\nfrom mathesar.models import Table, Schema, DataFile, Database\n\n\nclass NestedTableSerializer(serializers.HyperlinkedModelSerializer):\n url = serializers.SerializerMethodField()\n\n class Meta:\n model = Table\n fields = ['id', 'name', 'url']\n\n def get_url(self, obj):\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-detail', kwargs={'pk': obj.pk}))\n\n\nclass ModelNameField(serializers.CharField):\n \"\"\"\n De-serializes the request field as a string, but serializes the response field as\n `model.name`. 
Required to support passing and returing a model name from the\n endpoint, while also storing the model as a related field.\n \"\"\"\n def to_representation(self, value):\n return value.name\n\n\nclass SchemaSerializer(serializers.HyperlinkedModelSerializer):\n tables = NestedTableSerializer(many=True, read_only=True)\n name = serializers.CharField()\n database = ModelNameField(max_length=128)\n\n class Meta:\n model = Schema\n fields = ['id', 'name', 'tables', 'database', 'has_dependencies']\n\n\nclass SimpleColumnSerializer(serializers.Serializer):\n name = serializers.CharField()\n type = serializers.CharField()\n\n\nclass ColumnSerializer(SimpleColumnSerializer):\n index = serializers.IntegerField(source='column_index', read_only=True)\n nullable = serializers.BooleanField(default=True)\n primary_key = serializers.BooleanField(default=False)\n valid_target_types = serializers.ListField(read_only=True)\n\n\nclass TableSerializer(serializers.ModelSerializer):\n columns = SimpleColumnSerializer(many=True, read_only=True, source='sa_columns')\n records = serializers.SerializerMethodField()\n name = serializers.CharField()\n data_files = serializers.PrimaryKeyRelatedField(required=False, many=True, queryset=DataFile.objects.all())\n\n class Meta:\n model = Table\n fields = ['id', 'name', 'schema', 'created_at', 'updated_at',\n 'columns', 'records', 'data_files', 'has_dependencies']\n\n def get_records(self, obj):\n if isinstance(obj, Table):\n # Only get records if we are serializing an existing table\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-record-list', kwargs={'table_pk': obj.pk}))\n else:\n return None\n\n\nclass RecordSerializer(serializers.BaseSerializer):\n def to_representation(self, instance):\n return instance._asdict()\n\n\nclass RecordListParameterSerializer(serializers.Serializer):\n filters = serializers.JSONField(required=False, default=[])\n order_by = serializers.JSONField(required=False, default=[])\n group_count_by = serializers.JSONField(required=False, default=[])\n\n\nclass DatabaseSerializer(serializers.ModelSerializer):\n supported_types = serializers.ListField(child=serializers.CharField())\n\n class Meta:\n model = Database\n fields = ['id', 'name', 'deleted', 'supported_types']\n read_only_fields = ['id', 'name', 'deleted', 'supported_types']\n\n\nclass DataFileSerializer(serializers.ModelSerializer):\n user = serializers.PrimaryKeyRelatedField(\n default=serializers.CurrentUserDefault(), read_only=True\n )\n\n class Meta:\n model = DataFile\n fields = [\n 'id', 'file', 'table_imported_to', 'user', 'header', 'delimiter', 'escapechar', 'quotechar'\n ]\n extra_kwargs = {'delimiter': {'trim_whitespace': False},\n 'escapechar': {'trim_whitespace': False},\n 'quotechar': {'trim_whitespace': False},\n 'header': {'required': True}}\n # We only currently support importing to a new table, so setting a table via API is invalid.\n # User should be set automatically, not submitted via the API.\n read_only_fields = ['table_imported_to']\n\n def save(self, **kwargs):\n \"\"\"\n Set user to current user while saving the data file.\n \"\"\"\n current_user = self.fields['user'].get_default()\n if current_user.is_authenticated:\n kwargs['user'] = current_user\n return super().save(**kwargs)\n", "path": "mathesar/serializers.py"}]} | 1,806 | 214 |
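An aside on the accepted fix: declaring `header` as an explicit serializer field with `default=True` (instead of marking it required through `extra_kwargs`) is the standard Django REST Framework way to make an optional flag default on. A minimal sketch of that pattern, assuming DRF and the `DataFile` model imported in the listing:

```python
# Minimal sketch of the serializer-level default used in the fix above.
# Assumes Django REST Framework; DataFile is the model imported in the listing.
from rest_framework import serializers

from mathesar.models import DataFile


class DataFileSerializer(serializers.ModelSerializer):
    # An explicit field lets us attach a default, so a request that omits
    # "header" validates to header=True rather than failing as a missing field.
    header = serializers.BooleanField(default=True)

    class Meta:
        model = DataFile
        fields = ['id', 'file', 'header']
```

With this in place, an upload that never mentions `header` is treated as having a header row, which is the behaviour the issue asks for; clients with genuinely headerless files can still send `header: false` explicitly.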
gh_patches_debug_47851 | rasdani/github-patches | git_diff | searx__searx-2256 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Make secret_key default one that will fail if not set to a custom value
Currently, the `secret_key` default value is `ultrasecretkey`, which is a valid value. Would it not be better to let the default value of this setting be one that will make searx fail to start? This will force the user to consciously change this setting to a secure value instead of accidentally forgetting to set this to something random and secure.
</issue>
<code>
[start of searx/__init__.py]
1 '''
2 searx is free software: you can redistribute it and/or modify
3 it under the terms of the GNU Affero General Public License as published by
4 the Free Software Foundation, either version 3 of the License, or
5 (at your option) any later version.
6
7 searx is distributed in the hope that it will be useful,
8 but WITHOUT ANY WARRANTY; without even the implied warranty of
9 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 GNU Affero General Public License for more details.
11
12 You should have received a copy of the GNU Affero General Public License
13 along with searx. If not, see < http://www.gnu.org/licenses/ >.
14
15 (C) 2013- by Adam Tauber, <[email protected]>
16 '''
17
18 import logging
19 from os import environ
20 from os.path import realpath, dirname, join, abspath, isfile
21 from io import open
22 from yaml import safe_load
23
24
25 searx_dir = abspath(dirname(__file__))
26 engine_dir = dirname(realpath(__file__))
27 static_path = abspath(join(dirname(__file__), 'static'))
28
29
30 def check_settings_yml(file_name):
31 if isfile(file_name):
32 return file_name
33 else:
34 return None
35
36
37 # find location of settings.yml
38 if 'SEARX_SETTINGS_PATH' in environ:
39 # if possible set path to settings using the
40 # enviroment variable SEARX_SETTINGS_PATH
41 settings_path = check_settings_yml(environ['SEARX_SETTINGS_PATH'])
42 else:
43 # if not, get it from searx code base or last solution from /etc/searx
44 settings_path = check_settings_yml(join(searx_dir, 'settings.yml')) or check_settings_yml('/etc/searx/settings.yml')
45
46 if not settings_path:
47 raise Exception('settings.yml not found')
48
49 # load settings
50 with open(settings_path, 'r', encoding='utf-8') as settings_yaml:
51 settings = safe_load(settings_yaml)
52
53 if settings['ui']['static_path']:
54 static_path = settings['ui']['static_path']
55
56 '''
57 enable debug if
58 the environnement variable SEARX_DEBUG is 1 or true
59 (whatever the value in settings.yml)
60 or general.debug=True in settings.yml
61
62 disable debug if
63 the environnement variable SEARX_DEBUG is 0 or false
64 (whatever the value in settings.yml)
65 or general.debug=False in settings.yml
66 '''
67 searx_debug_env = environ.get('SEARX_DEBUG', '').lower()
68 if searx_debug_env == 'true' or searx_debug_env == '1':
69 searx_debug = True
70 elif searx_debug_env == 'false' or searx_debug_env == '0':
71 searx_debug = False
72 else:
73 searx_debug = settings.get('general', {}).get('debug')
74
75 if searx_debug:
76 logging.basicConfig(level=logging.DEBUG)
77 else:
78 logging.basicConfig(level=logging.WARNING)
79
80 logger = logging.getLogger('searx')
81 logger.debug('read configuration from %s', settings_path)
82 logger.info('Initialisation done')
83
84 if 'SEARX_SECRET' in environ:
85 settings['server']['secret_key'] = environ['SEARX_SECRET']
86 if 'SEARX_BIND_ADDRESS' in environ:
87 settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']
88
[end of searx/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/searx/__init__.py b/searx/__init__.py
--- a/searx/__init__.py
+++ b/searx/__init__.py
@@ -85,3 +85,7 @@
settings['server']['secret_key'] = environ['SEARX_SECRET']
if 'SEARX_BIND_ADDRESS' in environ:
settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']
+
+if not searx_debug and settings['server']['secret_key'] == 'ultrasecretkey':
+ logger.error('server.secret_key is not changed. Please use something else instead of ultrasecretkey.')
+ exit(1)
| {"golden_diff": "diff --git a/searx/__init__.py b/searx/__init__.py\n--- a/searx/__init__.py\n+++ b/searx/__init__.py\n@@ -85,3 +85,7 @@\n settings['server']['secret_key'] = environ['SEARX_SECRET']\n if 'SEARX_BIND_ADDRESS' in environ:\n settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']\n+\n+if not searx_debug and settings['server']['secret_key'] == 'ultrasecretkey':\n+ logger.error('server.secret_key is not changed. Please use something else instead of ultrasecretkey.')\n+ exit(1)\n", "issue": "Make secret_key default one that will fail if not set to a custom value\nCurrently, the `secret_key` default value is `ultrasecretkey` which is a valid value. Would it not be better to let the default value of this setting be one that will make searx fail to start? This will force the user to conciously change this setting to a secure value instead of accidentally forgetting to set this to something random and secure.\n", "before_files": [{"content": "'''\nsearx is free software: you can redistribute it and/or modify\nit under the terms of the GNU Affero General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nsearx is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU Affero General Public License for more details.\n\nYou should have received a copy of the GNU Affero General Public License\nalong with searx. If not, see < http://www.gnu.org/licenses/ >.\n\n(C) 2013- by Adam Tauber, <[email protected]>\n'''\n\nimport logging\nfrom os import environ\nfrom os.path import realpath, dirname, join, abspath, isfile\nfrom io import open\nfrom yaml import safe_load\n\n\nsearx_dir = abspath(dirname(__file__))\nengine_dir = dirname(realpath(__file__))\nstatic_path = abspath(join(dirname(__file__), 'static'))\n\n\ndef check_settings_yml(file_name):\n if isfile(file_name):\n return file_name\n else:\n return None\n\n\n# find location of settings.yml\nif 'SEARX_SETTINGS_PATH' in environ:\n # if possible set path to settings using the\n # enviroment variable SEARX_SETTINGS_PATH\n settings_path = check_settings_yml(environ['SEARX_SETTINGS_PATH'])\nelse:\n # if not, get it from searx code base or last solution from /etc/searx\n settings_path = check_settings_yml(join(searx_dir, 'settings.yml')) or check_settings_yml('/etc/searx/settings.yml')\n\nif not settings_path:\n raise Exception('settings.yml not found')\n\n# load settings\nwith open(settings_path, 'r', encoding='utf-8') as settings_yaml:\n settings = safe_load(settings_yaml)\n\nif settings['ui']['static_path']:\n static_path = settings['ui']['static_path']\n\n'''\nenable debug if\nthe environnement variable SEARX_DEBUG is 1 or true\n(whatever the value in settings.yml)\nor general.debug=True in settings.yml\n\ndisable debug if\nthe environnement variable SEARX_DEBUG is 0 or false\n(whatever the value in settings.yml)\nor general.debug=False in settings.yml\n'''\nsearx_debug_env = environ.get('SEARX_DEBUG', '').lower()\nif searx_debug_env == 'true' or searx_debug_env == '1':\n searx_debug = True\nelif searx_debug_env == 'false' or searx_debug_env == '0':\n searx_debug = False\nelse:\n searx_debug = settings.get('general', {}).get('debug')\n\nif searx_debug:\n logging.basicConfig(level=logging.DEBUG)\nelse:\n logging.basicConfig(level=logging.WARNING)\n\nlogger = logging.getLogger('searx')\nlogger.debug('read configuration from 
%s', settings_path)\nlogger.info('Initialisation done')\n\nif 'SEARX_SECRET' in environ:\n settings['server']['secret_key'] = environ['SEARX_SECRET']\nif 'SEARX_BIND_ADDRESS' in environ:\n settings['server']['bind_address'] = environ['SEARX_BIND_ADDRESS']\n", "path": "searx/__init__.py"}]} | 1,512 | 149 |
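A practical companion to the startup guard above: because the listing already copies a `SEARX_SECRET` environment variable into `settings['server']['secret_key']`, an operator can satisfy the new check by exporting a randomly generated value. A minimal sketch for producing one; how the variable is persisted (env file, systemd unit, container config) is left as an assumption:

```python
# Generate a value suitable for the SEARX_SECRET environment variable, which
# the code above copies into settings['server']['secret_key'] at startup.
import secrets

print(f"SEARX_SECRET={secrets.token_hex(32)}")  # 64 hex characters of randomness
```

Any sufficiently long random string works; the guard only rejects the shipped placeholder `ultrasecretkey`, and only outside debug mode.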
gh_patches_debug_33620 | rasdani/github-patches | git_diff | weecology__retriever-160 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Final page of connection wizard reports nothing on Mac & Windows
The final page of the connection wizard is blank on both Mac & Windows. This happens regardless of success or failure and predates the recent changes to the GUI in #157.
</issue>
<code>
[start of app/connect_wizard.py]
1 """Connection setup wizard.
2
3 """
4
5 import os
6 import sys
7 import wx
8 import wx.html
9 import wx.wizard
10 from retriever.lib.models import Engine
11 from retriever.lib.tools import get_saved_connection, save_connection
12 from retriever.app.controls import *
13 from retriever.app.images import icon
14
15 from retriever import VERSION
16
17
18 class ConnectWizard(wx.wizard.Wizard):
19 def __init__(self, lists, engine_list, selected=None):
20 wx.wizard.Wizard.__init__(self, None, -1, "EcoData Retriever")
21
22 #self.SetIcon(icon.GetIcon())
23
24 welcome = """<h2>Connection Wizard</h2>
25
26 <p>The EcoData Retriever downloads raw data files, stores them on your
27 computer, and imports the data into your own local database.</p>
28 <p>To begin, you'll need to set up your own database. Once you've done
29 that, this wizard will walk you through the steps to connect to your
30 database.</p>
31 <p>Supported database systems currently include:</p>
32 <ul>"""
33
34 for db in engine_list:
35 welcome += "<li>" + db.name + "</li>"
36
37 welcome += "</ul>"
38
39 self.pages = []
40 self.lists = lists
41 self.engine_list = engine_list
42 self.selected = selected
43
44 self.pages.append(TitledPage(self, "", ""))
45
46 self.pages.append(ChooseDbPage(self, "Database",
47 "What kind of database are you using?"))
48
49 self.pages.append(ConnectPage(self,
50 "Connection Info",
51 ""))
52
53 self.pages.append(ConfirmPage(self, "Connection", ""))
54
55 (self.TITLE, self.CHOOSEDB,
56 self.CONNECTION, self.CONFIRM) = [self.pages[i]
57 for i in range(len(self.pages))]
58
59 self.TITLE.welcome = HtmlWindow(self.TITLE)
60 self.TITLE.welcome.SetSize((450,400))
61 self.TITLE.welcome.SetHtml(welcome)
62 self.TITLE.sizer.Add(self.TITLE.welcome, 1, wx.EXPAND)
63
64 self.CHOOSEDB.Bind(wx.wizard.EVT_WIZARD_PAGE_CHANGING, self.CONNECTION.Draw)
65 self.CONNECTION.Bind(wx.wizard.EVT_WIZARD_PAGE_CHANGING, self.CONFIRM.Draw)
66 self.CONFIRM.Bind(wx.wizard.EVT_WIZARD_PAGE_CHANGING, self.CONNECTION.Draw)
67
68 for i in range(len(self.pages) - 1):
69 wx.wizard.WizardPageSimple_Chain(self.pages[i], self.pages[i + 1])
70
71 for page in self.pages:
72 self.FitToPage(page)
73
74
75 def check_for_updates(self):
76 reload(retriever)
77
78
79 class ChooseDbPage(TitledPage):
80 def __init__(self, parent, title, label):
81 TitledPage.__init__(self, parent, title, label)
82 engine_list = parent.engine_list
83
84 dblist = ListBox(self, -1,
85 choices=[db.name for db in engine_list],
86 style=wx.LB_SINGLE,
87 size=(-1,150))
88 self.dblist = dblist
89 if parent.selected:
90 index = 0
91 for i in range(len(engine_list)):
92 if engine_list[i].name == parent.selected:
93 index = i
94 self.dblist.SetSelection(index)
95 else:
96 self.dblist.SetSelection(0)
97 self.sizer.Add(self.dblist, -1, wx.EXPAND)
98
99 #TODO: This is a hack to force the wizard to be large enough to not hide
100 #the Choose button on the Connect Page when the engine has a file
101 #attribute. This should be fixed properly by figuring out how to make wx
102 #work with the fact that the connect page has varying fields depending
103 #on the engine.
104 longspace = StaticText(self, -1, "", wx.Size(375, -1))
105 self.sizer.Add(longspace, -1)
106
107 def dirbtn_click(self, evt):
108 dialog = wx.DirDialog(None, message="Choose a directory to " +
109 "download your data files.")
110 if dialog.ShowModal() == wx.ID_OK:
111 self.raw_data_dir.SetValue(dialog.GetPath())
112 else:
113 pass
114 dialog.Destroy()
115
116
117 class ConnectPage(TitledPage):
118 """The connection info page."""
119 def __init__(self, parent, title, label):
120 TitledPage.__init__(self, parent, title, label)
121 self.option = dict()
122 self.sel = None
123 self.fields = wx.BoxSizer(wx.VERTICAL)
124 self.parent = parent
125
126 def Draw(self, evt):
127 """When the page is drawn, it may need to update its fields if
128 the selected database has changed."""
129
130 if not evt.GetDirection():
131 btn = self.parent.FindWindowById(wx.ID_FORWARD)
132 if btn: btn.Enable()
133
134 if len(self.Parent.CHOOSEDB.dblist.GetStringSelection()) == 0 and evt.Direction:
135 evt.Veto()
136 else:
137 if self.sel != self.Parent.CHOOSEDB.dblist.GetStringSelection():
138 self.sel = self.Parent.CHOOSEDB.dblist.GetStringSelection()
139 self.engine = Engine()
140 for db in self.Parent.engine_list:
141 if db.name == self.sel:
142 self.engine = db
143 self.fields.Clear(True)
144 self.fields = wx.BoxSizer(wx.VERTICAL)
145 if self.engine.instructions:
146 self.fields.Add(StaticText(self, -1, '\n' + self.engine.instructions + '\n\n'))
147 self.fieldset = dict()
148 self.option = dict()
149 saved_opts = get_saved_connection(self.engine.name)
150 for opt in self.engine.required_opts:
151 if opt[0] in saved_opts.keys():
152 default = saved_opts[opt[0]]
153 else:
154 default = opt[2]
155 self.fieldset[opt[0]] = wx.BoxSizer(wx.HORIZONTAL)
156 label = StaticText(self, -1, opt[0] + ": ",
157 size=wx.Size(90,35))
158 style = wx.TE_PASSWORD if opt[0] == "password" else 0
159 txt = TextCtrl(self, -1, str(default), size=wx.Size(200,-1), style=style)
160 self.option[opt[0]] = txt
161 self.fieldset[opt[0]].AddMany([label,
162 self.option[opt[0]]])
163 if opt[0] == "file":
164 file_opt = opt
165 def open_file_dialog(evt):
166 filter = ""
167 if file_opt[3]:
168 filter = file_opt[3] + "|"
169 filter += "All files (*.*)|*.*"
170 dialog = wx.FileDialog(None, style = wx.OPEN,
171 wildcard = filter)
172 if dialog.ShowModal() == wx.ID_OK:
173 self.option[file_opt[0]].SetValue(dialog.GetPath())
174 self.browse = wx.Button(self, -1, "Choose...")
175 self.fieldset[file_opt[0]].Add(self.browse)
176 self.browse.Bind(wx.EVT_BUTTON, open_file_dialog)
177 self.fieldset[opt[0]].Layout()
178 self.fields.Add(self.fieldset[opt[0]])
179 self.sizer.Add(self.fields)
180 self.sizer.Layout()
181
182
183 class ConfirmPage(TitledPage):
184 """The final confirmation page."""
185 def __init__(self, parent, title, label):
186 TitledPage.__init__(self, parent, title, label)
187 self.fields = wx.BoxSizer(wx.VERTICAL)
188 self.parent = parent
189
190 def Draw(self, evt):
191 if not evt.GetDirection(): return
192
193 self.fields.Clear(True)
194 self.fields = wx.BoxSizer(wx.VERTICAL)
195
196 self.values_dict = dict()
197 connect = self.parent.CONNECTION
198 for key in connect.option.keys():
199 self.values_dict[key] = str(connect.option[key].Value)
200
201 try:
202 connect.engine.opts = self.values_dict
203 connect.engine.connect(force_reconnect=True)
204 message = '''<p><b>Success!</b>Your connection has been saved.</p>
205 <p>Click Finish to continue.</p>'''
206 save_connection(connect.engine.name, self.values_dict)
207
208 except Exception as e:
209 message = message = '''<p><b>Error.</b>There was a problem with your
210 connection:</p><p>%s</p>
211 <p>Click Back to try again, or Cancel.</p>''' % e
212
213 btn = self.parent.FindWindowById(wx.ID_FORWARD)
214 if btn: btn.Disable()
215
216 self.message = HtmlWindow(self)
217 self.message.SetSize((450,400))
218 self.message.SetHtml(message)
219 self.fields.Add(self.message, 1, wx.EXPAND)
220 self.sizer.Add(self.fields)
221
222 self.sizer.Layout()
223
[end of app/connect_wizard.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/app/connect_wizard.py b/app/connect_wizard.py
--- a/app/connect_wizard.py
+++ b/app/connect_wizard.py
@@ -184,15 +184,11 @@
"""The final confirmation page."""
def __init__(self, parent, title, label):
TitledPage.__init__(self, parent, title, label)
- self.fields = wx.BoxSizer(wx.VERTICAL)
self.parent = parent
def Draw(self, evt):
if not evt.GetDirection(): return
- self.fields.Clear(True)
- self.fields = wx.BoxSizer(wx.VERTICAL)
-
self.values_dict = dict()
connect = self.parent.CONNECTION
for key in connect.option.keys():
@@ -201,12 +197,12 @@
try:
connect.engine.opts = self.values_dict
connect.engine.connect(force_reconnect=True)
- message = '''<p><b>Success!</b>Your connection has been saved.</p>
+ message = '''<p><b>Success!</b> Your connection has been saved.</p>
<p>Click Finish to continue.</p>'''
save_connection(connect.engine.name, self.values_dict)
except Exception as e:
- message = message = '''<p><b>Error.</b>There was a problem with your
+ message = message = '''<p><b>Error.</b> There was a problem with your
connection:</p><p>%s</p>
<p>Click Back to try again, or Cancel.</p>''' % e
@@ -216,7 +212,5 @@
self.message = HtmlWindow(self)
self.message.SetSize((450,400))
self.message.SetHtml(message)
- self.fields.Add(self.message, 1, wx.EXPAND)
- self.sizer.Add(self.fields)
-
+ self.sizer.Add(self.message, 1, wx.EXPAND)
self.sizer.Layout()
| {"golden_diff": "diff --git a/app/connect_wizard.py b/app/connect_wizard.py\n--- a/app/connect_wizard.py\n+++ b/app/connect_wizard.py\n@@ -184,15 +184,11 @@\n \"\"\"The final confirmation page.\"\"\"\n def __init__(self, parent, title, label):\n TitledPage.__init__(self, parent, title, label)\n- self.fields = wx.BoxSizer(wx.VERTICAL)\n self.parent = parent\n \n def Draw(self, evt):\n if not evt.GetDirection(): return\n \n- self.fields.Clear(True)\n- self.fields = wx.BoxSizer(wx.VERTICAL)\n- \n self.values_dict = dict()\n connect = self.parent.CONNECTION\n for key in connect.option.keys():\n@@ -201,12 +197,12 @@\n try:\n connect.engine.opts = self.values_dict\n connect.engine.connect(force_reconnect=True)\n- message = '''<p><b>Success!</b>Your connection has been saved.</p>\n+ message = '''<p><b>Success!</b> Your connection has been saved.</p>\n <p>Click Finish to continue.</p>'''\n save_connection(connect.engine.name, self.values_dict)\n \n except Exception as e:\n- message = message = '''<p><b>Error.</b>There was a problem with your\n+ message = message = '''<p><b>Error.</b> There was a problem with your\n connection:</p><p>%s</p>\n <p>Click Back to try again, or Cancel.</p>''' % e\n \n@@ -216,7 +212,5 @@\n self.message = HtmlWindow(self)\n self.message.SetSize((450,400))\n self.message.SetHtml(message)\n- self.fields.Add(self.message, 1, wx.EXPAND)\n- self.sizer.Add(self.fields)\n- \n+ self.sizer.Add(self.message, 1, wx.EXPAND)\n self.sizer.Layout()\n", "issue": "Final page of connection wizard reports nothing on Mac & Windows\nThe final page of the connection wizard is blank on both Mac & Windows. This happens regardless of success or failure and predates the recent changes to the GUI in #157.\n\n", "before_files": [{"content": "\"\"\"Connection setup wizard.\n\n\"\"\"\n\nimport os\nimport sys\nimport wx\nimport wx.html\nimport wx.wizard\nfrom retriever.lib.models import Engine\nfrom retriever.lib.tools import get_saved_connection, save_connection\nfrom retriever.app.controls import *\nfrom retriever.app.images import icon\n\nfrom retriever import VERSION\n\n\nclass ConnectWizard(wx.wizard.Wizard):\n def __init__(self, lists, engine_list, selected=None):\n wx.wizard.Wizard.__init__(self, None, -1, \"EcoData Retriever\")\n \n #self.SetIcon(icon.GetIcon())\n \n welcome = \"\"\"<h2>Connection Wizard</h2>\n \n <p>The EcoData Retriever downloads raw data files, stores them on your\n computer, and imports the data into your own local database.</p>\n <p>To begin, you'll need to set up your own database. 
Once you've done\n that, this wizard will walk you through the steps to connect to your\n database.</p>\n <p>Supported database systems currently include:</p>\n <ul>\"\"\"\n \n for db in engine_list:\n welcome += \"<li>\" + db.name + \"</li>\" \n \n welcome += \"</ul>\" \n \n self.pages = []\n self.lists = lists\n self.engine_list = engine_list\n self.selected = selected\n \n self.pages.append(TitledPage(self, \"\", \"\"))\n \n self.pages.append(ChooseDbPage(self, \"Database\", \n \"What kind of database are you using?\"))\n \n self.pages.append(ConnectPage(self, \n \"Connection Info\", \n \"\"))\n \n self.pages.append(ConfirmPage(self, \"Connection\", \"\"))\n\n (self.TITLE, self.CHOOSEDB, \n self.CONNECTION, self.CONFIRM) = [self.pages[i] \n for i in range(len(self.pages))]\n \n self.TITLE.welcome = HtmlWindow(self.TITLE)\n self.TITLE.welcome.SetSize((450,400))\n self.TITLE.welcome.SetHtml(welcome)\n self.TITLE.sizer.Add(self.TITLE.welcome, 1, wx.EXPAND)\n \n self.CHOOSEDB.Bind(wx.wizard.EVT_WIZARD_PAGE_CHANGING, self.CONNECTION.Draw)\n self.CONNECTION.Bind(wx.wizard.EVT_WIZARD_PAGE_CHANGING, self.CONFIRM.Draw)\n self.CONFIRM.Bind(wx.wizard.EVT_WIZARD_PAGE_CHANGING, self.CONNECTION.Draw)\n \n for i in range(len(self.pages) - 1):\n wx.wizard.WizardPageSimple_Chain(self.pages[i], self.pages[i + 1])\n \n for page in self.pages:\n self.FitToPage(page)\n \n \n def check_for_updates(self):\n reload(retriever)\n\n\nclass ChooseDbPage(TitledPage):\n def __init__(self, parent, title, label):\n TitledPage.__init__(self, parent, title, label)\n engine_list = parent.engine_list\n \n dblist = ListBox(self, -1, \n choices=[db.name for db in engine_list], \n style=wx.LB_SINGLE,\n size=(-1,150))\n self.dblist = dblist\n if parent.selected:\n index = 0\n for i in range(len(engine_list)):\n if engine_list[i].name == parent.selected:\n index = i \n self.dblist.SetSelection(index)\n else:\n self.dblist.SetSelection(0)\n self.sizer.Add(self.dblist, -1, wx.EXPAND)\n\n #TODO: This is a hack to force the wizard to be large enough to not hide\n #the Choose button on the Connect Page when the engine has a file\n #attribute. 
This should be fixed properly by figuring out how to make wx\n #work with the fact that the connect page has varying fields depending\n #on the engine.\n longspace = StaticText(self, -1, \"\", wx.Size(375, -1))\n self.sizer.Add(longspace, -1)\n \n def dirbtn_click(self, evt):\n dialog = wx.DirDialog(None, message=\"Choose a directory to \" +\n \"download your data files.\") \n if dialog.ShowModal() == wx.ID_OK: \n self.raw_data_dir.SetValue(dialog.GetPath()) \n else:\n pass\n dialog.Destroy()\n \n\nclass ConnectPage(TitledPage):\n \"\"\"The connection info page.\"\"\"\n def __init__(self, parent, title, label):\n TitledPage.__init__(self, parent, title, label)\n self.option = dict()\n self.sel = None\n self.fields = wx.BoxSizer(wx.VERTICAL)\n self.parent = parent\n \n def Draw(self, evt):\n \"\"\"When the page is drawn, it may need to update its fields if \n the selected database has changed.\"\"\"\n \n if not evt.GetDirection():\n btn = self.parent.FindWindowById(wx.ID_FORWARD) \n if btn: btn.Enable()\n \n if len(self.Parent.CHOOSEDB.dblist.GetStringSelection()) == 0 and evt.Direction:\n evt.Veto() \n else:\n if self.sel != self.Parent.CHOOSEDB.dblist.GetStringSelection():\n self.sel = self.Parent.CHOOSEDB.dblist.GetStringSelection()\n self.engine = Engine()\n for db in self.Parent.engine_list:\n if db.name == self.sel:\n self.engine = db\n self.fields.Clear(True) \n self.fields = wx.BoxSizer(wx.VERTICAL)\n if self.engine.instructions:\n self.fields.Add(StaticText(self, -1, '\\n' + self.engine.instructions + '\\n\\n'))\n self.fieldset = dict()\n self.option = dict()\n saved_opts = get_saved_connection(self.engine.name)\n for opt in self.engine.required_opts:\n if opt[0] in saved_opts.keys():\n default = saved_opts[opt[0]]\n else:\n default = opt[2]\n self.fieldset[opt[0]] = wx.BoxSizer(wx.HORIZONTAL)\n label = StaticText(self, -1, opt[0] + \": \", \n size=wx.Size(90,35))\n style = wx.TE_PASSWORD if opt[0] == \"password\" else 0\n txt = TextCtrl(self, -1, str(default), size=wx.Size(200,-1), style=style)\n self.option[opt[0]] = txt\n self.fieldset[opt[0]].AddMany([label, \n self.option[opt[0]]])\n if opt[0] == \"file\":\n file_opt = opt\n def open_file_dialog(evt):\n filter = \"\"\n if file_opt[3]:\n filter = file_opt[3] + \"|\"\n filter += \"All files (*.*)|*.*\" \n dialog = wx.FileDialog(None, style = wx.OPEN,\n wildcard = filter)\n if dialog.ShowModal() == wx.ID_OK:\n self.option[file_opt[0]].SetValue(dialog.GetPath())\n self.browse = wx.Button(self, -1, \"Choose...\")\n self.fieldset[file_opt[0]].Add(self.browse)\n self.browse.Bind(wx.EVT_BUTTON, open_file_dialog) \n self.fieldset[opt[0]].Layout()\n self.fields.Add(self.fieldset[opt[0]])\n self.sizer.Add(self.fields)\n self.sizer.Layout()\n \n \nclass ConfirmPage(TitledPage):\n \"\"\"The final confirmation page.\"\"\"\n def __init__(self, parent, title, label):\n TitledPage.__init__(self, parent, title, label)\n self.fields = wx.BoxSizer(wx.VERTICAL)\n self.parent = parent\n \n def Draw(self, evt):\n if not evt.GetDirection(): return\n \n self.fields.Clear(True)\n self.fields = wx.BoxSizer(wx.VERTICAL)\n \n self.values_dict = dict()\n connect = self.parent.CONNECTION\n for key in connect.option.keys():\n self.values_dict[key] = str(connect.option[key].Value)\n \n try:\n connect.engine.opts = self.values_dict\n connect.engine.connect(force_reconnect=True)\n message = '''<p><b>Success!</b>Your connection has been saved.</p>\n <p>Click Finish to continue.</p>'''\n save_connection(connect.engine.name, self.values_dict)\n \n except Exception 
as e:\n message = message = '''<p><b>Error.</b>There was a problem with your\n connection:</p><p>%s</p>\n <p>Click Back to try again, or Cancel.</p>''' % e\n \n btn = self.parent.FindWindowById(wx.ID_FORWARD) \n if btn: btn.Disable()\n \n self.message = HtmlWindow(self)\n self.message.SetSize((450,400))\n self.message.SetHtml(message)\n self.fields.Add(self.message, 1, wx.EXPAND)\n self.sizer.Add(self.fields)\n \n self.sizer.Layout()\n", "path": "app/connect_wizard.py"}]} | 3,056 | 437 |
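For context on the fix above: the original `ConfirmPage.Draw` cleared and re-created a nested `BoxSizer` on every pass and re-added it to the page sizer, while the accepted patch drops the intermediate sizer and adds the `HtmlWindow` directly to `self.sizer` before calling `Layout()`. A self-contained sketch of that simpler pattern, with wxPython assumed and the class and message chosen for illustration rather than taken from the retriever codebase:

```python
# Sketch: add the HTML message widget straight to the page's sizer and lay it
# out once, instead of routing it through a BoxSizer that is rebuilt per draw.
import wx
import wx.html


class ConfirmPanel(wx.Panel):
    def __init__(self, parent):
        super().__init__(parent)
        self.sizer = wx.BoxSizer(wx.VERTICAL)
        self.SetSizer(self.sizer)

    def draw(self, message):
        html = wx.html.HtmlWindow(self)
        html.SetHtml(message)
        self.sizer.Add(html, 1, wx.EXPAND)
        self.sizer.Layout()


if __name__ == "__main__":
    app = wx.App(False)
    frame = wx.Frame(None, title="Confirm", size=(500, 450))
    panel = ConfirmPanel(frame)
    panel.draw("<p><b>Success!</b> Your connection has been saved.</p>")
    frame.Show()
    app.MainLoop()
```

Keeping a single sizer that owns the widget lines up with the blank final page reported in the issue: once the repeatedly re-created intermediate sizer is gone, the message window actually gets laid out inside the visible page.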
gh_patches_debug_20566 | rasdani/github-patches | git_diff | vispy__vispy-930 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Importing gloo should not automatically load the OpenGL library
I'm trying to run vispy on a headless server with docker, to use the ipynb webgl backend exclusively. I cannot `import vispy.gloo`:
```
File "/opt/conda/lib/python3.4/site-packages/vispy/gloo/__init__.py", line 47, in <module>
from . import gl # noqa
File "/opt/conda/lib/python3.4/site-packages/vispy/gloo/gl/__init__.py", line 213, in <module>
from . import gl2 as default_backend # noqa
File "/opt/conda/lib/python3.4/site-packages/vispy/gloo/gl/gl2.py", line 46, in <module>
raise RuntimeError('Could not load OpenGL library.')
RuntimeError: Could not load OpenGL library.
```
I should not need to have the OpenGL library on a headless server when using a remote backend.
</issue>
<code>
[start of vispy/gloo/gl/gl2.py]
1 # -*- coding: utf-8 -*-
2 # Copyright (c) 2015, Vispy Development Team.
3 # Distributed under the (new) BSD License. See LICENSE.txt for more info.
4
5 """ GL ES 2.0 API implemented via desktop GL (i.e subset of normal OpenGL).
6 """
7
8 import os
9 import sys
10 import ctypes.util
11
12 from . import _copy_gl_functions
13 from ._constants import * # noqa
14
15 # Ctypes stuff
16
17
18 # Load the OpenGL library. We more or less follow the same approach
19 # as PyOpenGL does internally
20
21 _have_get_proc_address = False
22 _lib = os.getenv('VISPY_GL_LIB', '')
23 if _lib != '':
24 if sys.platform.startswith('win'):
25 _lib = ctypes.windll.LoadLibrary(_lib)
26 else:
27 _lib = ctypes.cdll.LoadLibrary(_lib)
28 elif sys.platform.startswith('win'):
29 # Windows
30 _lib = ctypes.windll.opengl32
31 try:
32 wglGetProcAddress = _lib.wglGetProcAddress
33 wglGetProcAddress.restype = ctypes.CFUNCTYPE(
34 ctypes.POINTER(ctypes.c_int))
35 wglGetProcAddress.argtypes = [ctypes.c_char_p]
36 _have_get_proc_address = True
37 except AttributeError:
38 pass
39 else:
40 # Unix-ish
41 if sys.platform.startswith('darwin'):
42 _fname = ctypes.util.find_library('OpenGL')
43 else:
44 _fname = ctypes.util.find_library('GL')
45 if not _fname:
46 raise RuntimeError('Could not load OpenGL library.')
47 # Load lib
48 _lib = ctypes.cdll.LoadLibrary(_fname)
49
50
51 def _have_context():
52 return _lib.glGetError() != 1282 # GL_INVALID_OPERATION
53
54
55 def _get_gl_version(_lib):
56 """Helper to get the GL version string"""
57 try:
58 return _lib.glGetString(7938).decode('utf-8')
59 except Exception:
60 return 'unknown'
61
62
63 def _get_gl_func(name, restype, argtypes):
64 # Based on a function in Pyglet
65 try:
66 # Try using normal ctypes stuff
67 func = getattr(_lib, name)
68 func.restype = restype
69 func.argtypes = argtypes
70 return func
71 except AttributeError:
72 if sys.platform.startswith('win'):
73 # Ask for a pointer to the function, this is the approach
74 # for OpenGL extensions on Windows
75 fargs = (restype,) + argtypes
76 ftype = ctypes.WINFUNCTYPE(*fargs)
77 if not _have_get_proc_address:
78 raise RuntimeError('Function %s not available '
79 '(OpenGL version is %s).'
80 % (name, _get_gl_version(_lib)))
81 if not _have_context():
82 raise RuntimeError('Using %s with no OpenGL context.' % name)
83 address = wglGetProcAddress(name.encode('utf-8'))
84 if address:
85 return ctypes.cast(address, ftype)
86 # If not Windows or if we did not return function object on Windows:
87 raise RuntimeError('Function %s not present in context '
88 '(OpenGL version is %s).'
89 % (name, _get_gl_version(_lib)))
90
91
92 # Inject
93
94 from . import _gl2 # noqa
95 _copy_gl_functions(_gl2, globals())
96
[end of vispy/gloo/gl/gl2.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/vispy/gloo/gl/gl2.py b/vispy/gloo/gl/gl2.py
--- a/vispy/gloo/gl/gl2.py
+++ b/vispy/gloo/gl/gl2.py
@@ -11,6 +11,7 @@
from . import _copy_gl_functions
from ._constants import * # noqa
+from ...util import logger
# Ctypes stuff
@@ -43,9 +44,11 @@
else:
_fname = ctypes.util.find_library('GL')
if not _fname:
- raise RuntimeError('Could not load OpenGL library.')
- # Load lib
- _lib = ctypes.cdll.LoadLibrary(_fname)
+ logger.warning('Could not load OpenGL library.')
+ _lib = None
+ else:
+ # Load lib
+ _lib = ctypes.cdll.LoadLibrary(_fname)
def _have_context():
@@ -62,6 +65,8 @@
def _get_gl_func(name, restype, argtypes):
# Based on a function in Pyglet
+ if _lib is None:
+ raise RuntimeError('Could not load OpenGL library, gl cannot be used')
try:
# Try using normal ctypes stuff
func = getattr(_lib, name)
| {"golden_diff": "diff --git a/vispy/gloo/gl/gl2.py b/vispy/gloo/gl/gl2.py\n--- a/vispy/gloo/gl/gl2.py\n+++ b/vispy/gloo/gl/gl2.py\n@@ -11,6 +11,7 @@\n \n from . import _copy_gl_functions\n from ._constants import * # noqa\n+from ...util import logger\n \n # Ctypes stuff\n \n@@ -43,9 +44,11 @@\n else:\n _fname = ctypes.util.find_library('GL')\n if not _fname:\n- raise RuntimeError('Could not load OpenGL library.')\n- # Load lib\n- _lib = ctypes.cdll.LoadLibrary(_fname)\n+ logger.warning('Could not load OpenGL library.')\n+ _lib = None\n+ else:\n+ # Load lib\n+ _lib = ctypes.cdll.LoadLibrary(_fname)\n \n \n def _have_context():\n@@ -62,6 +65,8 @@\n \n def _get_gl_func(name, restype, argtypes):\n # Based on a function in Pyglet\n+ if _lib is None:\n+ raise RuntimeError('Could not load OpenGL library, gl cannot be used')\n try:\n # Try using normal ctypes stuff\n func = getattr(_lib, name)\n", "issue": "Importing gloo should not automatically load the OpenGL library\nI'm trying to run vispy on a headless server with docker, to use the ipynb webgl backend exclusively. I cannot `import vispy.gloo`:\n\n```\n File \"/opt/conda/lib/python3.4/site-packages/vispy/gloo/__init__.py\", line 47, in <module>\n from . import gl # noqa\n File \"/opt/conda/lib/python3.4/site-packages/vispy/gloo/gl/__init__.py\", line 213, in <module>\n from . import gl2 as default_backend # noqa\n File \"/opt/conda/lib/python3.4/site-packages/vispy/gloo/gl/gl2.py\", line 46, in <module>\n raise RuntimeError('Could not load OpenGL library.')\nRuntimeError: Could not load OpenGL library.\n```\n\nI should not need to have the OpenGL library on a headless server when using a remote backend.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# Copyright (c) 2015, Vispy Development Team.\n# Distributed under the (new) BSD License. See LICENSE.txt for more info.\n\n\"\"\" GL ES 2.0 API implemented via desktop GL (i.e subset of normal OpenGL).\n\"\"\"\n\nimport os\nimport sys\nimport ctypes.util\n\nfrom . import _copy_gl_functions\nfrom ._constants import * # noqa\n\n# Ctypes stuff\n\n\n# Load the OpenGL library. 
We more or less follow the same approach\n# as PyOpenGL does internally\n\n_have_get_proc_address = False\n_lib = os.getenv('VISPY_GL_LIB', '')\nif _lib != '':\n if sys.platform.startswith('win'):\n _lib = ctypes.windll.LoadLibrary(_lib)\n else:\n _lib = ctypes.cdll.LoadLibrary(_lib)\nelif sys.platform.startswith('win'):\n # Windows\n _lib = ctypes.windll.opengl32\n try:\n wglGetProcAddress = _lib.wglGetProcAddress\n wglGetProcAddress.restype = ctypes.CFUNCTYPE(\n ctypes.POINTER(ctypes.c_int))\n wglGetProcAddress.argtypes = [ctypes.c_char_p]\n _have_get_proc_address = True\n except AttributeError:\n pass\nelse:\n # Unix-ish\n if sys.platform.startswith('darwin'):\n _fname = ctypes.util.find_library('OpenGL')\n else:\n _fname = ctypes.util.find_library('GL')\n if not _fname:\n raise RuntimeError('Could not load OpenGL library.')\n # Load lib\n _lib = ctypes.cdll.LoadLibrary(_fname)\n\n\ndef _have_context():\n return _lib.glGetError() != 1282 # GL_INVALID_OPERATION\n\n\ndef _get_gl_version(_lib):\n \"\"\"Helper to get the GL version string\"\"\"\n try:\n return _lib.glGetString(7938).decode('utf-8')\n except Exception:\n return 'unknown'\n\n\ndef _get_gl_func(name, restype, argtypes):\n # Based on a function in Pyglet\n try:\n # Try using normal ctypes stuff\n func = getattr(_lib, name)\n func.restype = restype\n func.argtypes = argtypes\n return func\n except AttributeError:\n if sys.platform.startswith('win'):\n # Ask for a pointer to the function, this is the approach\n # for OpenGL extensions on Windows\n fargs = (restype,) + argtypes\n ftype = ctypes.WINFUNCTYPE(*fargs)\n if not _have_get_proc_address:\n raise RuntimeError('Function %s not available '\n '(OpenGL version is %s).'\n % (name, _get_gl_version(_lib)))\n if not _have_context():\n raise RuntimeError('Using %s with no OpenGL context.' % name)\n address = wglGetProcAddress(name.encode('utf-8'))\n if address:\n return ctypes.cast(address, ftype)\n # If not Windows or if we did not return function object on Windows:\n raise RuntimeError('Function %s not present in context '\n '(OpenGL version is %s).'\n % (name, _get_gl_version(_lib)))\n\n\n# Inject\n\nfrom . import _gl2 # noqa\n_copy_gl_functions(_gl2, globals())\n", "path": "vispy/gloo/gl/gl2.py"}]} | 1,644 | 281 |
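The shape of that fix is a general one: defer a hard failure from import time to first use. A condensed sketch of the pattern as it lands in the diff; the names follow the listing and the error strings are illustrative:

```python
# Lazy-failure pattern: a missing OpenGL library becomes a warning at import
# time and a RuntimeError only when a GL symbol is actually requested.
import ctypes
import ctypes.util
import logging

logger = logging.getLogger(__name__)

_fname = ctypes.util.find_library('GL')
if not _fname:
    logger.warning('Could not load OpenGL library.')
    _lib = None
else:
    _lib = ctypes.cdll.LoadLibrary(_fname)


def _get_gl_func(name, restype, argtypes):
    if _lib is None:
        raise RuntimeError('Could not load OpenGL library, gl cannot be used')
    func = getattr(_lib, name)
    func.restype = restype
    func.argtypes = argtypes
    return func
```

With the library handle allowed to be `None`, `import vispy.gloo` succeeds on a headless machine, and only code paths that actually need desktop GL raise, which is exactly what the remote ipynb backend scenario in the issue needs.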
gh_patches_debug_5755 | rasdani/github-patches | git_diff | pypi__warehouse-3820 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Expose long_description_content_type in JSON API
Feature request: Add 'long_description_content_type' to JSON API in 'info': section, alongside the existing 'description'
Now that PyPI supports Markdown and more than one kind of content type for long descriptions, it would be helpful to be able to see what the content type is with the JSON API. Otherwise it is more difficult to correctly interpret the 'description', since it isn't immediately obvious if it's reStructuredText or Markdown with which variant.
I'm willing to try this if I'm pointed in the right general direction :) If someone else can get it done trivially that's fine too.
</issue>
<code>
[start of warehouse/legacy/api/json.py]
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 from pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound
14 from pyramid.view import view_config
15 from sqlalchemy.orm import Load
16 from sqlalchemy.orm.exc import NoResultFound
17
18 from warehouse.cache.http import cache_control
19 from warehouse.cache.origin import origin_cache
20 from warehouse.packaging.models import File, Release, Project
21
22
23 # Generate appropriate CORS headers for the JSON endpoint.
24 # We want to allow Cross-Origin requests here so that users can interact
25 # with these endpoints via XHR/Fetch APIs in the browser.
26 _CORS_HEADERS = {
27 "Access-Control-Allow-Origin": "*",
28 "Access-Control-Allow-Headers": ", ".join([
29 "Content-Type",
30 "If-Match",
31 "If-Modified-Since",
32 "If-None-Match",
33 "If-Unmodified-Since",
34 ]),
35 "Access-Control-Allow-Methods": "GET",
36 "Access-Control-Max-Age": "86400", # 1 day.
37 "Access-Control-Expose-Headers": ", ".join([
38 "X-PyPI-Last-Serial",
39 ]),
40 }
41
42
43 @view_config(
44 route_name="legacy.api.json.project",
45 context=Project,
46 renderer="json",
47 decorator=[
48 cache_control(15 * 60), # 15 minutes
49 origin_cache(
50 1 * 24 * 60 * 60, # 1 day
51 stale_while_revalidate=5 * 60, # 5 minutes
52 stale_if_error=1 * 24 * 60 * 60, # 1 day
53 ),
54 ],
55 )
56 def json_project(project, request):
57 if project.name != request.matchdict.get("name", project.name):
58 return HTTPMovedPermanently(
59 request.current_route_path(name=project.name),
60 headers=_CORS_HEADERS
61 )
62
63 try:
64 release = (
65 request.db.query(Release)
66 .filter(Release.project == project)
67 .order_by(
68 Release.is_prerelease.nullslast(),
69 Release._pypi_ordering.desc())
70 .limit(1)
71 .one()
72 )
73 except NoResultFound:
74 return HTTPNotFound(headers=_CORS_HEADERS)
75
76 return json_release(release, request)
77
78
79 @view_config(
80 route_name="legacy.api.json.release",
81 context=Release,
82 renderer="json",
83 decorator=[
84 cache_control(15 * 60), # 15 minutes
85 origin_cache(
86 1 * 24 * 60 * 60, # 1 day
87 stale_while_revalidate=5 * 60, # 5 minutes
88 stale_if_error=1 * 24 * 60 * 60, # 1 day
89 ),
90 ],
91 )
92 def json_release(release, request):
93 project = release.project
94
95 if project.name != request.matchdict.get("name", project.name):
96 return HTTPMovedPermanently(
97 request.current_route_path(name=project.name),
98 headers=_CORS_HEADERS
99 )
100
101 # Apply CORS headers.
102 request.response.headers.update(_CORS_HEADERS)
103
104 # Get the latest serial number for this project.
105 request.response.headers["X-PyPI-Last-Serial"] = str(project.last_serial)
106
107 # Get all of the releases and files for this project.
108 release_files = (
109 request.db.query(Release, File)
110 .options(Load(Release).load_only('version'))
111 .outerjoin(File)
112 .filter(Release.project == project)
113 .order_by(Release._pypi_ordering.desc(), File.filename)
114 .all()
115 )
116
117 # Map our releases + files into a dictionary that maps each release to a
118 # list of all its files.
119 releases = {}
120 for r, file_ in release_files:
121 files = releases.setdefault(r, [])
122 if file_ is not None:
123 files.append(file_)
124
125 # Serialize our database objects to match the way that PyPI legacy
126 # presented this data.
127 releases = {
128 r.version: [
129 {
130 "filename": f.filename,
131 "packagetype": f.packagetype,
132 "python_version": f.python_version,
133 "has_sig": f.has_signature,
134 "comment_text": f.comment_text,
135 "md5_digest": f.md5_digest,
136 "digests": {
137 "md5": f.md5_digest,
138 "sha256": f.sha256_digest,
139 },
140 "size": f.size,
141 # TODO: Remove this once we've had a long enough time with it
142 # here to consider it no longer in use.
143 "downloads": -1,
144 "upload_time": f.upload_time.strftime("%Y-%m-%dT%H:%M:%S"),
145 "url": request.route_url("packaging.file", path=f.path),
146 }
147 for f in fs
148 ]
149 for r, fs in releases.items()
150 }
151
152 return {
153 "info": {
154 "name": project.name,
155 "version": release.version,
156 "summary": release.summary,
157 "description": release.description,
158 "keywords": release.keywords,
159 "license": release.license,
160 "classifiers": list(release.classifiers),
161 "author": release.author,
162 "author_email": release.author_email,
163 "maintainer": release.maintainer,
164 "maintainer_email": release.maintainer_email,
165 "requires_python": release.requires_python,
166 "platform": release.platform,
167 "downloads": {
168 "last_day": -1,
169 "last_week": -1,
170 "last_month": -1,
171 },
172 "package_url": request.route_url(
173 "packaging.project",
174 name=project.name,
175 ),
176 "project_url": request.route_url(
177 "packaging.project",
178 name=project.name,
179 ),
180 "release_url": request.route_url(
181 "packaging.release",
182 name=project.name,
183 version=release.version,
184 ),
185 "requires_dist": (list(release.requires_dist)
186 if release.requires_dist else None),
187 "docs_url": project.documentation_url,
188 "bugtrack_url": project.bugtrack_url,
189 "home_page": release.home_page,
190 "download_url": release.download_url,
191 },
192 "urls": releases[release.version],
193 "releases": releases,
194 "last_serial": project.last_serial,
195 }
196
[end of warehouse/legacy/api/json.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py
--- a/warehouse/legacy/api/json.py
+++ b/warehouse/legacy/api/json.py
@@ -154,6 +154,7 @@
"name": project.name,
"version": release.version,
"summary": release.summary,
+ "description_content_type": release.description_content_type,
"description": release.description,
"keywords": release.keywords,
"license": release.license,
| {"golden_diff": "diff --git a/warehouse/legacy/api/json.py b/warehouse/legacy/api/json.py\n--- a/warehouse/legacy/api/json.py\n+++ b/warehouse/legacy/api/json.py\n@@ -154,6 +154,7 @@\n \"name\": project.name,\n \"version\": release.version,\n \"summary\": release.summary,\n+ \"description_content_type\": release.description_content_type,\n \"description\": release.description,\n \"keywords\": release.keywords,\n \"license\": release.license,\n", "issue": "Expose long_description_content_type in JSON API\nFeature request: Add 'long_description_content_type' to JSON API in 'info': section, alongside the existing 'description'\r\n\r\nNow that PyPI supports Markdown and more than one kind of content type for long descriptions, it would be helpful to be able to see what the content type is with the JSON API. Otherwise it is more difficult to correctly interpret the 'description', since it isn't immediately obvious if it's reStructuredText or Markdown with which variant.\r\n\r\nI'm willing to try this if I'm pointed in the right general direction :) If someone else can get it done trivially that's fine too.\r\n\r\n\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom pyramid.httpexceptions import HTTPMovedPermanently, HTTPNotFound\nfrom pyramid.view import view_config\nfrom sqlalchemy.orm import Load\nfrom sqlalchemy.orm.exc import NoResultFound\n\nfrom warehouse.cache.http import cache_control\nfrom warehouse.cache.origin import origin_cache\nfrom warehouse.packaging.models import File, Release, Project\n\n\n# Generate appropriate CORS headers for the JSON endpoint.\n# We want to allow Cross-Origin requests here so that users can interact\n# with these endpoints via XHR/Fetch APIs in the browser.\n_CORS_HEADERS = {\n \"Access-Control-Allow-Origin\": \"*\",\n \"Access-Control-Allow-Headers\": \", \".join([\n \"Content-Type\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n ]),\n \"Access-Control-Allow-Methods\": \"GET\",\n \"Access-Control-Max-Age\": \"86400\", # 1 day.\n \"Access-Control-Expose-Headers\": \", \".join([\n \"X-PyPI-Last-Serial\",\n ]),\n}\n\n\n@view_config(\n route_name=\"legacy.api.json.project\",\n context=Project,\n renderer=\"json\",\n decorator=[\n cache_control(15 * 60), # 15 minutes\n origin_cache(\n 1 * 24 * 60 * 60, # 1 day\n stale_while_revalidate=5 * 60, # 5 minutes\n stale_if_error=1 * 24 * 60 * 60, # 1 day\n ),\n ],\n)\ndef json_project(project, request):\n if project.name != request.matchdict.get(\"name\", project.name):\n return HTTPMovedPermanently(\n request.current_route_path(name=project.name),\n headers=_CORS_HEADERS\n )\n\n try:\n release = (\n request.db.query(Release)\n .filter(Release.project == project)\n .order_by(\n Release.is_prerelease.nullslast(),\n Release._pypi_ordering.desc())\n .limit(1)\n .one()\n )\n except NoResultFound:\n return HTTPNotFound(headers=_CORS_HEADERS)\n\n return json_release(release, request)\n\n\n@view_config(\n 
route_name=\"legacy.api.json.release\",\n context=Release,\n renderer=\"json\",\n decorator=[\n cache_control(15 * 60), # 15 minutes\n origin_cache(\n 1 * 24 * 60 * 60, # 1 day\n stale_while_revalidate=5 * 60, # 5 minutes\n stale_if_error=1 * 24 * 60 * 60, # 1 day\n ),\n ],\n)\ndef json_release(release, request):\n project = release.project\n\n if project.name != request.matchdict.get(\"name\", project.name):\n return HTTPMovedPermanently(\n request.current_route_path(name=project.name),\n headers=_CORS_HEADERS\n )\n\n # Apply CORS headers.\n request.response.headers.update(_CORS_HEADERS)\n\n # Get the latest serial number for this project.\n request.response.headers[\"X-PyPI-Last-Serial\"] = str(project.last_serial)\n\n # Get all of the releases and files for this project.\n release_files = (\n request.db.query(Release, File)\n .options(Load(Release).load_only('version'))\n .outerjoin(File)\n .filter(Release.project == project)\n .order_by(Release._pypi_ordering.desc(), File.filename)\n .all()\n )\n\n # Map our releases + files into a dictionary that maps each release to a\n # list of all its files.\n releases = {}\n for r, file_ in release_files:\n files = releases.setdefault(r, [])\n if file_ is not None:\n files.append(file_)\n\n # Serialize our database objects to match the way that PyPI legacy\n # presented this data.\n releases = {\n r.version: [\n {\n \"filename\": f.filename,\n \"packagetype\": f.packagetype,\n \"python_version\": f.python_version,\n \"has_sig\": f.has_signature,\n \"comment_text\": f.comment_text,\n \"md5_digest\": f.md5_digest,\n \"digests\": {\n \"md5\": f.md5_digest,\n \"sha256\": f.sha256_digest,\n },\n \"size\": f.size,\n # TODO: Remove this once we've had a long enough time with it\n # here to consider it no longer in use.\n \"downloads\": -1,\n \"upload_time\": f.upload_time.strftime(\"%Y-%m-%dT%H:%M:%S\"),\n \"url\": request.route_url(\"packaging.file\", path=f.path),\n }\n for f in fs\n ]\n for r, fs in releases.items()\n }\n\n return {\n \"info\": {\n \"name\": project.name,\n \"version\": release.version,\n \"summary\": release.summary,\n \"description\": release.description,\n \"keywords\": release.keywords,\n \"license\": release.license,\n \"classifiers\": list(release.classifiers),\n \"author\": release.author,\n \"author_email\": release.author_email,\n \"maintainer\": release.maintainer,\n \"maintainer_email\": release.maintainer_email,\n \"requires_python\": release.requires_python,\n \"platform\": release.platform,\n \"downloads\": {\n \"last_day\": -1,\n \"last_week\": -1,\n \"last_month\": -1,\n },\n \"package_url\": request.route_url(\n \"packaging.project\",\n name=project.name,\n ),\n \"project_url\": request.route_url(\n \"packaging.project\",\n name=project.name,\n ),\n \"release_url\": request.route_url(\n \"packaging.release\",\n name=project.name,\n version=release.version,\n ),\n \"requires_dist\": (list(release.requires_dist)\n if release.requires_dist else None),\n \"docs_url\": project.documentation_url,\n \"bugtrack_url\": project.bugtrack_url,\n \"home_page\": release.home_page,\n \"download_url\": release.download_url,\n },\n \"urls\": releases[release.version],\n \"releases\": releases,\n \"last_serial\": project.last_serial,\n }\n", "path": "warehouse/legacy/api/json.py"}]} | 2,650 | 109 |
gh_patches_debug_29995 | rasdani/github-patches | git_diff | alltheplaces__alltheplaces-1360 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Petsmart Canada
https://www.petsmart.ca/store-locator/all/
</issue>
<code>
[start of locations/spiders/petsmart.py]
1 import datetime
2 import re
3 import scrapy
4 from locations.items import GeojsonPointItem
5 from locations.hours import OpeningHours
6
7 day_mapping = {'MON': 'Mo','TUE': 'Tu','WED': 'We','THU': 'Th',
8 'FRI': 'Fr','SAT': 'Sa','SUN': 'Su'}
9
10 def convert_24hour(time):
11 """
12 Takes 12 hour time as a string and converts it to 24 hour time.
13 """
14
15 if len(time[:-2].split(':')) < 2:
16 hour = time[:-2]
17 minute = '00'
18 else:
19 hour, minute = time[:-2].split(':')
20
21 if time[-2:] == 'AM':
22 time_formatted = hour + ':' + minute
23 elif time[-2:] == 'PM':
24 time_formatted = str(int(hour)+ 12) + ':' + minute
25
26 if time_formatted in ['24:00','0:00','00:00']:
27 time_formatted = '23:59'
28
29 return time_formatted
30
31 class PetSmartSpider(scrapy.Spider):
32 download_delay = 0.2
33 name = "petsmart"
34 item_attributes = { 'brand': "Petsmart" }
35 allowed_domains = ["petsmart.com"]
36 start_urls = (
37 'https://www.petsmart.com/store-locator/all/',
38 )
39
40 def parse(self, response):
41 state_urls = response.xpath('//li[@class="col-sm-12 col-md-4"]/a/@href').extract()
42 is_store_details_urls = response.xpath('//a[@class="store-details-link"]/@href').extract()
43
44 if not state_urls and is_store_details_urls:
45 for url in is_store_details_urls:
46 yield scrapy.Request(response.urljoin(url), callback=self.parse_store)
47 else:
48 for url in state_urls:
49 yield scrapy.Request(response.urljoin(url))
50
51 def parse_store(self, response):
52 ref = re.search(r'.+/?\?(.+)', response.url).group(1)
53
54 properties = {
55 'name': response.xpath('//span[@itemprop="name"]/text()').extract_first().strip(),
56 'addr_full': response.xpath('//div[@itemprop="streetAddress"]/text()').extract_first(),
57 'city': response.xpath('//span[@itemprop="addressLocality"][1]/text()').extract_first().title(),
58 'state': response.xpath('//span[@itemprop="addressLocality"][2]/text()').extract_first(),
59 'postcode': response.xpath('//span[@itemprop="postalCode"]/text()').extract_first(),
60 'lat': float(response.xpath('//input[@name="storeLatitudeVal"]/@value').extract_first()),
61 'lon': float(response.xpath('//input[@name="storeLongitudeVal"]/@value').extract_first()),
62 'phone': response.xpath('//a[@class="store-contact-info"]/text()').extract_first(),
63 'ref': ref,
64 'website': response.url
65 }
66
67 hours = self.parse_hours(response.xpath('//div[@class="store-detail-address"]'))
68
69 if hours:
70 properties['opening_hours'] = hours
71
72 yield GeojsonPointItem(**properties)
73
74 def parse_hours(self, elements):
75 opening_hours = OpeningHours()
76
77 days = elements.xpath('//span[@itemprop="dayOfWeek"]/text()').extract()
78 today = (set(day_mapping) - set(days)).pop()
79 days.remove('TODAY')
80 days.insert(0,today)
81 open_hours = elements.xpath('//div[@class="store-hours"]/time[@itemprop="opens"]/@content').extract()
82 close_hours = elements.xpath('//div[@class="store-hours"]/time[@itemprop="closes"]/@content').extract()
83
84 store_hours = dict((z[0],list(z[1:])) for z in zip(days, open_hours, close_hours))
85
86 for day, hours in store_hours.items():
87 if 'CLOSED' in hours:
88 continue
89 opening_hours.add_range(day=day_mapping[day],
90 open_time=convert_24hour(hours[0]),
91 close_time=convert_24hour(hours[1]))
92 return opening_hours.as_opening_hours()
93
[end of locations/spiders/petsmart.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/locations/spiders/petsmart.py b/locations/spiders/petsmart.py
--- a/locations/spiders/petsmart.py
+++ b/locations/spiders/petsmart.py
@@ -32,9 +32,10 @@
download_delay = 0.2
name = "petsmart"
item_attributes = { 'brand': "Petsmart" }
- allowed_domains = ["petsmart.com"]
+ allowed_domains = ["petsmart.com", "petsmart.ca"]
start_urls = (
'https://www.petsmart.com/store-locator/all/',
+ 'https://www.petsmart.ca/store-locator/all/'
)
def parse(self, response):
@@ -50,6 +51,10 @@
def parse_store(self, response):
ref = re.search(r'.+/?\?(.+)', response.url).group(1)
+ if 'petsmart.ca' in response.url:
+ country = 'CA'
+ elif 'petsmart.com' in response.url:
+ country = 'US'
properties = {
'name': response.xpath('//span[@itemprop="name"]/text()').extract_first().strip(),
@@ -60,6 +65,7 @@
'lat': float(response.xpath('//input[@name="storeLatitudeVal"]/@value').extract_first()),
'lon': float(response.xpath('//input[@name="storeLongitudeVal"]/@value').extract_first()),
'phone': response.xpath('//a[@class="store-contact-info"]/text()').extract_first(),
+ 'country': country,
'ref': ref,
'website': response.url
}
| {"golden_diff": "diff --git a/locations/spiders/petsmart.py b/locations/spiders/petsmart.py\n--- a/locations/spiders/petsmart.py\n+++ b/locations/spiders/petsmart.py\n@@ -32,9 +32,10 @@\n download_delay = 0.2\n name = \"petsmart\"\n item_attributes = { 'brand': \"Petsmart\" }\n- allowed_domains = [\"petsmart.com\"]\n+ allowed_domains = [\"petsmart.com\", \"petsmart.ca\"]\n start_urls = (\n 'https://www.petsmart.com/store-locator/all/',\n+ 'https://www.petsmart.ca/store-locator/all/'\n )\n \n def parse(self, response):\n@@ -50,6 +51,10 @@\n \n def parse_store(self, response):\n ref = re.search(r'.+/?\\?(.+)', response.url).group(1)\n+ if 'petsmart.ca' in response.url:\n+ country = 'CA'\n+ elif 'petsmart.com' in response.url:\n+ country = 'US'\n \n properties = {\n 'name': response.xpath('//span[@itemprop=\"name\"]/text()').extract_first().strip(),\n@@ -60,6 +65,7 @@\n 'lat': float(response.xpath('//input[@name=\"storeLatitudeVal\"]/@value').extract_first()),\n 'lon': float(response.xpath('//input[@name=\"storeLongitudeVal\"]/@value').extract_first()),\n 'phone': response.xpath('//a[@class=\"store-contact-info\"]/text()').extract_first(),\n+ 'country': country,\n 'ref': ref,\n 'website': response.url\n }\n", "issue": "Petsmart Canada\nhttps://www.petsmart.ca/store-locator/all/\n", "before_files": [{"content": "import datetime\nimport re\nimport scrapy\nfrom locations.items import GeojsonPointItem\nfrom locations.hours import OpeningHours\n\nday_mapping = {'MON': 'Mo','TUE': 'Tu','WED': 'We','THU': 'Th',\n 'FRI': 'Fr','SAT': 'Sa','SUN': 'Su'}\n\ndef convert_24hour(time):\n \"\"\"\n Takes 12 hour time as a string and converts it to 24 hour time.\n \"\"\"\n\n if len(time[:-2].split(':')) < 2:\n hour = time[:-2]\n minute = '00'\n else:\n hour, minute = time[:-2].split(':')\n\n if time[-2:] == 'AM':\n time_formatted = hour + ':' + minute\n elif time[-2:] == 'PM':\n time_formatted = str(int(hour)+ 12) + ':' + minute\n\n if time_formatted in ['24:00','0:00','00:00']:\n time_formatted = '23:59'\n\n return time_formatted\n\nclass PetSmartSpider(scrapy.Spider):\n download_delay = 0.2\n name = \"petsmart\"\n item_attributes = { 'brand': \"Petsmart\" }\n allowed_domains = [\"petsmart.com\"]\n start_urls = (\n 'https://www.petsmart.com/store-locator/all/',\n )\n\n def parse(self, response):\n state_urls = response.xpath('//li[@class=\"col-sm-12 col-md-4\"]/a/@href').extract()\n is_store_details_urls = response.xpath('//a[@class=\"store-details-link\"]/@href').extract()\n\n if not state_urls and is_store_details_urls:\n for url in is_store_details_urls:\n yield scrapy.Request(response.urljoin(url), callback=self.parse_store)\n else:\n for url in state_urls:\n yield scrapy.Request(response.urljoin(url))\n\n def parse_store(self, response):\n ref = re.search(r'.+/?\\?(.+)', response.url).group(1)\n\n properties = {\n 'name': response.xpath('//span[@itemprop=\"name\"]/text()').extract_first().strip(),\n 'addr_full': response.xpath('//div[@itemprop=\"streetAddress\"]/text()').extract_first(),\n 'city': response.xpath('//span[@itemprop=\"addressLocality\"][1]/text()').extract_first().title(),\n 'state': response.xpath('//span[@itemprop=\"addressLocality\"][2]/text()').extract_first(),\n 'postcode': response.xpath('//span[@itemprop=\"postalCode\"]/text()').extract_first(),\n 'lat': float(response.xpath('//input[@name=\"storeLatitudeVal\"]/@value').extract_first()),\n 'lon': float(response.xpath('//input[@name=\"storeLongitudeVal\"]/@value').extract_first()),\n 'phone': 
response.xpath('//a[@class=\"store-contact-info\"]/text()').extract_first(),\n 'ref': ref,\n 'website': response.url\n }\n\n hours = self.parse_hours(response.xpath('//div[@class=\"store-detail-address\"]'))\n\n if hours:\n properties['opening_hours'] = hours\n\n yield GeojsonPointItem(**properties)\n\n def parse_hours(self, elements):\n opening_hours = OpeningHours()\n\n days = elements.xpath('//span[@itemprop=\"dayOfWeek\"]/text()').extract()\n today = (set(day_mapping) - set(days)).pop()\n days.remove('TODAY')\n days.insert(0,today)\n open_hours = elements.xpath('//div[@class=\"store-hours\"]/time[@itemprop=\"opens\"]/@content').extract()\n close_hours = elements.xpath('//div[@class=\"store-hours\"]/time[@itemprop=\"closes\"]/@content').extract()\n\n store_hours = dict((z[0],list(z[1:])) for z in zip(days, open_hours, close_hours))\n\n for day, hours in store_hours.items():\n if 'CLOSED' in hours:\n continue\n opening_hours.add_range(day=day_mapping[day],\n open_time=convert_24hour(hours[0]),\n close_time=convert_24hour(hours[1]))\n return opening_hours.as_opening_hours()\n", "path": "locations/spiders/petsmart.py"}]} | 1,640 | 361 |
gh_patches_debug_36927 | rasdani/github-patches | git_diff | mdn__kuma-7800 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
🧹 Remove homepage-related frontend code
Once https://github.com/mdn/yari/issues/2438 lands, we need to go into the Kuma forest and find all things related to the homepage frontend code, chop it down, and clear it out.
</issue>
<code>
[start of kuma/landing/views.py]
1 from django.conf import settings
2 from django.http import HttpResponse
3 from django.shortcuts import redirect, render
4 from django.views import static
5 from django.views.decorators.cache import never_cache
6 from django.views.generic import RedirectView
7
8 from kuma.core.decorators import ensure_wiki_domain, shared_cache_control
9 from kuma.core.utils import is_wiki
10 from kuma.feeder.models import Bundle
11 from kuma.feeder.sections import SECTION_HACKS
12 from kuma.search.models import Filter
13
14 from .utils import favicon_url
15
16
17 @shared_cache_control
18 def contribute_json(request):
19 return static.serve(request, "contribute.json", document_root=settings.ROOT)
20
21
22 @shared_cache_control
23 def home(request):
24 """Home page."""
25 context = {}
26 # Need for both wiki and react homepage
27 context["updates"] = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])
28
29 # The default template name
30 template_name = "landing/react_homepage.html"
31 if is_wiki(request):
32 template_name = "landing/homepage.html"
33 context["default_filters"] = Filter.objects.default_filters()
34 return render(request, template_name, context)
35
36
37 @ensure_wiki_domain
38 @never_cache
39 def maintenance_mode(request):
40 if settings.MAINTENANCE_MODE:
41 return render(request, "landing/maintenance-mode.html")
42 else:
43 return redirect("home")
44
45
46 @ensure_wiki_domain
47 @shared_cache_control
48 def promote_buttons(request):
49 """Bug 646192: MDN affiliate buttons"""
50 return render(request, "landing/promote_buttons.html")
51
52
53 ROBOTS_ALL_ALLOWED_TXT = """\
54 User-agent: *
55 Sitemap: https://wiki.developer.mozilla.org/sitemap.xml
56
57 Disallow:
58 """
59
60 ROBOTS_ALLOWED_TXT = """\
61 User-agent: *
62 Sitemap: https://developer.mozilla.org/sitemap.xml
63
64 Disallow: /api/
65 Disallow: /*docs/get-documents
66 Disallow: /*docs/Experiment:*
67 Disallow: /*$children
68 Disallow: /*docs.json
69 Disallow: /*/files/
70 Disallow: /media
71 Disallow: /*profiles*/edit
72 """ + "\n".join(
73 "Disallow: /{locale}/search".format(locale=locale)
74 for locale in settings.ENABLED_LOCALES
75 )
76
77 ROBOTS_GO_AWAY_TXT = """\
78 User-Agent: *
79 Disallow: /
80 """
81
82
83 @shared_cache_control
84 def robots_txt(request):
85 """Serve robots.txt that allows or forbids robots."""
86 host = request.get_host()
87 if host in settings.ALLOW_ROBOTS_DOMAINS:
88 robots = ""
89 elif host in settings.ALLOW_ROBOTS_WEB_DOMAINS:
90 if host == settings.WIKI_HOST:
91 robots = ROBOTS_ALL_ALLOWED_TXT
92 else:
93 robots = ROBOTS_ALLOWED_TXT
94 else:
95 robots = ROBOTS_GO_AWAY_TXT
96 return HttpResponse(robots, content_type="text/plain")
97
98
99 class FaviconRedirect(RedirectView):
100 """Redirect to the favicon in the static img folder (bug 1402497)"""
101
102 def get_redirect_url(self, *args, **kwargs):
103 return favicon_url()
104
[end of kuma/landing/views.py]
[start of kuma/landing/urls.py]
1 from django.urls import re_path
2
3 from kuma.core.decorators import shared_cache_control
4
5 from . import views
6
7
8 MONTH = 60 * 60 * 24 * 30
9
10
11 lang_urlpatterns = [
12 re_path(r"^$", views.home, name="home"),
13 re_path(r"^maintenance-mode/?$", views.maintenance_mode, name="maintenance_mode"),
14 re_path(r"^promote/?$", views.promote_buttons, name="promote"),
15 re_path(r"^promote/buttons/?$", views.promote_buttons, name="promote_buttons"),
16 ]
17
18 urlpatterns = [
19 re_path(r"^contribute\.json$", views.contribute_json, name="contribute_json"),
20 re_path(r"^robots.txt$", views.robots_txt, name="robots_txt"),
21 re_path(
22 r"^favicon.ico$",
23 shared_cache_control(views.FaviconRedirect.as_view(), s_maxage=MONTH),
24 name="favicon_ico",
25 ),
26 ]
27
[end of kuma/landing/urls.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kuma/landing/urls.py b/kuma/landing/urls.py
--- a/kuma/landing/urls.py
+++ b/kuma/landing/urls.py
@@ -11,12 +11,9 @@
lang_urlpatterns = [
re_path(r"^$", views.home, name="home"),
re_path(r"^maintenance-mode/?$", views.maintenance_mode, name="maintenance_mode"),
- re_path(r"^promote/?$", views.promote_buttons, name="promote"),
- re_path(r"^promote/buttons/?$", views.promote_buttons, name="promote_buttons"),
]
urlpatterns = [
- re_path(r"^contribute\.json$", views.contribute_json, name="contribute_json"),
re_path(r"^robots.txt$", views.robots_txt, name="robots_txt"),
re_path(
r"^favicon.ico$",
diff --git a/kuma/landing/views.py b/kuma/landing/views.py
--- a/kuma/landing/views.py
+++ b/kuma/landing/views.py
@@ -1,37 +1,25 @@
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import redirect, render
-from django.views import static
from django.views.decorators.cache import never_cache
from django.views.generic import RedirectView
from kuma.core.decorators import ensure_wiki_domain, shared_cache_control
-from kuma.core.utils import is_wiki
-from kuma.feeder.models import Bundle
-from kuma.feeder.sections import SECTION_HACKS
-from kuma.search.models import Filter
from .utils import favicon_url
-@shared_cache_control
-def contribute_json(request):
- return static.serve(request, "contribute.json", document_root=settings.ROOT)
-
-
-@shared_cache_control
def home(request):
"""Home page."""
- context = {}
- # Need for both wiki and react homepage
- context["updates"] = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])
-
- # The default template name
- template_name = "landing/react_homepage.html"
- if is_wiki(request):
- template_name = "landing/homepage.html"
- context["default_filters"] = Filter.objects.default_filters()
- return render(request, template_name, context)
+ return HttpResponse(
+ """
+ <html>
+ End of an era. Kuma's no longer rendering a home page.<br>
+ See project Yari.
+ </html>
+ """,
+ content_type="text/html",
+ )
@ensure_wiki_domain
@@ -43,13 +31,6 @@
return redirect("home")
-@ensure_wiki_domain
-@shared_cache_control
-def promote_buttons(request):
- """Bug 646192: MDN affiliate buttons"""
- return render(request, "landing/promote_buttons.html")
-
-
ROBOTS_ALL_ALLOWED_TXT = """\
User-agent: *
Sitemap: https://wiki.developer.mozilla.org/sitemap.xml
| {"golden_diff": "diff --git a/kuma/landing/urls.py b/kuma/landing/urls.py\n--- a/kuma/landing/urls.py\n+++ b/kuma/landing/urls.py\n@@ -11,12 +11,9 @@\n lang_urlpatterns = [\n re_path(r\"^$\", views.home, name=\"home\"),\n re_path(r\"^maintenance-mode/?$\", views.maintenance_mode, name=\"maintenance_mode\"),\n- re_path(r\"^promote/?$\", views.promote_buttons, name=\"promote\"),\n- re_path(r\"^promote/buttons/?$\", views.promote_buttons, name=\"promote_buttons\"),\n ]\n \n urlpatterns = [\n- re_path(r\"^contribute\\.json$\", views.contribute_json, name=\"contribute_json\"),\n re_path(r\"^robots.txt$\", views.robots_txt, name=\"robots_txt\"),\n re_path(\n r\"^favicon.ico$\",\ndiff --git a/kuma/landing/views.py b/kuma/landing/views.py\n--- a/kuma/landing/views.py\n+++ b/kuma/landing/views.py\n@@ -1,37 +1,25 @@\n from django.conf import settings\n from django.http import HttpResponse\n from django.shortcuts import redirect, render\n-from django.views import static\n from django.views.decorators.cache import never_cache\n from django.views.generic import RedirectView\n \n from kuma.core.decorators import ensure_wiki_domain, shared_cache_control\n-from kuma.core.utils import is_wiki\n-from kuma.feeder.models import Bundle\n-from kuma.feeder.sections import SECTION_HACKS\n-from kuma.search.models import Filter\n \n from .utils import favicon_url\n \n \n-@shared_cache_control\n-def contribute_json(request):\n- return static.serve(request, \"contribute.json\", document_root=settings.ROOT)\n-\n-\n-@shared_cache_control\n def home(request):\n \"\"\"Home page.\"\"\"\n- context = {}\n- # Need for both wiki and react homepage\n- context[\"updates\"] = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])\n-\n- # The default template name\n- template_name = \"landing/react_homepage.html\"\n- if is_wiki(request):\n- template_name = \"landing/homepage.html\"\n- context[\"default_filters\"] = Filter.objects.default_filters()\n- return render(request, template_name, context)\n+ return HttpResponse(\n+ \"\"\"\n+ <html>\n+ End of an era. 
Kuma's no longer rendering a home page.<br>\n+ See project Yari.\n+ </html>\n+ \"\"\",\n+ content_type=\"text/html\",\n+ )\n \n \n @ensure_wiki_domain\n@@ -43,13 +31,6 @@\n return redirect(\"home\")\n \n \n-@ensure_wiki_domain\n-@shared_cache_control\n-def promote_buttons(request):\n- \"\"\"Bug 646192: MDN affiliate buttons\"\"\"\n- return render(request, \"landing/promote_buttons.html\")\n-\n-\n ROBOTS_ALL_ALLOWED_TXT = \"\"\"\\\n User-agent: *\n Sitemap: https://wiki.developer.mozilla.org/sitemap.xml\n", "issue": "\ud83e\uddf9 Remove hompage related frontend code\nOnce https://github.com/mdn/yari/issues/2438 lands, we need to go into the Kuma forest and find all things related to the homepage frontend code, chop it down, and clear it out.\n", "before_files": [{"content": "from django.conf import settings\nfrom django.http import HttpResponse\nfrom django.shortcuts import redirect, render\nfrom django.views import static\nfrom django.views.decorators.cache import never_cache\nfrom django.views.generic import RedirectView\n\nfrom kuma.core.decorators import ensure_wiki_domain, shared_cache_control\nfrom kuma.core.utils import is_wiki\nfrom kuma.feeder.models import Bundle\nfrom kuma.feeder.sections import SECTION_HACKS\nfrom kuma.search.models import Filter\n\nfrom .utils import favicon_url\n\n\n@shared_cache_control\ndef contribute_json(request):\n return static.serve(request, \"contribute.json\", document_root=settings.ROOT)\n\n\n@shared_cache_control\ndef home(request):\n \"\"\"Home page.\"\"\"\n context = {}\n # Need for both wiki and react homepage\n context[\"updates\"] = list(Bundle.objects.recent_entries(SECTION_HACKS.updates)[:5])\n\n # The default template name\n template_name = \"landing/react_homepage.html\"\n if is_wiki(request):\n template_name = \"landing/homepage.html\"\n context[\"default_filters\"] = Filter.objects.default_filters()\n return render(request, template_name, context)\n\n\n@ensure_wiki_domain\n@never_cache\ndef maintenance_mode(request):\n if settings.MAINTENANCE_MODE:\n return render(request, \"landing/maintenance-mode.html\")\n else:\n return redirect(\"home\")\n\n\n@ensure_wiki_domain\n@shared_cache_control\ndef promote_buttons(request):\n \"\"\"Bug 646192: MDN affiliate buttons\"\"\"\n return render(request, \"landing/promote_buttons.html\")\n\n\nROBOTS_ALL_ALLOWED_TXT = \"\"\"\\\nUser-agent: *\nSitemap: https://wiki.developer.mozilla.org/sitemap.xml\n\nDisallow:\n\"\"\"\n\nROBOTS_ALLOWED_TXT = \"\"\"\\\nUser-agent: *\nSitemap: https://developer.mozilla.org/sitemap.xml\n\nDisallow: /api/\nDisallow: /*docs/get-documents\nDisallow: /*docs/Experiment:*\nDisallow: /*$children\nDisallow: /*docs.json\nDisallow: /*/files/\nDisallow: /media\nDisallow: /*profiles*/edit\n\"\"\" + \"\\n\".join(\n \"Disallow: /{locale}/search\".format(locale=locale)\n for locale in settings.ENABLED_LOCALES\n)\n\nROBOTS_GO_AWAY_TXT = \"\"\"\\\nUser-Agent: *\nDisallow: /\n\"\"\"\n\n\n@shared_cache_control\ndef robots_txt(request):\n \"\"\"Serve robots.txt that allows or forbids robots.\"\"\"\n host = request.get_host()\n if host in settings.ALLOW_ROBOTS_DOMAINS:\n robots = \"\"\n elif host in settings.ALLOW_ROBOTS_WEB_DOMAINS:\n if host == settings.WIKI_HOST:\n robots = ROBOTS_ALL_ALLOWED_TXT\n else:\n robots = ROBOTS_ALLOWED_TXT\n else:\n robots = ROBOTS_GO_AWAY_TXT\n return HttpResponse(robots, content_type=\"text/plain\")\n\n\nclass FaviconRedirect(RedirectView):\n \"\"\"Redirect to the favicon in the static img folder (bug 1402497)\"\"\"\n\n def get_redirect_url(self, *args, 
**kwargs):\n return favicon_url()\n", "path": "kuma/landing/views.py"}, {"content": "from django.urls import re_path\n\nfrom kuma.core.decorators import shared_cache_control\n\nfrom . import views\n\n\nMONTH = 60 * 60 * 24 * 30\n\n\nlang_urlpatterns = [\n re_path(r\"^$\", views.home, name=\"home\"),\n re_path(r\"^maintenance-mode/?$\", views.maintenance_mode, name=\"maintenance_mode\"),\n re_path(r\"^promote/?$\", views.promote_buttons, name=\"promote\"),\n re_path(r\"^promote/buttons/?$\", views.promote_buttons, name=\"promote_buttons\"),\n]\n\nurlpatterns = [\n re_path(r\"^contribute\\.json$\", views.contribute_json, name=\"contribute_json\"),\n re_path(r\"^robots.txt$\", views.robots_txt, name=\"robots_txt\"),\n re_path(\n r\"^favicon.ico$\",\n shared_cache_control(views.FaviconRedirect.as_view(), s_maxage=MONTH),\n name=\"favicon_ico\",\n ),\n]\n", "path": "kuma/landing/urls.py"}]} | 1,754 | 654 |
gh_patches_debug_41527 | rasdani/github-patches | git_diff | streamlink__streamlink-2643 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ITV Player unable to open URL
- [x] This is a plugin issue and I have read the contribution guidelines.
### Description
ITV Player/Hub for the main ITV channel does not open and gives an error message.
### Reproduction steps / Explicit stream URLs to test
Type "streamlink itv.com/hub/itv".
Other ITV Hub URLs such as hub/itv2 or hub/citv continue to work.
### Log output
[cli][info] Found matching plugin itvplayer for URL itv.com/hub/itv
error: Unable to open URL: http://cdn.dar.itv.com/private/aHR0cDovL2NzbS1lLnBsYXkuZGFyLml0di5jb20vY3NtL2V4dGxpdmUvMjQ3MTYyMTUubTN1OD95by5hYz10cnVlJnlvLmFkPWZhbHNlJnBtPWZyZWUmcmFuZG9tPTU2OTk0ODU2ODc0ODA1NzE0MjAmYXJlYT1pdHZwbGF5ZXIuc2ltdWxjYXN0JnNvdXJjZT1zaW11bGNhc3QmaGRldmlkPXgmc2l6ZT1wYXR0ZXJuJnN1cGVydGFnPWRmbHQsc2ltJnVzPWFubyZzaXRlPWl0diZvc3Zlcj0xMCZjaGFuYnJhbmQ9aXR2MSZvcz13aW5kb3dzJmhtb2Q9NjYmcHY9YnJvd3Nlci40LjEmcGxheWVyPWh0bWw1LmRlc2t0b3AmZG09bmxpJmhtYW49Y2hyb21lJnRkdXI9MjE2MDAmdHBhcnRzPTEmYXJwPXgmZGVmYXVsdD1EZWZhdWx0UGF0dGVybiZwbGlzdD1zaW1hZHJlcGxhY2VtZW50JnBsZmNpZD1zaW1hZHJlcGxhY2VtZW50/endpoint?hdnea=st=1563543051~exp=1563564651~acl=/*~data=nohubplus~hmac=0ebaf6d4f22f101378a6c7ed3903c1999f6b28efd29ee05763e5ec7bf0d27896&yo.channel=24716215 (404 Client Error: for url: http://csm-e.play.dar.itv.com/csm/extlive/24716215.m3u8?yo.ac=true&yo.ad=false&pm=free&random=5699485687480571420&area=itvplayer.simulcast&source=simulcast&hdevid=x&size=pattern&supertag=dflt,sim&us=ano&site=itv&osver=10&chanbrand=itv1&os=windows&hmod=66&pv=browser.4.1&player=html5.desktop&dm=nli&hman=chrome&tdur=21600&tparts=1&arp=x&default=DefaultPattern&plist=simadreplacement&plfcid=simadreplacement)
### Additional comments, screenshots, etc.
I have noticed the site plays an ITV Hub promo when starting a livestream on the website itself before loading that stream. This does not occur for the other channels.
</issue>
<code>
[start of src/streamlink/plugins/itvplayer.py]
1 import json
2 import logging
3 import re
4
5 from streamlink.compat import urljoin
6 from streamlink.plugin import Plugin
7 from streamlink.plugin.api import useragents, validate
8 from streamlink.plugin.api.utils import itertags
9 from streamlink.stream import HLSStream
10
11 log = logging.getLogger(__name__)
12
13
14 class ITVPlayer(Plugin):
15 _url_re = re.compile(r"https?://(?:www.)?itv.com/hub/(?P<stream>.+)")
16 _video_info_schema = validate.Schema({
17 "StatusCode": 200,
18 "AdditionalInfo": {
19 "Message": validate.any(None, validate.text)
20 },
21 "Playlist": {
22 "VideoType": validate.text,
23 "Video": {
24 "Subtitles": validate.any(None, [{
25 "Href": validate.url(),
26 }]),
27 "Base": validate.url(),
28 "MediaFiles": [
29 {"Href": validate.text,
30 "KeyServiceUrl": validate.any(None, validate.url())}
31 ]
32 }
33 }
34 })
35
36 @classmethod
37 def can_handle_url(cls, url):
38 match = cls._url_re.match(url)
39 return match is not None
40
41 @property
42 def device_info(self):
43 return {"user": {},
44 "device": {"manufacturer": "Chrome", "model": "66",
45 "os": {"name": "Windows", "version": "10", "type": "desktop"}},
46 "client": {"version": "4.1", "id": "browser"},
47 "variantAvailability": {"featureset": {"min": ["hls", "aes"],
48 "max": ["hls", "aes"]},
49 "platformTag": "dotcom"}}
50
51 def video_info(self):
52 page = self.session.http.get(self.url)
53 for div in itertags(page.text, 'div'):
54 if div.attributes.get("id") == "video":
55 return div.attributes
56
57 def _get_streams(self):
58 """
59 Find all the streams for the ITV url
60 :return: Mapping of quality to stream
61 """
62 self.session.http.headers.update({"User-Agent": useragents.FIREFOX})
63 video_info = self.video_info()
64 video_info_url = video_info.get("data-html5-playlist") or video_info.get("data-video-id")
65
66 res = self.session.http.post(video_info_url,
67 data=json.dumps(self.device_info),
68 headers={"hmac": video_info.get("data-video-hmac")})
69 data = self.session.http.json(res, schema=self._video_info_schema)
70
71 log.debug("Video ID info response: {0}".format(data))
72
73 stype = data['Playlist']['VideoType']
74
75 for media in data['Playlist']['Video']['MediaFiles']:
76 url = urljoin(data['Playlist']['Video']['Base'], media['Href'])
77 name_fmt = "{pixels}_{bitrate}" if stype == "CATCHUP" else None
78 for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():
79 yield s
80
81
82
83 __plugin__ = ITVPlayer
84
[end of src/streamlink/plugins/itvplayer.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/streamlink/plugins/itvplayer.py b/src/streamlink/plugins/itvplayer.py
--- a/src/streamlink/plugins/itvplayer.py
+++ b/src/streamlink/plugins/itvplayer.py
@@ -6,13 +6,14 @@
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents, validate
from streamlink.plugin.api.utils import itertags
-from streamlink.stream import HLSStream
+from streamlink.stream import HLSStream, RTMPStream
log = logging.getLogger(__name__)
class ITVPlayer(Plugin):
_url_re = re.compile(r"https?://(?:www.)?itv.com/hub/(?P<stream>.+)")
+ swf_url = "https://mediaplayer.itv.com/2.19.5%2Bbuild.a23aa62b1e/ITVMediaPlayer.swf"
_video_info_schema = validate.Schema({
"StatusCode": 200,
"AdditionalInfo": {
@@ -35,8 +36,7 @@
@classmethod
def can_handle_url(cls, url):
- match = cls._url_re.match(url)
- return match is not None
+ return cls._url_re.match(url) is not None
@property
def device_info(self):
@@ -54,18 +54,11 @@
if div.attributes.get("id") == "video":
return div.attributes
- def _get_streams(self):
- """
- Find all the streams for the ITV url
- :return: Mapping of quality to stream
- """
- self.session.http.headers.update({"User-Agent": useragents.FIREFOX})
+ def _get_html5_streams(self, video_info_url):
video_info = self.video_info()
- video_info_url = video_info.get("data-html5-playlist") or video_info.get("data-video-id")
-
res = self.session.http.post(video_info_url,
- data=json.dumps(self.device_info),
- headers={"hmac": video_info.get("data-video-hmac")})
+ data=json.dumps(self.device_info),
+ headers={"hmac": video_info.get("data-video-hmac")})
data = self.session.http.json(res, schema=self._video_info_schema)
log.debug("Video ID info response: {0}".format(data))
@@ -78,6 +71,31 @@
for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():
yield s
+ def _get_rtmp_streams(self, video_info_url):
+ log.debug("XML data path: {0}".format(video_info_url))
+ res = self.session.http.get(video_info_url)
+ playlist = self.session.http.xml(res, ignore_ns=True)
+ mediafiles = playlist.find(".//Playlist/VideoEntries/Video/MediaFiles")
+ playpath = mediafiles.find("./MediaFile/URL")
+ return {"live": RTMPStream(self.session, {"rtmp": mediafiles.attrib.get("base"),
+ "playpath": playpath.text,
+ "live": True,
+ "swfVfy": self.swf_url
+ })}
+
+ def _get_streams(self):
+ """
+ Find all the streams for the ITV url
+ :return: Mapping of quality to stream
+ """
+ self.session.http.headers.update({"User-Agent": useragents.FIREFOX})
+ stream = self._url_re.match(self.url).group("stream")
+ video_info = self.video_info()
+ video_info_url = video_info.get("data-video-id" if stream.lower() in ("itv", "itv4") else "data-html5-playlist")
+ if video_info_url.endswith(".xml"):
+ return self._get_rtmp_streams(video_info_url)
+ else:
+ return self._get_html5_streams(video_info_url)
__plugin__ = ITVPlayer
| {"golden_diff": "diff --git a/src/streamlink/plugins/itvplayer.py b/src/streamlink/plugins/itvplayer.py\n--- a/src/streamlink/plugins/itvplayer.py\n+++ b/src/streamlink/plugins/itvplayer.py\n@@ -6,13 +6,14 @@\n from streamlink.plugin import Plugin\n from streamlink.plugin.api import useragents, validate\n from streamlink.plugin.api.utils import itertags\n-from streamlink.stream import HLSStream\n+from streamlink.stream import HLSStream, RTMPStream\n \n log = logging.getLogger(__name__)\n \n \n class ITVPlayer(Plugin):\n _url_re = re.compile(r\"https?://(?:www.)?itv.com/hub/(?P<stream>.+)\")\n+ swf_url = \"https://mediaplayer.itv.com/2.19.5%2Bbuild.a23aa62b1e/ITVMediaPlayer.swf\"\n _video_info_schema = validate.Schema({\n \"StatusCode\": 200,\n \"AdditionalInfo\": {\n@@ -35,8 +36,7 @@\n \n @classmethod\n def can_handle_url(cls, url):\n- match = cls._url_re.match(url)\n- return match is not None\n+ return cls._url_re.match(url) is not None\n \n @property\n def device_info(self):\n@@ -54,18 +54,11 @@\n if div.attributes.get(\"id\") == \"video\":\n return div.attributes\n \n- def _get_streams(self):\n- \"\"\"\n- Find all the streams for the ITV url\n- :return: Mapping of quality to stream\n- \"\"\"\n- self.session.http.headers.update({\"User-Agent\": useragents.FIREFOX})\n+ def _get_html5_streams(self, video_info_url):\n video_info = self.video_info()\n- video_info_url = video_info.get(\"data-html5-playlist\") or video_info.get(\"data-video-id\")\n-\n res = self.session.http.post(video_info_url,\n- data=json.dumps(self.device_info),\n- headers={\"hmac\": video_info.get(\"data-video-hmac\")})\n+ data=json.dumps(self.device_info),\n+ headers={\"hmac\": video_info.get(\"data-video-hmac\")})\n data = self.session.http.json(res, schema=self._video_info_schema)\n \n log.debug(\"Video ID info response: {0}\".format(data))\n@@ -78,6 +71,31 @@\n for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():\n yield s\n \n+ def _get_rtmp_streams(self, video_info_url):\n+ log.debug(\"XML data path: {0}\".format(video_info_url))\n+ res = self.session.http.get(video_info_url)\n+ playlist = self.session.http.xml(res, ignore_ns=True)\n+ mediafiles = playlist.find(\".//Playlist/VideoEntries/Video/MediaFiles\")\n+ playpath = mediafiles.find(\"./MediaFile/URL\")\n+ return {\"live\": RTMPStream(self.session, {\"rtmp\": mediafiles.attrib.get(\"base\"),\n+ \"playpath\": playpath.text,\n+ \"live\": True,\n+ \"swfVfy\": self.swf_url\n+ })}\n+\n+ def _get_streams(self):\n+ \"\"\"\n+ Find all the streams for the ITV url\n+ :return: Mapping of quality to stream\n+ \"\"\"\n+ self.session.http.headers.update({\"User-Agent\": useragents.FIREFOX})\n+ stream = self._url_re.match(self.url).group(\"stream\")\n+ video_info = self.video_info()\n+ video_info_url = video_info.get(\"data-video-id\" if stream.lower() in (\"itv\", \"itv4\") else \"data-html5-playlist\")\n+ if video_info_url.endswith(\".xml\"):\n+ return self._get_rtmp_streams(video_info_url)\n+ else:\n+ return self._get_html5_streams(video_info_url)\n \n \n __plugin__ = ITVPlayer\n", "issue": "ITV Player unable to open URL\n- [x] This is a plugin issue and I have read the contribution guidelines.\r\n\r\n\r\n### Description\r\n\r\nITV Player/Hub for the main ITV channel do not open and give an error message.\r\n\r\n\r\n### Reproduction steps / Explicit stream URLs to test\r\n\r\nType \"streamlink itv.com/hub/itv\".\r\n\r\nOther ITV Hub URL's such as hub/itv2 or hub/citv continue to work.\r\n\r\n### Log output\r\n\r\n[cli][info] Found 
matching plugin itvplayer for URL itv.com/hub/itv\r\nerror: Unable to open URL: http://cdn.dar.itv.com/private/aHR0cDovL2NzbS1lLnBsYXkuZGFyLml0di5jb20vY3NtL2V4dGxpdmUvMjQ3MTYyMTUubTN1OD95by5hYz10cnVlJnlvLmFkPWZhbHNlJnBtPWZyZWUmcmFuZG9tPTU2OTk0ODU2ODc0ODA1NzE0MjAmYXJlYT1pdHZwbGF5ZXIuc2ltdWxjYXN0JnNvdXJjZT1zaW11bGNhc3QmaGRldmlkPXgmc2l6ZT1wYXR0ZXJuJnN1cGVydGFnPWRmbHQsc2ltJnVzPWFubyZzaXRlPWl0diZvc3Zlcj0xMCZjaGFuYnJhbmQ9aXR2MSZvcz13aW5kb3dzJmhtb2Q9NjYmcHY9YnJvd3Nlci40LjEmcGxheWVyPWh0bWw1LmRlc2t0b3AmZG09bmxpJmhtYW49Y2hyb21lJnRkdXI9MjE2MDAmdHBhcnRzPTEmYXJwPXgmZGVmYXVsdD1EZWZhdWx0UGF0dGVybiZwbGlzdD1zaW1hZHJlcGxhY2VtZW50JnBsZmNpZD1zaW1hZHJlcGxhY2VtZW50/endpoint?hdnea=st=1563543051~exp=1563564651~acl=/*~data=nohubplus~hmac=0ebaf6d4f22f101378a6c7ed3903c1999f6b28efd29ee05763e5ec7bf0d27896&yo.channel=24716215 (404 Client Error: for url: http://csm-e.play.dar.itv.com/csm/extlive/24716215.m3u8?yo.ac=true&yo.ad=false&pm=free&random=5699485687480571420&area=itvplayer.simulcast&source=simulcast&hdevid=x&size=pattern&supertag=dflt,sim&us=ano&site=itv&osver=10&chanbrand=itv1&os=windows&hmod=66&pv=browser.4.1&player=html5.desktop&dm=nli&hman=chrome&tdur=21600&tparts=1&arp=x&default=DefaultPattern&plist=simadreplacement&plfcid=simadreplacement)\r\n\r\n\r\n### Additional comments, screenshots, etc.\r\n\r\nI have noticed the site plays a ITV Hub promo when starting a livestream on the website itself before loading that stream. This does not occur for the other channels.\n", "before_files": [{"content": "import json\nimport logging\nimport re\n\nfrom streamlink.compat import urljoin\nfrom streamlink.plugin import Plugin\nfrom streamlink.plugin.api import useragents, validate\nfrom streamlink.plugin.api.utils import itertags\nfrom streamlink.stream import HLSStream\n\nlog = logging.getLogger(__name__)\n\n\nclass ITVPlayer(Plugin):\n _url_re = re.compile(r\"https?://(?:www.)?itv.com/hub/(?P<stream>.+)\")\n _video_info_schema = validate.Schema({\n \"StatusCode\": 200,\n \"AdditionalInfo\": {\n \"Message\": validate.any(None, validate.text)\n },\n \"Playlist\": {\n \"VideoType\": validate.text,\n \"Video\": {\n \"Subtitles\": validate.any(None, [{\n \"Href\": validate.url(),\n }]),\n \"Base\": validate.url(),\n \"MediaFiles\": [\n {\"Href\": validate.text,\n \"KeyServiceUrl\": validate.any(None, validate.url())}\n ]\n }\n }\n })\n\n @classmethod\n def can_handle_url(cls, url):\n match = cls._url_re.match(url)\n return match is not None\n\n @property\n def device_info(self):\n return {\"user\": {},\n \"device\": {\"manufacturer\": \"Chrome\", \"model\": \"66\",\n \"os\": {\"name\": \"Windows\", \"version\": \"10\", \"type\": \"desktop\"}},\n \"client\": {\"version\": \"4.1\", \"id\": \"browser\"},\n \"variantAvailability\": {\"featureset\": {\"min\": [\"hls\", \"aes\"],\n \"max\": [\"hls\", \"aes\"]},\n \"platformTag\": \"dotcom\"}}\n\n def video_info(self):\n page = self.session.http.get(self.url)\n for div in itertags(page.text, 'div'):\n if div.attributes.get(\"id\") == \"video\":\n return div.attributes\n\n def _get_streams(self):\n \"\"\"\n Find all the streams for the ITV url\n :return: Mapping of quality to stream\n \"\"\"\n self.session.http.headers.update({\"User-Agent\": useragents.FIREFOX})\n video_info = self.video_info()\n video_info_url = video_info.get(\"data-html5-playlist\") or video_info.get(\"data-video-id\")\n\n res = self.session.http.post(video_info_url,\n data=json.dumps(self.device_info),\n headers={\"hmac\": video_info.get(\"data-video-hmac\")})\n data = self.session.http.json(res, 
schema=self._video_info_schema)\n\n log.debug(\"Video ID info response: {0}\".format(data))\n\n stype = data['Playlist']['VideoType']\n\n for media in data['Playlist']['Video']['MediaFiles']:\n url = urljoin(data['Playlist']['Video']['Base'], media['Href'])\n name_fmt = \"{pixels}_{bitrate}\" if stype == \"CATCHUP\" else None\n for s in HLSStream.parse_variant_playlist(self.session, url, name_fmt=name_fmt).items():\n yield s\n\n\n\n__plugin__ = ITVPlayer\n", "path": "src/streamlink/plugins/itvplayer.py"}]} | 2,260 | 861 |
gh_patches_debug_34783 | rasdani/github-patches | git_diff | netbox-community__netbox-2957 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
API Docs - Typo error on /virtualisation/interfaces/
<!--
Before opening a new issue, please search through the existing issues to
see if your topic has already been addressed. Note that you may need to
remove the "is:open" filter from the search bar to include closed issues.
Check the appropriate type for your issue below by placing an x between the
brackets. For assistance with installation issues, or for any other issues
other than those listed below, please raise your topic for discussion on
our mailing list:
https://groups.google.com/forum/#!forum/netbox-discuss
Please note that issues which do not fall under any of the below categories
will be closed. Due to an excessive backlog of feature requests, we are
not currently accepting any proposals which extend NetBox's feature scope.
Do not prepend any sort of tag to your issue's title. An administrator will
review your issue and assign labels as appropriate.
--->
### Issue type
[ ] Feature request <!-- An enhancement of existing functionality -->
[ ] Bug report <!-- Unexpected or erroneous behavior -->
[X] Documentation <!-- A modification to the documentation -->
<!--
Please describe the environment in which you are running NetBox. (Be sure
to verify that you are running the latest stable release of NetBox before
submitting a bug report.) If you are submitting a bug report and have made
any changes to the code base, please first validate that your bug can be
recreated while running an official release.
-->
### Environment
* Python version: 3.6.5
* NetBox version: 2.3.3
<!--
BUG REPORTS must include:
* A list of the steps needed for someone else to reproduce the bug
* A description of the expected and observed behavior
* Any relevant error messages (screenshots may also help)
FEATURE REQUESTS must include:
* A detailed description of the proposed functionality
* A use case for the new feature
* A rough description of any necessary changes to the database schema
* Any relevant third-party libraries which would be needed
-->
### Description
Hello,
In the API documentation there appears to be a small error.
On the `/virtualisation/interfaces/` endpoint, method `POST`, the field `device` is marked as mandatory, but the API only responds to the field `virtual_machine`.
The error message returned by the API is also misleading:
```
{
"non_field_errors": [
"An interface must belong to either a device or a virtual machine."
]
}
```
It seems the error is also present on `/virtualisation/interfaces/{id}` for the `PUT` and `PATCH` methods.
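For illustration, a request that the API does accept might look like the sketch below; the host, token, VM ID and interface name are made-up placeholders, and only the use of `virtual_machine` instead of `device` reflects the behaviour described above:

```
# Hypothetical example -- the host, token, IDs and the exact set of required
# fields are placeholders, not taken from the documentation.
import requests

payload = {
    "virtual_machine": 123,  # ID of an existing virtual machine (placeholder)
    "name": "eth0",          # interface name (placeholder)
}
resp = requests.post(
    "https://netbox.example.com/api/virtualization/interfaces/",
    headers={"Authorization": "Token 0123456789abcdef"},
    json=payload,
)
print(resp.status_code, resp.json())
```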
</issue>
<code>
[start of netbox/utilities/custom_inspectors.py]
1 from drf_yasg import openapi
2 from drf_yasg.inspectors import FieldInspector, NotHandled, PaginatorInspector, FilterInspector, SwaggerAutoSchema
3 from rest_framework.fields import ChoiceField
4 from rest_framework.relations import ManyRelatedField
5 from taggit_serializer.serializers import TagListSerializerField
6
7 from extras.api.customfields import CustomFieldsSerializer
8 from utilities.api import ChoiceField, SerializedPKRelatedField, WritableNestedSerializer
9
10
11 class NetBoxSwaggerAutoSchema(SwaggerAutoSchema):
12 def get_request_serializer(self):
13 serializer = super().get_request_serializer()
14
15 if serializer is not None and self.method in self.implicit_body_methods:
16 properties = {}
17 for child_name, child in serializer.fields.items():
18 if isinstance(child, (ChoiceField, WritableNestedSerializer)):
19 properties[child_name] = None
20 elif isinstance(child, ManyRelatedField) and isinstance(child.child_relation, SerializedPKRelatedField):
21 properties[child_name] = None
22
23 if properties:
24 writable_class = type('Writable' + type(serializer).__name__, (type(serializer),), properties)
25 serializer = writable_class()
26
27 return serializer
28
29
30 class SerializedPKRelatedFieldInspector(FieldInspector):
31 def field_to_swagger_object(self, field, swagger_object_type, use_references, **kwargs):
32 SwaggerType, ChildSwaggerType = self._get_partial_types(field, swagger_object_type, use_references, **kwargs)
33 if isinstance(field, SerializedPKRelatedField):
34 return self.probe_field_inspectors(field.serializer(), ChildSwaggerType, use_references)
35
36 return NotHandled
37
38
39 class TagListFieldInspector(FieldInspector):
40 def field_to_swagger_object(self, field, swagger_object_type, use_references, **kwargs):
41 SwaggerType, ChildSwaggerType = self._get_partial_types(field, swagger_object_type, use_references, **kwargs)
42 if isinstance(field, TagListSerializerField):
43 child_schema = self.probe_field_inspectors(field.child, ChildSwaggerType, use_references)
44 return SwaggerType(
45 type=openapi.TYPE_ARRAY,
46 items=child_schema,
47 )
48
49 return NotHandled
50
51
52 class CustomChoiceFieldInspector(FieldInspector):
53 def field_to_swagger_object(self, field, swagger_object_type, use_references, **kwargs):
54 # this returns a callable which extracts title, description and other stuff
55 # https://drf-yasg.readthedocs.io/en/stable/_modules/drf_yasg/inspectors/base.html#FieldInspector._get_partial_types
56 SwaggerType, _ = self._get_partial_types(field, swagger_object_type, use_references, **kwargs)
57
58 if isinstance(field, ChoiceField):
59 value_schema = openapi.Schema(type=openapi.TYPE_INTEGER)
60
61 choices = list(field._choices.keys())
62 if set([None] + choices) == {None, True, False}:
63 # DeviceType.subdevice_role, Device.face and InterfaceConnection.connection_status all need to be
64 # differentiated since they each have subtly different values in their choice keys.
65 # - subdevice_role and connection_status are booleans, although subdevice_role includes None
66 # - face is an integer set {0, 1} which is easily confused with {False, True}
67 schema_type = openapi.TYPE_INTEGER
68 if all(type(x) == bool for x in [c for c in choices if c is not None]):
69 schema_type = openapi.TYPE_BOOLEAN
70 value_schema = openapi.Schema(type=schema_type)
71 value_schema['x-nullable'] = True
72
73 schema = SwaggerType(type=openapi.TYPE_OBJECT, required=["label", "value"], properties={
74 "label": openapi.Schema(type=openapi.TYPE_STRING),
75 "value": value_schema
76 })
77
78 return schema
79
80 elif isinstance(field, CustomFieldsSerializer):
81 schema = SwaggerType(type=openapi.TYPE_OBJECT)
82 return schema
83
84 return NotHandled
85
86
87 class NullableBooleanFieldInspector(FieldInspector):
88 def process_result(self, result, method_name, obj, **kwargs):
89
90 if isinstance(result, openapi.Schema) and isinstance(obj, ChoiceField) and result.type == 'boolean':
91 keys = obj.choices.keys()
92 if set(keys) == {None, True, False}:
93 result['x-nullable'] = True
94 result.type = 'boolean'
95
96 return result
97
98
99 class IdInFilterInspector(FilterInspector):
100 def process_result(self, result, method_name, obj, **kwargs):
101 if isinstance(result, list):
102 params = [p for p in result if isinstance(p, openapi.Parameter) and p.name == 'id__in']
103 for p in params:
104 p.type = 'string'
105
106 return result
107
108
109 class NullablePaginatorInspector(PaginatorInspector):
110 def process_result(self, result, method_name, obj, **kwargs):
111 if method_name == 'get_paginated_response' and isinstance(result, openapi.Schema):
112 next = result.properties['next']
113 if isinstance(next, openapi.Schema):
114 next['x-nullable'] = True
115 previous = result.properties['previous']
116 if isinstance(previous, openapi.Schema):
117 previous['x-nullable'] = True
118
119 return result
120
[end of netbox/utilities/custom_inspectors.py]
</code>
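For context, field/filter/paginator inspectors like the ones above only take effect once drf-yasg is pointed at them. The settings module is not part of this excerpt, so the snippet below is only a hedged sketch of the usual registration pattern; the module paths and surrounding configuration are assumptions, not the project's actual settings.

```python
# Hypothetical settings excerpt; the real SWAGGER_SETTINGS are not shown in this record.
SWAGGER_SETTINGS = {
    "DEFAULT_AUTO_SCHEMA_CLASS": "utilities.custom_inspectors.NetBoxSwaggerAutoSchema",
    "DEFAULT_FIELD_INSPECTORS": [
        # Custom inspectors are listed first; drf-yasg's stock inspectors would
        # normally follow so fields returning NotHandled still get a schema.
        "utilities.custom_inspectors.CustomChoiceFieldInspector",
        "utilities.custom_inspectors.NullableBooleanFieldInspector",
        "utilities.custom_inspectors.TagListFieldInspector",
    ],
    "DEFAULT_FILTER_INSPECTORS": ["utilities.custom_inspectors.IdInFilterInspector"],
    "DEFAULT_PAGINATOR_INSPECTORS": ["utilities.custom_inspectors.NullablePaginatorInspector"],
}
```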
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/netbox/utilities/custom_inspectors.py b/netbox/utilities/custom_inspectors.py
--- a/netbox/utilities/custom_inspectors.py
+++ b/netbox/utilities/custom_inspectors.py
@@ -1,14 +1,24 @@
from drf_yasg import openapi
from drf_yasg.inspectors import FieldInspector, NotHandled, PaginatorInspector, FilterInspector, SwaggerAutoSchema
+from drf_yasg.utils import get_serializer_ref_name
from rest_framework.fields import ChoiceField
from rest_framework.relations import ManyRelatedField
from taggit_serializer.serializers import TagListSerializerField
+from dcim.api.serializers import InterfaceSerializer as DCIMInterfaceSerializer
+from virtualization.api.serializers import InterfaceSerializer as VirtualMachineInterfaceSerializer
from extras.api.customfields import CustomFieldsSerializer
from utilities.api import ChoiceField, SerializedPKRelatedField, WritableNestedSerializer
+# this might be ugly, but it limits drf_yasg-specific code to this file
+DCIMInterfaceSerializer.Meta.ref_name = 'DCIMInterface'
+VirtualMachineInterfaceSerializer.Meta.ref_name = 'VirtualMachineInterface'
+
+
class NetBoxSwaggerAutoSchema(SwaggerAutoSchema):
+ writable_serializers = {}
+
def get_request_serializer(self):
serializer = super().get_request_serializer()
@@ -21,7 +31,17 @@
properties[child_name] = None
if properties:
- writable_class = type('Writable' + type(serializer).__name__, (type(serializer),), properties)
+ if type(serializer) not in self.writable_serializers:
+ writable_name = 'Writable' + type(serializer).__name__
+ meta_class = getattr(type(serializer), 'Meta', None)
+ if meta_class:
+ ref_name = 'Writable' + get_serializer_ref_name(serializer)
+ writable_meta = type('Meta', (meta_class,), {'ref_name': ref_name})
+ properties['Meta'] = writable_meta
+
+ self.writable_serializers[type(serializer)] = type(writable_name, (type(serializer),), properties)
+
+ writable_class = self.writable_serializers[type(serializer)]
serializer = writable_class()
return serializer
| {"golden_diff": "diff --git a/netbox/utilities/custom_inspectors.py b/netbox/utilities/custom_inspectors.py\n--- a/netbox/utilities/custom_inspectors.py\n+++ b/netbox/utilities/custom_inspectors.py\n@@ -1,14 +1,24 @@\n from drf_yasg import openapi\n from drf_yasg.inspectors import FieldInspector, NotHandled, PaginatorInspector, FilterInspector, SwaggerAutoSchema\n+from drf_yasg.utils import get_serializer_ref_name\n from rest_framework.fields import ChoiceField\n from rest_framework.relations import ManyRelatedField\n from taggit_serializer.serializers import TagListSerializerField\n \n+from dcim.api.serializers import InterfaceSerializer as DCIMInterfaceSerializer\n+from virtualization.api.serializers import InterfaceSerializer as VirtualMachineInterfaceSerializer\n from extras.api.customfields import CustomFieldsSerializer\n from utilities.api import ChoiceField, SerializedPKRelatedField, WritableNestedSerializer\n \n \n+# this might be ugly, but it limits drf_yasg-specific code to this file\n+DCIMInterfaceSerializer.Meta.ref_name = 'DCIMInterface'\n+VirtualMachineInterfaceSerializer.Meta.ref_name = 'VirtualMachineInterface'\n+\n+\n class NetBoxSwaggerAutoSchema(SwaggerAutoSchema):\n+ writable_serializers = {}\n+\n def get_request_serializer(self):\n serializer = super().get_request_serializer()\n \n@@ -21,7 +31,17 @@\n properties[child_name] = None\n \n if properties:\n- writable_class = type('Writable' + type(serializer).__name__, (type(serializer),), properties)\n+ if type(serializer) not in self.writable_serializers:\n+ writable_name = 'Writable' + type(serializer).__name__\n+ meta_class = getattr(type(serializer), 'Meta', None)\n+ if meta_class:\n+ ref_name = 'Writable' + get_serializer_ref_name(serializer)\n+ writable_meta = type('Meta', (meta_class,), {'ref_name': ref_name})\n+ properties['Meta'] = writable_meta\n+\n+ self.writable_serializers[type(serializer)] = type(writable_name, (type(serializer),), properties)\n+\n+ writable_class = self.writable_serializers[type(serializer)]\n serializer = writable_class()\n \n return serializer\n", "issue": "API Docs - Typo error on /virtualisation/interfaces/\n<!--\r\n Before opening a new issue, please search through the existing issues to\r\n see if your topic has already been addressed. Note that you may need to\r\n remove the \"is:open\" filter from the search bar to include closed issues.\r\n\r\n Check the appropriate type for your issue below by placing an x between the\r\n brackets. For assistance with installation issues, or for any other issues\r\n other than those listed below, please raise your topic for discussion on\r\n our mailing list:\r\n\r\n https://groups.google.com/forum/#!forum/netbox-discuss\r\n\r\n Please note that issues which do not fall under any of the below categories\r\n will be closed. Due to an excessive backlog of feature requests, we are\r\n not currently accepting any proposals which extend NetBox's feature scope.\r\n\r\n Do not prepend any sort of tag to your issue's title. An administrator will\r\n review your issue and assign labels as appropriate.\r\n--->\r\n### Issue type\r\n[ ] Feature request <!-- An enhancement of existing functionality -->\r\n[ ] Bug report <!-- Unexpected or erroneous behavior -->\r\n[X] Documentation <!-- A modification to the documentation -->\r\n\r\n<!--\r\n Please describe the environment in which you are running NetBox. (Be sure\r\n to verify that you are running the latest stable release of NetBox before\r\n submitting a bug report.) 
If you are submitting a bug report and have made\r\n any changes to the code base, please first validate that your bug can be\r\n recreated while running an official release.\r\n-->\r\n### Environment\r\n* Python version: 3.6.5\r\n* NetBox version: 2.3.3\r\n\r\n<!--\r\n BUG REPORTS must include:\r\n * A list of the steps needed for someone else to reproduce the bug\r\n * A description of the expected and observed behavior\r\n * Any relevant error messages (screenshots may also help)\r\n\r\n FEATURE REQUESTS must include:\r\n * A detailed description of the proposed functionality\r\n * A use case for the new feature\r\n * A rough description of any necessary changes to the database schema\r\n * Any relevant third-party libraries which would be needed\r\n-->\r\n### Description\r\nHello,\r\n\r\nOn the API documentation it seems there is a little error.\r\n\r\nOn `/virtualisation/interfaces/` endpoint, method `POST`, the field `device` is marked as mandatory, but the API only respond to the field `virtual_machine`.\r\n\r\nThe error message returned by the API is also misleading :\r\n```\r\n{\r\n \"non_field_errors\": [\r\n \"An interface must belong to either a device or a virtual machine.\"\r\n ]\r\n}\r\n```\r\n\r\nIt seems the error is also present on `/virtualisation/interfaces/{id}` methods `PUT` and `PATCH`.\n", "before_files": [{"content": "from drf_yasg import openapi\nfrom drf_yasg.inspectors import FieldInspector, NotHandled, PaginatorInspector, FilterInspector, SwaggerAutoSchema\nfrom rest_framework.fields import ChoiceField\nfrom rest_framework.relations import ManyRelatedField\nfrom taggit_serializer.serializers import TagListSerializerField\n\nfrom extras.api.customfields import CustomFieldsSerializer\nfrom utilities.api import ChoiceField, SerializedPKRelatedField, WritableNestedSerializer\n\n\nclass NetBoxSwaggerAutoSchema(SwaggerAutoSchema):\n def get_request_serializer(self):\n serializer = super().get_request_serializer()\n\n if serializer is not None and self.method in self.implicit_body_methods:\n properties = {}\n for child_name, child in serializer.fields.items():\n if isinstance(child, (ChoiceField, WritableNestedSerializer)):\n properties[child_name] = None\n elif isinstance(child, ManyRelatedField) and isinstance(child.child_relation, SerializedPKRelatedField):\n properties[child_name] = None\n\n if properties:\n writable_class = type('Writable' + type(serializer).__name__, (type(serializer),), properties)\n serializer = writable_class()\n\n return serializer\n\n\nclass SerializedPKRelatedFieldInspector(FieldInspector):\n def field_to_swagger_object(self, field, swagger_object_type, use_references, **kwargs):\n SwaggerType, ChildSwaggerType = self._get_partial_types(field, swagger_object_type, use_references, **kwargs)\n if isinstance(field, SerializedPKRelatedField):\n return self.probe_field_inspectors(field.serializer(), ChildSwaggerType, use_references)\n\n return NotHandled\n\n\nclass TagListFieldInspector(FieldInspector):\n def field_to_swagger_object(self, field, swagger_object_type, use_references, **kwargs):\n SwaggerType, ChildSwaggerType = self._get_partial_types(field, swagger_object_type, use_references, **kwargs)\n if isinstance(field, TagListSerializerField):\n child_schema = self.probe_field_inspectors(field.child, ChildSwaggerType, use_references)\n return SwaggerType(\n type=openapi.TYPE_ARRAY,\n items=child_schema,\n )\n\n return NotHandled\n\n\nclass CustomChoiceFieldInspector(FieldInspector):\n def field_to_swagger_object(self, field, 
swagger_object_type, use_references, **kwargs):\n # this returns a callable which extracts title, description and other stuff\n # https://drf-yasg.readthedocs.io/en/stable/_modules/drf_yasg/inspectors/base.html#FieldInspector._get_partial_types\n SwaggerType, _ = self._get_partial_types(field, swagger_object_type, use_references, **kwargs)\n\n if isinstance(field, ChoiceField):\n value_schema = openapi.Schema(type=openapi.TYPE_INTEGER)\n\n choices = list(field._choices.keys())\n if set([None] + choices) == {None, True, False}:\n # DeviceType.subdevice_role, Device.face and InterfaceConnection.connection_status all need to be\n # differentiated since they each have subtly different values in their choice keys.\n # - subdevice_role and connection_status are booleans, although subdevice_role includes None\n # - face is an integer set {0, 1} which is easily confused with {False, True}\n schema_type = openapi.TYPE_INTEGER\n if all(type(x) == bool for x in [c for c in choices if c is not None]):\n schema_type = openapi.TYPE_BOOLEAN\n value_schema = openapi.Schema(type=schema_type)\n value_schema['x-nullable'] = True\n\n schema = SwaggerType(type=openapi.TYPE_OBJECT, required=[\"label\", \"value\"], properties={\n \"label\": openapi.Schema(type=openapi.TYPE_STRING),\n \"value\": value_schema\n })\n\n return schema\n\n elif isinstance(field, CustomFieldsSerializer):\n schema = SwaggerType(type=openapi.TYPE_OBJECT)\n return schema\n\n return NotHandled\n\n\nclass NullableBooleanFieldInspector(FieldInspector):\n def process_result(self, result, method_name, obj, **kwargs):\n\n if isinstance(result, openapi.Schema) and isinstance(obj, ChoiceField) and result.type == 'boolean':\n keys = obj.choices.keys()\n if set(keys) == {None, True, False}:\n result['x-nullable'] = True\n result.type = 'boolean'\n\n return result\n\n\nclass IdInFilterInspector(FilterInspector):\n def process_result(self, result, method_name, obj, **kwargs):\n if isinstance(result, list):\n params = [p for p in result if isinstance(p, openapi.Parameter) and p.name == 'id__in']\n for p in params:\n p.type = 'string'\n\n return result\n\n\nclass NullablePaginatorInspector(PaginatorInspector):\n def process_result(self, result, method_name, obj, **kwargs):\n if method_name == 'get_paginated_response' and isinstance(result, openapi.Schema):\n next = result.properties['next']\n if isinstance(next, openapi.Schema):\n next['x-nullable'] = True\n previous = result.properties['previous']\n if isinstance(previous, openapi.Schema):\n previous['x-nullable'] = True\n\n return result\n", "path": "netbox/utilities/custom_inspectors.py"}]} | 2,462 | 475 |
gh_patches_debug_11333 | rasdani/github-patches | git_diff | searx__searx-3479 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Using single apostrophe causes a "search error"
<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF SEARX -->
**Version of Searx, commit number if you are using on master branch and stipulate if you forked Searx**
1.1.0-27-bf0a583f
<!-- If you are running on master branch using git execute this command
in order to fetch the latest commit ID:
```
git log -1
```
If you are using searx-docker then look at the bottom of the Searx page
and check for the version after "Powered by searx"
Please also stipulate if you are using a forked version of Searx and
include a link to the fork source code.
-->
**How did you install Searx?**
searx-docker. Specifically using docker-compose and running ``docker-compose up``.
Compose file:
```yaml
version: '3.3'
services:
searx:
volumes:
- /mnt/storage1/configs/searx:/etc/searx
ports:
- 9999:8080
environment:
- BASE_URL=http://localhost:9999/
image: searx/searx
restart: unless-stopped
```
<!-- Did you install Searx using the official wiki or using searx-docker
or manually by executing the searx/webapp.py file? -->
**What happened?**
Using a single apostrophe in a word (for example, "What's") causes a search error
<!-- A clear and concise description of what the bug is. -->
**How To Reproduce**
Use any single apostrophe in a search query
<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->
**Expected behavior**
Should return results, instead of an error.
<!-- A clear and concise description of what you expected to happen. -->
**Screenshots & Logs**
https://imgur.com/a/E2b4mNX
https://paste.ee/p/e5gS8
**Additional context**
It's also somewhat infuriating that the page jumps from the results page back to the home page when this happens, meaning I have to retype my query (though that's another issue entirely).
</issue>
<code>
[start of searx/plugins/search_operators.py]
1 import shlex
2 import string
3
4 from flask_babel import gettext
5
6 name = gettext("Search operators")
7 description = gettext("""Filter results using hyphen, site: and -site:.
8 Please note that you might get less results with the additional filtering.""")
9 default_on = False
10
11
12 def on_result(request, search, result):
13 q = search.search_query.query
14 qs = shlex.split(q)
15 spitems = [x.lower() for x in qs if ' ' in x]
16 mitems = [x.lower() for x in qs if x.startswith('-')]
17 siteitems = [x.lower() for x in qs if x.startswith('site:')]
18 msiteitems = [x.lower() for x in qs if x.startswith('-site:')]
19 url, title, content = (
20 result["url"].lower(),
21 result["title"].lower(),
22 (result.get("content").lower() if result.get("content") else '')
23 )
24 if all((x not in title or x not in content) for x in spitems):
25 return False
26 if all((x in title or x in content) for x in mitems):
27 return False
28 if all(x not in url for x in siteitems):
29 return False
30 if all(x in url for x in msiteitems):
31 return False
32 return True
33
[end of searx/plugins/search_operators.py]
</code>
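As an aside on the failure mode in the plugin above: `shlex.split` treats a lone apostrophe as an unterminated quote, which is what turns a query like "What's the weather" into the reported search error. A minimal, searx-independent reproduction:

```python
import shlex

try:
    shlex.split("what's the weather")
except ValueError as exc:
    # Prints "No closing quotation"; this is the exception raised from
    # on_result(), which splits the raw user query.
    print(exc)
```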
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/searx/plugins/search_operators.py b/searx/plugins/search_operators.py
--- a/searx/plugins/search_operators.py
+++ b/searx/plugins/search_operators.py
@@ -11,7 +11,11 @@
def on_result(request, search, result):
q = search.search_query.query
- qs = shlex.split(q)
+ # WARN: shlex.quote is designed only for Unix shells and may be vulnerable
+ # to command injection on non-POSIX compliant shells (Windows)
+ # https://docs.python.org/3/library/shlex.html#shlex.quote
+ squote = shlex.quote(q)
+ qs = shlex.split(squote)
spitems = [x.lower() for x in qs if ' ' in x]
mitems = [x.lower() for x in qs if x.startswith('-')]
siteitems = [x.lower() for x in qs if x.startswith('site:')]
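For comparison, the patched flow quotes the raw query before splitting it. Note the trade-off: `shlex.quote` wraps the whole query, so it comes back as a single token instead of raising.

```python
import shlex

q = "what's the weather"
print(shlex.quote(q))                # 'what'"'"'s the weather'
print(shlex.split(shlex.quote(q)))   # ["what's the weather"], one token, no ValueError
```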
| {"golden_diff": "diff --git a/searx/plugins/search_operators.py b/searx/plugins/search_operators.py\n--- a/searx/plugins/search_operators.py\n+++ b/searx/plugins/search_operators.py\n@@ -11,7 +11,11 @@\n \n def on_result(request, search, result):\n q = search.search_query.query\n- qs = shlex.split(q)\n+ # WARN: shlex.quote is designed only for Unix shells and may be vulnerable\n+ # to command injection on non-POSIX compliant shells (Windows)\n+ # https://docs.python.org/3/library/shlex.html#shlex.quote\n+ squote = shlex.quote(q)\n+ qs = shlex.split(squote)\n spitems = [x.lower() for x in qs if ' ' in x]\n mitems = [x.lower() for x in qs if x.startswith('-')]\n siteitems = [x.lower() for x in qs if x.startswith('site:')]\n", "issue": "Using single apostrophe causes a \"search error\"\n<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF SEARX -->\r\n\r\n**Version of Searx, commit number if you are using on master branch and stipulate if you forked Searx**\r\n1.1.0-27-bf0a583f\r\n<!-- If you are running on master branch using git execute this command\r\nin order to fetch the latest commit ID:\r\n```\r\ngit log -1\r\n``` \r\nIf you are using searx-docker then look at the bottom of the Searx page\r\nand check for the version after \"Powered by searx\"\r\n\r\nPlease also stipulate if you are using a forked version of Searx and\r\ninclude a link to the fork source code.\r\n-->\r\n**How did you install Searx?**\r\nsearx-docker. Specifically using docker-compose and running ``docker-compose up``.\r\nCompose file:\r\n```yaml\r\nversion: '3.3'\r\nservices:\r\n searx:\r\n volumes:\r\n - /mnt/storage1/configs/searx:/etc/searx\r\n ports:\r\n - 9999:8080\r\n environment:\r\n - BASE_URL=http://localhost:9999/\r\n image: searx/searx\r\n restart: unless-stopped\r\n```\r\n<!-- Did you install Searx using the official wiki or using searx-docker\r\nor manually by executing the searx/webapp.py file? -->\r\n**What happened?**\r\nUsing a single apostrophe in a word (for example, \"What's\") causes a search error\r\n<!-- A clear and concise description of what the bug is. -->\r\n**How To Reproduce**\r\nUse any single apostrophe in a search query\r\n<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->\r\n**Expected behavior**\r\nShould return results, instead of an error.\r\n<!-- A clear and concise description of what you expected to happen. 
-->\r\n**Screenshots & Logs**\r\nhttps://imgur.com/a/E2b4mNX\r\nhttps://paste.ee/p/e5gS8\r\n\r\n**Additional context**\r\nIt's also somewhat infuriating that it also returns from a result page to the home page when this happens, meaning I have to retype my query (though that's another issue entirely).\n", "before_files": [{"content": "import shlex\nimport string\n\nfrom flask_babel import gettext\n\nname = gettext(\"Search operators\")\ndescription = gettext(\"\"\"Filter results using hyphen, site: and -site:.\nPlease note that you might get less results with the additional filtering.\"\"\")\ndefault_on = False\n\n\ndef on_result(request, search, result):\n q = search.search_query.query\n qs = shlex.split(q)\n spitems = [x.lower() for x in qs if ' ' in x]\n mitems = [x.lower() for x in qs if x.startswith('-')]\n siteitems = [x.lower() for x in qs if x.startswith('site:')]\n msiteitems = [x.lower() for x in qs if x.startswith('-site:')]\n url, title, content = (\n result[\"url\"].lower(),\n result[\"title\"].lower(),\n (result.get(\"content\").lower() if result.get(\"content\") else '')\n )\n if all((x not in title or x not in content) for x in spitems):\n return False\n if all((x in title or x in content) for x in mitems):\n return False\n if all(x not in url for x in siteitems):\n return False\n if all(x in url for x in msiteitems):\n return False\n return True\n", "path": "searx/plugins/search_operators.py"}]} | 1,379 | 212 |
gh_patches_debug_38414 | rasdani/github-patches | git_diff | pymeasure__pymeasure-284 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Rename pyvisa imports
In the upcoming pyvisa version, using `import visa` is being [deprecated](https://github.com/pyvisa/pyvisa/pull/448) to avoid a name clash with another package. We need to fix that in our codebase, too. This could also be behind the version comparison issues like #130
</issue>
<code>
[start of pymeasure/adapters/visa.py]
1 #
2 # This file is part of the PyMeasure package.
3 #
4 # Copyright (c) 2013-2020 PyMeasure Developers
5 #
6 # Permission is hereby granted, free of charge, to any person obtaining a copy
7 # of this software and associated documentation files (the "Software"), to deal
8 # in the Software without restriction, including without limitation the rights
9 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 # copies of the Software, and to permit persons to whom the Software is
11 # furnished to do so, subject to the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be included in
14 # all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 # THE SOFTWARE.
23 #
24
25 import logging
26
27 import copy
28 import visa
29 import numpy as np
30 from pkg_resources import parse_version
31
32 from .adapter import Adapter
33
34 log = logging.getLogger(__name__)
35 log.addHandler(logging.NullHandler())
36
37
38 # noinspection PyPep8Naming,PyUnresolvedReferences
39 class VISAAdapter(Adapter):
40 """ Adapter class for the VISA library using PyVISA to communicate
41 with instruments.
42
43 :param resource: VISA resource name that identifies the address
44 :param visa_library: VisaLibrary Instance, path of the VISA library or VisaLibrary spec string (@py or @ni).
45 if not given, the default for the platform will be used.
46 :param kwargs: Any valid key-word arguments for constructing a PyVISA instrument
47 """
48
49 def __init__(self, resourceName, visa_library='', **kwargs):
50 if not VISAAdapter.has_supported_version():
51 raise NotImplementedError("Please upgrade PyVISA to version 1.8 or later.")
52
53 if isinstance(resourceName, int):
54 resourceName = "GPIB0::%d::INSTR" % resourceName
55 super(VISAAdapter, self).__init__()
56 self.resource_name = resourceName
57 self.manager = visa.ResourceManager(visa_library)
58 safeKeywords = ['resource_name', 'timeout',
59 'chunk_size', 'lock', 'query_delay', 'send_end',
60 'values_format', 'read_termination', 'write_termination']
61 kwargsCopy = copy.deepcopy(kwargs)
62 for key in kwargsCopy:
63 if key not in safeKeywords:
64 kwargs.pop(key)
65 self.connection = self.manager.get_instrument(
66 resourceName,
67 **kwargs
68 )
69
70 @staticmethod
71 def has_supported_version():
72 """ Returns True if the PyVISA version is greater than 1.8 """
73 if hasattr(visa, '__version__'):
74 return parse_version(visa.__version__) >= parse_version('1.8')
75 else:
76 return False
77
78 def __repr__(self):
79 return "<VISAAdapter(resource='%s')>" % self.connection.resourceName
80
81 def write(self, command):
82 """ Writes a command to the instrument
83
84 :param command: SCPI command string to be sent to the instrument
85 """
86 self.connection.write(command)
87
88 def read(self):
89 """ Reads until the buffer is empty and returns the resulting
90 ASCII response
91
92 :returns: String ASCII response of the instrument.
93 """
94 return self.connection.read()
95
96 def read_bytes(self, size):
97 """ Reads specified number of bytes from the buffer and returns
98 the resulting ASCII response
99
100 :param size: Number of bytes to read from the buffer
101 :returns: String ASCII response of the instrument.
102 """
103 return self.connection.read_bytes(size)
104
105 def ask(self, command):
106 """ Writes the command to the instrument and returns the resulting
107 ASCII response
108
109 :param command: SCPI command string to be sent to the instrument
110 :returns: String ASCII response of the instrument
111 """
112 return self.connection.query(command)
113
114 def ask_values(self, command):
115 """ Writes a command to the instrument and returns a list of formatted
116 values from the result. The format of the return is configurated by
117 self.config().
118
119 :param command: SCPI command to be sent to the instrument
120 :returns: Formatted response of the instrument.
121 """
122 return self.connection.query_values(command)
123
124 def binary_values(self, command, header_bytes=0, dtype=np.float32):
125 """ Returns a numpy array from a query for binary data
126
127 :param command: SCPI command to be sent to the instrument
128 :param header_bytes: Integer number of bytes to ignore in header
129 :param dtype: The NumPy data type to format the values with
130 :returns: NumPy array of values
131 """
132 self.connection.write(command)
133 binary = self.connection.read_raw()
134 header, data = binary[:header_bytes], binary[header_bytes:]
135 return np.fromstring(data, dtype=dtype)
136
137 def config(self, is_binary=False, datatype='str',
138 container=np.array, converter='s',
139 separator=',', is_big_endian=False):
140 """ Configurate the format of data transfer to and from the instrument.
141
142 :param is_binary: If True, data is in binary format, otherwise ASCII.
143 :param datatype: Data type.
144 :param container: Return format. Any callable/type that takes an iterable.
145 :param converter: String converter, used in dealing with ASCII data.
146 :param separator: Delimiter of a series of data in ASCII.
147 :param is_big_endian: Endianness.
148 """
149 self.connection.values_format.is_binary = is_binary
150 self.connection.values_format.datatype = datatype
151 self.connection.values_format.container = container
152 self.connection.values_format.converter = converter
153 self.connection.values_format.separator = separator
154 self.connection.values_format.is_big_endian = is_big_endian
155
156 def wait_for_srq(self, timeout=25, delay=0.1):
157 """ Blocks until a SRQ, and leaves the bit high
158
159 :param timeout: Timeout duration in seconds
160 :param delay: Time delay between checking SRQ in seconds
161 """
162 self.connection.wait_for_srq(timeout * 1000)
163
[end of pymeasure/adapters/visa.py]
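The same rename applies to the version guard in the adapter listed above; sketched standalone against the renamed module:

```python
import pyvisa
from pkg_resources import parse_version

def has_supported_version() -> bool:
    # Mirrors VISAAdapter.has_supported_version(), but checks `pyvisa` directly.
    return hasattr(pyvisa, "__version__") and parse_version(
        pyvisa.__version__
    ) >= parse_version("1.8")
```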
[start of pymeasure/instruments/resources.py]
1 #
2 # This file is part of the PyMeasure package.
3 #
4 # Copyright (c) 2013-2020 PyMeasure Developers
5 #
6 # Permission is hereby granted, free of charge, to any person obtaining a copy
7 # of this software and associated documentation files (the "Software"), to deal
8 # in the Software without restriction, including without limitation the rights
9 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 # copies of the Software, and to permit persons to whom the Software is
11 # furnished to do so, subject to the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be included in
14 # all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
22 # THE SOFTWARE.
23 #
24
25 import visa
26
27
28 def list_resources():
29 """
30 Prints the available resources, and returns a list of VISA resource names
31
32 .. code-block:: python
33
34 resources = list_resources()
35 #prints (e.g.)
36 #0 : GPIB0::22::INSTR : Agilent Technologies,34410A,******
37 #1 : GPIB0::26::INSTR : Keithley Instruments Inc., Model 2612, *****
38 dmm = Agilent34410(resources[0])
39
40 """
41 rm = visa.ResourceManager()
42 instrs = rm.list_resources()
43 for n, instr in enumerate(instrs):
44 # trying to catch errors in comunication
45 try:
46 res = rm.open_resource(instr)
47 # try to avoid errors from *idn?
48 try:
49 # noinspection PyUnresolvedReferences
50 idn = res.ask('*idn?')[:-1]
51 except visa.Error:
52 idn = "Not known"
53 finally:
54 res.close()
55 print(n, ":", instr, ":", idn)
56 except visa.VisaIOError as e:
57 print(n, ":", instr, ":", "Visa IO Error: check connections")
58 print(e)
59 rm.close()
60 return instrs
61
[end of pymeasure/instruments/resources.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pymeasure/adapters/visa.py b/pymeasure/adapters/visa.py
--- a/pymeasure/adapters/visa.py
+++ b/pymeasure/adapters/visa.py
@@ -25,7 +25,7 @@
import logging
import copy
-import visa
+import pyvisa
import numpy as np
from pkg_resources import parse_version
@@ -54,7 +54,7 @@
resourceName = "GPIB0::%d::INSTR" % resourceName
super(VISAAdapter, self).__init__()
self.resource_name = resourceName
- self.manager = visa.ResourceManager(visa_library)
+ self.manager = pyvisa.ResourceManager(visa_library)
safeKeywords = ['resource_name', 'timeout',
'chunk_size', 'lock', 'query_delay', 'send_end',
'values_format', 'read_termination', 'write_termination']
@@ -70,8 +70,8 @@
@staticmethod
def has_supported_version():
""" Returns True if the PyVISA version is greater than 1.8 """
- if hasattr(visa, '__version__'):
- return parse_version(visa.__version__) >= parse_version('1.8')
+ if hasattr(pyvisa, '__version__'):
+ return parse_version(pyvisa.__version__) >= parse_version('1.8')
else:
return False
diff --git a/pymeasure/instruments/resources.py b/pymeasure/instruments/resources.py
--- a/pymeasure/instruments/resources.py
+++ b/pymeasure/instruments/resources.py
@@ -22,7 +22,7 @@
# THE SOFTWARE.
#
-import visa
+import pyvisa
def list_resources():
@@ -38,7 +38,7 @@
dmm = Agilent34410(resources[0])
"""
- rm = visa.ResourceManager()
+ rm = pyvisa.ResourceManager()
instrs = rm.list_resources()
for n, instr in enumerate(instrs):
# trying to catch errors in comunication
@@ -48,12 +48,12 @@
try:
# noinspection PyUnresolvedReferences
idn = res.ask('*idn?')[:-1]
- except visa.Error:
+ except pyvisa.Error:
idn = "Not known"
finally:
res.close()
print(n, ":", instr, ":", idn)
- except visa.VisaIOError as e:
+ except pyvisa.VisaIOError as e:
print(n, ":", instr, ":", "Visa IO Error: check connections")
print(e)
rm.close()
| {"golden_diff": "diff --git a/pymeasure/adapters/visa.py b/pymeasure/adapters/visa.py\n--- a/pymeasure/adapters/visa.py\n+++ b/pymeasure/adapters/visa.py\n@@ -25,7 +25,7 @@\n import logging\n \n import copy\n-import visa\n+import pyvisa\n import numpy as np\n from pkg_resources import parse_version\n \n@@ -54,7 +54,7 @@\n resourceName = \"GPIB0::%d::INSTR\" % resourceName\n super(VISAAdapter, self).__init__()\n self.resource_name = resourceName\n- self.manager = visa.ResourceManager(visa_library)\n+ self.manager = pyvisa.ResourceManager(visa_library)\n safeKeywords = ['resource_name', 'timeout',\n 'chunk_size', 'lock', 'query_delay', 'send_end',\n 'values_format', 'read_termination', 'write_termination']\n@@ -70,8 +70,8 @@\n @staticmethod\n def has_supported_version():\n \"\"\" Returns True if the PyVISA version is greater than 1.8 \"\"\"\n- if hasattr(visa, '__version__'):\n- return parse_version(visa.__version__) >= parse_version('1.8')\n+ if hasattr(pyvisa, '__version__'):\n+ return parse_version(pyvisa.__version__) >= parse_version('1.8')\n else:\n return False\n \ndiff --git a/pymeasure/instruments/resources.py b/pymeasure/instruments/resources.py\n--- a/pymeasure/instruments/resources.py\n+++ b/pymeasure/instruments/resources.py\n@@ -22,7 +22,7 @@\n # THE SOFTWARE.\n #\n \n-import visa\n+import pyvisa\n \n \n def list_resources():\n@@ -38,7 +38,7 @@\n dmm = Agilent34410(resources[0])\n \n \"\"\"\n- rm = visa.ResourceManager()\n+ rm = pyvisa.ResourceManager()\n instrs = rm.list_resources()\n for n, instr in enumerate(instrs):\n # trying to catch errors in comunication\n@@ -48,12 +48,12 @@\n try:\n # noinspection PyUnresolvedReferences\n idn = res.ask('*idn?')[:-1]\n- except visa.Error:\n+ except pyvisa.Error:\n idn = \"Not known\"\n finally:\n res.close()\n print(n, \":\", instr, \":\", idn)\n- except visa.VisaIOError as e:\n+ except pyvisa.VisaIOError as e:\n print(n, \":\", instr, \":\", \"Visa IO Error: check connections\")\n print(e)\n rm.close()\n", "issue": "Rename pyvisa imports\nIn the upcoming pyvisa version, using `import visa` is being [deprecated](https://github.com/pyvisa/pyvisa/pull/448) to avoid a name clash with another package. We need to fix that in our codebase, too. This could also be behind the version comparison issues like #130\n", "before_files": [{"content": "#\n# This file is part of the PyMeasure package.\n#\n# Copyright (c) 2013-2020 PyMeasure Developers\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n#\n\nimport logging\n\nimport copy\nimport visa\nimport numpy as np\nfrom pkg_resources import parse_version\n\nfrom .adapter import Adapter\n\nlog = logging.getLogger(__name__)\nlog.addHandler(logging.NullHandler())\n\n\n# noinspection PyPep8Naming,PyUnresolvedReferences\nclass VISAAdapter(Adapter):\n \"\"\" Adapter class for the VISA library using PyVISA to communicate\n with instruments.\n\n :param resource: VISA resource name that identifies the address\n :param visa_library: VisaLibrary Instance, path of the VISA library or VisaLibrary spec string (@py or @ni).\n if not given, the default for the platform will be used.\n :param kwargs: Any valid key-word arguments for constructing a PyVISA instrument\n \"\"\"\n\n def __init__(self, resourceName, visa_library='', **kwargs):\n if not VISAAdapter.has_supported_version():\n raise NotImplementedError(\"Please upgrade PyVISA to version 1.8 or later.\")\n\n if isinstance(resourceName, int):\n resourceName = \"GPIB0::%d::INSTR\" % resourceName\n super(VISAAdapter, self).__init__()\n self.resource_name = resourceName\n self.manager = visa.ResourceManager(visa_library)\n safeKeywords = ['resource_name', 'timeout',\n 'chunk_size', 'lock', 'query_delay', 'send_end',\n 'values_format', 'read_termination', 'write_termination']\n kwargsCopy = copy.deepcopy(kwargs)\n for key in kwargsCopy:\n if key not in safeKeywords:\n kwargs.pop(key)\n self.connection = self.manager.get_instrument(\n resourceName,\n **kwargs\n )\n\n @staticmethod\n def has_supported_version():\n \"\"\" Returns True if the PyVISA version is greater than 1.8 \"\"\"\n if hasattr(visa, '__version__'):\n return parse_version(visa.__version__) >= parse_version('1.8')\n else:\n return False\n\n def __repr__(self):\n return \"<VISAAdapter(resource='%s')>\" % self.connection.resourceName\n\n def write(self, command):\n \"\"\" Writes a command to the instrument\n\n :param command: SCPI command string to be sent to the instrument\n \"\"\"\n self.connection.write(command)\n\n def read(self):\n \"\"\" Reads until the buffer is empty and returns the resulting\n ASCII response\n\n :returns: String ASCII response of the instrument.\n \"\"\"\n return self.connection.read()\n\n def read_bytes(self, size):\n \"\"\" Reads specified number of bytes from the buffer and returns\n the resulting ASCII response\n\n :param size: Number of bytes to read from the buffer\n :returns: String ASCII response of the instrument.\n \"\"\"\n return self.connection.read_bytes(size)\n\n def ask(self, command):\n \"\"\" Writes the command to the instrument and returns the resulting\n ASCII response\n\n :param command: SCPI command string to be sent to the instrument\n :returns: String ASCII response of the instrument\n \"\"\"\n return self.connection.query(command)\n\n def ask_values(self, command):\n \"\"\" Writes a command to the instrument and returns a list of formatted\n values from the result. 
The format of the return is configurated by\n self.config().\n\n :param command: SCPI command to be sent to the instrument\n :returns: Formatted response of the instrument.\n \"\"\"\n return self.connection.query_values(command)\n\n def binary_values(self, command, header_bytes=0, dtype=np.float32):\n \"\"\" Returns a numpy array from a query for binary data\n\n :param command: SCPI command to be sent to the instrument\n :param header_bytes: Integer number of bytes to ignore in header\n :param dtype: The NumPy data type to format the values with\n :returns: NumPy array of values\n \"\"\"\n self.connection.write(command)\n binary = self.connection.read_raw()\n header, data = binary[:header_bytes], binary[header_bytes:]\n return np.fromstring(data, dtype=dtype)\n\n def config(self, is_binary=False, datatype='str',\n container=np.array, converter='s',\n separator=',', is_big_endian=False):\n \"\"\" Configurate the format of data transfer to and from the instrument.\n\n :param is_binary: If True, data is in binary format, otherwise ASCII.\n :param datatype: Data type.\n :param container: Return format. Any callable/type that takes an iterable.\n :param converter: String converter, used in dealing with ASCII data.\n :param separator: Delimiter of a series of data in ASCII.\n :param is_big_endian: Endianness.\n \"\"\"\n self.connection.values_format.is_binary = is_binary\n self.connection.values_format.datatype = datatype\n self.connection.values_format.container = container\n self.connection.values_format.converter = converter\n self.connection.values_format.separator = separator\n self.connection.values_format.is_big_endian = is_big_endian\n\n def wait_for_srq(self, timeout=25, delay=0.1):\n \"\"\" Blocks until a SRQ, and leaves the bit high\n\n :param timeout: Timeout duration in seconds\n :param delay: Time delay between checking SRQ in seconds\n \"\"\"\n self.connection.wait_for_srq(timeout * 1000)\n", "path": "pymeasure/adapters/visa.py"}, {"content": "#\n# This file is part of the PyMeasure package.\n#\n# Copyright (c) 2013-2020 PyMeasure Developers\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n# THE SOFTWARE.\n#\n\nimport visa\n\n\ndef list_resources():\n \"\"\"\n Prints the available resources, and returns a list of VISA resource names\n \n .. 
code-block:: python\n\n resources = list_resources()\n #prints (e.g.)\n #0 : GPIB0::22::INSTR : Agilent Technologies,34410A,******\n #1 : GPIB0::26::INSTR : Keithley Instruments Inc., Model 2612, *****\n dmm = Agilent34410(resources[0])\n \n \"\"\"\n rm = visa.ResourceManager()\n instrs = rm.list_resources()\n for n, instr in enumerate(instrs):\n # trying to catch errors in comunication\n try:\n res = rm.open_resource(instr)\n # try to avoid errors from *idn?\n try:\n # noinspection PyUnresolvedReferences\n idn = res.ask('*idn?')[:-1]\n except visa.Error:\n idn = \"Not known\"\n finally:\n res.close()\n print(n, \":\", instr, \":\", idn)\n except visa.VisaIOError as e:\n print(n, \":\", instr, \":\", \"Visa IO Error: check connections\")\n print(e)\n rm.close()\n return instrs\n", "path": "pymeasure/instruments/resources.py"}]} | 3,047 | 580 |
gh_patches_debug_426 | rasdani/github-patches | git_diff | nautobot__nautobot-3981 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
2.0: Tag string representation is incorrect
### Environment
* Nautobot version (Docker tag too if applicable): `next`, e13883b7b8a4f44bca0c40d8074dcf8f82e544e6
### Steps to Reproduce
1. Create a Tag and associate it to any model
2. View the legacy-UI list view or detail view of that model
<!-- What did you expect to happen? -->
### Expected Behavior
Tag name to be displayed
<!-- What happened instead? -->
### Observed Behavior
`Tag object (<uuid>)` is displayed.

Appears to be a regression due to #3914.
</issue>
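The symptom is Django's default `Model.__str__`, which renders as `<ClassName> object (<pk>)` for any model that does not override it. A stripped-down illustration of the fix (not the actual Nautobot class, which also mixes in several base classes):

```python
from django.db import models

class Tag(models.Model):
    name = models.CharField(max_length=100, unique=True)

    def __str__(self) -> str:
        # Without this override, templates rendering {{ tag }} fall back to
        # "Tag object (<uuid>)", which is exactly what the report shows.
        return self.name
```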
<code>
[start of nautobot/extras/models/tags.py]
1 from django.contrib.contenttypes.models import ContentType
2 from django.db import models
3 from taggit.models import GenericUUIDTaggedItemBase
4
5 from nautobot.core.choices import ColorChoices
6 from nautobot.core.models import BaseManager, BaseModel
7 from nautobot.core.models.fields import ColorField
8 from nautobot.core.models.querysets import RestrictedQuerySet
9 from nautobot.extras.models import ChangeLoggedModel, CustomFieldModel
10 from nautobot.extras.models.mixins import NotesMixin
11 from nautobot.extras.models.relationships import RelationshipModel
12 from nautobot.extras.utils import extras_features, TaggableClassesQuery
13
14
15 #
16 # Tags
17 #
18
19
20 class TagQuerySet(RestrictedQuerySet):
21 """Queryset for `Tags` objects."""
22
23 def get_for_model(self, model):
24 """
25 Return all `Tags` assigned to the given model.
26 """
27 return self.filter(content_types__model=model._meta.model_name, content_types__app_label=model._meta.app_label)
28
29
30 # Tag *should* be a `NameColorContentTypesModel` but that way lies circular import purgatory. Sigh.
31 @extras_features(
32 "custom_validators",
33 )
34 class Tag(BaseModel, ChangeLoggedModel, CustomFieldModel, RelationshipModel, NotesMixin):
35 name = models.CharField(max_length=100, unique=True)
36 content_types = models.ManyToManyField(
37 to=ContentType,
38 related_name="tags",
39 limit_choices_to=TaggableClassesQuery(),
40 )
41 color = ColorField(default=ColorChoices.COLOR_GREY)
42 description = models.CharField(
43 max_length=200,
44 blank=True,
45 )
46
47 objects = BaseManager.from_queryset(TagQuerySet)()
48
49 class Meta:
50 ordering = ["name"]
51
52 def validate_content_types_removal(self, content_types_id):
53 """Validate content_types to be removed are not tagged to a model"""
54 errors = {}
55
56 removed_content_types = self.content_types.exclude(id__in=content_types_id)
57
58 # check if tag is assigned to any of the removed content_types
59 for content_type in removed_content_types:
60 model = content_type.model_class()
61 if model.objects.filter(tags=self).exists():
62 errors.setdefault("content_types", []).append(
63 f"Unable to remove {model._meta.label_lower}. Dependent objects were found."
64 )
65
66 return errors
67
68
69 class TaggedItem(BaseModel, GenericUUIDTaggedItemBase):
70 tag = models.ForeignKey(to=Tag, related_name="%(app_label)s_%(class)s_items", on_delete=models.CASCADE)
71
72 class Meta:
73 index_together = ("content_type", "object_id")
74 unique_together = [["content_type", "object_id", "tag"]]
75
[end of nautobot/extras/models/tags.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/nautobot/extras/models/tags.py b/nautobot/extras/models/tags.py
--- a/nautobot/extras/models/tags.py
+++ b/nautobot/extras/models/tags.py
@@ -46,6 +46,9 @@
objects = BaseManager.from_queryset(TagQuerySet)()
+ def __str__(self):
+ return self.name
+
class Meta:
ordering = ["name"]
| {"golden_diff": "diff --git a/nautobot/extras/models/tags.py b/nautobot/extras/models/tags.py\n--- a/nautobot/extras/models/tags.py\n+++ b/nautobot/extras/models/tags.py\n@@ -46,6 +46,9 @@\n \n objects = BaseManager.from_queryset(TagQuerySet)()\n \n+ def __str__(self):\n+ return self.name\n+\n class Meta:\n ordering = [\"name\"]\n", "issue": "2.0: Tag string representation is incorrect\n### Environment\r\n* Nautobot version (Docker tag too if applicable): `next`, e13883b7b8a4f44bca0c40d8074dcf8f82e544e6\r\n\r\n### Steps to Reproduce\r\n1. Create a Tag and associate it to any model\r\n2. View the legacy-UI list view or detail view of that model\r\n\r\n<!-- What did you expect to happen? -->\r\n### Expected Behavior\r\n\r\nTag name to be displayed\r\n\r\n<!-- What happened instead? -->\r\n### Observed Behavior\r\n\r\n`Tag object (<uuid>)` is displayed.\r\n\r\n\r\n\r\nAppears to be a regression due to #3914.\r\n\n", "before_files": [{"content": "from django.contrib.contenttypes.models import ContentType\nfrom django.db import models\nfrom taggit.models import GenericUUIDTaggedItemBase\n\nfrom nautobot.core.choices import ColorChoices\nfrom nautobot.core.models import BaseManager, BaseModel\nfrom nautobot.core.models.fields import ColorField\nfrom nautobot.core.models.querysets import RestrictedQuerySet\nfrom nautobot.extras.models import ChangeLoggedModel, CustomFieldModel\nfrom nautobot.extras.models.mixins import NotesMixin\nfrom nautobot.extras.models.relationships import RelationshipModel\nfrom nautobot.extras.utils import extras_features, TaggableClassesQuery\n\n\n#\n# Tags\n#\n\n\nclass TagQuerySet(RestrictedQuerySet):\n \"\"\"Queryset for `Tags` objects.\"\"\"\n\n def get_for_model(self, model):\n \"\"\"\n Return all `Tags` assigned to the given model.\n \"\"\"\n return self.filter(content_types__model=model._meta.model_name, content_types__app_label=model._meta.app_label)\n\n\n# Tag *should* be a `NameColorContentTypesModel` but that way lies circular import purgatory. Sigh.\n@extras_features(\n \"custom_validators\",\n)\nclass Tag(BaseModel, ChangeLoggedModel, CustomFieldModel, RelationshipModel, NotesMixin):\n name = models.CharField(max_length=100, unique=True)\n content_types = models.ManyToManyField(\n to=ContentType,\n related_name=\"tags\",\n limit_choices_to=TaggableClassesQuery(),\n )\n color = ColorField(default=ColorChoices.COLOR_GREY)\n description = models.CharField(\n max_length=200,\n blank=True,\n )\n\n objects = BaseManager.from_queryset(TagQuerySet)()\n\n class Meta:\n ordering = [\"name\"]\n\n def validate_content_types_removal(self, content_types_id):\n \"\"\"Validate content_types to be removed are not tagged to a model\"\"\"\n errors = {}\n\n removed_content_types = self.content_types.exclude(id__in=content_types_id)\n\n # check if tag is assigned to any of the removed content_types\n for content_type in removed_content_types:\n model = content_type.model_class()\n if model.objects.filter(tags=self).exists():\n errors.setdefault(\"content_types\", []).append(\n f\"Unable to remove {model._meta.label_lower}. Dependent objects were found.\"\n )\n\n return errors\n\n\nclass TaggedItem(BaseModel, GenericUUIDTaggedItemBase):\n tag = models.ForeignKey(to=Tag, related_name=\"%(app_label)s_%(class)s_items\", on_delete=models.CASCADE)\n\n class Meta:\n index_together = (\"content_type\", \"object_id\")\n unique_together = [[\"content_type\", \"object_id\", \"tag\"]]\n", "path": "nautobot/extras/models/tags.py"}]} | 1,474 | 95 |
gh_patches_debug_30621 | rasdani/github-patches | git_diff | pyodide__pyodide-3483 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
On first call, `pyodide config get emscripten_version` returns `Downloading xbuild environment Installing xbuild environment 3.1.27` instead of `3.1.27`
## 🐛 Bug
In [the docs for out-of-tree builds](https://pyodide.org/en/stable/development/building-and-testing-packages.html#building-and-testing-packages-out-of-tree) it gives this code snippet:
```bash
pip install pyodide-build
git clone https://github.com/emscripten-core/emsdk.git
cd emsdk
PYODIDE_EMSCRIPTEN_VERSION=$(pyodide config get emscripten_version)
./emsdk install ${PYODIDE_EMSCRIPTEN_VERSION}
./emsdk activate ${PYODIDE_EMSCRIPTEN_VERSION}
source emsdk_env.sh
```
But this doesn't work because on the first call, `pyodide config get emscripten_version` outputs this:
```
Downloading xbuild environment
Installing xbuild environment
3.1.27
```
On subsequent calls it returns `3.1.27`.
### To Reproduce
See above.
### Expected behavior
Calls to `pyodide config get emscripten_version` should only ever output the version string such that this command can be reliably used in build automation.
### Environment
- Pyodide Version<!-- (e.g. 1.8.1) -->: Pyodide CLI Version: 0.2.2
### Additional context
As a workaround for build scripts, `pyodide config get emscripten_version` can be called once before actually using it.
</issue>
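The root cause is that the one-time xbuild-environment bootstrap prints progress messages to the same stdout the shell substitution captures. A standalone illustration (not the actual pyodide-build code) of keeping that chatter out of a command's output:

```python
from contextlib import redirect_stdout
from io import StringIO

def noisy_bootstrap() -> None:
    print("Downloading xbuild environment")
    print("Installing xbuild environment")

log = StringIO()
with redirect_stdout(log):   # progress chatter is captured, not printed
    noisy_bootstrap()

print("3.1.27")              # only the requested value reaches stdout
```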
<code>
[start of pyodide-build/pyodide_build/cli/config.py]
1 import typer
2
3 from ..common import get_make_environment_vars
4 from ..out_of_tree.utils import initialize_pyodide_root
5
6 app = typer.Typer(help="Manage config variables used in pyodide")
7
8
9 # A dictionary of config variables {key: env_var_in_makefile}
10 PYODIDE_CONFIGS = {
11 "emscripten_version": "PYODIDE_EMSCRIPTEN_VERSION",
12 "python_version": "PYVERSION",
13 }
14
15
16 @app.callback(no_args_is_help=True) # type: ignore[misc]
17 def callback() -> None:
18 return
19
20
21 def _get_configs() -> dict[str, str]:
22 initialize_pyodide_root()
23
24 configs: dict[str, str] = get_make_environment_vars()
25
26 configs_filtered = {k: configs[v] for k, v in PYODIDE_CONFIGS.items()}
27 return configs_filtered
28
29
30 @app.command("list")
31 def list_config():
32 """
33 List config variables used in pyodide
34 """
35 configs = _get_configs()
36
37 for k, v in configs.items():
38 typer.echo(f"{k}={v}")
39
40
41 @app.command("get") # type: ignore[misc]
42 def get_config(
43 config_var: str = typer.Argument(
44 ..., help="A config variable to get. Use `list` to see all possible values."
45 ),
46 ) -> None:
47 """
48 Get a value of a single config variable used in pyodide
49 """
50 configs = _get_configs()
51
52 if config_var not in configs:
53 typer.echo(f"Config variable {config_var} not found.")
54 typer.Exit(1)
55
56 typer.echo(configs[config_var])
57
[end of pyodide-build/pyodide_build/cli/config.py]
[start of pyodide-build/pyodide_build/out_of_tree/utils.py]
1 import os
2 from pathlib import Path
3
4 from ..common import search_pyodide_root
5
6
7 def ensure_env_installed(env: Path) -> None:
8 if env.exists():
9 return
10 from .. import __version__
11 from ..install_xbuildenv import download_xbuildenv, install_xbuildenv
12
13 if "dev" in __version__:
14 raise RuntimeError(
15 "To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT"
16 )
17
18 download_xbuildenv(__version__, env)
19 install_xbuildenv(__version__, env)
20
21
22 def initialize_pyodide_root() -> None:
23 if "PYODIDE_ROOT" in os.environ:
24 return
25 try:
26 os.environ["PYODIDE_ROOT"] = str(search_pyodide_root(__file__))
27 return
28 except FileNotFoundError:
29 pass
30 env = Path(".pyodide-xbuildenv")
31 os.environ["PYODIDE_ROOT"] = str(env / "xbuildenv/pyodide-root")
32 ensure_env_installed(env)
33
[end of pyodide-build/pyodide_build/out_of_tree/utils.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pyodide-build/pyodide_build/cli/config.py b/pyodide-build/pyodide_build/cli/config.py
--- a/pyodide-build/pyodide_build/cli/config.py
+++ b/pyodide-build/pyodide_build/cli/config.py
@@ -19,7 +19,7 @@
def _get_configs() -> dict[str, str]:
- initialize_pyodide_root()
+ initialize_pyodide_root(quiet=True)
configs: dict[str, str] = get_make_environment_vars()
diff --git a/pyodide-build/pyodide_build/out_of_tree/utils.py b/pyodide-build/pyodide_build/out_of_tree/utils.py
--- a/pyodide-build/pyodide_build/out_of_tree/utils.py
+++ b/pyodide-build/pyodide_build/out_of_tree/utils.py
@@ -1,10 +1,12 @@
import os
+from contextlib import ExitStack, redirect_stdout
+from io import StringIO
from pathlib import Path
from ..common import search_pyodide_root
-def ensure_env_installed(env: Path) -> None:
+def ensure_env_installed(env: Path, *, quiet: bool = False) -> None:
if env.exists():
return
from .. import __version__
@@ -15,11 +17,16 @@
"To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT"
)
- download_xbuildenv(__version__, env)
- install_xbuildenv(__version__, env)
+ with ExitStack() as stack:
+ if quiet:
+ # Prevent writes to stdout
+ stack.enter_context(redirect_stdout(StringIO()))
+ download_xbuildenv(__version__, env)
+ install_xbuildenv(__version__, env)
-def initialize_pyodide_root() -> None:
+
+def initialize_pyodide_root(*, quiet: bool = False) -> None:
if "PYODIDE_ROOT" in os.environ:
return
try:
@@ -29,4 +36,4 @@
pass
env = Path(".pyodide-xbuildenv")
os.environ["PYODIDE_ROOT"] = str(env / "xbuildenv/pyodide-root")
- ensure_env_installed(env)
+ ensure_env_installed(env, quiet=quiet)
| {"golden_diff": "diff --git a/pyodide-build/pyodide_build/cli/config.py b/pyodide-build/pyodide_build/cli/config.py\n--- a/pyodide-build/pyodide_build/cli/config.py\n+++ b/pyodide-build/pyodide_build/cli/config.py\n@@ -19,7 +19,7 @@\n \n \n def _get_configs() -> dict[str, str]:\n- initialize_pyodide_root()\n+ initialize_pyodide_root(quiet=True)\n \n configs: dict[str, str] = get_make_environment_vars()\n \ndiff --git a/pyodide-build/pyodide_build/out_of_tree/utils.py b/pyodide-build/pyodide_build/out_of_tree/utils.py\n--- a/pyodide-build/pyodide_build/out_of_tree/utils.py\n+++ b/pyodide-build/pyodide_build/out_of_tree/utils.py\n@@ -1,10 +1,12 @@\n import os\n+from contextlib import ExitStack, redirect_stdout\n+from io import StringIO\n from pathlib import Path\n \n from ..common import search_pyodide_root\n \n \n-def ensure_env_installed(env: Path) -> None:\n+def ensure_env_installed(env: Path, *, quiet: bool = False) -> None:\n if env.exists():\n return\n from .. import __version__\n@@ -15,11 +17,16 @@\n \"To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT\"\n )\n \n- download_xbuildenv(__version__, env)\n- install_xbuildenv(__version__, env)\n+ with ExitStack() as stack:\n+ if quiet:\n+ # Prevent writes to stdout\n+ stack.enter_context(redirect_stdout(StringIO()))\n \n+ download_xbuildenv(__version__, env)\n+ install_xbuildenv(__version__, env)\n \n-def initialize_pyodide_root() -> None:\n+\n+def initialize_pyodide_root(*, quiet: bool = False) -> None:\n if \"PYODIDE_ROOT\" in os.environ:\n return\n try:\n@@ -29,4 +36,4 @@\n pass\n env = Path(\".pyodide-xbuildenv\")\n os.environ[\"PYODIDE_ROOT\"] = str(env / \"xbuildenv/pyodide-root\")\n- ensure_env_installed(env)\n+ ensure_env_installed(env, quiet=quiet)\n", "issue": "On first call, `pyodide config get emscripten_version` returns `Downloading xbuild environment Installing xbuild environment 3.1.27` instead of `3.1.27`\n## \ud83d\udc1b Bug\r\n\r\nIn [the docs for out-of-tree builds](https://pyodide.org/en/stable/development/building-and-testing-packages.html#building-and-testing-packages-out-of-tree) it gives this code snippet:\r\n\r\n```bash\r\npip install pyodide-build\r\n\r\ngit clone https://github.com/emscripten-core/emsdk.git\r\ncd emsdk\r\n\r\nPYODIDE_EMSCRIPTEN_VERSION=$(pyodide config get emscripten_version)\r\n./emsdk install ${PYODIDE_EMSCRIPTEN_VERSION}\r\n./emsdk activate ${PYODIDE_EMSCRIPTEN_VERSION}\r\nsource emsdk_env.sh\r\n```\r\nBut this doesn't work because on the first call, `pyodide config get emscripten_version` outputs this:\r\n```\r\nDownloading xbuild environment\r\nInstalling xbuild environment\r\n3.1.27\r\n```\r\nOn subsequent calls it returns `3.1.27`.\r\n\r\n### To Reproduce\r\n\r\nSee above.\r\n\r\n### Expected behavior\r\n\r\nCalls to `pyodide config get emscripten_version` should only ever output the version string such that this command can be reliably used in build automation.\r\n\r\n### Environment\r\n\r\n- Pyodide Version<!-- (e.g. 
1.8.1) -->: Pyodide CLI Version: 0.2.2\r\n\r\n### Additional context\r\n\r\nAs a workaround for build scripts, `pyodide config get emscripten_version` can be called once before actually using it.\r\n\n", "before_files": [{"content": "import typer\n\nfrom ..common import get_make_environment_vars\nfrom ..out_of_tree.utils import initialize_pyodide_root\n\napp = typer.Typer(help=\"Manage config variables used in pyodide\")\n\n\n# A dictionary of config variables {key: env_var_in_makefile}\nPYODIDE_CONFIGS = {\n \"emscripten_version\": \"PYODIDE_EMSCRIPTEN_VERSION\",\n \"python_version\": \"PYVERSION\",\n}\n\n\[email protected](no_args_is_help=True) # type: ignore[misc]\ndef callback() -> None:\n return\n\n\ndef _get_configs() -> dict[str, str]:\n initialize_pyodide_root()\n\n configs: dict[str, str] = get_make_environment_vars()\n\n configs_filtered = {k: configs[v] for k, v in PYODIDE_CONFIGS.items()}\n return configs_filtered\n\n\[email protected](\"list\")\ndef list_config():\n \"\"\"\n List config variables used in pyodide\n \"\"\"\n configs = _get_configs()\n\n for k, v in configs.items():\n typer.echo(f\"{k}={v}\")\n\n\[email protected](\"get\") # type: ignore[misc]\ndef get_config(\n config_var: str = typer.Argument(\n ..., help=\"A config variable to get. Use `list` to see all possible values.\"\n ),\n) -> None:\n \"\"\"\n Get a value of a single config variable used in pyodide\n \"\"\"\n configs = _get_configs()\n\n if config_var not in configs:\n typer.echo(f\"Config variable {config_var} not found.\")\n typer.Exit(1)\n\n typer.echo(configs[config_var])\n", "path": "pyodide-build/pyodide_build/cli/config.py"}, {"content": "import os\nfrom pathlib import Path\n\nfrom ..common import search_pyodide_root\n\n\ndef ensure_env_installed(env: Path) -> None:\n if env.exists():\n return\n from .. import __version__\n from ..install_xbuildenv import download_xbuildenv, install_xbuildenv\n\n if \"dev\" in __version__:\n raise RuntimeError(\n \"To use out of tree builds with development Pyodide, you must explicitly set PYODIDE_ROOT\"\n )\n\n download_xbuildenv(__version__, env)\n install_xbuildenv(__version__, env)\n\n\ndef initialize_pyodide_root() -> None:\n if \"PYODIDE_ROOT\" in os.environ:\n return\n try:\n os.environ[\"PYODIDE_ROOT\"] = str(search_pyodide_root(__file__))\n return\n except FileNotFoundError:\n pass\n env = Path(\".pyodide-xbuildenv\")\n os.environ[\"PYODIDE_ROOT\"] = str(env / \"xbuildenv/pyodide-root\")\n ensure_env_installed(env)\n", "path": "pyodide-build/pyodide_build/out_of_tree/utils.py"}]} | 1,675 | 505 |
gh_patches_debug_8872 | rasdani/github-patches | git_diff | holoviz__panel-889 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
'str' object has no attribute 'items' when visualising Vega plot of network using Panel
When running `pn.pane.Vega(spec)` on a network spec (from the vega example showing a force-directed network of the Miserables characters), I get an error at line 21 of the `vega.py` source file (under `panel/pane/`). The error:
```
~/anaconda3/envs/i0u19a_jupyter/lib/python3.7/site-packages/panel/pane/vega.py in ds_as_cds(dataset)
19 if len(dataset) == 0:
20 return {}
---> 21 data = {k: [] for k, v in dataset[0].items()}
22 for item in dataset:
23 for k, v in item.items():
AttributeError: 'str' object has no attribute 'items'
```
See the image for a screenshot of the complete error message. The command used was `pn.pane.Vega(spec)`, the error is the one shown above, and the output was `Vega(dict)`.

</issue>
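A minimal, self-contained illustration (with made-up spec data, not taken from the report) of a defensive pattern for list-style Vega `data` entries: only entries that actually carry inline `values` rows get converted to column form, while URL-backed entries, which force-directed specs like the Misérables example typically use, are skipped instead of being indexed blindly.
```python
# Hypothetical data entries resembling a Vega force-directed-graph spec: one is
# URL-backed (no inline rows), one carries inline "values".
data_entries = [
    {"name": "node-data", "url": "data/miserables.json", "format": {"property": "nodes"}},
    {"name": "link-data", "values": [{"source": 0, "target": 1, "value": 1},
                                     {"source": 0, "target": 2, "value": 8}]},
]

def rows_to_columns(rows):
    """Turn a list of row dicts into a dict of per-key value lists."""
    columns = {k: [] for k in rows[0]} if rows else {}
    for row in rows:
        for k, v in row.items():
            columns[k].append(v)
    return columns

sources = {}
for d in data_entries:
    if "values" in d:                      # guard: URL/named datasets have no inline rows
        sources[d["name"]] = rows_to_columns(d.pop("values"))

print(sources)   # only "link-data" is materialised
```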
<code>
[start of panel/pane/vega.py]
1 from __future__ import absolute_import, division, unicode_literals
2
3 import sys
4
5 import param
6 import numpy as np
7
8 from bokeh.models import ColumnDataSource
9 from pyviz_comms import JupyterComm
10
11 from ..viewable import Layoutable
12 from .base import PaneBase
13
14
15 def ds_as_cds(dataset):
16 """
17 Converts Vega dataset into Bokeh ColumnDataSource data
18 """
19 if len(dataset) == 0:
20 return {}
21 data = {k: [] for k, v in dataset[0].items()}
22 for item in dataset:
23 for k, v in item.items():
24 data[k].append(v)
25 data = {k: np.asarray(v) for k, v in data.items()}
26 return data
27
28
29 class Vega(PaneBase):
30 """
31 Vega panes allow rendering Vega plots and traces.
32
33 For efficiency any array objects found inside a Figure are added
34 to a ColumnDataSource which allows using binary transport to sync
35 the figure on bokeh server and via Comms.
36 """
37
38 margin = param.Parameter(default=(5, 5, 30, 5), doc="""
39 Allows to create additional space around the component. May
40 be specified as a two-tuple of the form (vertical, horizontal)
41 or a four-tuple (top, right, bottom, left).""")
42
43 priority = 0.8
44
45 _updates = True
46
47 @classmethod
48 def is_altair(cls, obj):
49 if 'altair' in sys.modules:
50 import altair as alt
51 return isinstance(obj, alt.api.TopLevelMixin)
52 return False
53
54 @classmethod
55 def applies(cls, obj):
56 if isinstance(obj, dict) and 'vega' in obj.get('$schema', '').lower():
57 return True
58 return cls.is_altair(obj)
59
60 @classmethod
61 def _to_json(cls, obj):
62 if isinstance(obj, dict):
63 json = dict(obj)
64 if 'data' in json:
65 data = json['data']
66 if isinstance(data, dict):
67 json['data'] = dict(data)
68 elif isinstance(data, list):
69 json['data'] = [dict(d) for d in data]
70 return json
71 return obj.to_dict()
72
73 def _get_sources(self, json, sources):
74 datasets = json.get('datasets', {})
75 for name in list(datasets):
76 if name in sources or isinstance(datasets[name], dict):
77 continue
78 data = datasets.pop(name)
79 columns = set(data[0]) if data else []
80 if self.is_altair(self.object):
81 import altair as alt
82 if (not isinstance(self.object.data, (alt.Data, alt.UrlData)) and
83 columns == set(self.object.data)):
84 data = ColumnDataSource.from_df(self.object.data)
85 else:
86 data = ds_as_cds(data)
87 sources[name] = ColumnDataSource(data=data)
88 else:
89 sources[name] = ColumnDataSource(data=ds_as_cds(data))
90 data = json.get('data', {})
91 if isinstance(data, dict):
92 data = data.pop('values', {})
93 if data:
94 sources['data'] = ColumnDataSource(data=ds_as_cds(data))
95 elif isinstance(data, list):
96 for d in data:
97 sources[d['name']] = ColumnDataSource(data=ds_as_cds(d['values']))
98
99
100
101 @classmethod
102 def _get_dimensions(cls, json, props):
103 if json is None:
104 return
105
106 view = {}
107 if 'width' in json:
108 view['width'] = json['width']
109 if 'height' in json:
110 view['height'] = json['height']
111 if 'config' in json and 'view' in json['config']:
112 view = json['config']['view']
113 for p in ('width', 'height'):
114 if p not in view:
115 continue
116 if props.get(p) is None or p in view and props.get(p) < view[p]:
117 v = view[p]
118 props[p] = v+22 if isinstance(v, int) else v
119
120 def _get_model(self, doc, root=None, parent=None, comm=None):
121 if 'panel.models.vega' not in sys.modules:
122 if isinstance(comm, JupyterComm):
123 self.param.warning('VegaPlot was not imported on instantiation '
124 'and may not render in a notebook. Restart '
125 'the notebook kernel and ensure you load '
126 'it as part of the extension using:'
127 '\n\npn.extension(\'vega\')\n')
128 from ..models.vega import VegaPlot
129 else:
130 VegaPlot = getattr(sys.modules['panel.models.vega'], 'VegaPlot')
131
132 sources = {}
133 if self.object is None:
134 json = None
135 else:
136 json = self._to_json(self.object)
137 self._get_sources(json, sources)
138 props = self._process_param_change(self._init_properties())
139 self._get_dimensions(json, props)
140 model = VegaPlot(data=json, data_sources=sources, **props)
141 if root is None:
142 root = model
143 self._models[root.ref['id']] = (model, parent)
144 return model
145
146 def _update(self, model):
147 if self.object is None:
148 json = None
149 else:
150 json = self._to_json(self.object)
151 self._get_sources(json, model.data_sources)
152 props = {p : getattr(self, p) for p in list(Layoutable.param)
153 if getattr(self, p) is not None}
154 self._get_dimensions(json, props)
155 props['data'] = json
156 model.update(**props)
157
[end of panel/pane/vega.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/panel/pane/vega.py b/panel/pane/vega.py
--- a/panel/pane/vega.py
+++ b/panel/pane/vega.py
@@ -94,10 +94,10 @@
sources['data'] = ColumnDataSource(data=ds_as_cds(data))
elif isinstance(data, list):
for d in data:
- sources[d['name']] = ColumnDataSource(data=ds_as_cds(d['values']))
+ if 'values' in d:
+ sources[d['name']] = ColumnDataSource(data=ds_as_cds(d.pop('values')))
-
@classmethod
def _get_dimensions(cls, json, props):
if json is None:
| {"golden_diff": "diff --git a/panel/pane/vega.py b/panel/pane/vega.py\n--- a/panel/pane/vega.py\n+++ b/panel/pane/vega.py\n@@ -94,10 +94,10 @@\n sources['data'] = ColumnDataSource(data=ds_as_cds(data))\n elif isinstance(data, list):\n for d in data:\n- sources[d['name']] = ColumnDataSource(data=ds_as_cds(d['values']))\n+ if 'values' in d:\n+ sources[d['name']] = ColumnDataSource(data=ds_as_cds(d.pop('values')))\n \n \n-\n @classmethod\n def _get_dimensions(cls, json, props):\n if json is None:\n", "issue": "'str' object has no attribute 'items' when visualising Vega plot of network using Panel\nWhen running `pn.pane.Vega(spec)` on a network spec (from the vega example showing a force-directed network of the Miserables characters), I get an error in line 21 of the `pane.py` source file. The error:\r\n\r\n```\r\n~/anaconda3/envs/i0u19a_jupyter/lib/python3.7/site-packages/panel/pane/vega.py in ds_as_cds(dataset)\r\n 19 if len(dataset) == 0:\r\n 20 return {}\r\n---> 21 data = {k: [] for k, v in dataset[0].items()}\r\n 22 for item in dataset:\r\n 23 for k, v in item.items():\r\n\r\nAttributeError: 'str' object has no attribute 'items'\r\n```\r\n\r\nSee the image for a screenshot of the complete error message. The command used was `pn.pane.Vega(spec)`, the error what is shown above, and the output was `Vega(dict)`.\r\n\r\n\n", "before_files": [{"content": "from __future__ import absolute_import, division, unicode_literals\n\nimport sys\n\nimport param\nimport numpy as np\n\nfrom bokeh.models import ColumnDataSource\nfrom pyviz_comms import JupyterComm\n\nfrom ..viewable import Layoutable\nfrom .base import PaneBase\n\n\ndef ds_as_cds(dataset):\n \"\"\"\n Converts Vega dataset into Bokeh ColumnDataSource data\n \"\"\"\n if len(dataset) == 0:\n return {}\n data = {k: [] for k, v in dataset[0].items()}\n for item in dataset:\n for k, v in item.items():\n data[k].append(v)\n data = {k: np.asarray(v) for k, v in data.items()}\n return data\n\n\nclass Vega(PaneBase):\n \"\"\"\n Vega panes allow rendering Vega plots and traces.\n\n For efficiency any array objects found inside a Figure are added\n to a ColumnDataSource which allows using binary transport to sync\n the figure on bokeh server and via Comms.\n \"\"\"\n\n margin = param.Parameter(default=(5, 5, 30, 5), doc=\"\"\"\n Allows to create additional space around the component. 
May\n be specified as a two-tuple of the form (vertical, horizontal)\n or a four-tuple (top, right, bottom, left).\"\"\")\n\n priority = 0.8\n\n _updates = True\n\n @classmethod\n def is_altair(cls, obj):\n if 'altair' in sys.modules:\n import altair as alt\n return isinstance(obj, alt.api.TopLevelMixin)\n return False\n\n @classmethod\n def applies(cls, obj):\n if isinstance(obj, dict) and 'vega' in obj.get('$schema', '').lower():\n return True\n return cls.is_altair(obj)\n\n @classmethod\n def _to_json(cls, obj):\n if isinstance(obj, dict):\n json = dict(obj)\n if 'data' in json:\n data = json['data']\n if isinstance(data, dict):\n json['data'] = dict(data)\n elif isinstance(data, list):\n json['data'] = [dict(d) for d in data]\n return json\n return obj.to_dict()\n\n def _get_sources(self, json, sources):\n datasets = json.get('datasets', {})\n for name in list(datasets):\n if name in sources or isinstance(datasets[name], dict):\n continue\n data = datasets.pop(name)\n columns = set(data[0]) if data else []\n if self.is_altair(self.object):\n import altair as alt\n if (not isinstance(self.object.data, (alt.Data, alt.UrlData)) and\n columns == set(self.object.data)):\n data = ColumnDataSource.from_df(self.object.data)\n else:\n data = ds_as_cds(data)\n sources[name] = ColumnDataSource(data=data)\n else:\n sources[name] = ColumnDataSource(data=ds_as_cds(data))\n data = json.get('data', {})\n if isinstance(data, dict):\n data = data.pop('values', {})\n if data:\n sources['data'] = ColumnDataSource(data=ds_as_cds(data))\n elif isinstance(data, list):\n for d in data:\n sources[d['name']] = ColumnDataSource(data=ds_as_cds(d['values']))\n \n\n\n @classmethod\n def _get_dimensions(cls, json, props):\n if json is None:\n return\n\n view = {}\n if 'width' in json:\n view['width'] = json['width']\n if 'height' in json:\n view['height'] = json['height']\n if 'config' in json and 'view' in json['config']:\n view = json['config']['view']\n for p in ('width', 'height'):\n if p not in view:\n continue\n if props.get(p) is None or p in view and props.get(p) < view[p]:\n v = view[p]\n props[p] = v+22 if isinstance(v, int) else v\n\n def _get_model(self, doc, root=None, parent=None, comm=None):\n if 'panel.models.vega' not in sys.modules:\n if isinstance(comm, JupyterComm):\n self.param.warning('VegaPlot was not imported on instantiation '\n 'and may not render in a notebook. Restart '\n 'the notebook kernel and ensure you load '\n 'it as part of the extension using:'\n '\\n\\npn.extension(\\'vega\\')\\n')\n from ..models.vega import VegaPlot\n else:\n VegaPlot = getattr(sys.modules['panel.models.vega'], 'VegaPlot')\n\n sources = {}\n if self.object is None:\n json = None\n else:\n json = self._to_json(self.object)\n self._get_sources(json, sources)\n props = self._process_param_change(self._init_properties())\n self._get_dimensions(json, props)\n model = VegaPlot(data=json, data_sources=sources, **props)\n if root is None:\n root = model\n self._models[root.ref['id']] = (model, parent)\n return model\n\n def _update(self, model):\n if self.object is None:\n json = None\n else:\n json = self._to_json(self.object)\n self._get_sources(json, model.data_sources)\n props = {p : getattr(self, p) for p in list(Layoutable.param)\n if getattr(self, p) is not None}\n self._get_dimensions(json, props)\n props['data'] = json\n model.update(**props)\n", "path": "panel/pane/vega.py"}]} | 2,438 | 161 |
gh_patches_debug_11973 | rasdani/github-patches | git_diff | bids-standard__pybids-833 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Pandas method FutureWarning: frame.append
With:
- Python 3.9.7
- pybids 0.15.0
- pandas 0.14.0
I'm getting the following `FutureWarning` when running `bids = BIDSLayout(self.dataset.path, derivatives=True)`:
```
...bids/variables/entities.py:245: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.
```
pointing to [this line](https://github.com/bids-standard/pybids/blob/e92d4e8d058fc0e1797c8882dd98024068a303bf/bids/variables/entities.py#L245).
Should I submit a PR to replace that, or are there reasons why this should not be done?
</issue>
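A short before/after sketch of the deprecated call, assuming `entities` is a plain dict whose values may be non-scalar (for example a `SliceTiming` list). Because `pd.concat` wants DataFrames, the dict is first turned into a one-row frame via a Series, which keeps a list value in a single cell instead of being expanded.
```python
import pandas as pd

index = pd.DataFrame()
entities = {"subject": "01", "task": "rest", "node_index": 0,
            "SliceTiming": [0.0, 0.5, 1.0]}          # non-scalar value

# Deprecated on pandas >= 1.4 (emits the FutureWarning quoted above):
#   index = index.append(pd.Series(entities), ignore_index=True)

# pd.concat-based replacement: build a one-row frame, then concatenate.
node_row = pd.DataFrame(pd.Series(entities)).T       # Series first, so the list stays one cell
index = pd.concat([index, node_row], ignore_index=True)
print(index)
```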
<code>
[start of bids/variables/entities.py]
1 """ Data classes for internal BIDS data hierarchy. """
2
3 from itertools import chain
4 from collections import namedtuple
5
6 import pandas as pd
7
8 from . import collections as clc
9 from bids.utils import matches_entities
10
11
12 class Node(object):
13 """Base class for objects that represent a single object in the BIDS
14 hierarchy.
15
16 Parameters
17 ----------
18 id : int or str
19 A value uniquely identifying this node. Typically the
20 entity value extracted from the filename via layout.
21 """
22
23 def __init__(self, level, entities):
24 self.level = level.lower()
25 self.entities = entities
26 self.variables = {}
27
28 def add_variable(self, variable):
29 """Adds a BIDSVariable to the current Node's list.
30
31 Parameters
32 ----------
33 variable : BIDSVariable
34 The Variable to add to the list.
35 """
36 self.variables[variable.name] = variable
37
38
39 class RunNode(Node):
40 """Represents a single Run in a BIDS project.
41
42 Parameters
43 ----------
44 id : int
45 The index of the run.
46 entities : dict
47 Dictionary of entities for this Node.
48 image_file : str
49 The full path to the corresponding nifti image.
50 duration : float
51 Duration of the run, in seconds.
52 repetition_time : float
53 TR for the run.
54 task : str
55 The task name for this run.
56 """
57
58 def __init__(self, entities, image_file, duration, repetition_time, n_vols):
59 self.image_file = image_file
60 self.duration = duration
61 self.repetition_time = repetition_time
62 self.n_vols = n_vols
63 super(RunNode, self).__init__('run', entities)
64
65 def get_info(self):
66 # Note: do not remove the dict() call! self.entities is a SQLAlchemy
67 # association_proxy mapping, and without the conversion, the connection
68 # to the DB persists, causing problems on Python 3.5 if we try to clone
69 # a RunInfo or any containing object.
70 entities = dict(self.entities)
71 return RunInfo(entities, self.duration,
72 self.repetition_time, self.image_file, self.n_vols)
73
74
75 # Stores key information for each Run.
76 RunInfo_ = namedtuple('RunInfo', ['entities', 'duration', 'tr', 'image', 'n_vols'])
77
78
79 # Wrap with class to provide docstring
80 class RunInfo(RunInfo_):
81 """ A namedtuple storing run-related information.
82
83 Properties include 'entities', 'duration', 'tr', and 'image', 'n_vols'.
84 """
85 pass
86
87
88 class NodeIndex(object):
89 """Represents the top level in a BIDS hierarchy. """
90
91 def __init__(self):
92 super(NodeIndex, self).__init__()
93 self.index = pd.DataFrame()
94 self.nodes = []
95
96 def get_collections(self, unit, names=None, merge=False,
97 sampling_rate=None, **entities):
98 """Retrieve variable data for a specified level in the Dataset.
99
100 Parameters
101 ----------
102 unit : str
103 The unit of analysis to return variables for. Must be
104 one of 'run', 'session', 'subject', or 'dataset'.
105 names : list
106 Optional list of variables names to return. If
107 None, all available variables are returned.
108 merge : bool
109 If True, variables are merged across all observations
110 of the current unit. E.g., if unit='subject' and return_type=
111 'collection', variables from all subjects will be merged into a
112 single collection. If False, each observation is handled
113 separately, and the result is returned as a list.
114 sampling_rate : int or str
115 If unit='run', the sampling rate to
116 pass onto the returned BIDSRunVariableCollection.
117 entities : dict
118 Optional constraints used to limit what gets returned.
119
120 Returns
121 -------
122 A list of BIDSVariableCollections if merge=False; a single
123 BIDSVariableCollection if merge=True.
124 """
125
126 nodes = self.get_nodes(unit, entities)
127 var_sets = []
128
129 for n in nodes:
130 var_set = list(n.variables.values())
131 var_set = [v for v in var_set if matches_entities(v, entities)]
132 if names is not None:
133 var_set = [v for v in var_set if v.name in names]
134 # Additional filtering on Variables past run level, because their
135 # contents are extracted from TSV files containing rows from
136 # multiple observations
137 if unit != 'run':
138 var_set = [v.filter(entities) for v in var_set]
139 var_sets.append(var_set)
140
141 if merge:
142 var_sets = [list(chain(*var_sets))]
143
144 results = []
145 for vs in var_sets:
146 if not vs:
147 continue
148 if unit == 'run':
149 vs = clc.BIDSRunVariableCollection(vs, sampling_rate)
150 else:
151 vs = clc.BIDSVariableCollection(vs)
152 results.append(vs)
153
154 if merge:
155 return results[0] if results else None
156
157 return results
158
159 def get_nodes(self, level=None, entities=None, strict=False):
160 """Retrieves all nodes that match the specified criteria.
161
162 Parameters
163 ----------
164 level : str
165 The level of analysis of nodes to return.
166 entities : dict
167 Entities to filter on. All nodes must have
168 matching values on all defined keys to be included.
169 strict : bool
170 If True, an exception will be raised if the entities
171 dict contains any keys that aren't contained in the current
172 index.
173
174 Returns
175 -------
176 A list of Node instances.
177 """
178
179 entities = {} if entities is None else entities.copy()
180
181 if level is not None:
182 entities['level'] = level
183
184 if entities is None:
185 return self.nodes
186
187 match_ents = set(entities.keys())
188 common_cols = list(match_ents & set(self.index.columns))
189
190 if strict and match_ents - common_cols:
191 raise ValueError("Invalid entities: ", match_ents - common_cols)
192
193 if not common_cols:
194 return self.nodes
195
196 # Construct query string that handles both single values and iterables
197 query = []
198 for col in common_cols:
199 oper = 'in' if isinstance(entities[col], (list, tuple)) else '=='
200 q = '{name} {oper} {val}'.format(name=col, oper=oper,
201 val=repr(entities[col]))
202 query.append(q)
203 query = ' and '.join(query)
204
205 rows = self.index.query(query)
206 if rows.empty:
207 return []
208
209 # Sort and return
210 sort_cols = ['subject', 'session', 'task', 'run', 'node_index',
211 'suffix', 'level', 'datatype']
212 sort_cols = [sc for sc in sort_cols if sc in set(rows.columns)]
213 rows = rows.sort_values(sort_cols)
214 inds = rows['node_index'].astype(int)
215 return [self.nodes[i] for i in inds]
216
217 def create_node(self, level, entities, *args, **kwargs):
218 """Creates a new child Node.
219
220 Parameters
221 ----------
222 level : str
223 The level of analysis of the new Node.
224 entities : dict
225 Dictionary of entities belonging to Node.
226 args, kwargs : dict
227 Optional positional or named arguments to pass on to
228 class-specific initializers. These arguments are only used if
229 a Node that matches the passed entities doesn't already exist,
230 and a new one must be created.
231
232 Returns
233 -------
234 A Node instance.
235 """
236
237 if level == 'run':
238 node = RunNode(entities, *args, **kwargs)
239 else:
240 node = Node(level, entities)
241
242 entities = dict(entities, node_index=len(self.nodes), level=level)
243 self.nodes.append(node)
244 node_row = pd.Series(entities)
245 self.index = self.index.append(node_row, ignore_index=True)
246 return node
247
248 def get_or_create_node(self, level, entities, *args, **kwargs):
249 """Retrieves a child Node based on the specified criteria, creating a
250 new Node if necessary.
251
252 Parameters
253 ----------
254 level : str
255 The level of analysis of the Node.
256 entities : dict
257 Dictionary of entities to include in newly-created
258 Nodes or filter existing ones.
259 args, kwargs : dict
260 Optional positional or named arguments to pass on to
261 class-specific initializers. These arguments are only used if
262 a Node that matches the passed entities doesn't already exist,
263 and a new one must be created.
264
265 Returns
266 -------
267 A Node instance.
268 """
269
270 result = self.get_nodes(level, entities)
271
272 if result:
273 if len(result) > 1:
274 raise ValueError("More than one matching Node found! If you're"
275 " expecting more than one Node, use "
276 "get_nodes() instead of get_or_create_node()."
277 )
278 return result[0]
279
280 return self.create_node(level, entities, *args, **kwargs)
281
[end of bids/variables/entities.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bids/variables/entities.py b/bids/variables/entities.py
--- a/bids/variables/entities.py
+++ b/bids/variables/entities.py
@@ -241,8 +241,11 @@
entities = dict(entities, node_index=len(self.nodes), level=level)
self.nodes.append(node)
- node_row = pd.Series(entities)
- self.index = self.index.append(node_row, ignore_index=True)
+ # Because "entities" may have non-scalar values (such as `SliceTiming`)
+ # we need to first create a Series to avoid expansion
+ # From here we can concatenate
+ node_row = pd.DataFrame(pd.Series(entities)).T
+ self.index = pd.concat([self.index, node_row], ignore_index=True)
return node
def get_or_create_node(self, level, entities, *args, **kwargs):
| {"golden_diff": "diff --git a/bids/variables/entities.py b/bids/variables/entities.py\n--- a/bids/variables/entities.py\n+++ b/bids/variables/entities.py\n@@ -241,8 +241,11 @@\n \n entities = dict(entities, node_index=len(self.nodes), level=level)\n self.nodes.append(node)\n- node_row = pd.Series(entities)\n- self.index = self.index.append(node_row, ignore_index=True)\n+ # Because \"entities\" may have non-scalar values (such as `SliceTiming`)\n+ # we need to first create a Series to avoid expansion\n+ # From here we can concatenate\n+ node_row = pd.DataFrame(pd.Series(entities)).T\n+ self.index = pd.concat([self.index, node_row], ignore_index=True)\n return node\n \n def get_or_create_node(self, level, entities, *args, **kwargs):\n", "issue": "Pandas method FutureWarning: frame.append\nWith:\r\n- Python 3.9.7\r\n- pybids 0.15.0\r\n- pandas 0.14.0\r\n\r\nI'm getting the following `FutureWarning` when running `bids = BIDSLayout(self.dataset.path, derivatives=True)`:\r\n\r\n```\r\n...bids/variables/entities.py:245: FutureWarning: The frame.append method is deprecated and will be removed from pandas in a future version. Use pandas.concat instead.\r\n```\r\n\r\npointing to [this line](https://github.com/bids-standard/pybids/blob/e92d4e8d058fc0e1797c8882dd98024068a303bf/bids/variables/entities.py#L245).\r\n\r\nShould I submit a PR to replace that, or are there reasons why this should not be done?\n", "before_files": [{"content": "\"\"\" Data classes for internal BIDS data hierarchy. \"\"\"\n\nfrom itertools import chain\nfrom collections import namedtuple\n\nimport pandas as pd\n\nfrom . import collections as clc\nfrom bids.utils import matches_entities\n\n\nclass Node(object):\n \"\"\"Base class for objects that represent a single object in the BIDS\n hierarchy.\n\n Parameters\n ----------\n id : int or str\n A value uniquely identifying this node. Typically the\n entity value extracted from the filename via layout.\n \"\"\"\n\n def __init__(self, level, entities):\n self.level = level.lower()\n self.entities = entities\n self.variables = {}\n\n def add_variable(self, variable):\n \"\"\"Adds a BIDSVariable to the current Node's list.\n\n Parameters\n ----------\n variable : BIDSVariable\n The Variable to add to the list.\n \"\"\"\n self.variables[variable.name] = variable\n\n\nclass RunNode(Node):\n \"\"\"Represents a single Run in a BIDS project.\n\n Parameters\n ----------\n id : int\n The index of the run.\n entities : dict\n Dictionary of entities for this Node.\n image_file : str\n The full path to the corresponding nifti image.\n duration : float\n Duration of the run, in seconds.\n repetition_time : float\n TR for the run.\n task : str\n The task name for this run.\n \"\"\"\n\n def __init__(self, entities, image_file, duration, repetition_time, n_vols):\n self.image_file = image_file\n self.duration = duration\n self.repetition_time = repetition_time\n self.n_vols = n_vols\n super(RunNode, self).__init__('run', entities)\n\n def get_info(self):\n # Note: do not remove the dict() call! 
self.entities is a SQLAlchemy\n # association_proxy mapping, and without the conversion, the connection\n # to the DB persists, causing problems on Python 3.5 if we try to clone\n # a RunInfo or any containing object.\n entities = dict(self.entities)\n return RunInfo(entities, self.duration,\n self.repetition_time, self.image_file, self.n_vols)\n\n\n# Stores key information for each Run.\nRunInfo_ = namedtuple('RunInfo', ['entities', 'duration', 'tr', 'image', 'n_vols'])\n\n\n# Wrap with class to provide docstring\nclass RunInfo(RunInfo_):\n \"\"\" A namedtuple storing run-related information.\n\n Properties include 'entities', 'duration', 'tr', and 'image', 'n_vols'.\n \"\"\"\n pass\n\n\nclass NodeIndex(object):\n \"\"\"Represents the top level in a BIDS hierarchy. \"\"\"\n\n def __init__(self):\n super(NodeIndex, self).__init__()\n self.index = pd.DataFrame()\n self.nodes = []\n\n def get_collections(self, unit, names=None, merge=False,\n sampling_rate=None, **entities):\n \"\"\"Retrieve variable data for a specified level in the Dataset.\n\n Parameters\n ----------\n unit : str\n The unit of analysis to return variables for. Must be\n one of 'run', 'session', 'subject', or 'dataset'.\n names : list\n Optional list of variables names to return. If\n None, all available variables are returned.\n merge : bool\n If True, variables are merged across all observations\n of the current unit. E.g., if unit='subject' and return_type=\n 'collection', variables from all subjects will be merged into a\n single collection. If False, each observation is handled\n separately, and the result is returned as a list.\n sampling_rate : int or str\n If unit='run', the sampling rate to\n pass onto the returned BIDSRunVariableCollection.\n entities : dict\n Optional constraints used to limit what gets returned.\n\n Returns\n -------\n A list of BIDSVariableCollections if merge=False; a single\n BIDSVariableCollection if merge=True.\n \"\"\"\n\n nodes = self.get_nodes(unit, entities)\n var_sets = []\n\n for n in nodes:\n var_set = list(n.variables.values())\n var_set = [v for v in var_set if matches_entities(v, entities)]\n if names is not None:\n var_set = [v for v in var_set if v.name in names]\n # Additional filtering on Variables past run level, because their\n # contents are extracted from TSV files containing rows from\n # multiple observations\n if unit != 'run':\n var_set = [v.filter(entities) for v in var_set]\n var_sets.append(var_set)\n\n if merge:\n var_sets = [list(chain(*var_sets))]\n\n results = []\n for vs in var_sets:\n if not vs:\n continue\n if unit == 'run':\n vs = clc.BIDSRunVariableCollection(vs, sampling_rate)\n else:\n vs = clc.BIDSVariableCollection(vs)\n results.append(vs)\n\n if merge:\n return results[0] if results else None\n\n return results\n\n def get_nodes(self, level=None, entities=None, strict=False):\n \"\"\"Retrieves all nodes that match the specified criteria.\n\n Parameters\n ----------\n level : str\n The level of analysis of nodes to return.\n entities : dict\n Entities to filter on. 
All nodes must have\n matching values on all defined keys to be included.\n strict : bool\n If True, an exception will be raised if the entities\n dict contains any keys that aren't contained in the current\n index.\n\n Returns\n -------\n A list of Node instances.\n \"\"\"\n\n entities = {} if entities is None else entities.copy()\n\n if level is not None:\n entities['level'] = level\n\n if entities is None:\n return self.nodes\n\n match_ents = set(entities.keys())\n common_cols = list(match_ents & set(self.index.columns))\n\n if strict and match_ents - common_cols:\n raise ValueError(\"Invalid entities: \", match_ents - common_cols)\n\n if not common_cols:\n return self.nodes\n\n # Construct query string that handles both single values and iterables\n query = []\n for col in common_cols:\n oper = 'in' if isinstance(entities[col], (list, tuple)) else '=='\n q = '{name} {oper} {val}'.format(name=col, oper=oper,\n val=repr(entities[col]))\n query.append(q)\n query = ' and '.join(query)\n\n rows = self.index.query(query)\n if rows.empty:\n return []\n\n # Sort and return\n sort_cols = ['subject', 'session', 'task', 'run', 'node_index',\n 'suffix', 'level', 'datatype']\n sort_cols = [sc for sc in sort_cols if sc in set(rows.columns)]\n rows = rows.sort_values(sort_cols)\n inds = rows['node_index'].astype(int)\n return [self.nodes[i] for i in inds]\n\n def create_node(self, level, entities, *args, **kwargs):\n \"\"\"Creates a new child Node.\n\n Parameters\n ----------\n level : str\n The level of analysis of the new Node.\n entities : dict\n Dictionary of entities belonging to Node.\n args, kwargs : dict\n Optional positional or named arguments to pass on to\n class-specific initializers. These arguments are only used if\n a Node that matches the passed entities doesn't already exist,\n and a new one must be created.\n\n Returns\n -------\n A Node instance.\n \"\"\"\n\n if level == 'run':\n node = RunNode(entities, *args, **kwargs)\n else:\n node = Node(level, entities)\n\n entities = dict(entities, node_index=len(self.nodes), level=level)\n self.nodes.append(node)\n node_row = pd.Series(entities)\n self.index = self.index.append(node_row, ignore_index=True)\n return node\n\n def get_or_create_node(self, level, entities, *args, **kwargs):\n \"\"\"Retrieves a child Node based on the specified criteria, creating a\n new Node if necessary.\n\n Parameters\n ----------\n level : str\n The level of analysis of the Node.\n entities : dict\n Dictionary of entities to include in newly-created\n Nodes or filter existing ones.\n args, kwargs : dict\n Optional positional or named arguments to pass on to\n class-specific initializers. These arguments are only used if\n a Node that matches the passed entities doesn't already exist,\n and a new one must be created.\n\n Returns\n -------\n A Node instance.\n \"\"\"\n\n result = self.get_nodes(level, entities)\n\n if result:\n if len(result) > 1:\n raise ValueError(\"More than one matching Node found! If you're\"\n \" expecting more than one Node, use \"\n \"get_nodes() instead of get_or_create_node().\"\n )\n return result[0]\n\n return self.create_node(level, entities, *args, **kwargs)\n", "path": "bids/variables/entities.py"}]} | 3,427 | 194 |
gh_patches_debug_19145 | rasdani/github-patches | git_diff | kivy__python-for-android-1480 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
libxml2 build broken on latest p4a master with python 3
With latest p4a master & python3crystax & host python 3, the libxml2 recipe gets built inside `/root/.local/share/python-for-android/build/other_builds/libxml2/armeabi-v7a__ndk_target_19/libxml2/`.
This is a folder name change: previously it used to be in `/root/.local/share/python-for-android/build/other_builds/libxml2/armeabi-v7a/libxml2/`, and as a result the path detection of the libxml2 build directory fails for some reason.
### Versions
* Python: host python 3.x, python3crystax target
* OS: ubuntu in docker
* Kivy: not used
* Cython: not sure, but I don't think it's relevant
Edit: corrected lxml -> libxml2
</issue>
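A self-contained sketch (paths shortened, directory layout assumed from the report) of why guessing a sibling recipe's location by walking up from the current build directory breaks once per-arch folders gain the `__ndk_target_19` suffix, and why asking the dependency recipe for its own build dir is sturdier.
```python
from os.path import dirname, join

# Assumed layout, per the report: builds now live under <recipe>/<arch>__ndk_target_19/...
builds = {
    "libxml2": "/build/other_builds/libxml2/armeabi-v7a__ndk_target_19/libxml2",
}
libxslt_container = "/build/other_builds/libxslt/armeabi-v7a__ndk_target_19"
arch = "armeabi-v7a"

# Old guess: walk up two levels, then re-assemble the libxml2 path from the bare arch name.
guessed = join(dirname(dirname(libxslt_container)), "libxml2", arch, "libxml2")
print(guessed)                       # /build/other_builds/libxml2/armeabi-v7a/libxml2
print(guessed == builds["libxml2"])  # False -> that directory no longer exists

# Sturdier idea (a sketch, not the exact p4a call chain): look the dependency up directly,
# e.g. Recipe.get_recipe('libxml2', ctx).get_build_dir(arch) in python-for-android.
print(builds["libxml2"])
```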
<code>
[start of pythonforandroid/recipes/libxslt/__init__.py]
1 from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
2 from os.path import exists, join, dirname
3 import sh
4
5
6 class LibxsltRecipe(Recipe):
7 version = "1.1.28"
8 url = "http://xmlsoft.org/sources/libxslt-{version}.tar.gz"
9 depends = ["libxml2"]
10 patches = ["fix-dlopen.patch"]
11
12 call_hostpython_via_targetpython = False
13
14 def should_build(self, arch):
15 super(LibxsltRecipe, self).should_build(arch)
16 return not exists(join(self.ctx.get_libs_dir(arch.arch), "libxslt.a"))
17
18 def build_arch(self, arch):
19 super(LibxsltRecipe, self).build_arch(arch)
20 env = self.get_recipe_env(arch)
21 with current_directory(self.get_build_dir(arch.arch)):
22 # If the build is done with /bin/sh things blow up,
23 # try really hard to use bash
24 env["CC"] += " -I%s" % self.get_build_dir(arch.arch)
25 libxml = (
26 dirname(dirname(self.get_build_container_dir(arch.arch)))
27 + "/libxml2/%s/libxml2" % arch.arch
28 )
29 shprint(
30 sh.Command("./configure"),
31 "--build=i686-pc-linux-gnu",
32 "--host=arm-linux-eabi",
33 "--without-plugins",
34 "--without-debug",
35 "--without-python",
36 "--without-crypto",
37 "--with-libxml-src=%s" % libxml,
38 _env=env,
39 )
40 shprint(sh.make, "V=1", _env=env)
41 shutil.copyfile(
42 "libxslt/.libs/libxslt.a",
43 join(self.ctx.get_libs_dir(arch.arch), "libxslt.a"),
44 )
45 shutil.copyfile(
46 "libexslt/.libs/libexslt.a",
47 join(self.ctx.get_libs_dir(arch.arch), "libexslt.a"),
48 )
49
50 def get_recipe_env(self, arch):
51 env = super(LibxsltRecipe, self).get_recipe_env(arch)
52 env["CONFIG_SHELL"] = "/bin/bash"
53 env["SHELL"] = "/bin/bash"
54 env[
55 "CC"
56 ] = "arm-linux-androideabi-gcc -DANDROID -mandroid -fomit-frame-pointer --sysroot={}".format(
57 self.ctx.ndk_platform
58 )
59
60 env["LDSHARED"] = "%s -nostartfiles -shared -fPIC" % env["CC"]
61 return env
62
63
64 recipe = LibxsltRecipe()
65
[end of pythonforandroid/recipes/libxslt/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pythonforandroid/recipes/libxslt/__init__.py b/pythonforandroid/recipes/libxslt/__init__.py
--- a/pythonforandroid/recipes/libxslt/__init__.py
+++ b/pythonforandroid/recipes/libxslt/__init__.py
@@ -1,5 +1,5 @@
from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory
-from os.path import exists, join, dirname
+from os.path import exists, join
import sh
@@ -22,10 +22,8 @@
# If the build is done with /bin/sh things blow up,
# try really hard to use bash
env["CC"] += " -I%s" % self.get_build_dir(arch.arch)
- libxml = (
- dirname(dirname(self.get_build_container_dir(arch.arch)))
- + "/libxml2/%s/libxml2" % arch.arch
- )
+ libxml = Recipe.get_recipe(
+ 'libxml2', self.ctx).get_build_dir(arch.arch)
shprint(
sh.Command("./configure"),
"--build=i686-pc-linux-gnu",
| {"golden_diff": "diff --git a/pythonforandroid/recipes/libxslt/__init__.py b/pythonforandroid/recipes/libxslt/__init__.py\n--- a/pythonforandroid/recipes/libxslt/__init__.py\n+++ b/pythonforandroid/recipes/libxslt/__init__.py\n@@ -1,5 +1,5 @@\n from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory\n-from os.path import exists, join, dirname\n+from os.path import exists, join\n import sh\n \n \n@@ -22,10 +22,8 @@\n # If the build is done with /bin/sh things blow up,\n # try really hard to use bash\n env[\"CC\"] += \" -I%s\" % self.get_build_dir(arch.arch)\n- libxml = (\n- dirname(dirname(self.get_build_container_dir(arch.arch)))\n- + \"/libxml2/%s/libxml2\" % arch.arch\n- )\n+ libxml = Recipe.get_recipe(\n+ 'libxml2', self.ctx).get_build_dir(arch.arch)\n shprint(\n sh.Command(\"./configure\"),\n \"--build=i686-pc-linux-gnu\",\n", "issue": "libxml2 build broken on latest p4a master with python 3\nWith latest p4a master & python3crystax & host python 3, the libxml2 recipe gets built inside `/root/.local/share/python-for-android/build/other_builds/libxml2/armeabi-v7a__ndk_target_19/libxml2/ `.\r\n\r\nThis is a folder name change, previously it used to be in `/root/.local/share/python-for-android/build/other_builds/libxml2/armeabi-v7a/libxml2/` - and as a result, the path detection inside the libxml2 fails for some reason.\r\n\r\n### Versions\r\n\r\n* Python: host python 3.x, python3crystax target\r\n* OS: ubuntu in docker\r\n* Kivy: not used\r\n* Cython: not sure, but I don't think it's relevant\r\n\r\nEdit: corrected lxml -> libxml2\n", "before_files": [{"content": "from pythonforandroid.toolchain import Recipe, shprint, shutil, current_directory\nfrom os.path import exists, join, dirname\nimport sh\n\n\nclass LibxsltRecipe(Recipe):\n version = \"1.1.28\"\n url = \"http://xmlsoft.org/sources/libxslt-{version}.tar.gz\"\n depends = [\"libxml2\"]\n patches = [\"fix-dlopen.patch\"]\n\n call_hostpython_via_targetpython = False\n\n def should_build(self, arch):\n super(LibxsltRecipe, self).should_build(arch)\n return not exists(join(self.ctx.get_libs_dir(arch.arch), \"libxslt.a\"))\n\n def build_arch(self, arch):\n super(LibxsltRecipe, self).build_arch(arch)\n env = self.get_recipe_env(arch)\n with current_directory(self.get_build_dir(arch.arch)):\n # If the build is done with /bin/sh things blow up,\n # try really hard to use bash\n env[\"CC\"] += \" -I%s\" % self.get_build_dir(arch.arch)\n libxml = (\n dirname(dirname(self.get_build_container_dir(arch.arch)))\n + \"/libxml2/%s/libxml2\" % arch.arch\n )\n shprint(\n sh.Command(\"./configure\"),\n \"--build=i686-pc-linux-gnu\",\n \"--host=arm-linux-eabi\",\n \"--without-plugins\",\n \"--without-debug\",\n \"--without-python\",\n \"--without-crypto\",\n \"--with-libxml-src=%s\" % libxml,\n _env=env,\n )\n shprint(sh.make, \"V=1\", _env=env)\n shutil.copyfile(\n \"libxslt/.libs/libxslt.a\",\n join(self.ctx.get_libs_dir(arch.arch), \"libxslt.a\"),\n )\n shutil.copyfile(\n \"libexslt/.libs/libexslt.a\",\n join(self.ctx.get_libs_dir(arch.arch), \"libexslt.a\"),\n )\n\n def get_recipe_env(self, arch):\n env = super(LibxsltRecipe, self).get_recipe_env(arch)\n env[\"CONFIG_SHELL\"] = \"/bin/bash\"\n env[\"SHELL\"] = \"/bin/bash\"\n env[\n \"CC\"\n ] = \"arm-linux-androideabi-gcc -DANDROID -mandroid -fomit-frame-pointer --sysroot={}\".format(\n self.ctx.ndk_platform\n )\n\n env[\"LDSHARED\"] = \"%s -nostartfiles -shared -fPIC\" % env[\"CC\"]\n return env\n\n\nrecipe = LibxsltRecipe()\n", "path": 
"pythonforandroid/recipes/libxslt/__init__.py"}]} | 1,432 | 254 |
gh_patches_debug_12242 | rasdani/github-patches | git_diff | DataBiosphere__toil-562 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Need `toil --version`
Need `toil --version`
</issue>
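A minimal sketch of wiring a `--version` flag into a hand-rolled argv dispatcher like the one shown below; the version string here is a placeholder, whereas a real implementation would read the package's own version module or the installed distribution metadata.
```python
import sys

VERSION = "0.0.0"            # placeholder, e.g. for toil.version.version

def main(argv=None):
    argv = sys.argv[1:] if argv is None else argv
    if not argv or argv[0] == "--help":
        print("Usage: toil COMMAND ... | toil --version")
    elif argv[0] == "--version":
        print(VERSION)
    else:
        print("Unknown option %r. Pass --help for usage." % argv[0])

if __name__ == "__main__":
    main(["--version"])      # prints 0.0.0
```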
<code>
[start of src/toil/utils/toilMain.py]
1 from __future__ import absolute_import, print_function
2 import os
3 import sys
4
5 def main():
6 modules = loadModules()
7 try:
8 command = sys.argv[1]
9 except IndexError:
10 printHelp(modules)
11 else:
12 if command == '--help':
13 printHelp(modules)
14 else:
15 try:
16 module = modules[command]
17 except KeyError:
18 print("Unknown option '%s'. "
19 "Pass --help to display usage information.\n" % command, file=sys.stderr)
20 sys.exit(1)
21 else:
22 del sys.argv[1]
23 module.main()
24
25
26 def loadModules():
27 # noinspection PyUnresolvedReferences
28 from toil.utils import toilKill, toilRestart, toilStats, toilStatus, toilClean
29 return {name[4:].lower(): module for name, module in locals().iteritems()}
30
31
32 def printHelp(modules):
33 usage = ("\n"
34 "Usage: {name} COMMAND ...\n"
35 " {name} --help\n"
36 " {name} COMMAND --help\n\n"
37 "where COMMAND is one of the following:\n\n{descriptions}\n\n")
38 print(usage.format(
39 name=os.path.basename(sys.argv[0]),
40 commands='|'.join(modules.iterkeys()),
41 descriptions='\n'.join("%s - %s" % (n, m.__doc__.strip()) for n, m in modules.iteritems())))
42
[end of src/toil/utils/toilMain.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/toil/utils/toilMain.py b/src/toil/utils/toilMain.py
--- a/src/toil/utils/toilMain.py
+++ b/src/toil/utils/toilMain.py
@@ -1,4 +1,6 @@
from __future__ import absolute_import, print_function
+from toil.version import version
+import pkg_resources
import os
import sys
@@ -11,6 +13,11 @@
else:
if command == '--help':
printHelp(modules)
+ elif command == '--version':
+ try:
+ print(pkg_resources.get_distribution('toil').version)
+ except:
+ print("Version gathered from toil.version: "+version)
else:
try:
module = modules[command]
| {"golden_diff": "diff --git a/src/toil/utils/toilMain.py b/src/toil/utils/toilMain.py\n--- a/src/toil/utils/toilMain.py\n+++ b/src/toil/utils/toilMain.py\n@@ -1,4 +1,6 @@\n from __future__ import absolute_import, print_function\n+from toil.version import version\n+import pkg_resources\n import os\n import sys\n \n@@ -11,6 +13,11 @@\n else:\n if command == '--help':\n printHelp(modules)\n+ elif command == '--version':\n+ try:\n+ print(pkg_resources.get_distribution('toil').version)\n+ except:\n+ print(\"Version gathered from toil.version: \"+version)\n else:\n try:\n module = modules[command]\n", "issue": "Need `toil --version`\n\nNeed `toil --version`\n\n", "before_files": [{"content": "from __future__ import absolute_import, print_function\nimport os\nimport sys\n\ndef main():\n modules = loadModules()\n try:\n command = sys.argv[1]\n except IndexError:\n printHelp(modules)\n else:\n if command == '--help':\n printHelp(modules)\n else:\n try:\n module = modules[command]\n except KeyError:\n print(\"Unknown option '%s'. \"\n \"Pass --help to display usage information.\\n\" % command, file=sys.stderr)\n sys.exit(1)\n else:\n del sys.argv[1]\n module.main()\n\n\ndef loadModules():\n # noinspection PyUnresolvedReferences\n from toil.utils import toilKill, toilRestart, toilStats, toilStatus, toilClean\n return {name[4:].lower(): module for name, module in locals().iteritems()}\n\n\ndef printHelp(modules):\n usage = (\"\\n\"\n \"Usage: {name} COMMAND ...\\n\"\n \" {name} --help\\n\"\n \" {name} COMMAND --help\\n\\n\"\n \"where COMMAND is one of the following:\\n\\n{descriptions}\\n\\n\")\n print(usage.format(\n name=os.path.basename(sys.argv[0]),\n commands='|'.join(modules.iterkeys()),\n descriptions='\\n'.join(\"%s - %s\" % (n, m.__doc__.strip()) for n, m in modules.iteritems())))\n", "path": "src/toil/utils/toilMain.py"}]} | 944 | 169 |
gh_patches_debug_35008 | rasdani/github-patches | git_diff | quantumlib__Cirq-1170 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
check.sh fails on clean master
Just merged master and ran check.sh, and it is failing because tensorflow is not installed in the test environment:
```
(cirq) [dabacon:cirq:~/git/cirq]$./continuous-integration/check.sh
run: ('virtualenv', '--quiet', '-p', '/usr/bin/python3.5', '/tmp/test-cirq-kp5mfrah/.test_virtualenv')
run: ('/tmp/test-cirq-kp5mfrah/.test_virtualenv/bin/pip', 'install', '--quiet', '-r', '/tmp/test-cirq-kp5mfrah/requirements.txt')
run: ('/tmp/test-cirq-kp5mfrah/.test_virtualenv/bin/pip', 'install', '--quiet', '-r', '/tmp/test-cirq-kp5mfrah/dev_tools/conf/pip-list-dev-tools.txt')
Running pylint
Finished pylint
Running typecheck
Finished typecheck
Running pytest
==================================== ERRORS ====================================
_______ ERROR collecting cirq/contrib/tpu/circuit_to_tensorflow_test.py ________
ImportError while importing test module '/tmp/test-cirq-kp5mfrah/cirq/contrib/tpu/circuit_to_tensorflow_test.py'.
Hint: make sure your test modules/packages have valid Python names.
Traceback:
cirq/contrib/tpu/__init__.py:75: in <module>
from cirq.contrib.tpu.circuit_to_tensorflow import (
cirq/contrib/tpu/circuit_to_tensorflow.py:20: in <module>
import tensorflow as tf
E ImportError: No module named 'tensorflow'
----------- coverage: platform linux, python 3.5.3-final-0 -----------
Coverage annotated source written next to source
!!!!!!!!!!!!!!!!!!! Interrupted: 1 errors during collection !!!!!!!!!!!!!!!!!!!!
1 error in 34.82 seconds
Finished pytest
```
</issue>
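The collection failure above is the usual optional-dependency problem: pytest imports every test module it collects, so a contrib test that imports tensorflow unconditionally breaks collection when the temporary virtualenv only has the core requirements installed. A generic way test suites tolerate that (a sketch of the pattern, not necessarily what Cirq adopted) is to skip such modules at collection time:
```python
import pytest

# Placed at the top of a test module that exercises an optional dependency:
tf = pytest.importorskip("tensorflow")   # skips the whole module if tensorflow is missing

def test_constant_addition():
    assert int(tf.constant(2) + tf.constant(3)) == 5
```
Another route is for the check script's environment setup (such as `create_virtual_env` below, which already accepts a list of requirements files) to install the contrib requirements as well.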
<code>
[start of dev_tools/env_tools.py]
1 # Copyright 2018 The Cirq Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import os
16 import shutil
17 import sys
18 from typing import Optional, Iterable, Callable, cast
19
20 from dev_tools import shell_tools, git_env_tools
21 from dev_tools.github_repository import GithubRepository
22 from dev_tools.prepared_env import PreparedEnv
23
24
25 def get_unhidden_ungenerated_python_files(directory: str) -> Iterable[str]:
26 """Iterates through relevant python files within the given directory.
27
28 Args:
29 directory: The top-level directory to explore.
30
31 Yields:
32 File paths.
33 """
34 for dirpath, dirnames, filenames in os.walk(directory, topdown=True):
35 if os.path.split(dirpath)[-1].startswith('.'):
36 dirnames.clear()
37 continue
38
39 for filename in filenames:
40 if filename.endswith('.py') and not filename.endswith('_pb2.py'):
41 yield os.path.join(dirpath, filename)
42
43
44 def create_virtual_env(venv_path: str,
45 requirements_paths: Iterable[str],
46 python_path: str,
47 verbose: bool) -> None:
48 """Creates a new virtual environment and then installs dependencies.
49
50 Args:
51 venv_path: Where to put the virtual environment's state.
52 requirements_paths: Location of requirements files to -r install.
53 python_path: The python binary to use.
54 verbose: When set, more progress output is produced.
55 """
56 shell_tools.run_cmd('virtualenv',
57 None if verbose else '--quiet',
58 '-p',
59 python_path,
60 venv_path,
61 out=sys.stderr)
62 pip_path = os.path.join(venv_path, 'bin', 'pip')
63 for req_path in requirements_paths:
64 shell_tools.run_cmd(pip_path,
65 'install',
66 None if verbose else '--quiet',
67 '-r',
68 req_path,
69 out=sys.stderr)
70
71
72 def prepare_temporary_test_environment(
73 destination_directory: str,
74 repository: GithubRepository,
75 pull_request_number: Optional[int],
76 verbose: bool,
77 env_name: str = '.test_virtualenv',
78 python_path: str = '/usr/bin/python3.5',
79 commit_ids_known_callback: Callable[[PreparedEnv], None] = None
80 ) -> PreparedEnv:
81 """Prepares a temporary test environment at the (existing empty) directory.
82
83 Args:
84 destination_directory: The location to put files. The caller is
85 responsible for deleting the directory, whether or not this method
86 succeeds or fails.
87 repository: The github repository to download content from, if a pull
88 request number is given.
89 pull_request_number: If set, test content is fetched from github.
90 Otherwise copies of local files are used.
91 verbose: When set, more progress output is produced.
92 env_name: The name to use for the virtual environment.
93 python_path: Location of the python binary to use within the
94 virtual environment.
95 commit_ids_known_callback: A function to call when the actual commit id
96 being tested is known, before the virtual environment is ready.
97
98 Returns:
99 Commit ids corresponding to content to test/compare.
100 """
101 # Fetch content.
102 if pull_request_number is not None:
103 env = git_env_tools.fetch_github_pull_request(
104 destination_directory=destination_directory,
105 repository=repository,
106 pull_request_number=pull_request_number,
107 verbose=verbose)
108 else:
109 env = git_env_tools.fetch_local_files(
110 destination_directory=destination_directory,
111 verbose=verbose)
112
113 if commit_ids_known_callback is not None:
114 commit_ids_known_callback(env)
115
116 # Create virtual environment.
117 base_path = cast(str, env.destination_directory)
118 env_path = os.path.join(base_path, env_name)
119 req_path = os.path.join(base_path, 'requirements.txt')
120 req_path_2 = os.path.join(base_path,
121 'dev_tools',
122 'conf',
123 'pip-list-dev-tools.txt')
124 create_virtual_env(venv_path=env_path,
125 python_path=python_path,
126 requirements_paths=[req_path, req_path_2],
127 verbose=verbose)
128
129 return PreparedEnv(github_repo=env.repository,
130 actual_commit_id=env.actual_commit_id,
131 compare_commit_id=env.compare_commit_id,
132 destination_directory=env.destination_directory,
133 virtual_env_path=env_path)
134
135
136 def derive_temporary_python2_environment(
137 destination_directory: str,
138 python3_environment: PreparedEnv,
139 verbose: bool,
140 env_name: str = '.test_virtualenv_py2',
141 python_path: str = "/usr/bin/python2.7") -> PreparedEnv:
142 """Creates a python 2.7 environment starting from a prepared python 3 one.
143
144 Args:
145 destination_directory: Where to put the python 2 environment.
146 python3_environment: The prepared environment to start from.
147 verbose: When set, more progress output is produced.
148 env_name: The name to use for the virtualenv directory.
149 python_path: The python binary to use.
150
151 Returns:
152 A description of the environment that was prepared.
153 """
154
155 shutil.rmtree(destination_directory)
156 input_directory = cast(str, python3_environment.destination_directory)
157 os.chdir(input_directory)
158 conversion_script_path = os.path.join(
159 input_directory,
160 'dev_tools',
161 'python2.7-generate.sh')
162 shell_tools.run_cmd('bash',
163 conversion_script_path,
164 destination_directory,
165 input_directory,
166 python3_environment.virtual_env_path,
167 out=sys.stderr)
168 os.chdir(destination_directory)
169
170 # Create virtual environment.
171 env_path = os.path.join(destination_directory, env_name)
172 # (These files are output by dev_tools/python2.7-generate.sh.)
173 req_path = os.path.join(destination_directory, 'requirements.txt')
174 req_path_2 = os.path.join(destination_directory, 'pip-list-test-tools.txt')
175 create_virtual_env(venv_path=env_path,
176 python_path=python_path,
177 requirements_paths=[req_path, req_path_2],
178 verbose=verbose)
179
180 return PreparedEnv(github_repo=python3_environment.repository,
181 actual_commit_id=python3_environment.actual_commit_id,
182 compare_commit_id=python3_environment.compare_commit_id,
183 destination_directory=destination_directory,
184 virtual_env_path=env_path)
185
[end of dev_tools/env_tools.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/dev_tools/env_tools.py b/dev_tools/env_tools.py
--- a/dev_tools/env_tools.py
+++ b/dev_tools/env_tools.py
@@ -117,13 +117,18 @@
base_path = cast(str, env.destination_directory)
env_path = os.path.join(base_path, env_name)
req_path = os.path.join(base_path, 'requirements.txt')
- req_path_2 = os.path.join(base_path,
- 'dev_tools',
- 'conf',
- 'pip-list-dev-tools.txt')
+ dev_req_path = os.path.join(base_path,
+ 'dev_tools',
+ 'conf',
+ 'pip-list-dev-tools.txt')
+ contrib_req_path = os.path.join(base_path,
+ 'cirq',
+ 'contrib',
+ 'contrib-requirements.txt')
+ rev_paths = [req_path, dev_req_path, contrib_req_path]
create_virtual_env(venv_path=env_path,
python_path=python_path,
- requirements_paths=[req_path, req_path_2],
+ requirements_paths=rev_paths,
verbose=verbose)
return PreparedEnv(github_repo=env.repository,
@@ -171,10 +176,16 @@
env_path = os.path.join(destination_directory, env_name)
# (These files are output by dev_tools/python2.7-generate.sh.)
req_path = os.path.join(destination_directory, 'requirements.txt')
- req_path_2 = os.path.join(destination_directory, 'pip-list-test-tools.txt')
+ dev_req_path = os.path.join(destination_directory,
+ 'pip-list-test-tools.txt')
+ contrib_req_path = os.path.join(destination_directory,
+ 'cirq',
+ 'contrib',
+ 'contrib-requirements.txt')
+ req_paths = [req_path, dev_req_path, contrib_req_path]
create_virtual_env(venv_path=env_path,
python_path=python_path,
- requirements_paths=[req_path, req_path_2],
+ requirements_paths=req_paths,
verbose=verbose)
return PreparedEnv(github_repo=python3_environment.repository,
| {"golden_diff": "diff --git a/dev_tools/env_tools.py b/dev_tools/env_tools.py\n--- a/dev_tools/env_tools.py\n+++ b/dev_tools/env_tools.py\n@@ -117,13 +117,18 @@\n base_path = cast(str, env.destination_directory)\n env_path = os.path.join(base_path, env_name)\n req_path = os.path.join(base_path, 'requirements.txt')\n- req_path_2 = os.path.join(base_path,\n- 'dev_tools',\n- 'conf',\n- 'pip-list-dev-tools.txt')\n+ dev_req_path = os.path.join(base_path,\n+ 'dev_tools',\n+ 'conf',\n+ 'pip-list-dev-tools.txt')\n+ contrib_req_path = os.path.join(base_path,\n+ 'cirq',\n+ 'contrib',\n+ 'contrib-requirements.txt')\n+ rev_paths = [req_path, dev_req_path, contrib_req_path]\n create_virtual_env(venv_path=env_path,\n python_path=python_path,\n- requirements_paths=[req_path, req_path_2],\n+ requirements_paths=rev_paths,\n verbose=verbose)\n \n return PreparedEnv(github_repo=env.repository,\n@@ -171,10 +176,16 @@\n env_path = os.path.join(destination_directory, env_name)\n # (These files are output by dev_tools/python2.7-generate.sh.)\n req_path = os.path.join(destination_directory, 'requirements.txt')\n- req_path_2 = os.path.join(destination_directory, 'pip-list-test-tools.txt')\n+ dev_req_path = os.path.join(destination_directory,\n+ 'pip-list-test-tools.txt')\n+ contrib_req_path = os.path.join(destination_directory,\n+ 'cirq',\n+ 'contrib',\n+ 'contrib-requirements.txt')\n+ req_paths = [req_path, dev_req_path, contrib_req_path]\n create_virtual_env(venv_path=env_path,\n python_path=python_path,\n- requirements_paths=[req_path, req_path_2],\n+ requirements_paths=req_paths,\n verbose=verbose)\n \n return PreparedEnv(github_repo=python3_environment.repository,\n", "issue": "check.sh fails on clean master\nJust merged master and ran check.sh and it is failing due to tensorflow\r\n\r\n```\r\n(cirq) [dabacon:cirq:~/git/cirq]$./continuous-integration/check.sh \r\nrun: ('virtualenv', '--quiet', '-p', '/usr/bin/python3.5', '/tmp/test-cirq-kp5mfrah/.test_virtualenv')\r\nrun: ('/tmp/test-cirq-kp5mfrah/.test_virtualenv/bin/pip', 'install', '--quiet', '-r', '/tmp/test-cirq-kp5mfrah/requirements.txt')\r\nrun: ('/tmp/test-cirq-kp5mfrah/.test_virtualenv/bin/pip', 'install', '--quiet', '-r', '/tmp/test-cirq-kp5mfrah/dev_tools/conf/pip-list-dev-tools.txt')\r\n\r\nRunning pylint\r\nFinished pylint\r\n\r\n\r\nRunning typecheck\r\nFinished typecheck\r\n\r\n\r\nRunning pytest\r\n\r\n==================================== ERRORS ====================================\r\n_______ ERROR collecting cirq/contrib/tpu/circuit_to_tensorflow_test.py ________\r\nImportError while importing test module '/tmp/test-cirq-kp5mfrah/cirq/contrib/tpu/circuit_to_tensorflow_test.py'.\r\nHint: make sure your test modules/packages have valid Python names.\r\nTraceback:\r\ncirq/contrib/tpu/__init__.py:75: in <module>\r\n from cirq.contrib.tpu.circuit_to_tensorflow import (\r\ncirq/contrib/tpu/circuit_to_tensorflow.py:20: in <module>\r\n import tensorflow as tf\r\nE ImportError: No module named 'tensorflow'\r\n\r\n----------- coverage: platform linux, python 3.5.3-final-0 -----------\r\nCoverage annotated source written next to source\r\n\r\n!!!!!!!!!!!!!!!!!!! 
Interrupted: 1 errors during collection !!!!!!!!!!!!!!!!!!!!\r\n1 error in 34.82 seconds\r\nFinished pytest\r\n```\n", "before_files": [{"content": "# Copyright 2018 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport os\nimport shutil\nimport sys\nfrom typing import Optional, Iterable, Callable, cast\n\nfrom dev_tools import shell_tools, git_env_tools\nfrom dev_tools.github_repository import GithubRepository\nfrom dev_tools.prepared_env import PreparedEnv\n\n\ndef get_unhidden_ungenerated_python_files(directory: str) -> Iterable[str]:\n \"\"\"Iterates through relevant python files within the given directory.\n\n Args:\n directory: The top-level directory to explore.\n\n Yields:\n File paths.\n \"\"\"\n for dirpath, dirnames, filenames in os.walk(directory, topdown=True):\n if os.path.split(dirpath)[-1].startswith('.'):\n dirnames.clear()\n continue\n\n for filename in filenames:\n if filename.endswith('.py') and not filename.endswith('_pb2.py'):\n yield os.path.join(dirpath, filename)\n\n\ndef create_virtual_env(venv_path: str,\n requirements_paths: Iterable[str],\n python_path: str,\n verbose: bool) -> None:\n \"\"\"Creates a new virtual environment and then installs dependencies.\n\n Args:\n venv_path: Where to put the virtual environment's state.\n requirements_paths: Location of requirements files to -r install.\n python_path: The python binary to use.\n verbose: When set, more progress output is produced.\n \"\"\"\n shell_tools.run_cmd('virtualenv',\n None if verbose else '--quiet',\n '-p',\n python_path,\n venv_path,\n out=sys.stderr)\n pip_path = os.path.join(venv_path, 'bin', 'pip')\n for req_path in requirements_paths:\n shell_tools.run_cmd(pip_path,\n 'install',\n None if verbose else '--quiet',\n '-r',\n req_path,\n out=sys.stderr)\n\n\ndef prepare_temporary_test_environment(\n destination_directory: str,\n repository: GithubRepository,\n pull_request_number: Optional[int],\n verbose: bool,\n env_name: str = '.test_virtualenv',\n python_path: str = '/usr/bin/python3.5',\n commit_ids_known_callback: Callable[[PreparedEnv], None] = None\n) -> PreparedEnv:\n \"\"\"Prepares a temporary test environment at the (existing empty) directory.\n\n Args:\n destination_directory: The location to put files. 
The caller is\n responsible for deleting the directory, whether or not this method\n succeeds or fails.\n repository: The github repository to download content from, if a pull\n request number is given.\n pull_request_number: If set, test content is fetched from github.\n Otherwise copies of local files are used.\n verbose: When set, more progress output is produced.\n env_name: The name to use for the virtual environment.\n python_path: Location of the python binary to use within the\n virtual environment.\n commit_ids_known_callback: A function to call when the actual commit id\n being tested is known, before the virtual environment is ready.\n\n Returns:\n Commit ids corresponding to content to test/compare.\n \"\"\"\n # Fetch content.\n if pull_request_number is not None:\n env = git_env_tools.fetch_github_pull_request(\n destination_directory=destination_directory,\n repository=repository,\n pull_request_number=pull_request_number,\n verbose=verbose)\n else:\n env = git_env_tools.fetch_local_files(\n destination_directory=destination_directory,\n verbose=verbose)\n\n if commit_ids_known_callback is not None:\n commit_ids_known_callback(env)\n\n # Create virtual environment.\n base_path = cast(str, env.destination_directory)\n env_path = os.path.join(base_path, env_name)\n req_path = os.path.join(base_path, 'requirements.txt')\n req_path_2 = os.path.join(base_path,\n 'dev_tools',\n 'conf',\n 'pip-list-dev-tools.txt')\n create_virtual_env(venv_path=env_path,\n python_path=python_path,\n requirements_paths=[req_path, req_path_2],\n verbose=verbose)\n\n return PreparedEnv(github_repo=env.repository,\n actual_commit_id=env.actual_commit_id,\n compare_commit_id=env.compare_commit_id,\n destination_directory=env.destination_directory,\n virtual_env_path=env_path)\n\n\ndef derive_temporary_python2_environment(\n destination_directory: str,\n python3_environment: PreparedEnv,\n verbose: bool,\n env_name: str = '.test_virtualenv_py2',\n python_path: str = \"/usr/bin/python2.7\") -> PreparedEnv:\n \"\"\"Creates a python 2.7 environment starting from a prepared python 3 one.\n\n Args:\n destination_directory: Where to put the python 2 environment.\n python3_environment: The prepared environment to start from.\n verbose: When set, more progress output is produced.\n env_name: The name to use for the virtualenv directory.\n python_path: The python binary to use.\n\n Returns:\n A description of the environment that was prepared.\n \"\"\"\n\n shutil.rmtree(destination_directory)\n input_directory = cast(str, python3_environment.destination_directory)\n os.chdir(input_directory)\n conversion_script_path = os.path.join(\n input_directory,\n 'dev_tools',\n 'python2.7-generate.sh')\n shell_tools.run_cmd('bash',\n conversion_script_path,\n destination_directory,\n input_directory,\n python3_environment.virtual_env_path,\n out=sys.stderr)\n os.chdir(destination_directory)\n\n # Create virtual environment.\n env_path = os.path.join(destination_directory, env_name)\n # (These files are output by dev_tools/python2.7-generate.sh.)\n req_path = os.path.join(destination_directory, 'requirements.txt')\n req_path_2 = os.path.join(destination_directory, 'pip-list-test-tools.txt')\n create_virtual_env(venv_path=env_path,\n python_path=python_path,\n requirements_paths=[req_path, req_path_2],\n verbose=verbose)\n\n return PreparedEnv(github_repo=python3_environment.repository,\n actual_commit_id=python3_environment.actual_commit_id,\n compare_commit_id=python3_environment.compare_commit_id,\n 
destination_directory=destination_directory,\n virtual_env_path=env_path)\n", "path": "dev_tools/env_tools.py"}]} | 2,821 | 464 |
gh_patches_debug_22578 | rasdani/github-patches | git_diff | quantumlib__Cirq-4690 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Should an IonDevice accept any qubit type?
Some docstrings ([here](https://github.com/quantumlib/Cirq/blob/master/cirq/ion/ion_device.py#L26)) state qubits are on a line, while others ([here](https://github.com/quantumlib/Cirq/blob/master/cirq/ion/ion_device.py#L42)) state qubits are identified by their x, y location. I believe the latter is a typo due to the [annotation](https://github.com/quantumlib/Cirq/blob/master/cirq/ion/ion_device.py#L34).
If so, should `__init__` raise an error if the input qubits are not `LineQubit`s? (The `at` and `neighbors_of` methods assume `LineQubit`s.)
</issue>
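A minimal sketch of the guard the issue asks about, rejecting non-`LineQubit` qubits when the device is constructed. This is a hypothetical helper, not the repository's actual implementation:

```python
from cirq import devices

def validate_line_qubits(qubits) -> frozenset:
    """Hypothetical guard: return the qubits as a frozenset, raising a
    TypeError if any of them is not a cirq.LineQubit."""
    qubits = frozenset(qubits)
    bad = {type(q).__name__ for q in qubits if not isinstance(q, devices.LineQubit)}
    if bad:
        raise TypeError(f'IonDevice only supports cirq.LineQubit, got: {bad}')
    return qubits
```

Calling such a check from `__init__` would surface the failure immediately instead of deferring it to `at` or `neighbors_of`.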
<code>
[start of cirq-core/cirq/ion/ion_device.py]
1 # Copyright 2018 The Cirq Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 from typing import Any, FrozenSet, Iterable, Optional, Set, TYPE_CHECKING
16
17 from cirq import circuits, value, devices, ops, protocols
18 from cirq.ion import convert_to_ion_gates
19
20 if TYPE_CHECKING:
21 import cirq
22
23
24 def get_ion_gateset() -> ops.Gateset:
25 return ops.Gateset(
26 ops.XXPowGate,
27 ops.MeasurementGate,
28 ops.XPowGate,
29 ops.YPowGate,
30 ops.ZPowGate,
31 ops.PhasedXPowGate,
32 unroll_circuit_op=False,
33 accept_global_phase_op=False,
34 )
35
36
37 @value.value_equality
38 class IonDevice(devices.Device):
39 """A device with qubits placed on a line.
40
41 Qubits have all-to-all connectivity.
42 """
43
44 def __init__(
45 self,
46 measurement_duration: 'cirq.DURATION_LIKE',
47 twoq_gates_duration: 'cirq.DURATION_LIKE',
48 oneq_gates_duration: 'cirq.DURATION_LIKE',
49 qubits: Iterable[devices.LineQubit],
50 ) -> None:
51 """Initializes the description of an ion trap device.
52
53 Args:
54 measurement_duration: The maximum duration of a measurement.
55 twoq_gates_duration: The maximum duration of a two qubit operation.
56 oneq_gates_duration: The maximum duration of a single qubit
57 operation.
58 qubits: Qubits on the device, identified by their x, y location.
59 """
60 self._measurement_duration = value.Duration(measurement_duration)
61 self._twoq_gates_duration = value.Duration(twoq_gates_duration)
62 self._oneq_gates_duration = value.Duration(oneq_gates_duration)
63 self.qubits = frozenset(qubits)
64 self.gateset = get_ion_gateset()
65
66 def qubit_set(self) -> FrozenSet['cirq.LineQubit']:
67 return self.qubits
68
69 def qid_pairs(self) -> FrozenSet['cirq.SymmetricalQidPair']:
70 """Qubits have all-to-all connectivity, so returns all pairs.
71
72 Returns:
73 All qubit pairs on the device.
74 """
75 qs = self.qubits
76 return frozenset([devices.SymmetricalQidPair(q, q2) for q in qs for q2 in qs if q < q2])
77
78 def decompose_operation(self, operation: ops.Operation) -> ops.OP_TREE:
79 return convert_to_ion_gates.ConvertToIonGates().convert_one(operation)
80
81 def decompose_circuit(self, circuit: circuits.Circuit) -> circuits.Circuit:
82 return convert_to_ion_gates.ConvertToIonGates().convert_circuit(circuit)
83
84 def duration_of(self, operation):
85 if isinstance(operation.gate, ops.XXPowGate):
86 return self._twoq_gates_duration
87 if isinstance(
88 operation.gate, (ops.XPowGate, ops.YPowGate, ops.ZPowGate, ops.PhasedXPowGate)
89 ):
90 return self._oneq_gates_duration
91 if isinstance(operation.gate, ops.MeasurementGate):
92 return self._measurement_duration
93 raise ValueError(f'Unsupported gate type: {operation!r}')
94
95 def validate_gate(self, gate: ops.Gate):
96 if gate not in self.gateset:
97 raise ValueError(f'Unsupported gate type: {gate!r}')
98
99 def validate_operation(self, operation):
100 if not isinstance(operation, ops.GateOperation):
101 raise ValueError(f'Unsupported operation: {operation!r}')
102
103 self.validate_gate(operation.gate)
104
105 for q in operation.qubits:
106 if not isinstance(q, devices.LineQubit):
107 raise ValueError(f'Unsupported qubit type: {q!r}')
108 if q not in self.qubits:
109 raise ValueError(f'Qubit not on device: {q!r}')
110
111 def validate_circuit(self, circuit: circuits.AbstractCircuit):
112 super().validate_circuit(circuit)
113 _verify_unique_measurement_keys(circuit.all_operations())
114
115 def at(self, position: int) -> Optional[devices.LineQubit]:
116 """Returns the qubit at the given position, if there is one, else None."""
117 q = devices.LineQubit(position)
118 return q if q in self.qubits else None
119
120 def neighbors_of(self, qubit: devices.LineQubit) -> Iterable[devices.LineQubit]:
121 """Returns the qubits that the given qubit can interact with."""
122 possibles = [
123 devices.LineQubit(qubit.x + 1),
124 devices.LineQubit(qubit.x - 1),
125 ]
126 return [e for e in possibles if e in self.qubits]
127
128 def __repr__(self) -> str:
129 return (
130 f'IonDevice(measurement_duration={self._measurement_duration!r}, '
131 f'twoq_gates_duration={self._twoq_gates_duration!r}, '
132 f'oneq_gates_duration={self._oneq_gates_duration!r} '
133 f'qubits={sorted(self.qubits)!r})'
134 )
135
136 def __str__(self) -> str:
137 diagram = circuits.TextDiagramDrawer()
138
139 for q in self.qubits:
140 diagram.write(q.x, 0, str(q))
141 for q2 in self.neighbors_of(q):
142 diagram.grid_line(q.x, 0, q2.x, 0)
143
144 return diagram.render(horizontal_spacing=3, vertical_spacing=2, use_unicode_characters=True)
145
146 def _repr_pretty_(self, p: Any, cycle: bool):
147 """iPython (Jupyter) pretty print."""
148 p.text("IonDevice(...)" if cycle else self.__str__())
149
150 def _value_equality_values_(self) -> Any:
151 return (
152 self._measurement_duration,
153 self._twoq_gates_duration,
154 self._oneq_gates_duration,
155 self.qubits,
156 )
157
158
159 def _verify_unique_measurement_keys(operations: Iterable[ops.Operation]):
160 seen: Set[str] = set()
161 for op in operations:
162 if isinstance(op.gate, ops.MeasurementGate):
163 meas = op.gate
164 key = protocols.measurement_key_name(meas)
165 if key in seen:
166 raise ValueError(f'Measurement key {key} repeated')
167 seen.add(key)
168
[end of cirq-core/cirq/ion/ion_device.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/cirq-core/cirq/ion/ion_device.py b/cirq-core/cirq/ion/ion_device.py
--- a/cirq-core/cirq/ion/ion_device.py
+++ b/cirq-core/cirq/ion/ion_device.py
@@ -55,11 +55,19 @@
twoq_gates_duration: The maximum duration of a two qubit operation.
oneq_gates_duration: The maximum duration of a single qubit
operation.
- qubits: Qubits on the device, identified by their x, y location.
+ qubits: Qubits on the device, identified by their x location.
+
+ Raises:
+ TypeError: If not all the qubits supplied are `cirq.LineQubit`s.
"""
self._measurement_duration = value.Duration(measurement_duration)
self._twoq_gates_duration = value.Duration(twoq_gates_duration)
self._oneq_gates_duration = value.Duration(oneq_gates_duration)
+ if not all(isinstance(qubit, devices.LineQubit) for qubit in qubits):
+ raise TypeError(
+ "All qubits were not of type cirq.LineQubit, instead were "
+ f"{set(type(qubit) for qubit in qubits)}"
+ )
self.qubits = frozenset(qubits)
self.gateset = get_ion_gateset()
| {"golden_diff": "diff --git a/cirq-core/cirq/ion/ion_device.py b/cirq-core/cirq/ion/ion_device.py\n--- a/cirq-core/cirq/ion/ion_device.py\n+++ b/cirq-core/cirq/ion/ion_device.py\n@@ -55,11 +55,19 @@\n twoq_gates_duration: The maximum duration of a two qubit operation.\n oneq_gates_duration: The maximum duration of a single qubit\n operation.\n- qubits: Qubits on the device, identified by their x, y location.\n+ qubits: Qubits on the device, identified by their x location.\n+\n+ Raises:\n+ TypeError: If not all the qubits supplied are `cirq.LineQubit`s.\n \"\"\"\n self._measurement_duration = value.Duration(measurement_duration)\n self._twoq_gates_duration = value.Duration(twoq_gates_duration)\n self._oneq_gates_duration = value.Duration(oneq_gates_duration)\n+ if not all(isinstance(qubit, devices.LineQubit) for qubit in qubits):\n+ raise TypeError(\n+ \"All qubits were not of type cirq.LineQubit, instead were \"\n+ f\"{set(type(qubit) for qubit in qubits)}\"\n+ )\n self.qubits = frozenset(qubits)\n self.gateset = get_ion_gateset()\n", "issue": "Should an IonDevice accept any qubit type?\nSome docstrings ([here](https://github.com/quantumlib/Cirq/blob/master/cirq/ion/ion_device.py#L26)) state qubits are one a line, while others ([here](https://github.com/quantumlib/Cirq/blob/master/cirq/ion/ion_device.py#L42)) state qubits are identified by their x, y location. I believe the latter is a typo due to the [annotation](https://github.com/quantumlib/Cirq/blob/master/cirq/ion/ion_device.py#L34).\r\n\r\nIf so, should `__init__` raise an error if the input qubits are not `LineQubit`s? (The `at` and `neighbors_of` methods assume `LineQubit`s.)\n", "before_files": [{"content": "# Copyright 2018 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom typing import Any, FrozenSet, Iterable, Optional, Set, TYPE_CHECKING\n\nfrom cirq import circuits, value, devices, ops, protocols\nfrom cirq.ion import convert_to_ion_gates\n\nif TYPE_CHECKING:\n import cirq\n\n\ndef get_ion_gateset() -> ops.Gateset:\n return ops.Gateset(\n ops.XXPowGate,\n ops.MeasurementGate,\n ops.XPowGate,\n ops.YPowGate,\n ops.ZPowGate,\n ops.PhasedXPowGate,\n unroll_circuit_op=False,\n accept_global_phase_op=False,\n )\n\n\[email protected]_equality\nclass IonDevice(devices.Device):\n \"\"\"A device with qubits placed on a line.\n\n Qubits have all-to-all connectivity.\n \"\"\"\n\n def __init__(\n self,\n measurement_duration: 'cirq.DURATION_LIKE',\n twoq_gates_duration: 'cirq.DURATION_LIKE',\n oneq_gates_duration: 'cirq.DURATION_LIKE',\n qubits: Iterable[devices.LineQubit],\n ) -> None:\n \"\"\"Initializes the description of an ion trap device.\n\n Args:\n measurement_duration: The maximum duration of a measurement.\n twoq_gates_duration: The maximum duration of a two qubit operation.\n oneq_gates_duration: The maximum duration of a single qubit\n operation.\n qubits: Qubits on the device, identified by their x, y location.\n \"\"\"\n self._measurement_duration = value.Duration(measurement_duration)\n 
self._twoq_gates_duration = value.Duration(twoq_gates_duration)\n self._oneq_gates_duration = value.Duration(oneq_gates_duration)\n self.qubits = frozenset(qubits)\n self.gateset = get_ion_gateset()\n\n def qubit_set(self) -> FrozenSet['cirq.LineQubit']:\n return self.qubits\n\n def qid_pairs(self) -> FrozenSet['cirq.SymmetricalQidPair']:\n \"\"\"Qubits have all-to-all connectivity, so returns all pairs.\n\n Returns:\n All qubit pairs on the device.\n \"\"\"\n qs = self.qubits\n return frozenset([devices.SymmetricalQidPair(q, q2) for q in qs for q2 in qs if q < q2])\n\n def decompose_operation(self, operation: ops.Operation) -> ops.OP_TREE:\n return convert_to_ion_gates.ConvertToIonGates().convert_one(operation)\n\n def decompose_circuit(self, circuit: circuits.Circuit) -> circuits.Circuit:\n return convert_to_ion_gates.ConvertToIonGates().convert_circuit(circuit)\n\n def duration_of(self, operation):\n if isinstance(operation.gate, ops.XXPowGate):\n return self._twoq_gates_duration\n if isinstance(\n operation.gate, (ops.XPowGate, ops.YPowGate, ops.ZPowGate, ops.PhasedXPowGate)\n ):\n return self._oneq_gates_duration\n if isinstance(operation.gate, ops.MeasurementGate):\n return self._measurement_duration\n raise ValueError(f'Unsupported gate type: {operation!r}')\n\n def validate_gate(self, gate: ops.Gate):\n if gate not in self.gateset:\n raise ValueError(f'Unsupported gate type: {gate!r}')\n\n def validate_operation(self, operation):\n if not isinstance(operation, ops.GateOperation):\n raise ValueError(f'Unsupported operation: {operation!r}')\n\n self.validate_gate(operation.gate)\n\n for q in operation.qubits:\n if not isinstance(q, devices.LineQubit):\n raise ValueError(f'Unsupported qubit type: {q!r}')\n if q not in self.qubits:\n raise ValueError(f'Qubit not on device: {q!r}')\n\n def validate_circuit(self, circuit: circuits.AbstractCircuit):\n super().validate_circuit(circuit)\n _verify_unique_measurement_keys(circuit.all_operations())\n\n def at(self, position: int) -> Optional[devices.LineQubit]:\n \"\"\"Returns the qubit at the given position, if there is one, else None.\"\"\"\n q = devices.LineQubit(position)\n return q if q in self.qubits else None\n\n def neighbors_of(self, qubit: devices.LineQubit) -> Iterable[devices.LineQubit]:\n \"\"\"Returns the qubits that the given qubit can interact with.\"\"\"\n possibles = [\n devices.LineQubit(qubit.x + 1),\n devices.LineQubit(qubit.x - 1),\n ]\n return [e for e in possibles if e in self.qubits]\n\n def __repr__(self) -> str:\n return (\n f'IonDevice(measurement_duration={self._measurement_duration!r}, '\n f'twoq_gates_duration={self._twoq_gates_duration!r}, '\n f'oneq_gates_duration={self._oneq_gates_duration!r} '\n f'qubits={sorted(self.qubits)!r})'\n )\n\n def __str__(self) -> str:\n diagram = circuits.TextDiagramDrawer()\n\n for q in self.qubits:\n diagram.write(q.x, 0, str(q))\n for q2 in self.neighbors_of(q):\n diagram.grid_line(q.x, 0, q2.x, 0)\n\n return diagram.render(horizontal_spacing=3, vertical_spacing=2, use_unicode_characters=True)\n\n def _repr_pretty_(self, p: Any, cycle: bool):\n \"\"\"iPython (Jupyter) pretty print.\"\"\"\n p.text(\"IonDevice(...)\" if cycle else self.__str__())\n\n def _value_equality_values_(self) -> Any:\n return (\n self._measurement_duration,\n self._twoq_gates_duration,\n self._oneq_gates_duration,\n self.qubits,\n )\n\n\ndef _verify_unique_measurement_keys(operations: Iterable[ops.Operation]):\n seen: Set[str] = set()\n for op in operations:\n if isinstance(op.gate, 
ops.MeasurementGate):\n meas = op.gate\n key = protocols.measurement_key_name(meas)\n if key in seen:\n raise ValueError(f'Measurement key {key} repeated')\n seen.add(key)\n", "path": "cirq-core/cirq/ion/ion_device.py"}]} | 2,604 | 296 |
gh_patches_debug_35827 | rasdani/github-patches | git_diff | mesonbuild__meson-12085 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`dependency('vulkan', version: ['>=1.X.X'])` fails on Windows
**Describe the bug**
`dependency('vulkan', version: ['>=1.3.0'])` fails on Windows if a version is specified.
The Vulkan SDK is installed, and the environment variable (`VULKAN_SDK=C:\VulkanSDK\1.3.250.1`) is set.
Edit: The same setup does work on Linux.
**To Reproduce**
Install a recent Vulkan SDK (>= 1.3.0) and use this meson.build:
```python
project('test-project', 'cpp')
# this does not work
vulkan = dependency('vulkan', version: ['>=1.3.0'])
# this does work
# vulkan = dependency('vulkan')
executable('test-project', ['main.cpp'], dependencies : [vulkan])
```
**Expected behavior**
Vulkan is detected correctly.
**system parameters**
* native build
* Windows 11
* Python 3.9.13
* Meson 1.2.0
* Ninja 1.11.0
</issue>
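A minimal sketch of one fallback for answering the version constraint: pulling a version number out of the `VULKAN_SDK` path, which matches the default Windows install layout shown above. The helper and the path format are assumptions:

```python
import os
import re
from typing import Optional

def guess_vulkan_version_from_sdk(vulkan_sdk: str) -> Optional[str]:
    """Hypothetical fallback: extract '1.3.250.1' from a path such as
    C:\\VulkanSDK\\1.3.250.1 when the version cannot be queried directly."""
    match = re.search(
        rf'VulkanSDK{re.escape(os.path.sep)}([0-9]+(?:\.[0-9]+)+)', vulkan_sdk)
    return match.group(1) if match else None
```

A more robust route, and the one the patch later in this entry takes, is to compile and run a tiny program that prints `VK_HEADER_VERSION_COMPLETE`, keeping a path heuristic like this only as a fallback.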
<code>
[start of mesonbuild/dependencies/ui.py]
1 # Copyright 2013-2017 The Meson development team
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # This file contains the detection logic for external dependencies that
16 # are UI-related.
17 from __future__ import annotations
18
19 import os
20 import subprocess
21 import typing as T
22
23 from .. import mlog
24 from .. import mesonlib
25 from ..mesonlib import (
26 Popen_safe, extract_as_list, version_compare_many
27 )
28 from ..environment import detect_cpu_family
29
30 from .base import DependencyException, DependencyMethods, DependencyTypeName, SystemDependency
31 from .configtool import ConfigToolDependency
32 from .detect import packages
33 from .factory import DependencyFactory
34
35 if T.TYPE_CHECKING:
36 from ..environment import Environment
37
38
39 class GLDependencySystem(SystemDependency):
40 def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
41 super().__init__(name, environment, kwargs)
42
43 if self.env.machines[self.for_machine].is_darwin():
44 self.is_found = True
45 # FIXME: Use AppleFrameworks dependency
46 self.link_args = ['-framework', 'OpenGL']
47 # FIXME: Detect version using self.clib_compiler
48 return
49 elif self.env.machines[self.for_machine].is_windows():
50 self.is_found = True
51 # FIXME: Use self.clib_compiler.find_library()
52 self.link_args = ['-lopengl32']
53 # FIXME: Detect version using self.clib_compiler
54 return
55 else:
56 links = self.clib_compiler.find_library('GL', environment, [])
57 has_header = self.clib_compiler.has_header('GL/gl.h', '', environment)[0]
58 if links and has_header:
59 self.is_found = True
60 self.link_args = links
61 elif links:
62 raise DependencyException('Found GL runtime library but no development header files')
63
64 class GnuStepDependency(ConfigToolDependency):
65
66 tools = ['gnustep-config']
67 tool_name = 'gnustep-config'
68
69 def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
70 super().__init__('gnustep', environment, kwargs, language='objc')
71 if not self.is_found:
72 return
73 self.modules = kwargs.get('modules', [])
74 self.compile_args = self.filter_args(
75 self.get_config_value(['--objc-flags'], 'compile_args'))
76 self.link_args = self.weird_filter(self.get_config_value(
77 ['--gui-libs' if 'gui' in self.modules else '--base-libs'],
78 'link_args'))
79
80 def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
81 tool = [self.tools[0]]
82 try:
83 p, out = Popen_safe(tool + ['--help'])[:2]
84 except (FileNotFoundError, PermissionError):
85 return (None, None)
86 if p.returncode != returncode:
87 return (None, None)
88 self.config = tool
89 found_version = self.detect_version()
90 if versions and not version_compare_many(found_version, versions)[0]:
91 return (None, found_version)
92
93 return (tool, found_version)
94
95 @staticmethod
96 def weird_filter(elems: T.List[str]) -> T.List[str]:
97 """When building packages, the output of the enclosing Make is
98 sometimes mixed among the subprocess output. I have no idea why. As a
99 hack filter out everything that is not a flag.
100 """
101 return [e for e in elems if e.startswith('-')]
102
103 @staticmethod
104 def filter_args(args: T.List[str]) -> T.List[str]:
105 """gnustep-config returns a bunch of garbage args such as -O2 and so
106 on. Drop everything that is not needed.
107 """
108 result = []
109 for f in args:
110 if f.startswith('-D') \
111 or f.startswith('-f') \
112 or f.startswith('-I') \
113 or f == '-pthread' \
114 or (f.startswith('-W') and not f == '-Wall'):
115 result.append(f)
116 return result
117
118 def detect_version(self) -> str:
119 gmake = self.get_config_value(['--variable=GNUMAKE'], 'variable')[0]
120 makefile_dir = self.get_config_value(['--variable=GNUSTEP_MAKEFILES'], 'variable')[0]
121 # This Makefile has the GNUStep version set
122 base_make = os.path.join(makefile_dir, 'Additional', 'base.make')
123 # Print the Makefile variable passed as the argument. For instance, if
124 # you run the make target `print-SOME_VARIABLE`, this will print the
125 # value of the variable `SOME_VARIABLE`.
126 printver = "print-%:\n\t@echo '$($*)'"
127 env = os.environ.copy()
128 # See base.make to understand why this is set
129 env['FOUNDATION_LIB'] = 'gnu'
130 p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,
131 'print-GNUSTEP_BASE_VERSION'],
132 env=env, write=printver, stdin=subprocess.PIPE)
133 version = o.strip()
134 if not version:
135 mlog.debug("Couldn't detect GNUStep version, falling back to '1'")
136 # Fallback to setting some 1.x version
137 version = '1'
138 return version
139
140 packages['gnustep'] = GnuStepDependency
141
142
143 class SDL2DependencyConfigTool(ConfigToolDependency):
144
145 tools = ['sdl2-config']
146 tool_name = 'sdl2-config'
147
148 def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
149 super().__init__(name, environment, kwargs)
150 if not self.is_found:
151 return
152 self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
153 self.link_args = self.get_config_value(['--libs'], 'link_args')
154
155
156 class WxDependency(ConfigToolDependency):
157
158 tools = ['wx-config-3.0', 'wx-config-3.1', 'wx-config', 'wx-config-gtk3']
159 tool_name = 'wx-config'
160
161 def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
162 super().__init__('WxWidgets', environment, kwargs, language='cpp')
163 if not self.is_found:
164 return
165 self.requested_modules = self.get_requested(kwargs)
166
167 extra_args = []
168 if self.static:
169 extra_args.append('--static=yes')
170
171 # Check to make sure static is going to work
172 err = Popen_safe(self.config + extra_args)[2]
173 if 'No config found to match' in err:
174 mlog.debug('WxWidgets is missing static libraries.')
175 self.is_found = False
176 return
177
178 # wx-config seems to have a cflags as well but since it requires C++,
179 # this should be good, at least for now.
180 self.compile_args = self.get_config_value(['--cxxflags'] + extra_args + self.requested_modules, 'compile_args')
181 self.link_args = self.get_config_value(['--libs'] + extra_args + self.requested_modules, 'link_args')
182
183 @staticmethod
184 def get_requested(kwargs: T.Dict[str, T.Any]) -> T.List[str]:
185 if 'modules' not in kwargs:
186 return []
187 candidates = extract_as_list(kwargs, 'modules')
188 for c in candidates:
189 if not isinstance(c, str):
190 raise DependencyException('wxwidgets module argument is not a string')
191 return candidates
192
193 packages['wxwidgets'] = WxDependency
194
195 class VulkanDependencySystem(SystemDependency):
196
197 def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
198 super().__init__(name, environment, kwargs, language=language)
199
200 try:
201 self.vulkan_sdk = os.environ['VULKAN_SDK']
202 if not os.path.isabs(self.vulkan_sdk):
203 raise DependencyException('VULKAN_SDK must be an absolute path.')
204 except KeyError:
205 self.vulkan_sdk = None
206
207 if self.vulkan_sdk:
208 # TODO: this config might not work on some platforms, fix bugs as reported
209 # we should at least detect other 64-bit platforms (e.g. armv8)
210 lib_name = 'vulkan'
211 lib_dir = 'lib'
212 inc_dir = 'include'
213 if mesonlib.is_windows():
214 lib_name = 'vulkan-1'
215 lib_dir = 'Lib32'
216 inc_dir = 'Include'
217 if detect_cpu_family(self.env.coredata.compilers.host) == 'x86_64':
218 lib_dir = 'Lib'
219
220 # make sure header and lib are valid
221 inc_path = os.path.join(self.vulkan_sdk, inc_dir)
222 header = os.path.join(inc_path, 'vulkan', 'vulkan.h')
223 lib_path = os.path.join(self.vulkan_sdk, lib_dir)
224 find_lib = self.clib_compiler.find_library(lib_name, environment, [lib_path])
225
226 if not find_lib:
227 raise DependencyException('VULKAN_SDK point to invalid directory (no lib)')
228
229 if not os.path.isfile(header):
230 raise DependencyException('VULKAN_SDK point to invalid directory (no include)')
231
232 # XXX: this is very odd, and may deserve being removed
233 self.type_name = DependencyTypeName('vulkan_sdk')
234 self.is_found = True
235 self.compile_args.append('-I' + inc_path)
236 self.link_args.append('-L' + lib_path)
237 self.link_args.append('-l' + lib_name)
238
239 # TODO: find a way to retrieve the version from the sdk?
240 # Usually it is a part of the path to it (but does not have to be)
241 return
242 else:
243 # simply try to guess it, usually works on linux
244 libs = self.clib_compiler.find_library('vulkan', environment, [])
245 if libs is not None and self.clib_compiler.has_header('vulkan/vulkan.h', '', environment, disable_cache=True)[0]:
246 self.is_found = True
247 for lib in libs:
248 self.link_args.append(lib)
249 return
250
251 packages['gl'] = gl_factory = DependencyFactory(
252 'gl',
253 [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
254 system_class=GLDependencySystem,
255 )
256
257 packages['sdl2'] = sdl2_factory = DependencyFactory(
258 'sdl2',
259 [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],
260 configtool_class=SDL2DependencyConfigTool,
261 cmake_name='SDL2',
262 )
263
264 packages['vulkan'] = vulkan_factory = DependencyFactory(
265 'vulkan',
266 [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
267 system_class=VulkanDependencySystem,
268 )
269
[end of mesonbuild/dependencies/ui.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -17,11 +17,13 @@
from __future__ import annotations
import os
+import re
import subprocess
import typing as T
from .. import mlog
from .. import mesonlib
+from ..compilers.compilers import CrossNoRunException
from ..mesonlib import (
Popen_safe, extract_as_list, version_compare_many
)
@@ -235,10 +237,6 @@
self.compile_args.append('-I' + inc_path)
self.link_args.append('-L' + lib_path)
self.link_args.append('-l' + lib_name)
-
- # TODO: find a way to retrieve the version from the sdk?
- # Usually it is a part of the path to it (but does not have to be)
- return
else:
# simply try to guess it, usually works on linux
libs = self.clib_compiler.find_library('vulkan', environment, [])
@@ -246,7 +244,33 @@
self.is_found = True
for lib in libs:
self.link_args.append(lib)
- return
+
+ if self.is_found:
+ get_version = '''\
+#include <stdio.h>
+#include <vulkan/vulkan.h>
+
+int main() {
+ printf("%i.%i.%i", VK_VERSION_MAJOR(VK_HEADER_VERSION_COMPLETE),
+ VK_VERSION_MINOR(VK_HEADER_VERSION_COMPLETE),
+ VK_VERSION_PATCH(VK_HEADER_VERSION_COMPLETE));
+ return 0;
+}
+'''
+ try:
+ run = self.clib_compiler.run(get_version, environment, extra_args=self.compile_args)
+ except CrossNoRunException:
+ run = None
+ if run and run.compiled and run.returncode == 0:
+ self.version = run.stdout
+ elif self.vulkan_sdk:
+ # fall back to heuristics: detect version number in path
+ # matches the default install path on Windows
+ match = re.search(rf'VulkanSDK{re.escape(os.path.sep)}([0-9]+(?:\.[0-9]+)+)', self.vulkan_sdk)
+ if match:
+ self.version = match.group(1)
+ else:
+ mlog.warning(f'Environment variable VULKAN_SDK={self.vulkan_sdk} is present, but Vulkan version could not be extracted.')
packages['gl'] = gl_factory = DependencyFactory(
'gl',
| {"golden_diff": "diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py\n--- a/mesonbuild/dependencies/ui.py\n+++ b/mesonbuild/dependencies/ui.py\n@@ -17,11 +17,13 @@\n from __future__ import annotations\n \n import os\n+import re\n import subprocess\n import typing as T\n \n from .. import mlog\n from .. import mesonlib\n+from ..compilers.compilers import CrossNoRunException\n from ..mesonlib import (\n Popen_safe, extract_as_list, version_compare_many\n )\n@@ -235,10 +237,6 @@\n self.compile_args.append('-I' + inc_path)\n self.link_args.append('-L' + lib_path)\n self.link_args.append('-l' + lib_name)\n-\n- # TODO: find a way to retrieve the version from the sdk?\n- # Usually it is a part of the path to it (but does not have to be)\n- return\n else:\n # simply try to guess it, usually works on linux\n libs = self.clib_compiler.find_library('vulkan', environment, [])\n@@ -246,7 +244,33 @@\n self.is_found = True\n for lib in libs:\n self.link_args.append(lib)\n- return\n+\n+ if self.is_found:\n+ get_version = '''\\\n+#include <stdio.h>\n+#include <vulkan/vulkan.h>\n+\n+int main() {\n+ printf(\"%i.%i.%i\", VK_VERSION_MAJOR(VK_HEADER_VERSION_COMPLETE),\n+ VK_VERSION_MINOR(VK_HEADER_VERSION_COMPLETE),\n+ VK_VERSION_PATCH(VK_HEADER_VERSION_COMPLETE));\n+ return 0;\n+}\n+'''\n+ try:\n+ run = self.clib_compiler.run(get_version, environment, extra_args=self.compile_args)\n+ except CrossNoRunException:\n+ run = None\n+ if run and run.compiled and run.returncode == 0:\n+ self.version = run.stdout\n+ elif self.vulkan_sdk:\n+ # fall back to heuristics: detect version number in path\n+ # matches the default install path on Windows\n+ match = re.search(rf'VulkanSDK{re.escape(os.path.sep)}([0-9]+(?:\\.[0-9]+)+)', self.vulkan_sdk)\n+ if match:\n+ self.version = match.group(1)\n+ else:\n+ mlog.warning(f'Environment variable VULKAN_SDK={self.vulkan_sdk} is present, but Vulkan version could not be extracted.')\n \n packages['gl'] = gl_factory = DependencyFactory(\n 'gl',\n", "issue": "`dependency('vulkan', version: ['>=1.X.X'])` fails on Windows\n**Describe the bug**\r\n`dependency('vulkan', version: ['>=1.3.0'])` fails on Windows if a version is specified.\r\nThe Vulkan SDK is installed, and the environment variable (`VULKAN_SDK=C:\\VulkanSDK\\1.3.250.1`) is set.\r\n\r\nEdit: The same does work on Linux.\r\n\r\n**To Reproduce**\r\nInstall a recent Vulkan SDK (>= 1.3.0) and use this meson.build:\r\n```python\r\nproject('test-project', 'cpp')\r\n\r\n# this does not work\r\nvulkan = dependency('vulkan', version: ['>=1.3.0'])\r\n# this does work\r\n# vulkan = dependency('vulkan')\r\n\r\nexecutable('test-project', ['main.cpp'], dependencies : [vulkan])\r\n```\r\n\r\n**Expected behavior**\r\nVulkan is detected correctly.\r\n\r\n**system parameters**\r\n* native build\r\n* Windows 11\r\n* Python 3.9.13\r\n* Meson 1.2.0\r\n* Ninja 1.11.0\r\n\n", "before_files": [{"content": "# Copyright 2013-2017 The Meson development team\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# This file contains 
the detection logic for external dependencies that\n# are UI-related.\nfrom __future__ import annotations\n\nimport os\nimport subprocess\nimport typing as T\n\nfrom .. import mlog\nfrom .. import mesonlib\nfrom ..mesonlib import (\n Popen_safe, extract_as_list, version_compare_many\n)\nfrom ..environment import detect_cpu_family\n\nfrom .base import DependencyException, DependencyMethods, DependencyTypeName, SystemDependency\nfrom .configtool import ConfigToolDependency\nfrom .detect import packages\nfrom .factory import DependencyFactory\n\nif T.TYPE_CHECKING:\n from ..environment import Environment\n\n\nclass GLDependencySystem(SystemDependency):\n def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:\n super().__init__(name, environment, kwargs)\n\n if self.env.machines[self.for_machine].is_darwin():\n self.is_found = True\n # FIXME: Use AppleFrameworks dependency\n self.link_args = ['-framework', 'OpenGL']\n # FIXME: Detect version using self.clib_compiler\n return\n elif self.env.machines[self.for_machine].is_windows():\n self.is_found = True\n # FIXME: Use self.clib_compiler.find_library()\n self.link_args = ['-lopengl32']\n # FIXME: Detect version using self.clib_compiler\n return\n else:\n links = self.clib_compiler.find_library('GL', environment, [])\n has_header = self.clib_compiler.has_header('GL/gl.h', '', environment)[0]\n if links and has_header:\n self.is_found = True\n self.link_args = links\n elif links:\n raise DependencyException('Found GL runtime library but no development header files')\n\nclass GnuStepDependency(ConfigToolDependency):\n\n tools = ['gnustep-config']\n tool_name = 'gnustep-config'\n\n def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:\n super().__init__('gnustep', environment, kwargs, language='objc')\n if not self.is_found:\n return\n self.modules = kwargs.get('modules', [])\n self.compile_args = self.filter_args(\n self.get_config_value(['--objc-flags'], 'compile_args'))\n self.link_args = self.weird_filter(self.get_config_value(\n ['--gui-libs' if 'gui' in self.modules else '--base-libs'],\n 'link_args'))\n\n def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:\n tool = [self.tools[0]]\n try:\n p, out = Popen_safe(tool + ['--help'])[:2]\n except (FileNotFoundError, PermissionError):\n return (None, None)\n if p.returncode != returncode:\n return (None, None)\n self.config = tool\n found_version = self.detect_version()\n if versions and not version_compare_many(found_version, versions)[0]:\n return (None, found_version)\n\n return (tool, found_version)\n\n @staticmethod\n def weird_filter(elems: T.List[str]) -> T.List[str]:\n \"\"\"When building packages, the output of the enclosing Make is\n sometimes mixed among the subprocess output. I have no idea why. As a\n hack filter out everything that is not a flag.\n \"\"\"\n return [e for e in elems if e.startswith('-')]\n\n @staticmethod\n def filter_args(args: T.List[str]) -> T.List[str]:\n \"\"\"gnustep-config returns a bunch of garbage args such as -O2 and so\n on. 
Drop everything that is not needed.\n \"\"\"\n result = []\n for f in args:\n if f.startswith('-D') \\\n or f.startswith('-f') \\\n or f.startswith('-I') \\\n or f == '-pthread' \\\n or (f.startswith('-W') and not f == '-Wall'):\n result.append(f)\n return result\n\n def detect_version(self) -> str:\n gmake = self.get_config_value(['--variable=GNUMAKE'], 'variable')[0]\n makefile_dir = self.get_config_value(['--variable=GNUSTEP_MAKEFILES'], 'variable')[0]\n # This Makefile has the GNUStep version set\n base_make = os.path.join(makefile_dir, 'Additional', 'base.make')\n # Print the Makefile variable passed as the argument. For instance, if\n # you run the make target `print-SOME_VARIABLE`, this will print the\n # value of the variable `SOME_VARIABLE`.\n printver = \"print-%:\\n\\t@echo '$($*)'\"\n env = os.environ.copy()\n # See base.make to understand why this is set\n env['FOUNDATION_LIB'] = 'gnu'\n p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,\n 'print-GNUSTEP_BASE_VERSION'],\n env=env, write=printver, stdin=subprocess.PIPE)\n version = o.strip()\n if not version:\n mlog.debug(\"Couldn't detect GNUStep version, falling back to '1'\")\n # Fallback to setting some 1.x version\n version = '1'\n return version\n\npackages['gnustep'] = GnuStepDependency\n\n\nclass SDL2DependencyConfigTool(ConfigToolDependency):\n\n tools = ['sdl2-config']\n tool_name = 'sdl2-config'\n\n def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):\n super().__init__(name, environment, kwargs)\n if not self.is_found:\n return\n self.compile_args = self.get_config_value(['--cflags'], 'compile_args')\n self.link_args = self.get_config_value(['--libs'], 'link_args')\n\n\nclass WxDependency(ConfigToolDependency):\n\n tools = ['wx-config-3.0', 'wx-config-3.1', 'wx-config', 'wx-config-gtk3']\n tool_name = 'wx-config'\n\n def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):\n super().__init__('WxWidgets', environment, kwargs, language='cpp')\n if not self.is_found:\n return\n self.requested_modules = self.get_requested(kwargs)\n\n extra_args = []\n if self.static:\n extra_args.append('--static=yes')\n\n # Check to make sure static is going to work\n err = Popen_safe(self.config + extra_args)[2]\n if 'No config found to match' in err:\n mlog.debug('WxWidgets is missing static libraries.')\n self.is_found = False\n return\n\n # wx-config seems to have a cflags as well but since it requires C++,\n # this should be good, at least for now.\n self.compile_args = self.get_config_value(['--cxxflags'] + extra_args + self.requested_modules, 'compile_args')\n self.link_args = self.get_config_value(['--libs'] + extra_args + self.requested_modules, 'link_args')\n\n @staticmethod\n def get_requested(kwargs: T.Dict[str, T.Any]) -> T.List[str]:\n if 'modules' not in kwargs:\n return []\n candidates = extract_as_list(kwargs, 'modules')\n for c in candidates:\n if not isinstance(c, str):\n raise DependencyException('wxwidgets module argument is not a string')\n return candidates\n\npackages['wxwidgets'] = WxDependency\n\nclass VulkanDependencySystem(SystemDependency):\n\n def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:\n super().__init__(name, environment, kwargs, language=language)\n\n try:\n self.vulkan_sdk = os.environ['VULKAN_SDK']\n if not os.path.isabs(self.vulkan_sdk):\n raise DependencyException('VULKAN_SDK must be an absolute path.')\n except KeyError:\n self.vulkan_sdk = None\n\n if 
self.vulkan_sdk:\n # TODO: this config might not work on some platforms, fix bugs as reported\n # we should at least detect other 64-bit platforms (e.g. armv8)\n lib_name = 'vulkan'\n lib_dir = 'lib'\n inc_dir = 'include'\n if mesonlib.is_windows():\n lib_name = 'vulkan-1'\n lib_dir = 'Lib32'\n inc_dir = 'Include'\n if detect_cpu_family(self.env.coredata.compilers.host) == 'x86_64':\n lib_dir = 'Lib'\n\n # make sure header and lib are valid\n inc_path = os.path.join(self.vulkan_sdk, inc_dir)\n header = os.path.join(inc_path, 'vulkan', 'vulkan.h')\n lib_path = os.path.join(self.vulkan_sdk, lib_dir)\n find_lib = self.clib_compiler.find_library(lib_name, environment, [lib_path])\n\n if not find_lib:\n raise DependencyException('VULKAN_SDK point to invalid directory (no lib)')\n\n if not os.path.isfile(header):\n raise DependencyException('VULKAN_SDK point to invalid directory (no include)')\n\n # XXX: this is very odd, and may deserve being removed\n self.type_name = DependencyTypeName('vulkan_sdk')\n self.is_found = True\n self.compile_args.append('-I' + inc_path)\n self.link_args.append('-L' + lib_path)\n self.link_args.append('-l' + lib_name)\n\n # TODO: find a way to retrieve the version from the sdk?\n # Usually it is a part of the path to it (but does not have to be)\n return\n else:\n # simply try to guess it, usually works on linux\n libs = self.clib_compiler.find_library('vulkan', environment, [])\n if libs is not None and self.clib_compiler.has_header('vulkan/vulkan.h', '', environment, disable_cache=True)[0]:\n self.is_found = True\n for lib in libs:\n self.link_args.append(lib)\n return\n\npackages['gl'] = gl_factory = DependencyFactory(\n 'gl',\n [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],\n system_class=GLDependencySystem,\n)\n\npackages['sdl2'] = sdl2_factory = DependencyFactory(\n 'sdl2',\n [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],\n configtool_class=SDL2DependencyConfigTool,\n cmake_name='SDL2',\n)\n\npackages['vulkan'] = vulkan_factory = DependencyFactory(\n 'vulkan',\n [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],\n system_class=VulkanDependencySystem,\n)\n", "path": "mesonbuild/dependencies/ui.py"}]} | 3,971 | 574 |
gh_patches_debug_7834 | rasdani/github-patches | git_diff | python-poetry__poetry-7671 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Poetry 1.4.0: There is no item named `<package>/WHEEL` in the archive
<!--
Hi there! Thank you for submitting a bug report!
Before you submit your issue, please review and follow the instructions at
https://python-poetry.org/docs/contributing/#reporting-bugs. Fully fill out the template below; insufficient
information or bad reproduction instructions will impair the ability of others to help you.
-->
<!-- All the below information must be provided for others to understand and help with your issue. -->
- **Poetry version**: 1.4.0<!-- Replace with version, e.g. from `poetry --version` -->
- **Python version**: 3.11<!-- Replace with the version of Python being used to run Poetry, as reported by `poetry debug info` -->
- **OS version and name**: macOS 13.2.1/MacPorts <!-- Replace with version + name, e.g. Ubuntu 22.04 or macOS 12.6 -->
- **pyproject.toml**: https://gist.github.com/elventear/2410f2f8a05926df27488767cfcab780<!-- Replace with a link to a Gist (https://gist.github.com/) containing your sanitized pyproject.toml -->
<!-- All the below steps should be completed before submitting your issue. Checked checkbox should look like this: [x] -->
- [x] I am on the [latest](https://github.com/python-poetry/poetry/releases/latest) stable Poetry version, installed using a recommended method.
- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.
- [x] I have consulted the [FAQ](https://python-poetry.org/docs/faq/) and [blog](https://python-poetry.org/blog/) for any relevant entries or release notes.
- [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option) and have included the output below.
## Issue
Hypercorn can't be installed with poetry 1.4.0; it used to be possible to install it:
<!-- Now feel free to write your issue, and please be as descriptive as possible! Make sure to include detailed reproduction steps. -->
<!-- Thanks again 🙌 ❤ -->
```
poetry install
Creating virtualenv test in /path/to/.venv
Updating dependencies
Resolving dependencies... (0.4s)
Writing lock file
Package operations: 8 installs, 0 updates, 0 removals
• Installing h11 (0.14.0)
• Installing hpack (4.0.0)
• Installing hyperframe (6.0.1)
• Installing h2 (4.1.0)
• Installing priority (2.0.0)
• Installing toml (0.10.2)
• Installing wsproto (1.2.0)
• Installing hypercorn (0.14.3): Failed
Stack trace:
10 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/executor.py:271 in _execute_operation
269│
270│ try:
→ 271│ result = self._do_execute_operation(operation)
272│ except EnvCommandError as e:
273│ if e.e.returncode == -2:
9 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/executor.py:357 in _do_execute_operation
355│ return 0
356│
→ 357│ result: int = getattr(self, f"_execute_{method}")(operation)
358│
359│ if result != 0:
8 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/executor.py:477 in _execute_install
475│
476│ def _execute_install(self, operation: Install | Update) -> int:
→ 477│ status_code = self._install(operation)
478│
479│ self._save_url_reference(operation)
7 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/executor.py:535 in _install
533│ self._remove(operation.initial_package)
534│
→ 535│ self._wheel_installer.install(archive)
536│ finally:
537│ if cleanup_archive:
6 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/wheel_installer.py:101 in install
99│ def install(self, wheel: Path) -> None:
100│ with WheelFile.open(Path(wheel.as_posix())) as source:
→ 101│ install(
102│ source=source,
103│ destination=self._destination.for_source(source),
5 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/installer/_core.py:77 in install
75│
76│ """
→ 77│ root_scheme = _process_WHEEL_file(source)
78│
79│ # RECORD handling
4 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/installer/_core.py:21 in _process_WHEEL_file
19│ Returns the scheme that the archive root should go in.
20│ """
→ 21│ stream = source.read_dist_info("WHEEL")
22│ metadata = parse_metadata_file(stream)
23│
3 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/installer/sources.py:139 in read_dist_info
137│ """Get contents, from ``filename`` in the dist-info directory."""
138│ path = posixpath.join(self.dist_info_dir, filename)
→ 139│ return self._zipfile.read(path).decode("utf-8")
140│
141│ def get_contents(self) -> Iterator[WheelContentElement]:
2 /opt/local/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/zipfile.py:1507 in read
1505│ def read(self, name, pwd=None):
1506│ """Return file bytes for name."""
→ 1507│ with self.open(name, "r", pwd) as fp:
1508│ return fp.read()
1509│
1 /opt/local/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/zipfile.py:1544 in open
1542│ else:
1543│ # Get info object for name
→ 1544│ zinfo = self.getinfo(name)
1545│
1546│ if mode == 'w':
KeyError
"There is no item named 'Hypercorn-0.14.3.dist-info/WHEEL' in the archive"
at /opt/local/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/zipfile.py:1473 in getinfo
1469│ def getinfo(self, name):
1470│ """Return the instance of ZipInfo given 'name'."""
1471│ info = self.NameToInfo.get(name)
1472│ if info is None:
→ 1473│ raise KeyError(
1474│ 'There is no item named %r in the archive' % name)
1475│
1476│ return info
```
Wondering if there is a case sensitivity issue. Looking at the wheel, I do see there is a file in `hypercorn-0.14.3.dist-info/WHEEL`.
Latest 1.4.0 release causes unclear Issue with aws_psycopg2 install
- **Poetry version**: `1.4.0`
- **Python version**: `python3.9`
<!-- All the below steps should be completed before submitting your issue. Checked checkbox should look like this: [x] -->
- [x] I am on the [latest](https://github.com/python-poetry/poetry/releases/latest) stable Poetry version, installed using a recommended method.
- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.
- [ ] I have consulted the [FAQ](https://python-poetry.org/docs/faq/) and [blog](https://python-poetry.org/blog/) for any relevant entries or release notes.
- [ ] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option) and have included the output below.
## Issue
<!-- Now feel free to write your issue, and please be as descriptive as possible! Make sure to include detailed reproduction steps. -->
<!-- Thanks again 🙌 ❤ -->
When using the latest version, `1.4.0`, we get the following runtime error:
```
Installing sqlalchemy (1.4.46)
AssertionError
In /builds/project-0/.cache/pypoetry/artifacts/39/96/f1/0f021ed26225e8d9035c9bac5a886b024840f841af84bd33f2be466dcb/aws_psycopg2-1.3.8-py3-none-any.whl, psycopg2/_psycopg.cpython-38-aarch64-linux-gnu.so is not mentioned in RECORD
at venv/lib/python3.9/site-packages/installer/sources.py:158 in get_contents
154│ if item.filename[-1:] == "/": # looks like a directory
155│ continue
156│
157│ record = record_mapping.pop(item.filename, None)
→ 158│ assert record is not None, "In {}, {} is not mentioned in RECORD".format(
159│ self._zipfile.filename,
160│ item.filename,
161│ ) # should not happen for valid wheels
162│
```
However, when we downgrade to `poetry==1.3.2` this problem goes away...
</issue>
<code>
[start of src/poetry/installation/wheel_installer.py]
1 from __future__ import annotations
2
3 import os
4 import platform
5 import sys
6
7 from pathlib import Path
8 from typing import TYPE_CHECKING
9
10 from installer import install
11 from installer.destinations import SchemeDictionaryDestination
12 from installer.sources import WheelFile
13
14 from poetry.__version__ import __version__
15 from poetry.utils._compat import WINDOWS
16
17
18 if TYPE_CHECKING:
19 from typing import BinaryIO
20
21 from installer.records import RecordEntry
22 from installer.scripts import LauncherKind
23 from installer.utils import Scheme
24
25 from poetry.utils.env import Env
26
27
28 class WheelDestination(SchemeDictionaryDestination):
29 """ """
30
31 def write_to_fs(
32 self,
33 scheme: Scheme,
34 path: Path | str,
35 stream: BinaryIO,
36 is_executable: bool,
37 ) -> RecordEntry:
38 from installer.records import Hash
39 from installer.records import RecordEntry
40 from installer.utils import copyfileobj_with_hashing
41 from installer.utils import make_file_executable
42
43 target_path = Path(self.scheme_dict[scheme]) / path
44 if target_path.exists():
45 # Contrary to the base library we don't raise an error
46 # here since it can break namespace packages (like Poetry's)
47 pass
48
49 parent_folder = target_path.parent
50 if not parent_folder.exists():
51 # Due to the parallel installation it can happen
52 # that two threads try to create the directory.
53 os.makedirs(parent_folder, exist_ok=True)
54
55 with open(target_path, "wb") as f:
56 hash_, size = copyfileobj_with_hashing(stream, f, self.hash_algorithm)
57
58 if is_executable:
59 make_file_executable(target_path)
60
61 return RecordEntry(str(path), Hash(self.hash_algorithm, hash_), size)
62
63 def for_source(self, source: WheelFile) -> WheelDestination:
64 scheme_dict = self.scheme_dict.copy()
65
66 scheme_dict["headers"] = str(Path(scheme_dict["headers"]) / source.distribution)
67
68 return self.__class__(
69 scheme_dict,
70 interpreter=self.interpreter,
71 script_kind=self.script_kind,
72 bytecode_optimization_levels=self.bytecode_optimization_levels,
73 )
74
75
76 class WheelInstaller:
77 def __init__(self, env: Env) -> None:
78 self._env = env
79
80 script_kind: LauncherKind
81 if not WINDOWS:
82 script_kind = "posix"
83 else:
84 if platform.uname()[4].startswith("arm"):
85 script_kind = "win-arm64" if sys.maxsize > 2**32 else "win-arm"
86 else:
87 script_kind = "win-amd64" if sys.maxsize > 2**32 else "win-ia32"
88
89 schemes = self._env.paths
90 schemes["headers"] = schemes["include"]
91
92 self._destination = WheelDestination(
93 schemes, interpreter=self._env.python, script_kind=script_kind
94 )
95
96 def enable_bytecode_compilation(self, enable: bool = True) -> None:
97 self._destination.bytecode_optimization_levels = (1,) if enable else ()
98
99 def install(self, wheel: Path) -> None:
100 with WheelFile.open(Path(wheel.as_posix())) as source:
101 install(
102 source=source,
103 destination=self._destination.for_source(source),
104 # Additional metadata that is generated by the installation tool.
105 additional_metadata={
106 "INSTALLER": f"Poetry {__version__}".encode(),
107 },
108 )
109
[end of src/poetry/installation/wheel_installer.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/poetry/installation/wheel_installer.py b/src/poetry/installation/wheel_installer.py
--- a/src/poetry/installation/wheel_installer.py
+++ b/src/poetry/installation/wheel_installer.py
@@ -97,7 +97,8 @@
self._destination.bytecode_optimization_levels = (1,) if enable else ()
def install(self, wheel: Path) -> None:
- with WheelFile.open(Path(wheel.as_posix())) as source:
+ with WheelFile.open(wheel) as source:
+ source.validate_record()
install(
source=source,
destination=self._destination.for_source(source),
| {"golden_diff": "diff --git a/src/poetry/installation/wheel_installer.py b/src/poetry/installation/wheel_installer.py\n--- a/src/poetry/installation/wheel_installer.py\n+++ b/src/poetry/installation/wheel_installer.py\n@@ -97,7 +97,8 @@\n self._destination.bytecode_optimization_levels = (1,) if enable else ()\n \n def install(self, wheel: Path) -> None:\n- with WheelFile.open(Path(wheel.as_posix())) as source:\n+ with WheelFile.open(wheel) as source:\n+ source.validate_record()\n install(\n source=source,\n destination=self._destination.for_source(source),\n", "issue": "Poetry 1.4.0: There is no item named `<package>/WHEEL` in the archive\n<!--\r\n Hi there! Thank you for submitting a bug report!\r\n\r\n Before you submit your issue, please review and follow the instructions at\r\n https://python-poetry.org/docs/contributing/#reporting-bugs. Fully fill out the template below; insufficient\r\n information or bad reproduction instructions will impair the ability of others to help you.\r\n-->\r\n\r\n<!-- All the below information must be provided for others to understand and help with your issue. -->\r\n- **Poetry version**: 1.4.0<!-- Replace with version, e.g. from `poetry --version` -->\r\n- **Python version**: 3.11<!-- Replace with the version of Python being used to run Poetry, as reported by `poetry debug info` -->\r\n- **OS version and name**: macOS 13.2.1/MacPorts <!-- Replace with version + name, e.g. Ubuntu 22.04 or macOS 12.6 -->\r\n- **pyproject.toml**: https://gist.github.com/elventear/2410f2f8a05926df27488767cfcab780<!-- Replace with a link to a Gist (https://gist.github.com/) containing your sanitized pyproject.toml -->\r\n\r\n<!-- All the below steps should be completed before submitting your issue. Checked checkbox should look like this: [x] -->\r\n- [x] I am on the [latest](https://github.com/python-poetry/poetry/releases/latest) stable Poetry version, installed using a recommended method.\r\n- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.\r\n- [x] I have consulted the [FAQ](https://python-poetry.org/docs/faq/) and [blog](https://python-poetry.org/blog/) for any relevant entries or release notes.\r\n- [x] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option) and have included the output below.\r\n\r\n## Issue\r\n\r\nHypercorn can't be installed with poetry 1.4.0, it used to be possible to install it:\r\n\r\n<!-- Now feel free to write your issue, and please be as descriptive as possible! Make sure to include detailed reproduction steps. -->\r\n<!-- Thanks again \ud83d\ude4c \u2764 -->\r\n\r\n```\r\npoetry install \r\nCreating virtualenv test in /path/to/.venv\r\nUpdating dependencies\r\nResolving dependencies... 
(0.4s)\r\n\r\nWriting lock file\r\n\r\nPackage operations: 8 installs, 0 updates, 0 removals\r\n\r\n \u2022 Installing h11 (0.14.0)\r\n \u2022 Installing hpack (4.0.0)\r\n \u2022 Installing hyperframe (6.0.1)\r\n \u2022 Installing h2 (4.1.0)\r\n \u2022 Installing priority (2.0.0)\r\n \u2022 Installing toml (0.10.2)\r\n \u2022 Installing wsproto (1.2.0)\r\n \u2022 Installing hypercorn (0.14.3): Failed\r\n\r\n Stack trace:\r\n\r\n 10 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/executor.py:271 in _execute_operation\r\n 269\u2502\r\n 270\u2502 try:\r\n \u2192 271\u2502 result = self._do_execute_operation(operation)\r\n 272\u2502 except EnvCommandError as e:\r\n 273\u2502 if e.e.returncode == -2:\r\n\r\n 9 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/executor.py:357 in _do_execute_operation\r\n 355\u2502 return 0\r\n 356\u2502\r\n \u2192 357\u2502 result: int = getattr(self, f\"_execute_{method}\")(operation)\r\n 358\u2502\r\n 359\u2502 if result != 0:\r\n\r\n 8 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/executor.py:477 in _execute_install\r\n 475\u2502\r\n 476\u2502 def _execute_install(self, operation: Install | Update) -> int:\r\n \u2192 477\u2502 status_code = self._install(operation)\r\n 478\u2502\r\n 479\u2502 self._save_url_reference(operation)\r\n\r\n 7 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/executor.py:535 in _install\r\n 533\u2502 self._remove(operation.initial_package)\r\n 534\u2502\r\n \u2192 535\u2502 self._wheel_installer.install(archive)\r\n 536\u2502 finally:\r\n 537\u2502 if cleanup_archive:\r\n\r\n 6 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/poetry/installation/wheel_installer.py:101 in install\r\n 99\u2502 def install(self, wheel: Path) -> None:\r\n 100\u2502 with WheelFile.open(Path(wheel.as_posix())) as source:\r\n \u2192 101\u2502 install(\r\n 102\u2502 source=source,\r\n 103\u2502 destination=self._destination.for_source(source),\r\n\r\n 5 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/installer/_core.py:77 in install\r\n 75\u2502\r\n 76\u2502 \"\"\"\r\n \u2192 77\u2502 root_scheme = _process_WHEEL_file(source)\r\n 78\u2502\r\n 79\u2502 # RECORD handling\r\n\r\n 4 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/installer/_core.py:21 in _process_WHEEL_file\r\n 19\u2502 Returns the scheme that the archive root should go in.\r\n 20\u2502 \"\"\"\r\n \u2192 21\u2502 stream = source.read_dist_info(\"WHEEL\")\r\n 22\u2502 metadata = parse_metadata_file(stream)\r\n 23\u2502\r\n\r\n 3 ~/.local/pipx/venvs/poetry/lib/python3.11/site-packages/installer/sources.py:139 in read_dist_info\r\n 137\u2502 \"\"\"Get contents, from ``filename`` in the dist-info directory.\"\"\"\r\n 138\u2502 path = posixpath.join(self.dist_info_dir, filename)\r\n \u2192 139\u2502 return self._zipfile.read(path).decode(\"utf-8\")\r\n 140\u2502\r\n 141\u2502 def get_contents(self) -> Iterator[WheelContentElement]:\r\n\r\n 2 /opt/local/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/zipfile.py:1507 in read\r\n 1505\u2502 def read(self, name, pwd=None):\r\n 1506\u2502 \"\"\"Return file bytes for name.\"\"\"\r\n \u2192 1507\u2502 with self.open(name, \"r\", pwd) as fp:\r\n 1508\u2502 return fp.read()\r\n 1509\u2502\r\n\r\n 1 /opt/local/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/zipfile.py:1544 in open\r\n 1542\u2502 else:\r\n 1543\u2502 # Get info object for name\r\n \u2192 1544\u2502 zinfo = self.getinfo(name)\r\n 
1545\u2502\r\n 1546\u2502 if mode == 'w':\r\n\r\n KeyError\r\n\r\n \"There is no item named 'Hypercorn-0.14.3.dist-info/WHEEL' in the archive\"\r\n\r\n at /opt/local/Library/Frameworks/Python.framework/Versions/3.11/lib/python3.11/zipfile.py:1473 in getinfo\r\n 1469\u2502 def getinfo(self, name):\r\n 1470\u2502 \"\"\"Return the instance of ZipInfo given 'name'.\"\"\"\r\n 1471\u2502 info = self.NameToInfo.get(name)\r\n 1472\u2502 if info is None:\r\n \u2192 1473\u2502 raise KeyError(\r\n 1474\u2502 'There is no item named %r in the archive' % name)\r\n 1475\u2502\r\n 1476\u2502 return info\r\n```\r\n\r\nWondering if there is a case sensitivity issue. Looking at the wheel, I do see there is a file in `hypercorn-0.14.3.dist-info/WHEEL`. \r\n\nLatest 1.4.0 release causes unclear Issue with aws_psycopg2 install\n- **Poetry version**: `1.4.0`\r\n- **Python version**: `python3.9`\r\n\r\n<!-- All the below steps should be completed before submitting your issue. Checked checkbox should look like this: [x] -->\r\n- [x] I am on the [latest](https://github.com/python-poetry/poetry/releases/latest) stable Poetry version, installed using a recommended method.\r\n- [x] I have searched the [issues](https://github.com/python-poetry/poetry/issues) of this repo and believe that this is not a duplicate.\r\n- [ ] I have consulted the [FAQ](https://python-poetry.org/docs/faq/) and [blog](https://python-poetry.org/blog/) for any relevant entries or release notes.\r\n- [ ] If an exception occurs when executing a command, I executed it again in debug mode (`-vvv` option) and have included the output below.\r\n\r\n## Issue\r\n<!-- Now feel free to write your issue, and please be as descriptive as possible! Make sure to include detailed reproduction steps. -->\r\n<!-- Thanks again \ud83d\ude4c \u2764 -->\r\n\r\nWhen using the latest version, `1.4.0` we get the following runtime error:\r\n\r\n```\r\n Installing sqlalchemy (1.4.46)\r\n\r\n AssertionError\r\n\r\n In /builds/project-0/.cache/pypoetry/artifacts/39/96/f1/0f021ed26225e8d9035c9bac5a886b024840f841af84bd33f2be466dcb/aws_psycopg2-1.3.8-py3-none-any.whl, psycopg2/_psycopg.cpython-38-aarch64-linux-gnu.so is not mentioned in RECORD\r\n\r\n at venv/lib/python3.9/site-packages/installer/sources.py:158 in get_contents\r\n 154\u2502 if item.filename[-1:] == \"/\": # looks like a directory\r\n 155\u2502 continue\r\n 156\u2502 \r\n 157\u2502 record = record_mapping.pop(item.filename, None)\r\n \u2192 158\u2502 assert record is not None, \"In {}, {} is not mentioned in RECORD\".format(\r\n 159\u2502 self._zipfile.filename,\r\n 160\u2502 item.filename,\r\n 161\u2502 ) # should not happen for valid wheels\r\n 162\u2502 \r\n```\r\n\r\nHowever, when we downgrade to `poetry==1.3.2` this problem goes away...\n", "before_files": [{"content": "from __future__ import annotations\n\nimport os\nimport platform\nimport sys\n\nfrom pathlib import Path\nfrom typing import TYPE_CHECKING\n\nfrom installer import install\nfrom installer.destinations import SchemeDictionaryDestination\nfrom installer.sources import WheelFile\n\nfrom poetry.__version__ import __version__\nfrom poetry.utils._compat import WINDOWS\n\n\nif TYPE_CHECKING:\n from typing import BinaryIO\n\n from installer.records import RecordEntry\n from installer.scripts import LauncherKind\n from installer.utils import Scheme\n\n from poetry.utils.env import Env\n\n\nclass WheelDestination(SchemeDictionaryDestination):\n \"\"\" \"\"\"\n\n def write_to_fs(\n self,\n scheme: Scheme,\n path: Path | str,\n stream: BinaryIO,\n 
is_executable: bool,\n ) -> RecordEntry:\n from installer.records import Hash\n from installer.records import RecordEntry\n from installer.utils import copyfileobj_with_hashing\n from installer.utils import make_file_executable\n\n target_path = Path(self.scheme_dict[scheme]) / path\n if target_path.exists():\n # Contrary to the base library we don't raise an error\n # here since it can break namespace packages (like Poetry's)\n pass\n\n parent_folder = target_path.parent\n if not parent_folder.exists():\n # Due to the parallel installation it can happen\n # that two threads try to create the directory.\n os.makedirs(parent_folder, exist_ok=True)\n\n with open(target_path, \"wb\") as f:\n hash_, size = copyfileobj_with_hashing(stream, f, self.hash_algorithm)\n\n if is_executable:\n make_file_executable(target_path)\n\n return RecordEntry(str(path), Hash(self.hash_algorithm, hash_), size)\n\n def for_source(self, source: WheelFile) -> WheelDestination:\n scheme_dict = self.scheme_dict.copy()\n\n scheme_dict[\"headers\"] = str(Path(scheme_dict[\"headers\"]) / source.distribution)\n\n return self.__class__(\n scheme_dict,\n interpreter=self.interpreter,\n script_kind=self.script_kind,\n bytecode_optimization_levels=self.bytecode_optimization_levels,\n )\n\n\nclass WheelInstaller:\n def __init__(self, env: Env) -> None:\n self._env = env\n\n script_kind: LauncherKind\n if not WINDOWS:\n script_kind = \"posix\"\n else:\n if platform.uname()[4].startswith(\"arm\"):\n script_kind = \"win-arm64\" if sys.maxsize > 2**32 else \"win-arm\"\n else:\n script_kind = \"win-amd64\" if sys.maxsize > 2**32 else \"win-ia32\"\n\n schemes = self._env.paths\n schemes[\"headers\"] = schemes[\"include\"]\n\n self._destination = WheelDestination(\n schemes, interpreter=self._env.python, script_kind=script_kind\n )\n\n def enable_bytecode_compilation(self, enable: bool = True) -> None:\n self._destination.bytecode_optimization_levels = (1,) if enable else ()\n\n def install(self, wheel: Path) -> None:\n with WheelFile.open(Path(wheel.as_posix())) as source:\n install(\n source=source,\n destination=self._destination.for_source(source),\n # Additional metadata that is generated by the installation tool.\n additional_metadata={\n \"INSTALLER\": f\"Poetry {__version__}\".encode(),\n },\n )\n", "path": "src/poetry/installation/wheel_installer.py"}]} | 4,030 | 149 |
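The golden diff above hinges on `WheelFile.validate_record()` from the `installer` library: the wheel archive is checked against its RECORD/dist-info metadata up front, so malformed wheels like the ones in both tracebacks surface as an explicit validation error instead of a `KeyError` or `AssertionError` halfway through installation. Below is a minimal sketch of the same check outside Poetry, assuming a locally downloaded wheel; the filename is only illustrative.

```python
from pathlib import Path

from installer.sources import WheelFile


def check_wheel(wheel: Path) -> None:
    # Open the wheel and verify the archive against its RECORD before doing
    # anything with it; inconsistent wheels are reported here rather than
    # failing mid-install.
    with WheelFile.open(wheel) as source:
        source.validate_record()
        print(f"{wheel.name}: dist-info is {source.dist_info_dir}, RECORD is consistent")


if __name__ == "__main__":
    check_wheel(Path("Hypercorn-0.14.3-py3-none-any.whl"))
```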
gh_patches_debug_30357 | rasdani/github-patches | git_diff | canonical__snapcraft-1490 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Make script aware of 407 when downloading gradle
build.snapcraft.io rejects downloading gradle.
```
Downloading https://services.gradle.org/distributions/gradle-4.0.2-bin.zip
Exception in thread "main" java.io.IOException: Unable to tunnel through proxy. Proxy returns "HTTP/1.1 407 Proxy Authentication Required"
```
Source: https://build.snapcraft.io/user/JabRef/jabref/67152
An issue has been submitted - see https://github.com/canonical-websites/build.snapcraft.io/issues/918. I don't know if this will be fixed or if it is easier to quickly adapt `gradle.py`.
I assume gradle.py has to be adapted accordingly (https://github.com/snapcore/snapcraft/blob/master/snapcraft/plugins/gradle.py#L88): if gradlew exists, execute it; if that fails with a proxy 407 error (grepping stdout?), execute gradle.
</issue>
<code>
[start of snapcraft/plugins/ant.py]
1 # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2 #
3 # Copyright (C) 2015 Canonical Ltd
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU General Public License version 3 as
7 # published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 """The ant plugin is useful for ant based parts.
18
19 The ant build system is commonly used to build Java projects.
20 The plugin requires a build.xml in the root of the source tree.
21
22 This plugin uses the common plugin keywords as well as those for "sources".
23 For more information check the 'plugins' topic for the former and the
24 'sources' topic for the latter.
25
26 Additionally, this plugin uses the following plugin-specific keywords:
27
28 - ant-properties:
29 (object)
30 A dictionary of key-value pairs. Set the following properties when
31 running ant.
32
33 - ant-build-targets:
34 (list of strings)
35 Run the given ant targets.
36 """
37
38 import glob
39 import logging
40 import os
41 from urllib.parse import urlsplit
42
43 import snapcraft
44 import snapcraft.common
45 import snapcraft.plugins.jdk
46
47
48 logger = logging.getLogger(__name__)
49
50
51 class AntPlugin(snapcraft.plugins.jdk.JdkPlugin):
52
53 @classmethod
54 def schema(cls):
55 schema = super().schema()
56 schema['properties']['ant-properties'] = {
57 'type': 'object',
58 'default': {},
59 }
60 schema['properties']['ant-build-targets'] = {
61 'type': 'array',
62 'uniqueItems': True,
63 'items': {
64 'type': 'string',
65 },
66 'default': [],
67 }
68 return schema
69
70 def __init__(self, name, options, project):
71 super().__init__(name, options, project)
72 self.build_packages.append('ant')
73
74 @classmethod
75 def get_build_properties(cls):
76 # Inform Snapcraft of the properties associated with building. If these
77 # change in the YAML Snapcraft will consider the build step dirty.
78 return ['ant-build-targets', 'ant-properties']
79
80 def build(self):
81 super().build()
82
83 command = ['ant']
84
85 if self.options.ant_build_targets:
86 command.extend(self.options.ant_build_targets)
87
88 for prop, value in self.options.ant_properties.items():
89 command.extend(['-D{}={}'.format(prop, value)])
90
91 self.run(command)
92 files = glob.glob(os.path.join(self.builddir, 'target', '*.jar'))
93 if files:
94 jardir = os.path.join(self.installdir, 'jar')
95 os.makedirs(jardir)
96 for f in files:
97 base = os.path.basename(f)
98 os.link(f, os.path.join(jardir, base))
99
100 def get_proxy_options(self, scheme):
101 proxy = os.environ.get('{}_proxy'.format(scheme))
102 if proxy:
103 parsed = urlsplit(proxy)
104 if parsed.hostname is not None:
105 yield '-D{}.proxyHost={}'.format(scheme, parsed.hostname)
106 if parsed.port is not None:
107 yield '-D{}.proxyPort={}'.format(scheme, parsed.port)
108
109 def env(self, root):
110 env = super().env(root)
111 jars = glob.glob(os.path.join(self.installdir, 'jar', '*.jar'))
112 if jars:
113 jars = [os.path.join(root, 'jar',
114 os.path.basename(x)) for x in sorted(jars)]
115 env.extend(
116 ['CLASSPATH={}:$CLASSPATH'.format(':'.join(jars))])
117 # Getting ant to use a proxy requires a little work; the JRE doesn't
118 # help as much as it should. (java.net.useSystemProxies=true ought
119 # to do the trick, but it relies on desktop configuration rather
120 # than using the standard environment variables.)
121 ant_opts = []
122 ant_opts.extend(self.get_proxy_options('http'))
123 ant_opts.extend(self.get_proxy_options('https'))
124 if ant_opts:
125 env.append("ANT_OPTS='{}'".format(
126 ' '.join(opt.replace("'", "'\\''") for opt in ant_opts)))
127 return env
128
[end of snapcraft/plugins/ant.py]
[start of snapcraft/plugins/gradle.py]
1 # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
2 #
3 # Copyright (C) 2016 Canonical Ltd
4 #
5 # This program is free software: you can redistribute it and/or modify
6 # it under the terms of the GNU General Public License version 3 as
7 # published by the Free Software Foundation.
8 #
9 # This program is distributed in the hope that it will be useful,
10 # but WITHOUT ANY WARRANTY; without even the implied warranty of
11 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 # GNU General Public License for more details.
13 #
14 # You should have received a copy of the GNU General Public License
15 # along with this program. If not, see <http://www.gnu.org/licenses/>.
16
17 """This plugin is useful for building parts that use gradle.
18
19 The gradle build system is commonly used to build Java projects.
20 The plugin requires a pom.xml in the root of the source tree.
21
22 This plugin uses the common plugin keywords as well as those for "sources".
23 For more information check the 'plugins' topic for the former and the
24 'sources' topic for the latter.
25
26 Additionally, this plugin uses the following plugin-specific keywords:
27
28 - gradle-options:
29 (list of strings)
30 Flags to pass to the build using the gradle semantics for parameters.
31 The 'jar' option is always passed in as the last parameter.
32
33 - gradle-output-dir:
34 (string; default: 'build/libs')
35 The output directory where the resulting jar or war files from gradle[w]
36 are generated.
37 """
38
39 import glob
40 import logging
41 import os
42 import urllib.parse
43 import snapcraft
44 import snapcraft.common
45 import snapcraft.plugins.jdk
46
47
48 logger = logging.getLogger(__name__)
49
50
51 class GradlePlugin(snapcraft.plugins.jdk.JdkPlugin):
52
53 @classmethod
54 def schema(cls):
55 schema = super().schema()
56 schema['properties']['gradle-options'] = {
57 'type': 'array',
58 'minitems': 1,
59 'uniqueItems': True,
60 'items': {
61 'type': 'string',
62 },
63 'default': [],
64 }
65 schema['properties']['gradle-output-dir'] = {
66 'type': 'string',
67 'default': 'build/libs',
68 }
69
70 return schema
71
72 def __init__(self, name, options, project):
73 super().__init__(name, options, project)
74 filename = os.path.join(os.getcwd(), 'gradlew')
75 if not os.path.isfile(filename):
76 self.build_packages.append('gradle')
77 self.build_packages.append('ca-certificates-java')
78
79 @classmethod
80 def get_build_properties(cls):
81 # Inform Snapcraft of the properties associated with building. If these
82 # change in the YAML Snapcraft will consider the build step dirty.
83 return super().get_build_properties() + ['gradle-options',
84 'gradle-output-dir']
85
86 def build(self):
87 super().build()
88 filename = os.path.join(os.getcwd(), 'gradlew')
89 if os.path.isfile(filename):
90 gradle_cmd = ['./gradlew']
91 else:
92 gradle_cmd = ['gradle']
93 self.run(gradle_cmd +
94 self._get_proxy_options() +
95 self.options.gradle_options + ['jar'])
96
97 src = os.path.join(self.builddir, self.options.gradle_output_dir)
98 jarfiles = glob.glob(os.path.join(src, '*.jar'))
99 warfiles = glob.glob(os.path.join(src, '*.war'))
100
101 if len(jarfiles) > 0:
102 basedir = 'jar'
103 elif len(warfiles) > 0:
104 basedir = 'war'
105 jarfiles = warfiles
106 else:
107 raise RuntimeError("Could not find any built jar files for part")
108
109 snapcraft.file_utils.link_or_copy_tree(
110 src, os.path.join(self.installdir, basedir),
111 copy_function=lambda src, dst:
112 snapcraft.file_utils.link_or_copy(src, dst, self.installdir))
113
114 def _get_proxy_options(self):
115 # XXX This doesn't yet support username and password.
116 # -- elopio - 2016-11-17
117 proxy_options = []
118 for var in ('http', 'https'):
119 proxy = os.environ.get('{}_proxy'.format(var), False)
120 if proxy:
121 parsed_url = urllib.parse.urlparse(proxy)
122 proxy_options.append('-D{}.proxyHost={}'.format(
123 var, parsed_url.hostname))
124 if parsed_url.port:
125 proxy_options.append(
126 '-D{}.proxyPort={}'.format(var, parsed_url.port))
127 return proxy_options
128
[end of snapcraft/plugins/gradle.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/snapcraft/plugins/ant.py b/snapcraft/plugins/ant.py
--- a/snapcraft/plugins/ant.py
+++ b/snapcraft/plugins/ant.py
@@ -105,6 +105,10 @@
yield '-D{}.proxyHost={}'.format(scheme, parsed.hostname)
if parsed.port is not None:
yield '-D{}.proxyPort={}'.format(scheme, parsed.port)
+ if parsed.username is not None:
+ yield '-D{}.proxyUser={}'.format(scheme, parsed.username)
+ if parsed.password is not None:
+ yield '-D{}.proxyPassword={}'.format(scheme, parsed.password)
def env(self, root):
env = super().env(root)
diff --git a/snapcraft/plugins/gradle.py b/snapcraft/plugins/gradle.py
--- a/snapcraft/plugins/gradle.py
+++ b/snapcraft/plugins/gradle.py
@@ -112,8 +112,6 @@
snapcraft.file_utils.link_or_copy(src, dst, self.installdir))
def _get_proxy_options(self):
- # XXX This doesn't yet support username and password.
- # -- elopio - 2016-11-17
proxy_options = []
for var in ('http', 'https'):
proxy = os.environ.get('{}_proxy'.format(var), False)
@@ -124,4 +122,11 @@
if parsed_url.port:
proxy_options.append(
'-D{}.proxyPort={}'.format(var, parsed_url.port))
+ if parsed_url.username:
+ proxy_options.append(
+ '-D{}.proxyUser={}'.format(var, parsed_url.username))
+ if parsed_url.password:
+ proxy_options.append(
+ '-D{}.proxyPassword={}'.format(
+ var, parsed_url.password))
return proxy_options
| {"golden_diff": "diff --git a/snapcraft/plugins/ant.py b/snapcraft/plugins/ant.py\n--- a/snapcraft/plugins/ant.py\n+++ b/snapcraft/plugins/ant.py\n@@ -105,6 +105,10 @@\n yield '-D{}.proxyHost={}'.format(scheme, parsed.hostname)\n if parsed.port is not None:\n yield '-D{}.proxyPort={}'.format(scheme, parsed.port)\n+ if parsed.username is not None:\n+ yield '-D{}.proxyUser={}'.format(scheme, parsed.username)\n+ if parsed.password is not None:\n+ yield '-D{}.proxyPassword={}'.format(scheme, parsed.password)\n \n def env(self, root):\n env = super().env(root)\ndiff --git a/snapcraft/plugins/gradle.py b/snapcraft/plugins/gradle.py\n--- a/snapcraft/plugins/gradle.py\n+++ b/snapcraft/plugins/gradle.py\n@@ -112,8 +112,6 @@\n snapcraft.file_utils.link_or_copy(src, dst, self.installdir))\n \n def _get_proxy_options(self):\n- # XXX This doesn't yet support username and password.\n- # -- elopio - 2016-11-17\n proxy_options = []\n for var in ('http', 'https'):\n proxy = os.environ.get('{}_proxy'.format(var), False)\n@@ -124,4 +122,11 @@\n if parsed_url.port:\n proxy_options.append(\n '-D{}.proxyPort={}'.format(var, parsed_url.port))\n+ if parsed_url.username:\n+ proxy_options.append(\n+ '-D{}.proxyUser={}'.format(var, parsed_url.username))\n+ if parsed_url.password:\n+ proxy_options.append(\n+ '-D{}.proxyPassword={}'.format(\n+ var, parsed_url.password))\n return proxy_options\n", "issue": "Make script aware of 407 when downloading gradle\nbuilds.snapcraft.io rejects downloading gradle.\r\n\r\n```\r\nDownloading https://services.gradle.org/distributions/gradle-4.0.2-bin.zip\r\n\r\nException in thread \"main\" java.io.IOException: Unable to tunnel through proxy. Proxy returns \"HTTP/1.1 407 Proxy Authentication Required\"\r\n```\r\n\r\nSource: https://build.snapcraft.io/user/JabRef/jabref/67152\r\n\r\nAn issue has been submitted - see https://github.com/canonical-websites/build.snapcraft.io/issues/918. I don't know, if this will be fixed or if it is easier to quickyl adapt `gradle.py`.\r\n\r\nI assume, gradle.py has to be adapted accordingly? https://github.com/snapcore/snapcraft/blob/master/snapcraft/plugins/gradle.py#L88; if gradlew exists, execute it. If that fails with proxy 407 error (grepping stdout?), execute gradle\n", "before_files": [{"content": "# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-\n#\n# Copyright (C) 2015 Canonical Ltd\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License version 3 as\n# published by the Free Software Foundation.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"The ant plugin is useful for ant based parts.\n\nThe ant build system is commonly used to build Java projects.\nThe plugin requires a build.xml in the root of the source tree.\n\nThis plugin uses the common plugin keywords as well as those for \"sources\".\nFor more information check the 'plugins' topic for the former and the\n'sources' topic for the latter.\n\nAdditionally, this plugin uses the following plugin-specific keywords:\n\n - ant-properties:\n (object)\n A dictionary of key-value pairs. 
Set the following properties when\n running ant.\n\n - ant-build-targets:\n (list of strings)\n Run the given ant targets.\n\"\"\"\n\nimport glob\nimport logging\nimport os\nfrom urllib.parse import urlsplit\n\nimport snapcraft\nimport snapcraft.common\nimport snapcraft.plugins.jdk\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass AntPlugin(snapcraft.plugins.jdk.JdkPlugin):\n\n @classmethod\n def schema(cls):\n schema = super().schema()\n schema['properties']['ant-properties'] = {\n 'type': 'object',\n 'default': {},\n }\n schema['properties']['ant-build-targets'] = {\n 'type': 'array',\n 'uniqueItems': True,\n 'items': {\n 'type': 'string',\n },\n 'default': [],\n }\n return schema\n\n def __init__(self, name, options, project):\n super().__init__(name, options, project)\n self.build_packages.append('ant')\n\n @classmethod\n def get_build_properties(cls):\n # Inform Snapcraft of the properties associated with building. If these\n # change in the YAML Snapcraft will consider the build step dirty.\n return ['ant-build-targets', 'ant-properties']\n\n def build(self):\n super().build()\n\n command = ['ant']\n\n if self.options.ant_build_targets:\n command.extend(self.options.ant_build_targets)\n\n for prop, value in self.options.ant_properties.items():\n command.extend(['-D{}={}'.format(prop, value)])\n\n self.run(command)\n files = glob.glob(os.path.join(self.builddir, 'target', '*.jar'))\n if files:\n jardir = os.path.join(self.installdir, 'jar')\n os.makedirs(jardir)\n for f in files:\n base = os.path.basename(f)\n os.link(f, os.path.join(jardir, base))\n\n def get_proxy_options(self, scheme):\n proxy = os.environ.get('{}_proxy'.format(scheme))\n if proxy:\n parsed = urlsplit(proxy)\n if parsed.hostname is not None:\n yield '-D{}.proxyHost={}'.format(scheme, parsed.hostname)\n if parsed.port is not None:\n yield '-D{}.proxyPort={}'.format(scheme, parsed.port)\n\n def env(self, root):\n env = super().env(root)\n jars = glob.glob(os.path.join(self.installdir, 'jar', '*.jar'))\n if jars:\n jars = [os.path.join(root, 'jar',\n os.path.basename(x)) for x in sorted(jars)]\n env.extend(\n ['CLASSPATH={}:$CLASSPATH'.format(':'.join(jars))])\n # Getting ant to use a proxy requires a little work; the JRE doesn't\n # help as much as it should. (java.net.useSystemProxies=true ought\n # to do the trick, but it relies on desktop configuration rather\n # than using the standard environment variables.)\n ant_opts = []\n ant_opts.extend(self.get_proxy_options('http'))\n ant_opts.extend(self.get_proxy_options('https'))\n if ant_opts:\n env.append(\"ANT_OPTS='{}'\".format(\n ' '.join(opt.replace(\"'\", \"'\\\\''\") for opt in ant_opts)))\n return env\n", "path": "snapcraft/plugins/ant.py"}, {"content": "# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-\n#\n# Copyright (C) 2016 Canonical Ltd\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License version 3 as\n# published by the Free Software Foundation.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. 
If not, see <http://www.gnu.org/licenses/>.\n\n\"\"\"This plugin is useful for building parts that use gradle.\n\nThe gradle build system is commonly used to build Java projects.\nThe plugin requires a pom.xml in the root of the source tree.\n\nThis plugin uses the common plugin keywords as well as those for \"sources\".\nFor more information check the 'plugins' topic for the former and the\n'sources' topic for the latter.\n\nAdditionally, this plugin uses the following plugin-specific keywords:\n\n - gradle-options:\n (list of strings)\n Flags to pass to the build using the gradle semantics for parameters.\n The 'jar' option is always passed in as the last parameter.\n\n - gradle-output-dir:\n (string; default: 'build/libs')\n The output directory where the resulting jar or war files from gradle[w]\n are generated.\n\"\"\"\n\nimport glob\nimport logging\nimport os\nimport urllib.parse\nimport snapcraft\nimport snapcraft.common\nimport snapcraft.plugins.jdk\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass GradlePlugin(snapcraft.plugins.jdk.JdkPlugin):\n\n @classmethod\n def schema(cls):\n schema = super().schema()\n schema['properties']['gradle-options'] = {\n 'type': 'array',\n 'minitems': 1,\n 'uniqueItems': True,\n 'items': {\n 'type': 'string',\n },\n 'default': [],\n }\n schema['properties']['gradle-output-dir'] = {\n 'type': 'string',\n 'default': 'build/libs',\n }\n\n return schema\n\n def __init__(self, name, options, project):\n super().__init__(name, options, project)\n filename = os.path.join(os.getcwd(), 'gradlew')\n if not os.path.isfile(filename):\n self.build_packages.append('gradle')\n self.build_packages.append('ca-certificates-java')\n\n @classmethod\n def get_build_properties(cls):\n # Inform Snapcraft of the properties associated with building. If these\n # change in the YAML Snapcraft will consider the build step dirty.\n return super().get_build_properties() + ['gradle-options',\n 'gradle-output-dir']\n\n def build(self):\n super().build()\n filename = os.path.join(os.getcwd(), 'gradlew')\n if os.path.isfile(filename):\n gradle_cmd = ['./gradlew']\n else:\n gradle_cmd = ['gradle']\n self.run(gradle_cmd +\n self._get_proxy_options() +\n self.options.gradle_options + ['jar'])\n\n src = os.path.join(self.builddir, self.options.gradle_output_dir)\n jarfiles = glob.glob(os.path.join(src, '*.jar'))\n warfiles = glob.glob(os.path.join(src, '*.war'))\n\n if len(jarfiles) > 0:\n basedir = 'jar'\n elif len(warfiles) > 0:\n basedir = 'war'\n jarfiles = warfiles\n else:\n raise RuntimeError(\"Could not find any built jar files for part\")\n\n snapcraft.file_utils.link_or_copy_tree(\n src, os.path.join(self.installdir, basedir),\n copy_function=lambda src, dst:\n snapcraft.file_utils.link_or_copy(src, dst, self.installdir))\n\n def _get_proxy_options(self):\n # XXX This doesn't yet support username and password.\n # -- elopio - 2016-11-17\n proxy_options = []\n for var in ('http', 'https'):\n proxy = os.environ.get('{}_proxy'.format(var), False)\n if proxy:\n parsed_url = urllib.parse.urlparse(proxy)\n proxy_options.append('-D{}.proxyHost={}'.format(\n var, parsed_url.hostname))\n if parsed_url.port:\n proxy_options.append(\n '-D{}.proxyPort={}'.format(var, parsed_url.port))\n return proxy_options\n", "path": "snapcraft/plugins/gradle.py"}]} | 3,289 | 418 |
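The golden diff above makes both the ant and gradle plugins forward proxy credentials as Java system properties (`-D<scheme>.proxyUser` / `-D<scheme>.proxyPassword`), which is what a JVM build needs to get past an authenticating proxy such as the one returning 407 on build.snapcraft.io. A self-contained sketch of that translation is shown below, assuming the usual lowercase `http_proxy`/`https_proxy` variables; the example proxy URL is made up.

```python
import os
import urllib.parse


def java_proxy_properties():
    """Translate http(s)_proxy environment variables into Java -D options,
    including credentials, mirroring what the patched plugins pass to gradle."""
    options = []
    for scheme in ('http', 'https'):
        proxy = os.environ.get('{}_proxy'.format(scheme))
        if not proxy:
            continue
        parsed = urllib.parse.urlparse(proxy)
        if parsed.hostname:
            options.append('-D{}.proxyHost={}'.format(scheme, parsed.hostname))
        if parsed.port:
            options.append('-D{}.proxyPort={}'.format(scheme, parsed.port))
        if parsed.username:
            options.append('-D{}.proxyUser={}'.format(scheme, parsed.username))
        if parsed.password:
            options.append('-D{}.proxyPassword={}'.format(scheme, parsed.password))
    return options


# With http_proxy=http://builder:[email protected]:3128 this yields:
# ['-Dhttp.proxyHost=squid.internal', '-Dhttp.proxyPort=3128',
#  '-Dhttp.proxyUser=builder', '-Dhttp.proxyPassword=secret']
```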
gh_patches_debug_10883 | rasdani/github-patches | git_diff | ansible-collections__community.general-4853 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
sudoers module does not handle state: absent properly
### Summary
The `community.general.sudoers` module does not handle `state: absent` properly.
If the file specified by `name` does not exist, it gets created, possibly with invalid content. As a side effect, `commands` and `user`/`group` are required, although these options do not make much sense with `state: absent`.
If the file specified by `name` exists, it is (correctly) deleted. Of course, the next invocation of the play creates it again.
### Issue Type
Bug Report
### Component Name
sudoers
### Ansible Version
```console (paste below)
$ ansible --version
ansible [core 2.12.5]
config file = ...
configured module search path = [...]
ansible python module location = /usr/lib/python3.9/site-packages/ansible
ansible collection location = /usr/share/ansible/collections
executable location = /usr/lib/python-exec/python3.9/ansible
python version = 3.9.12 (main, Jun 17 2022, 16:04:33) [GCC 11.3.0]
jinja version = 3.1.2
libyaml = True
```
### Community.general Version
```console (paste below)
$ ansible-galaxy collection list community.general
# /usr/lib/python3.9/site-packages/ansible_collections
Collection Version
----------------- -------
community.general 4.8.0
```
### Configuration
```console (paste below)
$ ansible-config dump --only-changed
```
### OS / Environment
Target OS is Debian 11.
### Steps to Reproduce
```yaml (paste below)
---
- hosts: all
gather_facts: false
tasks:
- community.general.sudoers:
name: test
commands: []
user: root
state: absent
- ansible.builtin.stat:
path: /etc/sudoers.d/test
register: _stat_sudoers
- ansible.builtin.assert:
that:
- "not _stat_sudoers.stat.exists"
```
Note: `/etc/sudoers.d/test` is assumed not to exist prior to running this play.
### Expected Results
The play never fails, i.e. `/etc/sudoers.d/test` gets deleted/not created.
### Actual Results
The assertion fails on every odd-numbered invocation of the play (the first, third, and so on): the file `/etc/sudoers.d/test` gets created by the `sudoers` module.
The assertion passes on every even-numbered invocation (the second, fourth, and so on), when the module deletes the file it created on the previous run.
```console (paste below)
PLAY [all] **********************************************************************************************
TASK [community.general.sudoers] ************************************************************************
changed: [test]
TASK [ansible.builtin.stat] *****************************************************************************
ok: [test]
TASK [ansible.builtin.assert] ***************************************************************************
fatal: [test]: FAILED! => {
"assertion": "not _stat_sudoers.stat.exists",
"changed": false,
"evaluated_to": false,
"msg": "Assertion failed"
}
PLAY RECAP **********************************************************************************************
test : ok=2 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0
```
### Code of Conduct
- [X] I agree to follow the Ansible Code of Conduct
</issue>
<code>
[start of plugins/modules/system/sudoers.py]
1 #!/usr/bin/python
2 # -*- coding: utf-8 -*-
3
4
5 # Copyright: (c) 2019, Jon Ellis (@JonEllis) <[email protected]>
6 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
7
8 from __future__ import absolute_import, division, print_function
9 __metaclass__ = type
10
11
12 DOCUMENTATION = '''
13 ---
14 module: sudoers
15 short_description: Manage sudoers files
16 version_added: "4.3.0"
17 description:
18 - This module allows for the manipulation of sudoers files.
19 author:
20 - "Jon Ellis (@JonEllis) <[email protected]>"
21 options:
22 commands:
23 description:
24 - The commands allowed by the sudoers rule.
25 - Multiple can be added by passing a list of commands.
26 - Use C(ALL) for all commands.
27 type: list
28 elements: str
29 group:
30 description:
31 - The name of the group for the sudoers rule.
32 - This option cannot be used in conjunction with I(user).
33 type: str
34 name:
35 required: true
36 description:
37 - The name of the sudoers rule.
38 - This will be used for the filename for the sudoers file managed by this rule.
39 type: str
40 nopassword:
41 description:
42 - Whether a password will be required to run the sudo'd command.
43 default: true
44 type: bool
45 runas:
46 description:
47 - Specify the target user the command(s) will run as.
48 type: str
49 version_added: 4.7.0
50 sudoers_path:
51 description:
52 - The path which sudoers config files will be managed in.
53 default: /etc/sudoers.d
54 type: str
55 state:
56 default: "present"
57 choices:
58 - present
59 - absent
60 description:
61 - Whether the rule should exist or not.
62 type: str
63 user:
64 description:
65 - The name of the user for the sudoers rule.
66 - This option cannot be used in conjunction with I(group).
67 type: str
68 '''
69
70 EXAMPLES = '''
71 - name: Allow the backup user to sudo /usr/local/bin/backup
72 community.general.sudoers:
73 name: allow-backup
74 state: present
75 user: backup
76 commands: /usr/local/bin/backup
77
78 - name: Allow the bob user to run any commands as alice with sudo -u alice
79 community.general.sudoers:
80 name: bob-do-as-alice
81 state: present
82 user: bob
83 runas: alice
84 commands: ALL
85
86 - name: >-
87 Allow the monitoring group to run sudo /usr/local/bin/gather-app-metrics
88 without requiring a password
89 community.general.sudoers:
90 name: monitor-app
91 group: monitoring
92 commands: /usr/local/bin/gather-app-metrics
93
94 - name: >-
95 Allow the alice user to run sudo /bin/systemctl restart my-service or
96 sudo /bin/systemctl reload my-service, but a password is required
97 community.general.sudoers:
98 name: alice-service
99 user: alice
100 commands:
101 - /bin/systemctl restart my-service
102 - /bin/systemctl reload my-service
103 nopassword: false
104
105 - name: Revoke the previous sudo grants given to the alice user
106 community.general.sudoers:
107 name: alice-service
108 state: absent
109 '''
110
111 import os
112 from ansible.module_utils.basic import AnsibleModule
113 from ansible.module_utils.common.text.converters import to_native
114
115
116 class Sudoers(object):
117
118 FILE_MODE = 0o440
119
120 def __init__(self, module):
121 self.check_mode = module.check_mode
122 self.name = module.params['name']
123 self.user = module.params['user']
124 self.group = module.params['group']
125 self.state = module.params['state']
126 self.nopassword = module.params['nopassword']
127 self.runas = module.params['runas']
128 self.sudoers_path = module.params['sudoers_path']
129 self.file = os.path.join(self.sudoers_path, self.name)
130 self.commands = module.params['commands']
131
132 def write(self):
133 if self.check_mode:
134 return
135
136 with open(self.file, 'w') as f:
137 f.write(self.content())
138
139 os.chmod(self.file, self.FILE_MODE)
140
141 def delete(self):
142 if self.check_mode:
143 return
144
145 os.remove(self.file)
146
147 def exists(self):
148 return os.path.exists(self.file)
149
150 def matches(self):
151 with open(self.file, 'r') as f:
152 content_matches = f.read() == self.content()
153
154 current_mode = os.stat(self.file).st_mode & 0o777
155 mode_matches = current_mode == self.FILE_MODE
156
157 return content_matches and mode_matches
158
159 def content(self):
160 if self.user:
161 owner = self.user
162 elif self.group:
163 owner = '%{group}'.format(group=self.group)
164
165 commands_str = ', '.join(self.commands)
166 nopasswd_str = 'NOPASSWD:' if self.nopassword else ''
167 runas_str = '({runas})'.format(runas=self.runas) if self.runas is not None else ''
168 return "{owner} ALL={runas}{nopasswd} {commands}\n".format(owner=owner, runas=runas_str, nopasswd=nopasswd_str, commands=commands_str)
169
170 def run(self):
171 if self.state == 'absent' and self.exists():
172 self.delete()
173 return True
174
175 if self.exists() and self.matches():
176 return False
177
178 self.write()
179 return True
180
181
182 def main():
183 argument_spec = {
184 'commands': {
185 'type': 'list',
186 'elements': 'str',
187 },
188 'group': {},
189 'name': {
190 'required': True,
191 },
192 'nopassword': {
193 'type': 'bool',
194 'default': True,
195 },
196 'runas': {
197 'type': 'str',
198 'default': None,
199 },
200 'sudoers_path': {
201 'type': 'str',
202 'default': '/etc/sudoers.d',
203 },
204 'state': {
205 'default': 'present',
206 'choices': ['present', 'absent'],
207 },
208 'user': {},
209 }
210
211 module = AnsibleModule(
212 argument_spec=argument_spec,
213 mutually_exclusive=[['user', 'group']],
214 supports_check_mode=True,
215 required_if=[('state', 'present', ['commands'])],
216 )
217
218 sudoers = Sudoers(module)
219
220 try:
221 changed = sudoers.run()
222 module.exit_json(changed=changed)
223 except Exception as e:
224 module.fail_json(msg=to_native(e))
225
226
227 if __name__ == '__main__':
228 main()
229
[end of plugins/modules/system/sudoers.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/plugins/modules/system/sudoers.py b/plugins/modules/system/sudoers.py
--- a/plugins/modules/system/sudoers.py
+++ b/plugins/modules/system/sudoers.py
@@ -168,9 +168,12 @@
return "{owner} ALL={runas}{nopasswd} {commands}\n".format(owner=owner, runas=runas_str, nopasswd=nopasswd_str, commands=commands_str)
def run(self):
- if self.state == 'absent' and self.exists():
- self.delete()
- return True
+ if self.state == 'absent':
+ if self.exists():
+ self.delete()
+ return True
+ else:
+ return False
if self.exists() and self.matches():
return False
| {"golden_diff": "diff --git a/plugins/modules/system/sudoers.py b/plugins/modules/system/sudoers.py\n--- a/plugins/modules/system/sudoers.py\n+++ b/plugins/modules/system/sudoers.py\n@@ -168,9 +168,12 @@\n return \"{owner} ALL={runas}{nopasswd} {commands}\\n\".format(owner=owner, runas=runas_str, nopasswd=nopasswd_str, commands=commands_str)\n \n def run(self):\n- if self.state == 'absent' and self.exists():\n- self.delete()\n- return True\n+ if self.state == 'absent':\n+ if self.exists():\n+ self.delete()\n+ return True\n+ else:\n+ return False\n \n if self.exists() and self.matches():\n return False\n", "issue": "sudoers module does not handle state: absent properly\n### Summary\n\nThe `community.general.sudoers` module does not handle `state: absent` properly.\r\nIf the file specified by `name` does not exist, it gets created, with possibly invalid content. As a side effect, `commands` and `user`/`group` is required, although these options do not make a lot of sense with `state: absent`.\r\nIf the file specified by `name` exists, it is (correctly) deleted. Of course, the next invocation of the play creates it again.\n\n### Issue Type\n\nBug Report\n\n### Component Name\n\nsudoers\n\n### Ansible Version\n\n```console (paste below)\r\n$ ansible --version\r\nansible [core 2.12.5]\r\n config file = ...\r\n configured module search path = [...]\r\n ansible python module location = /usr/lib/python3.9/site-packages/ansible\r\n ansible collection location = /usr/share/ansible/collections\r\n executable location = /usr/lib/python-exec/python3.9/ansible\r\n python version = 3.9.12 (main, Jun 17 2022, 16:04:33) [GCC 11.3.0]\r\n jinja version = 3.1.2\r\n libyaml = True\r\n```\n\n### Community.general Version\n\n```console (paste below)\r\n$ ansible-galaxy collection list community.general\r\n# /usr/lib/python3.9/site-packages/ansible_collections\r\nCollection Version\r\n----------------- -------\r\ncommunity.general 4.8.0 \r\n```\n\n### Configuration\n\n```console (paste below)\r\n$ ansible-config dump --only-changed\r\n\r\n```\r\n\n\n### OS / Environment\n\nTarget OS is Debian 11.\n\n### Steps to Reproduce\n\n```yaml (paste below)\r\n---\r\n- hosts: all\r\n gather_facts: false\r\n tasks:\r\n - community.general.sudoers:\r\n name: test\r\n commands: []\r\n user: root\r\n state: absent\r\n\r\n - ansible.builtin.stat:\r\n path: /etc/sudoers.d/test\r\n register: _stat_sudoers\r\n\r\n - ansible.builtin.assert:\r\n that:\r\n - \"not _stat_sudoers.stat.exists\"\r\n```\r\nNote: `/etc/sudoers.d/test` is assumed not to exist prior to running this play.\n\n### Expected Results\n\nThe play never fails, i.e. `/etc/sudoers.d/test` gets deleted/not created.\n\n### Actual Results\n\nThe assertion fails on every second invocation of the play (starting with the first). The file `/etc/sudoers.d/test` gets created by the `sudoers` module.\r\nThe assertion passes on every other invocation of the play (starting with the second).\r\n\r\n```console (paste below)\r\nPLAY [all] **********************************************************************************************\r\n\r\nTASK [community.general.sudoers] ************************************************************************\r\nchanged: [test]\r\n\r\nTASK [ansible.builtin.stat] *****************************************************************************\r\nok: [test]\r\n\r\nTASK [ansible.builtin.assert] ***************************************************************************\r\nfatal: [test]: FAILED! 
=> {\r\n \"assertion\": \"not _stat_sudoers.stat.exists\",\r\n \"changed\": false,\r\n \"evaluated_to\": false,\r\n \"msg\": \"Assertion failed\"\r\n}\r\n\r\nPLAY RECAP **********************************************************************************************\r\ntest : ok=2 changed=1 unreachable=0 failed=1 skipped=0 rescued=0 ignored=0\r\n```\n\n### Code of Conduct\n\n- [X] I agree to follow the Ansible Code of Conduct\n", "before_files": [{"content": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n\n\n# Copyright: (c) 2019, Jon Ellis (@JonEllis) <[email protected]>\n# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)\n\nfrom __future__ import absolute_import, division, print_function\n__metaclass__ = type\n\n\nDOCUMENTATION = '''\n---\nmodule: sudoers\nshort_description: Manage sudoers files\nversion_added: \"4.3.0\"\ndescription:\n - This module allows for the manipulation of sudoers files.\nauthor:\n - \"Jon Ellis (@JonEllis) <[email protected]>\"\noptions:\n commands:\n description:\n - The commands allowed by the sudoers rule.\n - Multiple can be added by passing a list of commands.\n - Use C(ALL) for all commands.\n type: list\n elements: str\n group:\n description:\n - The name of the group for the sudoers rule.\n - This option cannot be used in conjunction with I(user).\n type: str\n name:\n required: true\n description:\n - The name of the sudoers rule.\n - This will be used for the filename for the sudoers file managed by this rule.\n type: str\n nopassword:\n description:\n - Whether a password will be required to run the sudo'd command.\n default: true\n type: bool\n runas:\n description:\n - Specify the target user the command(s) will run as.\n type: str\n version_added: 4.7.0\n sudoers_path:\n description:\n - The path which sudoers config files will be managed in.\n default: /etc/sudoers.d\n type: str\n state:\n default: \"present\"\n choices:\n - present\n - absent\n description:\n - Whether the rule should exist or not.\n type: str\n user:\n description:\n - The name of the user for the sudoers rule.\n - This option cannot be used in conjunction with I(group).\n type: str\n'''\n\nEXAMPLES = '''\n- name: Allow the backup user to sudo /usr/local/bin/backup\n community.general.sudoers:\n name: allow-backup\n state: present\n user: backup\n commands: /usr/local/bin/backup\n\n- name: Allow the bob user to run any commands as alice with sudo -u alice\n community.general.sudoers:\n name: bob-do-as-alice\n state: present\n user: bob\n runas: alice\n commands: ALL\n\n- name: >-\n Allow the monitoring group to run sudo /usr/local/bin/gather-app-metrics\n without requiring a password\n community.general.sudoers:\n name: monitor-app\n group: monitoring\n commands: /usr/local/bin/gather-app-metrics\n\n- name: >-\n Allow the alice user to run sudo /bin/systemctl restart my-service or\n sudo /bin/systemctl reload my-service, but a password is required\n community.general.sudoers:\n name: alice-service\n user: alice\n commands:\n - /bin/systemctl restart my-service\n - /bin/systemctl reload my-service\n nopassword: false\n\n- name: Revoke the previous sudo grants given to the alice user\n community.general.sudoers:\n name: alice-service\n state: absent\n'''\n\nimport os\nfrom ansible.module_utils.basic import AnsibleModule\nfrom ansible.module_utils.common.text.converters import to_native\n\n\nclass Sudoers(object):\n\n FILE_MODE = 0o440\n\n def __init__(self, module):\n self.check_mode = module.check_mode\n self.name = module.params['name']\n 
self.user = module.params['user']\n self.group = module.params['group']\n self.state = module.params['state']\n self.nopassword = module.params['nopassword']\n self.runas = module.params['runas']\n self.sudoers_path = module.params['sudoers_path']\n self.file = os.path.join(self.sudoers_path, self.name)\n self.commands = module.params['commands']\n\n def write(self):\n if self.check_mode:\n return\n\n with open(self.file, 'w') as f:\n f.write(self.content())\n\n os.chmod(self.file, self.FILE_MODE)\n\n def delete(self):\n if self.check_mode:\n return\n\n os.remove(self.file)\n\n def exists(self):\n return os.path.exists(self.file)\n\n def matches(self):\n with open(self.file, 'r') as f:\n content_matches = f.read() == self.content()\n\n current_mode = os.stat(self.file).st_mode & 0o777\n mode_matches = current_mode == self.FILE_MODE\n\n return content_matches and mode_matches\n\n def content(self):\n if self.user:\n owner = self.user\n elif self.group:\n owner = '%{group}'.format(group=self.group)\n\n commands_str = ', '.join(self.commands)\n nopasswd_str = 'NOPASSWD:' if self.nopassword else ''\n runas_str = '({runas})'.format(runas=self.runas) if self.runas is not None else ''\n return \"{owner} ALL={runas}{nopasswd} {commands}\\n\".format(owner=owner, runas=runas_str, nopasswd=nopasswd_str, commands=commands_str)\n\n def run(self):\n if self.state == 'absent' and self.exists():\n self.delete()\n return True\n\n if self.exists() and self.matches():\n return False\n\n self.write()\n return True\n\n\ndef main():\n argument_spec = {\n 'commands': {\n 'type': 'list',\n 'elements': 'str',\n },\n 'group': {},\n 'name': {\n 'required': True,\n },\n 'nopassword': {\n 'type': 'bool',\n 'default': True,\n },\n 'runas': {\n 'type': 'str',\n 'default': None,\n },\n 'sudoers_path': {\n 'type': 'str',\n 'default': '/etc/sudoers.d',\n },\n 'state': {\n 'default': 'present',\n 'choices': ['present', 'absent'],\n },\n 'user': {},\n }\n\n module = AnsibleModule(\n argument_spec=argument_spec,\n mutually_exclusive=[['user', 'group']],\n supports_check_mode=True,\n required_if=[('state', 'present', ['commands'])],\n )\n\n sudoers = Sudoers(module)\n\n try:\n changed = sudoers.run()\n module.exit_json(changed=changed)\n except Exception as e:\n module.fail_json(msg=to_native(e))\n\n\nif __name__ == '__main__':\n main()\n", "path": "plugins/modules/system/sudoers.py"}]} | 3,381 | 178 |
gh_patches_debug_14555 | rasdani/github-patches | git_diff | inventree__InvenTree-1446 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Error exporting BOM in YAML format
The error is:

</issue>
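The issue only gives a screenshot, but the patch at the end of this entry swaps `normalize(item.quantity)` for `float(item.quantity)`, which points at `decimal.Decimal` quantities reaching the YAML dumper. A minimal standalone sketch of that failure mode — plain PyYAML here as an assumption, not the actual InvenTree/tablib export path:

```python
# Standalone sketch (not InvenTree code): PyYAML has no default representer for
# decimal.Decimal, which is the kind of value a Decimal-based quantity
# normaliser returns, so dumping such a row fails while plain floats work.
from decimal import Decimal

import yaml
from yaml.representer import RepresenterError

bom_rows = [{"part": "R-0603-10K", "quantity": Decimal("1")}]

try:
    yaml.safe_dump(bom_rows)
except RepresenterError as exc:
    print(f"YAML export fails: {exc}")

# Casting the quantity to float keeps the YAML export working:
bom_rows[0]["quantity"] = float(bom_rows[0]["quantity"])
print(yaml.safe_dump(bom_rows), end="")
```

If the export goes through tablib's YAML format it ends up in PyYAML anyway, so the same representer limitation would apply.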
<code>
[start of InvenTree/part/admin.py]
1 # -*- coding: utf-8 -*-
2 from __future__ import unicode_literals
3
4 from django.contrib import admin
5
6 from import_export.admin import ImportExportModelAdmin
7 from import_export.resources import ModelResource
8 from import_export.fields import Field
9 import import_export.widgets as widgets
10
11 from .models import PartCategory, Part
12 from .models import PartAttachment, PartStar, PartRelated
13 from .models import BomItem
14 from .models import PartParameterTemplate, PartParameter
15 from .models import PartCategoryParameterTemplate
16 from .models import PartTestTemplate
17 from .models import PartSellPriceBreak
18
19 from InvenTree.helpers import normalize
20
21 from stock.models import StockLocation
22 from company.models import SupplierPart
23
24
25 class PartResource(ModelResource):
26 """ Class for managing Part data import/export """
27
28 # ForeignKey fields
29 category = Field(attribute='category', widget=widgets.ForeignKeyWidget(PartCategory))
30
31 default_location = Field(attribute='default_location', widget=widgets.ForeignKeyWidget(StockLocation))
32
33 default_supplier = Field(attribute='default_supplier', widget=widgets.ForeignKeyWidget(SupplierPart))
34
35 category_name = Field(attribute='category__name', readonly=True)
36
37 variant_of = Field(attribute='variant_of', widget=widgets.ForeignKeyWidget(Part))
38
39 suppliers = Field(attribute='supplier_count', readonly=True)
40
41 # Extra calculated meta-data (readonly)
42 in_stock = Field(attribute='total_stock', readonly=True, widget=widgets.IntegerWidget())
43
44 on_order = Field(attribute='on_order', readonly=True, widget=widgets.IntegerWidget())
45
46 used_in = Field(attribute='used_in_count', readonly=True, widget=widgets.IntegerWidget())
47
48 allocated = Field(attribute='allocation_count', readonly=True, widget=widgets.IntegerWidget())
49
50 building = Field(attribute='quantity_being_built', readonly=True, widget=widgets.IntegerWidget())
51
52 class Meta:
53 model = Part
54 skip_unchanged = True
55 report_skipped = False
56 clean_model_instances = True
57 exclude = [
58 'bom_checksum', 'bom_checked_by', 'bom_checked_date',
59 'lft', 'rght', 'tree_id', 'level',
60 ]
61
62 def get_queryset(self):
63 """ Prefetch related data for quicker access """
64
65 query = super().get_queryset()
66 query = query.prefetch_related(
67 'category',
68 'used_in',
69 'builds',
70 'supplier_parts__purchase_order_line_items',
71 'stock_items__allocations'
72 )
73
74 return query
75
76
77 class PartAdmin(ImportExportModelAdmin):
78
79 resource_class = PartResource
80
81 list_display = ('full_name', 'description', 'total_stock', 'category')
82
83 list_filter = ('active', 'assembly', 'is_template', 'virtual')
84
85 search_fields = ('name', 'description', 'category__name', 'category__description', 'IPN')
86
87
88 class PartCategoryResource(ModelResource):
89 """ Class for managing PartCategory data import/export """
90
91 parent = Field(attribute='parent', widget=widgets.ForeignKeyWidget(PartCategory))
92
93 parent_name = Field(attribute='parent__name', readonly=True)
94
95 default_location = Field(attribute='default_location', widget=widgets.ForeignKeyWidget(StockLocation))
96
97 class Meta:
98 model = PartCategory
99 skip_unchanged = True
100 report_skipped = False
101 clean_model_instances = True
102
103 exclude = [
104 # Exclude MPTT internal model fields
105 'lft', 'rght', 'tree_id', 'level',
106 ]
107
108 def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):
109
110 super().after_import(dataset, result, using_transactions, dry_run, **kwargs)
111
112 # Rebuild the PartCategory tree(s)
113 PartCategory.objects.rebuild()
114
115
116 class PartCategoryAdmin(ImportExportModelAdmin):
117
118 resource_class = PartCategoryResource
119
120 list_display = ('name', 'pathstring', 'description')
121
122 search_fields = ('name', 'description')
123
124
125 class PartRelatedAdmin(admin.ModelAdmin):
126 ''' Class to manage PartRelated objects '''
127 pass
128
129
130 class PartAttachmentAdmin(admin.ModelAdmin):
131
132 list_display = ('part', 'attachment', 'comment')
133
134
135 class PartStarAdmin(admin.ModelAdmin):
136
137 list_display = ('part', 'user')
138
139
140 class PartTestTemplateAdmin(admin.ModelAdmin):
141
142 list_display = ('part', 'test_name', 'required')
143
144
145 class BomItemResource(ModelResource):
146 """ Class for managing BomItem data import/export """
147
148 level = Field(attribute='level', readonly=True)
149
150 bom_id = Field(attribute='pk')
151
152 # ID of the parent part
153 parent_part_id = Field(attribute='part', widget=widgets.ForeignKeyWidget(Part))
154
155 # IPN of the parent part
156 parent_part_ipn = Field(attribute='part__IPN', readonly=True)
157
158 # Name of the parent part
159 parent_part_name = Field(attribute='part__name', readonly=True)
160
161 # ID of the sub-part
162 part_id = Field(attribute='sub_part', widget=widgets.ForeignKeyWidget(Part))
163
164 # IPN of the sub-part
165 part_ipn = Field(attribute='sub_part__IPN', readonly=True)
166
167 # Name of the sub-part
168 part_name = Field(attribute='sub_part__name', readonly=True)
169
170 # Description of the sub-part
171 part_description = Field(attribute='sub_part__description', readonly=True)
172
173 # Is the sub-part itself an assembly?
174 sub_assembly = Field(attribute='sub_part__assembly', readonly=True)
175
176 def dehydrate_quantity(self, item):
177 """
178 Special consideration for the 'quantity' field on data export.
179 We do not want a spreadsheet full of "1.0000" (we'd rather "1")
180
181 Ref: https://django-import-export.readthedocs.io/en/latest/getting_started.html#advanced-data-manipulation-on-export
182 """
183 return normalize(item.quantity)
184
185 def before_export(self, queryset, *args, **kwargs):
186
187 self.is_importing = kwargs.get('importing', False)
188
189 def get_fields(self, **kwargs):
190 """
191 If we are exporting for the purposes of generating
192 a 'bom-import' template, there are some fields which
193 we are not interested in.
194 """
195
196 fields = super().get_fields(**kwargs)
197
198 # If we are not generating an "import" template,
199 # just return the complete list of fields
200 if not self.is_importing:
201 return fields
202
203 # Otherwise, remove some fields we are not interested in
204
205 idx = 0
206
207 to_remove = [
208 'level',
209 'bom_id',
210 'parent_part_id',
211 'parent_part_ipn',
212 'parent_part_name',
213 'part_description',
214 'sub_assembly'
215 ]
216
217 while idx < len(fields):
218
219 if fields[idx].column_name.lower() in to_remove:
220 del fields[idx]
221 else:
222 idx += 1
223
224 return fields
225
226 class Meta:
227 model = BomItem
228 skip_unchanged = True
229 report_skipped = False
230 clean_model_instances = True
231
232 exclude = [
233 'checksum',
234 'id',
235 'part',
236 'sub_part',
237 ]
238
239
240 class BomItemAdmin(ImportExportModelAdmin):
241
242 resource_class = BomItemResource
243
244 list_display = ('part', 'sub_part', 'quantity')
245
246 search_fields = ('part__name', 'part__description', 'sub_part__name', 'sub_part__description')
247
248
249 class ParameterTemplateAdmin(ImportExportModelAdmin):
250 list_display = ('name', 'units')
251
252
253 class ParameterResource(ModelResource):
254 """ Class for managing PartParameter data import/export """
255
256 part = Field(attribute='part', widget=widgets.ForeignKeyWidget(Part))
257
258 part_name = Field(attribute='part__name', readonly=True)
259
260 template = Field(attribute='template', widget=widgets.ForeignKeyWidget(PartParameterTemplate))
261
262 template_name = Field(attribute='template__name', readonly=True)
263
264 class Meta:
265 model = PartParameter
266 skip_unchanged = True
267 report_skipped = False
268 clean_model_instance = True
269
270
271 class ParameterAdmin(ImportExportModelAdmin):
272
273 resource_class = ParameterResource
274
275 list_display = ('part', 'template', 'data')
276
277
278 class PartCategoryParameterAdmin(admin.ModelAdmin):
279
280 pass
281
282
283 class PartSellPriceBreakAdmin(admin.ModelAdmin):
284
285 class Meta:
286 model = PartSellPriceBreak
287
288 list_display = ('part', 'quantity', 'price',)
289
290
291 admin.site.register(Part, PartAdmin)
292 admin.site.register(PartCategory, PartCategoryAdmin)
293 admin.site.register(PartRelated, PartRelatedAdmin)
294 admin.site.register(PartAttachment, PartAttachmentAdmin)
295 admin.site.register(PartStar, PartStarAdmin)
296 admin.site.register(BomItem, BomItemAdmin)
297 admin.site.register(PartParameterTemplate, ParameterTemplateAdmin)
298 admin.site.register(PartParameter, ParameterAdmin)
299 admin.site.register(PartCategoryParameterTemplate, PartCategoryParameterAdmin)
300 admin.site.register(PartTestTemplate, PartTestTemplateAdmin)
301 admin.site.register(PartSellPriceBreak, PartSellPriceBreakAdmin)
302
[end of InvenTree/part/admin.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/InvenTree/part/admin.py b/InvenTree/part/admin.py
--- a/InvenTree/part/admin.py
+++ b/InvenTree/part/admin.py
@@ -16,8 +16,6 @@
from .models import PartTestTemplate
from .models import PartSellPriceBreak
-from InvenTree.helpers import normalize
-
from stock.models import StockLocation
from company.models import SupplierPart
@@ -180,7 +178,7 @@
Ref: https://django-import-export.readthedocs.io/en/latest/getting_started.html#advanced-data-manipulation-on-export
"""
- return normalize(item.quantity)
+ return float(item.quantity)
def before_export(self, queryset, *args, **kwargs):
| {"golden_diff": "diff --git a/InvenTree/part/admin.py b/InvenTree/part/admin.py\n--- a/InvenTree/part/admin.py\n+++ b/InvenTree/part/admin.py\n@@ -16,8 +16,6 @@\n from .models import PartTestTemplate\n from .models import PartSellPriceBreak\n \n-from InvenTree.helpers import normalize\n-\n from stock.models import StockLocation\n from company.models import SupplierPart\n \n@@ -180,7 +178,7 @@\n \n Ref: https://django-import-export.readthedocs.io/en/latest/getting_started.html#advanced-data-manipulation-on-export\n \"\"\"\n- return normalize(item.quantity)\n+ return float(item.quantity)\n \n def before_export(self, queryset, *args, **kwargs):\n", "issue": "Error exporting BOM in YAML format\nThe error is:\r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import unicode_literals\n\nfrom django.contrib import admin\n\nfrom import_export.admin import ImportExportModelAdmin\nfrom import_export.resources import ModelResource\nfrom import_export.fields import Field\nimport import_export.widgets as widgets\n\nfrom .models import PartCategory, Part\nfrom .models import PartAttachment, PartStar, PartRelated\nfrom .models import BomItem\nfrom .models import PartParameterTemplate, PartParameter\nfrom .models import PartCategoryParameterTemplate\nfrom .models import PartTestTemplate\nfrom .models import PartSellPriceBreak\n\nfrom InvenTree.helpers import normalize\n\nfrom stock.models import StockLocation\nfrom company.models import SupplierPart\n\n\nclass PartResource(ModelResource):\n \"\"\" Class for managing Part data import/export \"\"\"\n\n # ForeignKey fields\n category = Field(attribute='category', widget=widgets.ForeignKeyWidget(PartCategory))\n \n default_location = Field(attribute='default_location', widget=widgets.ForeignKeyWidget(StockLocation))\n\n default_supplier = Field(attribute='default_supplier', widget=widgets.ForeignKeyWidget(SupplierPart))\n\n category_name = Field(attribute='category__name', readonly=True)\n \n variant_of = Field(attribute='variant_of', widget=widgets.ForeignKeyWidget(Part))\n\n suppliers = Field(attribute='supplier_count', readonly=True)\n\n # Extra calculated meta-data (readonly)\n in_stock = Field(attribute='total_stock', readonly=True, widget=widgets.IntegerWidget())\n\n on_order = Field(attribute='on_order', readonly=True, widget=widgets.IntegerWidget())\n\n used_in = Field(attribute='used_in_count', readonly=True, widget=widgets.IntegerWidget())\n\n allocated = Field(attribute='allocation_count', readonly=True, widget=widgets.IntegerWidget())\n\n building = Field(attribute='quantity_being_built', readonly=True, widget=widgets.IntegerWidget())\n\n class Meta:\n model = Part\n skip_unchanged = True\n report_skipped = False\n clean_model_instances = True\n exclude = [\n 'bom_checksum', 'bom_checked_by', 'bom_checked_date',\n 'lft', 'rght', 'tree_id', 'level',\n ]\n\n def get_queryset(self):\n \"\"\" Prefetch related data for quicker access \"\"\"\n\n query = super().get_queryset()\n query = query.prefetch_related(\n 'category',\n 'used_in',\n 'builds',\n 'supplier_parts__purchase_order_line_items',\n 'stock_items__allocations'\n )\n\n return query\n\n\nclass PartAdmin(ImportExportModelAdmin):\n \n resource_class = PartResource\n\n list_display = ('full_name', 'description', 'total_stock', 'category')\n\n list_filter = ('active', 'assembly', 'is_template', 'virtual')\n\n search_fields = ('name', 'description', 'category__name', 'category__description', 'IPN')\n\n\nclass PartCategoryResource(ModelResource):\n \"\"\" Class for 
managing PartCategory data import/export \"\"\"\n\n parent = Field(attribute='parent', widget=widgets.ForeignKeyWidget(PartCategory))\n\n parent_name = Field(attribute='parent__name', readonly=True)\n\n default_location = Field(attribute='default_location', widget=widgets.ForeignKeyWidget(StockLocation))\n\n class Meta:\n model = PartCategory\n skip_unchanged = True\n report_skipped = False\n clean_model_instances = True\n\n exclude = [\n # Exclude MPTT internal model fields\n 'lft', 'rght', 'tree_id', 'level',\n ]\n\n def after_import(self, dataset, result, using_transactions, dry_run, **kwargs):\n\n super().after_import(dataset, result, using_transactions, dry_run, **kwargs)\n\n # Rebuild the PartCategory tree(s)\n PartCategory.objects.rebuild()\n\n\nclass PartCategoryAdmin(ImportExportModelAdmin):\n\n resource_class = PartCategoryResource\n\n list_display = ('name', 'pathstring', 'description')\n\n search_fields = ('name', 'description')\n\n\nclass PartRelatedAdmin(admin.ModelAdmin):\n ''' Class to manage PartRelated objects '''\n pass\n\n\nclass PartAttachmentAdmin(admin.ModelAdmin):\n\n list_display = ('part', 'attachment', 'comment')\n\n\nclass PartStarAdmin(admin.ModelAdmin):\n\n list_display = ('part', 'user')\n\n\nclass PartTestTemplateAdmin(admin.ModelAdmin):\n\n list_display = ('part', 'test_name', 'required')\n\n\nclass BomItemResource(ModelResource):\n \"\"\" Class for managing BomItem data import/export \"\"\"\n\n level = Field(attribute='level', readonly=True)\n\n bom_id = Field(attribute='pk')\n\n # ID of the parent part\n parent_part_id = Field(attribute='part', widget=widgets.ForeignKeyWidget(Part))\n\n # IPN of the parent part\n parent_part_ipn = Field(attribute='part__IPN', readonly=True)\n\n # Name of the parent part\n parent_part_name = Field(attribute='part__name', readonly=True)\n\n # ID of the sub-part\n part_id = Field(attribute='sub_part', widget=widgets.ForeignKeyWidget(Part))\n\n # IPN of the sub-part\n part_ipn = Field(attribute='sub_part__IPN', readonly=True)\n\n # Name of the sub-part\n part_name = Field(attribute='sub_part__name', readonly=True)\n\n # Description of the sub-part\n part_description = Field(attribute='sub_part__description', readonly=True)\n\n # Is the sub-part itself an assembly?\n sub_assembly = Field(attribute='sub_part__assembly', readonly=True)\n\n def dehydrate_quantity(self, item):\n \"\"\"\n Special consideration for the 'quantity' field on data export.\n We do not want a spreadsheet full of \"1.0000\" (we'd rather \"1\")\n\n Ref: https://django-import-export.readthedocs.io/en/latest/getting_started.html#advanced-data-manipulation-on-export\n \"\"\"\n return normalize(item.quantity)\n\n def before_export(self, queryset, *args, **kwargs):\n\n self.is_importing = kwargs.get('importing', False)\n\n def get_fields(self, **kwargs):\n \"\"\"\n If we are exporting for the purposes of generating\n a 'bom-import' template, there are some fields which\n we are not interested in.\n \"\"\"\n\n fields = super().get_fields(**kwargs)\n\n # If we are not generating an \"import\" template,\n # just return the complete list of fields\n if not self.is_importing:\n return fields\n\n # Otherwise, remove some fields we are not interested in\n\n idx = 0\n\n to_remove = [\n 'level',\n 'bom_id',\n 'parent_part_id',\n 'parent_part_ipn',\n 'parent_part_name',\n 'part_description',\n 'sub_assembly'\n ]\n\n while idx < len(fields):\n\n if fields[idx].column_name.lower() in to_remove:\n del fields[idx]\n else:\n idx += 1\n\n return fields\n\n class Meta:\n model = 
BomItem\n skip_unchanged = True\n report_skipped = False\n clean_model_instances = True\n\n exclude = [\n 'checksum',\n 'id',\n 'part',\n 'sub_part',\n ]\n\n\nclass BomItemAdmin(ImportExportModelAdmin):\n\n resource_class = BomItemResource\n\n list_display = ('part', 'sub_part', 'quantity')\n\n search_fields = ('part__name', 'part__description', 'sub_part__name', 'sub_part__description')\n\n\nclass ParameterTemplateAdmin(ImportExportModelAdmin):\n list_display = ('name', 'units')\n\n\nclass ParameterResource(ModelResource):\n \"\"\" Class for managing PartParameter data import/export \"\"\"\n\n part = Field(attribute='part', widget=widgets.ForeignKeyWidget(Part))\n\n part_name = Field(attribute='part__name', readonly=True)\n\n template = Field(attribute='template', widget=widgets.ForeignKeyWidget(PartParameterTemplate))\n\n template_name = Field(attribute='template__name', readonly=True)\n\n class Meta:\n model = PartParameter\n skip_unchanged = True\n report_skipped = False\n clean_model_instance = True\n\n\nclass ParameterAdmin(ImportExportModelAdmin):\n\n resource_class = ParameterResource\n\n list_display = ('part', 'template', 'data')\n\n\nclass PartCategoryParameterAdmin(admin.ModelAdmin):\n\n pass\n\n\nclass PartSellPriceBreakAdmin(admin.ModelAdmin):\n\n class Meta:\n model = PartSellPriceBreak\n\n list_display = ('part', 'quantity', 'price',)\n\n\nadmin.site.register(Part, PartAdmin)\nadmin.site.register(PartCategory, PartCategoryAdmin)\nadmin.site.register(PartRelated, PartRelatedAdmin)\nadmin.site.register(PartAttachment, PartAttachmentAdmin)\nadmin.site.register(PartStar, PartStarAdmin)\nadmin.site.register(BomItem, BomItemAdmin)\nadmin.site.register(PartParameterTemplate, ParameterTemplateAdmin)\nadmin.site.register(PartParameter, ParameterAdmin)\nadmin.site.register(PartCategoryParameterTemplate, PartCategoryParameterAdmin)\nadmin.site.register(PartTestTemplate, PartTestTemplateAdmin)\nadmin.site.register(PartSellPriceBreak, PartSellPriceBreakAdmin)\n", "path": "InvenTree/part/admin.py"}]} | 3,396 | 165 |
gh_patches_debug_8190 | rasdani/github-patches | git_diff | microsoft__presidio-782 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
typo in append_analysis_explenation_text
It seems that there is a typo in the function name.
append_analysis_expl**e**nation_text -> append_analysis_expl**a**nation_text
</issue>
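The patch for this entry simply renames the method. Had outside callers already depended on the misspelled name, a common alternative would be to keep it as a deprecated alias for a release or two — a generic sketch, not what presidio actually does:

```python
import warnings


class RecognizerResult:
    """Trimmed-down stand-in for the real class, just to illustrate the rename."""

    def __init__(self, analysis_explanation=None):
        self.analysis_explanation = analysis_explanation

    def append_analysis_explanation_text(self, text: str) -> None:
        """Correctly spelled name used going forward."""
        if self.analysis_explanation:
            self.analysis_explanation.append_textual_explanation_line(text)

    def append_analysis_explenation_text(self, text: str) -> None:
        """Misspelled original kept as a deprecated alias for existing callers."""
        warnings.warn(
            "append_analysis_explenation_text is deprecated; "
            "use append_analysis_explanation_text",
            DeprecationWarning,
            stacklevel=2,
        )
        self.append_analysis_explanation_text(text)


# Old spelling still works but warns (run with `python -W default` to see it);
# with no analysis_explanation attached this call is a no-op.
RecognizerResult().append_analysis_explenation_text("example text")
```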
<code>
[start of presidio-analyzer/presidio_analyzer/recognizer_result.py]
1 import logging
2 from typing import Dict
3
4 from presidio_analyzer import AnalysisExplanation
5
6
7 class RecognizerResult:
8 """
9 Recognizer Result represents the findings of the detected entity.
10
11 Result of a recognizer analyzing the text.
12
13 :param entity_type: the type of the entity
14 :param start: the start location of the detected entity
15 :param end: the end location of the detected entity
16 :param score: the score of the detection
17 :param analysis_explanation: contains the explanation of why this
18 entity was identified
19 """
20
21 logger = logging.getLogger("presidio-analyzer")
22
23 def __init__(
24 self,
25 entity_type: str,
26 start: int,
27 end: int,
28 score: float,
29 analysis_explanation: AnalysisExplanation = None,
30 ):
31
32 self.entity_type = entity_type
33 self.start = start
34 self.end = end
35 self.score = score
36 self.analysis_explanation = analysis_explanation
37
38 def append_analysis_explenation_text(self, text: str) -> None:
39 """Add text to the analysis explanation."""
40 if self.analysis_explanation:
41 self.analysis_explanation.append_textual_explanation_line(text)
42
43 def to_dict(self) -> Dict:
44 """
45 Serialize self to dictionary.
46
47 :return: a dictionary
48 """
49 return self.__dict__
50
51 @classmethod
52 def from_json(cls, data: Dict) -> "RecognizerResult":
53 """
54 Create RecognizerResult from json.
55
56 :param data: e.g. {
57 "start": 24,
58 "end": 32,
59 "score": 0.8,
60 "entity_type": "NAME"
61 }
62 :return: RecognizerResult
63 """
64 score = data.get("score")
65 entity_type = data.get("entity_type")
66 start = data.get("start")
67 end = data.get("end")
68 return cls(entity_type, start, end, score)
69
70 def __repr__(self) -> str:
71 """Return a string representation of the instance."""
72 return self.__str__()
73
74 def intersects(self, other: "RecognizerResult") -> int:
75 """
76 Check if self intersects with a different RecognizerResult.
77
78 :return: If intersecting, returns the number of
79 intersecting characters.
80 If not, returns 0
81 """
82 # if they do not overlap the intersection is 0
83 if self.end < other.start or other.end < self.start:
84 return 0
85
86 # otherwise the intersection is min(end) - max(start)
87 return min(self.end, other.end) - max(self.start, other.start)
88
89 def contained_in(self, other: "RecognizerResult") -> bool:
90 """
91 Check if self is contained in a different RecognizerResult.
92
93 :return: true if contained
94 """
95 return self.start >= other.start and self.end <= other.end
96
97 def contains(self, other: "RecognizerResult") -> bool:
98 """
99 Check if one result is contained or equal to another result.
100
101 :param other: another RecognizerResult
102 :return: bool
103 """
104 return self.start <= other.start and self.end >= other.end
105
106 def equal_indices(self, other: "RecognizerResult") -> bool:
107 """
108 Check if the indices are equal between two results.
109
110 :param other: another RecognizerResult
111 :return:
112 """
113 return self.start == other.start and self.end == other.end
114
115 def __gt__(self, other: "RecognizerResult") -> bool:
116 """
117 Check if one result is greater by using the results indices in the text.
118
119 :param other: another RecognizerResult
120 :return: bool
121 """
122 if self.start == other.start:
123 return self.end > other.end
124 return self.start > other.start
125
126 def __eq__(self, other: "RecognizerResult") -> bool:
127 """
128 Check two results are equal by using all class fields.
129
130 :param other: another RecognizerResult
131 :return: bool
132 """
133 equal_type = self.entity_type == other.entity_type
134 equal_score = self.score == other.score
135 return self.equal_indices(other) and equal_type and equal_score
136
137 def __hash__(self):
138 """
139 Hash the result data by using all class fields.
140
141 :return: int
142 """
143 return hash(
144 f"{str(self.start)} {str(self.end)} {str(self.score)} {self.entity_type}"
145 )
146
147 def __str__(self) -> str:
148 """Return a string representation of the instance."""
149 return (
150 f"type: {self.entity_type}, "
151 f"start: {self.start}, "
152 f"end: {self.end}, "
153 f"score: {self.score}"
154 )
155
156 def has_conflict(self, other: "RecognizerResult") -> bool:
157 """
158 Check if two recognizer results are conflicted or not.
159
160 I have a conflict if:
161 1. My indices are the same as the other and my score is lower.
162 2. If my indices are contained in another.
163
164 :param other: RecognizerResult
165 :return:
166 """
167 if self.equal_indices(other):
168 return self.score <= other.score
169 return other.contains(self)
170
[end of presidio-analyzer/presidio_analyzer/recognizer_result.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/presidio-analyzer/presidio_analyzer/recognizer_result.py b/presidio-analyzer/presidio_analyzer/recognizer_result.py
--- a/presidio-analyzer/presidio_analyzer/recognizer_result.py
+++ b/presidio-analyzer/presidio_analyzer/recognizer_result.py
@@ -35,7 +35,7 @@
self.score = score
self.analysis_explanation = analysis_explanation
- def append_analysis_explenation_text(self, text: str) -> None:
+ def append_analysis_explanation_text(self, text: str) -> None:
"""Add text to the analysis explanation."""
if self.analysis_explanation:
self.analysis_explanation.append_textual_explanation_line(text)
| {"golden_diff": "diff --git a/presidio-analyzer/presidio_analyzer/recognizer_result.py b/presidio-analyzer/presidio_analyzer/recognizer_result.py\n--- a/presidio-analyzer/presidio_analyzer/recognizer_result.py\n+++ b/presidio-analyzer/presidio_analyzer/recognizer_result.py\n@@ -35,7 +35,7 @@\n self.score = score\n self.analysis_explanation = analysis_explanation\n \n- def append_analysis_explenation_text(self, text: str) -> None:\n+ def append_analysis_explanation_text(self, text: str) -> None:\n \"\"\"Add text to the analysis explanation.\"\"\"\n if self.analysis_explanation:\n self.analysis_explanation.append_textual_explanation_line(text)\n", "issue": "typo in append_analysis_explenation_text\nIt seems that there is a typo in the function name.\r\n\r\nappend_analysis_expl**e**nation_text - > append_analysis_expl**a**nation_text\n", "before_files": [{"content": "import logging\nfrom typing import Dict\n\nfrom presidio_analyzer import AnalysisExplanation\n\n\nclass RecognizerResult:\n \"\"\"\n Recognizer Result represents the findings of the detected entity.\n\n Result of a recognizer analyzing the text.\n\n :param entity_type: the type of the entity\n :param start: the start location of the detected entity\n :param end: the end location of the detected entity\n :param score: the score of the detection\n :param analysis_explanation: contains the explanation of why this\n entity was identified\n \"\"\"\n\n logger = logging.getLogger(\"presidio-analyzer\")\n\n def __init__(\n self,\n entity_type: str,\n start: int,\n end: int,\n score: float,\n analysis_explanation: AnalysisExplanation = None,\n ):\n\n self.entity_type = entity_type\n self.start = start\n self.end = end\n self.score = score\n self.analysis_explanation = analysis_explanation\n\n def append_analysis_explenation_text(self, text: str) -> None:\n \"\"\"Add text to the analysis explanation.\"\"\"\n if self.analysis_explanation:\n self.analysis_explanation.append_textual_explanation_line(text)\n\n def to_dict(self) -> Dict:\n \"\"\"\n Serialize self to dictionary.\n\n :return: a dictionary\n \"\"\"\n return self.__dict__\n\n @classmethod\n def from_json(cls, data: Dict) -> \"RecognizerResult\":\n \"\"\"\n Create RecognizerResult from json.\n\n :param data: e.g. 
{\n \"start\": 24,\n \"end\": 32,\n \"score\": 0.8,\n \"entity_type\": \"NAME\"\n }\n :return: RecognizerResult\n \"\"\"\n score = data.get(\"score\")\n entity_type = data.get(\"entity_type\")\n start = data.get(\"start\")\n end = data.get(\"end\")\n return cls(entity_type, start, end, score)\n\n def __repr__(self) -> str:\n \"\"\"Return a string representation of the instance.\"\"\"\n return self.__str__()\n\n def intersects(self, other: \"RecognizerResult\") -> int:\n \"\"\"\n Check if self intersects with a different RecognizerResult.\n\n :return: If intersecting, returns the number of\n intersecting characters.\n If not, returns 0\n \"\"\"\n # if they do not overlap the intersection is 0\n if self.end < other.start or other.end < self.start:\n return 0\n\n # otherwise the intersection is min(end) - max(start)\n return min(self.end, other.end) - max(self.start, other.start)\n\n def contained_in(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if self is contained in a different RecognizerResult.\n\n :return: true if contained\n \"\"\"\n return self.start >= other.start and self.end <= other.end\n\n def contains(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if one result is contained or equal to another result.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n return self.start <= other.start and self.end >= other.end\n\n def equal_indices(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if the indices are equal between two results.\n\n :param other: another RecognizerResult\n :return:\n \"\"\"\n return self.start == other.start and self.end == other.end\n\n def __gt__(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if one result is greater by using the results indices in the text.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n if self.start == other.start:\n return self.end > other.end\n return self.start > other.start\n\n def __eq__(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check two results are equal by using all class fields.\n\n :param other: another RecognizerResult\n :return: bool\n \"\"\"\n equal_type = self.entity_type == other.entity_type\n equal_score = self.score == other.score\n return self.equal_indices(other) and equal_type and equal_score\n\n def __hash__(self):\n \"\"\"\n Hash the result data by using all class fields.\n\n :return: int\n \"\"\"\n return hash(\n f\"{str(self.start)} {str(self.end)} {str(self.score)} {self.entity_type}\"\n )\n\n def __str__(self) -> str:\n \"\"\"Return a string representation of the instance.\"\"\"\n return (\n f\"type: {self.entity_type}, \"\n f\"start: {self.start}, \"\n f\"end: {self.end}, \"\n f\"score: {self.score}\"\n )\n\n def has_conflict(self, other: \"RecognizerResult\") -> bool:\n \"\"\"\n Check if two recognizer results are conflicted or not.\n\n I have a conflict if:\n 1. My indices are the same as the other and my score is lower.\n 2. If my indices are contained in another.\n\n :param other: RecognizerResult\n :return:\n \"\"\"\n if self.equal_indices(other):\n return self.score <= other.score\n return other.contains(self)\n", "path": "presidio-analyzer/presidio_analyzer/recognizer_result.py"}]} | 2,147 | 172 |
gh_patches_debug_12255 | rasdani/github-patches | git_diff | pypa__pip-5921 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add pre-released package upgrade hints
Before this commit, when querying pip for an installed package, it was not
intuitive that a pre-release version of that package returned by the search
could not be upgraded to directly without the `--pre` flag.
This commit shows additional information on how to upgrade to
pre-release versions of a package when one is found in the search.
Fixes #5169.
<!---
Thank you for your soon to be pull request. Before you submit this, please
double check to make sure that you've added a news file fragment. In pip we
generate our NEWS.rst from multiple news fragment files, and all pull requests
require either a news file fragment or a marker to indicate they don't require
one.
To read more about adding a news file fragment for your PR, please check out
our documentation at: https://pip.pypa.io/en/latest/development/#adding-a-news-entry
-->
Display pre-release version hint in package version output of "pip search" (and others)
* Pip version: pip 9.0.3 from c:\python36\lib\site-packages (python 3.6)
* Python version: Python 3.6.2
* Operating system: Windows 10
### Description:
In #5168 I learned that version numbers like "2018.04.04a" are interpreted as pre-release versions. Therefore I could not install them without adding the parameter `--pre`.
However, this is _implicit_ knowledge the user must have about interpretations of version numbers and related pip behavior in order to understand the behavior of pip. Otherwise, the user is irritated, just like I was until a couple of minutes ago.
Current situation where 2018.04.04a is not installable when `--pre` is not given:
```
C:\Users\karl.voit>pip search filetags
filetags (2018.04.04a) - Management of simple tags within file names
INSTALLED: 2018.3.19
LATEST: 2018.04.04a
C:\Users\karl.voit>
```
Shouldn't there at least be a hint such as following suggestion?
```
C:\Users\karl.voit>pip search filetags
filetags (2018.04.04a) - Management of simple tags within file names
INSTALLED: 2018.3.19
LATEST: 2018.04.04a (pre-release; install with "pip install --pre")
C:\Users\karl.voit>
```
Maybe this applies to multiple stdout messages.
### What I've run:
See #5168 to give you some background.
</issue>
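The check behind the suggested hint is already available to pip: the version parser it vendors from `packaging` exposes a `.pre` attribute that is non-empty for versions such as `2018.04.04a`. A small sketch using `packaging` directly (pip itself would go through its vendored `parse_version`):

```python
# Sketch of the pre-release check the hint needs; uses packaging directly,
# whereas pip would call its vendored parse_version.
from packaging.version import parse


def latest_line(latest: str) -> str:
    """Format the LATEST line, appending the --pre hint for pre-releases."""
    if parse(latest).pre:
        return f'LATEST:    {latest} (pre-release; install with "pip install --pre")'
    return f"LATEST:    {latest}"


print(latest_line("2018.3.19"))    # stable release -> no hint
print(latest_line("2018.04.04a"))  # trailing "a" is parsed as a pre-release -> hint shown
```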
<code>
[start of src/pip/_internal/commands/search.py]
1 from __future__ import absolute_import
2
3 import logging
4 import sys
5 import textwrap
6 from collections import OrderedDict
7
8 from pip._vendor import pkg_resources
9 from pip._vendor.packaging.version import parse as parse_version
10 # NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is
11 # why we ignore the type on this import
12 from pip._vendor.six.moves import xmlrpc_client # type: ignore
13
14 from pip._internal.cli.base_command import Command
15 from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
16 from pip._internal.download import PipXmlrpcTransport
17 from pip._internal.exceptions import CommandError
18 from pip._internal.models.index import PyPI
19 from pip._internal.utils.compat import get_terminal_size
20 from pip._internal.utils.logging import indent_log
21
22 logger = logging.getLogger(__name__)
23
24
25 class SearchCommand(Command):
26 """Search for PyPI packages whose name or summary contains <query>."""
27 name = 'search'
28 usage = """
29 %prog [options] <query>"""
30 summary = 'Search PyPI for packages.'
31 ignore_require_venv = True
32
33 def __init__(self, *args, **kw):
34 super(SearchCommand, self).__init__(*args, **kw)
35 self.cmd_opts.add_option(
36 '-i', '--index',
37 dest='index',
38 metavar='URL',
39 default=PyPI.pypi_url,
40 help='Base URL of Python Package Index (default %default)')
41
42 self.parser.insert_option_group(0, self.cmd_opts)
43
44 def run(self, options, args):
45 if not args:
46 raise CommandError('Missing required argument (search query).')
47 query = args
48 pypi_hits = self.search(query, options)
49 hits = transform_hits(pypi_hits)
50
51 terminal_width = None
52 if sys.stdout.isatty():
53 terminal_width = get_terminal_size()[0]
54
55 print_results(hits, terminal_width=terminal_width)
56 if pypi_hits:
57 return SUCCESS
58 return NO_MATCHES_FOUND
59
60 def search(self, query, options):
61 index_url = options.index
62 with self._build_session(options) as session:
63 transport = PipXmlrpcTransport(index_url, session)
64 pypi = xmlrpc_client.ServerProxy(index_url, transport)
65 hits = pypi.search({'name': query, 'summary': query}, 'or')
66 return hits
67
68
69 def transform_hits(hits):
70 """
71 The list from pypi is really a list of versions. We want a list of
72 packages with the list of versions stored inline. This converts the
73 list from pypi into one we can use.
74 """
75 packages = OrderedDict()
76 for hit in hits:
77 name = hit['name']
78 summary = hit['summary']
79 version = hit['version']
80
81 if name not in packages.keys():
82 packages[name] = {
83 'name': name,
84 'summary': summary,
85 'versions': [version],
86 }
87 else:
88 packages[name]['versions'].append(version)
89
90 # if this is the highest version, replace summary and score
91 if version == highest_version(packages[name]['versions']):
92 packages[name]['summary'] = summary
93
94 return list(packages.values())
95
96
97 def print_results(hits, name_column_width=None, terminal_width=None):
98 if not hits:
99 return
100 if name_column_width is None:
101 name_column_width = max([
102 len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))
103 for hit in hits
104 ]) + 4
105
106 installed_packages = [p.project_name for p in pkg_resources.working_set]
107 for hit in hits:
108 name = hit['name']
109 summary = hit['summary'] or ''
110 latest = highest_version(hit.get('versions', ['-']))
111 if terminal_width is not None:
112 target_width = terminal_width - name_column_width - 5
113 if target_width > 10:
114 # wrap and indent summary to fit terminal
115 summary = textwrap.wrap(summary, target_width)
116 summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
117
118 line = '%-*s - %s' % (name_column_width,
119 '%s (%s)' % (name, latest), summary)
120 try:
121 logger.info(line)
122 if name in installed_packages:
123 dist = pkg_resources.get_distribution(name)
124 with indent_log():
125 if dist.version == latest:
126 logger.info('INSTALLED: %s (latest)', dist.version)
127 else:
128 logger.info('INSTALLED: %s', dist.version)
129 logger.info('LATEST: %s', latest)
130 except UnicodeEncodeError:
131 pass
132
133
134 def highest_version(versions):
135 return max(versions, key=parse_version)
136
[end of src/pip/_internal/commands/search.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/pip/_internal/commands/search.py b/src/pip/_internal/commands/search.py
--- a/src/pip/_internal/commands/search.py
+++ b/src/pip/_internal/commands/search.py
@@ -126,7 +126,11 @@
logger.info('INSTALLED: %s (latest)', dist.version)
else:
logger.info('INSTALLED: %s', dist.version)
- logger.info('LATEST: %s', latest)
+ if parse_version(latest).pre:
+ logger.info('LATEST: %s (pre-release; install'
+ ' with "pip install --pre")', latest)
+ else:
+ logger.info('LATEST: %s', latest)
except UnicodeEncodeError:
pass
| {"golden_diff": "diff --git a/src/pip/_internal/commands/search.py b/src/pip/_internal/commands/search.py\n--- a/src/pip/_internal/commands/search.py\n+++ b/src/pip/_internal/commands/search.py\n@@ -126,7 +126,11 @@\n logger.info('INSTALLED: %s (latest)', dist.version)\n else:\n logger.info('INSTALLED: %s', dist.version)\n- logger.info('LATEST: %s', latest)\n+ if parse_version(latest).pre:\n+ logger.info('LATEST: %s (pre-release; install'\n+ ' with \"pip install --pre\")', latest)\n+ else:\n+ logger.info('LATEST: %s', latest)\n except UnicodeEncodeError:\n pass\n", "issue": "Add pre-released package upgrade hints\nBefore this commit when querying for an installed package using pip if\r\nthe package that was returned contained a pre-released version of the\r\npackage it was not intuitive that the pre-released version of the\r\npackage could not be directly upgraded to without the `--pre` flag.\r\n\r\nThis commit shows additional information on how to upgrade to\r\npre-released versions of a package if one is found in the search.\r\n\r\nFixes #5169.\r\n\r\n<!---\r\nThank you for your soon to be pull request. Before you submit this, please\r\ndouble check to make sure that you've added a news file fragment. In pip we\r\ngenerate our NEWS.rst from multiple news fragment files, and all pull requests\r\nrequire either a news file fragment or a marker to indicate they don't require\r\none.\r\n\r\nTo read more about adding a news file fragment for your PR, please check out\r\nour documentation at: https://pip.pypa.io/en/latest/development/#adding-a-news-entry\r\n-->\r\n\nDisplay pre-release version hint in package version output of \"pip search\" (and others)\n* Pip version: pip 9.0.3 from c:\\python36\\lib\\site-packages (python 3.6)\r\n* Python version: Python 3.6.2\r\n* Operating system: Windows 10\r\n\r\n### Description:\r\n\r\nIn #5168 I learned that version numbers like \"2018.04.04a\" are interpreted as pre-release versions. Therefore I could not install them without adding the parameter `--pre`.\r\n\r\nHowever, this is _implicit_ knowledge the user must have about interpretations of version numbers and related pip behavior in order to understand the behavior of pip. 
Otherwise, the user is irritated, just like I was until a couple of minutes ago.\r\n\r\nCurrent situation where 2018.04.04a is not installable when `--pre` is not given:\r\n\r\n```\r\nC:\\Users\\karl.voit>pip search filetags\r\nfiletags (2018.04.04a) - Management of simple tags within file names\r\n INSTALLED: 2018.3.19\r\n LATEST: 2018.04.04a\r\n\r\nC:\\Users\\karl.voit>\r\n```\r\n\r\nShouldn't there at least be a hint such as following suggestion?\r\n\r\n```\r\nC:\\Users\\karl.voit>pip search filetags\r\nfiletags (2018.04.04a) - Management of simple tags within file names\r\n INSTALLED: 2018.3.19\r\n LATEST: 2018.04.04a (pre-release; install with \"pip install --pre\")\r\n\r\nC:\\Users\\karl.voit>\r\n```\r\n\r\nMaybe this applies for multiple stdout messages.\r\n\r\n### What I've run:\r\n\r\nSee #5168 to give you some background.\n", "before_files": [{"content": "from __future__ import absolute_import\n\nimport logging\nimport sys\nimport textwrap\nfrom collections import OrderedDict\n\nfrom pip._vendor import pkg_resources\nfrom pip._vendor.packaging.version import parse as parse_version\n# NOTE: XMLRPC Client is not annotated in typeshed as on 2017-07-17, which is\n# why we ignore the type on this import\nfrom pip._vendor.six.moves import xmlrpc_client # type: ignore\n\nfrom pip._internal.cli.base_command import Command\nfrom pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS\nfrom pip._internal.download import PipXmlrpcTransport\nfrom pip._internal.exceptions import CommandError\nfrom pip._internal.models.index import PyPI\nfrom pip._internal.utils.compat import get_terminal_size\nfrom pip._internal.utils.logging import indent_log\n\nlogger = logging.getLogger(__name__)\n\n\nclass SearchCommand(Command):\n \"\"\"Search for PyPI packages whose name or summary contains <query>.\"\"\"\n name = 'search'\n usage = \"\"\"\n %prog [options] <query>\"\"\"\n summary = 'Search PyPI for packages.'\n ignore_require_venv = True\n\n def __init__(self, *args, **kw):\n super(SearchCommand, self).__init__(*args, **kw)\n self.cmd_opts.add_option(\n '-i', '--index',\n dest='index',\n metavar='URL',\n default=PyPI.pypi_url,\n help='Base URL of Python Package Index (default %default)')\n\n self.parser.insert_option_group(0, self.cmd_opts)\n\n def run(self, options, args):\n if not args:\n raise CommandError('Missing required argument (search query).')\n query = args\n pypi_hits = self.search(query, options)\n hits = transform_hits(pypi_hits)\n\n terminal_width = None\n if sys.stdout.isatty():\n terminal_width = get_terminal_size()[0]\n\n print_results(hits, terminal_width=terminal_width)\n if pypi_hits:\n return SUCCESS\n return NO_MATCHES_FOUND\n\n def search(self, query, options):\n index_url = options.index\n with self._build_session(options) as session:\n transport = PipXmlrpcTransport(index_url, session)\n pypi = xmlrpc_client.ServerProxy(index_url, transport)\n hits = pypi.search({'name': query, 'summary': query}, 'or')\n return hits\n\n\ndef transform_hits(hits):\n \"\"\"\n The list from pypi is really a list of versions. We want a list of\n packages with the list of versions stored inline. 
This converts the\n list from pypi into one we can use.\n \"\"\"\n packages = OrderedDict()\n for hit in hits:\n name = hit['name']\n summary = hit['summary']\n version = hit['version']\n\n if name not in packages.keys():\n packages[name] = {\n 'name': name,\n 'summary': summary,\n 'versions': [version],\n }\n else:\n packages[name]['versions'].append(version)\n\n # if this is the highest version, replace summary and score\n if version == highest_version(packages[name]['versions']):\n packages[name]['summary'] = summary\n\n return list(packages.values())\n\n\ndef print_results(hits, name_column_width=None, terminal_width=None):\n if not hits:\n return\n if name_column_width is None:\n name_column_width = max([\n len(hit['name']) + len(highest_version(hit.get('versions', ['-'])))\n for hit in hits\n ]) + 4\n\n installed_packages = [p.project_name for p in pkg_resources.working_set]\n for hit in hits:\n name = hit['name']\n summary = hit['summary'] or ''\n latest = highest_version(hit.get('versions', ['-']))\n if terminal_width is not None:\n target_width = terminal_width - name_column_width - 5\n if target_width > 10:\n # wrap and indent summary to fit terminal\n summary = textwrap.wrap(summary, target_width)\n summary = ('\\n' + ' ' * (name_column_width + 3)).join(summary)\n\n line = '%-*s - %s' % (name_column_width,\n '%s (%s)' % (name, latest), summary)\n try:\n logger.info(line)\n if name in installed_packages:\n dist = pkg_resources.get_distribution(name)\n with indent_log():\n if dist.version == latest:\n logger.info('INSTALLED: %s (latest)', dist.version)\n else:\n logger.info('INSTALLED: %s', dist.version)\n logger.info('LATEST: %s', latest)\n except UnicodeEncodeError:\n pass\n\n\ndef highest_version(versions):\n return max(versions, key=parse_version)\n", "path": "src/pip/_internal/commands/search.py"}]} | 2,506 | 175 |
gh_patches_debug_7038 | rasdani/github-patches | git_diff | Textualize__textual-1600 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Populating a `ListView` via constructor vs via inherit and `compose`
There seems to be a subtle difference in the working of a `ListView` if you create one by passing the `ListItem`s to it, vs if you create a custom `ListView` by inheriting from it and using `compose` to populate it. Take the following example code, which places both approaches side-by-side:
```python
from textual.app import App, ComposeResult
from textual.containers import Horizontal
from textual.widgets import Header, Footer, ListView, ListItem, Label
class CustomListView( ListView ):
def __init__( self, choices: list[ str ] ) -> None:
super().__init__()
self._choices = choices
def compose( self ) -> ComposeResult:
"""Compose the child widgets."""
for choice in self._choices:
yield ListItem( Label( choice ) )
class ListViewMakerApp( App[ None ] ):
CSS = """
ListView {
width: 1fr;
height: 1fr;
border: round red;
}
"""
OPTIONS = [ f"This is the nifty test option {n}" for n in range( 20 ) ]
def compose( self ) -> ComposeResult:
yield Header()
yield Horizontal(
ListView(
*[ ListItem( Label( option ) ) for option in self.OPTIONS ]
),
CustomListView( self.OPTIONS )
)
yield Footer()
if __name__ == "__main__":
ListViewMakerApp().run()
```
I feel the `CustomListView` would not be an unusual approach for people to take; perhaps wanting to make their own commonly-used selection list of simple values (or even very complex child values that they want easily handled). Side-by-side the `ListView`s look the same to start with:
<img width="922" alt="Screenshot 2023-01-17 at 11 10 47" src="https://user-images.githubusercontent.com/28237/212884262-8d5252a8-3318-4f72-a3de-ae5f73b5a41c.png">
Note that the non-inherited `ListView` is to the left and that, even though it doesn't have focus, the first item is highlighted. If you <kbd>tab</kbd> into it you can cursor around, etc, just fine. On the other hand notice that the right `ListView` (which is a `CustomListView`) has no obvious highlighted item and if you tab into it nothing gets highlighted.
Further to this, if you (via keyboard) focus the inherited (right hand) `ListView` and then attempt to cursor down, the following exception will be raised:
```
╭─────────────────────────────── Traceback (most recent call last) ────────────────────────────────╮
│ /Users/davep/develop/python/textual-sandbox/.venv/lib/python3.10/site-packages/textual/widgets/_ │
│ list_view.py:127 in action_cursor_down │
│ │
│ 124 │ │ self.emit_no_wait(self.Selected(self, selected_child)) │
│ 125 │ │
│ 126 │ def action_cursor_down(self) -> None: │
│ ❱ 127 │ │ self.index += 1 │
│ 128 │ │
│ 129 │ def action_cursor_up(self) -> None: │
│ 130 │ │ self.index -= 1 │
│ │
│ ╭──────────────────────────── locals ─────────────────────────────╮ │
│ │ self = CustomListView(pseudo_classes={'focus', 'focus-within'}) │ │
│ ╰─────────────────────────────────────────────────────────────────╯ │
╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
TypeError: unsupported operand type(s) for +=: 'NoneType' and 'int'
```
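For completeness, a minimal workaround sketch (just one possible approach on the application side, not a fix for `ListView` itself) is to build the items up front and route them through the constructor instead of `compose`:

```python
from textual.widgets import Label, ListItem, ListView


class CustomListView(ListView):
    """Same idea as the example above, but without compose()."""

    def __init__(self, choices: list[str]) -> None:
        # Handing the ListItems to the parent constructor means they already
        # exist when ListView.__init__ assigns `index`, so validate_index()
        # keeps 0 rather than clamping it to None (which is what happens when
        # the list is still empty and compose() only fills it in later).
        super().__init__(*[ListItem(Label(choice)) for choice in choices])
```

Presumably the more general fix is for `ListView` itself to (re)apply the initial index once its children have actually been mounted, rather than only in `__init__`.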
</issue>
<code>
[start of src/textual/widgets/_list_view.py]
1 from __future__ import annotations
2
3 from textual import events
4 from textual.await_remove import AwaitRemove
5 from textual.binding import Binding
6 from textual.containers import Vertical
7 from textual.geometry import clamp
8 from textual.message import Message
9 from textual.reactive import reactive
10 from textual.widget import AwaitMount
11 from textual.widgets._list_item import ListItem
12
13
14 class ListView(Vertical, can_focus=True, can_focus_children=False):
15 """Displays a vertical list of `ListItem`s which can be highlighted
16 and selected using the mouse or keyboard.
17
18 Attributes:
19 index: The index in the list that's currently highlighted.
20 """
21
22 BINDINGS = [
23 Binding("enter", "select_cursor", "Select", show=False),
24 Binding("up", "cursor_up", "Cursor Up", show=False),
25 Binding("down", "cursor_down", "Cursor Down", show=False),
26 ]
27
28 index = reactive(0, always_update=True)
29
30 def __init__(
31 self,
32 *children: ListItem,
33 initial_index: int | None = 0,
34 name: str | None = None,
35 id: str | None = None,
36 classes: str | None = None,
37 ) -> None:
38 """
39 Args:
40 *children: The ListItems to display in the list.
41 initial_index: The index that should be highlighted when the list is first mounted.
42 name: The name of the widget.
43 id: The unique ID of the widget used in CSS/query selection.
44 classes: The CSS classes of the widget.
45 """
46 super().__init__(*children, name=name, id=id, classes=classes)
47 self.index = initial_index
48
49 @property
50 def highlighted_child(self) -> ListItem | None:
51 """ListItem | None: The currently highlighted ListItem,
52 or None if nothing is highlighted.
53 """
54 if self.index is None:
55 return None
56 elif 0 <= self.index < len(self.children):
57 return self.children[self.index]
58
59 def validate_index(self, index: int | None) -> int | None:
60 """Clamp the index to the valid range, or set to None if there's nothing to highlight."""
61 if not self.children or index is None:
62 return None
63 return self._clamp_index(index)
64
65 def _clamp_index(self, index: int) -> int:
66 """Clamp the index to a valid value given the current list of children"""
67 last_index = max(len(self.children) - 1, 0)
68 return clamp(index, 0, last_index)
69
70 def _is_valid_index(self, index: int | None) -> bool:
71 """Return True if the current index is valid given the current list of children"""
72 if index is None:
73 return False
74 return 0 <= index < len(self.children)
75
76 def watch_index(self, old_index: int, new_index: int) -> None:
77 """Updates the highlighting when the index changes."""
78 if self._is_valid_index(old_index):
79 old_child = self.children[old_index]
80 old_child.highlighted = False
81 if self._is_valid_index(new_index):
82 new_child = self.children[new_index]
83 new_child.highlighted = True
84 else:
85 new_child = None
86
87 self._scroll_highlighted_region()
88 self.emit_no_wait(self.Highlighted(self, new_child))
89
90 def append(self, item: ListItem) -> AwaitMount:
91 """Append a new ListItem to the end of the ListView.
92
93 Args:
94 item (ListItem): The ListItem to append.
95
96 Returns:
97 AwaitMount: An awaitable that yields control to the event loop
98 until the DOM has been updated with the new child item.
99 """
100 await_mount = self.mount(item)
101 if len(self) == 1:
102 self.index = 0
103 return await_mount
104
105 def clear(self) -> AwaitRemove:
106 """Clear all items from the ListView.
107
108 Returns:
109 AwaitRemove: An awaitable that yields control to the event loop until
110 the DOM has been updated to reflect all children being removed.
111 """
112 await_remove = self.query("ListView > ListItem").remove()
113 self.index = None
114 return await_remove
115
116 def action_select_cursor(self) -> None:
117 selected_child = self.highlighted_child
118 self.emit_no_wait(self.Selected(self, selected_child))
119
120 def action_cursor_down(self) -> None:
121 self.index += 1
122
123 def action_cursor_up(self) -> None:
124 self.index -= 1
125
126 def on_list_item__child_clicked(self, event: ListItem._ChildClicked) -> None:
127 self.focus()
128 self.index = self.children.index(event.sender)
129 self.emit_no_wait(self.Selected(self, event.sender))
130
131 def _scroll_highlighted_region(self) -> None:
132 """Used to keep the highlighted index within vision"""
133 if self.highlighted_child is not None:
134 self.scroll_to_widget(self.highlighted_child, animate=False)
135
136 def __len__(self):
137 return len(self.children)
138
139 class Highlighted(Message, bubble=True):
140 """Emitted when the highlighted item changes. Highlighted item is controlled using up/down keys.
141
142 Attributes:
143 item (ListItem | None): The highlighted item, if there is one highlighted.
144 """
145
146 def __init__(self, sender: ListView, item: ListItem | None) -> None:
147 super().__init__(sender)
148 self.item = item
149
150 class Selected(Message, bubble=True):
151 """Emitted when a list item is selected, e.g. when you press the enter key on it
152
153 Attributes:
154 item (ListItem): The selected item.
155 """
156
157 def __init__(self, sender: ListView, item: ListItem) -> None:
158 super().__init__(sender)
159 self.item = item
160
[end of src/textual/widgets/_list_view.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/textual/widgets/_list_view.py b/src/textual/widgets/_list_view.py
--- a/src/textual/widgets/_list_view.py
+++ b/src/textual/widgets/_list_view.py
@@ -50,7 +50,11 @@
classes: The CSS classes of the widget.
"""
super().__init__(*children, name=name, id=id, classes=classes)
- self.index = initial_index
+ self._index = initial_index
+
+ def on_mount(self) -> None:
+ """Ensure the ListView is fully-settled after mounting."""
+ self.index = self._index
@property
def highlighted_child(self) -> ListItem | None:
| {"golden_diff": "diff --git a/src/textual/widgets/_list_view.py b/src/textual/widgets/_list_view.py\n--- a/src/textual/widgets/_list_view.py\n+++ b/src/textual/widgets/_list_view.py\n@@ -50,7 +50,11 @@\n classes: The CSS classes of the widget.\n \"\"\"\n super().__init__(*children, name=name, id=id, classes=classes)\n- self.index = initial_index\n+ self._index = initial_index\n+\n+ def on_mount(self) -> None:\n+ \"\"\"Ensure the ListView is fully-settled after mounting.\"\"\"\n+ self.index = self._index\n \n @property\n def highlighted_child(self) -> ListItem | None:\n", "issue": "Populating a `ListView` via constructor vs via inherit and `compose`\nThere seems to be a subtle difference in the working of a `ListView` if you create one by passing the `ListItem`s to it, vs if you create a custom `ListView` by inheriting from it and using `compose` to populate it. Take the following example code, which places both approaches side-by-side:\r\n\r\n```python\r\nfrom textual.app import App, ComposeResult\r\nfrom textual.containers import Horizontal\r\nfrom textual.widgets import Header, Footer, ListView, ListItem, Label\r\n\r\nclass CustomListView( ListView ):\r\n\r\n def __init__( self, choices: list[ str ] ) -> None:\r\n super().__init__()\r\n self._choices = choices\r\n\r\n def compose( self ) -> ComposeResult:\r\n \"\"\"Compose the child widgets.\"\"\"\r\n for choice in self._choices:\r\n yield ListItem( Label( choice ) )\r\n\r\nclass ListViewMakerApp( App[ None ] ):\r\n\r\n CSS = \"\"\"\r\n ListView {\r\n width: 1fr;\r\n height: 1fr;\r\n border: round red;\r\n }\r\n \"\"\"\r\n\r\n OPTIONS = [ f\"This is the nifty test option {n}\" for n in range( 20 ) ]\r\n\r\n def compose( self ) -> ComposeResult:\r\n yield Header()\r\n yield Horizontal(\r\n ListView(\r\n *[ ListItem( Label( option ) ) for option in self.OPTIONS ]\r\n ),\r\n CustomListView( self.OPTIONS )\r\n )\r\n yield Footer()\r\n\r\nif __name__ == \"__main__\":\r\n ListViewMakerApp().run()\r\n```\r\n\r\nI feel the `CustomListView` would not be an unusual approach for people to take; perhaps wanting to make their own commonly-used selection list of simple values (or even very complex child values that they want easily handled). Side-by-side the `ListView`s look the same to start with:\r\n\r\n<img width=\"922\" alt=\"Screenshot 2023-01-17 at 11 10 47\" src=\"https://user-images.githubusercontent.com/28237/212884262-8d5252a8-3318-4f72-a3de-ae5f73b5a41c.png\">\r\n\r\nNote that the non-inherited `ListView` is to the left and that, even though it doesn't have focus, the first item is highlighted. If you <kbd>tab</kbd> into it you can cursor around, etc, just fine. 
On the other hand notice that the right `ListView` (which is a `CustomListView`) has no obvious highlighted item and if you tab into it nothing gets highlighted.\r\n\r\nFurther to this, if you (via keyboard) focus the inherited (right hand) `ListView` and then attempt to cursor down, the following exception will be raised:\r\n\r\n```\r\n\u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 Traceback (most recent call last) \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e\r\n\u2502 /Users/davep/develop/python/textual-sandbox/.venv/lib/python3.10/site-packages/textual/widgets/_ \u2502\r\n\u2502 list_view.py:127 in action_cursor_down \u2502\r\n\u2502 \u2502\r\n\u2502 124 \u2502 \u2502 self.emit_no_wait(self.Selected(self, selected_child)) \u2502\r\n\u2502 125 \u2502 \u2502\r\n\u2502 126 \u2502 def action_cursor_down(self) -> None: \u2502\r\n\u2502 \u2771 127 \u2502 \u2502 self.index += 1 \u2502\r\n\u2502 128 \u2502 \u2502\r\n\u2502 129 \u2502 def action_cursor_up(self) -> None: \u2502\r\n\u2502 130 \u2502 \u2502 self.index -= 1 \u2502\r\n\u2502 \u2502\r\n\u2502 \u256d\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 locals \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256e \u2502\r\n\u2502 \u2502 self = CustomListView(pseudo_classes={'focus', 'focus-within'}) \u2502 \u2502\r\n\u2502 \u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f \u2502\r\n\u2570\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u256f\r\nTypeError: unsupported operand type(s) for +=: 'NoneType' and 'int'\r\n```\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom textual import events\nfrom textual.await_remove import AwaitRemove\nfrom textual.binding import Binding\nfrom textual.containers import Vertical\nfrom textual.geometry import clamp\nfrom textual.message import Message\nfrom textual.reactive import reactive\nfrom textual.widget import AwaitMount\nfrom textual.widgets._list_item import ListItem\n\n\nclass ListView(Vertical, can_focus=True, can_focus_children=False):\n \"\"\"Displays a vertical list of `ListItem`s which can be highlighted\n and selected using the mouse or keyboard.\n\n 
Attributes:\n index: The index in the list that's currently highlighted.\n \"\"\"\n\n BINDINGS = [\n Binding(\"enter\", \"select_cursor\", \"Select\", show=False),\n Binding(\"up\", \"cursor_up\", \"Cursor Up\", show=False),\n Binding(\"down\", \"cursor_down\", \"Cursor Down\", show=False),\n ]\n\n index = reactive(0, always_update=True)\n\n def __init__(\n self,\n *children: ListItem,\n initial_index: int | None = 0,\n name: str | None = None,\n id: str | None = None,\n classes: str | None = None,\n ) -> None:\n \"\"\"\n Args:\n *children: The ListItems to display in the list.\n initial_index: The index that should be highlighted when the list is first mounted.\n name: The name of the widget.\n id: The unique ID of the widget used in CSS/query selection.\n classes: The CSS classes of the widget.\n \"\"\"\n super().__init__(*children, name=name, id=id, classes=classes)\n self.index = initial_index\n\n @property\n def highlighted_child(self) -> ListItem | None:\n \"\"\"ListItem | None: The currently highlighted ListItem,\n or None if nothing is highlighted.\n \"\"\"\n if self.index is None:\n return None\n elif 0 <= self.index < len(self.children):\n return self.children[self.index]\n\n def validate_index(self, index: int | None) -> int | None:\n \"\"\"Clamp the index to the valid range, or set to None if there's nothing to highlight.\"\"\"\n if not self.children or index is None:\n return None\n return self._clamp_index(index)\n\n def _clamp_index(self, index: int) -> int:\n \"\"\"Clamp the index to a valid value given the current list of children\"\"\"\n last_index = max(len(self.children) - 1, 0)\n return clamp(index, 0, last_index)\n\n def _is_valid_index(self, index: int | None) -> bool:\n \"\"\"Return True if the current index is valid given the current list of children\"\"\"\n if index is None:\n return False\n return 0 <= index < len(self.children)\n\n def watch_index(self, old_index: int, new_index: int) -> None:\n \"\"\"Updates the highlighting when the index changes.\"\"\"\n if self._is_valid_index(old_index):\n old_child = self.children[old_index]\n old_child.highlighted = False\n if self._is_valid_index(new_index):\n new_child = self.children[new_index]\n new_child.highlighted = True\n else:\n new_child = None\n\n self._scroll_highlighted_region()\n self.emit_no_wait(self.Highlighted(self, new_child))\n\n def append(self, item: ListItem) -> AwaitMount:\n \"\"\"Append a new ListItem to the end of the ListView.\n\n Args:\n item (ListItem): The ListItem to append.\n\n Returns:\n AwaitMount: An awaitable that yields control to the event loop\n until the DOM has been updated with the new child item.\n \"\"\"\n await_mount = self.mount(item)\n if len(self) == 1:\n self.index = 0\n return await_mount\n\n def clear(self) -> AwaitRemove:\n \"\"\"Clear all items from the ListView.\n\n Returns:\n AwaitRemove: An awaitable that yields control to the event loop until\n the DOM has been updated to reflect all children being removed.\n \"\"\"\n await_remove = self.query(\"ListView > ListItem\").remove()\n self.index = None\n return await_remove\n\n def action_select_cursor(self) -> None:\n selected_child = self.highlighted_child\n self.emit_no_wait(self.Selected(self, selected_child))\n\n def action_cursor_down(self) -> None:\n self.index += 1\n\n def action_cursor_up(self) -> None:\n self.index -= 1\n\n def on_list_item__child_clicked(self, event: ListItem._ChildClicked) -> None:\n self.focus()\n self.index = self.children.index(event.sender)\n self.emit_no_wait(self.Selected(self, 
event.sender))\n\n def _scroll_highlighted_region(self) -> None:\n \"\"\"Used to keep the highlighted index within vision\"\"\"\n if self.highlighted_child is not None:\n self.scroll_to_widget(self.highlighted_child, animate=False)\n\n def __len__(self):\n return len(self.children)\n\n class Highlighted(Message, bubble=True):\n \"\"\"Emitted when the highlighted item changes. Highlighted item is controlled using up/down keys.\n\n Attributes:\n item (ListItem | None): The highlighted item, if there is one highlighted.\n \"\"\"\n\n def __init__(self, sender: ListView, item: ListItem | None) -> None:\n super().__init__(sender)\n self.item = item\n\n class Selected(Message, bubble=True):\n \"\"\"Emitted when a list item is selected, e.g. when you press the enter key on it\n\n Attributes:\n item (ListItem): The selected item.\n \"\"\"\n\n def __init__(self, sender: ListView, item: ListItem) -> None:\n super().__init__(sender)\n self.item = item\n", "path": "src/textual/widgets/_list_view.py"}]} | 3,041 | 152 |
gh_patches_debug_44259 | rasdani/github-patches | git_diff | mathesar-foundation__mathesar-1423 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Incorrect error is thrown when id field is not provided for a column object during a batch update
## Description
The following `PATCH` request to `/tables/{id}`
```
{
"columns": [
{
"name": "id",
"type": "INTEGER"
},
{
"name": "Center",
"type": "TEXT"
}
]
}
```
fails with an error `{code: 4999, message: "local variable 'name' referenced before assignment", field: null, detail: null}`.
This is happening because https://github.com/centerofci/mathesar/blob/a8bad8268c8634bc766fa2e4fde271f4ec1a165e/db/columns/operations/alter.py#L215

always expects a column name, which is looked up via the column `id`, and that `id` is missing from the patch request.
## Expected behavior
The `id` field of the column object should be required when batch updating using `/tables/{id}`.
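For illustration, a small helper along these lines (the function name and message are placeholders, not existing Mathesar code) could be called from `TableSerializer.validate()` on partial updates, so the request fails with a clear validation error instead of the code-4999 traceback:

```python
from rest_framework.exceptions import ValidationError


def require_column_ids(columns):
    """Reject a batch column update in which any column object lacks an 'id'."""
    for column in columns or []:
        if column.get('id') is None:
            raise ValidationError(
                "'id' field is required while batch updating columns."
            )
```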
## To Reproduce
Using API
- Create a table using `/api/db/v0/tables/` API
- Send a patch request to `/api/db/v0/tables/{id}`
Using frontend client
- Create a table by importing a csv file
- Error will be thrown when clicking on `Finish import`
</issue>
<code>
[start of mathesar/api/serializers/tables.py]
1 from django.urls import reverse
2 from psycopg2.errors import DuplicateTable
3 from rest_framework import serializers, status
4 from sqlalchemy.exc import ProgrammingError
5
6 from db.types.base import get_db_type_enum_from_id
7
8 from mathesar.api.exceptions.validation_exceptions.exceptions import (
9 ColumnSizeMismatchAPIException, DistinctColumnRequiredAPIException,
10 MultipleDataFileAPIException, UnknownDatabaseTypeIdentifier
11 )
12 from mathesar.api.exceptions.database_exceptions.exceptions import DuplicateTableAPIException
13 from mathesar.api.exceptions.database_exceptions.base_exceptions import ProgrammingAPIException
14 from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin
15 from mathesar.api.serializers.columns import SimpleColumnSerializer
16 from mathesar.models import Table, DataFile
17 from mathesar.utils.tables import gen_table_name, create_table_from_datafile, create_empty_table
18
19
20 class TableSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer):
21 columns = SimpleColumnSerializer(many=True, required=False)
22 records_url = serializers.SerializerMethodField()
23 constraints_url = serializers.SerializerMethodField()
24 columns_url = serializers.SerializerMethodField()
25 type_suggestions_url = serializers.SerializerMethodField()
26 previews_url = serializers.SerializerMethodField()
27 name = serializers.CharField(required=False, allow_blank=True, default='')
28 data_files = serializers.PrimaryKeyRelatedField(
29 required=False, many=True, queryset=DataFile.objects.all()
30 )
31
32 class Meta:
33 model = Table
34 fields = ['id', 'name', 'schema', 'created_at', 'updated_at', 'import_verified',
35 'columns', 'records_url', 'constraints_url', 'columns_url',
36 'type_suggestions_url', 'previews_url', 'data_files',
37 'has_dependencies']
38
39 def get_records_url(self, obj):
40 if isinstance(obj, Table):
41 # Only get records if we are serializing an existing table
42 request = self.context['request']
43 return request.build_absolute_uri(reverse('table-record-list', kwargs={'table_pk': obj.pk}))
44 else:
45 return None
46
47 def get_constraints_url(self, obj):
48 if isinstance(obj, Table):
49 # Only get constraints if we are serializing an existing table
50 request = self.context['request']
51 return request.build_absolute_uri(reverse('table-constraint-list', kwargs={'table_pk': obj.pk}))
52 else:
53 return None
54
55 def get_columns_url(self, obj):
56 if isinstance(obj, Table):
57 # Only get columns if we are serializing an existing table
58 request = self.context['request']
59 return request.build_absolute_uri(reverse('table-column-list', kwargs={'table_pk': obj.pk}))
60 else:
61 return None
62
63 def get_type_suggestions_url(self, obj):
64 if isinstance(obj, Table):
65 # Only get type suggestions if we are serializing an existing table
66 request = self.context['request']
67 return request.build_absolute_uri(reverse('table-type-suggestions', kwargs={'pk': obj.pk}))
68 else:
69 return None
70
71 def get_previews_url(self, obj):
72 if isinstance(obj, Table):
73 # Only get previews if we are serializing an existing table
74 request = self.context['request']
75 return request.build_absolute_uri(reverse('table-previews', kwargs={'pk': obj.pk}))
76 else:
77 return None
78
79 def validate_data_files(self, data_files):
80 if data_files and len(data_files) > 1:
81 raise MultipleDataFileAPIException()
82 return data_files
83
84 def create(self, validated_data):
85 schema = validated_data['schema']
86 data_files = validated_data.get('data_files')
87 name = validated_data.get('name') or gen_table_name(schema, data_files)
88
89 try:
90 if data_files:
91 table = create_table_from_datafile(data_files, name, schema)
92 else:
93 table = create_empty_table(name, schema)
94 except ProgrammingError as e:
95 if type(e.orig) == DuplicateTable:
96 raise DuplicateTableAPIException(
97 e,
98 message=f"Relation {validated_data['name']} already exists in schema {schema.id}",
99 field="name",
100 status_code=status.HTTP_400_BAD_REQUEST
101 )
102 else:
103 raise ProgrammingAPIException(e)
104 return table
105
106
107 class TablePreviewSerializer(MathesarErrorMessageMixin, serializers.Serializer):
108 name = serializers.CharField(required=False)
109 columns = SimpleColumnSerializer(many=True)
110
111 def validate_columns(self, columns):
112 table = self.context['table']
113 column_names = [col["name"] for col in columns]
114 if not len(column_names) == len(set(column_names)):
115 raise DistinctColumnRequiredAPIException()
116 if not len(columns) == len(table.sa_columns):
117 raise ColumnSizeMismatchAPIException()
118 for column in columns:
119 db_type_id = column['type']
120 db_type = get_db_type_enum_from_id(db_type_id)
121 if db_type is None:
122 raise UnknownDatabaseTypeIdentifier(db_type_id=db_type_id)
123 return columns
124
[end of mathesar/api/serializers/tables.py]
[start of mathesar/api/db/viewsets/tables.py]
1 from django_filters import rest_framework as filters
2 from psycopg2.errors import InvalidTextRepresentation, CheckViolation
3 from rest_framework import status, viewsets
4 from rest_framework.decorators import action
5 from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, CreateModelMixin
6 from rest_framework.response import Response
7 from sqlalchemy.exc import DataError, IntegrityError
8
9 from mathesar.api.exceptions.database_exceptions import (
10 exceptions as database_api_exceptions,
11 base_exceptions as database_base_api_exceptions,
12 )
13 from mathesar.api.exceptions.generic_exceptions import base_exceptions as base_api_exceptions
14 from db.types.exceptions import UnsupportedTypeException
15 from mathesar.api.dj_filters import TableFilter
16 from mathesar.api.pagination import DefaultLimitOffsetPagination
17 from mathesar.api.serializers.tables import TableSerializer, TablePreviewSerializer
18 from mathesar.models import Table
19 from mathesar.utils.tables import (
20 get_table_column_types
21 )
22
23
24 class TableViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, viewsets.GenericViewSet):
25 serializer_class = TableSerializer
26 pagination_class = DefaultLimitOffsetPagination
27 filter_backends = (filters.DjangoFilterBackend,)
28 filterset_class = TableFilter
29
30 def get_queryset(self):
31 return Table.objects.all().order_by('-created_at')
32
33 def partial_update(self, request, pk=None):
34 serializer = TableSerializer(
35 data=request.data, context={'request': request}, partial=True
36 )
37 serializer.is_valid(raise_exception=True)
38 table = self.get_object()
39
40 # Save the fields that are stored in the model.
41 present_model_fields = []
42 for model_field in table.MODEL_FIELDS:
43 if model_field in serializer.validated_data:
44 setattr(table, model_field, serializer.validated_data[model_field])
45 present_model_fields.append(model_field)
46 table.save(update_fields=present_model_fields)
47 for key in present_model_fields:
48 del serializer.validated_data[key]
49
50 # Save the fields that are stored in the underlying DB.
51 try:
52 table.update_sa_table(serializer.validated_data)
53 except ValueError as e:
54 raise base_api_exceptions.ValueAPIException(e, status_code=status.HTTP_400_BAD_REQUEST)
55
56 # Reload the table to avoid cached properties
57 table = self.get_object()
58 serializer = TableSerializer(table, context={'request': request})
59 return Response(serializer.data)
60
61 def destroy(self, request, pk=None):
62 table = self.get_object()
63 table.delete_sa_table()
64 table.delete()
65 return Response(status=status.HTTP_204_NO_CONTENT)
66
67 @action(methods=['get'], detail=True)
68 def type_suggestions(self, request, pk=None):
69 table = self.get_object()
70 col_types = get_table_column_types(table)
71 return Response(col_types)
72
73 @action(methods=['post'], detail=True)
74 def previews(self, request, pk=None):
75 table = self.get_object()
76 serializer = TablePreviewSerializer(data=request.data, context={"request": request, 'table': table})
77 serializer.is_valid(raise_exception=True)
78 columns_field_key = "columns"
79 columns = serializer.data[columns_field_key]
80 table_data = TableSerializer(table, context={"request": request}).data
81 try:
82 preview_records = table.get_preview(columns)
83 except (DataError, IntegrityError) as e:
84 if type(e.orig) == InvalidTextRepresentation or type(e.orig) == CheckViolation:
85 raise database_api_exceptions.InvalidTypeCastAPIException(
86 e,
87 status_code=status.HTTP_400_BAD_REQUEST,
88 field='columns'
89 )
90 else:
91 raise database_base_api_exceptions.IntegrityAPIException(
92 e,
93 status_code=status.HTTP_400_BAD_REQUEST,
94 field='columns'
95 )
96 except UnsupportedTypeException as e:
97 raise database_api_exceptions.UnsupportedTypeAPIException(
98 e,
99 field='columns',
100 status_code=status.HTTP_400_BAD_REQUEST
101 )
102 table_data.update(
103 {
104 # There's no way to reflect actual column data without
105 # creating a view, so we just use the submission, assuming
106 # no errors means we changed to the desired names and types
107 "columns": columns,
108 "records": preview_records
109 }
110 )
111
112 return Response(table_data)
113
[end of mathesar/api/db/viewsets/tables.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mathesar/api/db/viewsets/tables.py b/mathesar/api/db/viewsets/tables.py
--- a/mathesar/api/db/viewsets/tables.py
+++ b/mathesar/api/db/viewsets/tables.py
@@ -10,7 +10,6 @@
exceptions as database_api_exceptions,
base_exceptions as database_base_api_exceptions,
)
-from mathesar.api.exceptions.generic_exceptions import base_exceptions as base_api_exceptions
from db.types.exceptions import UnsupportedTypeException
from mathesar.api.dj_filters import TableFilter
from mathesar.api.pagination import DefaultLimitOffsetPagination
@@ -31,27 +30,12 @@
return Table.objects.all().order_by('-created_at')
def partial_update(self, request, pk=None):
+ table = self.get_object()
serializer = TableSerializer(
- data=request.data, context={'request': request}, partial=True
+ table, data=request.data, context={'request': request}, partial=True
)
serializer.is_valid(raise_exception=True)
- table = self.get_object()
-
- # Save the fields that are stored in the model.
- present_model_fields = []
- for model_field in table.MODEL_FIELDS:
- if model_field in serializer.validated_data:
- setattr(table, model_field, serializer.validated_data[model_field])
- present_model_fields.append(model_field)
- table.save(update_fields=present_model_fields)
- for key in present_model_fields:
- del serializer.validated_data[key]
-
- # Save the fields that are stored in the underlying DB.
- try:
- table.update_sa_table(serializer.validated_data)
- except ValueError as e:
- raise base_api_exceptions.ValueAPIException(e, status_code=status.HTTP_400_BAD_REQUEST)
+ serializer.save()
# Reload the table to avoid cached properties
table = self.get_object()
diff --git a/mathesar/api/serializers/tables.py b/mathesar/api/serializers/tables.py
--- a/mathesar/api/serializers/tables.py
+++ b/mathesar/api/serializers/tables.py
@@ -1,6 +1,7 @@
from django.urls import reverse
from psycopg2.errors import DuplicateTable
from rest_framework import serializers, status
+from rest_framework.exceptions import ValidationError
from sqlalchemy.exc import ProgrammingError
from db.types.base import get_db_type_enum_from_id
@@ -11,6 +12,8 @@
)
from mathesar.api.exceptions.database_exceptions.exceptions import DuplicateTableAPIException
from mathesar.api.exceptions.database_exceptions.base_exceptions import ProgrammingAPIException
+from mathesar.api.exceptions.validation_exceptions import base_exceptions as base_validation_exceptions
+from mathesar.api.exceptions.generic_exceptions import base_exceptions as base_api_exceptions
from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin
from mathesar.api.serializers.columns import SimpleColumnSerializer
from mathesar.models import Table, DataFile
@@ -103,6 +106,35 @@
raise ProgrammingAPIException(e)
return table
+ def update(self, instance, validated_data):
+ if self.partial:
+ # Save the fields that are stored in the model.
+ present_model_fields = []
+ for model_field in instance.MODEL_FIELDS:
+ if model_field in validated_data:
+ setattr(instance, model_field, validated_data[model_field])
+ present_model_fields.append(model_field)
+ instance.save(update_fields=present_model_fields)
+ for key in present_model_fields:
+ del validated_data[key]
+ # Save the fields that are stored in the underlying DB.
+ try:
+ instance.update_sa_table(validated_data)
+ except ValueError as e:
+ raise base_api_exceptions.ValueAPIException(e, status_code=status.HTTP_400_BAD_REQUEST)
+ return instance
+
+ def validate(self, data):
+ if self.partial:
+ columns = data.get('columns', None)
+ if columns is not None:
+ for col in columns:
+ id = col.get('id', None)
+ if id is None:
+ message = "'id' field is required while batch updating columns."
+ raise base_validation_exceptions.MathesarValidationException(ValidationError, message=message)
+ return data
+
class TablePreviewSerializer(MathesarErrorMessageMixin, serializers.Serializer):
name = serializers.CharField(required=False)
| {"golden_diff": "diff --git a/mathesar/api/db/viewsets/tables.py b/mathesar/api/db/viewsets/tables.py\n--- a/mathesar/api/db/viewsets/tables.py\n+++ b/mathesar/api/db/viewsets/tables.py\n@@ -10,7 +10,6 @@\n exceptions as database_api_exceptions,\n base_exceptions as database_base_api_exceptions,\n )\n-from mathesar.api.exceptions.generic_exceptions import base_exceptions as base_api_exceptions\n from db.types.exceptions import UnsupportedTypeException\n from mathesar.api.dj_filters import TableFilter\n from mathesar.api.pagination import DefaultLimitOffsetPagination\n@@ -31,27 +30,12 @@\n return Table.objects.all().order_by('-created_at')\n \n def partial_update(self, request, pk=None):\n+ table = self.get_object()\n serializer = TableSerializer(\n- data=request.data, context={'request': request}, partial=True\n+ table, data=request.data, context={'request': request}, partial=True\n )\n serializer.is_valid(raise_exception=True)\n- table = self.get_object()\n-\n- # Save the fields that are stored in the model.\n- present_model_fields = []\n- for model_field in table.MODEL_FIELDS:\n- if model_field in serializer.validated_data:\n- setattr(table, model_field, serializer.validated_data[model_field])\n- present_model_fields.append(model_field)\n- table.save(update_fields=present_model_fields)\n- for key in present_model_fields:\n- del serializer.validated_data[key]\n-\n- # Save the fields that are stored in the underlying DB.\n- try:\n- table.update_sa_table(serializer.validated_data)\n- except ValueError as e:\n- raise base_api_exceptions.ValueAPIException(e, status_code=status.HTTP_400_BAD_REQUEST)\n+ serializer.save()\n \n # Reload the table to avoid cached properties\n table = self.get_object()\ndiff --git a/mathesar/api/serializers/tables.py b/mathesar/api/serializers/tables.py\n--- a/mathesar/api/serializers/tables.py\n+++ b/mathesar/api/serializers/tables.py\n@@ -1,6 +1,7 @@\n from django.urls import reverse\n from psycopg2.errors import DuplicateTable\n from rest_framework import serializers, status\n+from rest_framework.exceptions import ValidationError\n from sqlalchemy.exc import ProgrammingError\n \n from db.types.base import get_db_type_enum_from_id\n@@ -11,6 +12,8 @@\n )\n from mathesar.api.exceptions.database_exceptions.exceptions import DuplicateTableAPIException\n from mathesar.api.exceptions.database_exceptions.base_exceptions import ProgrammingAPIException\n+from mathesar.api.exceptions.validation_exceptions import base_exceptions as base_validation_exceptions\n+from mathesar.api.exceptions.generic_exceptions import base_exceptions as base_api_exceptions\n from mathesar.api.exceptions.mixins import MathesarErrorMessageMixin\n from mathesar.api.serializers.columns import SimpleColumnSerializer\n from mathesar.models import Table, DataFile\n@@ -103,6 +106,35 @@\n raise ProgrammingAPIException(e)\n return table\n \n+ def update(self, instance, validated_data):\n+ if self.partial:\n+ # Save the fields that are stored in the model.\n+ present_model_fields = []\n+ for model_field in instance.MODEL_FIELDS:\n+ if model_field in validated_data:\n+ setattr(instance, model_field, validated_data[model_field])\n+ present_model_fields.append(model_field)\n+ instance.save(update_fields=present_model_fields)\n+ for key in present_model_fields:\n+ del validated_data[key]\n+ # Save the fields that are stored in the underlying DB.\n+ try:\n+ instance.update_sa_table(validated_data)\n+ except ValueError as e:\n+ raise base_api_exceptions.ValueAPIException(e, 
status_code=status.HTTP_400_BAD_REQUEST)\n+ return instance\n+\n+ def validate(self, data):\n+ if self.partial:\n+ columns = data.get('columns', None)\n+ if columns is not None:\n+ for col in columns:\n+ id = col.get('id', None)\n+ if id is None:\n+ message = \"'id' field is required while batch updating columns.\"\n+ raise base_validation_exceptions.MathesarValidationException(ValidationError, message=message)\n+ return data\n+\n \n class TablePreviewSerializer(MathesarErrorMessageMixin, serializers.Serializer):\n name = serializers.CharField(required=False)\n", "issue": "Incorrect error is thrown when id field is not provided for a column object during a batch update\n## Description\r\nThe following `Patch` request to `/tables/{id}` \r\n\r\n```\r\n\r\n{\r\n \"columns\": [\r\n {\r\n \"name\": \"id\",\r\n \"type\": \"INTEGER\"\r\n },\r\n {\r\n \"name\": \"Center\",\r\n \"type\": \"TEXT\"\r\n }\r\n ]\r\n}\r\n```\r\n\r\n fails with an error `{code: 4999, message: \"local variable 'name' referenced before assignment\", field: null, detail: null}`.\r\n\r\nThis is happening due to https://github.com/centerofci/mathesar/blob/a8bad8268c8634bc766fa2e4fde271f4ec1a165e/db/columns/operations/alter.py#L215\r\n\r\nalways expecting a name which is fetched from the `column id` that is missing from the patch request.\r\n\r\n## Expected behavior\r\n\r\n`id` field of the column object should be required when batch updating using `/tables/{id}`.\r\n\r\n## To Reproduce\r\nUsing API \r\n- Create a table using `/api/db/v0/tables/` API \r\n- Send a patch request to `/api/db/v0/tables/{id}`\r\n\r\nUsing frontend client\r\n- Create a table by importing a csv file\r\n- Error will be thrown when clicking on `Finish import`\r\n\r\n\n", "before_files": [{"content": "from django.urls import reverse\nfrom psycopg2.errors import DuplicateTable\nfrom rest_framework import serializers, status\nfrom sqlalchemy.exc import ProgrammingError\n\nfrom db.types.base import get_db_type_enum_from_id\n\nfrom mathesar.api.exceptions.validation_exceptions.exceptions import (\n ColumnSizeMismatchAPIException, DistinctColumnRequiredAPIException,\n MultipleDataFileAPIException, UnknownDatabaseTypeIdentifier\n)\nfrom mathesar.api.exceptions.database_exceptions.exceptions import DuplicateTableAPIException\nfrom mathesar.api.exceptions.database_exceptions.base_exceptions import ProgrammingAPIException\nfrom mathesar.api.exceptions.mixins import MathesarErrorMessageMixin\nfrom mathesar.api.serializers.columns import SimpleColumnSerializer\nfrom mathesar.models import Table, DataFile\nfrom mathesar.utils.tables import gen_table_name, create_table_from_datafile, create_empty_table\n\n\nclass TableSerializer(MathesarErrorMessageMixin, serializers.ModelSerializer):\n columns = SimpleColumnSerializer(many=True, required=False)\n records_url = serializers.SerializerMethodField()\n constraints_url = serializers.SerializerMethodField()\n columns_url = serializers.SerializerMethodField()\n type_suggestions_url = serializers.SerializerMethodField()\n previews_url = serializers.SerializerMethodField()\n name = serializers.CharField(required=False, allow_blank=True, default='')\n data_files = serializers.PrimaryKeyRelatedField(\n required=False, many=True, queryset=DataFile.objects.all()\n )\n\n class Meta:\n model = Table\n fields = ['id', 'name', 'schema', 'created_at', 'updated_at', 'import_verified',\n 'columns', 'records_url', 'constraints_url', 'columns_url',\n 'type_suggestions_url', 'previews_url', 'data_files',\n 'has_dependencies']\n\n def 
get_records_url(self, obj):\n if isinstance(obj, Table):\n # Only get records if we are serializing an existing table\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-record-list', kwargs={'table_pk': obj.pk}))\n else:\n return None\n\n def get_constraints_url(self, obj):\n if isinstance(obj, Table):\n # Only get constraints if we are serializing an existing table\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-constraint-list', kwargs={'table_pk': obj.pk}))\n else:\n return None\n\n def get_columns_url(self, obj):\n if isinstance(obj, Table):\n # Only get columns if we are serializing an existing table\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-column-list', kwargs={'table_pk': obj.pk}))\n else:\n return None\n\n def get_type_suggestions_url(self, obj):\n if isinstance(obj, Table):\n # Only get type suggestions if we are serializing an existing table\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-type-suggestions', kwargs={'pk': obj.pk}))\n else:\n return None\n\n def get_previews_url(self, obj):\n if isinstance(obj, Table):\n # Only get previews if we are serializing an existing table\n request = self.context['request']\n return request.build_absolute_uri(reverse('table-previews', kwargs={'pk': obj.pk}))\n else:\n return None\n\n def validate_data_files(self, data_files):\n if data_files and len(data_files) > 1:\n raise MultipleDataFileAPIException()\n return data_files\n\n def create(self, validated_data):\n schema = validated_data['schema']\n data_files = validated_data.get('data_files')\n name = validated_data.get('name') or gen_table_name(schema, data_files)\n\n try:\n if data_files:\n table = create_table_from_datafile(data_files, name, schema)\n else:\n table = create_empty_table(name, schema)\n except ProgrammingError as e:\n if type(e.orig) == DuplicateTable:\n raise DuplicateTableAPIException(\n e,\n message=f\"Relation {validated_data['name']} already exists in schema {schema.id}\",\n field=\"name\",\n status_code=status.HTTP_400_BAD_REQUEST\n )\n else:\n raise ProgrammingAPIException(e)\n return table\n\n\nclass TablePreviewSerializer(MathesarErrorMessageMixin, serializers.Serializer):\n name = serializers.CharField(required=False)\n columns = SimpleColumnSerializer(many=True)\n\n def validate_columns(self, columns):\n table = self.context['table']\n column_names = [col[\"name\"] for col in columns]\n if not len(column_names) == len(set(column_names)):\n raise DistinctColumnRequiredAPIException()\n if not len(columns) == len(table.sa_columns):\n raise ColumnSizeMismatchAPIException()\n for column in columns:\n db_type_id = column['type']\n db_type = get_db_type_enum_from_id(db_type_id)\n if db_type is None:\n raise UnknownDatabaseTypeIdentifier(db_type_id=db_type_id)\n return columns\n", "path": "mathesar/api/serializers/tables.py"}, {"content": "from django_filters import rest_framework as filters\nfrom psycopg2.errors import InvalidTextRepresentation, CheckViolation\nfrom rest_framework import status, viewsets\nfrom rest_framework.decorators import action\nfrom rest_framework.mixins import ListModelMixin, RetrieveModelMixin, CreateModelMixin\nfrom rest_framework.response import Response\nfrom sqlalchemy.exc import DataError, IntegrityError\n\nfrom mathesar.api.exceptions.database_exceptions import (\n exceptions as database_api_exceptions,\n base_exceptions as database_base_api_exceptions,\n)\nfrom 
mathesar.api.exceptions.generic_exceptions import base_exceptions as base_api_exceptions\nfrom db.types.exceptions import UnsupportedTypeException\nfrom mathesar.api.dj_filters import TableFilter\nfrom mathesar.api.pagination import DefaultLimitOffsetPagination\nfrom mathesar.api.serializers.tables import TableSerializer, TablePreviewSerializer\nfrom mathesar.models import Table\nfrom mathesar.utils.tables import (\n get_table_column_types\n)\n\n\nclass TableViewSet(CreateModelMixin, RetrieveModelMixin, ListModelMixin, viewsets.GenericViewSet):\n serializer_class = TableSerializer\n pagination_class = DefaultLimitOffsetPagination\n filter_backends = (filters.DjangoFilterBackend,)\n filterset_class = TableFilter\n\n def get_queryset(self):\n return Table.objects.all().order_by('-created_at')\n\n def partial_update(self, request, pk=None):\n serializer = TableSerializer(\n data=request.data, context={'request': request}, partial=True\n )\n serializer.is_valid(raise_exception=True)\n table = self.get_object()\n\n # Save the fields that are stored in the model.\n present_model_fields = []\n for model_field in table.MODEL_FIELDS:\n if model_field in serializer.validated_data:\n setattr(table, model_field, serializer.validated_data[model_field])\n present_model_fields.append(model_field)\n table.save(update_fields=present_model_fields)\n for key in present_model_fields:\n del serializer.validated_data[key]\n\n # Save the fields that are stored in the underlying DB.\n try:\n table.update_sa_table(serializer.validated_data)\n except ValueError as e:\n raise base_api_exceptions.ValueAPIException(e, status_code=status.HTTP_400_BAD_REQUEST)\n\n # Reload the table to avoid cached properties\n table = self.get_object()\n serializer = TableSerializer(table, context={'request': request})\n return Response(serializer.data)\n\n def destroy(self, request, pk=None):\n table = self.get_object()\n table.delete_sa_table()\n table.delete()\n return Response(status=status.HTTP_204_NO_CONTENT)\n\n @action(methods=['get'], detail=True)\n def type_suggestions(self, request, pk=None):\n table = self.get_object()\n col_types = get_table_column_types(table)\n return Response(col_types)\n\n @action(methods=['post'], detail=True)\n def previews(self, request, pk=None):\n table = self.get_object()\n serializer = TablePreviewSerializer(data=request.data, context={\"request\": request, 'table': table})\n serializer.is_valid(raise_exception=True)\n columns_field_key = \"columns\"\n columns = serializer.data[columns_field_key]\n table_data = TableSerializer(table, context={\"request\": request}).data\n try:\n preview_records = table.get_preview(columns)\n except (DataError, IntegrityError) as e:\n if type(e.orig) == InvalidTextRepresentation or type(e.orig) == CheckViolation:\n raise database_api_exceptions.InvalidTypeCastAPIException(\n e,\n status_code=status.HTTP_400_BAD_REQUEST,\n field='columns'\n )\n else:\n raise database_base_api_exceptions.IntegrityAPIException(\n e,\n status_code=status.HTTP_400_BAD_REQUEST,\n field='columns'\n )\n except UnsupportedTypeException as e:\n raise database_api_exceptions.UnsupportedTypeAPIException(\n e,\n field='columns',\n status_code=status.HTTP_400_BAD_REQUEST\n )\n table_data.update(\n {\n # There's no way to reflect actual column data without\n # creating a view, so we just use the submission, assuming\n # no errors means we changed to the desired names and types\n \"columns\": columns,\n \"records\": preview_records\n }\n )\n\n return Response(table_data)\n", "path": 
"mathesar/api/db/viewsets/tables.py"}]} | 3,288 | 928 |
gh_patches_debug_21089 | rasdani/github-patches | git_diff | opsdroid__opsdroid-225 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Allow usage of env vars in config
The configuration should be parsed for environment variables when loaded. This would allow secrets like API keys to be kept outside of the opsdroid configuration.
#### Example
```yaml
connectors:
- name: slack
default-room: '#general'
bot-name: "opsdroid"
icon-emoji: ":robot:"
api-token: "$SLACK_API_KEY"
```
In this example `$SLACK_API_KEY` would be replaced with the contents of the environment variable of the same name.
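A minimal sketch of how that substitution could be applied to the loaded config (this is an assumption about the approach, not existing opsdroid code; the helper would be run on the dict returned by `yaml.load` in `load_config_file`):

```python
import os
import re

_ENV_VAR = re.compile(r"\$([A-Z_][A-Z0-9_]+)")


def expand_env_vars(value):
    """Recursively replace "$NAME" tokens in loaded config values."""
    if isinstance(value, dict):
        return {key: expand_env_vars(item) for key, item in value.items()}
    if isinstance(value, list):
        return [expand_env_vars(item) for item in value]
    if isinstance(value, str):
        # Leave the token untouched if the variable is not set.
        return _ENV_VAR.sub(
            lambda match: os.environ.get(match.group(1), match.group(0)), value
        )
    return value
```

An alternative would be a PyYAML implicit resolver plus a custom constructor, although implicit resolvers only fire on plain (unquoted) scalars, so quoted values like `"$SLACK_API_KEY"` above would still need the post-processing approach.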
</issue>
<code>
[start of opsdroid/loader.py]
1 """Class for loading in modules to OpsDroid."""
2
3 import logging
4 import os
5 import sys
6 import shutil
7 import subprocess
8 import importlib
9 import yaml
10 from opsdroid.const import (
11 DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
12 DEFAULT_MODULE_BRANCH, DEFAULT_CONFIG_PATH, EXAMPLE_CONFIG_FILE,
13 DEFAULT_MODULE_DEPS_PATH)
14
15
16 _LOGGER = logging.getLogger(__name__)
17
18
19 class Loader:
20 """Class to load in config and modules."""
21
22 def __init__(self, opsdroid):
23 """Create object with opsdroid instance."""
24 self.opsdroid = opsdroid
25 self.modules_directory = None
26 self.current_import_config = None
27 _LOGGER.debug("Loaded loader")
28
29 @staticmethod
30 def import_module(config):
31 """Import module namespace as variable and return it."""
32 try:
33 module = importlib.import_module(
34 config["module_path"] + "." + config["name"])
35 _LOGGER.debug("Loaded " + config["type"] + ": " +
36 config["module_path"])
37 return module
38 except ImportError as error:
39 _LOGGER.debug("Failed to load " + config["type"] +
40 " " + config["module_path"] + "." + config["name"])
41 _LOGGER.debug(error)
42
43 try:
44 module = importlib.import_module(
45 config["module_path"])
46 _LOGGER.debug("Loaded " + config["type"] + ": " +
47 config["module_path"])
48 return module
49 except ImportError as error:
50 _LOGGER.debug("Failed to load " + config["type"] +
51 " " + config["module_path"])
52 _LOGGER.debug(error)
53
54 _LOGGER.error("Failed to load " + config["type"] +
55 " " + config["module_path"])
56 return None
57
58 @staticmethod
59 def check_cache(config):
60 """Remove module if 'no-cache' set in config."""
61 if "no-cache" in config \
62 and config["no-cache"]:
63 _LOGGER.debug("'no-cache' set, removing " + config["install_path"])
64 if os.path.isdir(config["install_path"]):
65 shutil.rmtree(config["install_path"])
66 if os.path.isfile(config["install_path"] + ".py"):
67 os.remove(config["install_path"] + ".py")
68
69 def build_module_path(self, path_type, config):
70 """Generate the module path from name and type."""
71 if path_type == "import":
72 return MODULES_DIRECTORY + "." + config["type"] + \
73 "." + config["name"]
74 elif path_type == "install":
75 return self.modules_directory + "/" + config["type"] + \
76 "/" + config["name"]
77
78 @staticmethod
79 def git_clone(git_url, install_path, branch):
80 """Clone a git repo to a location and wait for finish."""
81 process = subprocess.Popen(["git", "clone", "-b", branch,
82 git_url, install_path], shell=False,
83 stdout=subprocess.PIPE,
84 stderr=subprocess.PIPE)
85 for output in process.communicate():
86 if output != "":
87 for line in output.splitlines():
88 _LOGGER.debug(str(line).strip())
89 process.wait()
90
91 @staticmethod
92 def pip_install_deps(requirements_path):
93 """Pip install a requirements.txt file and wait for finish."""
94 process = subprocess.Popen(["pip", "install",
95 "--target={}".format(
96 DEFAULT_MODULE_DEPS_PATH),
97 "--ignore-installed",
98 "-r", requirements_path],
99 shell=False,
100 stdout=subprocess.PIPE,
101 stderr=subprocess.PIPE)
102 for output in process.communicate():
103 if output != "":
104 for line in output.splitlines():
105 _LOGGER.debug(str(line).strip())
106 process.wait()
107
108 @staticmethod
109 def create_default_config(config_path):
110 """Create a default config file based on the included example."""
111 _LOGGER.info("Creating %s.", config_path)
112 config_dir, _ = os.path.split(config_path)
113 if not os.path.isdir(config_dir):
114 os.makedirs(config_dir)
115 shutil.copyfile(EXAMPLE_CONFIG_FILE, config_path)
116 return config_path
117
118 @staticmethod
119 def _reload_modules(modules):
120 for module in modules:
121 importlib.reload(module["module"])
122
123 def load_config_file(self, config_paths):
124 """Load a yaml config file from path."""
125 config_path = ""
126 for possible_path in config_paths:
127 if not os.path.isfile(possible_path):
128 _LOGGER.debug("Config file " + possible_path +
129 " not found")
130 else:
131 config_path = possible_path
132 break
133
134 if not config_path:
135 _LOGGER.info("No configuration files found.")
136 config_path = self.create_default_config(DEFAULT_CONFIG_PATH)
137
138 try:
139 with open(config_path, 'r') as stream:
140 _LOGGER.info("Loaded config from %s", config_path)
141 return yaml.load(stream)
142 except yaml.YAMLError as error:
143 self.opsdroid.critical(error, 1)
144 except FileNotFoundError as error:
145 self.opsdroid.critical(str(error), 1)
146
147 def setup_modules_directory(self, config):
148 """Create and configure the modules directory."""
149 module_path = os.path.expanduser(
150 config.get("module-path", DEFAULT_MODULES_PATH))
151 sys.path.append(module_path)
152
153 if not os.path.isdir(module_path):
154 os.makedirs(module_path, exist_ok=True)
155
156 self.modules_directory = os.path.join(module_path, MODULES_DIRECTORY)
157
158 # Create modules directory if doesn't exist
159 if not os.path.isdir(self.modules_directory):
160 os.makedirs(self.modules_directory)
161
162 def load_modules_from_config(self, config):
163 """Load all module types based on config."""
164 _LOGGER.debug("Loading modules from config")
165
166 self.setup_modules_directory(config)
167
168 connectors, databases, skills = None, None, None
169
170 if 'databases' in config.keys():
171 databases = self._load_modules('database', config['databases'])
172 else:
173 _LOGGER.warning("No databases in configuration")
174
175 if 'skills' in config.keys():
176 skills = self._load_modules('skill', config['skills'])
177 self.opsdroid.skills = []
178 self._reload_modules(skills)
179 else:
180 self.opsdroid.critical(
181 "No skills in configuration, at least 1 required", 1)
182
183 if 'connectors' in config.keys():
184 connectors = self._load_modules('connector', config['connectors'])
185 else:
186 self.opsdroid.critical(
187 "No connectors in configuration, at least 1 required", 1)
188
189 return connectors, databases, skills
190
191 def _load_modules(self, modules_type, modules):
192 """Install and load modules."""
193 _LOGGER.debug("Loading " + modules_type + " modules")
194 loaded_modules = []
195
196 if not os.path.isdir(DEFAULT_MODULE_DEPS_PATH):
197 os.makedirs(DEFAULT_MODULE_DEPS_PATH)
198 sys.path.append(DEFAULT_MODULE_DEPS_PATH)
199
200 for module in modules:
201
202 # Set up module config
203 config = module
204 config = {} if config is None else config
205 config["name"] = module["name"]
206 config["type"] = modules_type
207 config["module_path"] = self.build_module_path("import", config)
208 config["install_path"] = self.build_module_path("install", config)
209 if "branch" not in config:
210 config["branch"] = DEFAULT_MODULE_BRANCH
211
212 # Remove module for reinstall if no-cache set
213 self.check_cache(config)
214
215 # Install module
216 self._install_module(config)
217
218 # Import module
219 self.current_import_config = config
220 module = self.import_module(config)
221 if module is not None:
222 loaded_modules.append({
223 "module": module,
224 "config": config})
225 else:
226 _LOGGER.error(
227 "Module " + config["name"] + " failed to import")
228
229 return loaded_modules
230
231 def _install_module(self, config):
232 # pylint: disable=R0201
233 """Install a module."""
234 _LOGGER.debug("Installing " + config["name"])
235
236 if os.path.isdir(config["install_path"]) or \
237 os.path.isfile(config["install_path"] + ".py"):
238 # TODO Allow for updating or reinstalling of modules
239 _LOGGER.debug("Module " + config["name"] +
240 " already installed, skipping")
241 return
242
243 if "path" in config:
244 self._install_local_module(config)
245 else:
246 self._install_git_module(config)
247
248 if os.path.isdir(config["install_path"]):
249 _LOGGER.debug("Installed " + config["name"] +
250 " to " + config["install_path"])
251 else:
252 _LOGGER.debug("Install of " + config["name"] + " failed")
253
254 # Install module dependancies
255 if os.path.isfile(config["install_path"] + "/requirements.txt"):
256 self.pip_install_deps(config["install_path"] +
257 "/requirements.txt")
258
259 def _install_git_module(self, config):
260 """Install a module from a git repository."""
261 if config is not None and "repo" in config:
262 git_url = config["repo"]
263 else:
264 git_url = DEFAULT_GIT_URL + config["type"] + \
265 "-" + config["name"] + ".git"
266
267 if any(prefix in git_url for prefix in ["http", "https", "ssh"]):
268 # TODO Test if url or ssh path exists
269 # TODO Handle github authentication
270 _LOGGER.debug("Cloning from remote repository")
271 self.git_clone(git_url, config["install_path"],
272 config["branch"])
273 else:
274 if os.path.isdir(git_url):
275 _LOGGER.debug("Cloning from local repository")
276 self.git_clone(git_url, config["install_path"],
277 config["branch"])
278 else:
279 _LOGGER.debug("Could not find local git repo " + git_url)
280
281 @staticmethod
282 def _install_local_module(config):
283 """Install a module from a local path."""
284 installed = False
285 config["path"] = os.path.expanduser(config["path"])
286
287 installdir, _ = os.path.split(config["install_path"])
288 if not os.path.isdir(installdir):
289 os.makedirs(installdir, exist_ok=True)
290
291 if os.path.isdir(config["path"]):
292 shutil.copytree(config["path"], config["install_path"])
293 installed = True
294
295 if os.path.isfile(config["path"]):
296 os.makedirs(config["install_path"], exist_ok=True)
297 shutil.copyfile(config["path"], config["install_path"] +
298 "/__init__.py")
299 installed = True
300
301 if not installed:
302 _LOGGER.error("Failed to install from " + config["path"])
303
[end of opsdroid/loader.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opsdroid/loader.py b/opsdroid/loader.py
--- a/opsdroid/loader.py
+++ b/opsdroid/loader.py
@@ -6,6 +6,7 @@
import shutil
import subprocess
import importlib
+import re
import yaml
from opsdroid.const import (
DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,
@@ -135,6 +136,17 @@
_LOGGER.info("No configuration files found.")
config_path = self.create_default_config(DEFAULT_CONFIG_PATH)
+ env_var_pattern = re.compile(r'^\$([A-Z_]*)$')
+ yaml.add_implicit_resolver("!envvar", env_var_pattern)
+
+ def envvar_constructor(loader, node):
+ """Yaml parser for env vars."""
+ value = loader.construct_scalar(node)
+ [env_var] = env_var_pattern.match(value).groups()
+ return os.environ[env_var]
+
+ yaml.add_constructor('!envvar', envvar_constructor)
+
try:
with open(config_path, 'r') as stream:
_LOGGER.info("Loaded config from %s", config_path)
| {"golden_diff": "diff --git a/opsdroid/loader.py b/opsdroid/loader.py\n--- a/opsdroid/loader.py\n+++ b/opsdroid/loader.py\n@@ -6,6 +6,7 @@\n import shutil\n import subprocess\n import importlib\n+import re\n import yaml\n from opsdroid.const import (\n DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,\n@@ -135,6 +136,17 @@\n _LOGGER.info(\"No configuration files found.\")\n config_path = self.create_default_config(DEFAULT_CONFIG_PATH)\n \n+ env_var_pattern = re.compile(r'^\\$([A-Z_]*)$')\n+ yaml.add_implicit_resolver(\"!envvar\", env_var_pattern)\n+\n+ def envvar_constructor(loader, node):\n+ \"\"\"Yaml parser for env vars.\"\"\"\n+ value = loader.construct_scalar(node)\n+ [env_var] = env_var_pattern.match(value).groups()\n+ return os.environ[env_var]\n+\n+ yaml.add_constructor('!envvar', envvar_constructor)\n+\n try:\n with open(config_path, 'r') as stream:\n _LOGGER.info(\"Loaded config from %s\", config_path)\n", "issue": "Allow usage of env vars in config\nThe configuration should be parsed for environment variables when loaded. This would allow for secrets like api keys to be kept outside of the opsdroid configuration.\r\n\r\n#### Example\r\n\r\n```yaml\r\nconnectors:\r\n - name: slack\r\n default-room: '#general'\r\n bot-name: \"opsdroid\"\r\n icon-emoji: \":robot:\"\r\n api-token: \"$SLACK_API_KEY\"\r\n```\r\nIn this example `$SLACK_API_KEY` would be replaced with the contents of the environment variable of the same name.\n", "before_files": [{"content": "\"\"\"Class for loading in modules to OpsDroid.\"\"\"\n\nimport logging\nimport os\nimport sys\nimport shutil\nimport subprocess\nimport importlib\nimport yaml\nfrom opsdroid.const import (\n DEFAULT_GIT_URL, MODULES_DIRECTORY, DEFAULT_MODULES_PATH,\n DEFAULT_MODULE_BRANCH, DEFAULT_CONFIG_PATH, EXAMPLE_CONFIG_FILE,\n DEFAULT_MODULE_DEPS_PATH)\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nclass Loader:\n \"\"\"Class to load in config and modules.\"\"\"\n\n def __init__(self, opsdroid):\n \"\"\"Create object with opsdroid instance.\"\"\"\n self.opsdroid = opsdroid\n self.modules_directory = None\n self.current_import_config = None\n _LOGGER.debug(\"Loaded loader\")\n\n @staticmethod\n def import_module(config):\n \"\"\"Import module namespace as variable and return it.\"\"\"\n try:\n module = importlib.import_module(\n config[\"module_path\"] + \".\" + config[\"name\"])\n _LOGGER.debug(\"Loaded \" + config[\"type\"] + \": \" +\n config[\"module_path\"])\n return module\n except ImportError as error:\n _LOGGER.debug(\"Failed to load \" + config[\"type\"] +\n \" \" + config[\"module_path\"] + \".\" + config[\"name\"])\n _LOGGER.debug(error)\n\n try:\n module = importlib.import_module(\n config[\"module_path\"])\n _LOGGER.debug(\"Loaded \" + config[\"type\"] + \": \" +\n config[\"module_path\"])\n return module\n except ImportError as error:\n _LOGGER.debug(\"Failed to load \" + config[\"type\"] +\n \" \" + config[\"module_path\"])\n _LOGGER.debug(error)\n\n _LOGGER.error(\"Failed to load \" + config[\"type\"] +\n \" \" + config[\"module_path\"])\n return None\n\n @staticmethod\n def check_cache(config):\n \"\"\"Remove module if 'no-cache' set in config.\"\"\"\n if \"no-cache\" in config \\\n and config[\"no-cache\"]:\n _LOGGER.debug(\"'no-cache' set, removing \" + config[\"install_path\"])\n if os.path.isdir(config[\"install_path\"]):\n shutil.rmtree(config[\"install_path\"])\n if os.path.isfile(config[\"install_path\"] + \".py\"):\n os.remove(config[\"install_path\"] + \".py\")\n\n def build_module_path(self, path_type, 
config):\n \"\"\"Generate the module path from name and type.\"\"\"\n if path_type == \"import\":\n return MODULES_DIRECTORY + \".\" + config[\"type\"] + \\\n \".\" + config[\"name\"]\n elif path_type == \"install\":\n return self.modules_directory + \"/\" + config[\"type\"] + \\\n \"/\" + config[\"name\"]\n\n @staticmethod\n def git_clone(git_url, install_path, branch):\n \"\"\"Clone a git repo to a location and wait for finish.\"\"\"\n process = subprocess.Popen([\"git\", \"clone\", \"-b\", branch,\n git_url, install_path], shell=False,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n for output in process.communicate():\n if output != \"\":\n for line in output.splitlines():\n _LOGGER.debug(str(line).strip())\n process.wait()\n\n @staticmethod\n def pip_install_deps(requirements_path):\n \"\"\"Pip install a requirements.txt file and wait for finish.\"\"\"\n process = subprocess.Popen([\"pip\", \"install\",\n \"--target={}\".format(\n DEFAULT_MODULE_DEPS_PATH),\n \"--ignore-installed\",\n \"-r\", requirements_path],\n shell=False,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n for output in process.communicate():\n if output != \"\":\n for line in output.splitlines():\n _LOGGER.debug(str(line).strip())\n process.wait()\n\n @staticmethod\n def create_default_config(config_path):\n \"\"\"Create a default config file based on the included example.\"\"\"\n _LOGGER.info(\"Creating %s.\", config_path)\n config_dir, _ = os.path.split(config_path)\n if not os.path.isdir(config_dir):\n os.makedirs(config_dir)\n shutil.copyfile(EXAMPLE_CONFIG_FILE, config_path)\n return config_path\n\n @staticmethod\n def _reload_modules(modules):\n for module in modules:\n importlib.reload(module[\"module\"])\n\n def load_config_file(self, config_paths):\n \"\"\"Load a yaml config file from path.\"\"\"\n config_path = \"\"\n for possible_path in config_paths:\n if not os.path.isfile(possible_path):\n _LOGGER.debug(\"Config file \" + possible_path +\n \" not found\")\n else:\n config_path = possible_path\n break\n\n if not config_path:\n _LOGGER.info(\"No configuration files found.\")\n config_path = self.create_default_config(DEFAULT_CONFIG_PATH)\n\n try:\n with open(config_path, 'r') as stream:\n _LOGGER.info(\"Loaded config from %s\", config_path)\n return yaml.load(stream)\n except yaml.YAMLError as error:\n self.opsdroid.critical(error, 1)\n except FileNotFoundError as error:\n self.opsdroid.critical(str(error), 1)\n\n def setup_modules_directory(self, config):\n \"\"\"Create and configure the modules directory.\"\"\"\n module_path = os.path.expanduser(\n config.get(\"module-path\", DEFAULT_MODULES_PATH))\n sys.path.append(module_path)\n\n if not os.path.isdir(module_path):\n os.makedirs(module_path, exist_ok=True)\n\n self.modules_directory = os.path.join(module_path, MODULES_DIRECTORY)\n\n # Create modules directory if doesn't exist\n if not os.path.isdir(self.modules_directory):\n os.makedirs(self.modules_directory)\n\n def load_modules_from_config(self, config):\n \"\"\"Load all module types based on config.\"\"\"\n _LOGGER.debug(\"Loading modules from config\")\n\n self.setup_modules_directory(config)\n\n connectors, databases, skills = None, None, None\n\n if 'databases' in config.keys():\n databases = self._load_modules('database', config['databases'])\n else:\n _LOGGER.warning(\"No databases in configuration\")\n\n if 'skills' in config.keys():\n skills = self._load_modules('skill', config['skills'])\n self.opsdroid.skills = []\n self._reload_modules(skills)\n else:\n 
self.opsdroid.critical(\n \"No skills in configuration, at least 1 required\", 1)\n\n if 'connectors' in config.keys():\n connectors = self._load_modules('connector', config['connectors'])\n else:\n self.opsdroid.critical(\n \"No connectors in configuration, at least 1 required\", 1)\n\n return connectors, databases, skills\n\n def _load_modules(self, modules_type, modules):\n \"\"\"Install and load modules.\"\"\"\n _LOGGER.debug(\"Loading \" + modules_type + \" modules\")\n loaded_modules = []\n\n if not os.path.isdir(DEFAULT_MODULE_DEPS_PATH):\n os.makedirs(DEFAULT_MODULE_DEPS_PATH)\n sys.path.append(DEFAULT_MODULE_DEPS_PATH)\n\n for module in modules:\n\n # Set up module config\n config = module\n config = {} if config is None else config\n config[\"name\"] = module[\"name\"]\n config[\"type\"] = modules_type\n config[\"module_path\"] = self.build_module_path(\"import\", config)\n config[\"install_path\"] = self.build_module_path(\"install\", config)\n if \"branch\" not in config:\n config[\"branch\"] = DEFAULT_MODULE_BRANCH\n\n # Remove module for reinstall if no-cache set\n self.check_cache(config)\n\n # Install module\n self._install_module(config)\n\n # Import module\n self.current_import_config = config\n module = self.import_module(config)\n if module is not None:\n loaded_modules.append({\n \"module\": module,\n \"config\": config})\n else:\n _LOGGER.error(\n \"Module \" + config[\"name\"] + \" failed to import\")\n\n return loaded_modules\n\n def _install_module(self, config):\n # pylint: disable=R0201\n \"\"\"Install a module.\"\"\"\n _LOGGER.debug(\"Installing \" + config[\"name\"])\n\n if os.path.isdir(config[\"install_path\"]) or \\\n os.path.isfile(config[\"install_path\"] + \".py\"):\n # TODO Allow for updating or reinstalling of modules\n _LOGGER.debug(\"Module \" + config[\"name\"] +\n \" already installed, skipping\")\n return\n\n if \"path\" in config:\n self._install_local_module(config)\n else:\n self._install_git_module(config)\n\n if os.path.isdir(config[\"install_path\"]):\n _LOGGER.debug(\"Installed \" + config[\"name\"] +\n \" to \" + config[\"install_path\"])\n else:\n _LOGGER.debug(\"Install of \" + config[\"name\"] + \" failed\")\n\n # Install module dependancies\n if os.path.isfile(config[\"install_path\"] + \"/requirements.txt\"):\n self.pip_install_deps(config[\"install_path\"] +\n \"/requirements.txt\")\n\n def _install_git_module(self, config):\n \"\"\"Install a module from a git repository.\"\"\"\n if config is not None and \"repo\" in config:\n git_url = config[\"repo\"]\n else:\n git_url = DEFAULT_GIT_URL + config[\"type\"] + \\\n \"-\" + config[\"name\"] + \".git\"\n\n if any(prefix in git_url for prefix in [\"http\", \"https\", \"ssh\"]):\n # TODO Test if url or ssh path exists\n # TODO Handle github authentication\n _LOGGER.debug(\"Cloning from remote repository\")\n self.git_clone(git_url, config[\"install_path\"],\n config[\"branch\"])\n else:\n if os.path.isdir(git_url):\n _LOGGER.debug(\"Cloning from local repository\")\n self.git_clone(git_url, config[\"install_path\"],\n config[\"branch\"])\n else:\n _LOGGER.debug(\"Could not find local git repo \" + git_url)\n\n @staticmethod\n def _install_local_module(config):\n \"\"\"Install a module from a local path.\"\"\"\n installed = False\n config[\"path\"] = os.path.expanduser(config[\"path\"])\n\n installdir, _ = os.path.split(config[\"install_path\"])\n if not os.path.isdir(installdir):\n os.makedirs(installdir, exist_ok=True)\n\n if os.path.isdir(config[\"path\"]):\n 
shutil.copytree(config[\"path\"], config[\"install_path\"])\n installed = True\n\n if os.path.isfile(config[\"path\"]):\n os.makedirs(config[\"install_path\"], exist_ok=True)\n shutil.copyfile(config[\"path\"], config[\"install_path\"] +\n \"/__init__.py\")\n installed = True\n\n if not installed:\n _LOGGER.error(\"Failed to install from \" + config[\"path\"])\n", "path": "opsdroid/loader.py"}]} | 3,729 | 254 |
gh_patches_debug_4461 | rasdani/github-patches | git_diff | sunpy__sunpy-3333 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[1.0.3] test_print_config_files fails with installed sunpy
<!-- This comments are hidden when you submit the issue so you do not need to remove them!
Please be sure to check out our contributing guidelines: https://github.com/sunpy/sunpy/blob/master/CONTRIBUTING.rst
Please be sure to check out our code of conduct:
https://github.com/sunpy/sunpy/blob/master/CODE_OF_CONDUCT.rst -->
<!-- Please have a search on our GitHub repository to see if a similar issue has already been posted.
If a similar issue is closed, have a quick look to see if you are satisfied by the resolution.
If not please go ahead and open an issue! -->
### Description
On Debian, we run the provided tests regularly on the installed package ("CI test") to ensure that it still works in the current environment.
With Version 1.0.3 (or before; I didn't check 1.0 … 1.0.2), I see the following failure:
### Actual behavior
`print_test_config_files` tries to write to the installed package, which is denied for an ordinary user:
```
_____________________________________ test_print_config_files _____________________________________
undo_download_dir_patch = None
def test_print_config_files(undo_download_dir_patch):
# TODO: Tidy this up.
stdout = sys.stdout
out = io.StringIO()
sys.stdout = out
> print_config()
/usr/lib/python3/dist-packages/sunpy/util/tests/test_config.py:51:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/usr/lib/python3/dist-packages/sunpy/util/config.py:107: in print_config
for file_ in _find_config_files():
/usr/lib/python3/dist-packages/sunpy/util/config.py:69: in _find_config_files
config_path = Path(_get_user_configdir())
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
def _get_user_configdir():
"""[…]"""
configdir = os.environ.get('SUNPY_CONFIGDIR', CONFIG_DIR)
if not _is_writable_dir(configdir):
> raise RuntimeError(f'Could not write to SUNPY_CONFIGDIR="{configdir}"')
E RuntimeError: Could not write to SUNPY_CONFIGDIR="/usr/lib/python3/dist-packages/sunpy/data"
```
This also happens when I explicitly set `SUNPY_CONFIGDIR` to a different directory; the path mentioned in the `RuntimeError` then still is `/usr/lib/python3/dist-packages/sunpy/data`.
### Steps to Reproduce
```
$ cd /tmp
$ SUNPY_CONFIGDIR=/tmp python3 -c "import sunpy; exit(sunpy.self_test(args=\"-k 'not figure and not online'\"))"
```
### System Details
<!-- We at least need to know the SunPy version you are using. -->
<!-- We provide a short function in SunPy that will provide some of the below information. -->
<!-- It is sunpy.util.system_info(), this is optional but strongly recommended. -->
- SunPy Version: 1.0.3
- Astropy Version: 3.2.1
- Python Version: 3.7.4
- OS information: Debian unstable
</issue>
<code>
[start of sunpy/conftest.py]
1 import os
2 import json
3 import pathlib
4 import warnings
5 import importlib
6 import tempfile
7
8 import pytest
9
10 import astropy
11 import astropy.config.paths
12
13 import sunpy.tests.helpers
14 from sunpy.tests.hash import HASH_LIBRARY_NAME
15 from sunpy.tests.helpers import new_hash_library, generate_figure_webpage
16 from sunpy.util.exceptions import SunpyDeprecationWarning
17
18 # Force MPL to use non-gui backends for testing.
19 try:
20 import matplotlib
21 except ImportError:
22 pass
23 else:
24 matplotlib.use('Agg')
25
26 # Don't actually import pytest_remotedata because that can do things to the
27 # entrypoints code in pytest.
28 remotedata_spec = importlib.util.find_spec("pytest_remotedata")
29 HAVE_REMOTEDATA = remotedata_spec is not None
30
31
32 def pytest_addoption(parser):
33 parser.addoption("--figure_dir", action="store", default="./figure_test_images")
34
35
36 @pytest.fixture(scope='session', autouse=True)
37 def figure_base_dir(request):
38 sunpy.tests.helpers.figure_base_dir = pathlib.Path(
39 request.config.getoption("--figure_dir"))
40
41
42 @pytest.fixture(scope='session', autouse=True)
43 def tmp_config_dir(request):
44 """
45 Globally set the default config for all tests.
46 """
47 tmpdir = tempfile.TemporaryDirectory()
48
49 os.environ["SUNPY_CONFIGDIR"] = str(pathlib.Path(__file__).parent / "data")
50 astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name)
51 astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name)
52
53 yield
54
55 del os.environ["SUNPY_CONFIGDIR"]
56 astropy.config.paths.set_temp_config._temp_path = None
57 astropy.config.paths.set_temp_cache._temp_path = None
58
59
60 @pytest.fixture()
61 def undo_config_dir_patch():
62 """
63 Provide a way for certain tests to not have the config dir.
64 """
65 oridir = os.environ["SUNPY_CONFIGDIR"]
66 del os.environ["SUNPY_CONFIGDIR"]
67 yield
68 os.environ["SUNPY_CONFIGDIR"] = oridir
69
70
71 @pytest.fixture(scope='session', autouse=True)
72 def tmp_dl_dir(request):
73 """
74 Globally set the default download directory for the test run to a tmp dir.
75 """
76 with tempfile.TemporaryDirectory() as tmpdir:
77 os.environ["SUNPY_DOWNLOADDIR"] = tmpdir
78 yield tmpdir
79 del os.environ["SUNPY_DOWNLOADDIR"]
80
81
82 @pytest.fixture()
83 def undo_download_dir_patch():
84 """
85 Provide a way for certain tests to not have tmp download dir.
86 """
87 oridir = os.environ["SUNPY_DOWNLOADDIR"]
88 del os.environ["SUNPY_DOWNLOADDIR"]
89 yield
90 os.environ["SUNPY_DOWNLOADDIR"] = oridir
91
92
93 def pytest_runtest_setup(item):
94 """
95 pytest hook to skip all tests that have the mark 'remotedata' if the
96 pytest_remotedata plugin is not installed.
97 """
98 if isinstance(item, pytest.Function):
99 if 'remote_data' in item.keywords and not HAVE_REMOTEDATA:
100 pytest.skip("skipping remotedata tests as pytest-remotedata is not installed")
101
102
103 def pytest_unconfigure(config):
104
105 # If at least one figure test has been run, print result image directory
106 if len(new_hash_library) > 0:
107 # Write the new hash library in JSON
108 figure_base_dir = pathlib.Path(config.getoption("--figure_dir"))
109 hashfile = figure_base_dir / HASH_LIBRARY_NAME
110 with open(hashfile, 'w') as outfile:
111 json.dump(new_hash_library, outfile, sort_keys=True, indent=4, separators=(',', ': '))
112
113 """
114 Turn on internet when generating the figure comparison webpage.
115 """
116 if HAVE_REMOTEDATA:
117 from pytest_remotedata.disable_internet import turn_on_internet, turn_off_internet
118 else:
119 def turn_on_internet(): pass
120 def turn_off_internet(): pass
121
122 turn_on_internet()
123 generate_figure_webpage(new_hash_library)
124 turn_off_internet()
125
126 print('All images from image tests can be found in {}'.format(figure_base_dir.resolve()))
127 print("The corresponding hash library is {}".format(hashfile.resolve()))
128
129
130 def pytest_sessionstart(session):
131 warnings.simplefilter("error", SunpyDeprecationWarning)
132
[end of sunpy/conftest.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sunpy/conftest.py b/sunpy/conftest.py
--- a/sunpy/conftest.py
+++ b/sunpy/conftest.py
@@ -46,7 +46,7 @@
"""
tmpdir = tempfile.TemporaryDirectory()
- os.environ["SUNPY_CONFIGDIR"] = str(pathlib.Path(__file__).parent / "data")
+ os.environ["SUNPY_CONFIGDIR"] = str(tmpdir.name)
astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name)
astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name)
| {"golden_diff": "diff --git a/sunpy/conftest.py b/sunpy/conftest.py\n--- a/sunpy/conftest.py\n+++ b/sunpy/conftest.py\n@@ -46,7 +46,7 @@\n \"\"\"\n tmpdir = tempfile.TemporaryDirectory()\n \n- os.environ[\"SUNPY_CONFIGDIR\"] = str(pathlib.Path(__file__).parent / \"data\")\n+ os.environ[\"SUNPY_CONFIGDIR\"] = str(tmpdir.name)\n astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name)\n astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name)\n", "issue": "[1.0.3] test_print_config_files fails with installed sunpy\n<!-- This comments are hidden when you submit the issue so you do not need to remove them!\r\nPlease be sure to check out our contributing guidelines: https://github.com/sunpy/sunpy/blob/master/CONTRIBUTING.rst\r\nPlease be sure to check out our code of conduct:\r\nhttps://github.com/sunpy/sunpy/blob/master/CODE_OF_CONDUCT.rst -->\r\n\r\n<!-- Please have a search on our GitHub repository to see if a similar issue has already been posted.\r\nIf a similar issue is closed, have a quick look to see if you are satisfied by the resolution.\r\nIf not please go ahead and open an issue! -->\r\n\r\n### Description\r\n\r\nOn Debian, we run the provided tests regularly on the installed package (\"CI test\") to ensure that it still works in the current environment. \r\nWith Version 1.0.3 (or before; I didn't check 1.0 \u2026 1.0.2), I see the following failure:\r\n\r\n### Actual behavior\r\n`print_test_config_files` tries to write to the installed package, which is denied for an ordinary user:\r\n```\r\n_____________________________________ test_print_config_files _____________________________________\r\n\r\nundo_download_dir_patch = None\r\n\r\n def test_print_config_files(undo_download_dir_patch):\r\n # TODO: Tidy this up.\r\n stdout = sys.stdout\r\n out = io.StringIO()\r\n sys.stdout = out\r\n> print_config()\r\n\r\n/usr/lib/python3/dist-packages/sunpy/util/tests/test_config.py:51: \r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\n/usr/lib/python3/dist-packages/sunpy/util/config.py:107: in print_config\r\n for file_ in _find_config_files():\r\n/usr/lib/python3/dist-packages/sunpy/util/config.py:69: in _find_config_files\r\n config_path = Path(_get_user_configdir())\r\n_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\r\n\r\n def _get_user_configdir():\r\n \"\"\"[\u2026]\"\"\"\r\n configdir = os.environ.get('SUNPY_CONFIGDIR', CONFIG_DIR)\r\n \r\n if not _is_writable_dir(configdir):\r\n> raise RuntimeError(f'Could not write to SUNPY_CONFIGDIR=\"{configdir}\"')\r\nE RuntimeError: Could not write to SUNPY_CONFIGDIR=\"/usr/lib/python3/dist-packages/sunpy/data\"\r\n```\r\nThis also happens when I explicitly set `SUNPY_CONFIGDIR` to a different directory; the path mentioned in the `RuntimeError` then still is `/usr/lib/python3/dist-packages/sunpy/data`.\r\n\r\n### Steps to Reproduce\r\n\r\n```\r\n$ cd /tmp\r\n$ SUNPY_CONFIGDIR=/tmp python3 -c \"import sunpy; exit(sunpy.self_test(args=\\\"-k 'not figure and not online'\\\"))\"\r\n```\r\n\r\n\r\n### System Details\r\n<!-- We at least need to know the SunPy version you are using. -->\r\n<!-- We provide a short function in SunPy that will provide some of the below information. -->\r\n<!-- It is sunpy.util.system_info(), this is optional but strongly recommended. 
-->\r\n\r\n - SunPy Version: 1.0.3\r\n - Astropy Version: 3.2.1\r\n - Python Version: 3.7.4\r\n - OS information: Debian unstable\r\n\n", "before_files": [{"content": "import os\nimport json\nimport pathlib\nimport warnings\nimport importlib\nimport tempfile\n\nimport pytest\n\nimport astropy\nimport astropy.config.paths\n\nimport sunpy.tests.helpers\nfrom sunpy.tests.hash import HASH_LIBRARY_NAME\nfrom sunpy.tests.helpers import new_hash_library, generate_figure_webpage\nfrom sunpy.util.exceptions import SunpyDeprecationWarning\n\n# Force MPL to use non-gui backends for testing.\ntry:\n import matplotlib\nexcept ImportError:\n pass\nelse:\n matplotlib.use('Agg')\n\n# Don't actually import pytest_remotedata because that can do things to the\n# entrypoints code in pytest.\nremotedata_spec = importlib.util.find_spec(\"pytest_remotedata\")\nHAVE_REMOTEDATA = remotedata_spec is not None\n\n\ndef pytest_addoption(parser):\n parser.addoption(\"--figure_dir\", action=\"store\", default=\"./figure_test_images\")\n\n\[email protected](scope='session', autouse=True)\ndef figure_base_dir(request):\n sunpy.tests.helpers.figure_base_dir = pathlib.Path(\n request.config.getoption(\"--figure_dir\"))\n\n\[email protected](scope='session', autouse=True)\ndef tmp_config_dir(request):\n \"\"\"\n Globally set the default config for all tests.\n \"\"\"\n tmpdir = tempfile.TemporaryDirectory()\n\n os.environ[\"SUNPY_CONFIGDIR\"] = str(pathlib.Path(__file__).parent / \"data\")\n astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name)\n astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name)\n\n yield\n\n del os.environ[\"SUNPY_CONFIGDIR\"]\n astropy.config.paths.set_temp_config._temp_path = None\n astropy.config.paths.set_temp_cache._temp_path = None\n\n\[email protected]()\ndef undo_config_dir_patch():\n \"\"\"\n Provide a way for certain tests to not have the config dir.\n \"\"\"\n oridir = os.environ[\"SUNPY_CONFIGDIR\"]\n del os.environ[\"SUNPY_CONFIGDIR\"]\n yield\n os.environ[\"SUNPY_CONFIGDIR\"] = oridir\n\n\[email protected](scope='session', autouse=True)\ndef tmp_dl_dir(request):\n \"\"\"\n Globally set the default download directory for the test run to a tmp dir.\n \"\"\"\n with tempfile.TemporaryDirectory() as tmpdir:\n os.environ[\"SUNPY_DOWNLOADDIR\"] = tmpdir\n yield tmpdir\n del os.environ[\"SUNPY_DOWNLOADDIR\"]\n\n\[email protected]()\ndef undo_download_dir_patch():\n \"\"\"\n Provide a way for certain tests to not have tmp download dir.\n \"\"\"\n oridir = os.environ[\"SUNPY_DOWNLOADDIR\"]\n del os.environ[\"SUNPY_DOWNLOADDIR\"]\n yield\n os.environ[\"SUNPY_DOWNLOADDIR\"] = oridir\n\n\ndef pytest_runtest_setup(item):\n \"\"\"\n pytest hook to skip all tests that have the mark 'remotedata' if the\n pytest_remotedata plugin is not installed.\n \"\"\"\n if isinstance(item, pytest.Function):\n if 'remote_data' in item.keywords and not HAVE_REMOTEDATA:\n pytest.skip(\"skipping remotedata tests as pytest-remotedata is not installed\")\n\n\ndef pytest_unconfigure(config):\n\n # If at least one figure test has been run, print result image directory\n if len(new_hash_library) > 0:\n # Write the new hash library in JSON\n figure_base_dir = pathlib.Path(config.getoption(\"--figure_dir\"))\n hashfile = figure_base_dir / HASH_LIBRARY_NAME\n with open(hashfile, 'w') as outfile:\n json.dump(new_hash_library, outfile, sort_keys=True, indent=4, separators=(',', ': '))\n\n \"\"\"\n Turn on internet when generating the figure comparison webpage.\n \"\"\"\n if HAVE_REMOTEDATA:\n from 
pytest_remotedata.disable_internet import turn_on_internet, turn_off_internet\n else:\n def turn_on_internet(): pass\n def turn_off_internet(): pass\n\n turn_on_internet()\n generate_figure_webpage(new_hash_library)\n turn_off_internet()\n\n print('All images from image tests can be found in {}'.format(figure_base_dir.resolve()))\n print(\"The corresponding hash library is {}\".format(hashfile.resolve()))\n\n\ndef pytest_sessionstart(session):\n warnings.simplefilter(\"error\", SunpyDeprecationWarning)\n", "path": "sunpy/conftest.py"}]} | 2,545 | 139 |
gh_patches_debug_56962 | rasdani/github-patches | git_diff | tensorflow__tensor2tensor-360 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Error with `.decode` on `str` object when generating `summarize_cnn_dailymail32k` data
## CMD
```
t2t-trainer \
--generate_data \
--data_dir="$data" \
--problems=summarize_cnn_dailymail32k \
--model=transformer \
--hparams_set=transformer_base_single_gpu \
--output_dir="$root"
```
## OUTPUT
```
INFO:tensorflow:Generating data for summarize_cnn_dailymail32k
INFO:tensorflow:Generating vocab file: t2t_data/vocab.cnndailymail.32768
INFO:tensorflow:Downloading https://drive.google.com/uc?export=download&id=0BwmD_VLjROrfTHk4NFg2SndKcjQ&confirm=ak1U to /tmp/t2t_datagen/cnn_stories.tgz
INFO:tensorflow:Succesfully downloaded cnn_stories.tgz, 158577824 bytes.
INFO:tensorflow:Downloading https://drive.google.com/uc?export=download&id=0BwmD_VLjROrfM1BxdkxVaTY2bWs&confirm=1N53 to /tmp/t2t_datagen/dailymail_stories.tgz
INFO:tensorflow:Succesfully downloaded dailymail_stories.tgz, 375893739 bytes.
Traceback (most recent call last):
File "/home/pltrdy/anaconda3/bin/t2t-trainer", line 96, in <module>
tf.app.run()
File "/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensorflow/python/platform/app.py", line 48, in run
_sys.exit(main(_sys.argv[:1] + flags_passthrough))
File "/home/pltrdy/anaconda3/bin/t2t-trainer", line 83, in main
problem.generate_data(data_dir, tmp_dir)
File "/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/problem.py", line 625, in generate_data
self.generator(data_dir, tmp_dir, True), all_paths)
File "/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/generator_utils.py", line 143, in generate_files
for case in generator:
File "/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/cnn_dailymail.py", line 132, in generator
story_generator(tmp_dir))
File "/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/generator_utils.py", line 328, in get_or_generate_vocab_inner
for item in generator:
File "/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/cnn_dailymail.py", line 78, in story_generator
line = unicode(line, "utf-8") if six.PY2 else line.decode("utf-8")
AttributeError: 'str' object has no attribute 'decode'
```
## CONFIG
* Python version: `Python 3.6.1 |Anaconda custom (64-bit)| (default, May 11 2017, 13:09:58)`
* Tensor2Tensor version:
```
$ pip show tensor2tensor
Name: tensor2tensor
Version: 1.2.4
Summary: Tensor2Tensor
Home-page: http://github.com/tensorflow/tensor2tensor
Author: Google Inc.
Author-email: [email protected]
License: Apache 2.0
Location: /home/pltrdy/anaconda3/lib/python3.6/site-packages
Requires: sympy, numpy, requests, six, future, bz2file
```
## QUICKFIX
```
--- a/tensor2tensor/data_generators/cnn_dailymail.py
+++ b/tensor2tensor/data_generators/cnn_dailymail.py
@@ -74,7 +74,7 @@ def story_generator(tmp_dir):
for path in paths:
for story_file in tf.gfile.Glob(path + "*"):
story = u""
- for line in tf.gfile.Open(story_file):
+ for line in tf.gfile.Open(story_file, 'rb'):
```
</issue>
<code>
[start of tensor2tensor/data_generators/cnn_dailymail.py]
1 # coding=utf-8
2 # Copyright 2017 The Tensor2Tensor Authors.
3 #
4 # Licensed under the Apache License, Version 2.0 (the "License");
5 # you may not use this file except in compliance with the License.
6 # You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
16 """Data generators for the CNN and Daily Mail datasets."""
17
18 from __future__ import absolute_import
19 from __future__ import division
20 from __future__ import print_function
21
22 import os
23 import tarfile
24
25 # Dependency imports
26
27 import six
28 from tensor2tensor.data_generators import generator_utils
29 from tensor2tensor.data_generators import problem
30 from tensor2tensor.data_generators import text_encoder
31 from tensor2tensor.utils import registry
32
33 import tensorflow as tf
34
35
36 # Links to data from http://cs.nyu.edu/~kcho/DMQA/
37 _CNN_STORIES_DRIVE_URL = "https://drive.google.com/uc?export=download&id=0BwmD_VLjROrfTHk4NFg2SndKcjQ"
38
39 _DAILYMAIL_STORIES_DRIVE_URL = "https://drive.google.com/uc?export=download&id=0BwmD_VLjROrfM1BxdkxVaTY2bWs"
40
41
42 # End-of-sentence marker.
43 EOS = text_encoder.EOS_ID
44
45
46 def _maybe_download_corpora(tmp_dir):
47 """Download corpora if necessary and unzip them.
48
49 Args:
50 tmp_dir: directory containing dataset.
51
52 Returns:
53 filepath of the downloaded corpus file.
54 """
55 cnn_filename = "cnn_stories.tgz"
56 cnn_finalpath = os.path.join(tmp_dir, "cnn/stories/")
57 dailymail_filename = "dailymail_stories.tgz"
58 dailymail_finalpath = os.path.join(tmp_dir, "dailymail/stories/")
59 if not tf.gfile.Exists(cnn_finalpath):
60 cnn_file = generator_utils.maybe_download_from_drive(
61 tmp_dir, cnn_filename, _CNN_STORIES_DRIVE_URL)
62 with tarfile.open(cnn_file, "r:gz") as cnn_tar:
63 cnn_tar.extractall(tmp_dir)
64 if not tf.gfile.Exists(dailymail_finalpath):
65 dailymail_file = generator_utils.maybe_download_from_drive(
66 tmp_dir, dailymail_filename, _DAILYMAIL_STORIES_DRIVE_URL)
67 with tarfile.open(dailymail_file, "r:gz") as dailymail_tar:
68 dailymail_tar.extractall(tmp_dir)
69 return [cnn_finalpath, dailymail_finalpath]
70
71
72 def story_generator(tmp_dir):
73 paths = _maybe_download_corpora(tmp_dir)
74 for path in paths:
75 for story_file in tf.gfile.Glob(path + "*"):
76 story = u""
77 for line in tf.gfile.Open(story_file):
78 line = unicode(line, "utf-8") if six.PY2 else line.decode("utf-8")
79 story += line
80 yield story
81
82
83 def _story_summary_split(story):
84 end_pos = story.find("\n\n") # Upto first empty line.
85 assert end_pos != -1
86 return story[:end_pos], story[end_pos:].strip()
87
88
89 @registry.register_problem
90 class SummarizeCnnDailymail32k(problem.Text2TextProblem):
91 """Summarize CNN and Daily Mail articles to their first paragraph."""
92
93 @property
94 def is_character_level(self):
95 return False
96
97 @property
98 def has_inputs(self):
99 return True
100
101 @property
102 def input_space_id(self):
103 return problem.SpaceID.EN_TOK
104
105 @property
106 def target_space_id(self):
107 return problem.SpaceID.EN_TOK
108
109 @property
110 def num_shards(self):
111 return 100
112
113 @property
114 def vocab_name(self):
115 return "vocab.cnndailymail"
116
117 @property
118 def use_subword_tokenizer(self):
119 return True
120
121 @property
122 def targeted_vocab_size(self):
123 return 2**15 # 32768
124
125 @property
126 def use_train_shards_for_dev(self):
127 return True
128
129 def generator(self, data_dir, tmp_dir, _):
130 encoder = generator_utils.get_or_generate_vocab_inner(
131 data_dir, self.vocab_file, self.targeted_vocab_size,
132 story_generator(tmp_dir))
133 for story in story_generator(tmp_dir):
134 summary, rest = _story_summary_split(story)
135 encoded_summary = encoder.encode(summary) + [EOS]
136 encoded_story = encoder.encode(rest) + [EOS]
137 yield {"inputs": encoded_story, "targets": encoded_summary}
138
[end of tensor2tensor/data_generators/cnn_dailymail.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/tensor2tensor/data_generators/cnn_dailymail.py b/tensor2tensor/data_generators/cnn_dailymail.py
--- a/tensor2tensor/data_generators/cnn_dailymail.py
+++ b/tensor2tensor/data_generators/cnn_dailymail.py
@@ -74,7 +74,7 @@
for path in paths:
for story_file in tf.gfile.Glob(path + "*"):
story = u""
- for line in tf.gfile.Open(story_file):
+ for line in tf.gfile.Open(story_file, 'rb'):
line = unicode(line, "utf-8") if six.PY2 else line.decode("utf-8")
story += line
yield story
| {"golden_diff": "diff --git a/tensor2tensor/data_generators/cnn_dailymail.py b/tensor2tensor/data_generators/cnn_dailymail.py\n--- a/tensor2tensor/data_generators/cnn_dailymail.py\n+++ b/tensor2tensor/data_generators/cnn_dailymail.py\n@@ -74,7 +74,7 @@\n for path in paths:\n for story_file in tf.gfile.Glob(path + \"*\"):\n story = u\"\"\n- for line in tf.gfile.Open(story_file):\n+ for line in tf.gfile.Open(story_file, 'rb'):\n line = unicode(line, \"utf-8\") if six.PY2 else line.decode(\"utf-8\")\n story += line\n yield story\n", "issue": "Error with `.decode` on `str` object when generating `summarize_cnn_dailymail32k` data\n## CMD\r\n```\r\nt2t-trainer \\\r\n --generate_data \\\r\n --data_dir=\"$data\" \\\r\n --problems=summarize_cnn_dailymail32k \\\r\n --model=transformer \\\r\n --hparams_set=transformer_base_single_gpu \\\r\n --output_dir=\"$root\"\r\n```\r\n\r\n## OUTPUT\r\n```\r\nINFO:tensorflow:Generating data for summarize_cnn_dailymail32k\r\nINFO:tensorflow:Generating vocab file: t2t_data/vocab.cnndailymail.32768\r\nINFO:tensorflow:Downloading https://drive.google.com/uc?export=download&id=0BwmD_VLjROrfTHk4NFg2SndKcjQ&confirm=ak1U to /tmp/t2t_datagen/cnn_stories.tgz\r\n\r\nINFO:tensorflow:Succesfully downloaded cnn_stories.tgz, 158577824 bytes.\r\nINFO:tensorflow:Downloading https://drive.google.com/uc?export=download&id=0BwmD_VLjROrfM1BxdkxVaTY2bWs&confirm=1N53 to /tmp/t2t_datagen/dailymail_stories.tgz\r\n\r\nINFO:tensorflow:Succesfully downloaded dailymail_stories.tgz, 375893739 bytes.\r\nTraceback (most recent call last):\r\n File \"/home/pltrdy/anaconda3/bin/t2t-trainer\", line 96, in <module>\r\n tf.app.run()\r\n File \"/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensorflow/python/platform/app.py\", line 48, in run\r\n _sys.exit(main(_sys.argv[:1] + flags_passthrough))\r\n File \"/home/pltrdy/anaconda3/bin/t2t-trainer\", line 83, in main\r\n problem.generate_data(data_dir, tmp_dir)\r\n File \"/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/problem.py\", line 625, in generate_data\r\n self.generator(data_dir, tmp_dir, True), all_paths)\r\n File \"/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/generator_utils.py\", line 143, in generate_files\r\n for case in generator:\r\n File \"/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/cnn_dailymail.py\", line 132, in generator\r\n story_generator(tmp_dir))\r\n File \"/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/generator_utils.py\", line 328, in get_or_generate_vocab_inner\r\n for item in generator:\r\n File \"/home/pltrdy/anaconda3/lib/python3.6/site-packages/tensor2tensor/data_generators/cnn_dailymail.py\", line 78, in story_generator\r\n line = unicode(line, \"utf-8\") if six.PY2 else line.decode(\"utf-8\")\r\nAttributeError: 'str' object has no attribute 'decode'\r\n```\r\n\r\n## CONFIG\r\n* Python version: `Python 3.6.1 |Anaconda custom (64-bit)| (default, May 11 2017, 13:09:58)`\r\n* Tensor2Tensor version:\r\n```\r\n$ pip show tensor2tensor\r\nName: tensor2tensor\r\nVersion: 1.2.4\r\nSummary: Tensor2Tensor\r\nHome-page: http://github.com/tensorflow/tensor2tensor\r\nAuthor: Google Inc.\r\nAuthor-email: [email protected]\r\nLicense: Apache 2.0\r\nLocation: /home/pltrdy/anaconda3/lib/python3.6/site-packages\r\nRequires: sympy, numpy, requests, six, future, bz2file\r\n```\r\n\r\n## QUICKFIX\r\n```\r\n--- a/tensor2tensor/data_generators/cnn_dailymail.py\r\n+++ 
b/tensor2tensor/data_generators/cnn_dailymail.py\r\n@@ -74,7 +74,7 @@ def story_generator(tmp_dir):\r\n for path in paths:\r\n for story_file in tf.gfile.Glob(path + \"*\"):\r\n story = u\"\"\r\n- for line in tf.gfile.Open(story_file):\r\n+ for line in tf.gfile.Open(story_file, 'rb'):\r\n\r\n```\n", "before_files": [{"content": "# coding=utf-8\n# Copyright 2017 The Tensor2Tensor Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Data generators for the CNN and Daily Mail datasets.\"\"\"\n\nfrom __future__ import absolute_import\nfrom __future__ import division\nfrom __future__ import print_function\n\nimport os\nimport tarfile\n\n# Dependency imports\n\nimport six\nfrom tensor2tensor.data_generators import generator_utils\nfrom tensor2tensor.data_generators import problem\nfrom tensor2tensor.data_generators import text_encoder\nfrom tensor2tensor.utils import registry\n\nimport tensorflow as tf\n\n\n# Links to data from http://cs.nyu.edu/~kcho/DMQA/\n_CNN_STORIES_DRIVE_URL = \"https://drive.google.com/uc?export=download&id=0BwmD_VLjROrfTHk4NFg2SndKcjQ\"\n\n_DAILYMAIL_STORIES_DRIVE_URL = \"https://drive.google.com/uc?export=download&id=0BwmD_VLjROrfM1BxdkxVaTY2bWs\"\n\n\n# End-of-sentence marker.\nEOS = text_encoder.EOS_ID\n\n\ndef _maybe_download_corpora(tmp_dir):\n \"\"\"Download corpora if necessary and unzip them.\n\n Args:\n tmp_dir: directory containing dataset.\n\n Returns:\n filepath of the downloaded corpus file.\n \"\"\"\n cnn_filename = \"cnn_stories.tgz\"\n cnn_finalpath = os.path.join(tmp_dir, \"cnn/stories/\")\n dailymail_filename = \"dailymail_stories.tgz\"\n dailymail_finalpath = os.path.join(tmp_dir, \"dailymail/stories/\")\n if not tf.gfile.Exists(cnn_finalpath):\n cnn_file = generator_utils.maybe_download_from_drive(\n tmp_dir, cnn_filename, _CNN_STORIES_DRIVE_URL)\n with tarfile.open(cnn_file, \"r:gz\") as cnn_tar:\n cnn_tar.extractall(tmp_dir)\n if not tf.gfile.Exists(dailymail_finalpath):\n dailymail_file = generator_utils.maybe_download_from_drive(\n tmp_dir, dailymail_filename, _DAILYMAIL_STORIES_DRIVE_URL)\n with tarfile.open(dailymail_file, \"r:gz\") as dailymail_tar:\n dailymail_tar.extractall(tmp_dir)\n return [cnn_finalpath, dailymail_finalpath]\n\n\ndef story_generator(tmp_dir):\n paths = _maybe_download_corpora(tmp_dir)\n for path in paths:\n for story_file in tf.gfile.Glob(path + \"*\"):\n story = u\"\"\n for line in tf.gfile.Open(story_file):\n line = unicode(line, \"utf-8\") if six.PY2 else line.decode(\"utf-8\")\n story += line\n yield story\n\n\ndef _story_summary_split(story):\n end_pos = story.find(\"\\n\\n\") # Upto first empty line.\n assert end_pos != -1\n return story[:end_pos], story[end_pos:].strip()\n\n\[email protected]_problem\nclass SummarizeCnnDailymail32k(problem.Text2TextProblem):\n \"\"\"Summarize CNN and Daily Mail articles to their first paragraph.\"\"\"\n\n @property\n def is_character_level(self):\n return False\n\n @property\n def has_inputs(self):\n return True\n\n @property\n def input_space_id(self):\n return 
problem.SpaceID.EN_TOK\n\n @property\n def target_space_id(self):\n return problem.SpaceID.EN_TOK\n\n @property\n def num_shards(self):\n return 100\n\n @property\n def vocab_name(self):\n return \"vocab.cnndailymail\"\n\n @property\n def use_subword_tokenizer(self):\n return True\n\n @property\n def targeted_vocab_size(self):\n return 2**15 # 32768\n\n @property\n def use_train_shards_for_dev(self):\n return True\n\n def generator(self, data_dir, tmp_dir, _):\n encoder = generator_utils.get_or_generate_vocab_inner(\n data_dir, self.vocab_file, self.targeted_vocab_size,\n story_generator(tmp_dir))\n for story in story_generator(tmp_dir):\n summary, rest = _story_summary_split(story)\n encoded_summary = encoder.encode(summary) + [EOS]\n encoded_story = encoder.encode(rest) + [EOS]\n yield {\"inputs\": encoded_story, \"targets\": encoded_summary}\n", "path": "tensor2tensor/data_generators/cnn_dailymail.py"}]} | 2,956 | 167 |
gh_patches_debug_11751 | rasdani/github-patches | git_diff | localstack__localstack-10170 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Invoking a lambda using a function url always returns 200
### Is there an existing issue for this?
- [X] I have searched the existing issues
### Current Behavior
Given the following JavaScript Lambda function:
```javascript
exports.main = async (event) => {
console.log('Hello World');
return {
statusCode: '302',
body: {},
headers: {
Location: 'https://example.com'
}
};
}
```
When deploying to LocalStack and adding a function URL, the URL returns the correct `body` and `headers`, but it always returns a 200 status code.
In a real AWS environment, the actual 302 is returned.
### Expected Behavior
The status code returned by the Lambda handler (302 in this example) is passed through to the caller.
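
For reference, a minimal sketch of how the raw status code can be checked against the function URL. The URL ID below is hypothetical; LocalStack generates the real one per deployment. `http.client` does not follow redirects, so the difference between 302 and 200 is directly visible:

```python
import http.client

# Hypothetical function URL ID and region; substitute the URL of your deployment.
conn = http.client.HTTPConnection(
    "abcdef123456.lambda-url.us-east-1.localhost.localstack.cloud", 4566
)
conn.request("GET", "/")
resp = conn.getresponse()

# Expected (real AWS): "302 https://example.com"; observed (LocalStack): "200 None".
print(resp.status, resp.getheader("Location"))
```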
### How are you starting LocalStack?
With a docker-compose file
### Steps To Reproduce
My docker compose file
```yaml
version: '3.4'
services:
localstack:
image: localstack/localstack:2.0.2
environment:
- SERVICES=dynamodb,lambda
ports:
- '4566:4566'
expose:
- '4566'
volumes:
- /var/run/docker.sock:/var/run/docker.sock
```
#### How are you starting localstack (e.g., `bin/localstack` command, arguments, or `docker-compose.yml`)
Deploying using Terraform:
```hcl
data "aws_iam_policy_document" "assume_role" {
statement {
effect = "Allow"
principals {
type = "Service"
identifiers = ["lambda.amazonaws.com"]
}
actions = ["sts:AssumeRole"]
}
}
resource "aws_iam_role" "iam_for_lambda" {
name = "iam_for_lambda"
assume_role_policy = data.aws_iam_policy_document.assume_role.json
}
data "archive_file" "lambda" {
type = "zip"
source_file = "../index.js"
output_path = local.archive_file
}
resource "aws_lambda_function" "redirect_lambda" {
filename = local.archive_file
function_name = "redirects"
role = aws_iam_role.iam_for_lambda.arn
handler = "index.main"
source_code_hash = data.archive_file.lambda.output_base64sha256
runtime = "nodejs18.x"
environment {
variables = {
foo = "bar"
}
}
}
resource "aws_lambda_function_url" "lambda_url" {
function_name = aws_lambda_function.redirect_lambda.function_name
authorization_type = "NONE"
}
```
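
After `terraform apply`, the generated function URL can be looked up and then invoked as described above. A sketch using boto3, with the usual LocalStack assumptions (endpoint, dummy credentials, and region are not part of the original config):

```python
import boto3

# Typical LocalStack client setup; the credentials are placeholders.
client = boto3.client(
    "lambda",
    endpoint_url="http://localhost:4566",
    region_name="us-east-1",
    aws_access_key_id="test",
    aws_secret_access_key="test",
)

# "redirects" matches the function_name in the Terraform config above.
url = client.get_function_url_config(FunctionName="redirects")["FunctionUrl"]
print(url)  # e.g. http://<url-id>.lambda-url.us-east-1.localhost.localstack.cloud:4566/
```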
### Environment
```markdown
- OS: docker
- LocalStack: latest
```
### Anything else?
_No response_
</issue>
<code>
[start of localstack/services/lambda_/urlrouter.py]
1 """Routing for Lambda function URLs: https://docs.aws.amazon.com/lambda/latest/dg/lambda-urls.html"""
2 import base64
3 import json
4 import logging
5 import urllib
6 from datetime import datetime
7 from http import HTTPStatus
8
9 from localstack.aws.api import HttpResponse
10 from localstack.aws.api.lambda_ import InvocationType
11 from localstack.aws.protocol.serializer import gen_amzn_requestid
12 from localstack.http import Request, Router
13 from localstack.http.dispatcher import Handler
14 from localstack.services.lambda_.api_utils import FULL_FN_ARN_PATTERN
15 from localstack.services.lambda_.invocation.lambda_models import InvocationResult
16 from localstack.services.lambda_.invocation.lambda_service import LambdaService
17 from localstack.services.lambda_.invocation.models import lambda_stores
18 from localstack.utils.aws.request_context import AWS_REGION_REGEX
19 from localstack.utils.strings import long_uid, to_bytes, to_str
20 from localstack.utils.time import TIMESTAMP_READABLE_FORMAT, mktime, timestamp
21 from localstack.utils.urls import localstack_host
22
23 LOG = logging.getLogger(__name__)
24
25
26 class FunctionUrlRouter:
27 router: Router[Handler]
28 lambda_service: LambdaService
29
30 def __init__(self, router: Router[Handler], lambda_service: LambdaService):
31 self.router = router
32 self.registered = False
33 self.lambda_service = lambda_service
34
35 def register_routes(self) -> None:
36 if self.registered:
37 LOG.debug("Skipped Lambda URL route registration (routes already registered).")
38 return
39 self.registered = True
40
41 LOG.debug("Registering parameterized Lambda routes.")
42
43 self.router.add(
44 "/",
45 host=f"<api_id>.lambda-url.<regex('{AWS_REGION_REGEX}'):region>.<regex('.*'):server>",
46 endpoint=self.handle_lambda_url_invocation,
47 defaults={"path": ""},
48 )
49 self.router.add(
50 "/<path:path>",
51 host=f"<api_id>.lambda-url.<regex('{AWS_REGION_REGEX}'):region>.<regex('.*'):server>",
52 endpoint=self.handle_lambda_url_invocation,
53 )
54
55 def handle_lambda_url_invocation(
56 self, request: Request, api_id: str, region: str, **url_params: dict[str, str]
57 ) -> HttpResponse:
58 response = HttpResponse(headers={"Content-type": "application/json"})
59
60 lambda_url_config = None
61 try:
62 for account_id in lambda_stores.keys():
63 store = lambda_stores[account_id][region]
64 for fn in store.functions.values():
65 for url_config in fn.function_url_configs.values():
66 if url_config.url_id == api_id:
67 lambda_url_config = url_config
68 except IndexError as e:
69 LOG.warning(f"Lambda URL ({api_id}) not found: {e}")
70 response.set_json({"Message": None})
71 response.status = "404"
72 return response
73
74 event = event_for_lambda_url(
75 api_id, request.full_path, request.data, request.headers, request.method
76 )
77
78 match = FULL_FN_ARN_PATTERN.search(lambda_url_config.function_arn).groupdict()
79
80 result = self.lambda_service.invoke(
81 function_name=match.get("function_name"),
82 qualifier=match.get("qualifier"),
83 account_id=match.get("account_id"),
84 region=match.get("region_name"),
85 invocation_type=InvocationType.RequestResponse,
86 client_context="{}", # TODO: test
87 payload=to_bytes(json.dumps(event)),
88 request_id=gen_amzn_requestid(),
89 )
90 if result.is_error:
91 response = HttpResponse("Internal Server Error", HTTPStatus.BAD_GATEWAY)
92 else:
93 response = lambda_result_to_response(result)
94 return response
95
96
97 def event_for_lambda_url(api_id: str, path: str, data, headers, method: str) -> dict:
98 raw_path = path.split("?")[0]
99 raw_query_string = path.split("?")[1] if len(path.split("?")) > 1 else ""
100 query_string_parameters = (
101 {} if not raw_query_string else dict(urllib.parse.parse_qsl(raw_query_string))
102 )
103
104 now = datetime.utcnow()
105 readable = timestamp(time=now, format=TIMESTAMP_READABLE_FORMAT)
106 if not any(char in readable for char in ["+", "-"]):
107 readable += "+0000"
108
109 source_ip = headers.get("Remote-Addr", "")
110 request_context = {
111 "accountId": "anonymous",
112 "apiId": api_id,
113 "domainName": headers.get("Host", ""),
114 "domainPrefix": api_id,
115 "http": {
116 "method": method,
117 "path": raw_path,
118 "protocol": "HTTP/1.1",
119 "sourceIp": source_ip,
120 "userAgent": headers.get("User-Agent", ""),
121 },
122 "requestId": long_uid(),
123 "routeKey": "$default",
124 "stage": "$default",
125 "time": readable,
126 "timeEpoch": mktime(ts=now, millis=True),
127 }
128
129 content_type = headers.get("Content-Type", "").lower()
130 content_type_is_text = any(text_type in content_type for text_type in ["text", "json", "xml"])
131
132 is_base64_encoded = not (data.isascii() and content_type_is_text) if data else False
133 body = base64.b64encode(data).decode() if is_base64_encoded else data
134 if isinstance(body, bytes):
135 body = to_str(body)
136
137 ignored_headers = ["connection", "x-localstack-tgt-api", "x-localstack-request-url"]
138 event_headers = {k.lower(): v for k, v in headers.items() if k.lower() not in ignored_headers}
139
140 event_headers.update(
141 {
142 "x-amzn-tls-cipher-suite": "ECDHE-RSA-AES128-GCM-SHA256",
143 "x-amzn-tls-version": "TLSv1.2",
144 "x-forwarded-proto": "http",
145 "x-forwarded-for": source_ip,
146 "x-forwarded-port": str(localstack_host().port),
147 }
148 )
149
150 event = {
151 "version": "2.0",
152 "routeKey": "$default",
153 "rawPath": raw_path,
154 "rawQueryString": raw_query_string,
155 "headers": event_headers,
156 "queryStringParameters": query_string_parameters,
157 "requestContext": request_context,
158 "body": body,
159 "isBase64Encoded": is_base64_encoded,
160 }
161
162 if not data:
163 event.pop("body")
164
165 return event
166
167
168 def lambda_result_to_response(result: InvocationResult):
169 response = HttpResponse()
170
171 # Set default headers
172 response.headers.update(
173 {
174 "Content-Type": "application/json",
175 "Connection": "keep-alive",
176 "x-amzn-requestid": result.request_id,
177 "x-amzn-trace-id": long_uid(), # TODO: get the proper trace id here
178 }
179 )
180
181 original_payload = to_str(result.payload)
182 parsed_result = json.loads(original_payload)
183
184 # patch to fix whitespaces
185 # TODO: check if this is a downstream issue of invocation result serialization
186 original_payload = json.dumps(parsed_result, separators=(",", ":"))
187
188 if isinstance(parsed_result, str):
189 # a string is a special case here and is returned as-is
190 response.data = parsed_result
191 elif isinstance(parsed_result, dict):
192 # if it's a dict it might be a proper response
193 if isinstance(parsed_result.get("headers"), dict):
194 response.headers.update(parsed_result.get("headers"))
195 if "body" not in parsed_result:
196 # TODO: test if providing a status code but no body actually works
197 response.data = original_payload
198 elif isinstance(parsed_result.get("body"), dict):
199 response.data = json.dumps(parsed_result.get("body"))
200 elif parsed_result.get("isBase64Encoded", False):
201 body_bytes = to_bytes(to_str(parsed_result.get("body", "")))
202 decoded_body_bytes = base64.b64decode(body_bytes)
203 response.data = decoded_body_bytes
204 else:
205 response.data = parsed_result.get("body")
206 else:
207 response.data = original_payload
208
209 return response
210
[end of localstack/services/lambda_/urlrouter.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/localstack/services/lambda_/urlrouter.py b/localstack/services/lambda_/urlrouter.py
--- a/localstack/services/lambda_/urlrouter.py
+++ b/localstack/services/lambda_/urlrouter.py
@@ -192,6 +192,8 @@
# if it's a dict it might be a proper response
if isinstance(parsed_result.get("headers"), dict):
response.headers.update(parsed_result.get("headers"))
+ if "statusCode" in parsed_result:
+ response.status_code = int(parsed_result["statusCode"])
if "body" not in parsed_result:
# TODO: test if providing a status code but no body actually works
response.data = original_payload
| {"golden_diff": "diff --git a/localstack/services/lambda_/urlrouter.py b/localstack/services/lambda_/urlrouter.py\n--- a/localstack/services/lambda_/urlrouter.py\n+++ b/localstack/services/lambda_/urlrouter.py\n@@ -192,6 +192,8 @@\n # if it's a dict it might be a proper response\n if isinstance(parsed_result.get(\"headers\"), dict):\n response.headers.update(parsed_result.get(\"headers\"))\n+ if \"statusCode\" in parsed_result:\n+ response.status_code = int(parsed_result[\"statusCode\"])\n if \"body\" not in parsed_result:\n # TODO: test if providing a status code but no body actually works\n response.data = original_payload\n", "issue": "Invoking a lambda using a function url always returns 200\n### Is there an existing issue for this?\n\n- [X] I have searched the existing issues\n\n### Current Behavior\n\nGiven the following javascript lambda function\r\n```javascript\r\nexports.main = async (event) => {\r\n console.log('Hello World');\r\n return {\r\n statusCode: '302',\r\n body: {},\r\n headers: {\r\n Location: 'https://example.com'\r\n }\r\n };\r\n}\r\n```\r\n\r\nWhen deploying to local stack and adding a function url, the url returns the correct `body` and `headers` but it will return a 200 status code.\r\n\r\nIn a real aws environment, an actual 302 is returned.\n\n### Expected Behavior\n\nThe correct status code is returned\n\n### How are you starting LocalStack?\n\nWith a docker-compose file\n\n### Steps To Reproduce\n\nMy docker compose file\r\n\r\n```yaml\r\nversion: '3.4'\r\nservices:\r\n localstack:\r\n image: localstack/localstack:2.0.2\r\n environment:\r\n - SERVICES=dynamodb,lambda\r\n ports:\r\n - '4566:4566'\r\n expose:\r\n - '4566'\r\n volumes:\r\n - /var/run/docker.sock:/var/run/docker.sock\r\n```\r\n\r\n#### How are you starting localstack (e.g., `bin/localstack` command, arguments, or `docker-compose.yml`)\r\n\r\ndeploying using terraform\r\n\r\n```hcl\r\ndata \"aws_iam_policy_document\" \"assume_role\" {\r\n statement {\r\n effect = \"Allow\"\r\n\r\n principals {\r\n type = \"Service\"\r\n identifiers = [\"lambda.amazonaws.com\"]\r\n }\r\n\r\n actions = [\"sts:AssumeRole\"]\r\n }\r\n}\r\n\r\nresource \"aws_iam_role\" \"iam_for_lambda\" {\r\n name = \"iam_for_lambda\"\r\n assume_role_policy = data.aws_iam_policy_document.assume_role.json\r\n}\r\n\r\ndata \"archive_file\" \"lambda\" {\r\n type = \"zip\"\r\n source_file = \"../index.js\"\r\n output_path = local.archive_file\r\n}\r\n\r\nresource \"aws_lambda_function\" \"redirect_lambda\" {\r\n filename = local.archive_file\r\n function_name = \"redirects\"\r\n role = aws_iam_role.iam_for_lambda.arn\r\n handler = \"index.main\"\r\n\r\n source_code_hash = data.archive_file.lambda.output_base64sha256\r\n\r\n runtime = \"nodejs18.x\"\r\n\r\n environment {\r\n variables = {\r\n foo = \"bar\"\r\n }\r\n }\r\n}\r\n\r\nresource \"aws_lambda_function_url\" \"lambda_url\" {\r\n function_name = aws_lambda_function.redirect_lambda.function_name\r\n authorization_type = \"NONE\"\r\n}\r\n```\r\n\n\n### Environment\n\n```markdown\n- OS: docker\r\n- LocalStack: latest\n```\n\n\n### Anything else?\n\n_No response_\n", "before_files": [{"content": "\"\"\"Routing for Lambda function URLs: https://docs.aws.amazon.com/lambda/latest/dg/lambda-urls.html\"\"\"\nimport base64\nimport json\nimport logging\nimport urllib\nfrom datetime import datetime\nfrom http import HTTPStatus\n\nfrom localstack.aws.api import HttpResponse\nfrom localstack.aws.api.lambda_ import InvocationType\nfrom localstack.aws.protocol.serializer import 
gen_amzn_requestid\nfrom localstack.http import Request, Router\nfrom localstack.http.dispatcher import Handler\nfrom localstack.services.lambda_.api_utils import FULL_FN_ARN_PATTERN\nfrom localstack.services.lambda_.invocation.lambda_models import InvocationResult\nfrom localstack.services.lambda_.invocation.lambda_service import LambdaService\nfrom localstack.services.lambda_.invocation.models import lambda_stores\nfrom localstack.utils.aws.request_context import AWS_REGION_REGEX\nfrom localstack.utils.strings import long_uid, to_bytes, to_str\nfrom localstack.utils.time import TIMESTAMP_READABLE_FORMAT, mktime, timestamp\nfrom localstack.utils.urls import localstack_host\n\nLOG = logging.getLogger(__name__)\n\n\nclass FunctionUrlRouter:\n router: Router[Handler]\n lambda_service: LambdaService\n\n def __init__(self, router: Router[Handler], lambda_service: LambdaService):\n self.router = router\n self.registered = False\n self.lambda_service = lambda_service\n\n def register_routes(self) -> None:\n if self.registered:\n LOG.debug(\"Skipped Lambda URL route registration (routes already registered).\")\n return\n self.registered = True\n\n LOG.debug(\"Registering parameterized Lambda routes.\")\n\n self.router.add(\n \"/\",\n host=f\"<api_id>.lambda-url.<regex('{AWS_REGION_REGEX}'):region>.<regex('.*'):server>\",\n endpoint=self.handle_lambda_url_invocation,\n defaults={\"path\": \"\"},\n )\n self.router.add(\n \"/<path:path>\",\n host=f\"<api_id>.lambda-url.<regex('{AWS_REGION_REGEX}'):region>.<regex('.*'):server>\",\n endpoint=self.handle_lambda_url_invocation,\n )\n\n def handle_lambda_url_invocation(\n self, request: Request, api_id: str, region: str, **url_params: dict[str, str]\n ) -> HttpResponse:\n response = HttpResponse(headers={\"Content-type\": \"application/json\"})\n\n lambda_url_config = None\n try:\n for account_id in lambda_stores.keys():\n store = lambda_stores[account_id][region]\n for fn in store.functions.values():\n for url_config in fn.function_url_configs.values():\n if url_config.url_id == api_id:\n lambda_url_config = url_config\n except IndexError as e:\n LOG.warning(f\"Lambda URL ({api_id}) not found: {e}\")\n response.set_json({\"Message\": None})\n response.status = \"404\"\n return response\n\n event = event_for_lambda_url(\n api_id, request.full_path, request.data, request.headers, request.method\n )\n\n match = FULL_FN_ARN_PATTERN.search(lambda_url_config.function_arn).groupdict()\n\n result = self.lambda_service.invoke(\n function_name=match.get(\"function_name\"),\n qualifier=match.get(\"qualifier\"),\n account_id=match.get(\"account_id\"),\n region=match.get(\"region_name\"),\n invocation_type=InvocationType.RequestResponse,\n client_context=\"{}\", # TODO: test\n payload=to_bytes(json.dumps(event)),\n request_id=gen_amzn_requestid(),\n )\n if result.is_error:\n response = HttpResponse(\"Internal Server Error\", HTTPStatus.BAD_GATEWAY)\n else:\n response = lambda_result_to_response(result)\n return response\n\n\ndef event_for_lambda_url(api_id: str, path: str, data, headers, method: str) -> dict:\n raw_path = path.split(\"?\")[0]\n raw_query_string = path.split(\"?\")[1] if len(path.split(\"?\")) > 1 else \"\"\n query_string_parameters = (\n {} if not raw_query_string else dict(urllib.parse.parse_qsl(raw_query_string))\n )\n\n now = datetime.utcnow()\n readable = timestamp(time=now, format=TIMESTAMP_READABLE_FORMAT)\n if not any(char in readable for char in [\"+\", \"-\"]):\n readable += \"+0000\"\n\n source_ip = headers.get(\"Remote-Addr\", \"\")\n 
request_context = {\n \"accountId\": \"anonymous\",\n \"apiId\": api_id,\n \"domainName\": headers.get(\"Host\", \"\"),\n \"domainPrefix\": api_id,\n \"http\": {\n \"method\": method,\n \"path\": raw_path,\n \"protocol\": \"HTTP/1.1\",\n \"sourceIp\": source_ip,\n \"userAgent\": headers.get(\"User-Agent\", \"\"),\n },\n \"requestId\": long_uid(),\n \"routeKey\": \"$default\",\n \"stage\": \"$default\",\n \"time\": readable,\n \"timeEpoch\": mktime(ts=now, millis=True),\n }\n\n content_type = headers.get(\"Content-Type\", \"\").lower()\n content_type_is_text = any(text_type in content_type for text_type in [\"text\", \"json\", \"xml\"])\n\n is_base64_encoded = not (data.isascii() and content_type_is_text) if data else False\n body = base64.b64encode(data).decode() if is_base64_encoded else data\n if isinstance(body, bytes):\n body = to_str(body)\n\n ignored_headers = [\"connection\", \"x-localstack-tgt-api\", \"x-localstack-request-url\"]\n event_headers = {k.lower(): v for k, v in headers.items() if k.lower() not in ignored_headers}\n\n event_headers.update(\n {\n \"x-amzn-tls-cipher-suite\": \"ECDHE-RSA-AES128-GCM-SHA256\",\n \"x-amzn-tls-version\": \"TLSv1.2\",\n \"x-forwarded-proto\": \"http\",\n \"x-forwarded-for\": source_ip,\n \"x-forwarded-port\": str(localstack_host().port),\n }\n )\n\n event = {\n \"version\": \"2.0\",\n \"routeKey\": \"$default\",\n \"rawPath\": raw_path,\n \"rawQueryString\": raw_query_string,\n \"headers\": event_headers,\n \"queryStringParameters\": query_string_parameters,\n \"requestContext\": request_context,\n \"body\": body,\n \"isBase64Encoded\": is_base64_encoded,\n }\n\n if not data:\n event.pop(\"body\")\n\n return event\n\n\ndef lambda_result_to_response(result: InvocationResult):\n response = HttpResponse()\n\n # Set default headers\n response.headers.update(\n {\n \"Content-Type\": \"application/json\",\n \"Connection\": \"keep-alive\",\n \"x-amzn-requestid\": result.request_id,\n \"x-amzn-trace-id\": long_uid(), # TODO: get the proper trace id here\n }\n )\n\n original_payload = to_str(result.payload)\n parsed_result = json.loads(original_payload)\n\n # patch to fix whitespaces\n # TODO: check if this is a downstream issue of invocation result serialization\n original_payload = json.dumps(parsed_result, separators=(\",\", \":\"))\n\n if isinstance(parsed_result, str):\n # a string is a special case here and is returned as-is\n response.data = parsed_result\n elif isinstance(parsed_result, dict):\n # if it's a dict it might be a proper response\n if isinstance(parsed_result.get(\"headers\"), dict):\n response.headers.update(parsed_result.get(\"headers\"))\n if \"body\" not in parsed_result:\n # TODO: test if providing a status code but no body actually works\n response.data = original_payload\n elif isinstance(parsed_result.get(\"body\"), dict):\n response.data = json.dumps(parsed_result.get(\"body\"))\n elif parsed_result.get(\"isBase64Encoded\", False):\n body_bytes = to_bytes(to_str(parsed_result.get(\"body\", \"\")))\n decoded_body_bytes = base64.b64decode(body_bytes)\n response.data = decoded_body_bytes\n else:\n response.data = parsed_result.get(\"body\")\n else:\n response.data = original_payload\n\n return response\n", "path": "localstack/services/lambda_/urlrouter.py"}]} | 3,414 | 152 |
gh_patches_debug_348 | rasdani/github-patches | git_diff | ranaroussi__yfinance-295 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Deprecation warning due to invalid escape sequences
Deprecation warnings are raised due to invalid escape sequences. This can be fixed by using raw strings or by escaping the literals. pyupgrade also helps with automatic conversion: https://github.com/asottile/pyupgrade/
```
find . -iname '*.py' | grep -Ev 'test.py' | xargs -P4 -I{} python3.8 -Wall -m py_compile {}
./yfinance/utils.py:67: DeprecationWarning: invalid escape sequence \g
return [_re.sub("([a-z])([A-Z])", "\g<1> \g<2>", i).title() for i in o]
```
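For illustration, one possible fix along the lines suggested above (a raw string for the replacement pattern) could look like this; it is a sketch of the general approach, not necessarily the exact change that will land:

```python
import re

def camel2title(strings):
    # r"..." keeps the \g<1>/\g<2> backreferences literal, so no
    # DeprecationWarning about invalid escape sequences is raised.
    return [re.sub("([a-z])([A-Z])", r"\g<1> \g<2>", s).title() for s in strings]

print(camel2title(["adjClose", "openInterest"]))  # ['Adj Close', 'Open Interest']
```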
</issue>
<code>
[start of yfinance/utils.py]
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Yahoo! Finance market data downloader (+fix for Pandas Datareader)
5 # https://github.com/ranaroussi/yfinance
6 #
7 # Copyright 2017-2019 Ran Aroussi
8 #
9 # Licensed under the Apache License, Version 2.0 (the "License");
10 # you may not use this file except in compliance with the License.
11 # You may obtain a copy of the License at
12 #
13 # http://www.apache.org/licenses/LICENSE-2.0
14 #
15 # Unless required by applicable law or agreed to in writing, software
16 # distributed under the License is distributed on an "AS IS" BASIS,
17 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 # See the License for the specific language governing permissions and
19 # limitations under the License.
20 #
21
22 from __future__ import print_function
23
24 import requests as _requests
25 import re as _re
26 import pandas as _pd
27 import numpy as _np
28 import sys as _sys
29 import re as _re
30
31 try:
32 import ujson as _json
33 except ImportError:
34 import json as _json
35
36
37 def empty_df(index=[]):
38 empty = _pd.DataFrame(index=index, data={
39 'Open': _np.nan, 'High': _np.nan, 'Low': _np.nan,
40 'Close': _np.nan, 'Adj Close': _np.nan, 'Volume': _np.nan})
41 empty.index.name = 'Date'
42 return empty
43
44
45 def get_json(url, proxy=None):
46 html = _requests.get(url=url, proxies=proxy).text
47
48 if "QuoteSummaryStore" not in html:
49 html = _requests.get(url=url, proxies=proxy).text
50 if "QuoteSummaryStore" not in html:
51 return {}
52
53 json_str = html.split('root.App.main =')[1].split(
54 '(this)')[0].split(';\n}')[0].strip()
55 data = _json.loads(json_str)[
56 'context']['dispatcher']['stores']['QuoteSummaryStore']
57
58 # return data
59 new_data = _json.dumps(data).replace('{}', 'null')
60 new_data = _re.sub(
61 r'\{[\'|\"]raw[\'|\"]:(.*?),(.*?)\}', r'\1', new_data)
62
63 return _json.loads(new_data)
64
65
66 def camel2title(o):
67 return [_re.sub("([a-z])([A-Z])", "\g<1> \g<2>", i).title() for i in o]
68
69
70 def auto_adjust(data):
71 df = data.copy()
72 ratio = df["Close"] / df["Adj Close"]
73 df["Adj Open"] = df["Open"] / ratio
74 df["Adj High"] = df["High"] / ratio
75 df["Adj Low"] = df["Low"] / ratio
76
77 df.drop(
78 ["Open", "High", "Low", "Close"],
79 axis=1, inplace=True)
80
81 df.rename(columns={
82 "Adj Open": "Open", "Adj High": "High",
83 "Adj Low": "Low", "Adj Close": "Close"
84 }, inplace=True)
85
86 df = df[["Open", "High", "Low", "Close", "Volume"]]
87 return df[["Open", "High", "Low", "Close", "Volume"]]
88
89
90 def back_adjust(data):
91 """ back-adjusted data to mimic true historical prices """
92
93 df = data.copy()
94 ratio = df["Adj Close"] / df["Close"]
95 df["Adj Open"] = df["Open"] * ratio
96 df["Adj High"] = df["High"] * ratio
97 df["Adj Low"] = df["Low"] * ratio
98
99 df.drop(
100 ["Open", "High", "Low", "Adj Close"],
101 axis=1, inplace=True)
102
103 df.rename(columns={
104 "Adj Open": "Open", "Adj High": "High",
105 "Adj Low": "Low"
106 }, inplace=True)
107
108 return df[["Open", "High", "Low", "Close", "Volume"]]
109
110
111 def parse_quotes(data, tz=None):
112 timestamps = data["timestamp"]
113 ohlc = data["indicators"]["quote"][0]
114 volumes = ohlc["volume"]
115 opens = ohlc["open"]
116 closes = ohlc["close"]
117 lows = ohlc["low"]
118 highs = ohlc["high"]
119
120 adjclose = closes
121 if "adjclose" in data["indicators"]:
122 adjclose = data["indicators"]["adjclose"][0]["adjclose"]
123
124 quotes = _pd.DataFrame({"Open": opens,
125 "High": highs,
126 "Low": lows,
127 "Close": closes,
128 "Adj Close": adjclose,
129 "Volume": volumes})
130
131 quotes.index = _pd.to_datetime(timestamps, unit="s")
132 quotes.sort_index(inplace=True)
133
134 if tz is not None:
135 quotes.index = quotes.index.tz_localize(tz)
136
137 return quotes
138
139
140 def parse_actions(data, tz=None):
141 dividends = _pd.DataFrame(columns=["Dividends"])
142 splits = _pd.DataFrame(columns=["Stock Splits"])
143
144 if "events" in data:
145 if "dividends" in data["events"]:
146 dividends = _pd.DataFrame(
147 data=list(data["events"]["dividends"].values()))
148 dividends.set_index("date", inplace=True)
149 dividends.index = _pd.to_datetime(dividends.index, unit="s")
150 dividends.sort_index(inplace=True)
151 if tz is not None:
152 dividends.index = dividends.index.tz_localize(tz)
153
154 dividends.columns = ["Dividends"]
155
156 if "splits" in data["events"]:
157 splits = _pd.DataFrame(
158 data=list(data["events"]["splits"].values()))
159 splits.set_index("date", inplace=True)
160 splits.index = _pd.to_datetime(splits.index, unit="s")
161 splits.sort_index(inplace=True)
162 if tz is not None:
163 splits.index = splits.index.tz_localize(tz)
164 splits["Stock Splits"] = splits["numerator"] / \
165 splits["denominator"]
166 splits = splits["Stock Splits"]
167
168 return dividends, splits
169
170
171 class ProgressBar:
172 def __init__(self, iterations, text='completed'):
173 self.text = text
174 self.iterations = iterations
175 self.prog_bar = '[]'
176 self.fill_char = '*'
177 self.width = 50
178 self.__update_amount(0)
179 self.elapsed = 1
180
181 def completed(self):
182 if self.elapsed > self.iterations:
183 self.elapsed = self.iterations
184 self.update_iteration(1)
185 print('\r' + str(self), end='')
186 _sys.stdout.flush()
187 print()
188
189 def animate(self, iteration=None):
190 if iteration is None:
191 self.elapsed += 1
192 iteration = self.elapsed
193 else:
194 self.elapsed += iteration
195
196 print('\r' + str(self), end='')
197 _sys.stdout.flush()
198 self.update_iteration()
199
200 def update_iteration(self, val=None):
201 val = val if val is not None else self.elapsed / float(self.iterations)
202 self.__update_amount(val * 100.0)
203 self.prog_bar += ' %s of %s %s' % (
204 self.elapsed, self.iterations, self.text)
205
206 def __update_amount(self, new_amount):
207 percent_done = int(round((new_amount / 100.0) * 100.0))
208 all_full = self.width - 2
209 num_hashes = int(round((percent_done / 100.0) * all_full))
210 self.prog_bar = '[' + self.fill_char * \
211 num_hashes + ' ' * (all_full - num_hashes) + ']'
212 pct_place = (len(self.prog_bar) // 2) - len(str(percent_done))
213 pct_string = '%d%%' % percent_done
214 self.prog_bar = self.prog_bar[0:pct_place] + \
215 (pct_string + self.prog_bar[pct_place + len(pct_string):])
216
217 def __str__(self):
218 return str(self.prog_bar)
219
[end of yfinance/utils.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/yfinance/utils.py b/yfinance/utils.py
--- a/yfinance/utils.py
+++ b/yfinance/utils.py
@@ -64,7 +64,7 @@
def camel2title(o):
- return [_re.sub("([a-z])([A-Z])", "\g<1> \g<2>", i).title() for i in o]
+ return [_re.sub("([a-z])([A-Z])", r"\g<1> \g<2>", i).title() for i in o]
def auto_adjust(data):
| {"golden_diff": "diff --git a/yfinance/utils.py b/yfinance/utils.py\n--- a/yfinance/utils.py\n+++ b/yfinance/utils.py\n@@ -64,7 +64,7 @@\n \n \n def camel2title(o):\n- return [_re.sub(\"([a-z])([A-Z])\", \"\\g<1> \\g<2>\", i).title() for i in o]\n+ return [_re.sub(\"([a-z])([A-Z])\", r\"\\g<1> \\g<2>\", i).title() for i in o]\n \n \n def auto_adjust(data):\n", "issue": "Deprecation warning due to invalid escape sequences\nDeprecation warnings are raised due to invalid escape sequences. This can be fixed by using raw strings or escaping the literals. pyupgrade also helps in automatic conversion : https://github.com/asottile/pyupgrade/\r\n\r\n```\r\nfind . -iname '*.py' | grep -Ev 'test.py' | xargs -P4 -I{} python3.8 -Wall -m py_compile {}\r\n./yfinance/utils.py:67: DeprecationWarning: invalid escape sequence \\g\r\n return [_re.sub(\"([a-z])([A-Z])\", \"\\g<1> \\g<2>\", i).title() for i in o]\r\n```\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Yahoo! Finance market data downloader (+fix for Pandas Datareader)\n# https://github.com/ranaroussi/yfinance\n#\n# Copyright 2017-2019 Ran Aroussi\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\nfrom __future__ import print_function\n\nimport requests as _requests\nimport re as _re\nimport pandas as _pd\nimport numpy as _np\nimport sys as _sys\nimport re as _re\n\ntry:\n import ujson as _json\nexcept ImportError:\n import json as _json\n\n\ndef empty_df(index=[]):\n empty = _pd.DataFrame(index=index, data={\n 'Open': _np.nan, 'High': _np.nan, 'Low': _np.nan,\n 'Close': _np.nan, 'Adj Close': _np.nan, 'Volume': _np.nan})\n empty.index.name = 'Date'\n return empty\n\n\ndef get_json(url, proxy=None):\n html = _requests.get(url=url, proxies=proxy).text\n\n if \"QuoteSummaryStore\" not in html:\n html = _requests.get(url=url, proxies=proxy).text\n if \"QuoteSummaryStore\" not in html:\n return {}\n\n json_str = html.split('root.App.main =')[1].split(\n '(this)')[0].split(';\\n}')[0].strip()\n data = _json.loads(json_str)[\n 'context']['dispatcher']['stores']['QuoteSummaryStore']\n\n # return data\n new_data = _json.dumps(data).replace('{}', 'null')\n new_data = _re.sub(\n r'\\{[\\'|\\\"]raw[\\'|\\\"]:(.*?),(.*?)\\}', r'\\1', new_data)\n\n return _json.loads(new_data)\n\n\ndef camel2title(o):\n return [_re.sub(\"([a-z])([A-Z])\", \"\\g<1> \\g<2>\", i).title() for i in o]\n\n\ndef auto_adjust(data):\n df = data.copy()\n ratio = df[\"Close\"] / df[\"Adj Close\"]\n df[\"Adj Open\"] = df[\"Open\"] / ratio\n df[\"Adj High\"] = df[\"High\"] / ratio\n df[\"Adj Low\"] = df[\"Low\"] / ratio\n\n df.drop(\n [\"Open\", \"High\", \"Low\", \"Close\"],\n axis=1, inplace=True)\n\n df.rename(columns={\n \"Adj Open\": \"Open\", \"Adj High\": \"High\",\n \"Adj Low\": \"Low\", \"Adj Close\": \"Close\"\n }, inplace=True)\n\n df = df[[\"Open\", \"High\", \"Low\", \"Close\", \"Volume\"]]\n return df[[\"Open\", \"High\", \"Low\", \"Close\", \"Volume\"]]\n\n\ndef back_adjust(data):\n \"\"\" back-adjusted data to mimic true historical 
prices \"\"\"\n\n df = data.copy()\n ratio = df[\"Adj Close\"] / df[\"Close\"]\n df[\"Adj Open\"] = df[\"Open\"] * ratio\n df[\"Adj High\"] = df[\"High\"] * ratio\n df[\"Adj Low\"] = df[\"Low\"] * ratio\n\n df.drop(\n [\"Open\", \"High\", \"Low\", \"Adj Close\"],\n axis=1, inplace=True)\n\n df.rename(columns={\n \"Adj Open\": \"Open\", \"Adj High\": \"High\",\n \"Adj Low\": \"Low\"\n }, inplace=True)\n\n return df[[\"Open\", \"High\", \"Low\", \"Close\", \"Volume\"]]\n\n\ndef parse_quotes(data, tz=None):\n timestamps = data[\"timestamp\"]\n ohlc = data[\"indicators\"][\"quote\"][0]\n volumes = ohlc[\"volume\"]\n opens = ohlc[\"open\"]\n closes = ohlc[\"close\"]\n lows = ohlc[\"low\"]\n highs = ohlc[\"high\"]\n\n adjclose = closes\n if \"adjclose\" in data[\"indicators\"]:\n adjclose = data[\"indicators\"][\"adjclose\"][0][\"adjclose\"]\n\n quotes = _pd.DataFrame({\"Open\": opens,\n \"High\": highs,\n \"Low\": lows,\n \"Close\": closes,\n \"Adj Close\": adjclose,\n \"Volume\": volumes})\n\n quotes.index = _pd.to_datetime(timestamps, unit=\"s\")\n quotes.sort_index(inplace=True)\n\n if tz is not None:\n quotes.index = quotes.index.tz_localize(tz)\n\n return quotes\n\n\ndef parse_actions(data, tz=None):\n dividends = _pd.DataFrame(columns=[\"Dividends\"])\n splits = _pd.DataFrame(columns=[\"Stock Splits\"])\n\n if \"events\" in data:\n if \"dividends\" in data[\"events\"]:\n dividends = _pd.DataFrame(\n data=list(data[\"events\"][\"dividends\"].values()))\n dividends.set_index(\"date\", inplace=True)\n dividends.index = _pd.to_datetime(dividends.index, unit=\"s\")\n dividends.sort_index(inplace=True)\n if tz is not None:\n dividends.index = dividends.index.tz_localize(tz)\n\n dividends.columns = [\"Dividends\"]\n\n if \"splits\" in data[\"events\"]:\n splits = _pd.DataFrame(\n data=list(data[\"events\"][\"splits\"].values()))\n splits.set_index(\"date\", inplace=True)\n splits.index = _pd.to_datetime(splits.index, unit=\"s\")\n splits.sort_index(inplace=True)\n if tz is not None:\n splits.index = splits.index.tz_localize(tz)\n splits[\"Stock Splits\"] = splits[\"numerator\"] / \\\n splits[\"denominator\"]\n splits = splits[\"Stock Splits\"]\n\n return dividends, splits\n\n\nclass ProgressBar:\n def __init__(self, iterations, text='completed'):\n self.text = text\n self.iterations = iterations\n self.prog_bar = '[]'\n self.fill_char = '*'\n self.width = 50\n self.__update_amount(0)\n self.elapsed = 1\n\n def completed(self):\n if self.elapsed > self.iterations:\n self.elapsed = self.iterations\n self.update_iteration(1)\n print('\\r' + str(self), end='')\n _sys.stdout.flush()\n print()\n\n def animate(self, iteration=None):\n if iteration is None:\n self.elapsed += 1\n iteration = self.elapsed\n else:\n self.elapsed += iteration\n\n print('\\r' + str(self), end='')\n _sys.stdout.flush()\n self.update_iteration()\n\n def update_iteration(self, val=None):\n val = val if val is not None else self.elapsed / float(self.iterations)\n self.__update_amount(val * 100.0)\n self.prog_bar += ' %s of %s %s' % (\n self.elapsed, self.iterations, self.text)\n\n def __update_amount(self, new_amount):\n percent_done = int(round((new_amount / 100.0) * 100.0))\n all_full = self.width - 2\n num_hashes = int(round((percent_done / 100.0) * all_full))\n self.prog_bar = '[' + self.fill_char * \\\n num_hashes + ' ' * (all_full - num_hashes) + ']'\n pct_place = (len(self.prog_bar) // 2) - len(str(percent_done))\n pct_string = '%d%%' % percent_done\n self.prog_bar = self.prog_bar[0:pct_place] + \\\n (pct_string + 
self.prog_bar[pct_place + len(pct_string):])\n\n def __str__(self):\n return str(self.prog_bar)\n", "path": "yfinance/utils.py"}]} | 3,014 | 124 |
gh_patches_debug_18416 | rasdani/github-patches | git_diff | kartoza__prj.app-126 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
We should add Rischan to the email recipients list for projecta errors
</issue>
<code>
[start of django_project/core/settings/base.py]
1 # coding=utf-8
2 """
3 core.settings.base
4 """
5 # Django settings for projecta project.
6
7 from .utils import absolute_path
8
9 ADMINS = (
10 ('Tim Sutton', '[email protected]'),
11 )
12 SERVER_EMAIL = '[email protected]'
13 EMAIL_HOST = 'localhost'
14 DEFAULT_FROM_EMAIL = '[email protected]'
15
16 MANAGERS = ADMINS
17
18 # Local time zone for this installation. Choices can be found here:
19 # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
20 # although not all choices may be available on all operating systems.
21 # In a Windows environment this must be set to your system time zone.
22 TIME_ZONE = 'America/Chicago'
23
24 # Language code for this installation. All choices can be found here:
25 # http://www.i18nguy.com/unicode/language-identifiers.html
26 LANGUAGE_CODE = 'en-us'
27
28 SITE_ID = 1
29
30 # If you set this to False, Django will make some optimizations so as not
31 # to load the internationalization machinery.
32 USE_I18N = True
33
34 # If you set this to False, Django will not format dates, numbers and
35 # calendars according to the current locale.
36 USE_L10N = True
37
38 # If you set this to False, Django will not use timezone-aware datetimes.
39 USE_TZ = True
40
41 # Absolute filesystem path to the directory that will hold user-uploaded files.
42 # Example: "/var/www/example.com/media/"
43 MEDIA_ROOT = '/home/web/media'
44
45 # URL that handles the media served from MEDIA_ROOT. Make sure to use a
46 # trailing slash.
47 # Examples: "http://example.com/media/", "http://media.example.com/"
48 # MEDIA_URL = '/media/'
49 # setting full MEDIA_URL to be able to use it for the feeds
50 MEDIA_URL = '/media/'
51
52 # Absolute path to the directory static files should be collected to.
53 # Don't put anything in this directory yourself; store your static files
54 # in apps' "static/" subdirectories and in STATICFILES_DIRS.
55 # Example: "/var/www/example.com/static/"
56 STATIC_ROOT = '/home/web/static'
57
58 # URL prefix for static files.
59 # Example: "http://example.com/static/", "http://static.example.com/"
60 STATIC_URL = '/static/'
61
62 # Additional locations of static files
63 STATICFILES_DIRS = (
64 # Put strings here, like "/home/html/static" or "C:/www/django/static".
65 # Always use forward slashes, even on Windows.
66 # Don't forget to use absolute paths, not relative paths.
67 absolute_path('core', 'base_static'),
68 )
69
70 # List of finder classes that know how to find static files in
71 # various locations.
72 STATICFILES_FINDERS = (
73 'django.contrib.staticfiles.finders.FileSystemFinder',
74 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
75 # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
76 )
77
78 # import SECRET_KEY into current namespace
79 # noinspection PyUnresolvedReferences
80 from .secret import SECRET_KEY # noqa
81
82 # List of callables that know how to import templates from various sources.
83 TEMPLATE_LOADERS = (
84 'django.template.loaders.filesystem.Loader',
85 'django.template.loaders.app_directories.Loader',
86 # 'django.template.loaders.eggs.Loader',
87 )
88
89 TEMPLATE_CONTEXT_PROCESSORS = (
90 'django.contrib.auth.context_processors.auth',
91 'django.core.context_processors.request',
92 'django.core.context_processors.media',
93 'core.context_processors.add_intercom_app_id',
94 'django.template.context_processors.i18n'
95 )
96
97 MIDDLEWARE_CLASSES = (
98 'django.middleware.common.CommonMiddleware',
99 'django.contrib.sessions.middleware.SessionMiddleware',
100 'django.middleware.csrf.CsrfViewMiddleware',
101 'django.contrib.auth.middleware.AuthenticationMiddleware',
102 'django.contrib.messages.middleware.MessageMiddleware',
103 # Uncomment the next line for simple clickjacking protection:
104 # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
105 'core.custom_middleware.NavContextMiddleware',
106 'django.middleware.locale.LocaleMiddleware'
107 )
108
109 ROOT_URLCONF = 'core.urls'
110
111 # Python dotted path to the WSGI application used by Django's runserver.
112 WSGI_APPLICATION = 'core.wsgi.application'
113
114 TEMPLATE_DIRS = (
115 # project level templates
116 absolute_path('core', 'base_templates'),
117 absolute_path('vota', 'templates'),
118 absolute_path('changes', 'templates'),
119
120 )
121
122 INSTALLED_APPS = (
123 'django.contrib.auth',
124 'django.contrib.contenttypes',
125 'django.contrib.sessions',
126 'django.contrib.sites',
127 'django.contrib.messages',
128 'django.contrib.staticfiles',
129 'django.contrib.admin',
130 'django.contrib.syndication',
131 )
132
133 # A sample logging configuration. The only tangible logging
134 # performed by this configuration is to send an email to
135 # the site admins on every HTTP 500 error when DEBUG=False.
136 # See http://docs.djangoproject.com/en/dev/topics/logging for
137 # more details on how to customize your logging configuration.
138 LOGGING = {
139 'version': 1,
140 'disable_existing_loggers': False,
141 'filters': {
142 'require_debug_false': {
143 '()': 'django.utils.log.RequireDebugFalse'
144 }
145 },
146 'handlers': {
147 'mail_admins': {
148 'level': 'ERROR',
149 'filters': ['require_debug_false'],
150 'class': 'django.utils.log.AdminEmailHandler'
151 }
152 },
153 'loggers': {
154 'django.request': {
155 'handlers': ['mail_admins'],
156 'level': 'ERROR',
157 'propagate': True,
158 },
159 }
160 }
161
[end of django_project/core/settings/base.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/django_project/core/settings/base.py b/django_project/core/settings/base.py
--- a/django_project/core/settings/base.py
+++ b/django_project/core/settings/base.py
@@ -7,7 +7,8 @@
from .utils import absolute_path
ADMINS = (
- ('Tim Sutton', '[email protected]'),
+ ('Tim Sutton', '[email protected]'),
+ ('Rischan Mafrur', '[email protected]')
)
SERVER_EMAIL = '[email protected]'
EMAIL_HOST = 'localhost'
@@ -19,7 +20,7 @@
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
-TIME_ZONE = 'America/Chicago'
+TIME_ZONE = 'Africa/Johannesburg'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
| {"golden_diff": "diff --git a/django_project/core/settings/base.py b/django_project/core/settings/base.py\n--- a/django_project/core/settings/base.py\n+++ b/django_project/core/settings/base.py\n@@ -7,7 +7,8 @@\n from .utils import absolute_path\n \n ADMINS = (\n- ('Tim Sutton', '[email protected]'),\n+ ('Tim Sutton', '[email protected]'),\n+ ('Rischan Mafrur', '[email protected]')\n )\n SERVER_EMAIL = '[email protected]'\n EMAIL_HOST = 'localhost'\n@@ -19,7 +20,7 @@\n # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name\n # although not all choices may be available on all operating systems.\n # In a Windows environment this must be set to your system time zone.\n-TIME_ZONE = 'America/Chicago'\n+TIME_ZONE = 'Africa/Johannesburg'\n \n # Language code for this installation. All choices can be found here:\n # http://www.i18nguy.com/unicode/language-identifiers.html\n", "issue": "We should add Rischan to the email recipients list for projecta errors\n\n", "before_files": [{"content": "# coding=utf-8\n\"\"\"\ncore.settings.base\n\"\"\"\n# Django settings for projecta project.\n\nfrom .utils import absolute_path\n\nADMINS = (\n ('Tim Sutton', '[email protected]'),\n)\nSERVER_EMAIL = '[email protected]'\nEMAIL_HOST = 'localhost'\nDEFAULT_FROM_EMAIL = '[email protected]'\n\nMANAGERS = ADMINS\n\n# Local time zone for this installation. Choices can be found here:\n# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name\n# although not all choices may be available on all operating systems.\n# In a Windows environment this must be set to your system time zone.\nTIME_ZONE = 'America/Chicago'\n\n# Language code for this installation. All choices can be found here:\n# http://www.i18nguy.com/unicode/language-identifiers.html\nLANGUAGE_CODE = 'en-us'\n\nSITE_ID = 1\n\n# If you set this to False, Django will make some optimizations so as not\n# to load the internationalization machinery.\nUSE_I18N = True\n\n# If you set this to False, Django will not format dates, numbers and\n# calendars according to the current locale.\nUSE_L10N = True\n\n# If you set this to False, Django will not use timezone-aware datetimes.\nUSE_TZ = True\n\n# Absolute filesystem path to the directory that will hold user-uploaded files.\n# Example: \"/var/www/example.com/media/\"\nMEDIA_ROOT = '/home/web/media'\n\n# URL that handles the media served from MEDIA_ROOT. 
Make sure to use a\n# trailing slash.\n# Examples: \"http://example.com/media/\", \"http://media.example.com/\"\n# MEDIA_URL = '/media/'\n# setting full MEDIA_URL to be able to use it for the feeds\nMEDIA_URL = '/media/'\n\n# Absolute path to the directory static files should be collected to.\n# Don't put anything in this directory yourself; store your static files\n# in apps' \"static/\" subdirectories and in STATICFILES_DIRS.\n# Example: \"/var/www/example.com/static/\"\nSTATIC_ROOT = '/home/web/static'\n\n# URL prefix for static files.\n# Example: \"http://example.com/static/\", \"http://static.example.com/\"\nSTATIC_URL = '/static/'\n\n# Additional locations of static files\nSTATICFILES_DIRS = (\n # Put strings here, like \"/home/html/static\" or \"C:/www/django/static\".\n # Always use forward slashes, even on Windows.\n # Don't forget to use absolute paths, not relative paths.\n absolute_path('core', 'base_static'),\n)\n\n# List of finder classes that know how to find static files in\n# various locations.\nSTATICFILES_FINDERS = (\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n # 'django.contrib.staticfiles.finders.DefaultStorageFinder',\n)\n\n# import SECRET_KEY into current namespace\n# noinspection PyUnresolvedReferences\nfrom .secret import SECRET_KEY # noqa\n\n# List of callables that know how to import templates from various sources.\nTEMPLATE_LOADERS = (\n 'django.template.loaders.filesystem.Loader',\n 'django.template.loaders.app_directories.Loader',\n # 'django.template.loaders.eggs.Loader',\n)\n\nTEMPLATE_CONTEXT_PROCESSORS = (\n 'django.contrib.auth.context_processors.auth',\n 'django.core.context_processors.request',\n 'django.core.context_processors.media',\n 'core.context_processors.add_intercom_app_id',\n 'django.template.context_processors.i18n'\n)\n\nMIDDLEWARE_CLASSES = (\n 'django.middleware.common.CommonMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n # Uncomment the next line for simple clickjacking protection:\n # 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n 'core.custom_middleware.NavContextMiddleware',\n 'django.middleware.locale.LocaleMiddleware'\n)\n\nROOT_URLCONF = 'core.urls'\n\n# Python dotted path to the WSGI application used by Django's runserver.\nWSGI_APPLICATION = 'core.wsgi.application'\n\nTEMPLATE_DIRS = (\n # project level templates\n absolute_path('core', 'base_templates'),\n absolute_path('vota', 'templates'),\n absolute_path('changes', 'templates'),\n\n)\n\nINSTALLED_APPS = (\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.sites',\n 'django.contrib.messages',\n 'django.contrib.staticfiles',\n 'django.contrib.admin',\n 'django.contrib.syndication',\n)\n\n# A sample logging configuration. 
The only tangible logging\n# performed by this configuration is to send an email to\n# the site admins on every HTTP 500 error when DEBUG=False.\n# See http://docs.djangoproject.com/en/dev/topics/logging for\n# more details on how to customize your logging configuration.\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'filters': {\n 'require_debug_false': {\n '()': 'django.utils.log.RequireDebugFalse'\n }\n },\n 'handlers': {\n 'mail_admins': {\n 'level': 'ERROR',\n 'filters': ['require_debug_false'],\n 'class': 'django.utils.log.AdminEmailHandler'\n }\n },\n 'loggers': {\n 'django.request': {\n 'handlers': ['mail_admins'],\n 'level': 'ERROR',\n 'propagate': True,\n },\n }\n}\n", "path": "django_project/core/settings/base.py"}]} | 2,116 | 236 |
gh_patches_debug_39022 | rasdani/github-patches | git_diff | opsdroid__opsdroid-693 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add Google Style Docstrings
We should add Google Style Docstrings to every function, method, and class in opsdroid. This style will support the existing documentation and will help in the future by making it possible to generate documentation automatically.
This involves a fair amount of effort, so this issue can be worked on by more than one contributor; just make sure that everyone knows what you are working on, to avoid other contributors spending time on something you are already covering.
If you are unfamiliar with Google Style Docstrings, I'd recommend that you check these resources:
- [Sphinx 1.8.0+ - Google Style Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)
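For illustration, a minimal Google-style docstring on a generic function might look like the sketch below (a made-up example, not taken from opsdroid itself):

```python
def divide(a, b):
    """Divide one number by another.

    Args:
        a (float): Dividend.
        b (float): Divisor; must be non-zero.

    Returns:
        float: The quotient ``a / b``.

    Raises:
        ZeroDivisionError: If ``b`` is zero.
    """
    return a / b
```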
Docstrings that need to be updated:
- main.py
- [x] configure_lang
- [ ] configure_log
- [ ] get_logging_level
- [ ] check_dependencies
- [ ] print_version
- [ ] print_example_config
- [ ] edit_files
- [x] welcome_message
- ~~helper.py~~
- [x] get_opsdroid
- [x] del_rw
- [x] move_config_to_appdir
- memory.py
- [x] Memory
- [x] get
- [x] put
- [x] _get_from_database
- [x] _put_to_database
- message.py
- [x] Message
- [x] __init__
- [x] _thinking_delay
- [x] _typing delay
- [x] respond
- [x] react
- web.py
- [ ] Web
- [x] get_port
- [x] get_host
- [x] get_ssl_context
- [ ] start
- [ ] build_response
- [ ] web_index_handler
- [ ] web_stats_handler
- matchers.py
- [ ] match_regex
- [ ] match_apiai_action
- [ ] match_apiai_intent
- [ ] match_dialogflow_action
- [ ] match_dialogflow_intent
- [ ] match_luisai_intent
- [ ] match_rasanlu
- [ ] match_recastai
- [ ] match_witai
- [ ] match_crontab
- [ ] match_webhook
- [ ] match_always
- core.py
- [ ] OpsDroid
- [ ] default_connector
- [ ] exit
- [ ] critical
- [ ] call_stop
- [ ] disconnect
- [ ] stop
- [ ] load
- [ ] start_loop
- [x] setup_skills
- [ ] train_parsers
- [ ] start_connector_tasks
- [ ] start_database
- [ ] run_skill
- [ ] get_ranked_skills
- [ ] parse
- loader.py
- [ ] Loader
- [x] import_module_from_spec
- [x] import_module
- [x] check_cache
- [x] build_module_import_path
- [x] build_module_install_path
- [x] git_clone
- [x] git_pull
- [x] pip_install_deps
- [x] create_default_config
- [x] load_config_file
- [ ] envvar_constructor
- [ ] include_constructor
- [x] setup_modules_directory
- [x] load_modules_from_config
- [x] _load_modules
- [x] _install_module
- [x] _update_module
- [ ] _install_git_module
- [x] _install_local_module
---- ORIGINAL POST ----
I've been wondering about this for a while now, and I would like to know if we should replace/update all the docstrings in opsdroid with Google Style docstrings.
I think this could help new and old contributors to contribute and commit to opsdroid, since Google Style docstrings give more information about every method/function and specify clearly what sort of input the function/method expects, what it will return, and what will be raised (if applicable).
The downside of this style is that the length of every .py file will increase due to the docstrings, but since most IDEs allow you to hide those fields it shouldn't be too bad.
Here is a good example of Google Style Docstrings: [Sphinx 1.8.0+ - Google Style Docstrings](http://www.sphinx-doc.org/en/master/ext/example_google.html)
I would like to know what you all think about this idea and if it's worth spending time on it.
</issue>
<code>
[start of opsdroid/memory.py]
1 """Class for persisting information in opsdroid."""
2
3 import logging
4
5
6 _LOGGER = logging.getLogger(__name__)
7
8
9 class Memory:
10 """An object to store and persist data outside of opsdroid."""
11
12 def __init__(self):
13 """Create memory dictionary."""
14 self.memory = {}
15 self.databases = []
16
17 async def get(self, key):
18 """Get data object for a given key."""
19 _LOGGER.debug(_("Getting %s from memory."), key)
20 database_result = await self._get_from_database(key)
21 if database_result is not None:
22 self.memory[key] = database_result
23 if key in self.memory:
24 return self.memory[key]
25
26 return None
27
28 async def put(self, key, data):
29 """Put a data object to a given key."""
30 _LOGGER.debug(_("Putting %s to memory"), key)
31 self.memory[key] = data
32 await self._put_to_database(key, self.memory[key])
33
34 async def _get_from_database(self, key):
35 """Get updates from databases for a given key."""
36 if not self.databases:
37 return None
38
39 results = []
40 for database in self.databases:
41 results.append(await database.get(key))
42 # TODO: Handle multiple databases
43 return results[0]
44
45 async def _put_to_database(self, key, data):
46 """Put updates into databases for a given key."""
47 if self.databases:
48 for database in self.databases:
49 await database.put(key, data)
50
[end of opsdroid/memory.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opsdroid/memory.py b/opsdroid/memory.py
--- a/opsdroid/memory.py
+++ b/opsdroid/memory.py
@@ -7,15 +7,33 @@
class Memory:
- """An object to store and persist data outside of opsdroid."""
+ """A Memory object.
+
+ An object to obtain, store and persist data outside of opsdroid.
+
+ Attributes:
+ databases (:obj:`list` of :obj:`Database`): List of database objects.
+ memory (:obj:`dict`): In-memory dictionary to store data.
+
+ """
def __init__(self):
- """Create memory dictionary."""
+ """Create object with minimum properties."""
self.memory = {}
self.databases = []
async def get(self, key):
- """Get data object for a given key."""
+ """Get data object for a given key.
+
+ Gets the key value found in-memory or from the database(s).
+
+ Args:
+ key (str): Key to retrieve data.
+
+ Returns:
+ A data object for the given key, otherwise `None`.
+
+ """
_LOGGER.debug(_("Getting %s from memory."), key)
database_result = await self._get_from_database(key)
if database_result is not None:
@@ -26,24 +44,53 @@
return None
async def put(self, key, data):
- """Put a data object to a given key."""
+ """Put a data object to a given key.
+
+ Stores the key and value in memory and the database(s).
+
+ Args:
+ key (str): Key for the data to store.
+ data (obj): Data object to store.
+
+ """
_LOGGER.debug(_("Putting %s to memory"), key)
self.memory[key] = data
await self._put_to_database(key, self.memory[key])
async def _get_from_database(self, key):
- """Get updates from databases for a given key."""
+ """Get updates from databases for a given key.
+
+ Gets the first key value found from the database(s).
+
+ Args:
+ key (str): Key to retrieve data from a database.
+
+ Returns:
+ The first key value (data object) found from the database(s).
+ Or `None` when no database is defined or no value is found.
+
+ Todo:
+ * Handle multiple databases
+
+ """
if not self.databases:
return None
results = []
for database in self.databases:
results.append(await database.get(key))
- # TODO: Handle multiple databases
return results[0]
async def _put_to_database(self, key, data):
- """Put updates into databases for a given key."""
+ """Put updates into databases for a given key.
+
+ Stores the key and value on each database defined.
+
+ Args:
+ key (str): Key for the data to store.
+ data (obj): Data object to store.
+
+ """
if self.databases:
for database in self.databases:
await database.put(key, data)
| {"golden_diff": "diff --git a/opsdroid/memory.py b/opsdroid/memory.py\n--- a/opsdroid/memory.py\n+++ b/opsdroid/memory.py\n@@ -7,15 +7,33 @@\n \n \n class Memory:\n- \"\"\"An object to store and persist data outside of opsdroid.\"\"\"\n+ \"\"\"A Memory object.\n+\n+ An object to obtain, store and persist data outside of opsdroid.\n+\n+ Attributes:\n+ databases (:obj:`list` of :obj:`Database`): List of database objects.\n+ memory (:obj:`dict`): In-memory dictionary to store data.\n+\n+ \"\"\"\n \n def __init__(self):\n- \"\"\"Create memory dictionary.\"\"\"\n+ \"\"\"Create object with minimum properties.\"\"\"\n self.memory = {}\n self.databases = []\n \n async def get(self, key):\n- \"\"\"Get data object for a given key.\"\"\"\n+ \"\"\"Get data object for a given key.\n+\n+ Gets the key value found in-memory or from the database(s).\n+\n+ Args:\n+ key (str): Key to retrieve data.\n+\n+ Returns:\n+ A data object for the given key, otherwise `None`.\n+\n+ \"\"\"\n _LOGGER.debug(_(\"Getting %s from memory.\"), key)\n database_result = await self._get_from_database(key)\n if database_result is not None:\n@@ -26,24 +44,53 @@\n return None\n \n async def put(self, key, data):\n- \"\"\"Put a data object to a given key.\"\"\"\n+ \"\"\"Put a data object to a given key.\n+\n+ Stores the key and value in memory and the database(s).\n+\n+ Args:\n+ key (str): Key for the data to store.\n+ data (obj): Data object to store.\n+\n+ \"\"\"\n _LOGGER.debug(_(\"Putting %s to memory\"), key)\n self.memory[key] = data\n await self._put_to_database(key, self.memory[key])\n \n async def _get_from_database(self, key):\n- \"\"\"Get updates from databases for a given key.\"\"\"\n+ \"\"\"Get updates from databases for a given key.\n+\n+ Gets the first key value found from the database(s).\n+\n+ Args:\n+ key (str): Key to retrieve data from a database.\n+\n+ Returns:\n+ The first key value (data object) found from the database(s).\n+ Or `None` when no database is defined or no value is found.\n+\n+ Todo:\n+ * Handle multiple databases\n+\n+ \"\"\"\n if not self.databases:\n return None\n \n results = []\n for database in self.databases:\n results.append(await database.get(key))\n- # TODO: Handle multiple databases\n return results[0]\n \n async def _put_to_database(self, key, data):\n- \"\"\"Put updates into databases for a given key.\"\"\"\n+ \"\"\"Put updates into databases for a given key.\n+\n+ Stores the key and value on each database defined.\n+\n+ Args:\n+ key (str): Key for the data to store.\n+ data (obj): Data object to store.\n+\n+ \"\"\"\n if self.databases:\n for database in self.databases:\n await database.put(key, data)\n", "issue": "Add Google Style Docstrings\nWe should implement Google Style Docstrings to every function, method, class in opsdroid. 
This style will support existing documentation and will help in the future by generating documentation automatically.\r\n\r\nThis consists in a bit of effort so this issue can be worked by more than one contributor, just make sure that everyone knows what you are working on in order to avoid other contributors spending time on something that you are working on.\r\n\r\nIf you are unfamiliar with the Google Style Docstrings I'd recommend that you check these resources:\r\n\r\n - [Sphix 1.8.0+ - Google Style Docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html)\r\n\r\n\r\n\r\nDocstrings that need to be updated:\r\n\r\n- main.py\r\n - [x] configure_lang\r\n - [ ] configure_log\r\n - [ ] get_logging_level\r\n - [ ] check_dependencies\r\n - [ ] print_version\r\n - [ ] print_example_config\r\n - [ ] edit_files\r\n - [x] welcome_message\r\n- ~~helper.py~~\r\n - [x] get_opsdroid\r\n - [x] del_rw\r\n - [x] move_config_to_appdir\r\n- memory.py\r\n - [x] Memory\r\n - [x] get\r\n - [x] put\r\n - [x] _get_from_database\r\n - [x] _put_to_database\r\n- message.py\r\n - [x] Message\r\n - [x] __init__\r\n - [x] _thinking_delay\r\n - [x] _typing delay\r\n - [x] respond\r\n - [x] react\r\n- web.py\r\n - [ ] Web\r\n - [x] get_port\r\n - [x] get_host\r\n - [x] get_ssl_context\r\n - [ ] start\r\n - [ ] build_response\r\n - [ ] web_index_handler\r\n - [ ] web_stats_handler\r\n- matchers.py\r\n - [ ] match_regex\r\n - [ ] match_apiai_action\r\n - [ ] match_apiai_intent\r\n - [ ] match_dialogflow_action\r\n - [ ] match_dialogflow_intent\r\n - [ ] match_luisai_intent\r\n - [ ] match_rasanlu\r\n - [ ] match_recastai\r\n - [ ] match_witai\r\n - [ ] match_crontab\r\n - [ ] match_webhook\r\n - [ ] match_always\r\n- core.py\r\n - [ ] OpsDroid\r\n - [ ] default_connector\r\n - [ ] exit\r\n - [ ] critical\r\n - [ ] call_stop\r\n - [ ] disconnect\r\n - [ ] stop\r\n - [ ] load\r\n - [ ] start_loop\r\n - [x] setup_skills\r\n - [ ] train_parsers\r\n - [ ] start_connector_tasks\r\n - [ ] start_database\r\n - [ ] run_skill\r\n - [ ] get_ranked_skills\r\n - [ ] parse\r\n- loader.py\r\n - [ ] Loader\r\n - [x] import_module_from_spec\r\n - [x] import_module\r\n - [x] check_cache\r\n - [x] build_module_import_path\r\n - [x] build_module_install_path\r\n - [x] git_clone\r\n - [x] git_pull\r\n - [x] pip_install_deps\r\n - [x] create_default_config\r\n - [x] load_config_file\r\n - [ ] envvar_constructor\r\n - [ ] include_constructor\r\n - [x] setup_modules_directory\r\n - [x] load_modules_from_config\r\n - [x] _load_modules\r\n - [x] _install_module\r\n - [x] _update_module\r\n - [ ] _install_git_module\r\n - [x] _install_local_module\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n---- ORIGINAL POST ---- \r\nI've been wondering about this for a while now and I would like to know if we should replace/update all the docstrings in opsdroid with the Google Style doc strings. 
\r\n\r\nI think this could help new and old contributors to contribute and commit to opsdroid since the Google Style docstrings give more information about every method/function and specifies clearly what sort of input the function/method expects, what will it return and what will be raised (if applicable).\r\n\r\nThe downsize of this style is that the length of every .py file will increase due to the doc strings, but since most IDE's allow you to hide those fields it shouldn't be too bad.\r\n\r\nHere is a good example of Google Style Doc strings: [Sphix 1.8.0+ - Google Style Docstrings](http://www.sphinx-doc.org/en/master/ext/example_google.html)\r\n\r\nI would like to know what you all think about this idea and if its worth spending time on it.\n", "before_files": [{"content": "\"\"\"Class for persisting information in opsdroid.\"\"\"\n\nimport logging\n\n\n_LOGGER = logging.getLogger(__name__)\n\n\nclass Memory:\n \"\"\"An object to store and persist data outside of opsdroid.\"\"\"\n\n def __init__(self):\n \"\"\"Create memory dictionary.\"\"\"\n self.memory = {}\n self.databases = []\n\n async def get(self, key):\n \"\"\"Get data object for a given key.\"\"\"\n _LOGGER.debug(_(\"Getting %s from memory.\"), key)\n database_result = await self._get_from_database(key)\n if database_result is not None:\n self.memory[key] = database_result\n if key in self.memory:\n return self.memory[key]\n\n return None\n\n async def put(self, key, data):\n \"\"\"Put a data object to a given key.\"\"\"\n _LOGGER.debug(_(\"Putting %s to memory\"), key)\n self.memory[key] = data\n await self._put_to_database(key, self.memory[key])\n\n async def _get_from_database(self, key):\n \"\"\"Get updates from databases for a given key.\"\"\"\n if not self.databases:\n return None\n\n results = []\n for database in self.databases:\n results.append(await database.get(key))\n # TODO: Handle multiple databases\n return results[0]\n\n async def _put_to_database(self, key, data):\n \"\"\"Put updates into databases for a given key.\"\"\"\n if self.databases:\n for database in self.databases:\n await database.put(key, data)\n", "path": "opsdroid/memory.py"}]} | 1,983 | 701 |
gh_patches_debug_10081 | rasdani/github-patches | git_diff | qtile__qtile-1659 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
qtile-top crash 'Client' object has no attribute 'tracemalloc_dump'
<!--
Please do not ask general questions here! There are [community
contact](https://github.com/qtile/qtile#community) options for that.
-->
# Issue description
<!--
A brief discussion of what failed and how it failed. A description of
what you tried is helpful, i.e. "When I use lazy.kill() on a window I get
the following stack trace" instead of "Closing windows doesn't work".
-->
Tried to run `qtile-top` and it crashed. I'm not sure what should be done before running that script - there is no documentation.
Tried
```
arnas@zordmachine ~ qtile-cmd -o cmd -f tracemalloc_dump
[False, 'Trace not started']
arnas@zordmachine ~ qtile-cmd -o cmd -f tracemalloc_toggle
arnas@zordmachine ~ qtile-cmd -o cmd -f tracemalloc_dump
[True, '/home/arnas/.cache/qtile/qtile_tracemalloc.dump']
```
but this didn't help
# Qtile version
<!--
Please include the exact commit hash of the version of Qtile that failed.
-->
5bd8e57f78b72ed9de4d10bf51af6db51561d794 (latest git version)
# Stack traces
<!--
Please attach any stack traces found in:
* `~/.xsession-errors`
* `~/.local/share/qtile/qtile.log`
-->
Traceback (most recent call last):
File "/usr/bin/qtile-top", line 11, in <module>
load_entry_point('qtile==0.15.0', 'console_scripts', 'qtile-top')()
File "/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py", line 164, in main
curses.wrapper(get_stats, client, limit=lines, seconds=seconds,
File "/usr/lib/python3.8/curses/__init__.py", line 105, in wrapper
return func(stdscr, *args, **kwds)
File "/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py", line 91, in get_stats
snapshot = get_trace(client, force_start)
File "/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py", line 63, in get_trace
(started, path) = client.tracemalloc_dump()
AttributeError: 'Client' object has no attribute 'tracemalloc_dump'
# Configuration
<!--
Please include a link or attach your configuration to the issue.
-->
https://github.com/zordsdavini/qtile-config
Arch
</issue>
<code>
[start of libqtile/scripts/qtile_top.py]
1 # Copyright (c) 2015, Roger Duran
2 #
3 # Permission is hereby granted, free of charge, to any person obtaining a copy
4 # of this software and associated documentation files (the "Software"), to deal
5 # in the Software without restriction, including without limitation the rights
6 # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 # copies of the Software, and to permit persons to whom the Software is
8 # furnished to do so, subject to the following conditions:
9 #
10 # The above copyright notice and this permission notice shall be included in
11 # all copies or substantial portions of the Software.
12 #
13 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 # SOFTWARE.
20
21 """
22 Command-line top like for qtile
23 """
24
25 import argparse
26 import curses
27 import linecache
28 import os
29 import time
30 import tracemalloc
31 from tracemalloc import Snapshot
32
33 from libqtile import ipc
34
35
36 class TraceNotStarted(Exception):
37 pass
38
39
40 class TraceCantStart(Exception):
41 pass
42
43
44 def parse_args():
45 parser = argparse.ArgumentParser(description="Top like for qtile")
46 parser.add_argument('-l', '--lines', type=int, dest="lines", default=10,
47 help='Number of lines.')
48 parser.add_argument('-r', '--raw', dest="raw", action="store_true",
49 default=False, help='Output raw without curses')
50 parser.add_argument('-t', '--time', type=float, dest="seconds",
51 default=1.5, help='Number of seconds to refresh')
52 parser.add_argument('--force-start', dest="force_start",
53 action="store_true", default=False,
54 help='Force start tracemalloc on qtile')
55 parser.add_argument('-s', '--socket', type=str, dest="socket",
56 help='Use specified communication socket.')
57
58 opts = parser.parse_args()
59 return opts
60
61
62 def get_trace(client, force_start):
63 (started, path) = client.tracemalloc_dump()
64 if force_start and not started:
65 client.tracemalloc_toggle()
66 (started, path) = client.tracemalloc_dump()
67 if not started:
68 raise TraceCantStart
69 elif not started:
70 raise TraceNotStarted
71
72 return Snapshot.load(path)
73
74
75 def filter_snapshot(snapshot):
76 return snapshot.filter_traces((
77 tracemalloc.Filter(False, "<frozen importlib._bootstrap>"),
78 tracemalloc.Filter(False, "<unknown>"),
79 ))
80
81
82 def get_stats(scr, client, group_by='lineno', limit=10, seconds=1.5,
83 force_start=False):
84 (max_y, max_x) = scr.getmaxyx()
85 curses.init_pair(1, curses.COLOR_GREEN, curses.COLOR_BLACK)
86 while True:
87 scr.addstr(0, 0, "Qtile - Top {} lines".format(limit))
88 scr.addstr(1, 0, '{0:<3s} {1:<40s} {2:<30s} {3:<16s}'.format('#', 'Line', 'Memory', ' ' * (max_x - 71)),
89 curses.A_BOLD | curses.A_REVERSE)
90
91 snapshot = get_trace(client, force_start)
92 snapshot = filter_snapshot(snapshot)
93 top_stats = snapshot.statistics(group_by)
94 cnt = 1
95 for index, stat in enumerate(top_stats[:limit], 1):
96 frame = stat.traceback[0]
97 # replace "/path/to/module/file.py" with "module/file.py"
98 filename = os.sep.join(frame.filename.split(os.sep)[-2:])
99 code = ""
100 line = linecache.getline(frame.filename, frame.lineno).strip()
101 if line:
102 code = line
103 mem = "{:.1f} KiB".format(stat.size / 1024.0)
104 filename = "{}:{}".format(filename, frame.lineno)
105 scr.addstr(cnt + 1, 0, '{:<3} {:<40} {:<30}'.format(index, filename, mem))
106 scr.addstr(cnt + 2, 4, code, curses.color_pair(1))
107 cnt += 2
108
109 other = top_stats[limit:]
110 cnt += 2
111 if other:
112 size = sum(stat.size for stat in other)
113 other_size = ("{:d} other: {:.1f} KiB".format(len(other), size / 1024.0))
114 scr.addstr(cnt, 0, other_size, curses.A_BOLD)
115 cnt += 1
116
117 total = sum(stat.size for stat in top_stats)
118 total_size = "Total allocated size: {0:.1f} KiB".format(total / 1024.0)
119 scr.addstr(cnt, 0, total_size, curses.A_BOLD)
120
121 scr.move(max_y - 2, max_y - 2)
122 scr.refresh()
123 time.sleep(seconds)
124 scr.erase()
125
126
127 def raw_stats(client, group_by='lineno', limit=10, force_start=False):
128 snapshot = get_trace(client, force_start)
129 snapshot = filter_snapshot(snapshot)
130 top_stats = snapshot.statistics(group_by)
131
132 print("Qtile - Top {} lines".format(limit))
133 for index, stat in enumerate(top_stats[:limit], 1):
134 frame = stat.traceback[0]
135 # replace "/path/to/module/file.py" with "module/file.py"
136 filename = os.sep.join(frame.filename.split(os.sep)[-2:])
137 print("#{}: {}:{}: {:.1f} KiB"
138 .format(index, filename, frame.lineno, stat.size / 1024.0))
139 line = linecache.getline(frame.filename, frame.lineno).strip()
140 if line:
141 print(' {}'.format(line))
142
143 other = top_stats[limit:]
144 if other:
145 size = sum(stat.size for stat in other)
146 print("{:d} other: {:.1f} KiB".format(len(other), size / 1024.0))
147 total = sum(stat.size for stat in top_stats)
148 print("Total allocated size: {0:.1f} KiB".format(total / 1024.0))
149
150
151 def main():
152 opts = parse_args()
153 lines = opts.lines
154 seconds = opts.seconds
155 force_start = opts.force_start
156 if opts.socket is None:
157 socket = ipc.find_sockfile()
158 else:
159 socket = opts.socket
160 client = ipc.Client(socket)
161
162 try:
163 if not opts.raw:
164 curses.wrapper(get_stats, client, limit=lines, seconds=seconds,
165 force_start=force_start)
166 else:
167 raw_stats(client, limit=lines, force_start=force_start)
168 except TraceNotStarted:
169 print("tracemalloc not started on qtile, start by setting "
170 "PYTHONTRACEMALLOC=1 before starting qtile")
171 print("or force start tracemalloc now, but you'll lose early traces")
172 exit(1)
173 except TraceCantStart:
174 print("Can't start tracemalloc on qtile, check the logs")
175 except KeyboardInterrupt:
176 exit(-1)
177
[end of libqtile/scripts/qtile_top.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/libqtile/scripts/qtile_top.py b/libqtile/scripts/qtile_top.py
--- a/libqtile/scripts/qtile_top.py
+++ b/libqtile/scripts/qtile_top.py
@@ -30,7 +30,7 @@
import tracemalloc
from tracemalloc import Snapshot
-from libqtile import ipc
+from libqtile import command_client, command_interface, ipc
class TraceNotStarted(Exception):
@@ -158,6 +158,8 @@
else:
socket = opts.socket
client = ipc.Client(socket)
+ client = command_interface.IPCCommandInterface(client)
+ client = command_client.InteractiveCommandClient(client)
try:
if not opts.raw:
| {"golden_diff": "diff --git a/libqtile/scripts/qtile_top.py b/libqtile/scripts/qtile_top.py\n--- a/libqtile/scripts/qtile_top.py\n+++ b/libqtile/scripts/qtile_top.py\n@@ -30,7 +30,7 @@\n import tracemalloc\n from tracemalloc import Snapshot\n \n-from libqtile import ipc\n+from libqtile import command_client, command_interface, ipc\n \n \n class TraceNotStarted(Exception):\n@@ -158,6 +158,8 @@\n else:\n socket = opts.socket\n client = ipc.Client(socket)\n+ client = command_interface.IPCCommandInterface(client)\n+ client = command_client.InteractiveCommandClient(client)\n \n try:\n if not opts.raw:\n", "issue": "qtile-top crash 'Client' object has no attribute 'tracemalloc_dump'\n<!--\r\nPlease do not ask general questions here! There are [community\r\ncontact](https://github.com/qtile/qtile#community) options for that.\r\n-->\r\n\r\n# Issue description\r\n\r\n<!--\r\nA brief discussion of what failed and how it failed. A description of\r\nwhat you tried is helpful, i.e. \"When I use lazy.kill() on a window I get\r\nthe following stack trace\" instead of \"Closing windows doesn't work\".\r\n-->\r\nTried to run `qtile-top` and it crashed. I'm not sure should be done before that script - no documentation.\r\nTried \r\n```\r\n arnas@zordmachine \ue0b0 ~ \ue0b0 qtile-cmd -o cmd -f tracemalloc_dump\r\n[False, 'Trace not started']\r\n arnas@zordmachine \ue0b0 ~ \ue0b0 qtile-cmd -o cmd -f tracemalloc_toggle\r\n arnas@zordmachine \ue0b0 ~ \ue0b0 qtile-cmd -o cmd -f tracemalloc_dump\r\n[True, '/home/arnas/.cache/qtile/qtile_tracemalloc.dump']\r\n```\r\nbut this didn't helped\r\n\r\n# Qtile version\r\n\r\n<!--\r\nPlease include the exact commit hash of the version of Qtile that failed.\r\n-->\r\n\r\n5bd8e57f78b72ed9de4d10bf51af6db51561d794 (latest git version)\r\n\r\n# Stack traces\r\n\r\n<!--\r\nPlease attach any stack traces found in:\r\n\r\n* `~/.xsession-errors`\r\n* `~/.local/share/qtile/qtile.log`\r\n-->\r\n\r\nTraceback (most recent call last):\r\n File \"/usr/bin/qtile-top\", line 11, in <module>\r\n load_entry_point('qtile==0.15.0', 'console_scripts', 'qtile-top')()\r\n File \"/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py\", line 164, in main\r\n curses.wrapper(get_stats, client, limit=lines, seconds=seconds,\r\n File \"/usr/lib/python3.8/curses/__init__.py\", line 105, in wrapper\r\n return func(stdscr, *args, **kwds)\r\n File \"/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py\", line 91, in get_stats\r\n snapshot = get_trace(client, force_start)\r\n File \"/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py\", line 63, in get_trace\r\n (started, path) = client.tracemalloc_dump()\r\nAttributeError: 'Client' object has no attribute 'tracemalloc_dump'\r\n\r\n\r\n# Configuration\r\n\r\n<!--\r\nPlease include a link or attach your configuration to the issue.\r\n-->\r\nhttps://github.com/zordsdavini/qtile-config\r\nArch\nqtile-top crash 'Client' object has no attribute 'tracemalloc_dump'\n<!--\r\nPlease do not ask general questions here! There are [community\r\ncontact](https://github.com/qtile/qtile#community) options for that.\r\n-->\r\n\r\n# Issue description\r\n\r\n<!--\r\nA brief discussion of what failed and how it failed. A description of\r\nwhat you tried is helpful, i.e. \"When I use lazy.kill() on a window I get\r\nthe following stack trace\" instead of \"Closing windows doesn't work\".\r\n-->\r\nTried to run `qtile-top` and it crashed. 
I'm not sure should be done before that script - no documentation.\r\nTried \r\n```\r\n arnas@zordmachine \ue0b0 ~ \ue0b0 qtile-cmd -o cmd -f tracemalloc_dump\r\n[False, 'Trace not started']\r\n arnas@zordmachine \ue0b0 ~ \ue0b0 qtile-cmd -o cmd -f tracemalloc_toggle\r\n arnas@zordmachine \ue0b0 ~ \ue0b0 qtile-cmd -o cmd -f tracemalloc_dump\r\n[True, '/home/arnas/.cache/qtile/qtile_tracemalloc.dump']\r\n```\r\nbut this didn't helped\r\n\r\n# Qtile version\r\n\r\n<!--\r\nPlease include the exact commit hash of the version of Qtile that failed.\r\n-->\r\n\r\n5bd8e57f78b72ed9de4d10bf51af6db51561d794 (latest git version)\r\n\r\n# Stack traces\r\n\r\n<!--\r\nPlease attach any stack traces found in:\r\n\r\n* `~/.xsession-errors`\r\n* `~/.local/share/qtile/qtile.log`\r\n-->\r\n\r\nTraceback (most recent call last):\r\n File \"/usr/bin/qtile-top\", line 11, in <module>\r\n load_entry_point('qtile==0.15.0', 'console_scripts', 'qtile-top')()\r\n File \"/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py\", line 164, in main\r\n curses.wrapper(get_stats, client, limit=lines, seconds=seconds,\r\n File \"/usr/lib/python3.8/curses/__init__.py\", line 105, in wrapper\r\n return func(stdscr, *args, **kwds)\r\n File \"/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py\", line 91, in get_stats\r\n snapshot = get_trace(client, force_start)\r\n File \"/usr/lib/python3.8/site-packages/libqtile/scripts/qtile_top.py\", line 63, in get_trace\r\n (started, path) = client.tracemalloc_dump()\r\nAttributeError: 'Client' object has no attribute 'tracemalloc_dump'\r\n\r\n\r\n# Configuration\r\n\r\n<!--\r\nPlease include a link or attach your configuration to the issue.\r\n-->\r\nhttps://github.com/zordsdavini/qtile-config\r\nArch\n", "before_files": [{"content": "# Copyright (c) 2015, Roger Duran\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to deal\n# in the Software without restriction, including without limitation the rights\n# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n# copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n# SOFTWARE.\n\n\"\"\"\n Command-line top like for qtile\n\"\"\"\n\nimport argparse\nimport curses\nimport linecache\nimport os\nimport time\nimport tracemalloc\nfrom tracemalloc import Snapshot\n\nfrom libqtile import ipc\n\n\nclass TraceNotStarted(Exception):\n pass\n\n\nclass TraceCantStart(Exception):\n pass\n\n\ndef parse_args():\n parser = argparse.ArgumentParser(description=\"Top like for qtile\")\n parser.add_argument('-l', '--lines', type=int, dest=\"lines\", default=10,\n help='Number of lines.')\n parser.add_argument('-r', '--raw', dest=\"raw\", action=\"store_true\",\n default=False, help='Output raw without curses')\n parser.add_argument('-t', '--time', type=float, dest=\"seconds\",\n default=1.5, help='Number of seconds to refresh')\n parser.add_argument('--force-start', dest=\"force_start\",\n action=\"store_true\", default=False,\n help='Force start tracemalloc on qtile')\n parser.add_argument('-s', '--socket', type=str, dest=\"socket\",\n help='Use specified communication socket.')\n\n opts = parser.parse_args()\n return opts\n\n\ndef get_trace(client, force_start):\n (started, path) = client.tracemalloc_dump()\n if force_start and not started:\n client.tracemalloc_toggle()\n (started, path) = client.tracemalloc_dump()\n if not started:\n raise TraceCantStart\n elif not started:\n raise TraceNotStarted\n\n return Snapshot.load(path)\n\n\ndef filter_snapshot(snapshot):\n return snapshot.filter_traces((\n tracemalloc.Filter(False, \"<frozen importlib._bootstrap>\"),\n tracemalloc.Filter(False, \"<unknown>\"),\n ))\n\n\ndef get_stats(scr, client, group_by='lineno', limit=10, seconds=1.5,\n force_start=False):\n (max_y, max_x) = scr.getmaxyx()\n curses.init_pair(1, curses.COLOR_GREEN, curses.COLOR_BLACK)\n while True:\n scr.addstr(0, 0, \"Qtile - Top {} lines\".format(limit))\n scr.addstr(1, 0, '{0:<3s} {1:<40s} {2:<30s} {3:<16s}'.format('#', 'Line', 'Memory', ' ' * (max_x - 71)),\n curses.A_BOLD | curses.A_REVERSE)\n\n snapshot = get_trace(client, force_start)\n snapshot = filter_snapshot(snapshot)\n top_stats = snapshot.statistics(group_by)\n cnt = 1\n for index, stat in enumerate(top_stats[:limit], 1):\n frame = stat.traceback[0]\n # replace \"/path/to/module/file.py\" with \"module/file.py\"\n filename = os.sep.join(frame.filename.split(os.sep)[-2:])\n code = \"\"\n line = linecache.getline(frame.filename, frame.lineno).strip()\n if line:\n code = line\n mem = \"{:.1f} KiB\".format(stat.size / 1024.0)\n filename = \"{}:{}\".format(filename, frame.lineno)\n scr.addstr(cnt + 1, 0, '{:<3} {:<40} {:<30}'.format(index, filename, mem))\n scr.addstr(cnt + 2, 4, code, curses.color_pair(1))\n cnt += 2\n\n other = top_stats[limit:]\n cnt += 2\n if other:\n size = sum(stat.size for stat in other)\n other_size = (\"{:d} other: {:.1f} KiB\".format(len(other), size / 1024.0))\n scr.addstr(cnt, 0, other_size, curses.A_BOLD)\n cnt += 1\n\n total = sum(stat.size for stat in top_stats)\n total_size = \"Total allocated size: {0:.1f} KiB\".format(total / 1024.0)\n scr.addstr(cnt, 0, total_size, curses.A_BOLD)\n\n scr.move(max_y - 2, max_y - 2)\n scr.refresh()\n time.sleep(seconds)\n scr.erase()\n\n\ndef raw_stats(client, group_by='lineno', limit=10, force_start=False):\n snapshot = get_trace(client, force_start)\n snapshot = 
filter_snapshot(snapshot)\n top_stats = snapshot.statistics(group_by)\n\n print(\"Qtile - Top {} lines\".format(limit))\n for index, stat in enumerate(top_stats[:limit], 1):\n frame = stat.traceback[0]\n # replace \"/path/to/module/file.py\" with \"module/file.py\"\n filename = os.sep.join(frame.filename.split(os.sep)[-2:])\n print(\"#{}: {}:{}: {:.1f} KiB\"\n .format(index, filename, frame.lineno, stat.size / 1024.0))\n line = linecache.getline(frame.filename, frame.lineno).strip()\n if line:\n print(' {}'.format(line))\n\n other = top_stats[limit:]\n if other:\n size = sum(stat.size for stat in other)\n print(\"{:d} other: {:.1f} KiB\".format(len(other), size / 1024.0))\n total = sum(stat.size for stat in top_stats)\n print(\"Total allocated size: {0:.1f} KiB\".format(total / 1024.0))\n\n\ndef main():\n opts = parse_args()\n lines = opts.lines\n seconds = opts.seconds\n force_start = opts.force_start\n if opts.socket is None:\n socket = ipc.find_sockfile()\n else:\n socket = opts.socket\n client = ipc.Client(socket)\n\n try:\n if not opts.raw:\n curses.wrapper(get_stats, client, limit=lines, seconds=seconds,\n force_start=force_start)\n else:\n raw_stats(client, limit=lines, force_start=force_start)\n except TraceNotStarted:\n print(\"tracemalloc not started on qtile, start by setting \"\n \"PYTHONTRACEMALLOC=1 before starting qtile\")\n print(\"or force start tracemalloc now, but you'll lose early traces\")\n exit(1)\n except TraceCantStart:\n print(\"Can't start tracemalloc on qtile, check the logs\")\n except KeyboardInterrupt:\n exit(-1)\n", "path": "libqtile/scripts/qtile_top.py"}]} | 3,869 | 162 |
gh_patches_debug_62336 | rasdani/github-patches | git_diff | googleapis__google-api-python-client-273 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Error importing discovery from the client API
Hello, I'm getting a "cannot import name" exception when running a simple script. I've tried running the script in two ways:
```
from apiclient import discovery
import json
from pprint import pprint
from httplib2 import Http
from oauth2client.service_account import ServiceAccountCredentials
scopes = ['https://www.googleapis.com/auth/fusiontables']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
'service-account.json', scopes)
ft = discovery.build('fusiontables', 'v2', credentials=credentials)
pprint(ft)
```
Which gives:
> Traceback (most recent call last):
> File "apiTest.py", line 1, in <module>
> from apiclient import discovery
> File "build/bdist.linux-x86_64/egg/apiclient/**init**.py", line 18, in <module>
> File "build/bdist.linux-x86_64/egg/googleapiclient/channel.py", line 64, in <module>
> ImportError: cannot import name util
And:
```
from apiclient.discovery import build
import json
from pprint import pprint
from httplib2 import Http
from oauth2client.service_account import ServiceAccountCredentials
scopes = ['https://www.googleapis.com/auth/fusiontables']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
'service-account.json', scopes)
ft = discovery.build('fusiontables', 'v2', credentials=credentials)
pprint(ft)
#print response
```
Which yields the same exception:
> python apiTest.py
> Traceback (most recent call last):
> File "apiTest.py", line 1, in <module>
> from apiclient.discovery import build
> File "build/bdist.linux-x86_64/egg/apiclient/**init**.py", line 18, in <module>
> File "build/bdist.linux-x86_64/egg/googleapiclient/channel.py", line 64, in <module>
> ImportError: cannot import name util
I'm not sure if I'm missing a prereq or what, but I'm not able to find a way around this.
</issue>
<code>
[start of googleapiclient/channel.py]
1 """Channel notifications support.
2
3 Classes and functions to support channel subscriptions and notifications
4 on those channels.
5
6 Notes:
7 - This code is based on experimental APIs and is subject to change.
8 - Notification does not do deduplication of notification ids, that's up to
9 the receiver.
10 - Storing the Channel between calls is up to the caller.
11
12
13 Example setting up a channel:
14
15 # Create a new channel that gets notifications via webhook.
16 channel = new_webhook_channel("https://example.com/my_web_hook")
17
18 # Store the channel, keyed by 'channel.id'. Store it before calling the
19 # watch method because notifications may start arriving before the watch
20 # method returns.
21 ...
22
23 resp = service.objects().watchAll(
24 bucket="some_bucket_id", body=channel.body()).execute()
25 channel.update(resp)
26
27 # Store the channel, keyed by 'channel.id'. Store it after being updated
28 # since the resource_id value will now be correct, and that's needed to
29 # stop a subscription.
30 ...
31
32
33 An example Webhook implementation using webapp2. Note that webapp2 puts
34 headers in a case insensitive dictionary, as headers aren't guaranteed to
35 always be upper case.
36
37 id = self.request.headers[X_GOOG_CHANNEL_ID]
38
39 # Retrieve the channel by id.
40 channel = ...
41
42 # Parse notification from the headers, including validating the id.
43 n = notification_from_headers(channel, self.request.headers)
44
45 # Do app specific stuff with the notification here.
46 if n.resource_state == 'sync':
47 # Code to handle sync state.
48 elif n.resource_state == 'exists':
49 # Code to handle the exists state.
50 elif n.resource_state == 'not_exists':
51 # Code to handle the not exists state.
52
53
54 Example of unsubscribing.
55
56 service.channels().stop(channel.body())
57 """
58 from __future__ import absolute_import
59
60 import datetime
61 import uuid
62
63 from googleapiclient import errors
64 from oauth2client import util
65 import six
66
67 # Oauth2client < 3 has the positional helper in 'util', >= 3 has it
68 # in '_helpers'.
69 try:
70 from oauth2client import util
71 except ImportError:
72 from oauth2client import _helpers as util
73
74
75 # The unix time epoch starts at midnight 1970.
76 EPOCH = datetime.datetime.utcfromtimestamp(0)
77
78 # Map the names of the parameters in the JSON channel description to
79 # the parameter names we use in the Channel class.
80 CHANNEL_PARAMS = {
81 'address': 'address',
82 'id': 'id',
83 'expiration': 'expiration',
84 'params': 'params',
85 'resourceId': 'resource_id',
86 'resourceUri': 'resource_uri',
87 'type': 'type',
88 'token': 'token',
89 }
90
91 X_GOOG_CHANNEL_ID = 'X-GOOG-CHANNEL-ID'
92 X_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'
93 X_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'
94 X_GOOG_RESOURCE_URI = 'X-GOOG-RESOURCE-URI'
95 X_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'
96
97
98 def _upper_header_keys(headers):
99 new_headers = {}
100 for k, v in six.iteritems(headers):
101 new_headers[k.upper()] = v
102 return new_headers
103
104
105 class Notification(object):
106 """A Notification from a Channel.
107
108 Notifications are not usually constructed directly, but are returned
109 from functions like notification_from_headers().
110
111 Attributes:
112 message_number: int, The unique id number of this notification.
113 state: str, The state of the resource being monitored.
114 uri: str, The address of the resource being monitored.
115 resource_id: str, The unique identifier of the version of the resource at
116 this event.
117 """
118 @util.positional(5)
119 def __init__(self, message_number, state, resource_uri, resource_id):
120 """Notification constructor.
121
122 Args:
123 message_number: int, The unique id number of this notification.
124 state: str, The state of the resource being monitored. Can be one
125 of "exists", "not_exists", or "sync".
126 resource_uri: str, The address of the resource being monitored.
127 resource_id: str, The identifier of the watched resource.
128 """
129 self.message_number = message_number
130 self.state = state
131 self.resource_uri = resource_uri
132 self.resource_id = resource_id
133
134
135 class Channel(object):
136 """A Channel for notifications.
137
138 Usually not constructed directly, instead it is returned from helper
139 functions like new_webhook_channel().
140
141 Attributes:
142 type: str, The type of delivery mechanism used by this channel. For
143 example, 'web_hook'.
144 id: str, A UUID for the channel.
145 token: str, An arbitrary string associated with the channel that
146 is delivered to the target address with each event delivered
147 over this channel.
148 address: str, The address of the receiving entity where events are
149 delivered. Specific to the channel type.
150 expiration: int, The time, in milliseconds from the epoch, when this
151 channel will expire.
152 params: dict, A dictionary of string to string, with additional parameters
153 controlling delivery channel behavior.
154 resource_id: str, An opaque id that identifies the resource that is
155 being watched. Stable across different API versions.
156 resource_uri: str, The canonicalized ID of the watched resource.
157 """
158
159 @util.positional(5)
160 def __init__(self, type, id, token, address, expiration=None,
161 params=None, resource_id="", resource_uri=""):
162 """Create a new Channel.
163
164 In user code, this Channel constructor will not typically be called
165 manually since there are functions for creating channels for each specific
166 type with a more customized set of arguments to pass.
167
168 Args:
169 type: str, The type of delivery mechanism used by this channel. For
170 example, 'web_hook'.
171 id: str, A UUID for the channel.
172 token: str, An arbitrary string associated with the channel that
173 is delivered to the target address with each event delivered
174 over this channel.
175 address: str, The address of the receiving entity where events are
176 delivered. Specific to the channel type.
177 expiration: int, The time, in milliseconds from the epoch, when this
178 channel will expire.
179 params: dict, A dictionary of string to string, with additional parameters
180 controlling delivery channel behavior.
181 resource_id: str, An opaque id that identifies the resource that is
182 being watched. Stable across different API versions.
183 resource_uri: str, The canonicalized ID of the watched resource.
184 """
185 self.type = type
186 self.id = id
187 self.token = token
188 self.address = address
189 self.expiration = expiration
190 self.params = params
191 self.resource_id = resource_id
192 self.resource_uri = resource_uri
193
194 def body(self):
195 """Build a body from the Channel.
196
197 Constructs a dictionary that's appropriate for passing into watch()
198 methods as the value of body argument.
199
200 Returns:
201 A dictionary representation of the channel.
202 """
203 result = {
204 'id': self.id,
205 'token': self.token,
206 'type': self.type,
207 'address': self.address
208 }
209 if self.params:
210 result['params'] = self.params
211 if self.resource_id:
212 result['resourceId'] = self.resource_id
213 if self.resource_uri:
214 result['resourceUri'] = self.resource_uri
215 if self.expiration:
216 result['expiration'] = self.expiration
217
218 return result
219
220 def update(self, resp):
221 """Update a channel with information from the response of watch().
222
223 When a request is sent to watch() a resource, the response returned
224 from the watch() request is a dictionary with updated channel information,
225 such as the resource_id, which is needed when stopping a subscription.
226
227 Args:
228 resp: dict, The response from a watch() method.
229 """
230 for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
231 value = resp.get(json_name)
232 if value is not None:
233 setattr(self, param_name, value)
234
235
236 def notification_from_headers(channel, headers):
237 """Parse a notification from the webhook request headers, validate
238 the notification, and return a Notification object.
239
240 Args:
241 channel: Channel, The channel that the notification is associated with.
242 headers: dict, A dictionary like object that contains the request headers
243 from the webhook HTTP request.
244
245 Returns:
246 A Notification object.
247
248 Raises:
249 errors.InvalidNotificationError if the notification is invalid.
250 ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
251 """
252 headers = _upper_header_keys(headers)
253 channel_id = headers[X_GOOG_CHANNEL_ID]
254 if channel.id != channel_id:
255 raise errors.InvalidNotificationError(
256 'Channel id mismatch: %s != %s' % (channel.id, channel_id))
257 else:
258 message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
259 state = headers[X_GOOG_RESOURCE_STATE]
260 resource_uri = headers[X_GOOG_RESOURCE_URI]
261 resource_id = headers[X_GOOG_RESOURCE_ID]
262 return Notification(message_number, state, resource_uri, resource_id)
263
264
265 @util.positional(2)
266 def new_webhook_channel(url, token=None, expiration=None, params=None):
267 """Create a new webhook Channel.
268
269 Args:
270 url: str, URL to post notifications to.
271 token: str, An arbitrary string associated with the channel that
272 is delivered to the target address with each notification delivered
273 over this channel.
274 expiration: datetime.datetime, A time in the future when the channel
275 should expire. Can also be None if the subscription should use the
276 default expiration. Note that different services may have different
277 limits on how long a subscription lasts. Check the response from the
278 watch() method to see the value the service has set for an expiration
279 time.
280 params: dict, Extra parameters to pass on channel creation. Currently
281 not used for webhook channels.
282 """
283 expiration_ms = 0
284 if expiration:
285 delta = expiration - EPOCH
286 expiration_ms = delta.microseconds/1000 + (
287 delta.seconds + delta.days*24*3600)*1000
288 if expiration_ms < 0:
289 expiration_ms = 0
290
291 return Channel('web_hook', str(uuid.uuid4()),
292 token, url, expiration=expiration_ms,
293 params=params)
294
295
[end of googleapiclient/channel.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/googleapiclient/channel.py b/googleapiclient/channel.py
--- a/googleapiclient/channel.py
+++ b/googleapiclient/channel.py
@@ -61,7 +61,6 @@
import uuid
from googleapiclient import errors
-from oauth2client import util
import six
# Oauth2client < 3 has the positional helper in 'util', >= 3 has it
| {"golden_diff": "diff --git a/googleapiclient/channel.py b/googleapiclient/channel.py\n--- a/googleapiclient/channel.py\n+++ b/googleapiclient/channel.py\n@@ -61,7 +61,6 @@\n import uuid\n \n from googleapiclient import errors\n-from oauth2client import util\n import six\n \n # Oauth2client < 3 has the positional helper in 'util', >= 3 has it\n", "issue": "Error importing discovery from the client API\nHello, I'm getting a name exception when running a simple script. I've tried running the script in two ways:\n\n```\nfrom apiclient import discovery\nimport json\n\nfrom pprint import pprint\nfrom httplib2 import Http\n\nfrom oauth2client.service_account import ServiceAccountCredentials\n\nscopes = ['https://www.googleapis.com/auth/fusiontables']\n\ncredentials = ServiceAccountCredentials.from_json_keyfile_name(\n 'service-account.json', scopes)\n\nft = discovery.build('fusiontables', 'v2', credentials=credentials)\npprint(ft)\n```\n\nWhich gives:\n\n> Traceback (most recent call last):\n> File \"apiTest.py\", line 1, in <module>\n> from apiclient import discovery\n> File \"build/bdist.linux-x86_64/egg/apiclient/**init**.py\", line 18, in <module>\n> File \"build/bdist.linux-x86_64/egg/googleapiclient/channel.py\", line 64, in <module>\n> ImportError: cannot import name util\n\nAnd:\n\n```\nfrom apiclient.discovery import build\nimport json\n\nfrom pprint import pprint\nfrom httplib2 import Http\n\nfrom oauth2client.service_account import ServiceAccountCredentials\n\nscopes = ['https://www.googleapis.com/auth/fusiontables']\n\ncredentials = ServiceAccountCredentials.from_json_keyfile_name(\n 'service-account.json', scopes)\n\nft = discovery.build('fusiontables', 'v2', credentials=credentials)\npprint(ft)\n#print response\n```\n\nWhich yields the same exception:\n\n> python apiTest.py \n> Traceback (most recent call last):\n> File \"apiTest.py\", line 1, in <module>\n> from apiclient.discovery import build\n> File \"build/bdist.linux-x86_64/egg/apiclient/**init**.py\", line 18, in <module>\n> File \"build/bdist.linux-x86_64/egg/googleapiclient/channel.py\", line 64, in <module>\n> ImportError: cannot import name util\n\nI'm not sure if I'm missing a prereq or what, but I'm not able to find a way around this.\n\n", "before_files": [{"content": "\"\"\"Channel notifications support.\n\nClasses and functions to support channel subscriptions and notifications\non those channels.\n\nNotes:\n - This code is based on experimental APIs and is subject to change.\n - Notification does not do deduplication of notification ids, that's up to\n the receiver.\n - Storing the Channel between calls is up to the caller.\n\n\nExample setting up a channel:\n\n # Create a new channel that gets notifications via webhook.\n channel = new_webhook_channel(\"https://example.com/my_web_hook\")\n\n # Store the channel, keyed by 'channel.id'. Store it before calling the\n # watch method because notifications may start arriving before the watch\n # method returns.\n ...\n\n resp = service.objects().watchAll(\n bucket=\"some_bucket_id\", body=channel.body()).execute()\n channel.update(resp)\n\n # Store the channel, keyed by 'channel.id'. Store it after being updated\n # since the resource_id value will now be correct, and that's needed to\n # stop a subscription.\n ...\n\n\nAn example Webhook implementation using webapp2. 
Note that webapp2 puts\nheaders in a case insensitive dictionary, as headers aren't guaranteed to\nalways be upper case.\n\n id = self.request.headers[X_GOOG_CHANNEL_ID]\n\n # Retrieve the channel by id.\n channel = ...\n\n # Parse notification from the headers, including validating the id.\n n = notification_from_headers(channel, self.request.headers)\n\n # Do app specific stuff with the notification here.\n if n.resource_state == 'sync':\n # Code to handle sync state.\n elif n.resource_state == 'exists':\n # Code to handle the exists state.\n elif n.resource_state == 'not_exists':\n # Code to handle the not exists state.\n\n\nExample of unsubscribing.\n\n service.channels().stop(channel.body())\n\"\"\"\nfrom __future__ import absolute_import\n\nimport datetime\nimport uuid\n\nfrom googleapiclient import errors\nfrom oauth2client import util\nimport six\n\n# Oauth2client < 3 has the positional helper in 'util', >= 3 has it\n# in '_helpers'.\ntry:\n from oauth2client import util\nexcept ImportError:\n from oauth2client import _helpers as util\n\n\n# The unix time epoch starts at midnight 1970.\nEPOCH = datetime.datetime.utcfromtimestamp(0)\n\n# Map the names of the parameters in the JSON channel description to\n# the parameter names we use in the Channel class.\nCHANNEL_PARAMS = {\n 'address': 'address',\n 'id': 'id',\n 'expiration': 'expiration',\n 'params': 'params',\n 'resourceId': 'resource_id',\n 'resourceUri': 'resource_uri',\n 'type': 'type',\n 'token': 'token',\n }\n\nX_GOOG_CHANNEL_ID = 'X-GOOG-CHANNEL-ID'\nX_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'\nX_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'\nX_GOOG_RESOURCE_URI = 'X-GOOG-RESOURCE-URI'\nX_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'\n\n\ndef _upper_header_keys(headers):\n new_headers = {}\n for k, v in six.iteritems(headers):\n new_headers[k.upper()] = v\n return new_headers\n\n\nclass Notification(object):\n \"\"\"A Notification from a Channel.\n\n Notifications are not usually constructed directly, but are returned\n from functions like notification_from_headers().\n\n Attributes:\n message_number: int, The unique id number of this notification.\n state: str, The state of the resource being monitored.\n uri: str, The address of the resource being monitored.\n resource_id: str, The unique identifier of the version of the resource at\n this event.\n \"\"\"\n @util.positional(5)\n def __init__(self, message_number, state, resource_uri, resource_id):\n \"\"\"Notification constructor.\n\n Args:\n message_number: int, The unique id number of this notification.\n state: str, The state of the resource being monitored. Can be one\n of \"exists\", \"not_exists\", or \"sync\".\n resource_uri: str, The address of the resource being monitored.\n resource_id: str, The identifier of the watched resource.\n \"\"\"\n self.message_number = message_number\n self.state = state\n self.resource_uri = resource_uri\n self.resource_id = resource_id\n\n\nclass Channel(object):\n \"\"\"A Channel for notifications.\n\n Usually not constructed directly, instead it is returned from helper\n functions like new_webhook_channel().\n\n Attributes:\n type: str, The type of delivery mechanism used by this channel. For\n example, 'web_hook'.\n id: str, A UUID for the channel.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each event delivered\n over this channel.\n address: str, The address of the receiving entity where events are\n delivered. 
Specific to the channel type.\n expiration: int, The time, in milliseconds from the epoch, when this\n channel will expire.\n params: dict, A dictionary of string to string, with additional parameters\n controlling delivery channel behavior.\n resource_id: str, An opaque id that identifies the resource that is\n being watched. Stable across different API versions.\n resource_uri: str, The canonicalized ID of the watched resource.\n \"\"\"\n\n @util.positional(5)\n def __init__(self, type, id, token, address, expiration=None,\n params=None, resource_id=\"\", resource_uri=\"\"):\n \"\"\"Create a new Channel.\n\n In user code, this Channel constructor will not typically be called\n manually since there are functions for creating channels for each specific\n type with a more customized set of arguments to pass.\n\n Args:\n type: str, The type of delivery mechanism used by this channel. For\n example, 'web_hook'.\n id: str, A UUID for the channel.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each event delivered\n over this channel.\n address: str, The address of the receiving entity where events are\n delivered. Specific to the channel type.\n expiration: int, The time, in milliseconds from the epoch, when this\n channel will expire.\n params: dict, A dictionary of string to string, with additional parameters\n controlling delivery channel behavior.\n resource_id: str, An opaque id that identifies the resource that is\n being watched. Stable across different API versions.\n resource_uri: str, The canonicalized ID of the watched resource.\n \"\"\"\n self.type = type\n self.id = id\n self.token = token\n self.address = address\n self.expiration = expiration\n self.params = params\n self.resource_id = resource_id\n self.resource_uri = resource_uri\n\n def body(self):\n \"\"\"Build a body from the Channel.\n\n Constructs a dictionary that's appropriate for passing into watch()\n methods as the value of body argument.\n\n Returns:\n A dictionary representation of the channel.\n \"\"\"\n result = {\n 'id': self.id,\n 'token': self.token,\n 'type': self.type,\n 'address': self.address\n }\n if self.params:\n result['params'] = self.params\n if self.resource_id:\n result['resourceId'] = self.resource_id\n if self.resource_uri:\n result['resourceUri'] = self.resource_uri\n if self.expiration:\n result['expiration'] = self.expiration\n\n return result\n\n def update(self, resp):\n \"\"\"Update a channel with information from the response of watch().\n\n When a request is sent to watch() a resource, the response returned\n from the watch() request is a dictionary with updated channel information,\n such as the resource_id, which is needed when stopping a subscription.\n\n Args:\n resp: dict, The response from a watch() method.\n \"\"\"\n for json_name, param_name in six.iteritems(CHANNEL_PARAMS):\n value = resp.get(json_name)\n if value is not None:\n setattr(self, param_name, value)\n\n\ndef notification_from_headers(channel, headers):\n \"\"\"Parse a notification from the webhook request headers, validate\n the notification, and return a Notification object.\n\n Args:\n channel: Channel, The channel that the notification is associated with.\n headers: dict, A dictionary like object that contains the request headers\n from the webhook HTTP request.\n\n Returns:\n A Notification object.\n\n Raises:\n errors.InvalidNotificationError if the notification is invalid.\n ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.\n 
\"\"\"\n headers = _upper_header_keys(headers)\n channel_id = headers[X_GOOG_CHANNEL_ID]\n if channel.id != channel_id:\n raise errors.InvalidNotificationError(\n 'Channel id mismatch: %s != %s' % (channel.id, channel_id))\n else:\n message_number = int(headers[X_GOOG_MESSAGE_NUMBER])\n state = headers[X_GOOG_RESOURCE_STATE]\n resource_uri = headers[X_GOOG_RESOURCE_URI]\n resource_id = headers[X_GOOG_RESOURCE_ID]\n return Notification(message_number, state, resource_uri, resource_id)\n\n\[email protected](2)\ndef new_webhook_channel(url, token=None, expiration=None, params=None):\n \"\"\"Create a new webhook Channel.\n\n Args:\n url: str, URL to post notifications to.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each notification delivered\n over this channel.\n expiration: datetime.datetime, A time in the future when the channel\n should expire. Can also be None if the subscription should use the\n default expiration. Note that different services may have different\n limits on how long a subscription lasts. Check the response from the\n watch() method to see the value the service has set for an expiration\n time.\n params: dict, Extra parameters to pass on channel creation. Currently\n not used for webhook channels.\n \"\"\"\n expiration_ms = 0\n if expiration:\n delta = expiration - EPOCH\n expiration_ms = delta.microseconds/1000 + (\n delta.seconds + delta.days*24*3600)*1000\n if expiration_ms < 0:\n expiration_ms = 0\n\n return Channel('web_hook', str(uuid.uuid4()),\n token, url, expiration=expiration_ms,\n params=params)\n\n", "path": "googleapiclient/channel.py"}]} | 4,084 | 91 |
gh_patches_debug_24411 | rasdani/github-patches | git_diff | python__python-docs-es-40 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Improve the CONTRIBUTING guide
We have a small guide that explains the procedure. However, it would be good to improve it a bit so that it is easier to follow for people who don't know much about GitHub and related tools: https://github.com/raulcd/python-docs-es/blob/3.7/CONTRIBUTING.rst
</issue>
<code>
[start of conf.py]
1 # Sphinx configuration file.
2 #
3 # - import original configurations from cpython/Doc/conf.py
4 # - append the path considering the cpython submodule is at ./cpython
5 # - create the symbolic links under ./cpython/locale/es/LC_MESSAGES
6 # - make the build to work under Read the Docs
7 #
8 # The git submodule was created using this Stack Overflow answer
9 # to fetch only the commit that I needed and avoid clonning the whole history
10 # https://stackoverflow.com/a/27445058
11 #
12 # This can be built locally using `sphinx-build` by running
13 #
14 # $ sphinx-build -b html -n -d _build/doctrees -D language=es . _build/html
15
16 import sys, os, time
17 sys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))
18 sys.path.append(os.path.abspath('cpython/Doc/includes'))
19
20 # Import all the Sphinx settings from cpython
21 sys.path.append(os.path.abspath('cpython/Doc'))
22 from conf import *
23
24 version = '3.7'
25 release = '3.7.7'
26
27 project = 'Python en Español'
28 copyright = '2001-%s, Python Software Foundation' % time.strftime('%Y')
29
30 html_theme_path = ['cpython/Doc/tools']
31 templates_path = ['cpython/Doc/tools/templates']
32 html_static_path = ['cpython/Doc/tools/static']
33
34 os.system('mkdir -p cpython/locales/es/')
35 os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')
36
37 gettext_compact = False
38 locale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir
39
40 def setup(app):
41 # Change the sourcedir programmatically because Read the Docs always call it with `.`
42 app.srcdir = 'cpython/Doc'
43
[end of conf.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/conf.py b/conf.py
--- a/conf.py
+++ b/conf.py
@@ -33,10 +33,34 @@
os.system('mkdir -p cpython/locales/es/')
os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')
+os.system('ln -nfs `pwd`/CONTRIBUTING.rst cpython/Doc/CONTRIBUTING.rst')
gettext_compact = False
locale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir
def setup(app):
+
+ def add_contributing_banner(app, doctree):
+ """
+ Insert a banner at the top of the index.
+
+ This way, we can easily communicate people to help with the translation,
+ pointing them to different resources.
+ """
+ from docutils import nodes, core
+
+ message = '¡Ayúdanos a traducir la documentación oficial de Python al Español! ' \
+ f'Puedes encontrar más información en `Como contribuir </es/{version}/CONTRIBUTING.html>`_ ' \
+ 'y así ayudarnos a acercar Python a más personas de habla hispana.'
+
+ paragraph = core.publish_doctree(message)[0]
+ banner = nodes.warning(ids=['contributing-banner'])
+ banner.append(paragraph)
+
+ for document in doctree.traverse(nodes.document):
+ document.insert(0, banner)
+
# Change the sourcedir programmatically because Read the Docs always call it with `.`
app.srcdir = 'cpython/Doc'
+
+ app.connect('doctree-read', add_contributing_banner)
| {"golden_diff": "diff --git a/conf.py b/conf.py\n--- a/conf.py\n+++ b/conf.py\n@@ -33,10 +33,34 @@\n \n os.system('mkdir -p cpython/locales/es/')\n os.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')\n+os.system('ln -nfs `pwd`/CONTRIBUTING.rst cpython/Doc/CONTRIBUTING.rst')\n \n gettext_compact = False\n locale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir\n \n def setup(app):\n+\n+ def add_contributing_banner(app, doctree):\n+ \"\"\"\n+ Insert a banner at the top of the index.\n+\n+ This way, we can easily communicate people to help with the translation,\n+ pointing them to different resources.\n+ \"\"\"\n+ from docutils import nodes, core\n+\n+ message = '\u00a1Ay\u00fadanos a traducir la documentaci\u00f3n oficial de Python al Espa\u00f1ol! ' \\\n+ f'Puedes encontrar m\u00e1s informaci\u00f3n en `Como contribuir </es/{version}/CONTRIBUTING.html>`_ ' \\\n+ 'y as\u00ed ayudarnos a acercar Python a m\u00e1s personas de habla hispana.'\n+\n+ paragraph = core.publish_doctree(message)[0]\n+ banner = nodes.warning(ids=['contributing-banner'])\n+ banner.append(paragraph)\n+\n+ for document in doctree.traverse(nodes.document):\n+ document.insert(0, banner)\n+\n # Change the sourcedir programmatically because Read the Docs always call it with `.`\n app.srcdir = 'cpython/Doc'\n+\n+ app.connect('doctree-read', add_contributing_banner)\n", "issue": "Mejorar la gu\u00eda de CONTRIBUTING\nTenemos una peque\u00f1a gu\u00eda que explica el procedimiento. Sin embargo, estar\u00eda bueno mejorarla un poco para que sea m\u00e1s f\u00e1cil de seguir para persona que no sepan mucho de github y dem\u00e1s herramientas: https://github.com/raulcd/python-docs-es/blob/3.7/CONTRIBUTING.rst\n", "before_files": [{"content": "# Sphinx configuration file.\n#\n# - import original configurations from cpython/Doc/conf.py\n# - append the path considering the cpython submodule is at ./cpython\n# - create the symbolic links under ./cpython/locale/es/LC_MESSAGES\n# - make the build to work under Read the Docs\n#\n# The git submodule was created using this Stack Overflow answer\n# to fetch only the commit that I needed and avoid clonning the whole history\n# https://stackoverflow.com/a/27445058\n#\n# This can be built locally using `sphinx-build` by running\n#\n# $ sphinx-build -b html -n -d _build/doctrees -D language=es . _build/html\n\nimport sys, os, time\nsys.path.append(os.path.abspath('cpython/Doc/tools/extensions'))\nsys.path.append(os.path.abspath('cpython/Doc/includes'))\n\n# Import all the Sphinx settings from cpython\nsys.path.append(os.path.abspath('cpython/Doc'))\nfrom conf import *\n\nversion = '3.7'\nrelease = '3.7.7'\n\nproject = 'Python en Espa\u00f1ol'\ncopyright = '2001-%s, Python Software Foundation' % time.strftime('%Y')\n\nhtml_theme_path = ['cpython/Doc/tools']\ntemplates_path = ['cpython/Doc/tools/templates']\nhtml_static_path = ['cpython/Doc/tools/static']\n\nos.system('mkdir -p cpython/locales/es/')\nos.system('ln -nfs `pwd` cpython/locales/es/LC_MESSAGES')\n\ngettext_compact = False\nlocale_dirs = ['../locales', 'cpython/locales'] # relative to the sourcedir\n\ndef setup(app):\n # Change the sourcedir programmatically because Read the Docs always call it with `.`\n app.srcdir = 'cpython/Doc'\n", "path": "conf.py"}]} | 1,081 | 376 |
gh_patches_debug_18972 | rasdani/github-patches | git_diff | holoviz__panel-343 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
visual changes to subway map
With the current subway map it's not apparent where we are on the map. Suggestions to improve the visual effect include making the horizontal line thinner and/or changing the 'gray' location indicator to something more obvious like red or blue.
</issue>
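For illustration, here is a minimal HoloViews/Bokeh sketch of the kind of change the issue asks for — a thinner connector line and a higher-contrast colour for the active stage. The specific width and colour values below are assumptions made for the example, not the values ultimately chosen by the project.

```python
import holoviews as hv
hv.extension("bokeh")

stages = 3
# Thinner horizontal line than the current default.
line = hv.Path([[(0, 0), (stages - 1, 0)]]).options(
    line_width=6, color="black", backend="bokeh")

# Active stage drawn in a saturated colour instead of gray.
active = [0, 1, 0]
points = hv.Points((list(range(stages)), [0] * stages, active), vdims=["active"]).options(
    color_index="active", cmap={0: "white", 1: "#2a7fff"},  # blue is an illustrative choice
    line_color="black", size=20, backend="bokeh")

layout = line * points
```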
<code>
[start of panel/pipeline.py]
1 from __future__ import absolute_import, division, unicode_literals
2
3 import os
4
5 import param
6 import numpy as np
7
8 from .layout import Row, Column, HSpacer, VSpacer
9 from .pane import HoloViews, Markdown, Pane
10 from .param import Param
11 from .util import param_reprs
12
13
14 class Pipeline(param.Parameterized):
15 """
16 Allows connecting a linear series of panels to define a workflow.
17 Each stage in a pipeline should declare a panel method which
18 returns a panel object that can be displayed and annotate its
19 outputs using the param.output decorator.
20 """
21
22 debug = param.Boolean(default=False, precedence=-1, doc="""
23 Whether to raise errors, useful for debugging while building an application.""")
24
25 inherit_params = param.Boolean(default=True, precedence=-1, doc="""
26 Whether parameters should be inherited between pipeline stages""")
27
28 next = param.Action(default=lambda x: x.param.trigger('next'))
29
30 previous = param.Action(default=lambda x: x.param.trigger('previous'))
31
32 def __init__(self, stages=[], **params):
33 try:
34 import holoviews as hv
35 except:
36 raise ImportError('Pipeline requires holoviews to be installed')
37
38 self._stages = list(stages)
39 self._stage = 0
40 super(Pipeline, self).__init__(**params)
41 self._error = Markdown('')
42 self._states = []
43 self._state = None
44 self._progress_sel = hv.streams.Selection1D()
45 self._progress_sel.add_subscriber(self._set_stage)
46 prev_button = Param(self.param, parameters=['previous'], show_name=False)
47 next_button = Param(self.param, parameters=['next'], show_name=False)
48 prev_button.layout[0].disabled = True
49 self._progress_bar = Row(self._make_progress, prev_button, next_button)
50 spinner = Pane(os.path.join(os.path.dirname(__file__), 'assets', 'spinner.gif'))
51 self._spinner_layout = Row(HSpacer(), Column(VSpacer(), spinner, VSpacer()), HSpacer())
52 stage_layout = Row()
53 if len(stages):
54 stage_layout.append(self._init_stage())
55 self._layout = Column(self._progress_bar, self._error, stage_layout)
56
57 def add_stage(self, name, stage):
58 self._validate(stage)
59 self._stages.append((name, stage))
60 if len(self._stages) == 1:
61 self._layout[2].append(self._init_stage())
62
63 def _validate(self, stage):
64 if any(stage is s for n, s in self._stages):
65 raise ValueError('Stage %s is already in pipeline' % stage)
66 elif not ((isinstance(stage, type) and issubclass(stage, param.Parameterized))
67 or isinstance(stage, param.Parameterized)):
68 raise ValueError('Pipeline stages must be Parameterized classes or instances.')
69
70 def __repr__(self):
71 repr_str = 'Pipeline:'
72 for i, (name, stage) in enumerate(self._stages):
73 if isinstance(stage, param.Parameterized):
74 cls_name = type(stage).__name__
75 else:
76 cls_name = stage.__name__
77 params = ', '.join(param_reprs(stage))
78 repr_str += '\n [%d] %s: %s(%s)' % (i, name, cls_name, params)
79 return repr_str
80
81 def __getitem__(self, index):
82 return self._stages[index][1]
83
84 @property
85 def layout(self):
86 self._progress_bar[0] = self._make_progress
87 return self._layout
88
89
90 def _init_stage(self):
91 name, stage = self._stages[self._stage]
92 kwargs = {}
93 if self._state:
94 results = {}
95 for name, (_, method, index) in self._state.param.outputs().items():
96 if name not in stage.param:
97 continue
98 if method not in results:
99 results[method] = method()
100 result = results[method]
101 if index is not None:
102 result = result[index]
103 kwargs[name] = result
104 if self.inherit_params:
105 params = [k for k, v in self._state.param.objects('existing').items()
106 if v.precedence is None or v.precedence >= 0]
107 kwargs.update({k: v for k, v in self._state.param.get_param_values()
108 if k in stage.param and k != 'name' and k in params})
109
110 if isinstance(stage, param.Parameterized):
111 stage.set_param(**kwargs)
112 self._state = stage
113 else:
114 self._state = stage(**kwargs)
115 if len(self._states) <= self._stage:
116 self._states.append(self._state)
117 else:
118 self._states[self._stage] = self._state
119 return self._state.panel()
120
121 def _set_stage(self, index):
122 idx = index[0]
123 steps = idx-self._stage
124 if steps < 0:
125 for i in range(abs(steps)):
126 e = self._previous()
127 if e:
128 break
129 else:
130 for i in range(steps):
131 e = self._next()
132 if e:
133 break
134
135 def _update_button(self):
136 # Disable previous button
137 if self._stage == 0:
138 self._progress_bar[1].layout[0].disabled = True
139 else:
140 self._progress_bar[1].layout[0].disabled = False
141
142 # Disable next button
143 if self._stage == len(self._stages)-1:
144 self._progress_bar[2].layout[0].disabled = True
145 else:
146 self._progress_bar[2].layout[0].disabled = False
147
148 @param.depends('next', watch=True)
149 def _next(self):
150 self._stage += 1
151 prev_state = self._layout[2][0]
152 self._layout[2][0] = self._spinner_layout
153 try:
154 new_stage = self._init_stage()
155 self._layout[2][0] = new_stage
156 self._update_button()
157 except Exception as e:
158 self._stage -= 1
159 self._error.object = ('Next stage raised following error:\n\n\t%s: %s'
160 % (type(e).__name__, str(e)))
161 self._layout[2][0] = prev_state
162 if self.debug:
163 raise e
164 return e
165 else:
166 self._error.object = ''
167
168 @param.depends('previous', watch=True)
169 def _previous(self):
170 self._stage -= 1
171 try:
172 self._state = self._states[self._stage]
173 self._layout[2][0] = self._state.panel()
174 self._update_button()
175 except Exception as e:
176 self._stage += 1
177 self._error.object = ('Previous stage raised following error:\n\n\t%s: %s'
178 % (type(e).__name__, str(e)))
179 if self.debug:
180 raise e
181 else:
182 self._error.object = ''
183
184 @param.depends('previous', 'next')
185 def _make_progress(self):
186 import holoviews as hv
187 import holoviews.plotting.bokeh # noqa
188 stages = len(self._stages)
189 line = hv.Path([[(0, 0), (stages-1, 0)]]).options(
190 line_width=10, color='black', backend='bokeh'
191 )
192 vals = np.arange(stages)
193 active = [1 if v == self._stage else 0 for v in vals]
194 points = hv.Points((vals, np.zeros(stages), active), vdims=['active']).options(
195 color_index='active', line_color='black', cmap={0: 'white', 1: 'gray'},
196 show_legend=False, size=20, default_tools=[], tools=['tap'],
197 nonselection_alpha=1, backend='bokeh'
198 )
199 point_labels = points.add_dimension('text', 0, [n for n, _ in self._stages], vdim=True)
200 labels = hv.Labels(point_labels).options(yoffset=-2.5, backend='bokeh')
201 self._progress_sel.source = points
202 hv_plot = (line * points * labels).options(
203 xaxis=None, yaxis=None, width=800, show_frame=False, toolbar=None,
204 height=80, xlim=(-0.5, stages-0.5), ylim=(-4, 1.5),
205 clone=False, backend='bokeh'
206 )
207 return HoloViews(hv_plot, backend='bokeh')
208
[end of panel/pipeline.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/panel/pipeline.py b/panel/pipeline.py
--- a/panel/pipeline.py
+++ b/panel/pipeline.py
@@ -187,12 +187,12 @@
import holoviews.plotting.bokeh # noqa
stages = len(self._stages)
line = hv.Path([[(0, 0), (stages-1, 0)]]).options(
- line_width=10, color='black', backend='bokeh'
+ line_width=6, color='black', backend='bokeh'
)
vals = np.arange(stages)
active = [1 if v == self._stage else 0 for v in vals]
points = hv.Points((vals, np.zeros(stages), active), vdims=['active']).options(
- color_index='active', line_color='black', cmap={0: 'white', 1: 'gray'},
+ color_index='active', line_color='black', cmap={0: 'white', 1: '#5cb85c'},
show_legend=False, size=20, default_tools=[], tools=['tap'],
nonselection_alpha=1, backend='bokeh'
)
| {"golden_diff": "diff --git a/panel/pipeline.py b/panel/pipeline.py\n--- a/panel/pipeline.py\n+++ b/panel/pipeline.py\n@@ -187,12 +187,12 @@\n import holoviews.plotting.bokeh # noqa\n stages = len(self._stages)\n line = hv.Path([[(0, 0), (stages-1, 0)]]).options(\n- line_width=10, color='black', backend='bokeh'\n+ line_width=6, color='black', backend='bokeh'\n )\n vals = np.arange(stages)\n active = [1 if v == self._stage else 0 for v in vals]\n points = hv.Points((vals, np.zeros(stages), active), vdims=['active']).options(\n- color_index='active', line_color='black', cmap={0: 'white', 1: 'gray'},\n+ color_index='active', line_color='black', cmap={0: 'white', 1: '#5cb85c'},\n show_legend=False, size=20, default_tools=[], tools=['tap'],\n nonselection_alpha=1, backend='bokeh'\n )\n", "issue": "visual changes to subway map\nWith the current subway map its not apparent where we are on the map. Suggestions to improve the visual effect include making the horizontal line thinner and/or the 'gray' location indicator to something more obvious like red or blue. \n", "before_files": [{"content": "from __future__ import absolute_import, division, unicode_literals\n\nimport os\n\nimport param\nimport numpy as np\n\nfrom .layout import Row, Column, HSpacer, VSpacer\nfrom .pane import HoloViews, Markdown, Pane\nfrom .param import Param\nfrom .util import param_reprs\n\n\nclass Pipeline(param.Parameterized):\n \"\"\"\n Allows connecting a linear series of panels to define a workflow.\n Each stage in a pipeline should declare a panel method which\n returns a panel object that can be displayed and annotate its\n outputs using the param.output decorator.\n \"\"\"\n\n debug = param.Boolean(default=False, precedence=-1, doc=\"\"\"\n Whether to raise errors, useful for debugging while building an application.\"\"\")\n\n inherit_params = param.Boolean(default=True, precedence=-1, doc=\"\"\"\n Whether parameters should be inherited between pipeline stages\"\"\")\n\n next = param.Action(default=lambda x: x.param.trigger('next'))\n\n previous = param.Action(default=lambda x: x.param.trigger('previous'))\n\n def __init__(self, stages=[], **params):\n try:\n import holoviews as hv\n except:\n raise ImportError('Pipeline requires holoviews to be installed')\n\n self._stages = list(stages)\n self._stage = 0\n super(Pipeline, self).__init__(**params)\n self._error = Markdown('')\n self._states = []\n self._state = None\n self._progress_sel = hv.streams.Selection1D()\n self._progress_sel.add_subscriber(self._set_stage)\n prev_button = Param(self.param, parameters=['previous'], show_name=False)\n next_button = Param(self.param, parameters=['next'], show_name=False)\n prev_button.layout[0].disabled = True\n self._progress_bar = Row(self._make_progress, prev_button, next_button)\n spinner = Pane(os.path.join(os.path.dirname(__file__), 'assets', 'spinner.gif'))\n self._spinner_layout = Row(HSpacer(), Column(VSpacer(), spinner, VSpacer()), HSpacer())\n stage_layout = Row()\n if len(stages):\n stage_layout.append(self._init_stage())\n self._layout = Column(self._progress_bar, self._error, stage_layout)\n\n def add_stage(self, name, stage):\n self._validate(stage)\n self._stages.append((name, stage))\n if len(self._stages) == 1:\n self._layout[2].append(self._init_stage())\n\n def _validate(self, stage):\n if any(stage is s for n, s in self._stages):\n raise ValueError('Stage %s is already in pipeline' % stage)\n elif not ((isinstance(stage, type) and issubclass(stage, param.Parameterized))\n or isinstance(stage, 
param.Parameterized)):\n raise ValueError('Pipeline stages must be Parameterized classes or instances.')\n\n def __repr__(self):\n repr_str = 'Pipeline:'\n for i, (name, stage) in enumerate(self._stages):\n if isinstance(stage, param.Parameterized):\n cls_name = type(stage).__name__\n else:\n cls_name = stage.__name__\n params = ', '.join(param_reprs(stage))\n repr_str += '\\n [%d] %s: %s(%s)' % (i, name, cls_name, params)\n return repr_str\n\n def __getitem__(self, index):\n return self._stages[index][1]\n\n @property\n def layout(self):\n self._progress_bar[0] = self._make_progress\n return self._layout\n\n\n def _init_stage(self):\n name, stage = self._stages[self._stage]\n kwargs = {}\n if self._state:\n results = {}\n for name, (_, method, index) in self._state.param.outputs().items():\n if name not in stage.param:\n continue\n if method not in results:\n results[method] = method()\n result = results[method]\n if index is not None:\n result = result[index]\n kwargs[name] = result\n if self.inherit_params:\n params = [k for k, v in self._state.param.objects('existing').items()\n if v.precedence is None or v.precedence >= 0]\n kwargs.update({k: v for k, v in self._state.param.get_param_values()\n if k in stage.param and k != 'name' and k in params})\n\n if isinstance(stage, param.Parameterized):\n stage.set_param(**kwargs)\n self._state = stage\n else:\n self._state = stage(**kwargs)\n if len(self._states) <= self._stage:\n self._states.append(self._state)\n else:\n self._states[self._stage] = self._state\n return self._state.panel()\n\n def _set_stage(self, index):\n idx = index[0]\n steps = idx-self._stage\n if steps < 0:\n for i in range(abs(steps)):\n e = self._previous()\n if e:\n break\n else:\n for i in range(steps):\n e = self._next()\n if e:\n break\n\n def _update_button(self):\n # Disable previous button\n if self._stage == 0:\n self._progress_bar[1].layout[0].disabled = True\n else:\n self._progress_bar[1].layout[0].disabled = False\n\n # Disable next button\n if self._stage == len(self._stages)-1:\n self._progress_bar[2].layout[0].disabled = True\n else:\n self._progress_bar[2].layout[0].disabled = False\n\n @param.depends('next', watch=True)\n def _next(self):\n self._stage += 1\n prev_state = self._layout[2][0]\n self._layout[2][0] = self._spinner_layout\n try:\n new_stage = self._init_stage()\n self._layout[2][0] = new_stage\n self._update_button()\n except Exception as e:\n self._stage -= 1\n self._error.object = ('Next stage raised following error:\\n\\n\\t%s: %s'\n % (type(e).__name__, str(e)))\n self._layout[2][0] = prev_state\n if self.debug:\n raise e\n return e\n else:\n self._error.object = ''\n\n @param.depends('previous', watch=True)\n def _previous(self):\n self._stage -= 1\n try:\n self._state = self._states[self._stage]\n self._layout[2][0] = self._state.panel()\n self._update_button()\n except Exception as e:\n self._stage += 1\n self._error.object = ('Previous stage raised following error:\\n\\n\\t%s: %s'\n % (type(e).__name__, str(e)))\n if self.debug:\n raise e\n else:\n self._error.object = ''\n\n @param.depends('previous', 'next')\n def _make_progress(self):\n import holoviews as hv\n import holoviews.plotting.bokeh # noqa\n stages = len(self._stages)\n line = hv.Path([[(0, 0), (stages-1, 0)]]).options(\n line_width=10, color='black', backend='bokeh'\n )\n vals = np.arange(stages)\n active = [1 if v == self._stage else 0 for v in vals]\n points = hv.Points((vals, np.zeros(stages), active), vdims=['active']).options(\n color_index='active', 
line_color='black', cmap={0: 'white', 1: 'gray'},\n show_legend=False, size=20, default_tools=[], tools=['tap'],\n nonselection_alpha=1, backend='bokeh'\n )\n point_labels = points.add_dimension('text', 0, [n for n, _ in self._stages], vdim=True)\n labels = hv.Labels(point_labels).options(yoffset=-2.5, backend='bokeh')\n self._progress_sel.source = points\n hv_plot = (line * points * labels).options(\n xaxis=None, yaxis=None, width=800, show_frame=False, toolbar=None,\n height=80, xlim=(-0.5, stages-0.5), ylim=(-4, 1.5),\n clone=False, backend='bokeh'\n )\n return HoloViews(hv_plot, backend='bokeh')\n", "path": "panel/pipeline.py"}]} | 2,964 | 264 |
gh_patches_debug_18863 | rasdani/github-patches | git_diff | vega__altair-2785 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Equal default chart dimensions
Currently the default theme in Altair specifies an aspect ratio of 4/3, with a width of 400 and a height of 300 (for continuous data):

The default in Vega-Lite is to make both dimensions of equal length, which I think makes sense since it spreads the data over the same number of pixels on both the X and Y axis. This could have benefits in terms of making it easier to fairly compare the distribution of the data between the two plotted variables instead of it appearing more spread out over the X axis due to the increased chart width. The default in Vega-Lite is to use 200 px for the width and height, which I think is a bit small, but setting both to 300 px looks good:

What do you all think about changing the default width in Altair to 300 px, so that both the X and Y axes occupy the same amount of pixels by default? Are there benefits of having an unequal aspect ratio like the current default that I am missing (maybe that it is more similar to the screen aspect ratio)? I don't think this is a major concern, but thought I would bring it up and see if others also regard it as a small improvement or just a matter of personal aesthetics/taste.
</issue>
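For reference, a user can already opt into square defaults today by registering a custom theme; the sketch below is illustrative (the theme name "square" is made up) and is not the change proposed for Altair's built-in default.

```python
import altair as alt

def square_theme():
    # Same structure as the built-in "default" theme, but with equal dimensions.
    return {"config": {"view": {"continuousWidth": 300, "continuousHeight": 300}}}

alt.themes.register("square", square_theme)  # "square" is an arbitrary name
alt.themes.enable("square")
```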
<code>
[start of altair/vegalite/v5/theme.py]
1 """Tools for enabling and registering chart themes"""
2
3 from ...utils.theme import ThemeRegistry
4
5 VEGA_THEMES = [
6 "ggplot2",
7 "quartz",
8 "vox",
9 "fivethirtyeight",
10 "dark",
11 "latimes",
12 "urbaninstitute",
13 ]
14
15
16 class VegaTheme(object):
17 """Implementation of a builtin vega theme."""
18
19 def __init__(self, theme):
20 self.theme = theme
21
22 def __call__(self):
23 return {
24 "usermeta": {"embedOptions": {"theme": self.theme}},
25 "config": {"view": {"continuousWidth": 400, "continuousHeight": 300}},
26 }
27
28 def __repr__(self):
29 return "VegaTheme({!r})".format(self.theme)
30
31
32 # The entry point group that can be used by other packages to declare other
33 # renderers that will be auto-detected. Explicit registration is also
34 # allowed by the PluginRegistery API.
35 ENTRY_POINT_GROUP = "altair.vegalite.v5.theme" # type: str
36 themes = ThemeRegistry(entry_point_group=ENTRY_POINT_GROUP)
37
38 themes.register(
39 "default",
40 lambda: {"config": {"view": {"continuousWidth": 400, "continuousHeight": 300}}},
41 )
42 themes.register(
43 "opaque",
44 lambda: {
45 "config": {
46 "background": "white",
47 "view": {"continuousWidth": 400, "continuousHeight": 300},
48 }
49 },
50 )
51 themes.register("none", lambda: {})
52
53 for theme in VEGA_THEMES:
54 themes.register(theme, VegaTheme(theme))
55
56 themes.enable("default")
57
[end of altair/vegalite/v5/theme.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/altair/vegalite/v5/theme.py b/altair/vegalite/v5/theme.py
--- a/altair/vegalite/v5/theme.py
+++ b/altair/vegalite/v5/theme.py
@@ -22,7 +22,7 @@
def __call__(self):
return {
"usermeta": {"embedOptions": {"theme": self.theme}},
- "config": {"view": {"continuousWidth": 400, "continuousHeight": 300}},
+ "config": {"view": {"continuousWidth": 300, "continuousHeight": 300}},
}
def __repr__(self):
@@ -37,14 +37,14 @@
themes.register(
"default",
- lambda: {"config": {"view": {"continuousWidth": 400, "continuousHeight": 300}}},
+ lambda: {"config": {"view": {"continuousWidth": 300, "continuousHeight": 300}}},
)
themes.register(
"opaque",
lambda: {
"config": {
"background": "white",
- "view": {"continuousWidth": 400, "continuousHeight": 300},
+ "view": {"continuousWidth": 300, "continuousHeight": 300},
}
},
)
| {"golden_diff": "diff --git a/altair/vegalite/v5/theme.py b/altair/vegalite/v5/theme.py\n--- a/altair/vegalite/v5/theme.py\n+++ b/altair/vegalite/v5/theme.py\n@@ -22,7 +22,7 @@\n def __call__(self):\n return {\n \"usermeta\": {\"embedOptions\": {\"theme\": self.theme}},\n- \"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}},\n+ \"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}},\n }\n \n def __repr__(self):\n@@ -37,14 +37,14 @@\n \n themes.register(\n \"default\",\n- lambda: {\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}},\n+ lambda: {\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}},\n )\n themes.register(\n \"opaque\",\n lambda: {\n \"config\": {\n \"background\": \"white\",\n- \"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300},\n+ \"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300},\n }\n },\n )\n", "issue": "Equal default chart dimensions\nCurrently the default theme in Altair specifies an aspect ratio of 4/3, with a width of 400 and a height of 300 (for continuous data):\r\n\r\n\r\n\r\nThe default in VegaLite is to make both dimensions of equal length, which I think makes sense since it spreads the data over the same amount of pixels on both the X and Y axis. This could have benefits in terms of making it easier to fairly compare the distribution of the data between the two plotted variables instead of it appearing more spread out over the X axis due to the increase chart width. The default in Vega-Lite is to use 200 px for the width and height which I think is a bit small, but setting both to 300 px looks good:\r\n\r\n\r\n\r\nWhat do you all think about changing the default width in Altair to 300 px, so that both the X and Y axes occupy the same amount of pixels by default? Are there benefits of having an unequal aspect ratio like the current default that I am missing (maybe that it is more similar to the screen aspect ratio)? I don't think this is a major concern, but thought I would bring it up and see if others also regard it as a small improvement or just a matter of personal aesthetics/taste.\n", "before_files": [{"content": "\"\"\"Tools for enabling and registering chart themes\"\"\"\n\nfrom ...utils.theme import ThemeRegistry\n\nVEGA_THEMES = [\n \"ggplot2\",\n \"quartz\",\n \"vox\",\n \"fivethirtyeight\",\n \"dark\",\n \"latimes\",\n \"urbaninstitute\",\n]\n\n\nclass VegaTheme(object):\n \"\"\"Implementation of a builtin vega theme.\"\"\"\n\n def __init__(self, theme):\n self.theme = theme\n\n def __call__(self):\n return {\n \"usermeta\": {\"embedOptions\": {\"theme\": self.theme}},\n \"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}},\n }\n\n def __repr__(self):\n return \"VegaTheme({!r})\".format(self.theme)\n\n\n# The entry point group that can be used by other packages to declare other\n# renderers that will be auto-detected. 
Explicit registration is also\n# allowed by the PluginRegistery API.\nENTRY_POINT_GROUP = \"altair.vegalite.v5.theme\" # type: str\nthemes = ThemeRegistry(entry_point_group=ENTRY_POINT_GROUP)\n\nthemes.register(\n \"default\",\n lambda: {\"config\": {\"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300}}},\n)\nthemes.register(\n \"opaque\",\n lambda: {\n \"config\": {\n \"background\": \"white\",\n \"view\": {\"continuousWidth\": 400, \"continuousHeight\": 300},\n }\n },\n)\nthemes.register(\"none\", lambda: {})\n\nfor theme in VEGA_THEMES:\n themes.register(theme, VegaTheme(theme))\n\nthemes.enable(\"default\")\n", "path": "altair/vegalite/v5/theme.py"}]} | 1,406 | 308 |
gh_patches_debug_22548 | rasdani/github-patches | git_diff | scalableminds__webknossos-libs-236 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
NIFTI Division by zero error
When converting a NIFTI file consisting only of zeros, the `to_target_datatype` function divides by 0.
</issue>
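A small NumPy sketch of the failure mode: for an all-zero float volume, `data.max()` is 0, so the normalisation factor becomes 0 and the subsequent division produces NaNs and a runtime warning. The guard shown is only one possible way to handle it, not necessarily the project's chosen fix.

```python
import numpy as np

data = np.zeros((4, 4, 4), dtype=np.float32)  # all-zero volume
factor = data.max()                            # 0.0

# (data / factor) would emit "invalid value encountered in divide" and yield NaNs.
if factor == 0:
    factor = 1  # skip rescaling when there is nothing to rescale
converted = (data / factor).astype(np.uint8)
```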
<code>
[start of wkcuber/convert_nifti.py]
1 import time
2 import logging
3 import numpy as np
4 from argparse import ArgumentParser
5 from pathlib import Path
6 import nibabel as nib
7
8 from wkcuber.api.Dataset import WKDataset, TiffDataset
9 from .utils import (
10 add_verbose_flag,
11 setup_logging,
12 add_scale_flag,
13 pad_or_crop_to_size_and_topleft,
14 parse_bounding_box,
15 )
16
17 from wkcuber.utils import DEFAULT_WKW_FILE_LEN, DEFAULT_WKW_VOXELS_PER_BLOCK
18
19
20 def create_parser():
21 parser = ArgumentParser()
22
23 parser.add_argument(
24 "source_path",
25 help="Path to NIFTY file or to a directory if multiple NIFTI files should be converted. "
26 "In the latter case, also see --color_file and --segmentation_file.",
27 )
28
29 parser.add_argument(
30 "target_path", help="Output directory for the generated WKW dataset."
31 )
32
33 parser.add_argument(
34 "--layer_name",
35 "-l",
36 help="Name of the cubed layer (color or segmentation).",
37 default="color",
38 )
39
40 parser.add_argument(
41 "--dtype", "-d", help="Target datatype (e.g. uint8, uint16).", default="uint8"
42 )
43
44 parser.add_argument(
45 "--color_file",
46 help="When converting folder, name of file to become color layer",
47 default=None,
48 )
49
50 parser.add_argument(
51 "--segmentation_file",
52 help="When converting folder, name of file to become segmentation layer",
53 default=None,
54 )
55
56 parser.add_argument(
57 "--write_tiff",
58 help="Output tiff dataset instead of wkw.",
59 default=False,
60 action="store_true",
61 )
62
63 parser.add_argument(
64 "--use_orientation_header",
65 help="Use orientation information from header to interpret the input data (should be tried if output orientation seems to be wrong).",
66 default=False,
67 action="store_true",
68 )
69
70 parser.add_argument(
71 "--enforce_bounding_box",
72 help="The BoundingBox to which the input data should be written. If the input data is too small, it will be padded. If it's too large, it will be cropped. The input format is x,y,z,width,height,depth.",
73 default=None,
74 type=parse_bounding_box,
75 )
76
77 parser.add_argument(
78 "--flip_axes",
79 help="The axes at which should be flipped. Input format is a comma separated list of axis indices. For example, 1,2,3 will flip the x, y and z axes.",
80 default=None,
81 )
82
83 add_scale_flag(parser, required=False)
84 add_verbose_flag(parser)
85
86 return parser
87
88
89 def to_target_datatype(
90 data: np.ndarray, target_dtype, is_probably_binary: bool
91 ) -> np.ndarray:
92 if is_probably_binary:
93 logging.info(
94 f"Casting directly to {target_dtype}, as input seems to be binary."
95 )
96 return data.astype(np.dtype(target_dtype))
97
98 if data.dtype == np.dtype("float32"):
99 factor = data.max()
100 elif data.dtype == np.dtype("float64"):
101 factor = data.max() / np.iinfo(target_dtype).max
102 else:
103 factor = np.iinfo(data.dtype).max / np.iinfo(target_dtype).max
104
105 return (data / factor).astype(np.dtype(target_dtype))
106
107
108 def convert_nifti(
109 source_nifti_path,
110 target_path,
111 layer_name,
112 dtype,
113 scale,
114 mag=1,
115 file_len=DEFAULT_WKW_FILE_LEN,
116 bbox_to_enforce=None,
117 write_tiff=False,
118 use_orientation_header=False,
119 flip_axes=None,
120 ):
121 voxels_per_cube = file_len * DEFAULT_WKW_VOXELS_PER_BLOCK
122 ref_time = time.time()
123
124 source_nifti = nib.load(str(source_nifti_path.resolve()))
125
126 if use_orientation_header:
127 # Get canonical representation of data to incorporate
128 # encoded transformations. Needs to be flipped later
129 # to match the coordinate system of WKW.
130 source_nifti = nib.funcs.as_closest_canonical(source_nifti, enforce_diag=False)
131
132 cube_data = np.array(source_nifti.get_fdata())
133
134 is_probably_binary = np.unique(cube_data).shape[0] == 2
135 assume_segmentation_layer = (
136 False
137 ) # Since webKnossos does not support multiple segmention layers, this is hardcoded to False right now.
138
139 max_cell_id_args = (
140 {"largest_segment_id": int(np.max(cube_data) + 1)}
141 if assume_segmentation_layer
142 else {}
143 )
144 category_type = "segmentation" if assume_segmentation_layer else "color"
145 logging.debug(f"Assuming {category_type} as layer type for {layer_name}")
146
147 if len(source_nifti.shape) == 3:
148 cube_data = cube_data.reshape((1,) + source_nifti.shape)
149
150 elif len(source_nifti.shape) == 4:
151 cube_data = np.transpose(cube_data, (3, 0, 1, 2))
152
153 else:
154 logging.warning(
155 "Converting of {} failed! Too many or too less dimensions".format(
156 source_nifti_path
157 )
158 )
159
160 return
161
162 if use_orientation_header:
163 # Flip y and z to transform data into wkw's coordinate system.
164 cube_data = np.flip(cube_data, (2, 3))
165
166 if flip_axes:
167 cube_data = np.flip(cube_data, flip_axes)
168
169 if scale is None:
170 scale = tuple(map(float, source_nifti.header["pixdim"][:3]))
171 logging.info(f"Using scale: {scale}")
172 cube_data = to_target_datatype(cube_data, dtype, is_probably_binary)
173
174 # everything needs to be padded to
175 if bbox_to_enforce is not None:
176 target_topleft = np.array((0,) + tuple(bbox_to_enforce.topleft))
177 target_size = np.array((1,) + tuple(bbox_to_enforce.size))
178
179 cube_data = pad_or_crop_to_size_and_topleft(
180 cube_data, target_size, target_topleft
181 )
182
183 # Writing wkw compressed requires files of shape (voxels_per_cube, voxels_per_cube, voxels_per_cube)
184 # Pad data accordingly
185 padding_offset = voxels_per_cube - np.array(cube_data.shape[1:4]) % voxels_per_cube
186 padding_offset = (0, 0, 0)
187 cube_data = np.pad(
188 cube_data,
189 (
190 (0, 0),
191 (0, int(padding_offset[0])),
192 (0, int(padding_offset[1])),
193 (0, int(padding_offset[2])),
194 ),
195 )
196
197 if write_tiff:
198 ds = TiffDataset.get_or_create(target_path, scale=scale or (1, 1, 1))
199 layer = ds.get_or_add_layer(
200 layer_name, category_type, np.dtype(dtype), **max_cell_id_args
201 )
202 mag = layer.get_or_add_mag("1")
203
204 mag.write(cube_data.squeeze())
205 else:
206 ds = WKDataset.get_or_create(target_path, scale=scale or (1, 1, 1))
207 layer = ds.get_or_add_layer(
208 layer_name, category_type, np.dtype(dtype), **max_cell_id_args
209 )
210 mag = layer.get_or_add_mag("1", file_len=file_len)
211 mag.write(cube_data)
212
213 logging.debug(
214 "Converting of {} took {:.8f}s".format(
215 source_nifti_path, time.time() - ref_time
216 )
217 )
218
219
220 def convert_folder_nifti(
221 source_folder_path,
222 target_path,
223 color_subpath,
224 segmentation_subpath,
225 scale,
226 use_orientation_header=False,
227 bbox_to_enforce=None,
228 write_tiff=False,
229 flip_axes=None,
230 ):
231 paths = list(source_folder_path.rglob("**/*.nii"))
232
233 color_path = None
234 segmentation_path = None
235 if color_subpath is not None:
236 color_path = target_path / color_subpath
237 if color_path not in paths:
238 logging.warning(
239 "Specified color file {} not in source path {}!".format(
240 color_path, source_folder_path
241 )
242 )
243
244 if segmentation_subpath is not None:
245 segmentation_path = target_path / segmentation_subpath
246 if segmentation_path not in paths:
247 logging.warning(
248 "Specified segmentation_file file {} not in source path {}!".format(
249 segmentation_path, segmentation_path
250 )
251 )
252
253 logging.info("Segmentation file will also use uint8 as a datatype.")
254
255 conversion_args = {
256 "scale": scale,
257 "write_tiff": write_tiff,
258 "bbox_to_enforce": bbox_to_enforce,
259 "use_orientation_header": use_orientation_header,
260 "flip_axes": flip_axes,
261 }
262 for path in paths:
263 if path == color_path:
264 convert_nifti(path, target_path, "color", "uint8", **conversion_args)
265 elif path == segmentation_path:
266 convert_nifti(path, target_path, "segmentation", "uint8", **conversion_args)
267 else:
268 convert_nifti(path, target_path, path.stem, "uint8", **conversion_args)
269
270
271 def main():
272 args = create_parser().parse_args()
273 setup_logging(args)
274
275 source_path = Path(args.source_path)
276
277 flip_axes = None
278 if args.flip_axes is not None:
279 flip_axes = tuple(int(x) for x in args.flip_axes.split(","))
280 for index in flip_axes:
281 assert (
282 0 <= index <= 3
283 ), "flip_axes parameter must only contain indices between 0 and 3."
284
285 conversion_args = {
286 "scale": args.scale,
287 "write_tiff": args.write_tiff,
288 "bbox_to_enforce": args.enforce_bounding_box,
289 "use_orientation_header": args.use_orientation_header,
290 "flip_axes": flip_axes,
291 }
292
293 if source_path.is_dir():
294 convert_folder_nifti(
295 source_path,
296 Path(args.target_path),
297 args.color_file,
298 args.segmentation_file,
299 **conversion_args,
300 )
301 else:
302 convert_nifti(
303 source_path,
304 Path(args.target_path),
305 args.layer_name,
306 args.dtype,
307 **conversion_args,
308 )
309
310
311 if __name__ == "__main__":
312 main()
313
[end of wkcuber/convert_nifti.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/wkcuber/convert_nifti.py b/wkcuber/convert_nifti.py
--- a/wkcuber/convert_nifti.py
+++ b/wkcuber/convert_nifti.py
@@ -1,21 +1,22 @@
-import time
import logging
-import numpy as np
+import time
from argparse import ArgumentParser
from pathlib import Path
+
import nibabel as nib
+import numpy as np
-from wkcuber.api.Dataset import WKDataset, TiffDataset
-from .utils import (
- add_verbose_flag,
- setup_logging,
+from wkcuber.api.Dataset import TiffDataset, WKDataset
+from wkcuber.utils import (
+ DEFAULT_WKW_FILE_LEN,
+ DEFAULT_WKW_VOXELS_PER_BLOCK,
add_scale_flag,
+ add_verbose_flag,
pad_or_crop_to_size_and_topleft,
parse_bounding_box,
+ setup_logging,
)
-from wkcuber.utils import DEFAULT_WKW_FILE_LEN, DEFAULT_WKW_VOXELS_PER_BLOCK
-
def create_parser():
parser = ArgumentParser()
@@ -102,6 +103,10 @@
else:
factor = np.iinfo(data.dtype).max / np.iinfo(target_dtype).max
+ if data.max() == 0:
+ logging.warning("Not rescaling data since maximum is 0")
+ factor = 1
+
return (data / factor).astype(np.dtype(target_dtype))
| {"golden_diff": "diff --git a/wkcuber/convert_nifti.py b/wkcuber/convert_nifti.py\n--- a/wkcuber/convert_nifti.py\n+++ b/wkcuber/convert_nifti.py\n@@ -1,21 +1,22 @@\n-import time\n import logging\n-import numpy as np\n+import time\n from argparse import ArgumentParser\n from pathlib import Path\n+\n import nibabel as nib\n+import numpy as np\n \n-from wkcuber.api.Dataset import WKDataset, TiffDataset\n-from .utils import (\n- add_verbose_flag,\n- setup_logging,\n+from wkcuber.api.Dataset import TiffDataset, WKDataset\n+from wkcuber.utils import (\n+ DEFAULT_WKW_FILE_LEN,\n+ DEFAULT_WKW_VOXELS_PER_BLOCK,\n add_scale_flag,\n+ add_verbose_flag,\n pad_or_crop_to_size_and_topleft,\n parse_bounding_box,\n+ setup_logging,\n )\n \n-from wkcuber.utils import DEFAULT_WKW_FILE_LEN, DEFAULT_WKW_VOXELS_PER_BLOCK\n-\n \n def create_parser():\n parser = ArgumentParser()\n@@ -102,6 +103,10 @@\n else:\n factor = np.iinfo(data.dtype).max / np.iinfo(target_dtype).max\n \n+ if data.max() == 0:\n+ logging.warning(\"Not rescaling data since maximum is 0\")\n+ factor = 1\n+\n return (data / factor).astype(np.dtype(target_dtype))\n", "issue": "NIFTI Division by zero error\nWhen converting a nifti file only consisting of zeros, the to_target_datatype function divides by 0.\n", "before_files": [{"content": "import time\nimport logging\nimport numpy as np\nfrom argparse import ArgumentParser\nfrom pathlib import Path\nimport nibabel as nib\n\nfrom wkcuber.api.Dataset import WKDataset, TiffDataset\nfrom .utils import (\n add_verbose_flag,\n setup_logging,\n add_scale_flag,\n pad_or_crop_to_size_and_topleft,\n parse_bounding_box,\n)\n\nfrom wkcuber.utils import DEFAULT_WKW_FILE_LEN, DEFAULT_WKW_VOXELS_PER_BLOCK\n\n\ndef create_parser():\n parser = ArgumentParser()\n\n parser.add_argument(\n \"source_path\",\n help=\"Path to NIFTY file or to a directory if multiple NIFTI files should be converted. \"\n \"In the latter case, also see --color_file and --segmentation_file.\",\n )\n\n parser.add_argument(\n \"target_path\", help=\"Output directory for the generated WKW dataset.\"\n )\n\n parser.add_argument(\n \"--layer_name\",\n \"-l\",\n help=\"Name of the cubed layer (color or segmentation).\",\n default=\"color\",\n )\n\n parser.add_argument(\n \"--dtype\", \"-d\", help=\"Target datatype (e.g. uint8, uint16).\", default=\"uint8\"\n )\n\n parser.add_argument(\n \"--color_file\",\n help=\"When converting folder, name of file to become color layer\",\n default=None,\n )\n\n parser.add_argument(\n \"--segmentation_file\",\n help=\"When converting folder, name of file to become segmentation layer\",\n default=None,\n )\n\n parser.add_argument(\n \"--write_tiff\",\n help=\"Output tiff dataset instead of wkw.\",\n default=False,\n action=\"store_true\",\n )\n\n parser.add_argument(\n \"--use_orientation_header\",\n help=\"Use orientation information from header to interpret the input data (should be tried if output orientation seems to be wrong).\",\n default=False,\n action=\"store_true\",\n )\n\n parser.add_argument(\n \"--enforce_bounding_box\",\n help=\"The BoundingBox to which the input data should be written. If the input data is too small, it will be padded. If it's too large, it will be cropped. The input format is x,y,z,width,height,depth.\",\n default=None,\n type=parse_bounding_box,\n )\n\n parser.add_argument(\n \"--flip_axes\",\n help=\"The axes at which should be flipped. Input format is a comma separated list of axis indices. 
For example, 1,2,3 will flip the x, y and z axes.\",\n default=None,\n )\n\n add_scale_flag(parser, required=False)\n add_verbose_flag(parser)\n\n return parser\n\n\ndef to_target_datatype(\n data: np.ndarray, target_dtype, is_probably_binary: bool\n) -> np.ndarray:\n if is_probably_binary:\n logging.info(\n f\"Casting directly to {target_dtype}, as input seems to be binary.\"\n )\n return data.astype(np.dtype(target_dtype))\n\n if data.dtype == np.dtype(\"float32\"):\n factor = data.max()\n elif data.dtype == np.dtype(\"float64\"):\n factor = data.max() / np.iinfo(target_dtype).max\n else:\n factor = np.iinfo(data.dtype).max / np.iinfo(target_dtype).max\n\n return (data / factor).astype(np.dtype(target_dtype))\n\n\ndef convert_nifti(\n source_nifti_path,\n target_path,\n layer_name,\n dtype,\n scale,\n mag=1,\n file_len=DEFAULT_WKW_FILE_LEN,\n bbox_to_enforce=None,\n write_tiff=False,\n use_orientation_header=False,\n flip_axes=None,\n):\n voxels_per_cube = file_len * DEFAULT_WKW_VOXELS_PER_BLOCK\n ref_time = time.time()\n\n source_nifti = nib.load(str(source_nifti_path.resolve()))\n\n if use_orientation_header:\n # Get canonical representation of data to incorporate\n # encoded transformations. Needs to be flipped later\n # to match the coordinate system of WKW.\n source_nifti = nib.funcs.as_closest_canonical(source_nifti, enforce_diag=False)\n\n cube_data = np.array(source_nifti.get_fdata())\n\n is_probably_binary = np.unique(cube_data).shape[0] == 2\n assume_segmentation_layer = (\n False\n ) # Since webKnossos does not support multiple segmention layers, this is hardcoded to False right now.\n\n max_cell_id_args = (\n {\"largest_segment_id\": int(np.max(cube_data) + 1)}\n if assume_segmentation_layer\n else {}\n )\n category_type = \"segmentation\" if assume_segmentation_layer else \"color\"\n logging.debug(f\"Assuming {category_type} as layer type for {layer_name}\")\n\n if len(source_nifti.shape) == 3:\n cube_data = cube_data.reshape((1,) + source_nifti.shape)\n\n elif len(source_nifti.shape) == 4:\n cube_data = np.transpose(cube_data, (3, 0, 1, 2))\n\n else:\n logging.warning(\n \"Converting of {} failed! 
Too many or too less dimensions\".format(\n source_nifti_path\n )\n )\n\n return\n\n if use_orientation_header:\n # Flip y and z to transform data into wkw's coordinate system.\n cube_data = np.flip(cube_data, (2, 3))\n\n if flip_axes:\n cube_data = np.flip(cube_data, flip_axes)\n\n if scale is None:\n scale = tuple(map(float, source_nifti.header[\"pixdim\"][:3]))\n logging.info(f\"Using scale: {scale}\")\n cube_data = to_target_datatype(cube_data, dtype, is_probably_binary)\n\n # everything needs to be padded to\n if bbox_to_enforce is not None:\n target_topleft = np.array((0,) + tuple(bbox_to_enforce.topleft))\n target_size = np.array((1,) + tuple(bbox_to_enforce.size))\n\n cube_data = pad_or_crop_to_size_and_topleft(\n cube_data, target_size, target_topleft\n )\n\n # Writing wkw compressed requires files of shape (voxels_per_cube, voxels_per_cube, voxels_per_cube)\n # Pad data accordingly\n padding_offset = voxels_per_cube - np.array(cube_data.shape[1:4]) % voxels_per_cube\n padding_offset = (0, 0, 0)\n cube_data = np.pad(\n cube_data,\n (\n (0, 0),\n (0, int(padding_offset[0])),\n (0, int(padding_offset[1])),\n (0, int(padding_offset[2])),\n ),\n )\n\n if write_tiff:\n ds = TiffDataset.get_or_create(target_path, scale=scale or (1, 1, 1))\n layer = ds.get_or_add_layer(\n layer_name, category_type, np.dtype(dtype), **max_cell_id_args\n )\n mag = layer.get_or_add_mag(\"1\")\n\n mag.write(cube_data.squeeze())\n else:\n ds = WKDataset.get_or_create(target_path, scale=scale or (1, 1, 1))\n layer = ds.get_or_add_layer(\n layer_name, category_type, np.dtype(dtype), **max_cell_id_args\n )\n mag = layer.get_or_add_mag(\"1\", file_len=file_len)\n mag.write(cube_data)\n\n logging.debug(\n \"Converting of {} took {:.8f}s\".format(\n source_nifti_path, time.time() - ref_time\n )\n )\n\n\ndef convert_folder_nifti(\n source_folder_path,\n target_path,\n color_subpath,\n segmentation_subpath,\n scale,\n use_orientation_header=False,\n bbox_to_enforce=None,\n write_tiff=False,\n flip_axes=None,\n):\n paths = list(source_folder_path.rglob(\"**/*.nii\"))\n\n color_path = None\n segmentation_path = None\n if color_subpath is not None:\n color_path = target_path / color_subpath\n if color_path not in paths:\n logging.warning(\n \"Specified color file {} not in source path {}!\".format(\n color_path, source_folder_path\n )\n )\n\n if segmentation_subpath is not None:\n segmentation_path = target_path / segmentation_subpath\n if segmentation_path not in paths:\n logging.warning(\n \"Specified segmentation_file file {} not in source path {}!\".format(\n segmentation_path, segmentation_path\n )\n )\n\n logging.info(\"Segmentation file will also use uint8 as a datatype.\")\n\n conversion_args = {\n \"scale\": scale,\n \"write_tiff\": write_tiff,\n \"bbox_to_enforce\": bbox_to_enforce,\n \"use_orientation_header\": use_orientation_header,\n \"flip_axes\": flip_axes,\n }\n for path in paths:\n if path == color_path:\n convert_nifti(path, target_path, \"color\", \"uint8\", **conversion_args)\n elif path == segmentation_path:\n convert_nifti(path, target_path, \"segmentation\", \"uint8\", **conversion_args)\n else:\n convert_nifti(path, target_path, path.stem, \"uint8\", **conversion_args)\n\n\ndef main():\n args = create_parser().parse_args()\n setup_logging(args)\n\n source_path = Path(args.source_path)\n\n flip_axes = None\n if args.flip_axes is not None:\n flip_axes = tuple(int(x) for x in args.flip_axes.split(\",\"))\n for index in flip_axes:\n assert (\n 0 <= index <= 3\n ), \"flip_axes parameter must only 
contain indices between 0 and 3.\"\n\n conversion_args = {\n \"scale\": args.scale,\n \"write_tiff\": args.write_tiff,\n \"bbox_to_enforce\": args.enforce_bounding_box,\n \"use_orientation_header\": args.use_orientation_header,\n \"flip_axes\": flip_axes,\n }\n\n if source_path.is_dir():\n convert_folder_nifti(\n source_path,\n Path(args.target_path),\n args.color_file,\n args.segmentation_file,\n **conversion_args,\n )\n else:\n convert_nifti(\n source_path,\n Path(args.target_path),\n args.layer_name,\n args.dtype,\n **conversion_args,\n )\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "wkcuber/convert_nifti.py"}]} | 3,671 | 321 |
gh_patches_debug_31862 | rasdani/github-patches | git_diff | freedomofpress__securedrop-4935 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Python3 admin virtualenv is broken if improperly set up (doesn't contain pip3)
## Description
Initially discovered while reviewing https://github.com/freedomofpress/securedrop/pull/4927#issuecomment-543763957, if the admin virtualenv is improperly set up, one must manually remove `admin/.venv3` to fix the virtualenv.
## Steps to Reproduce
This is very hard to reproduce, but you can do it by pressing Ctrl+C while the virtualenv is being set up as part of an initial `./securedrop-admin setup` run. Subsequent `./securedrop-admin setup` invocations will no longer work.
## Expected Behavior
`./securedrop-admin setup` should not fail
## Actual Behavior
`./securedrop-admin setup` fails with the following output:
```
amnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin setup
INFO: Virtualenv already exists, not creating
INFO: Checking Python dependencies for securedrop-admin
ERROR: Failed to install pip dependencies. Check network connection and try again.
```
</issue>
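An illustrative sketch of the kind of health check that would catch a half-created virtualenv (the helper name and the marker file are assumptions, not SecureDrop's actual implementation): if `admin/.venv3` exists but `bin/pip3` is missing, the directory is removed so the next `./securedrop-admin setup` run can rebuild it.

```python
import os
import shutil

VENV_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), ".venv3")

def venv_looks_complete(venv_dir=VENV_DIR):
    # Assumption for this sketch: a missing pip3 binary signals an interrupted setup.
    return os.path.exists(os.path.join(venv_dir, "bin", "pip3"))

if os.path.isdir(VENV_DIR) and not venv_looks_complete():
    shutil.rmtree(VENV_DIR)  # force a clean rebuild on the next setup run
```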
<code>
[start of admin/bootstrap.py]
1 # -*- mode: python; coding: utf-8 -*-
2 #
3 # Copyright (C) 2013-2018 Freedom of the Press Foundation & al
4 # Copyright (C) 2018 Loic Dachary <[email protected]>
5 #
6 # This program is free software: you can redistribute it and/or modify
7 # it under the terms of the GNU General Public License as published by
8 # the Free Software Foundation, either version 3 of the License, or
9 # (at your option) any later version.
10 #
11 # This program is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # GNU General Public License for more details.
15 #
16 # You should have received a copy of the GNU General Public License
17 # along with this program. If not, see <http://www.gnu.org/licenses/>.
18 #
19
20 import argparse
21 import logging
22 import os
23 import shutil
24 import subprocess
25 import sys
26
27 sdlog = logging.getLogger(__name__)
28
29 DIR = os.path.dirname(os.path.realpath(__file__))
30 VENV_DIR = os.path.join(DIR, ".venv3")
31
32
33 def setup_logger(verbose=False):
34 """ Configure logging handler """
35 # Set default level on parent
36 sdlog.setLevel(logging.DEBUG)
37 level = logging.DEBUG if verbose else logging.INFO
38
39 stdout = logging.StreamHandler(sys.stdout)
40 stdout.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
41 stdout.setLevel(level)
42 sdlog.addHandler(stdout)
43
44
45 def run_command(command):
46 """
47 Wrapper function to display stdout for running command,
48 similar to how shelling out in a Bash script displays rolling output.
49
50 Yields a list of the stdout from the `command`, and raises a
51 CalledProcessError if `command` returns non-zero.
52 """
53 popen = subprocess.Popen(command,
54 stdout=subprocess.PIPE,
55 stderr=subprocess.STDOUT)
56 for stdout_line in iter(popen.stdout.readline, b""):
57 yield stdout_line
58 popen.stdout.close()
59 return_code = popen.wait()
60 if return_code:
61 raise subprocess.CalledProcessError(return_code, command)
62
63
64 def is_tails():
65 try:
66 id = subprocess.check_output('lsb_release --id --short',
67 shell=True).decode('utf-8').strip()
68 except subprocess.CalledProcessError:
69 id = None
70
71 # dirty hack to unreliably detect Tails 4.0~beta2
72 if id == 'Debian':
73 if os.uname()[1] == 'amnesia':
74 id = 'Tails'
75
76 return id == 'Tails'
77
78
79 def clean_up_tails3_venv(virtualenv_dir=VENV_DIR):
80 """
81 Tails 3.x, based on debian stretch uses libpython3.5, whereas Tails 4.x is
82 based on Debian Buster and uses libpython3.7. This means that the Tails 3.x
83 virtualenv will not work under Tails 4.x, and will need to be destroyed and
84 rebuilt. We can detect if the version of libpython is 3.5 in the
85 admin/.venv3/ folder, and delete it if that's the case. This will ensure a
86 smooth upgrade from Tails 3.x to Tails 4.x.
87 """
88 if is_tails():
89 try:
90 dist = subprocess.check_output('lsb_release --codename --short',
91 shell=True).strip()
92 except subprocess.CalledProcessError:
93 dist = None
94
95 # tails4 is based on buster
96 if dist == b'buster':
97 python_lib_path = os.path.join(virtualenv_dir, "lib/python3.5")
98 if os.path.exists(os.path.join(python_lib_path)):
99 sdlog.info(
100 "Tails 3 Python 3 virtualenv detected. "
101 "Removing it."
102 )
103 shutil.rmtree(virtualenv_dir)
104 sdlog.info("Tails 3 Python 3 virtualenv deleted.")
105
106
107 def checkenv(args):
108 clean_up_tails3_venv(VENV_DIR)
109 if not os.path.exists(os.path.join(VENV_DIR, "bin/activate")):
110 sdlog.error('Please run "securedrop-admin setup".')
111 sys.exit(1)
112
113
114 def maybe_torify():
115 if is_tails():
116 return ['torify']
117 else:
118 return []
119
120
121 def install_apt_dependencies(args):
122 """
123 Install apt dependencies in Tails. In order to install Ansible in
124 a virtualenv, first there are a number of Python prerequisites.
125 """
126 sdlog.info("Installing SecureDrop Admin dependencies")
127 sdlog.info(("You'll be prompted for the temporary Tails admin password,"
128 " which was set on Tails login screen"))
129
130 apt_command = ['sudo', 'su', '-c',
131 "apt-get update && \
132 apt-get -q -o=Dpkg::Use-Pty=0 install -y \
133 python3-virtualenv \
134 python3-yaml \
135 python3-pip \
136 ccontrol \
137 virtualenv \
138 libffi-dev \
139 libssl-dev \
140 libpython3-dev",
141 ]
142
143 try:
144 # Print command results in real-time, to keep Admin apprised
145 # of progress during long-running command.
146 for output_line in run_command(apt_command):
147 print(output_line.decode('utf-8').rstrip())
148 except subprocess.CalledProcessError:
149 # Tails supports apt persistence, which was used by SecureDrop
150 # under Tails 2.x. If updates are being applied, don't try to pile
151 # on with more apt requests.
152 sdlog.error(("Failed to install apt dependencies. Check network"
153 " connection and try again."))
154 raise
155
156
157 def envsetup(args):
158 """Installs Admin tooling required for managing SecureDrop. Specifically:
159
160 * updates apt-cache
161 * installs apt packages for Python virtualenv
162 * creates virtualenv
163 * installs pip packages inside virtualenv
164
165 The virtualenv is created within the Persistence volume in Tails, so that
166 Ansible is available to the Admin on subsequent boots without requiring
167 installation of packages again.
168 """
169 # clean up tails 3.x venv when migrating to tails 4.x
170 clean_up_tails3_venv(VENV_DIR)
171
172 # virtualenv doesnt exist? Install dependencies and create
173 if not os.path.exists(VENV_DIR):
174
175 install_apt_dependencies(args)
176
177 # Technically you can create a virtualenv from within python
178 # but pip can only be run over tor on tails, and debugging that
179 # along with instaling a third-party dependency is not worth
180 # the effort here.
181 sdlog.info("Setting up virtualenv")
182 try:
183 sdlog.debug(subprocess.check_output(
184 maybe_torify() + ['virtualenv', '--python=python3', VENV_DIR],
185 stderr=subprocess.STDOUT))
186 except subprocess.CalledProcessError as e:
187 sdlog.debug(e.output)
188 sdlog.error(("Unable to create virtualenv. Check network settings"
189 " and try again."))
190 raise
191 else:
192 sdlog.info("Virtualenv already exists, not creating")
193
194 install_pip_dependencies(args)
195 if os.path.exists(os.path.join(DIR, 'setup.py')):
196 install_pip_self(args)
197
198 sdlog.info("Finished installing SecureDrop dependencies")
199
200
201 def install_pip_self(args):
202 pip_install_cmd = [
203 os.path.join(VENV_DIR, 'bin', 'pip3'),
204 'install', '-e', DIR
205 ]
206 try:
207 subprocess.check_output(maybe_torify() + pip_install_cmd,
208 stderr=subprocess.STDOUT)
209 except subprocess.CalledProcessError as e:
210 sdlog.debug(e.output)
211 sdlog.error("Unable to install self, run with -v for more information")
212 raise
213
214
215 def install_pip_dependencies(args, pip_install_cmd=[
216 os.path.join(VENV_DIR, 'bin', 'pip3'),
217 'install',
218 # Specify requirements file.
219 '-r', os.path.join(DIR, 'requirements.txt'),
220 '--require-hashes',
221 # Make sure to upgrade packages only if necessary.
222 '-U', '--upgrade-strategy', 'only-if-needed',
223 ]):
224 """
225 Install Python dependencies via pip into virtualenv.
226 """
227
228 sdlog.info("Checking Python dependencies for securedrop-admin")
229 try:
230 pip_output = subprocess.check_output(maybe_torify() + pip_install_cmd,
231 stderr=subprocess.STDOUT)
232 except subprocess.CalledProcessError as e:
233 sdlog.debug(e.output)
234 sdlog.error(("Failed to install pip dependencies. Check network"
235 " connection and try again."))
236 raise
237
238 sdlog.debug(pip_output)
239 if "Successfully installed" in str(pip_output):
240 sdlog.info("Python dependencies for securedrop-admin upgraded")
241 else:
242 sdlog.info("Python dependencies for securedrop-admin are up-to-date")
243
244
245 def parse_argv(argv):
246 parser = argparse.ArgumentParser()
247 parser.add_argument('-v', action='store_true', default=False,
248 help="Increase verbosity on output")
249 parser.set_defaults(func=envsetup)
250
251 subparsers = parser.add_subparsers()
252
253 envsetup_parser = subparsers.add_parser(
254 'envsetup',
255 help='Set up the admin virtualenv.'
256 )
257 envsetup_parser.set_defaults(func=envsetup)
258
259 checkenv_parser = subparsers.add_parser(
260 'checkenv',
261 help='Check that the admin virtualenv is properly set up.'
262 )
263 checkenv_parser.set_defaults(func=checkenv)
264
265 return parser.parse_args(argv)
266
267
268 if __name__ == "__main__":
269 args = parse_argv(sys.argv[1:])
270 setup_logger(args.v)
271
272 try:
273 args.func(args)
274 except Exception:
275 sys.exit(1)
276 else:
277 sys.exit(0)
278
[end of admin/bootstrap.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/admin/bootstrap.py b/admin/bootstrap.py
--- a/admin/bootstrap.py
+++ b/admin/bootstrap.py
@@ -154,7 +154,7 @@
raise
-def envsetup(args):
+def envsetup(args, virtualenv_dir=VENV_DIR):
"""Installs Admin tooling required for managing SecureDrop. Specifically:
* updates apt-cache
@@ -167,10 +167,10 @@
installation of packages again.
"""
# clean up tails 3.x venv when migrating to tails 4.x
- clean_up_tails3_venv(VENV_DIR)
+ clean_up_tails3_venv(virtualenv_dir)
# virtualenv doesnt exist? Install dependencies and create
- if not os.path.exists(VENV_DIR):
+ if not os.path.exists(virtualenv_dir):
install_apt_dependencies(args)
@@ -181,12 +181,18 @@
sdlog.info("Setting up virtualenv")
try:
sdlog.debug(subprocess.check_output(
- maybe_torify() + ['virtualenv', '--python=python3', VENV_DIR],
+ maybe_torify() + ['virtualenv',
+ '--python=python3',
+ virtualenv_dir
+ ],
stderr=subprocess.STDOUT))
except subprocess.CalledProcessError as e:
sdlog.debug(e.output)
sdlog.error(("Unable to create virtualenv. Check network settings"
" and try again."))
+ sdlog.debug("Cleaning up virtualenv")
+ if os.path.exists(virtualenv_dir):
+ shutil.rmtree(virtualenv_dir)
raise
else:
sdlog.info("Virtualenv already exists, not creating")
| {"golden_diff": "diff --git a/admin/bootstrap.py b/admin/bootstrap.py\n--- a/admin/bootstrap.py\n+++ b/admin/bootstrap.py\n@@ -154,7 +154,7 @@\n raise\n \n \n-def envsetup(args):\n+def envsetup(args, virtualenv_dir=VENV_DIR):\n \"\"\"Installs Admin tooling required for managing SecureDrop. Specifically:\n \n * updates apt-cache\n@@ -167,10 +167,10 @@\n installation of packages again.\n \"\"\"\n # clean up tails 3.x venv when migrating to tails 4.x\n- clean_up_tails3_venv(VENV_DIR)\n+ clean_up_tails3_venv(virtualenv_dir)\n \n # virtualenv doesnt exist? Install dependencies and create\n- if not os.path.exists(VENV_DIR):\n+ if not os.path.exists(virtualenv_dir):\n \n install_apt_dependencies(args)\n \n@@ -181,12 +181,18 @@\n sdlog.info(\"Setting up virtualenv\")\n try:\n sdlog.debug(subprocess.check_output(\n- maybe_torify() + ['virtualenv', '--python=python3', VENV_DIR],\n+ maybe_torify() + ['virtualenv',\n+ '--python=python3',\n+ virtualenv_dir\n+ ],\n stderr=subprocess.STDOUT))\n except subprocess.CalledProcessError as e:\n sdlog.debug(e.output)\n sdlog.error((\"Unable to create virtualenv. Check network settings\"\n \" and try again.\"))\n+ sdlog.debug(\"Cleaning up virtualenv\")\n+ if os.path.exists(virtualenv_dir):\n+ shutil.rmtree(virtualenv_dir)\n raise\n else:\n sdlog.info(\"Virtualenv already exists, not creating\")\n", "issue": "Python3 admin virtualenv is broken if improperly set up (doesn't contain pip3)\n## Description\r\n\r\nInitially discovered while reviewing https://github.com/freedomofpress/securedrop/pull/4927#issuecomment-543763957 , if the admin virtualenv is improperly setup, one must manually remove `admin/.venv3` to fix the virtualenv.\r\n\r\n## Steps to Reproduce\r\nThis is very hard to reproduce, but you can do it by pressing ctrl+c when the virtualenv is being setup as part of an initial `./securedrop-admin setup` run. Subsequent `./securedrop-admin setup` invocations will no longer work\r\n\r\n## Expected Behavior\r\n`./securedrop-admin setup` should not fail\r\n\r\n## Actual Behavior\r\n`./securedrop-admin setup` fails with the following output:\r\n```\r\namnesia@amnesia:~/Persistent/securedrop$ ./securedrop-admin setup\r\nINFO: Virtualenv already exists, not creating\r\nINFO: Checking Python dependencies for securedrop-admin\r\nERROR: Failed to install pip dependencies. Check network connection and try again.\r\n```\n", "before_files": [{"content": "# -*- mode: python; coding: utf-8 -*-\n#\n# Copyright (C) 2013-2018 Freedom of the Press Foundation & al\n# Copyright (C) 2018 Loic Dachary <[email protected]>\n#\n# This program is free software: you can redistribute it and/or modify\n# it under the terms of the GNU General Public License as published by\n# the Free Software Foundation, either version 3 of the License, or\n# (at your option) any later version.\n#\n# This program is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n# GNU General Public License for more details.\n#\n# You should have received a copy of the GNU General Public License\n# along with this program. 
If not, see <http://www.gnu.org/licenses/>.\n#\n\nimport argparse\nimport logging\nimport os\nimport shutil\nimport subprocess\nimport sys\n\nsdlog = logging.getLogger(__name__)\n\nDIR = os.path.dirname(os.path.realpath(__file__))\nVENV_DIR = os.path.join(DIR, \".venv3\")\n\n\ndef setup_logger(verbose=False):\n \"\"\" Configure logging handler \"\"\"\n # Set default level on parent\n sdlog.setLevel(logging.DEBUG)\n level = logging.DEBUG if verbose else logging.INFO\n\n stdout = logging.StreamHandler(sys.stdout)\n stdout.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))\n stdout.setLevel(level)\n sdlog.addHandler(stdout)\n\n\ndef run_command(command):\n \"\"\"\n Wrapper function to display stdout for running command,\n similar to how shelling out in a Bash script displays rolling output.\n\n Yields a list of the stdout from the `command`, and raises a\n CalledProcessError if `command` returns non-zero.\n \"\"\"\n popen = subprocess.Popen(command,\n stdout=subprocess.PIPE,\n stderr=subprocess.STDOUT)\n for stdout_line in iter(popen.stdout.readline, b\"\"):\n yield stdout_line\n popen.stdout.close()\n return_code = popen.wait()\n if return_code:\n raise subprocess.CalledProcessError(return_code, command)\n\n\ndef is_tails():\n try:\n id = subprocess.check_output('lsb_release --id --short',\n shell=True).decode('utf-8').strip()\n except subprocess.CalledProcessError:\n id = None\n\n # dirty hack to unreliably detect Tails 4.0~beta2\n if id == 'Debian':\n if os.uname()[1] == 'amnesia':\n id = 'Tails'\n\n return id == 'Tails'\n\n\ndef clean_up_tails3_venv(virtualenv_dir=VENV_DIR):\n \"\"\"\n Tails 3.x, based on debian stretch uses libpython3.5, whereas Tails 4.x is\n based on Debian Buster and uses libpython3.7. This means that the Tails 3.x\n virtualenv will not work under Tails 4.x, and will need to be destroyed and\n rebuilt. We can detect if the version of libpython is 3.5 in the\n admin/.venv3/ folder, and delete it if that's the case. This will ensure a\n smooth upgrade from Tails 3.x to Tails 4.x.\n \"\"\"\n if is_tails():\n try:\n dist = subprocess.check_output('lsb_release --codename --short',\n shell=True).strip()\n except subprocess.CalledProcessError:\n dist = None\n\n # tails4 is based on buster\n if dist == b'buster':\n python_lib_path = os.path.join(virtualenv_dir, \"lib/python3.5\")\n if os.path.exists(os.path.join(python_lib_path)):\n sdlog.info(\n \"Tails 3 Python 3 virtualenv detected. \"\n \"Removing it.\"\n )\n shutil.rmtree(virtualenv_dir)\n sdlog.info(\"Tails 3 Python 3 virtualenv deleted.\")\n\n\ndef checkenv(args):\n clean_up_tails3_venv(VENV_DIR)\n if not os.path.exists(os.path.join(VENV_DIR, \"bin/activate\")):\n sdlog.error('Please run \"securedrop-admin setup\".')\n sys.exit(1)\n\n\ndef maybe_torify():\n if is_tails():\n return ['torify']\n else:\n return []\n\n\ndef install_apt_dependencies(args):\n \"\"\"\n Install apt dependencies in Tails. 
In order to install Ansible in\n a virtualenv, first there are a number of Python prerequisites.\n \"\"\"\n sdlog.info(\"Installing SecureDrop Admin dependencies\")\n sdlog.info((\"You'll be prompted for the temporary Tails admin password,\"\n \" which was set on Tails login screen\"))\n\n apt_command = ['sudo', 'su', '-c',\n \"apt-get update && \\\n apt-get -q -o=Dpkg::Use-Pty=0 install -y \\\n python3-virtualenv \\\n python3-yaml \\\n python3-pip \\\n ccontrol \\\n virtualenv \\\n libffi-dev \\\n libssl-dev \\\n libpython3-dev\",\n ]\n\n try:\n # Print command results in real-time, to keep Admin apprised\n # of progress during long-running command.\n for output_line in run_command(apt_command):\n print(output_line.decode('utf-8').rstrip())\n except subprocess.CalledProcessError:\n # Tails supports apt persistence, which was used by SecureDrop\n # under Tails 2.x. If updates are being applied, don't try to pile\n # on with more apt requests.\n sdlog.error((\"Failed to install apt dependencies. Check network\"\n \" connection and try again.\"))\n raise\n\n\ndef envsetup(args):\n \"\"\"Installs Admin tooling required for managing SecureDrop. Specifically:\n\n * updates apt-cache\n * installs apt packages for Python virtualenv\n * creates virtualenv\n * installs pip packages inside virtualenv\n\n The virtualenv is created within the Persistence volume in Tails, so that\n Ansible is available to the Admin on subsequent boots without requiring\n installation of packages again.\n \"\"\"\n # clean up tails 3.x venv when migrating to tails 4.x\n clean_up_tails3_venv(VENV_DIR)\n\n # virtualenv doesnt exist? Install dependencies and create\n if not os.path.exists(VENV_DIR):\n\n install_apt_dependencies(args)\n\n # Technically you can create a virtualenv from within python\n # but pip can only be run over tor on tails, and debugging that\n # along with instaling a third-party dependency is not worth\n # the effort here.\n sdlog.info(\"Setting up virtualenv\")\n try:\n sdlog.debug(subprocess.check_output(\n maybe_torify() + ['virtualenv', '--python=python3', VENV_DIR],\n stderr=subprocess.STDOUT))\n except subprocess.CalledProcessError as e:\n sdlog.debug(e.output)\n sdlog.error((\"Unable to create virtualenv. 
Check network settings\"\n \" and try again.\"))\n raise\n else:\n sdlog.info(\"Virtualenv already exists, not creating\")\n\n install_pip_dependencies(args)\n if os.path.exists(os.path.join(DIR, 'setup.py')):\n install_pip_self(args)\n\n sdlog.info(\"Finished installing SecureDrop dependencies\")\n\n\ndef install_pip_self(args):\n pip_install_cmd = [\n os.path.join(VENV_DIR, 'bin', 'pip3'),\n 'install', '-e', DIR\n ]\n try:\n subprocess.check_output(maybe_torify() + pip_install_cmd,\n stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n sdlog.debug(e.output)\n sdlog.error(\"Unable to install self, run with -v for more information\")\n raise\n\n\ndef install_pip_dependencies(args, pip_install_cmd=[\n os.path.join(VENV_DIR, 'bin', 'pip3'),\n 'install',\n # Specify requirements file.\n '-r', os.path.join(DIR, 'requirements.txt'),\n '--require-hashes',\n # Make sure to upgrade packages only if necessary.\n '-U', '--upgrade-strategy', 'only-if-needed',\n]):\n \"\"\"\n Install Python dependencies via pip into virtualenv.\n \"\"\"\n\n sdlog.info(\"Checking Python dependencies for securedrop-admin\")\n try:\n pip_output = subprocess.check_output(maybe_torify() + pip_install_cmd,\n stderr=subprocess.STDOUT)\n except subprocess.CalledProcessError as e:\n sdlog.debug(e.output)\n sdlog.error((\"Failed to install pip dependencies. Check network\"\n \" connection and try again.\"))\n raise\n\n sdlog.debug(pip_output)\n if \"Successfully installed\" in str(pip_output):\n sdlog.info(\"Python dependencies for securedrop-admin upgraded\")\n else:\n sdlog.info(\"Python dependencies for securedrop-admin are up-to-date\")\n\n\ndef parse_argv(argv):\n parser = argparse.ArgumentParser()\n parser.add_argument('-v', action='store_true', default=False,\n help=\"Increase verbosity on output\")\n parser.set_defaults(func=envsetup)\n\n subparsers = parser.add_subparsers()\n\n envsetup_parser = subparsers.add_parser(\n 'envsetup',\n help='Set up the admin virtualenv.'\n )\n envsetup_parser.set_defaults(func=envsetup)\n\n checkenv_parser = subparsers.add_parser(\n 'checkenv',\n help='Check that the admin virtualenv is properly set up.'\n )\n checkenv_parser.set_defaults(func=checkenv)\n\n return parser.parse_args(argv)\n\n\nif __name__ == \"__main__\":\n args = parse_argv(sys.argv[1:])\n setup_logger(args.v)\n\n try:\n args.func(args)\n except Exception:\n sys.exit(1)\n else:\n sys.exit(0)\n", "path": "admin/bootstrap.py"}]} | 3,648 | 384 |
gh_patches_debug_6214 | rasdani/github-patches | git_diff | voicepaw__so-vits-svc-fork-1157 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Unable to use svc pre-sd with pyannote.audio 3.1.1
### Describe the bug
To use svc pre-sd for a long audio file with multiple speakers, I followed the [setup guide](https://github.com/voicepaw/so-vits-svc-fork/#before-training) and manually installed pyannote.audio, getting the latest version, 3.1.1.
Attempting to run svc pre-sd triggered the following error messages:
```
Model was trained with pyannote.audio 0.0.1, yours is 3.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.
Model was trained with torch 1.10.0+cu102, yours is 2.2.2+cu121. Bad things might happen unless you revert torch to 1.x.
```
According to [PyPI](https://pypi.org/project/pyannote.audio/3.1.1/), pyannote.audio 3.1.1 works with speaker-diarization-3.1. So, it’s necessary to explicitly specify this version in the code.
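A minimal sketch of what that pin might look like, mirroring the `Pipeline.from_pretrained` call in `preprocess_speaker_diarization.py` below (the token value and device handling here are placeholders, not the project's actual change):
```python
from pyannote.audio import Pipeline
import torch

# Pin the checkpoint that matches pyannote.audio 3.1.x instead of the
# unversioned "pyannote/speaker-diarization" name.
pipeline = Pipeline.from_pretrained(
    "pyannote/speaker-diarization-3.1",
    use_auth_token="hf_...",  # hypothetical Hugging Face token
)
pipeline = pipeline.to(torch.device("cuda"))  # optional, if CUDA is available
```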
### To Reproduce
1. Set up the environment. (I'm using torch 2.2.2+cu121.)
2. Install so-vits-svc-fork and its dependencies.
3. Install pyannote.audio with `pip3 install pyannote-audio`.
4. Prepare your data and organize data folders.
5. Run svc pre-sd with options suited to your data.
### Additional context
_No response_
### Version
4.1.61
### Platform
WSL-Ubuntu 22.04 LTS
</issue>
<code>
[start of src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py]
1 from __future__ import annotations
2
3 from collections import defaultdict
4 from logging import getLogger
5 from pathlib import Path
6
7 import librosa
8 import soundfile as sf
9 import torch
10 from joblib import Parallel, delayed
11 from pyannote.audio import Pipeline
12 from tqdm import tqdm
13 from tqdm_joblib import tqdm_joblib
14
15 LOG = getLogger(__name__)
16
17
18 def _process_one(
19 input_path: Path,
20 output_dir: Path,
21 sr: int,
22 *,
23 min_speakers: int = 1,
24 max_speakers: int = 1,
25 huggingface_token: str | None = None,
26 ) -> None:
27 try:
28 audio, sr = librosa.load(input_path, sr=sr, mono=True)
29 except Exception as e:
30 LOG.warning(f"Failed to read {input_path}: {e}")
31 return
32 pipeline = Pipeline.from_pretrained(
33 "pyannote/speaker-diarization", use_auth_token=huggingface_token
34 )
35 if pipeline is None:
36 raise ValueError("Failed to load pipeline")
37 pipeline = pipeline.to(torch.device("cuda"))
38 LOG.info(f"Processing {input_path}. This may take a while...")
39 diarization = pipeline(
40 input_path, min_speakers=min_speakers, max_speakers=max_speakers
41 )
42
43 LOG.info(f"Found {len(diarization)} tracks, writing to {output_dir}")
44 speaker_count = defaultdict(int)
45
46 output_dir.mkdir(parents=True, exist_ok=True)
47 for segment, track, speaker in tqdm(
48 list(diarization.itertracks(yield_label=True)), desc=f"Writing {input_path}"
49 ):
50 if segment.end - segment.start < 1:
51 continue
52 speaker_count[speaker] += 1
53 audio_cut = audio[int(segment.start * sr) : int(segment.end * sr)]
54 sf.write(
55 (output_dir / f"{speaker}_{speaker_count[speaker]}.wav"),
56 audio_cut,
57 sr,
58 )
59
60 LOG.info(f"Speaker count: {speaker_count}")
61
62
63 def preprocess_speaker_diarization(
64 input_dir: Path | str,
65 output_dir: Path | str,
66 sr: int,
67 *,
68 min_speakers: int = 1,
69 max_speakers: int = 1,
70 huggingface_token: str | None = None,
71 n_jobs: int = -1,
72 ) -> None:
73 if huggingface_token is not None and not huggingface_token.startswith("hf_"):
74 LOG.warning("Huggingface token probably should start with hf_")
75 if not torch.cuda.is_available():
76 LOG.warning("CUDA is not available. This will be extremely slow.")
77 input_dir = Path(input_dir)
78 output_dir = Path(output_dir)
79 input_dir.mkdir(parents=True, exist_ok=True)
80 output_dir.mkdir(parents=True, exist_ok=True)
81 input_paths = list(input_dir.rglob("*.*"))
82 with tqdm_joblib(desc="Preprocessing speaker diarization", total=len(input_paths)):
83 Parallel(n_jobs=n_jobs)(
84 delayed(_process_one)(
85 input_path,
86 output_dir / input_path.relative_to(input_dir).parent / input_path.stem,
87 sr,
88 max_speakers=max_speakers,
89 min_speakers=min_speakers,
90 huggingface_token=huggingface_token,
91 )
92 for input_path in input_paths
93 )
94
[end of src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py b/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py
--- a/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py
+++ b/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py
@@ -30,7 +30,7 @@
LOG.warning(f"Failed to read {input_path}: {e}")
return
pipeline = Pipeline.from_pretrained(
- "pyannote/speaker-diarization", use_auth_token=huggingface_token
+ "pyannote/speaker-diarization-3.1", use_auth_token=huggingface_token
)
if pipeline is None:
raise ValueError("Failed to load pipeline")
| {"golden_diff": "diff --git a/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py b/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py\n--- a/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py\n+++ b/src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py\n@@ -30,7 +30,7 @@\n LOG.warning(f\"Failed to read {input_path}: {e}\")\n return\n pipeline = Pipeline.from_pretrained(\n- \"pyannote/speaker-diarization\", use_auth_token=huggingface_token\n+ \"pyannote/speaker-diarization-3.1\", use_auth_token=huggingface_token\n )\n if pipeline is None:\n raise ValueError(\"Failed to load pipeline\")\n", "issue": "Unable to use svc pre-sd with pyannote.audio 3.1.1\n### Describe the bug\n\nTo use svc pre-sd for a long audio file with multiple speakers, I followed the [setup guide](https://github.com/voicepaw/so-vits-svc-fork/#before-training) and manually installed pyannote.audio, getting the latest version, 3.1.1.\r\n\r\nAttempting to run svc pre-sd triggered the following error messages:\r\n\r\n```\r\nModel was trained with pyannote.audio 0.0.1, yours is 3.1.1. Bad things might happen unless you revert pyannote.audio to 0.x.\r\n\r\nModel was trained with torch 1.10.0+cu102, yours is 2.2.2+cu121. Bad things might happen unless you revert torch to 1.x.\r\n```\r\n\r\nAccording to [PyPI](https://pypi.org/project/pyannote.audio/3.1.1/), pyannote.audio 3.1.1 works with speaker-diarization-3.1. So, it\u2019s necessary to explicitly specify this version in the code.\n\n### To Reproduce\n\n1. Set up the environment. (I'm using torch 2.2.2+cu121.)\r\n2. Install so-vits-svc-fork and its dependencies.\r\n3. Install pyannote.audio with `pip3 install pyannote-audio`.\r\n4. Prepare your data and organize data folders.\r\n5. Run svc pre-sd with options suited to your data.\n\n### Additional context\n\n_No response_\n\n### Version\n\n4.1.61\n\n### Platform\n\nWSL-Ubuntu 22.04 LTS\n\n### Code of Conduct\n\n- [X] I agree to follow this project's Code of Conduct.\n\n### No Duplicate\n\n- [X] I have checked existing issues to avoid duplicates.\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom collections import defaultdict\nfrom logging import getLogger\nfrom pathlib import Path\n\nimport librosa\nimport soundfile as sf\nimport torch\nfrom joblib import Parallel, delayed\nfrom pyannote.audio import Pipeline\nfrom tqdm import tqdm\nfrom tqdm_joblib import tqdm_joblib\n\nLOG = getLogger(__name__)\n\n\ndef _process_one(\n input_path: Path,\n output_dir: Path,\n sr: int,\n *,\n min_speakers: int = 1,\n max_speakers: int = 1,\n huggingface_token: str | None = None,\n) -> None:\n try:\n audio, sr = librosa.load(input_path, sr=sr, mono=True)\n except Exception as e:\n LOG.warning(f\"Failed to read {input_path}: {e}\")\n return\n pipeline = Pipeline.from_pretrained(\n \"pyannote/speaker-diarization\", use_auth_token=huggingface_token\n )\n if pipeline is None:\n raise ValueError(\"Failed to load pipeline\")\n pipeline = pipeline.to(torch.device(\"cuda\"))\n LOG.info(f\"Processing {input_path}. 
This may take a while...\")\n diarization = pipeline(\n input_path, min_speakers=min_speakers, max_speakers=max_speakers\n )\n\n LOG.info(f\"Found {len(diarization)} tracks, writing to {output_dir}\")\n speaker_count = defaultdict(int)\n\n output_dir.mkdir(parents=True, exist_ok=True)\n for segment, track, speaker in tqdm(\n list(diarization.itertracks(yield_label=True)), desc=f\"Writing {input_path}\"\n ):\n if segment.end - segment.start < 1:\n continue\n speaker_count[speaker] += 1\n audio_cut = audio[int(segment.start * sr) : int(segment.end * sr)]\n sf.write(\n (output_dir / f\"{speaker}_{speaker_count[speaker]}.wav\"),\n audio_cut,\n sr,\n )\n\n LOG.info(f\"Speaker count: {speaker_count}\")\n\n\ndef preprocess_speaker_diarization(\n input_dir: Path | str,\n output_dir: Path | str,\n sr: int,\n *,\n min_speakers: int = 1,\n max_speakers: int = 1,\n huggingface_token: str | None = None,\n n_jobs: int = -1,\n) -> None:\n if huggingface_token is not None and not huggingface_token.startswith(\"hf_\"):\n LOG.warning(\"Huggingface token probably should start with hf_\")\n if not torch.cuda.is_available():\n LOG.warning(\"CUDA is not available. This will be extremely slow.\")\n input_dir = Path(input_dir)\n output_dir = Path(output_dir)\n input_dir.mkdir(parents=True, exist_ok=True)\n output_dir.mkdir(parents=True, exist_ok=True)\n input_paths = list(input_dir.rglob(\"*.*\"))\n with tqdm_joblib(desc=\"Preprocessing speaker diarization\", total=len(input_paths)):\n Parallel(n_jobs=n_jobs)(\n delayed(_process_one)(\n input_path,\n output_dir / input_path.relative_to(input_dir).parent / input_path.stem,\n sr,\n max_speakers=max_speakers,\n min_speakers=min_speakers,\n huggingface_token=huggingface_token,\n )\n for input_path in input_paths\n )\n", "path": "src/so_vits_svc_fork/preprocessing/preprocess_speaker_diarization.py"}]} | 1,874 | 183 |
gh_patches_debug_6467 | rasdani/github-patches | git_diff | getnikola__nikola-1145 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Never copy/list listings/*.py[co]
kinda related to invariance
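A minimal sketch of the kind of filter this implies, reusing the `ignored_extensions` tuple from the listings task below (where exactly the filter is applied is an assumption, not the actual fix):
```python
import os

ignored_extensions = (".pyc", ".pyo")
for root, dirs, files in os.walk("listings"):
    # Skip compiled Python files so they are never rendered or listed.
    files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]
```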
</issue>
<code>
[start of nikola/plugins/task/listings.py]
1 # -*- coding: utf-8 -*-
2
3 # Copyright © 2012-2014 Roberto Alsina and others.
4
5 # Permission is hereby granted, free of charge, to any
6 # person obtaining a copy of this software and associated
7 # documentation files (the "Software"), to deal in the
8 # Software without restriction, including without limitation
9 # the rights to use, copy, modify, merge, publish,
10 # distribute, sublicense, and/or sell copies of the
11 # Software, and to permit persons to whom the Software is
12 # furnished to do so, subject to the following conditions:
13 #
14 # The above copyright notice and this permission notice
15 # shall be included in all copies or substantial portions of
16 # the Software.
17 #
18 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
19 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
20 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
21 # PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
22 # OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
23 # OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
24 # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26
27 from __future__ import unicode_literals, print_function
28
29 import os
30
31 from pygments import highlight
32 from pygments.lexers import get_lexer_for_filename, TextLexer
33 from pygments.formatters import HtmlFormatter
34
35 from nikola.plugin_categories import Task
36 from nikola import utils
37
38
39 class Listings(Task):
40 """Render pretty listings."""
41
42 name = "render_listings"
43
44 def set_site(self, site):
45 site.register_path_handler('listing', self.listing_path)
46 return super(Listings, self).set_site(site)
47
48 def gen_tasks(self):
49 """Render pretty code listings."""
50 kw = {
51 "default_lang": self.site.config["DEFAULT_LANG"],
52 "listings_folder": self.site.config["LISTINGS_FOLDER"],
53 "output_folder": self.site.config["OUTPUT_FOLDER"],
54 "index_file": self.site.config["INDEX_FILE"],
55 }
56
57 # Things to ignore in listings
58 ignored_extensions = (".pyc", ".pyo")
59
60 def render_listing(in_name, out_name, folders=[], files=[]):
61 if in_name:
62 with open(in_name, 'r') as fd:
63 try:
64 lexer = get_lexer_for_filename(in_name)
65 except:
66 lexer = TextLexer()
67 code = highlight(fd.read(), lexer,
68 HtmlFormatter(cssclass='code',
69 linenos="table", nowrap=False,
70 lineanchors=utils.slugify(in_name),
71 anchorlinenos=True))
72 title = os.path.basename(in_name)
73 else:
74 code = ''
75 title = ''
76 crumbs = utils.get_crumbs(os.path.relpath(out_name,
77 kw['output_folder']),
78 is_file=True)
79 context = {
80 'code': code,
81 'title': title,
82 'crumbs': crumbs,
83 'lang': kw['default_lang'],
84 'folders': folders,
85 'files': files,
86 'description': title,
87 }
88 self.site.render_template('listing.tmpl', out_name,
89 context)
90
91 yield self.group_task()
92
93 template_deps = self.site.template_system.template_deps('listing.tmpl')
94 for root, dirs, files in os.walk(kw['listings_folder']):
95 # Render all files
96 out_name = os.path.join(
97 kw['output_folder'],
98 root, kw['index_file']
99 )
100 yield {
101 'basename': self.name,
102 'name': out_name,
103 'file_dep': template_deps,
104 'targets': [out_name],
105 'actions': [(render_listing, [None, out_name, dirs, files])],
106 # This is necessary to reflect changes in blog title,
107 # sidebar links, etc.
108 'uptodate': [utils.config_changed(
109 self.site.GLOBAL_CONTEXT)],
110 'clean': True,
111 }
112 for f in files:
113 ext = os.path.splitext(f)[-1]
114 if ext in ignored_extensions:
115 continue
116 in_name = os.path.join(root, f)
117 out_name = os.path.join(
118 kw['output_folder'],
119 root,
120 f) + '.html'
121 yield {
122 'basename': self.name,
123 'name': out_name,
124 'file_dep': template_deps + [in_name],
125 'targets': [out_name],
126 'actions': [(render_listing, [in_name, out_name])],
127 # This is necessary to reflect changes in blog title,
128 # sidebar links, etc.
129 'uptodate': [utils.config_changed(
130 self.site.GLOBAL_CONTEXT)],
131 'clean': True,
132 }
133
134 def listing_path(self, name, lang):
135 return [_f for _f in [self.site.config['LISTINGS_FOLDER'], name +
136 '.html'] if _f]
137
[end of nikola/plugins/task/listings.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/nikola/plugins/task/listings.py b/nikola/plugins/task/listings.py
--- a/nikola/plugins/task/listings.py
+++ b/nikola/plugins/task/listings.py
@@ -92,6 +92,7 @@
template_deps = self.site.template_system.template_deps('listing.tmpl')
for root, dirs, files in os.walk(kw['listings_folder']):
+ files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]
# Render all files
out_name = os.path.join(
kw['output_folder'],
| {"golden_diff": "diff --git a/nikola/plugins/task/listings.py b/nikola/plugins/task/listings.py\n--- a/nikola/plugins/task/listings.py\n+++ b/nikola/plugins/task/listings.py\n@@ -92,6 +92,7 @@\n \n template_deps = self.site.template_system.template_deps('listing.tmpl')\n for root, dirs, files in os.walk(kw['listings_folder']):\n+ files = [f for f in files if os.path.splitext(f)[-1] not in ignored_extensions]\n # Render all files\n out_name = os.path.join(\n kw['output_folder'],\n", "issue": "Never copy/list listings/*.py[co]\nkinda related to invariance\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright \u00a9 2012-2014 Roberto Alsina and others.\n\n# Permission is hereby granted, free of charge, to any\n# person obtaining a copy of this software and associated\n# documentation files (the \"Software\"), to deal in the\n# Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the\n# Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice\n# shall be included in all copies or substantial portions of\n# the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY\n# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE\n# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR\n# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS\n# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR\n# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE\n# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nfrom __future__ import unicode_literals, print_function\n\nimport os\n\nfrom pygments import highlight\nfrom pygments.lexers import get_lexer_for_filename, TextLexer\nfrom pygments.formatters import HtmlFormatter\n\nfrom nikola.plugin_categories import Task\nfrom nikola import utils\n\n\nclass Listings(Task):\n \"\"\"Render pretty listings.\"\"\"\n\n name = \"render_listings\"\n\n def set_site(self, site):\n site.register_path_handler('listing', self.listing_path)\n return super(Listings, self).set_site(site)\n\n def gen_tasks(self):\n \"\"\"Render pretty code listings.\"\"\"\n kw = {\n \"default_lang\": self.site.config[\"DEFAULT_LANG\"],\n \"listings_folder\": self.site.config[\"LISTINGS_FOLDER\"],\n \"output_folder\": self.site.config[\"OUTPUT_FOLDER\"],\n \"index_file\": self.site.config[\"INDEX_FILE\"],\n }\n\n # Things to ignore in listings\n ignored_extensions = (\".pyc\", \".pyo\")\n\n def render_listing(in_name, out_name, folders=[], files=[]):\n if in_name:\n with open(in_name, 'r') as fd:\n try:\n lexer = get_lexer_for_filename(in_name)\n except:\n lexer = TextLexer()\n code = highlight(fd.read(), lexer,\n HtmlFormatter(cssclass='code',\n linenos=\"table\", nowrap=False,\n lineanchors=utils.slugify(in_name),\n anchorlinenos=True))\n title = os.path.basename(in_name)\n else:\n code = ''\n title = ''\n crumbs = utils.get_crumbs(os.path.relpath(out_name,\n kw['output_folder']),\n is_file=True)\n context = {\n 'code': code,\n 'title': title,\n 'crumbs': crumbs,\n 'lang': kw['default_lang'],\n 'folders': folders,\n 'files': files,\n 'description': title,\n }\n self.site.render_template('listing.tmpl', out_name,\n context)\n\n yield self.group_task()\n\n template_deps = self.site.template_system.template_deps('listing.tmpl')\n for root, dirs, 
files in os.walk(kw['listings_folder']):\n # Render all files\n out_name = os.path.join(\n kw['output_folder'],\n root, kw['index_file']\n )\n yield {\n 'basename': self.name,\n 'name': out_name,\n 'file_dep': template_deps,\n 'targets': [out_name],\n 'actions': [(render_listing, [None, out_name, dirs, files])],\n # This is necessary to reflect changes in blog title,\n # sidebar links, etc.\n 'uptodate': [utils.config_changed(\n self.site.GLOBAL_CONTEXT)],\n 'clean': True,\n }\n for f in files:\n ext = os.path.splitext(f)[-1]\n if ext in ignored_extensions:\n continue\n in_name = os.path.join(root, f)\n out_name = os.path.join(\n kw['output_folder'],\n root,\n f) + '.html'\n yield {\n 'basename': self.name,\n 'name': out_name,\n 'file_dep': template_deps + [in_name],\n 'targets': [out_name],\n 'actions': [(render_listing, [in_name, out_name])],\n # This is necessary to reflect changes in blog title,\n # sidebar links, etc.\n 'uptodate': [utils.config_changed(\n self.site.GLOBAL_CONTEXT)],\n 'clean': True,\n }\n\n def listing_path(self, name, lang):\n return [_f for _f in [self.site.config['LISTINGS_FOLDER'], name +\n '.html'] if _f]\n", "path": "nikola/plugins/task/listings.py"}]} | 1,899 | 132 |
gh_patches_debug_20260 | rasdani/github-patches | git_diff | quantumlib__Cirq-5731 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Qasm output decomposes single qubit MatrixGate into multiple operations instead of one u3 operation
```
import cirq
c = cirq.Circuit(
cirq.MatrixGate(cirq.unitary(cirq.H) @ cirq.unitary(cirq.T)).on(cirq.LineQubit(0)),
)
print(c.to_qasm())
```
Outputs:
```
OPENQASM 2.0;
include "qelib1.inc";
// Qubits: [q(0)]
qreg q[1];
// Gate: [[ 0.707+0.j 0.5 +0.5j]
// [ 0.707+0.j -0.5 -0.5j]]
rz(pi*0.75) q[0];
sx q[0];
s q[0];
```
But should output:
```
// Generated from Cirq v0.15.0
OPENQASM 2.0;
include "qelib1.inc";
// Qubits: [q(0)]
qreg q[1];
u3(pi*1.5,pi*1.0,pi*0.25) q[0];
```
I got this output by using `cirq.PhasedXZGate.from_matrix(...)` and turning that into qasm.
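A rough sketch of that workaround, using the same unitary as the snippet above (this is the reporter's manual path, not a proposed patch):
```python
import cirq

u = cirq.unitary(cirq.H) @ cirq.unitary(cirq.T)
c = cirq.Circuit(
    cirq.PhasedXZGate.from_matrix(u).on(cirq.LineQubit(0)),
)
print(c.to_qasm())  # emits a single u3(...) line instead of rz/sx/s
```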
</issue>
<code>
[start of cirq-core/cirq/ops/matrix_gates.py]
1 # Copyright 2018 The Cirq Developers
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Quantum gates defined by a matrix."""
16
17 from typing import Any, cast, Dict, Iterable, Optional, Tuple, TYPE_CHECKING
18
19 import numpy as np
20
21 from cirq import linalg, protocols, _import
22 from cirq._compat import proper_repr
23 from cirq.ops import raw_types
24
25 if TYPE_CHECKING:
26 import cirq
27
28 single_qubit_decompositions = _import.LazyLoader(
29 'single_qubit_decompositions', globals(), 'cirq.transformers.analytical_decompositions'
30 )
31 two_qubit_to_cz = _import.LazyLoader(
32 'two_qubit_to_cz', globals(), 'cirq.transformers.analytical_decompositions'
33 )
34 three_qubit_decomposition = _import.LazyLoader(
35 'three_qubit_decomposition', globals(), 'cirq.transformers.analytical_decompositions'
36 )
37
38
39 class MatrixGate(raw_types.Gate):
40 r"""A unitary qubit or qudit gate defined entirely by its numpy matrix.
41
42 For example `cirq.MatrixGate(np.array([[0, 1j], [1, 0]]))` has the unitary matrix:
43
44 $$
45 \begin{bmatrix}
46 0 & i \\
47 1 & 0
48 \end{bmatrix}
49 $$
50 """
51
52 def __init__(
53 self,
54 matrix: np.ndarray,
55 *,
56 name: str = None,
57 qid_shape: Optional[Iterable[int]] = None,
58 unitary_check_rtol: float = 1e-5,
59 unitary_check_atol: float = 1e-8,
60 ) -> None:
61 """Initializes a matrix gate.
62
63 Args:
64 matrix: The matrix that defines the gate.
65 name: The optional name of the gate to be displayed.
66 qid_shape: The shape of state tensor that the matrix applies to.
67 If not specified, this value is inferred by assuming that the
68 matrix is supposed to apply to qubits.
69 unitary_check_rtol: The relative tolerance for checking whether the supplied matrix
70 is unitary. See `cirq.is_unitary`.
71 unitary_check_atol: The absolute tolerance for checking whether the supplied matrix
72 is unitary. See `cirq.is_unitary`.
73
74 Raises:
75 ValueError: If the matrix is not a square numpy array, if the matrix does not match
76 the `qid_shape`, if `qid_shape` is not supplied and the matrix dimension is
77 not a power of 2, or if the matrix not unitary (to the supplied precisions).
78 """
79 if len(matrix.shape) != 2 or matrix.shape[0] != matrix.shape[1]:
80 raise ValueError('`matrix` must be a square 2d numpy array.')
81
82 if qid_shape is None:
83 n = int(np.round(np.log2(matrix.shape[0] or 1)))
84 if 2**n != matrix.shape[0]:
85 raise ValueError(
86 f'Matrix width ({matrix.shape[0]}) is not a power of 2 and '
87 f'qid_shape is not specified.'
88 )
89 qid_shape = (2,) * n
90
91 self._matrix = matrix
92 self._qid_shape = tuple(qid_shape)
93 self._name = name
94 m = int(np.prod(self._qid_shape, dtype=np.int64))
95 if self._matrix.shape != (m, m):
96 raise ValueError(
97 'Wrong matrix shape for qid_shape.\n'
98 f'Matrix shape: {self._matrix.shape}\n'
99 f'qid_shape: {self._qid_shape}\n'
100 )
101
102 if not linalg.is_unitary(matrix, rtol=unitary_check_rtol, atol=unitary_check_atol):
103 raise ValueError(f'Not a unitary matrix: {self._matrix}')
104
105 def _json_dict_(self) -> Dict[str, Any]:
106 return {'matrix': self._matrix.tolist(), 'qid_shape': self._qid_shape}
107
108 @classmethod
109 def _from_json_dict_(cls, matrix, qid_shape, **kwargs):
110 return cls(matrix=np.array(matrix), qid_shape=qid_shape)
111
112 def _qid_shape_(self) -> Tuple[int, ...]:
113 return self._qid_shape
114
115 def __pow__(self, exponent: Any) -> 'MatrixGate':
116 if not isinstance(exponent, (int, float)):
117 return NotImplemented
118 e = cast(float, exponent)
119 new_mat = linalg.map_eigenvalues(self._matrix, lambda b: b**e)
120 return MatrixGate(new_mat, qid_shape=self._qid_shape)
121
122 def _phase_by_(self, phase_turns: float, qubit_index: int) -> 'MatrixGate':
123 if not isinstance(phase_turns, (int, float)):
124 return NotImplemented
125 if self._qid_shape[qubit_index] != 2:
126 return NotImplemented
127 result = np.copy(self._matrix).reshape(self._qid_shape * 2)
128
129 p = np.exp(2j * np.pi * phase_turns)
130 i = qubit_index
131 j = qubit_index + len(self._qid_shape)
132 result[linalg.slice_for_qubits_equal_to([i], 1)] *= p
133 result[linalg.slice_for_qubits_equal_to([j], 1)] *= np.conj(p)
134 return MatrixGate(matrix=result.reshape(self._matrix.shape), qid_shape=self._qid_shape)
135
136 def _decompose_(self, qubits: Tuple['cirq.Qid', ...]) -> 'cirq.OP_TREE':
137 if self._qid_shape == (2,):
138 return [
139 g.on(qubits[0])
140 for g in single_qubit_decompositions.single_qubit_matrix_to_gates(self._matrix)
141 ]
142 if self._qid_shape == (2,) * 2:
143 return two_qubit_to_cz.two_qubit_matrix_to_cz_operations(
144 *qubits, self._matrix, allow_partial_czs=True
145 )
146 if self._qid_shape == (2,) * 3:
147 return three_qubit_decomposition.three_qubit_matrix_to_operations(*qubits, self._matrix)
148 return NotImplemented
149
150 def _has_unitary_(self) -> bool:
151 return True
152
153 def _unitary_(self) -> np.ndarray:
154 return np.copy(self._matrix)
155
156 def _circuit_diagram_info_(
157 self, args: 'cirq.CircuitDiagramInfoArgs'
158 ) -> 'cirq.CircuitDiagramInfo':
159 n_qubits = len(self._qid_shape)
160 if self._name is not None:
161 symbols = (
162 [self._name] if n_qubits == 1 else [f'{self._name}[{i+1}]' for i in range(n_qubits)]
163 )
164 return protocols.CircuitDiagramInfo(wire_symbols=symbols)
165 main = _matrix_to_diagram_symbol(self._matrix, args)
166 rest = [f'#{i+1}' for i in range(1, n_qubits)]
167 return protocols.CircuitDiagramInfo(wire_symbols=[main, *rest])
168
169 def __hash__(self) -> int:
170 vals = tuple(v for _, v in np.ndenumerate(self._matrix))
171 return hash((MatrixGate, vals))
172
173 def _approx_eq_(self, other: Any, atol) -> bool:
174 if not isinstance(other, type(self)):
175 return NotImplemented
176 return np.allclose(self._matrix, other._matrix, rtol=0, atol=atol)
177
178 def __eq__(self, other):
179 if not isinstance(other, type(self)):
180 return NotImplemented
181 return self._qid_shape == other._qid_shape and np.array_equal(self._matrix, other._matrix)
182
183 def __ne__(self, other):
184 return not self == other
185
186 def __repr__(self) -> str:
187 if all(e == 2 for e in self._qid_shape):
188 return f'cirq.MatrixGate({proper_repr(self._matrix)})'
189 return f'cirq.MatrixGate({proper_repr(self._matrix)}, qid_shape={self._qid_shape})'
190
191 def __str__(self) -> str:
192 return str(self._matrix.round(3))
193
194
195 def _matrix_to_diagram_symbol(matrix: np.ndarray, args: 'protocols.CircuitDiagramInfoArgs') -> str:
196 if args.precision is not None:
197 matrix = matrix.round(args.precision)
198 result = str(matrix)
199 if args.use_unicode_characters:
200 lines = result.split('\n')
201 for i in range(len(lines)):
202 lines[i] = lines[i].replace('[[', '')
203 lines[i] = lines[i].replace(' [', '')
204 lines[i] = lines[i].replace(']', '')
205 w = max(len(line) for line in lines)
206 for i in range(len(lines)):
207 lines[i] = '│' + lines[i].ljust(w) + '│'
208 lines.insert(0, '┌' + ' ' * w + '┐')
209 lines.append('└' + ' ' * w + '┘')
210 result = '\n'.join(lines)
211 return result
212
[end of cirq-core/cirq/ops/matrix_gates.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/cirq-core/cirq/ops/matrix_gates.py b/cirq-core/cirq/ops/matrix_gates.py
--- a/cirq-core/cirq/ops/matrix_gates.py
+++ b/cirq-core/cirq/ops/matrix_gates.py
@@ -20,7 +20,7 @@
from cirq import linalg, protocols, _import
from cirq._compat import proper_repr
-from cirq.ops import raw_types
+from cirq.ops import raw_types, phased_x_z_gate
if TYPE_CHECKING:
import cirq
@@ -166,6 +166,14 @@
rest = [f'#{i+1}' for i in range(1, n_qubits)]
return protocols.CircuitDiagramInfo(wire_symbols=[main, *rest])
+ def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:
+ args.validate_version('2.0')
+ if self._qid_shape == (2,):
+ return protocols.qasm(
+ phased_x_z_gate.PhasedXZGate.from_matrix(self._matrix), args=args, qubits=qubits
+ )
+ return None
+
def __hash__(self) -> int:
vals = tuple(v for _, v in np.ndenumerate(self._matrix))
return hash((MatrixGate, vals))
| {"golden_diff": "diff --git a/cirq-core/cirq/ops/matrix_gates.py b/cirq-core/cirq/ops/matrix_gates.py\n--- a/cirq-core/cirq/ops/matrix_gates.py\n+++ b/cirq-core/cirq/ops/matrix_gates.py\n@@ -20,7 +20,7 @@\n \n from cirq import linalg, protocols, _import\n from cirq._compat import proper_repr\n-from cirq.ops import raw_types\n+from cirq.ops import raw_types, phased_x_z_gate\n \n if TYPE_CHECKING:\n import cirq\n@@ -166,6 +166,14 @@\n rest = [f'#{i+1}' for i in range(1, n_qubits)]\n return protocols.CircuitDiagramInfo(wire_symbols=[main, *rest])\n \n+ def _qasm_(self, args: 'cirq.QasmArgs', qubits: Tuple['cirq.Qid', ...]) -> Optional[str]:\n+ args.validate_version('2.0')\n+ if self._qid_shape == (2,):\n+ return protocols.qasm(\n+ phased_x_z_gate.PhasedXZGate.from_matrix(self._matrix), args=args, qubits=qubits\n+ )\n+ return None\n+\n def __hash__(self) -> int:\n vals = tuple(v for _, v in np.ndenumerate(self._matrix))\n return hash((MatrixGate, vals))\n", "issue": "Qasm output decomposes single qubit MatrixGate into multiple operations instead of one u3 operation\n```\r\nimport cirq\r\n\r\nc = cirq.Circuit(\r\n cirq.MatrixGate(cirq.unitary(cirq.H) @ cirq.unitary(cirq.T)).on(cirq.LineQubit(0)),\r\n)\r\nprint(c.to_qasm())\r\n```\r\n\r\nOutputs:\r\n\r\n```\r\nOPENQASM 2.0;\r\ninclude \"qelib1.inc\";\r\n\r\n\r\n// Qubits: [q(0)]\r\nqreg q[1];\r\n\r\n\r\n// Gate: [[ 0.707+0.j 0.5 +0.5j]\r\n // [ 0.707+0.j -0.5 -0.5j]]\r\nrz(pi*0.75) q[0];\r\nsx q[0];\r\ns q[0];\r\n```\r\n\r\nBut should output:\r\n\r\n```\r\n// Generated from Cirq v0.15.0\r\n\r\nOPENQASM 2.0;\r\ninclude \"qelib1.inc\";\r\n\r\n\r\n// Qubits: [q(0)]\r\nqreg q[1];\r\n\r\n\r\nu3(pi*1.5,pi*1.0,pi*0.25) q[0];\r\n```\r\n\r\nWhich I got by using `cirq.PhasedXZGate.from_matrix(...)` and turning that into qasm.\n", "before_files": [{"content": "# Copyright 2018 The Cirq Developers\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Quantum gates defined by a matrix.\"\"\"\n\nfrom typing import Any, cast, Dict, Iterable, Optional, Tuple, TYPE_CHECKING\n\nimport numpy as np\n\nfrom cirq import linalg, protocols, _import\nfrom cirq._compat import proper_repr\nfrom cirq.ops import raw_types\n\nif TYPE_CHECKING:\n import cirq\n\nsingle_qubit_decompositions = _import.LazyLoader(\n 'single_qubit_decompositions', globals(), 'cirq.transformers.analytical_decompositions'\n)\ntwo_qubit_to_cz = _import.LazyLoader(\n 'two_qubit_to_cz', globals(), 'cirq.transformers.analytical_decompositions'\n)\nthree_qubit_decomposition = _import.LazyLoader(\n 'three_qubit_decomposition', globals(), 'cirq.transformers.analytical_decompositions'\n)\n\n\nclass MatrixGate(raw_types.Gate):\n r\"\"\"A unitary qubit or qudit gate defined entirely by its numpy matrix.\n\n For example `cirq.MatrixGate(np.array([[0, 1j], [1, 0]]))` has the unitary matrix:\n\n $$\n \\begin{bmatrix}\n 0 & i \\\\\n 1 & 0\n \\end{bmatrix}\n $$\n \"\"\"\n\n def __init__(\n self,\n matrix: np.ndarray,\n *,\n name: str = None,\n qid_shape: Optional[Iterable[int]] = None,\n unitary_check_rtol: 
float = 1e-5,\n unitary_check_atol: float = 1e-8,\n ) -> None:\n \"\"\"Initializes a matrix gate.\n\n Args:\n matrix: The matrix that defines the gate.\n name: The optional name of the gate to be displayed.\n qid_shape: The shape of state tensor that the matrix applies to.\n If not specified, this value is inferred by assuming that the\n matrix is supposed to apply to qubits.\n unitary_check_rtol: The relative tolerance for checking whether the supplied matrix\n is unitary. See `cirq.is_unitary`.\n unitary_check_atol: The absolute tolerance for checking whether the supplied matrix\n is unitary. See `cirq.is_unitary`.\n\n Raises:\n ValueError: If the matrix is not a square numpy array, if the matrix does not match\n the `qid_shape`, if `qid_shape` is not supplied and the matrix dimension is\n not a power of 2, or if the matrix not unitary (to the supplied precisions).\n \"\"\"\n if len(matrix.shape) != 2 or matrix.shape[0] != matrix.shape[1]:\n raise ValueError('`matrix` must be a square 2d numpy array.')\n\n if qid_shape is None:\n n = int(np.round(np.log2(matrix.shape[0] or 1)))\n if 2**n != matrix.shape[0]:\n raise ValueError(\n f'Matrix width ({matrix.shape[0]}) is not a power of 2 and '\n f'qid_shape is not specified.'\n )\n qid_shape = (2,) * n\n\n self._matrix = matrix\n self._qid_shape = tuple(qid_shape)\n self._name = name\n m = int(np.prod(self._qid_shape, dtype=np.int64))\n if self._matrix.shape != (m, m):\n raise ValueError(\n 'Wrong matrix shape for qid_shape.\\n'\n f'Matrix shape: {self._matrix.shape}\\n'\n f'qid_shape: {self._qid_shape}\\n'\n )\n\n if not linalg.is_unitary(matrix, rtol=unitary_check_rtol, atol=unitary_check_atol):\n raise ValueError(f'Not a unitary matrix: {self._matrix}')\n\n def _json_dict_(self) -> Dict[str, Any]:\n return {'matrix': self._matrix.tolist(), 'qid_shape': self._qid_shape}\n\n @classmethod\n def _from_json_dict_(cls, matrix, qid_shape, **kwargs):\n return cls(matrix=np.array(matrix), qid_shape=qid_shape)\n\n def _qid_shape_(self) -> Tuple[int, ...]:\n return self._qid_shape\n\n def __pow__(self, exponent: Any) -> 'MatrixGate':\n if not isinstance(exponent, (int, float)):\n return NotImplemented\n e = cast(float, exponent)\n new_mat = linalg.map_eigenvalues(self._matrix, lambda b: b**e)\n return MatrixGate(new_mat, qid_shape=self._qid_shape)\n\n def _phase_by_(self, phase_turns: float, qubit_index: int) -> 'MatrixGate':\n if not isinstance(phase_turns, (int, float)):\n return NotImplemented\n if self._qid_shape[qubit_index] != 2:\n return NotImplemented\n result = np.copy(self._matrix).reshape(self._qid_shape * 2)\n\n p = np.exp(2j * np.pi * phase_turns)\n i = qubit_index\n j = qubit_index + len(self._qid_shape)\n result[linalg.slice_for_qubits_equal_to([i], 1)] *= p\n result[linalg.slice_for_qubits_equal_to([j], 1)] *= np.conj(p)\n return MatrixGate(matrix=result.reshape(self._matrix.shape), qid_shape=self._qid_shape)\n\n def _decompose_(self, qubits: Tuple['cirq.Qid', ...]) -> 'cirq.OP_TREE':\n if self._qid_shape == (2,):\n return [\n g.on(qubits[0])\n for g in single_qubit_decompositions.single_qubit_matrix_to_gates(self._matrix)\n ]\n if self._qid_shape == (2,) * 2:\n return two_qubit_to_cz.two_qubit_matrix_to_cz_operations(\n *qubits, self._matrix, allow_partial_czs=True\n )\n if self._qid_shape == (2,) * 3:\n return three_qubit_decomposition.three_qubit_matrix_to_operations(*qubits, self._matrix)\n return NotImplemented\n\n def _has_unitary_(self) -> bool:\n return True\n\n def _unitary_(self) -> np.ndarray:\n return 
np.copy(self._matrix)\n\n def _circuit_diagram_info_(\n self, args: 'cirq.CircuitDiagramInfoArgs'\n ) -> 'cirq.CircuitDiagramInfo':\n n_qubits = len(self._qid_shape)\n if self._name is not None:\n symbols = (\n [self._name] if n_qubits == 1 else [f'{self._name}[{i+1}]' for i in range(n_qubits)]\n )\n return protocols.CircuitDiagramInfo(wire_symbols=symbols)\n main = _matrix_to_diagram_symbol(self._matrix, args)\n rest = [f'#{i+1}' for i in range(1, n_qubits)]\n return protocols.CircuitDiagramInfo(wire_symbols=[main, *rest])\n\n def __hash__(self) -> int:\n vals = tuple(v for _, v in np.ndenumerate(self._matrix))\n return hash((MatrixGate, vals))\n\n def _approx_eq_(self, other: Any, atol) -> bool:\n if not isinstance(other, type(self)):\n return NotImplemented\n return np.allclose(self._matrix, other._matrix, rtol=0, atol=atol)\n\n def __eq__(self, other):\n if not isinstance(other, type(self)):\n return NotImplemented\n return self._qid_shape == other._qid_shape and np.array_equal(self._matrix, other._matrix)\n\n def __ne__(self, other):\n return not self == other\n\n def __repr__(self) -> str:\n if all(e == 2 for e in self._qid_shape):\n return f'cirq.MatrixGate({proper_repr(self._matrix)})'\n return f'cirq.MatrixGate({proper_repr(self._matrix)}, qid_shape={self._qid_shape})'\n\n def __str__(self) -> str:\n return str(self._matrix.round(3))\n\n\ndef _matrix_to_diagram_symbol(matrix: np.ndarray, args: 'protocols.CircuitDiagramInfoArgs') -> str:\n if args.precision is not None:\n matrix = matrix.round(args.precision)\n result = str(matrix)\n if args.use_unicode_characters:\n lines = result.split('\\n')\n for i in range(len(lines)):\n lines[i] = lines[i].replace('[[', '')\n lines[i] = lines[i].replace(' [', '')\n lines[i] = lines[i].replace(']', '')\n w = max(len(line) for line in lines)\n for i in range(len(lines)):\n lines[i] = '\u2502' + lines[i].ljust(w) + '\u2502'\n lines.insert(0, '\u250c' + ' ' * w + '\u2510')\n lines.append('\u2514' + ' ' * w + '\u2518')\n result = '\\n'.join(lines)\n return result\n", "path": "cirq-core/cirq/ops/matrix_gates.py"}]} | 3,467 | 312 |
gh_patches_debug_18123 | rasdani/github-patches | git_diff | svthalia__concrexit-1719 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Missing frontpage articles in API v2
### Describe the bug
When there are multiple frontpage articles that should be visible (such as currently on staging), `api/v2/announcements/frontpage-articles/` shows only one.
</issue>
<code>
[start of website/announcements/models.py]
1 """The models defined by the announcement package."""
2 from django.core.validators import (
3 FileExtensionValidator,
4 get_available_image_extensions,
5 )
6 from django.db import models
7 from django.db.models import CharField, Manager, Q
8 from django.utils import timezone
9 from django.utils.translation import gettext_lazy as _
10 from tinymce.models import HTMLField
11
12
13 class VisibleObjectManager(Manager):
14 """Get all active members, i.e. who have a committee membership."""
15
16 def get_queryset(self):
17 """Select all visible items."""
18 return (
19 super()
20 .get_queryset()
21 .filter(
22 (Q(until__isnull=True) | Q(until__gt=timezone.now()))
23 & (Q(since__isnull=True) | Q(since__lte=timezone.now()))
24 & ~(Q(since__isnull=True) & Q(until__isnull=True))
25 )
26 )
27
28
29 class Announcement(models.Model):
30 """Describes an announcement."""
31
32 objects = models.Manager()
33 visible_objects = VisibleObjectManager()
34
35 content = HTMLField(
36 verbose_name=_("Content"),
37 help_text=_("The content of the announcement; what text to display."),
38 blank=False,
39 max_length=500,
40 )
41
42 since = models.DateTimeField(
43 verbose_name=_("Display since"),
44 help_text=_("Hide this announcement before this time."),
45 default=timezone.now,
46 )
47
48 until = models.DateTimeField(
49 verbose_name=_("Display until"),
50 help_text=_("Hide this announcement after this time."),
51 blank=True,
52 null=True,
53 )
54
55 icon = models.CharField(
56 verbose_name=_("Font Awesome icon"),
57 help_text=_("Font Awesome abbreviation for icon to use."),
58 max_length=150,
59 default="bullhorn",
60 )
61
62 closeable = models.BooleanField(default=True)
63
64 class Meta:
65 ordering = ("-since",)
66
67 def __str__(self):
68 return str(self.content)
69
70 @property
71 def is_visible(self):
72 """Is this announcement currently visible."""
73 return (
74 (self.until is None or self.until > timezone.now())
75 and (self.since is None or self.since <= timezone.now())
76 and not (self.since is None and self.until is None)
77 )
78
79
80 class FrontpageArticle(models.Model):
81 """Front page articles."""
82
83 objects = models.Manager()
84 visible_objects = VisibleObjectManager()
85
86 title = models.CharField(
87 verbose_name=_("Title"),
88 help_text=_("The title of the article; what goes in the header"),
89 blank=False,
90 max_length=80,
91 )
92
93 content = HTMLField(
94 verbose_name=_("Content"),
95 help_text=_("The content of the article; what text to display."),
96 blank=False,
97 max_length=5000,
98 )
99
100 since = models.DateTimeField(
101 verbose_name=_("Display since"),
102 help_text=_("Hide this article before this time."),
103 default=timezone.now,
104 )
105
106 until = models.DateTimeField(
107 verbose_name=_("Display until"),
108 help_text=_("Hide this article after this time."),
109 blank=True,
110 null=True,
111 )
112
113 class Meta:
114 ordering = ("-since",)
115
116 def __str__(self):
117 return str(self.title)
118
119 @property
120 def is_visible(self):
121 """Is this announcement currently visible."""
122 return (
123 (self.until is None or self.until > timezone.now())
124 and (self.since is None or self.since <= timezone.now())
125 and not (self.since is None and self.until is None)
126 )
127
128
129 def validate_image(value):
130 return FileExtensionValidator(
131 allowed_extensions=[*get_available_image_extensions(), "svg"]
132 )(value)
133
134
135 class Slide(models.Model):
136 """Describes an announcement."""
137
138 objects = models.Manager()
139 visible_objects = VisibleObjectManager()
140
141 title = CharField(
142 verbose_name=_("Title"),
143 help_text=_("The title of the slide; just for the admin."),
144 blank=False,
145 max_length=100,
146 )
147
148 content = models.FileField(
149 verbose_name=_("Content"),
150 help_text=_("The content of the slide; what image to display."),
151 blank=False,
152 upload_to="public/announcements/slides/",
153 validators=[validate_image],
154 )
155
156 since = models.DateTimeField(
157 verbose_name=_("Display since"),
158 help_text=_(
159 "Hide this slide before this time. When all date- and "
160 "time-fields are left blank, the slide won't "
161 "be visible. It will, however, be visible on an event-page "
162 "if it's linked to an event."
163 ),
164 default=timezone.now,
165 blank=True,
166 null=True,
167 )
168
169 until = models.DateTimeField(
170 verbose_name=_("Display until"),
171 help_text=_("Hide this slide after this time."),
172 blank=True,
173 null=True,
174 )
175
176 order = models.PositiveIntegerField(
177 verbose_name=_("Order"),
178 help_text=_("Approximately where this slide should appear in the order"),
179 default=0,
180 )
181
182 members_only = models.BooleanField(
183 verbose_name=_("Display only for authenticated members"), default=False
184 )
185
186 url = models.URLField(
187 verbose_name=_("Link"),
188 help_text=_("Place the user is taken to when clicking the slide"),
189 blank=True,
190 null=True,
191 )
192
193 url_blank = models.BooleanField(
194 verbose_name=_("Link outside thalia.nu"),
195 help_text=_("Clicking the slide will open a new tab"),
196 default=False,
197 )
198
199 class Meta:
200 ordering = ("-since",)
201
202 @property
203 def is_visible(self):
204 """Is this slide currently visible."""
205 return (
206 (self.until is None or self.until > timezone.now())
207 and (self.since is None or self.since <= timezone.now())
208 and not (self.since is None and self.until is None)
209 )
210
211 def __str__(self):
212 return str(self.title)
213
[end of website/announcements/models.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/website/announcements/models.py b/website/announcements/models.py
--- a/website/announcements/models.py
+++ b/website/announcements/models.py
@@ -5,6 +5,7 @@
)
from django.db import models
from django.db.models import CharField, Manager, Q
+from django.db.models.functions import Now
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from tinymce.models import HTMLField
@@ -19,8 +20,8 @@
super()
.get_queryset()
.filter(
- (Q(until__isnull=True) | Q(until__gt=timezone.now()))
- & (Q(since__isnull=True) | Q(since__lte=timezone.now()))
+ (Q(until__isnull=True) | Q(until__gt=Now()))
+ & (Q(since__isnull=True) | Q(since__lte=Now()))
& ~(Q(since__isnull=True) & Q(until__isnull=True))
)
)
| {"golden_diff": "diff --git a/website/announcements/models.py b/website/announcements/models.py\n--- a/website/announcements/models.py\n+++ b/website/announcements/models.py\n@@ -5,6 +5,7 @@\n )\n from django.db import models\n from django.db.models import CharField, Manager, Q\n+from django.db.models.functions import Now\n from django.utils import timezone\n from django.utils.translation import gettext_lazy as _\n from tinymce.models import HTMLField\n@@ -19,8 +20,8 @@\n super()\n .get_queryset()\n .filter(\n- (Q(until__isnull=True) | Q(until__gt=timezone.now()))\n- & (Q(since__isnull=True) | Q(since__lte=timezone.now()))\n+ (Q(until__isnull=True) | Q(until__gt=Now()))\n+ & (Q(since__isnull=True) | Q(since__lte=Now()))\n & ~(Q(since__isnull=True) & Q(until__isnull=True))\n )\n )\n", "issue": "Missing frontpage articles in API v2\n### Describe the bug\r\nWhen there are multiple frontpage articles that should be visible (such as currently on staging), `api/v2/announcements/frontpage-articles/` shows only one.\r\n\n", "before_files": [{"content": "\"\"\"The models defined by the announcement package.\"\"\"\nfrom django.core.validators import (\n FileExtensionValidator,\n get_available_image_extensions,\n)\nfrom django.db import models\nfrom django.db.models import CharField, Manager, Q\nfrom django.utils import timezone\nfrom django.utils.translation import gettext_lazy as _\nfrom tinymce.models import HTMLField\n\n\nclass VisibleObjectManager(Manager):\n \"\"\"Get all active members, i.e. who have a committee membership.\"\"\"\n\n def get_queryset(self):\n \"\"\"Select all visible items.\"\"\"\n return (\n super()\n .get_queryset()\n .filter(\n (Q(until__isnull=True) | Q(until__gt=timezone.now()))\n & (Q(since__isnull=True) | Q(since__lte=timezone.now()))\n & ~(Q(since__isnull=True) & Q(until__isnull=True))\n )\n )\n\n\nclass Announcement(models.Model):\n \"\"\"Describes an announcement.\"\"\"\n\n objects = models.Manager()\n visible_objects = VisibleObjectManager()\n\n content = HTMLField(\n verbose_name=_(\"Content\"),\n help_text=_(\"The content of the announcement; what text to display.\"),\n blank=False,\n max_length=500,\n )\n\n since = models.DateTimeField(\n verbose_name=_(\"Display since\"),\n help_text=_(\"Hide this announcement before this time.\"),\n default=timezone.now,\n )\n\n until = models.DateTimeField(\n verbose_name=_(\"Display until\"),\n help_text=_(\"Hide this announcement after this time.\"),\n blank=True,\n null=True,\n )\n\n icon = models.CharField(\n verbose_name=_(\"Font Awesome icon\"),\n help_text=_(\"Font Awesome abbreviation for icon to use.\"),\n max_length=150,\n default=\"bullhorn\",\n )\n\n closeable = models.BooleanField(default=True)\n\n class Meta:\n ordering = (\"-since\",)\n\n def __str__(self):\n return str(self.content)\n\n @property\n def is_visible(self):\n \"\"\"Is this announcement currently visible.\"\"\"\n return (\n (self.until is None or self.until > timezone.now())\n and (self.since is None or self.since <= timezone.now())\n and not (self.since is None and self.until is None)\n )\n\n\nclass FrontpageArticle(models.Model):\n \"\"\"Front page articles.\"\"\"\n\n objects = models.Manager()\n visible_objects = VisibleObjectManager()\n\n title = models.CharField(\n verbose_name=_(\"Title\"),\n help_text=_(\"The title of the article; what goes in the header\"),\n blank=False,\n max_length=80,\n )\n\n content = HTMLField(\n verbose_name=_(\"Content\"),\n help_text=_(\"The content of the article; what text to display.\"),\n blank=False,\n 
max_length=5000,\n )\n\n since = models.DateTimeField(\n verbose_name=_(\"Display since\"),\n help_text=_(\"Hide this article before this time.\"),\n default=timezone.now,\n )\n\n until = models.DateTimeField(\n verbose_name=_(\"Display until\"),\n help_text=_(\"Hide this article after this time.\"),\n blank=True,\n null=True,\n )\n\n class Meta:\n ordering = (\"-since\",)\n\n def __str__(self):\n return str(self.title)\n\n @property\n def is_visible(self):\n \"\"\"Is this announcement currently visible.\"\"\"\n return (\n (self.until is None or self.until > timezone.now())\n and (self.since is None or self.since <= timezone.now())\n and not (self.since is None and self.until is None)\n )\n\n\ndef validate_image(value):\n return FileExtensionValidator(\n allowed_extensions=[*get_available_image_extensions(), \"svg\"]\n )(value)\n\n\nclass Slide(models.Model):\n \"\"\"Describes an announcement.\"\"\"\n\n objects = models.Manager()\n visible_objects = VisibleObjectManager()\n\n title = CharField(\n verbose_name=_(\"Title\"),\n help_text=_(\"The title of the slide; just for the admin.\"),\n blank=False,\n max_length=100,\n )\n\n content = models.FileField(\n verbose_name=_(\"Content\"),\n help_text=_(\"The content of the slide; what image to display.\"),\n blank=False,\n upload_to=\"public/announcements/slides/\",\n validators=[validate_image],\n )\n\n since = models.DateTimeField(\n verbose_name=_(\"Display since\"),\n help_text=_(\n \"Hide this slide before this time. When all date- and \"\n \"time-fields are left blank, the slide won't \"\n \"be visible. It will, however, be visible on an event-page \"\n \"if it's linked to an event.\"\n ),\n default=timezone.now,\n blank=True,\n null=True,\n )\n\n until = models.DateTimeField(\n verbose_name=_(\"Display until\"),\n help_text=_(\"Hide this slide after this time.\"),\n blank=True,\n null=True,\n )\n\n order = models.PositiveIntegerField(\n verbose_name=_(\"Order\"),\n help_text=_(\"Approximately where this slide should appear in the order\"),\n default=0,\n )\n\n members_only = models.BooleanField(\n verbose_name=_(\"Display only for authenticated members\"), default=False\n )\n\n url = models.URLField(\n verbose_name=_(\"Link\"),\n help_text=_(\"Place the user is taken to when clicking the slide\"),\n blank=True,\n null=True,\n )\n\n url_blank = models.BooleanField(\n verbose_name=_(\"Link outside thalia.nu\"),\n help_text=_(\"Clicking the slide will open a new tab\"),\n default=False,\n )\n\n class Meta:\n ordering = (\"-since\",)\n\n @property\n def is_visible(self):\n \"\"\"Is this slide currently visible.\"\"\"\n return (\n (self.until is None or self.until > timezone.now())\n and (self.since is None or self.since <= timezone.now())\n and not (self.since is None and self.until is None)\n )\n\n def __str__(self):\n return str(self.title)\n", "path": "website/announcements/models.py"}]} | 2,380 | 233 |
gh_patches_debug_58737 | rasdani/github-patches | git_diff | goauthentik__authentik-9182 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
OIDC Well-known URL won't configure endpoint URLs
**Describe the bug**
When entering the OIDC Well-known URL for an OpenID OAuth source, authentik won't automatically configure the endpoint URLs. Instead, it returns a "Bad Request" error and says "This field may not be blank." for every endpoint.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to 'Directory > Federation and Social login'
2. Click on 'Create > OpenID OAuth Source > Next'
3. Enter your credentials (Name, Slug, Key, Secret and the Well-known URL)
4. Click on 'Finish'
5. See error
**Expected behavior**
Upon clicking 'Finish', authentik should configure the missing endpoints using the Well-known URL.
**Screenshots**

**Version and Deployment:**
- authentik version: 2024.2.2
- Deployment: docker-compose
**Additional context**
I thought it might have something to do with connectivity and the proxy, since we are running authentik behind a corporate proxy. But I can curl the Well-known URL from inside the containers.
</issue>
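For context, the endpoint URLs the form is expected to fill in come from the provider's OIDC discovery document served at the well-known URL. The sketch below only illustrates that inference step; the issuer and URLs are hypothetical placeholders, and the field mapping mirrors the `field_map` used in the serializer code shown further down.

```python
# Hypothetical discovery document as returned by the well-known URL.
# All URLs are made-up placeholders; only the key names are standard
# OIDC discovery fields.
well_known = {
    "issuer": "https://idp.example.com",
    "authorization_endpoint": "https://idp.example.com/oauth2/authorize",
    "token_endpoint": "https://idp.example.com/oauth2/token",
    "userinfo_endpoint": "https://idp.example.com/oauth2/userinfo",
    "jwks_uri": "https://idp.example.com/oauth2/keys",
}

# authentik field -> discovery key, as in the serializer's field_map.
field_map = {
    "authorization_url": "authorization_endpoint",
    "access_token_url": "token_endpoint",
    "profile_url": "userinfo_endpoint",
}

attrs = {"authorization_url": "", "access_token_url": "", "profile_url": ""}
for ak_key, oidc_key in field_map.items():
    if not attrs.get(ak_key):  # don't overwrite user-set values
        attrs[ak_key] = well_known.get(oidc_key, "")
```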
<code>
[start of authentik/sources/oauth/api/source.py]
1 """OAuth Source Serializer"""
2
3 from django.urls.base import reverse_lazy
4 from django_filters.filters import BooleanFilter
5 from django_filters.filterset import FilterSet
6 from drf_spectacular.types import OpenApiTypes
7 from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_field
8 from requests import RequestException
9 from rest_framework.decorators import action
10 from rest_framework.fields import BooleanField, CharField, ChoiceField, SerializerMethodField
11 from rest_framework.request import Request
12 from rest_framework.response import Response
13 from rest_framework.serializers import ValidationError
14 from rest_framework.viewsets import ModelViewSet
15
16 from authentik.core.api.sources import SourceSerializer
17 from authentik.core.api.used_by import UsedByMixin
18 from authentik.core.api.utils import PassiveSerializer
19 from authentik.lib.utils.http import get_http_session
20 from authentik.sources.oauth.models import OAuthSource
21 from authentik.sources.oauth.types.registry import SourceType, registry
22
23
24 class SourceTypeSerializer(PassiveSerializer):
25 """Serializer for SourceType"""
26
27 name = CharField(required=True)
28 verbose_name = CharField(required=True)
29 urls_customizable = BooleanField()
30 request_token_url = CharField(read_only=True, allow_null=True)
31 authorization_url = CharField(read_only=True, allow_null=True)
32 access_token_url = CharField(read_only=True, allow_null=True)
33 profile_url = CharField(read_only=True, allow_null=True)
34 oidc_well_known_url = CharField(read_only=True, allow_null=True)
35 oidc_jwks_url = CharField(read_only=True, allow_null=True)
36
37
38 class OAuthSourceSerializer(SourceSerializer):
39 """OAuth Source Serializer"""
40
41 provider_type = ChoiceField(choices=registry.get_name_tuple())
42 callback_url = SerializerMethodField()
43 type = SerializerMethodField()
44
45 def get_callback_url(self, instance: OAuthSource) -> str:
46 """Get OAuth Callback URL"""
47 relative_url = reverse_lazy(
48 "authentik_sources_oauth:oauth-client-callback",
49 kwargs={"source_slug": instance.slug},
50 )
51 if "request" not in self.context:
52 return relative_url
53 return self.context["request"].build_absolute_uri(relative_url)
54
55 @extend_schema_field(SourceTypeSerializer)
56 def get_type(self, instance: OAuthSource) -> SourceTypeSerializer:
57 """Get source's type configuration"""
58 return SourceTypeSerializer(instance.source_type).data
59
60 def validate(self, attrs: dict) -> dict:
61 session = get_http_session()
62 source_type = registry.find_type(attrs["provider_type"])
63
64 well_known = attrs.get("oidc_well_known_url") or source_type.oidc_well_known_url
65 inferred_oidc_jwks_url = None
66
67 if well_known and well_known != "":
68 try:
69 well_known_config = session.get(well_known)
70 well_known_config.raise_for_status()
71 except RequestException as exc:
72 text = exc.response.text if exc.response else str(exc)
73 raise ValidationError({"oidc_well_known_url": text}) from None
74 config = well_known_config.json()
75 if "issuer" not in config:
76 raise ValidationError({"oidc_well_known_url": "Invalid well-known configuration"})
77 field_map = {
78 # authentik field to oidc field
79 "authorization_url": "authorization_endpoint",
80 "access_token_url": "token_endpoint",
81 "profile_url": "userinfo_endpoint",
82 }
83 for ak_key, oidc_key in field_map.items():
84 # Don't overwrite user-set values
85 if ak_key in attrs and attrs[ak_key]:
86 continue
87 attrs[ak_key] = config.get(oidc_key, "")
88 inferred_oidc_jwks_url = config.get("jwks_uri", "")
89
90 # Prefer user-entered URL to inferred URL to default URL
91 jwks_url = attrs.get("oidc_jwks_url") or inferred_oidc_jwks_url or source_type.oidc_jwks_url
92 if jwks_url and jwks_url != "":
93 attrs["oidc_jwks_url"] = jwks_url
94 try:
95 jwks_config = session.get(jwks_url)
96 jwks_config.raise_for_status()
97 except RequestException as exc:
98 text = exc.response.text if exc.response else str(exc)
99 raise ValidationError({"oidc_jwks_url": text}) from None
100 config = jwks_config.json()
101 attrs["oidc_jwks"] = config
102
103 provider_type = registry.find_type(attrs.get("provider_type", ""))
104 for url in [
105 "authorization_url",
106 "access_token_url",
107 "profile_url",
108 ]:
109 if getattr(provider_type, url, None) is None:
110 if url not in attrs:
111 raise ValidationError(
112 f"{url} is required for provider {provider_type.verbose_name}"
113 )
114 return attrs
115
116 class Meta:
117 model = OAuthSource
118 fields = SourceSerializer.Meta.fields + [
119 "provider_type",
120 "request_token_url",
121 "authorization_url",
122 "access_token_url",
123 "profile_url",
124 "consumer_key",
125 "consumer_secret",
126 "callback_url",
127 "additional_scopes",
128 "type",
129 "oidc_well_known_url",
130 "oidc_jwks_url",
131 "oidc_jwks",
132 ]
133 extra_kwargs = {"consumer_secret": {"write_only": True}}
134
135
136 class OAuthSourceFilter(FilterSet):
137 """OAuth Source filter set"""
138
139 has_jwks = BooleanFilter(label="Only return sources with JWKS data", method="filter_has_jwks")
140
141 def filter_has_jwks(self, queryset, name, value): # pragma: no cover
142 """Only return sources with JWKS data"""
143 return queryset.exclude(oidc_jwks__iexact="{}")
144
145 class Meta:
146 model = OAuthSource
147 fields = [
148 "name",
149 "slug",
150 "enabled",
151 "authentication_flow",
152 "enrollment_flow",
153 "policy_engine_mode",
154 "user_matching_mode",
155 "provider_type",
156 "request_token_url",
157 "authorization_url",
158 "access_token_url",
159 "profile_url",
160 "consumer_key",
161 "additional_scopes",
162 ]
163
164
165 class OAuthSourceViewSet(UsedByMixin, ModelViewSet):
166 """Source Viewset"""
167
168 queryset = OAuthSource.objects.all()
169 serializer_class = OAuthSourceSerializer
170 lookup_field = "slug"
171 filterset_class = OAuthSourceFilter
172 search_fields = ["name", "slug"]
173 ordering = ["name"]
174
175 @extend_schema(
176 responses={200: SourceTypeSerializer(many=True)},
177 parameters=[
178 OpenApiParameter(
179 name="name",
180 location=OpenApiParameter.QUERY,
181 type=OpenApiTypes.STR,
182 )
183 ],
184 )
185 @action(detail=False, pagination_class=None, filter_backends=[])
186 def source_types(self, request: Request) -> Response:
187 """Get all creatable source types. If ?name is set, only returns the type for <name>.
188 If <name> isn't found, returns the default type."""
189 data = []
190 if "name" in request.query_params:
191 source_type = registry.find_type(request.query_params.get("name"))
192 if source_type.__class__ != SourceType:
193 data.append(SourceTypeSerializer(source_type).data)
194 else:
195 for source_type in registry.get():
196 data.append(SourceTypeSerializer(source_type).data)
197 return Response(data)
198
[end of authentik/sources/oauth/api/source.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/authentik/sources/oauth/api/source.py b/authentik/sources/oauth/api/source.py
--- a/authentik/sources/oauth/api/source.py
+++ b/authentik/sources/oauth/api/source.py
@@ -130,7 +130,13 @@
"oidc_jwks_url",
"oidc_jwks",
]
- extra_kwargs = {"consumer_secret": {"write_only": True}}
+ extra_kwargs = {
+ "consumer_secret": {"write_only": True},
+ "request_token_url": {"allow_blank": True},
+ "authorization_url": {"allow_blank": True},
+ "access_token_url": {"allow_blank": True},
+ "profile_url": {"allow_blank": True},
+ }
class OAuthSourceFilter(FilterSet):
| {"golden_diff": "diff --git a/authentik/sources/oauth/api/source.py b/authentik/sources/oauth/api/source.py\n--- a/authentik/sources/oauth/api/source.py\n+++ b/authentik/sources/oauth/api/source.py\n@@ -130,7 +130,13 @@\n \"oidc_jwks_url\",\n \"oidc_jwks\",\n ]\n- extra_kwargs = {\"consumer_secret\": {\"write_only\": True}}\n+ extra_kwargs = {\n+ \"consumer_secret\": {\"write_only\": True},\n+ \"request_token_url\": {\"allow_blank\": True},\n+ \"authorization_url\": {\"allow_blank\": True},\n+ \"access_token_url\": {\"allow_blank\": True},\n+ \"profile_url\": {\"allow_blank\": True},\n+ }\n \n \n class OAuthSourceFilter(FilterSet):\n", "issue": "OIDC Well-known URL won't configure endpoint URLs\n**Describe the bug**\r\nWhen entering the OIDC Well-known URL for an OpenID OAuth source, it won't automatically configure the endpoint URLs. Instead it prompts a \"Bad Request\" error and says \"This field may not be blank.\" for every endpoint.\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n\r\n1. Go to 'Directory > Federation and Social login'\r\n2. Click on 'Create > OpenID OAuth Source > Next'\r\n3. Enter your credentials (Name, Slug, Key, Secret and the Well-known URL)\r\n4. Click on 'Finish'\r\n5. See error\r\n\r\n**Expected behavior**\r\nUppon clicking on 'Finish' authentik should configure the missing endpoints using the Well-known URL.\r\n\r\n**Screenshots**\r\n\r\n\r\n\r\n**Version and Deployment:**\r\n\r\n- authentik version: 2024.2.2 \r\n- Deployment: docker-compose\r\n\r\n**Additional context**\r\nI thought, it might have something to do with connectivity and proxy, since we are running authentik behind a corporate proxy. But I can curl the Well-known URL from inside the containers.\r\n\n", "before_files": [{"content": "\"\"\"OAuth Source Serializer\"\"\"\n\nfrom django.urls.base import reverse_lazy\nfrom django_filters.filters import BooleanFilter\nfrom django_filters.filterset import FilterSet\nfrom drf_spectacular.types import OpenApiTypes\nfrom drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_field\nfrom requests import RequestException\nfrom rest_framework.decorators import action\nfrom rest_framework.fields import BooleanField, CharField, ChoiceField, SerializerMethodField\nfrom rest_framework.request import Request\nfrom rest_framework.response import Response\nfrom rest_framework.serializers import ValidationError\nfrom rest_framework.viewsets import ModelViewSet\n\nfrom authentik.core.api.sources import SourceSerializer\nfrom authentik.core.api.used_by import UsedByMixin\nfrom authentik.core.api.utils import PassiveSerializer\nfrom authentik.lib.utils.http import get_http_session\nfrom authentik.sources.oauth.models import OAuthSource\nfrom authentik.sources.oauth.types.registry import SourceType, registry\n\n\nclass SourceTypeSerializer(PassiveSerializer):\n \"\"\"Serializer for SourceType\"\"\"\n\n name = CharField(required=True)\n verbose_name = CharField(required=True)\n urls_customizable = BooleanField()\n request_token_url = CharField(read_only=True, allow_null=True)\n authorization_url = CharField(read_only=True, allow_null=True)\n access_token_url = CharField(read_only=True, allow_null=True)\n profile_url = CharField(read_only=True, allow_null=True)\n oidc_well_known_url = CharField(read_only=True, allow_null=True)\n oidc_jwks_url = CharField(read_only=True, allow_null=True)\n\n\nclass OAuthSourceSerializer(SourceSerializer):\n \"\"\"OAuth Source Serializer\"\"\"\n\n provider_type = 
ChoiceField(choices=registry.get_name_tuple())\n callback_url = SerializerMethodField()\n type = SerializerMethodField()\n\n def get_callback_url(self, instance: OAuthSource) -> str:\n \"\"\"Get OAuth Callback URL\"\"\"\n relative_url = reverse_lazy(\n \"authentik_sources_oauth:oauth-client-callback\",\n kwargs={\"source_slug\": instance.slug},\n )\n if \"request\" not in self.context:\n return relative_url\n return self.context[\"request\"].build_absolute_uri(relative_url)\n\n @extend_schema_field(SourceTypeSerializer)\n def get_type(self, instance: OAuthSource) -> SourceTypeSerializer:\n \"\"\"Get source's type configuration\"\"\"\n return SourceTypeSerializer(instance.source_type).data\n\n def validate(self, attrs: dict) -> dict:\n session = get_http_session()\n source_type = registry.find_type(attrs[\"provider_type\"])\n\n well_known = attrs.get(\"oidc_well_known_url\") or source_type.oidc_well_known_url\n inferred_oidc_jwks_url = None\n\n if well_known and well_known != \"\":\n try:\n well_known_config = session.get(well_known)\n well_known_config.raise_for_status()\n except RequestException as exc:\n text = exc.response.text if exc.response else str(exc)\n raise ValidationError({\"oidc_well_known_url\": text}) from None\n config = well_known_config.json()\n if \"issuer\" not in config:\n raise ValidationError({\"oidc_well_known_url\": \"Invalid well-known configuration\"})\n field_map = {\n # authentik field to oidc field\n \"authorization_url\": \"authorization_endpoint\",\n \"access_token_url\": \"token_endpoint\",\n \"profile_url\": \"userinfo_endpoint\",\n }\n for ak_key, oidc_key in field_map.items():\n # Don't overwrite user-set values\n if ak_key in attrs and attrs[ak_key]:\n continue\n attrs[ak_key] = config.get(oidc_key, \"\")\n inferred_oidc_jwks_url = config.get(\"jwks_uri\", \"\")\n\n # Prefer user-entered URL to inferred URL to default URL\n jwks_url = attrs.get(\"oidc_jwks_url\") or inferred_oidc_jwks_url or source_type.oidc_jwks_url\n if jwks_url and jwks_url != \"\":\n attrs[\"oidc_jwks_url\"] = jwks_url\n try:\n jwks_config = session.get(jwks_url)\n jwks_config.raise_for_status()\n except RequestException as exc:\n text = exc.response.text if exc.response else str(exc)\n raise ValidationError({\"oidc_jwks_url\": text}) from None\n config = jwks_config.json()\n attrs[\"oidc_jwks\"] = config\n\n provider_type = registry.find_type(attrs.get(\"provider_type\", \"\"))\n for url in [\n \"authorization_url\",\n \"access_token_url\",\n \"profile_url\",\n ]:\n if getattr(provider_type, url, None) is None:\n if url not in attrs:\n raise ValidationError(\n f\"{url} is required for provider {provider_type.verbose_name}\"\n )\n return attrs\n\n class Meta:\n model = OAuthSource\n fields = SourceSerializer.Meta.fields + [\n \"provider_type\",\n \"request_token_url\",\n \"authorization_url\",\n \"access_token_url\",\n \"profile_url\",\n \"consumer_key\",\n \"consumer_secret\",\n \"callback_url\",\n \"additional_scopes\",\n \"type\",\n \"oidc_well_known_url\",\n \"oidc_jwks_url\",\n \"oidc_jwks\",\n ]\n extra_kwargs = {\"consumer_secret\": {\"write_only\": True}}\n\n\nclass OAuthSourceFilter(FilterSet):\n \"\"\"OAuth Source filter set\"\"\"\n\n has_jwks = BooleanFilter(label=\"Only return sources with JWKS data\", method=\"filter_has_jwks\")\n\n def filter_has_jwks(self, queryset, name, value): # pragma: no cover\n \"\"\"Only return sources with JWKS data\"\"\"\n return queryset.exclude(oidc_jwks__iexact=\"{}\")\n\n class Meta:\n model = OAuthSource\n fields = [\n \"name\",\n 
\"slug\",\n \"enabled\",\n \"authentication_flow\",\n \"enrollment_flow\",\n \"policy_engine_mode\",\n \"user_matching_mode\",\n \"provider_type\",\n \"request_token_url\",\n \"authorization_url\",\n \"access_token_url\",\n \"profile_url\",\n \"consumer_key\",\n \"additional_scopes\",\n ]\n\n\nclass OAuthSourceViewSet(UsedByMixin, ModelViewSet):\n \"\"\"Source Viewset\"\"\"\n\n queryset = OAuthSource.objects.all()\n serializer_class = OAuthSourceSerializer\n lookup_field = \"slug\"\n filterset_class = OAuthSourceFilter\n search_fields = [\"name\", \"slug\"]\n ordering = [\"name\"]\n\n @extend_schema(\n responses={200: SourceTypeSerializer(many=True)},\n parameters=[\n OpenApiParameter(\n name=\"name\",\n location=OpenApiParameter.QUERY,\n type=OpenApiTypes.STR,\n )\n ],\n )\n @action(detail=False, pagination_class=None, filter_backends=[])\n def source_types(self, request: Request) -> Response:\n \"\"\"Get all creatable source types. If ?name is set, only returns the type for <name>.\n If <name> isn't found, returns the default type.\"\"\"\n data = []\n if \"name\" in request.query_params:\n source_type = registry.find_type(request.query_params.get(\"name\"))\n if source_type.__class__ != SourceType:\n data.append(SourceTypeSerializer(source_type).data)\n else:\n for source_type in registry.get():\n data.append(SourceTypeSerializer(source_type).data)\n return Response(data)\n", "path": "authentik/sources/oauth/api/source.py"}]} | 2,925 | 172 |
gh_patches_debug_12806 | rasdani/github-patches | git_diff | bokeh__bokeh-4677 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
undeprecate .from_df
With server apps, it is necessary to update `.data` all at once, so a method to construct a dictionary for `.data` is actually useful and needed. Going to un-deprecate for now; we can discuss other ideas for this functionality later.
</issue>
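As a minimal sketch of the server-app use case described above (assuming pandas is installed and that `from_df` stays available as the issue proposes), a callback would rebuild the whole `.data` dict in a single assignment; the column names and values here are made up and the figure/session setup is omitted.

```python
import pandas as pd
from bokeh.models import ColumnDataSource

source = ColumnDataSource(data=dict(x=[], y=[]))

def update():
    # Hypothetical refresh; in a real app this would come from a query, file, etc.
    df = pd.DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]})
    # Replace the data "at once" with a dict built from the DataFrame.
    source.data = ColumnDataSource.from_df(df)
```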
<code>
[start of bokeh/models/sources.py]
1 from __future__ import absolute_import
2
3 from ..core import validation
4 from ..core.validation.errors import COLUMN_LENGTHS
5 from ..core.properties import abstract
6 from ..core.properties import Any, Int, String, Instance, List, Dict, Bool, Enum, JSON
7 from ..model import Model
8 from ..util.dependencies import import_optional
9 from ..util.deprecate import deprecated
10 from ..util.serialization import transform_column_source_data
11 from .callbacks import Callback
12
13 pd = import_optional('pandas')
14
15 @abstract
16 class DataSource(Model):
17 """ A base class for data source types. ``DataSource`` is
18 not generally useful to instantiate on its own.
19
20 """
21
22 selected = Dict(String, Dict(String, Any), default={
23 '0d': {'glyph': None, 'indices': []},
24 '1d': {'indices': []},
25 '2d': {'indices': []}
26 }, help="""
27 A dict to indicate selected indices on different dimensions on this DataSource. Keys are:
28
29 - 0d: indicates whether a Line or Patch glyphs have been hit. Value is a
30 dict with the following keys:
31
32 - flag (boolean): true if glyph was with false otherwise
33 - indices (list): indices hit (if applicable)
34
35 - 1d: indicates whether any of all other glyph (except [multi]line or
36 patches) was hit:
37
38 - indices (list): indices that were hit/selected
39
40 - 2d: indicates whether a [multi]line or patches) were hit:
41
42 - indices (list(list)): indices of the lines/patches that were
43 hit/selected
44 """)
45
46 callback = Instance(Callback, help="""
47 A callback to run in the browser whenever the selection is changed.
48 """)
49
50 class ColumnDataSource(DataSource):
51 """ Maps names of columns to sequences or arrays.
52
53 If the ColumnDataSource initializer is called with a single argument that
54 is a dict or pandas.DataFrame, that argument is used as the value for the
55 "data" attribute. For example::
56
57 ColumnDataSource(mydict) # same as ColumnDataSource(data=mydict)
58 ColumnDataSource(df) # same as ColumnDataSource(data=df)
59
60 .. note::
61 There is an implicit assumption that all the columns in a
62 a given ColumnDataSource have the same length.
63
64 """
65
66 data = Dict(String, Any, help="""
67 Mapping of column names to sequences of data. The data can be, e.g,
68 Python lists or tuples, NumPy arrays, etc.
69 """)
70
71 column_names = List(String, help="""
72     A list of names for all the columns in this DataSource.
73 """)
74
75 def __init__(self, *args, **kw):
76 """ If called with a single argument that is a dict or
77 pandas.DataFrame, treat that implicitly as the "data" attribute.
78 """
79 if len(args) == 1 and "data" not in kw:
80 kw["data"] = args[0]
81 # TODO (bev) invalid to pass args and "data", check and raise exception
82 raw_data = kw.pop("data", {})
83 if not isinstance(raw_data, dict):
84 if pd and isinstance(raw_data, pd.DataFrame):
85 raw_data = self._data_from_df(raw_data)
86 else:
87 raise ValueError("expected a dict or pandas.DataFrame, got %s" % raw_data)
88 super(ColumnDataSource, self).__init__(**kw)
89 for name, data in raw_data.items():
90 self.add(data, name)
91
92 @staticmethod
93 def _data_from_df(df):
94 """ Create a ``dict`` of columns from a Pandas DataFrame,
95 suitable for creating a ColumnDataSource.
96
97 Args:
98 df (DataFrame) : data to convert
99
100 Returns:
101 dict(str, list)
102
103 """
104 index = df.index
105 new_data = {}
106 for colname in df:
107 new_data[colname] = df[colname].tolist()
108 if index.name:
109 new_data[index.name] = index.tolist()
110 elif index.names and not all([x is None for x in index.names]):
111 new_data["_".join(index.names)] = index.tolist()
112 else:
113 new_data["index"] = index.tolist()
114 return new_data
115
116 @classmethod
117 @deprecated("Bokeh 0.9.3", "ColumnDataSource initializer")
118 def from_df(cls, data):
119 """ Create a ``dict`` of columns from a Pandas DataFrame,
120 suitable for creating a ColumnDataSource.
121
122 Args:
123 data (DataFrame) : data to convert
124
125 Returns:
126 dict(str, list)
127
128 """
129 import warnings
130 warnings.warn("Method deprecated in Bokeh 0.9.3")
131 return cls._data_from_df(data)
132
133 def to_df(self):
134 """ Convert this data source to pandas dataframe.
135
136 If ``column_names`` is set, use those. Otherwise let Pandas
137 infer the column names. The ``column_names`` property can be
138 used both to order and filter the columns.
139
140 Returns:
141 DataFrame
142
143 """
144 if not pd:
145 raise RuntimeError('Pandas must be installed to convert to a Pandas Dataframe')
146 if self.column_names:
147 return pd.DataFrame(self.data, columns=self.column_names)
148 else:
149 return pd.DataFrame(self.data)
150
151 def add(self, data, name=None):
152 """ Appends a new column of data to the data source.
153
154 Args:
155 data (seq) : new data to add
156 name (str, optional) : column name to use.
157                 If not supplied, generate a name of the form "Series ####"
158
159 Returns:
160 str: the column name used
161
162 """
163 if name is None:
164 n = len(self.data)
165 while "Series %d"%n in self.data:
166 n += 1
167 name = "Series %d"%n
168 self.column_names.append(name)
169 self.data[name] = data
170 return name
171
172 def _to_json_like(self, include_defaults):
173 attrs = super(ColumnDataSource, self)._to_json_like(include_defaults=include_defaults)
174 if 'data' in attrs:
175 attrs['data'] = transform_column_source_data(attrs['data'])
176 return attrs
177
178 def remove(self, name):
179 """ Remove a column of data.
180
181 Args:
182 name (str) : name of the column to remove
183
184 Returns:
185 None
186
187 .. note::
188 If the column name does not exist, a warning is issued.
189
190 """
191 try:
192 self.column_names.remove(name)
193 del self.data[name]
194 except (ValueError, KeyError):
195 import warnings
196 warnings.warn("Unable to find column '%s' in data source" % name)
197
198 @deprecated("Bokeh 0.11.0", "bokeh.io.push_notebook")
199 def push_notebook(self):
200 """ Update a data source for a plot in a Jupyter notebook.
201
202         This function can be used to update data in plot data sources
203 in the Jupyter notebook, without having to use the Bokeh server.
204
205 .. warning::
206 This function has been deprecated. Please use
207 ``bokeh.io.push_notebook()`` which will push all changes
208 (not just data sources) to the last shown plot in a Jupyter
209 notebook.
210
211 Returns:
212 None
213
214 """
215 from bokeh.io import push_notebook
216 push_notebook()
217
218 @validation.error(COLUMN_LENGTHS)
219 def _check_column_lengths(self):
220 lengths = set(len(x) for x in self.data.values())
221 if len(lengths) > 1:
222 return str(self)
223
224
225 def stream(self, new_data, rollover=None):
226 import numpy as np
227
228 newkeys = set(new_data.keys())
229 oldkeys = set(self.data.keys())
230 if newkeys != oldkeys:
231 missing = oldkeys - newkeys
232 extra = newkeys - oldkeys
233 if missing and extra:
234 raise ValueError("Must stream updates to all existing columns (missing: %s, extra: %s)" % (", ".join(sorted(missing)), ", ".join(sorted(extra))))
235 elif missing:
236 raise ValueError("Must stream updates to all existing columns (missing: %s)" % ", ".join(sorted(missing)))
237 else:
238 raise ValueError("Must stream updates to all existing columns (extra: %s)" % ", ".join(sorted(extra)))
239
240 lengths = set()
241 for x in new_data.values():
242 if isinstance(x, np.ndarray):
243 if len(x.shape) != 1:
244 raise ValueError("stream(...) only supports 1d sequences, got ndarray with size %r" % (x.shape,))
245 lengths.add(x.shape[0])
246 else:
247 lengths.add(len(x))
248
249 if len(lengths) > 1:
250 raise ValueError("All streaming column updates must be the same length")
251
252 self.data._stream(self.document, self, new_data, rollover)
253
254 class GeoJSONDataSource(ColumnDataSource):
255
256 geojson = JSON(help="""
257 GeoJSON that contains features for plotting. Currently GeoJSONDataSource can
258 only process a FeatureCollection or GeometryCollection.
259 """)
260
261
262 @abstract
263 class RemoteSource(ColumnDataSource):
264
265 data_url = String(help="""
266 The URL to the endpoint for the data.
267 """)
268
269 polling_interval = Int(help="""
270 polling interval for updating data source in milliseconds
271 """)
272
273 class AjaxDataSource(RemoteSource):
274
275 method = Enum('POST', 'GET', help="http method - GET or POST")
276
277 mode = Enum("replace", "append", help="""
278 Whether to append new data to existing data (up to ``max_size``),
279 or to replace existing data entirely.
280 """)
281
282 max_size = Int(help="""
283 Maximum size of the data array being kept after each pull requests.
284 Larger than that size, the data will be right shifted.
285 """)
286
287 if_modified = Bool(False, help="""
288 Whether to include an ``If-Modified-Since`` header in AJAX requests
289 to the server. If this header is supported by the server, then only
290 new data since the last request will be returned.
291 """)
292 content_type = String(default='application/json', help="""
293 Set the "contentType" parameter for the Ajax request.
294 """)
295 http_headers = Dict(String, String, help="""
296 HTTP headers to set for the Ajax request.
297 """)
298
[end of bokeh/models/sources.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/bokeh/models/sources.py b/bokeh/models/sources.py
--- a/bokeh/models/sources.py
+++ b/bokeh/models/sources.py
@@ -114,7 +114,6 @@
return new_data
@classmethod
- @deprecated("Bokeh 0.9.3", "ColumnDataSource initializer")
def from_df(cls, data):
""" Create a ``dict`` of columns from a Pandas DataFrame,
suitable for creating a ColumnDataSource.
@@ -126,8 +125,6 @@
dict(str, list)
"""
- import warnings
- warnings.warn("Method deprecated in Bokeh 0.9.3")
return cls._data_from_df(data)
def to_df(self):
| {"golden_diff": "diff --git a/bokeh/models/sources.py b/bokeh/models/sources.py\n--- a/bokeh/models/sources.py\n+++ b/bokeh/models/sources.py\n@@ -114,7 +114,6 @@\n return new_data\n \n @classmethod\n- @deprecated(\"Bokeh 0.9.3\", \"ColumnDataSource initializer\")\n def from_df(cls, data):\n \"\"\" Create a ``dict`` of columns from a Pandas DataFrame,\n suitable for creating a ColumnDataSource.\n@@ -126,8 +125,6 @@\n dict(str, list)\n \n \"\"\"\n- import warnings\n- warnings.warn(\"Method deprecated in Bokeh 0.9.3\")\n return cls._data_from_df(data)\n \n def to_df(self):\n", "issue": "undeprecate .from_df\nWith server apps it is necessary to update `.data` all \"at once\" so a method to construct a dictionary for `.data` is actually useful and needed. Going to un-deprecate for now, can discuss other ideas for this functionality later. \n\n", "before_files": [{"content": "from __future__ import absolute_import\n\nfrom ..core import validation\nfrom ..core.validation.errors import COLUMN_LENGTHS\nfrom ..core.properties import abstract\nfrom ..core.properties import Any, Int, String, Instance, List, Dict, Bool, Enum, JSON\nfrom ..model import Model\nfrom ..util.dependencies import import_optional\nfrom ..util.deprecate import deprecated\nfrom ..util.serialization import transform_column_source_data\nfrom .callbacks import Callback\n\npd = import_optional('pandas')\n\n@abstract\nclass DataSource(Model):\n \"\"\" A base class for data source types. ``DataSource`` is\n not generally useful to instantiate on its own.\n\n \"\"\"\n\n selected = Dict(String, Dict(String, Any), default={\n '0d': {'glyph': None, 'indices': []},\n '1d': {'indices': []},\n '2d': {'indices': []}\n }, help=\"\"\"\n A dict to indicate selected indices on different dimensions on this DataSource. Keys are:\n\n - 0d: indicates whether a Line or Patch glyphs have been hit. Value is a\n dict with the following keys:\n\n - flag (boolean): true if glyph was with false otherwise\n - indices (list): indices hit (if applicable)\n\n - 1d: indicates whether any of all other glyph (except [multi]line or\n patches) was hit:\n\n - indices (list): indices that were hit/selected\n\n - 2d: indicates whether a [multi]line or patches) were hit:\n\n - indices (list(list)): indices of the lines/patches that were\n hit/selected\n \"\"\")\n\n callback = Instance(Callback, help=\"\"\"\n A callback to run in the browser whenever the selection is changed.\n \"\"\")\n\nclass ColumnDataSource(DataSource):\n \"\"\" Maps names of columns to sequences or arrays.\n\n If the ColumnDataSource initializer is called with a single argument that\n is a dict or pandas.DataFrame, that argument is used as the value for the\n \"data\" attribute. For example::\n\n ColumnDataSource(mydict) # same as ColumnDataSource(data=mydict)\n ColumnDataSource(df) # same as ColumnDataSource(data=df)\n\n .. note::\n There is an implicit assumption that all the columns in a\n a given ColumnDataSource have the same length.\n\n \"\"\"\n\n data = Dict(String, Any, help=\"\"\"\n Mapping of column names to sequences of data. 
The data can be, e.g,\n Python lists or tuples, NumPy arrays, etc.\n \"\"\")\n\n column_names = List(String, help=\"\"\"\n An list of names for all the columns in this DataSource.\n \"\"\")\n\n def __init__(self, *args, **kw):\n \"\"\" If called with a single argument that is a dict or\n pandas.DataFrame, treat that implicitly as the \"data\" attribute.\n \"\"\"\n if len(args) == 1 and \"data\" not in kw:\n kw[\"data\"] = args[0]\n # TODO (bev) invalid to pass args and \"data\", check and raise exception\n raw_data = kw.pop(\"data\", {})\n if not isinstance(raw_data, dict):\n if pd and isinstance(raw_data, pd.DataFrame):\n raw_data = self._data_from_df(raw_data)\n else:\n raise ValueError(\"expected a dict or pandas.DataFrame, got %s\" % raw_data)\n super(ColumnDataSource, self).__init__(**kw)\n for name, data in raw_data.items():\n self.add(data, name)\n\n @staticmethod\n def _data_from_df(df):\n \"\"\" Create a ``dict`` of columns from a Pandas DataFrame,\n suitable for creating a ColumnDataSource.\n\n Args:\n df (DataFrame) : data to convert\n\n Returns:\n dict(str, list)\n\n \"\"\"\n index = df.index\n new_data = {}\n for colname in df:\n new_data[colname] = df[colname].tolist()\n if index.name:\n new_data[index.name] = index.tolist()\n elif index.names and not all([x is None for x in index.names]):\n new_data[\"_\".join(index.names)] = index.tolist()\n else:\n new_data[\"index\"] = index.tolist()\n return new_data\n\n @classmethod\n @deprecated(\"Bokeh 0.9.3\", \"ColumnDataSource initializer\")\n def from_df(cls, data):\n \"\"\" Create a ``dict`` of columns from a Pandas DataFrame,\n suitable for creating a ColumnDataSource.\n\n Args:\n data (DataFrame) : data to convert\n\n Returns:\n dict(str, list)\n\n \"\"\"\n import warnings\n warnings.warn(\"Method deprecated in Bokeh 0.9.3\")\n return cls._data_from_df(data)\n\n def to_df(self):\n \"\"\" Convert this data source to pandas dataframe.\n\n If ``column_names`` is set, use those. Otherwise let Pandas\n infer the column names. The ``column_names`` property can be\n used both to order and filter the columns.\n\n Returns:\n DataFrame\n\n \"\"\"\n if not pd:\n raise RuntimeError('Pandas must be installed to convert to a Pandas Dataframe')\n if self.column_names:\n return pd.DataFrame(self.data, columns=self.column_names)\n else:\n return pd.DataFrame(self.data)\n\n def add(self, data, name=None):\n \"\"\" Appends a new column of data to the data source.\n\n Args:\n data (seq) : new data to add\n name (str, optional) : column name to use.\n If not supplied, generate a name go the form \"Series ####\"\n\n Returns:\n str: the column name used\n\n \"\"\"\n if name is None:\n n = len(self.data)\n while \"Series %d\"%n in self.data:\n n += 1\n name = \"Series %d\"%n\n self.column_names.append(name)\n self.data[name] = data\n return name\n\n def _to_json_like(self, include_defaults):\n attrs = super(ColumnDataSource, self)._to_json_like(include_defaults=include_defaults)\n if 'data' in attrs:\n attrs['data'] = transform_column_source_data(attrs['data'])\n return attrs\n\n def remove(self, name):\n \"\"\" Remove a column of data.\n\n Args:\n name (str) : name of the column to remove\n\n Returns:\n None\n\n .. 
note::\n If the column name does not exist, a warning is issued.\n\n \"\"\"\n try:\n self.column_names.remove(name)\n del self.data[name]\n except (ValueError, KeyError):\n import warnings\n warnings.warn(\"Unable to find column '%s' in data source\" % name)\n\n @deprecated(\"Bokeh 0.11.0\", \"bokeh.io.push_notebook\")\n def push_notebook(self):\n \"\"\" Update a data source for a plot in a Jupyter notebook.\n\n This function can be be used to update data in plot data sources\n in the Jupyter notebook, without having to use the Bokeh server.\n\n .. warning::\n This function has been deprecated. Please use\n ``bokeh.io.push_notebook()`` which will push all changes\n (not just data sources) to the last shown plot in a Jupyter\n notebook.\n\n Returns:\n None\n\n \"\"\"\n from bokeh.io import push_notebook\n push_notebook()\n\n @validation.error(COLUMN_LENGTHS)\n def _check_column_lengths(self):\n lengths = set(len(x) for x in self.data.values())\n if len(lengths) > 1:\n return str(self)\n\n\n def stream(self, new_data, rollover=None):\n import numpy as np\n\n newkeys = set(new_data.keys())\n oldkeys = set(self.data.keys())\n if newkeys != oldkeys:\n missing = oldkeys - newkeys\n extra = newkeys - oldkeys\n if missing and extra:\n raise ValueError(\"Must stream updates to all existing columns (missing: %s, extra: %s)\" % (\", \".join(sorted(missing)), \", \".join(sorted(extra))))\n elif missing:\n raise ValueError(\"Must stream updates to all existing columns (missing: %s)\" % \", \".join(sorted(missing)))\n else:\n raise ValueError(\"Must stream updates to all existing columns (extra: %s)\" % \", \".join(sorted(extra)))\n\n lengths = set()\n for x in new_data.values():\n if isinstance(x, np.ndarray):\n if len(x.shape) != 1:\n raise ValueError(\"stream(...) only supports 1d sequences, got ndarray with size %r\" % (x.shape,))\n lengths.add(x.shape[0])\n else:\n lengths.add(len(x))\n\n if len(lengths) > 1:\n raise ValueError(\"All streaming column updates must be the same length\")\n\n self.data._stream(self.document, self, new_data, rollover)\n\nclass GeoJSONDataSource(ColumnDataSource):\n\n geojson = JSON(help=\"\"\"\n GeoJSON that contains features for plotting. Currently GeoJSONDataSource can\n only process a FeatureCollection or GeometryCollection.\n \"\"\")\n\n\n@abstract\nclass RemoteSource(ColumnDataSource):\n\n data_url = String(help=\"\"\"\n The URL to the endpoint for the data.\n \"\"\")\n\n polling_interval = Int(help=\"\"\"\n polling interval for updating data source in milliseconds\n \"\"\")\n\nclass AjaxDataSource(RemoteSource):\n\n method = Enum('POST', 'GET', help=\"http method - GET or POST\")\n\n mode = Enum(\"replace\", \"append\", help=\"\"\"\n Whether to append new data to existing data (up to ``max_size``),\n or to replace existing data entirely.\n \"\"\")\n\n max_size = Int(help=\"\"\"\n Maximum size of the data array being kept after each pull requests.\n Larger than that size, the data will be right shifted.\n \"\"\")\n\n if_modified = Bool(False, help=\"\"\"\n Whether to include an ``If-Modified-Since`` header in AJAX requests\n to the server. If this header is supported by the server, then only\n new data since the last request will be returned.\n \"\"\")\n content_type = String(default='application/json', help=\"\"\"\n Set the \"contentType\" parameter for the Ajax request.\n \"\"\")\n http_headers = Dict(String, String, help=\"\"\"\n HTTP headers to set for the Ajax request.\n \"\"\")\n", "path": "bokeh/models/sources.py"}]} | 3,635 | 170 |
gh_patches_debug_27442 | rasdani/github-patches | git_diff | aio-libs-abandoned__aioredis-py-839 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Drop Python 3.5 support
Python 3.5 was a famous release (async/await syntax was introduced), but the version has reached end-of-life.
We can (and should) drop it.
Speaking as the maintainer of other libraries, I can say that it simplifies the code base a little.
</issue>
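For illustration only: dropping support for an interpreter version is usually declared through `python_requires` in the packaging metadata rather than a runtime check, so pip refuses to install on older Pythons. The snippet below is a generic sketch of that pattern with a placeholder package name, not the actual change made in this repository.

```python
from setuptools import setup

setup(
    name="example-package",      # placeholder name
    python_requires=">=3.6",     # refuse installation on Python < 3.6
)
```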
<code>
[start of setup.py]
1 import re
2 import os.path
3 import sys
4 import platform
5 from setuptools import setup, find_packages
6
7
8 install_requires = ['async-timeout']
9 if platform.python_implementation() == 'CPython':
10 install_requires.append('hiredis')
11
12 PY_VER = sys.version_info
13
14 if PY_VER < (3, 5):
15 raise RuntimeError("aioredis doesn't support Python version prior 3.5")
16
17
18 def read(*parts):
19 with open(os.path.join(*parts), 'rt') as f:
20 return f.read().strip()
21
22
23 def read_version():
24 regexp = re.compile(r"^__version__\W*=\W*'([\d.abrc]+)'")
25 init_py = os.path.join(os.path.dirname(__file__),
26 'aioredis', '__init__.py')
27 with open(init_py) as f:
28 for line in f:
29 match = regexp.match(line)
30 if match is not None:
31 return match.group(1)
32 raise RuntimeError('Cannot find version in {}'.format(init_py))
33
34
35 classifiers = [
36 'License :: OSI Approved :: MIT License',
37 'Development Status :: 4 - Beta',
38 'Programming Language :: Python',
39 'Programming Language :: Python :: 3',
40 'Programming Language :: Python :: 3.5',
41 'Programming Language :: Python :: 3.6',
42 'Programming Language :: Python :: 3.7',
43 'Programming Language :: Python :: 3 :: Only',
44 'Operating System :: POSIX',
45 'Environment :: Web Environment',
46 'Intended Audience :: Developers',
47 'Topic :: Software Development',
48 'Topic :: Software Development :: Libraries',
49 'Framework :: AsyncIO',
50 ]
51
52 setup(name='aioredis',
53 version=read_version(),
54 description=("asyncio (PEP 3156) Redis support"),
55 long_description="\n\n".join((read('README.rst'), read('CHANGES.txt'))),
56 classifiers=classifiers,
57 platforms=["POSIX"],
58 author="Alexey Popravka",
59 author_email="[email protected]",
60 url="https://github.com/aio-libs/aioredis",
61 license="MIT",
62 packages=find_packages(exclude=["tests"]),
63 install_requires=install_requires,
64 include_package_data=True,
65 )
66
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -1,20 +1,8 @@
import re
import os.path
-import sys
-import platform
from setuptools import setup, find_packages
-install_requires = ['async-timeout']
-if platform.python_implementation() == 'CPython':
- install_requires.append('hiredis')
-
-PY_VER = sys.version_info
-
-if PY_VER < (3, 5):
- raise RuntimeError("aioredis doesn't support Python version prior 3.5")
-
-
def read(*parts):
with open(os.path.join(*parts), 'rt') as f:
return f.read().strip()
@@ -37,7 +25,6 @@
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
@@ -60,6 +47,10 @@
url="https://github.com/aio-libs/aioredis",
license="MIT",
packages=find_packages(exclude=["tests"]),
- install_requires=install_requires,
+ install_requires=[
+ 'async-timeout',
+ 'hiredis; implementation_name=="cpython"'
+ ],
+ python_requires=">=3.6",
include_package_data=True,
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -1,20 +1,8 @@\n import re\n import os.path\n-import sys\n-import platform\n from setuptools import setup, find_packages\n \n \n-install_requires = ['async-timeout']\n-if platform.python_implementation() == 'CPython':\n- install_requires.append('hiredis')\n-\n-PY_VER = sys.version_info\n-\n-if PY_VER < (3, 5):\n- raise RuntimeError(\"aioredis doesn't support Python version prior 3.5\")\n-\n-\n def read(*parts):\n with open(os.path.join(*parts), 'rt') as f:\n return f.read().strip()\n@@ -37,7 +25,6 @@\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n- 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n@@ -60,6 +47,10 @@\n url=\"https://github.com/aio-libs/aioredis\",\n license=\"MIT\",\n packages=find_packages(exclude=[\"tests\"]),\n- install_requires=install_requires,\n+ install_requires=[\n+ 'async-timeout',\n+ 'hiredis; implementation_name==\"cpython\"'\n+ ],\n+ python_requires=\">=3.6\",\n include_package_data=True,\n )\n", "issue": "Drop Python 3.5 support\nPython 3.5 was a famous release (async/await syntax was introduced) but the version has reached end-of-life.\r\nWe can (and should) drop it.\r\nSpeaking as the maintainer of other libraries, I can say that it simplifies the code base a little\n", "before_files": [{"content": "import re\nimport os.path\nimport sys\nimport platform\nfrom setuptools import setup, find_packages\n\n\ninstall_requires = ['async-timeout']\nif platform.python_implementation() == 'CPython':\n install_requires.append('hiredis')\n\nPY_VER = sys.version_info\n\nif PY_VER < (3, 5):\n raise RuntimeError(\"aioredis doesn't support Python version prior 3.5\")\n\n\ndef read(*parts):\n with open(os.path.join(*parts), 'rt') as f:\n return f.read().strip()\n\n\ndef read_version():\n regexp = re.compile(r\"^__version__\\W*=\\W*'([\\d.abrc]+)'\")\n init_py = os.path.join(os.path.dirname(__file__),\n 'aioredis', '__init__.py')\n with open(init_py) as f:\n for line in f:\n match = regexp.match(line)\n if match is not None:\n return match.group(1)\n raise RuntimeError('Cannot find version in {}'.format(init_py))\n\n\nclassifiers = [\n 'License :: OSI Approved :: MIT License',\n 'Development Status :: 4 - Beta',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Operating System :: POSIX',\n 'Environment :: Web Environment',\n 'Intended Audience :: Developers',\n 'Topic :: Software Development',\n 'Topic :: Software Development :: Libraries',\n 'Framework :: AsyncIO',\n]\n\nsetup(name='aioredis',\n version=read_version(),\n description=(\"asyncio (PEP 3156) Redis support\"),\n long_description=\"\\n\\n\".join((read('README.rst'), read('CHANGES.txt'))),\n classifiers=classifiers,\n platforms=[\"POSIX\"],\n author=\"Alexey Popravka\",\n author_email=\"[email protected]\",\n url=\"https://github.com/aio-libs/aioredis\",\n license=\"MIT\",\n packages=find_packages(exclude=[\"tests\"]),\n install_requires=install_requires,\n include_package_data=True,\n )\n", "path": "setup.py"}]} | 1,209 | 335 |
gh_patches_debug_17186 | rasdani/github-patches | git_diff | microsoft__Qcodes-5008 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Parsing version string error with Keysight_34465A
When I try to connect a Keysight_34465A digital multimeter with
```
from qcodes.instrument_drivers.Keysight.Keysight_34465A_submodules import Keysight_34465A
dmm = Keysight_34465A('dmm', 'TCPIP0::10.21.64.130::inst0::INSTR')
```
I get "InvalidVersion: Invalid version '65.02.17-02.40-02.17-00.52-04-01'", whereas the actual firmware version is 'A.02.17-02.40-02.17-00.52-04-01':
```
---------------------------------------------------------------------------
InvalidVersion Traceback (most recent call last)
Cell In[1], line 2
1 from qcodes.instrument_drivers.Keysight.Keysight_34465A_submodules import Keysight_34465A
----> 2 dmm = Keysight_34465A('dmm', 'TCPIP0::10.21.64.130::inst0::INSTR')
File ~\.conda\envs\qcodes\lib\site-packages\qcodes\instrument\instrument_meta.py:36, in InstrumentMeta.__call__(cls, *args, **kwargs)
31 def __call__(cls, *args: Any, **kwargs: Any) -> Any:
32 """
33 Overloads `type.__call__` to add code that runs only if __init__ completes
34 successfully.
35 """
---> 36 new_inst = super().__call__(*args, **kwargs)
37 is_abstract = new_inst._is_abstract()
38 if is_abstract:
File ~\.conda\envs\qcodes\lib\site-packages\qcodes\instrument_drivers\Keysight\Keysight_34465A_submodules.py:12, in Keysight34465A.__init__(self, name, address, silent, **kwargs)
10 def __init__(self, name: str, address: str, silent: bool = False,
11 **kwargs: Any):
---> 12 super().__init__(name, address, silent, **kwargs)
File ~\.conda\envs\qcodes\lib\site-packages\qcodes\instrument_drivers\Keysight\private\Keysight_344xxA_submodules.py:470, in _Keysight_344xxA.__init__(self, name, address, silent, **kwargs)
463 ####################################
464 # Instrument specifications
466 options = self._options()
467 self.has_DIG = self.is_34465A_34470A and (
468 "DIG" in options
469 or version.parse(convert_legacy_version_to_supported_version("A.03"))
--> 470 <= version.parse(
471 convert_legacy_version_to_supported_version(idn["firmware"])
472 )
473 )
474 # Note that the firmware version check is still needed because
475 # ``_options`` (the ``*OPT?`` command) returns 'DIG' option for
476 # firmware 3.0 only if it has been purchased before
477 self.has_MEM = self.is_34465A_34470A and 'MEM' in options
File ~\.conda\envs\qcodes\lib\site-packages\packaging\version.py:52, in parse(version)
43 def parse(version: str) -> "Version":
44 """Parse the given version string.
45
46 >>> parse('1.0.dev1')
(...)
50 :raises InvalidVersion: When the version string is not a valid version.
51 """
---> 52 return Version(version)
File ~\.conda\envs\qcodes\lib\site-packages\packaging\version.py:197, in Version.__init__(self, version)
195 match = self._regex.search(version)
196 if not match:
--> 197 raise InvalidVersion(f"Invalid version: '{version}'")
199 # Store the parsed out pieces of the version
200 self._version = _Version(
201 epoch=int(match.group("epoch")) if match.group("epoch") else 0,
202 release=tuple(int(i) for i in match.group("release").split(".")),
(...)
208 local=_parse_local_version(match.group("local")),
209 )
```
OS Windows 11
qcodes 0.37.0
Pyvisa 1.13.0
pyserial 3.5
</issue>
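To make the failure above concrete, here is a small self-contained reproduction (not qcodes code; the helper mirrors `convert_legacy_version_to_supported_version`, and the truncation shown is only one possible remedy):

```python
from packaging import version

def convert_legacy(ver: str) -> str:
    # Same idea as the qcodes helper: alphabetic chars become their ASCII codes
    return "".join(str(ord(c.upper())) if c.isalpha() else c for c in ver)

raw = "A.02.17-02.40-02.17-00.52-04-01"
converted = convert_legacy(raw)          # '65.02.17-02.40-02.17-00.52-04-01'

try:
    version.parse(converted)             # rejected by recent packaging releases
except version.InvalidVersion as exc:
    print("rejected:", exc)

# Keeping only the part before the first '-' yields a PEP 440-compliant string
print(version.parse(convert_legacy(raw.split("-")[0])))   # 65.2.17
```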
<code>
[start of qcodes/utils/installation_info.py]
1 """
2 This module contains helper functions that provide information about how
3 QCoDeS is installed and about what other packages are installed along with
4 QCoDeS
5 """
6 import json
7 import logging
8 import subprocess
9 import sys
10 from typing import Dict, Optional
11
12 if sys.version_info >= (3, 10):
13 # distribution.name used below became part of the
14 # official api in 3.10
15 from importlib.metadata import distributions
16 else:
17 # 3.9 and earlier
18 from importlib_metadata import distributions
19
20 from qcodes.utils.deprecate import deprecate
21
22 log = logging.getLogger(__name__)
23
24
25 def is_qcodes_installed_editably() -> Optional[bool]:
26 """
27 Try to ask pip whether QCoDeS is installed in editable mode and return
28 the answer a boolean. Returns None if pip somehow did not respond as
29 expected.
30 """
31
32 answer: Optional[bool]
33
34 try:
35 pipproc = subprocess.run(['python', '-m', 'pip', 'list', '-e', '--no-index',
36 '--format=json'],
37 check=True,
38 stdout=subprocess.PIPE)
39 e_pkgs = json.loads(pipproc.stdout.decode('utf-8'))
40 answer = any([d["name"] == 'qcodes' for d in e_pkgs])
41 except Exception as e: # we actually do want a catch-all here
42 log.warning(f'{type(e)}: {str(e)}')
43 answer = None
44
45 return answer
46
47
48 @deprecate("function 'get_qcodes_version'", alternative="qcodes.__version__")
49 def get_qcodes_version() -> str:
50 """
51 Get the version of the currently installed QCoDeS
52 """
53 from qcodes._version import __version__
54 return __version__
55
56
57 def get_all_installed_package_versions() -> Dict[str, str]:
58 """
59 Return a dictionary of the currently installed packages and their versions.
60 """
61 return {d.name: d.version for d in distributions()}
62
63
64 def convert_legacy_version_to_supported_version(ver: str) -> str:
65 """
66 Convert a legacy version str containing single chars rather than
67 numbers to a regular version string. This is done by replacing a char
68 by its ASCII code (using ``ord``). This assumes that the version number
69 only uses at most a single char per level and only ASCII chars.
70 """
71
72 temp_list = []
73 for v in ver:
74 if v.isalpha():
75 temp_list.append(str(ord(v.upper())))
76 else:
77 temp_list.append(v)
78 return "".join(temp_list)
79
[end of qcodes/utils/installation_info.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/qcodes/utils/installation_info.py b/qcodes/utils/installation_info.py
--- a/qcodes/utils/installation_info.py
+++ b/qcodes/utils/installation_info.py
@@ -67,6 +67,11 @@
numbers to a regular version string. This is done by replacing a char
by its ASCII code (using ``ord``). This assumes that the version number
only uses at most a single char per level and only ASCII chars.
+
+ It also splits off anything that comes after the first ``-`` in the version str.
+
+ This is meant to pass versions like ``'A.02.17-02.40-02.17-00.52-04-01'``
+ primarily used by Keysight instruments.
"""
temp_list = []
@@ -75,4 +80,5 @@
temp_list.append(str(ord(v.upper())))
else:
temp_list.append(v)
- return "".join(temp_list)
+ temp_str = "".join(temp_list)
+ return temp_str.split("-")[0]
| {"golden_diff": "diff --git a/qcodes/utils/installation_info.py b/qcodes/utils/installation_info.py\n--- a/qcodes/utils/installation_info.py\n+++ b/qcodes/utils/installation_info.py\n@@ -67,6 +67,11 @@\n numbers to a regular version string. This is done by replacing a char\n by its ASCII code (using ``ord``). This assumes that the version number\n only uses at most a single char per level and only ASCII chars.\n+\n+ It also splits off anything that comes after the first ``-`` in the version str.\n+\n+ This is meant to pass versions like ``'A.02.17-02.40-02.17-00.52-04-01'``\n+ primarily used by Keysight instruments.\n \"\"\"\n \n temp_list = []\n@@ -75,4 +80,5 @@\n temp_list.append(str(ord(v.upper())))\n else:\n temp_list.append(v)\n- return \"\".join(temp_list)\n+ temp_str = \"\".join(temp_list)\n+ return temp_str.split(\"-\")[0]\n", "issue": "Parsing version string error with Keysight_34465A\nWhen I try to connect a Keysight_34465A digital multimeter with \r\n\r\n```\r\nfrom qcodes.instrument_drivers.Keysight.Keysight_34465A_submodules import Keysight_34465A\r\ndmm = Keysight_34465A('dmm', 'TCPIP0::10.21.64.130::inst0::INSTR')\r\n```\r\n\r\nI get \"InvalidVersion: Invalid version '65.02.17-02.40-02.17-00.52-04-01'\", whereas the actual firmware version is 'A.02.17-02.40-02.17-00.52-04-01':\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nInvalidVersion Traceback (most recent call last)\r\nCell In[1], line 2\r\n 1 from qcodes.instrument_drivers.Keysight.Keysight_34465A_submodules import Keysight_34465A\r\n----> 2 dmm = Keysight_34465A('dmm', 'TCPIP0::10.21.64.130::inst0::INSTR')\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\qcodes\\instrument\\instrument_meta.py:36, in InstrumentMeta.__call__(cls, *args, **kwargs)\r\n 31 def __call__(cls, *args: Any, **kwargs: Any) -> Any:\r\n 32 \"\"\"\r\n 33 Overloads `type.__call__` to add code that runs only if __init__ completes\r\n 34 successfully.\r\n 35 \"\"\"\r\n---> 36 new_inst = super().__call__(*args, **kwargs)\r\n 37 is_abstract = new_inst._is_abstract()\r\n 38 if is_abstract:\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\qcodes\\instrument_drivers\\Keysight\\Keysight_34465A_submodules.py:12, in Keysight34465A.__init__(self, name, address, silent, **kwargs)\r\n 10 def __init__(self, name: str, address: str, silent: bool = False,\r\n 11 **kwargs: Any):\r\n---> 12 super().__init__(name, address, silent, **kwargs)\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\qcodes\\instrument_drivers\\Keysight\\private\\Keysight_344xxA_submodules.py:470, in _Keysight_344xxA.__init__(self, name, address, silent, **kwargs)\r\n 463 ####################################\r\n 464 # Instrument specifications\r\n 466 options = self._options()\r\n 467 self.has_DIG = self.is_34465A_34470A and (\r\n 468 \"DIG\" in options\r\n 469 or version.parse(convert_legacy_version_to_supported_version(\"A.03\"))\r\n--> 470 <= version.parse(\r\n 471 convert_legacy_version_to_supported_version(idn[\"firmware\"])\r\n 472 )\r\n 473 )\r\n 474 # Note that the firmware version check is still needed because\r\n 475 # ``_options`` (the ``*OPT?`` command) returns 'DIG' option for\r\n 476 # firmware 3.0 only if it has been purchased before\r\n 477 self.has_MEM = self.is_34465A_34470A and 'MEM' in options\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\packaging\\version.py:52, in parse(version)\r\n 43 def parse(version: str) -> \"Version\":\r\n 44 \"\"\"Parse the given version string.\r\n 45 \r\n 46 >>> 
parse('1.0.dev1')\r\n (...)\r\n 50 :raises InvalidVersion: When the version string is not a valid version.\r\n 51 \"\"\"\r\n---> 52 return Version(version)\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\packaging\\version.py:197, in Version.__init__(self, version)\r\n 195 match = self._regex.search(version)\r\n 196 if not match:\r\n--> 197 raise InvalidVersion(f\"Invalid version: '{version}'\")\r\n 199 # Store the parsed out pieces of the version\r\n 200 self._version = _Version(\r\n 201 epoch=int(match.group(\"epoch\")) if match.group(\"epoch\") else 0,\r\n 202 release=tuple(int(i) for i in match.group(\"release\").split(\".\")),\r\n (...)\r\n 208 local=_parse_local_version(match.group(\"local\")),\r\n 209 )\r\n```\r\n\r\nOS Windows 11\r\nqcodes 0.37.0\r\nPyvisa 1.13.0\r\npyserial 3.5\nParsing version string error with Keysight_34465A\nWhen I try to connect a Keysight_34465A digital multimeter with \r\n\r\n```\r\nfrom qcodes.instrument_drivers.Keysight.Keysight_34465A_submodules import Keysight_34465A\r\ndmm = Keysight_34465A('dmm', 'TCPIP0::10.21.64.130::inst0::INSTR')\r\n```\r\n\r\nI get \"InvalidVersion: Invalid version '65.02.17-02.40-02.17-00.52-04-01'\", whereas the actual firmware version is 'A.02.17-02.40-02.17-00.52-04-01':\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nInvalidVersion Traceback (most recent call last)\r\nCell In[1], line 2\r\n 1 from qcodes.instrument_drivers.Keysight.Keysight_34465A_submodules import Keysight_34465A\r\n----> 2 dmm = Keysight_34465A('dmm', 'TCPIP0::10.21.64.130::inst0::INSTR')\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\qcodes\\instrument\\instrument_meta.py:36, in InstrumentMeta.__call__(cls, *args, **kwargs)\r\n 31 def __call__(cls, *args: Any, **kwargs: Any) -> Any:\r\n 32 \"\"\"\r\n 33 Overloads `type.__call__` to add code that runs only if __init__ completes\r\n 34 successfully.\r\n 35 \"\"\"\r\n---> 36 new_inst = super().__call__(*args, **kwargs)\r\n 37 is_abstract = new_inst._is_abstract()\r\n 38 if is_abstract:\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\qcodes\\instrument_drivers\\Keysight\\Keysight_34465A_submodules.py:12, in Keysight34465A.__init__(self, name, address, silent, **kwargs)\r\n 10 def __init__(self, name: str, address: str, silent: bool = False,\r\n 11 **kwargs: Any):\r\n---> 12 super().__init__(name, address, silent, **kwargs)\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\qcodes\\instrument_drivers\\Keysight\\private\\Keysight_344xxA_submodules.py:470, in _Keysight_344xxA.__init__(self, name, address, silent, **kwargs)\r\n 463 ####################################\r\n 464 # Instrument specifications\r\n 466 options = self._options()\r\n 467 self.has_DIG = self.is_34465A_34470A and (\r\n 468 \"DIG\" in options\r\n 469 or version.parse(convert_legacy_version_to_supported_version(\"A.03\"))\r\n--> 470 <= version.parse(\r\n 471 convert_legacy_version_to_supported_version(idn[\"firmware\"])\r\n 472 )\r\n 473 )\r\n 474 # Note that the firmware version check is still needed because\r\n 475 # ``_options`` (the ``*OPT?`` command) returns 'DIG' option for\r\n 476 # firmware 3.0 only if it has been purchased before\r\n 477 self.has_MEM = self.is_34465A_34470A and 'MEM' in options\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\packaging\\version.py:52, in parse(version)\r\n 43 def parse(version: str) -> \"Version\":\r\n 44 \"\"\"Parse the given version string.\r\n 45 \r\n 46 >>> parse('1.0.dev1')\r\n (...)\r\n 50 :raises 
InvalidVersion: When the version string is not a valid version.\r\n 51 \"\"\"\r\n---> 52 return Version(version)\r\n\r\nFile ~\\.conda\\envs\\qcodes\\lib\\site-packages\\packaging\\version.py:197, in Version.__init__(self, version)\r\n 195 match = self._regex.search(version)\r\n 196 if not match:\r\n--> 197 raise InvalidVersion(f\"Invalid version: '{version}'\")\r\n 199 # Store the parsed out pieces of the version\r\n 200 self._version = _Version(\r\n 201 epoch=int(match.group(\"epoch\")) if match.group(\"epoch\") else 0,\r\n 202 release=tuple(int(i) for i in match.group(\"release\").split(\".\")),\r\n (...)\r\n 208 local=_parse_local_version(match.group(\"local\")),\r\n 209 )\r\n```\r\n\r\nOS Windows 11\r\nqcodes 0.37.0\r\nPyvisa 1.13.0\r\npyserial 3.5\n", "before_files": [{"content": "\"\"\"\nThis module contains helper functions that provide information about how\nQCoDeS is installed and about what other packages are installed along with\nQCoDeS\n\"\"\"\nimport json\nimport logging\nimport subprocess\nimport sys\nfrom typing import Dict, Optional\n\nif sys.version_info >= (3, 10):\n # distribution.name used below became part of the\n # official api in 3.10\n from importlib.metadata import distributions\nelse:\n # 3.9 and earlier\n from importlib_metadata import distributions\n\nfrom qcodes.utils.deprecate import deprecate\n\nlog = logging.getLogger(__name__)\n\n\ndef is_qcodes_installed_editably() -> Optional[bool]:\n \"\"\"\n Try to ask pip whether QCoDeS is installed in editable mode and return\n the answer a boolean. Returns None if pip somehow did not respond as\n expected.\n \"\"\"\n\n answer: Optional[bool]\n\n try:\n pipproc = subprocess.run(['python', '-m', 'pip', 'list', '-e', '--no-index',\n '--format=json'],\n check=True,\n stdout=subprocess.PIPE)\n e_pkgs = json.loads(pipproc.stdout.decode('utf-8'))\n answer = any([d[\"name\"] == 'qcodes' for d in e_pkgs])\n except Exception as e: # we actually do want a catch-all here\n log.warning(f'{type(e)}: {str(e)}')\n answer = None\n\n return answer\n\n\n@deprecate(\"function 'get_qcodes_version'\", alternative=\"qcodes.__version__\")\ndef get_qcodes_version() -> str:\n \"\"\"\n Get the version of the currently installed QCoDeS\n \"\"\"\n from qcodes._version import __version__\n return __version__\n\n\ndef get_all_installed_package_versions() -> Dict[str, str]:\n \"\"\"\n Return a dictionary of the currently installed packages and their versions.\n \"\"\"\n return {d.name: d.version for d in distributions()}\n\n\ndef convert_legacy_version_to_supported_version(ver: str) -> str:\n \"\"\"\n Convert a legacy version str containing single chars rather than\n numbers to a regular version string. This is done by replacing a char\n by its ASCII code (using ``ord``). This assumes that the version number\n only uses at most a single char per level and only ASCII chars.\n \"\"\"\n\n temp_list = []\n for v in ver:\n if v.isalpha():\n temp_list.append(str(ord(v.upper())))\n else:\n temp_list.append(v)\n return \"\".join(temp_list)\n", "path": "qcodes/utils/installation_info.py"}]} | 3,588 | 243 |
gh_patches_debug_34889 | rasdani/github-patches | git_diff | scikit-hep__pyhf-1818 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add contextlib support to the pyhf.schema API
> instead of having users overwriting the value by assignment, do you think there is value in offering a `pyhf.utils.update_schema_path` or something that performs this operation through the API?
>
> Thanks @kratsg for taking up my suggestion!
>
> I second @matthewfeickert's suggestion to change schemas via function call rather than assignment. It is much simpler to add functionality to a function if it ever becomes necessary, than to replace an entire existing module with a class instance with a property.
>
> I'd even go so far to say that a context manager doubling as an update function would be ideal IMO:
> ```python
> # in pyhf.utils
> _SCHEMAS = Path(...)
> class use_schema_path: # snake_case to remind of function-like usage
> def __init__(self, path):
> global _SCHEMAS
> self._old_schemas = _SCHEMAS
> _SCHEMAS = pathlib.Path(path)
> def __enter__(self):
> pass
> def __exit__(self, *args, **kwargs):
> global _SCHEMAS
> _SCHEMAS = self._old_schemas
> ```
> which can still be called as a function (only executing `__init__`), so short scripts etc. are not forced to use `with` blocks.
> But it can also be used like so:
> ```python
> def make_my_workspace(spec):
> with pyhf.utils.use_schema_path('/my/very/special/schemas'):
> return pyhf.Workspace(spec)
> ```
> So as a user writing code on top of pyhf, I don't have to worry about resetting the global variable; the CM does it for me, and there are fewer mistakes to make.
>
_Originally posted by @lhenkelm in https://github.com/scikit-hep/pyhf/issues/1753#issuecomment-1026678066_
</issue>
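A minimal, framework-agnostic sketch of the "callable that also works as a context manager" idea quoted above (the class name and the module-level default are placeholders, not pyhf's real internals):

```python
import pathlib

class use_schema_path:
    """Switch a module-level schema path; restore the previous one on exit."""

    _current = pathlib.Path("/default/schemas")   # stand-in for the module global

    def __init__(self, path):
        self._old = type(self)._current
        type(self)._current = pathlib.Path(path)

    def __enter__(self):
        return type(self)._current

    def __exit__(self, *exc_info):
        type(self)._current = self._old

use_schema_path("/my/special/schemas")             # plain call: the change sticks
with use_schema_path("/tmp/schemas"):              # CM form: the change is undone
    assert use_schema_path._current == pathlib.Path("/tmp/schemas")
assert use_schema_path._current == pathlib.Path("/my/special/schemas")
```

The patch recorded later in this entry gives the module-level `Schema` wrapper the same shape, with `__call__` returning `self` so it can drive a `with` block.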
<code>
[start of src/pyhf/schema/__init__.py]
1 """
2 See :class:`~pyhf.schema.Schema` for documentation.
3 """
4 import pathlib
5 import sys
6 from pyhf.schema.loader import load_schema
7 from pyhf.schema.validator import validate
8 from pyhf.schema import variables
9
10 __all__ = [
11 "load_schema",
12 "validate",
13 "path",
14 "version",
15 ]
16
17
18 def __dir__():
19 return __all__
20
21
22 class Schema(sys.modules[__name__].__class__):
23 """
24 A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.
25
26 Example:
27 >>> import pyhf.schema
28 >>> import pathlib
29 >>> curr_path = pyhf.schema.path
30 >>> curr_path # doctest: +ELLIPSIS
31 PosixPath('.../pyhf/schemas')
32 >>> pyhf.schema(pathlib.Path('/home/root/my/new/path'))
33 >>> pyhf.schema.path
34 PosixPath('/home/root/my/new/path')
35 >>> pyhf.schema(curr_path)
36 >>> pyhf.schema.path # doctest: +ELLIPSIS
37 PosixPath('.../pyhf/schemas')
38
39 """
40
41 def __call__(self, new_path: pathlib.Path):
42 """
43 Change the local search path for finding schemas locally.
44
45 Args:
46 new_path (pathlib.Path): Path to folder containing the schemas
47
48 Returns:
49 None
50 """
51 variables.schemas = new_path
52
53 @property
54 def path(self):
55 """
56 The local path for schemas.
57 """
58 return variables.schemas
59
60 @property
61 def version(self):
62 """
63 The default version used for finding schemas.
64 """
65 return variables.SCHEMA_VERSION
66
67
68 sys.modules[__name__].__class__ = Schema
69
[end of src/pyhf/schema/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/src/pyhf/schema/__init__.py b/src/pyhf/schema/__init__.py
--- a/src/pyhf/schema/__init__.py
+++ b/src/pyhf/schema/__init__.py
@@ -23,17 +23,40 @@
"""
A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.
- Example:
+ .. rubric:: Example (callable)
+
+ .. code-block:: pycon
+
>>> import pyhf.schema
>>> import pathlib
>>> curr_path = pyhf.schema.path
- >>> curr_path # doctest: +ELLIPSIS
+ >>> curr_path # doctest: +ELLIPSIS
PosixPath('.../pyhf/schemas')
- >>> pyhf.schema(pathlib.Path('/home/root/my/new/path'))
+ >>> new_path = pathlib.Path("/home/root/my/new/path")
+ >>> pyhf.schema(new_path) # doctest: +ELLIPSIS
+ <module 'pyhf.schema' from ...>
>>> pyhf.schema.path
PosixPath('/home/root/my/new/path')
- >>> pyhf.schema(curr_path)
- >>> pyhf.schema.path # doctest: +ELLIPSIS
+ >>> pyhf.schema(curr_path) # doctest: +ELLIPSIS
+ <module 'pyhf.schema' from ...>
+ >>> pyhf.schema.path # doctest: +ELLIPSIS
+ PosixPath('.../pyhf/schemas')
+
+ .. rubric:: Example (context-manager)
+
+ .. code-block:: pycon
+
+ >>> import pyhf.schema
+ >>> import pathlib
+ >>> curr_path = pyhf.schema.path
+ >>> curr_path # doctest: +ELLIPSIS
+ PosixPath('.../pyhf/schemas')
+ >>> new_path = pathlib.Path("/home/root/my/new/path")
+ >>> with pyhf.schema(new_path):
+ ... print(repr(pyhf.schema.path))
+ ...
+ PosixPath('/home/root/my/new/path')
+ >>> pyhf.schema.path # doctest: +ELLIPSIS
PosixPath('.../pyhf/schemas')
"""
@@ -45,10 +68,23 @@
Args:
new_path (pathlib.Path): Path to folder containing the schemas
+ Returns:
+ self (pyhf.schema.Schema): Returns itself (for contextlib management)
+ """
+ self.orig_path, variables.schemas = variables.schemas, new_path
+ return self
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, *args, **kwargs):
+ """
+ Reset the local search path for finding schemas locally.
+
Returns:
None
"""
- variables.schemas = new_path
+ variables.schemas = self.orig_path
@property
def path(self):
| {"golden_diff": "diff --git a/src/pyhf/schema/__init__.py b/src/pyhf/schema/__init__.py\n--- a/src/pyhf/schema/__init__.py\n+++ b/src/pyhf/schema/__init__.py\n@@ -23,17 +23,40 @@\n \"\"\"\n A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.\n \n- Example:\n+ .. rubric:: Example (callable)\n+\n+ .. code-block:: pycon\n+\n >>> import pyhf.schema\n >>> import pathlib\n >>> curr_path = pyhf.schema.path\n- >>> curr_path # doctest: +ELLIPSIS\n+ >>> curr_path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n- >>> pyhf.schema(pathlib.Path('/home/root/my/new/path'))\n+ >>> new_path = pathlib.Path(\"/home/root/my/new/path\")\n+ >>> pyhf.schema(new_path) # doctest: +ELLIPSIS\n+ <module 'pyhf.schema' from ...>\n >>> pyhf.schema.path\n PosixPath('/home/root/my/new/path')\n- >>> pyhf.schema(curr_path)\n- >>> pyhf.schema.path # doctest: +ELLIPSIS\n+ >>> pyhf.schema(curr_path) # doctest: +ELLIPSIS\n+ <module 'pyhf.schema' from ...>\n+ >>> pyhf.schema.path # doctest: +ELLIPSIS\n+ PosixPath('.../pyhf/schemas')\n+\n+ .. rubric:: Example (context-manager)\n+\n+ .. code-block:: pycon\n+\n+ >>> import pyhf.schema\n+ >>> import pathlib\n+ >>> curr_path = pyhf.schema.path\n+ >>> curr_path # doctest: +ELLIPSIS\n+ PosixPath('.../pyhf/schemas')\n+ >>> new_path = pathlib.Path(\"/home/root/my/new/path\")\n+ >>> with pyhf.schema(new_path):\n+ ... print(repr(pyhf.schema.path))\n+ ...\n+ PosixPath('/home/root/my/new/path')\n+ >>> pyhf.schema.path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n \n \"\"\"\n@@ -45,10 +68,23 @@\n Args:\n new_path (pathlib.Path): Path to folder containing the schemas\n \n+ Returns:\n+ self (pyhf.schema.Schema): Returns itself (for contextlib management)\n+ \"\"\"\n+ self.orig_path, variables.schemas = variables.schemas, new_path\n+ return self\n+\n+ def __enter__(self):\n+ pass\n+\n+ def __exit__(self, *args, **kwargs):\n+ \"\"\"\n+ Reset the local search path for finding schemas locally.\n+\n Returns:\n None\n \"\"\"\n- variables.schemas = new_path\n+ variables.schemas = self.orig_path\n \n @property\n def path(self):\n", "issue": "Add contextlib support to the pyhf.schema API\n> instead of having users overwriting the value by assignment, do you think there is value in offering a `pyhf.utils.update_schema_path` or something that performs this operation through the API?\r\n>\r\n> Thanks @kratsg for taking up my suggestion!\r\n> \r\n> I second @matthewfeickert's suggestion to change schemas via function call rather than assignment. It is much simpler to add functionality to a function if it ever becomes necessary, than to replace an entire existing module with a class instance with a property.\r\n> \r\n> I'd even go so far to say that a context manager doubling as an update function would be ideal IMO:\r\n> ```python\r\n> # in pyhf.utils\r\n> _SCHEMAS = Path(...)\r\n> class use_schema_path: # snake_case to remind of function-like usage\r\n> def __init__(self, path):\r\n> global _SCHEMAS\r\n> self._old_schemas = _SCHEMAS\r\n> _SCHEMAS = pathlib.Path(path)\r\n> def __enter__(self):\r\n> pass\r\n> def __exit__(self, *args, **kwargs):\r\n> global _SCHEMAS\r\n> _SCHEMAS = self._old_schemas\r\n> ```\r\n> which can still be called as a function (only executing `__init__`), so short scripts etc. 
are not forced to use `with` blocks.\r\n> But it can also be used like so:\r\n> ```python\r\n> def make_my_workspace(spec):\r\n> with pyhf.utils.use_schema_path('/my/very/special/schemas'):\r\n> return pyhf.Workspace(spec)\r\n> ```\r\n> So as a user writing code on top of pyhf, I don't have to worry about resesetting the global variable, the CM does it for me, and there are fewer mistakes to make.\r\n> \r\n_Originally posted by @lhenkelm in https://github.com/scikit-hep/pyhf/issues/1753#issuecomment-1026678066_\n", "before_files": [{"content": "\"\"\"\nSee :class:`~pyhf.schema.Schema` for documentation.\n\"\"\"\nimport pathlib\nimport sys\nfrom pyhf.schema.loader import load_schema\nfrom pyhf.schema.validator import validate\nfrom pyhf.schema import variables\n\n__all__ = [\n \"load_schema\",\n \"validate\",\n \"path\",\n \"version\",\n]\n\n\ndef __dir__():\n return __all__\n\n\nclass Schema(sys.modules[__name__].__class__):\n \"\"\"\n A module-level wrapper around :mod:`pyhf.schema` which will provide additional functionality for interacting with schemas.\n\n Example:\n >>> import pyhf.schema\n >>> import pathlib\n >>> curr_path = pyhf.schema.path\n >>> curr_path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n >>> pyhf.schema(pathlib.Path('/home/root/my/new/path'))\n >>> pyhf.schema.path\n PosixPath('/home/root/my/new/path')\n >>> pyhf.schema(curr_path)\n >>> pyhf.schema.path # doctest: +ELLIPSIS\n PosixPath('.../pyhf/schemas')\n\n \"\"\"\n\n def __call__(self, new_path: pathlib.Path):\n \"\"\"\n Change the local search path for finding schemas locally.\n\n Args:\n new_path (pathlib.Path): Path to folder containing the schemas\n\n Returns:\n None\n \"\"\"\n variables.schemas = new_path\n\n @property\n def path(self):\n \"\"\"\n The local path for schemas.\n \"\"\"\n return variables.schemas\n\n @property\n def version(self):\n \"\"\"\n The default version used for finding schemas.\n \"\"\"\n return variables.SCHEMA_VERSION\n\n\nsys.modules[__name__].__class__ = Schema\n", "path": "src/pyhf/schema/__init__.py"}]} | 1,485 | 656 |
gh_patches_debug_3498 | rasdani/github-patches | git_diff | readthedocs__readthedocs.org-1530 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Social Network Login Failure
When clicking on connect GitHub on the [social accounts page](https://readthedocs.org/accounts/social/connections/?) I get a message:
> An error occurred while attempting to login via your social network account.
There's a `?` in the url. Could that be a hint? Is it missing some request arguments? If I omit it, the bug persists.
Cheers!
</issue>
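For background (a hedged reading, not a confirmed diagnosis): behind an SSL-terminating proxy, django-allauth can build the OAuth callback URL with the wrong scheme, and the provider then rejects the login. The Django/allauth settings involved look roughly like this; the second one is what the patch recorded later in this entry adds:

```python
# settings sketch: tell Django the original request was https, and make
# allauth default to https when it builds absolute URLs itself.
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
ACCOUNT_DEFAULT_HTTP_PROTOCOL = "https"   # django-allauth setting, defaults to 'http'
```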
<code>
[start of readthedocs/settings/postgres.py]
1 import os
2
3 from .base import * # noqa
4
5
6 DATABASES = {
7 'default': {
8 'ENGINE': 'django.db.backends.postgresql_psycopg2',
9 'NAME': 'docs',
10 'USER': 'postgres', # Not used with sqlite3.
11 'PASSWORD': '',
12 'HOST': '10.177.73.97',
13 'PORT': '',
14 }
15 }
16
17 DEBUG = False
18 TEMPLATE_DEBUG = False
19 CELERY_ALWAYS_EAGER = False
20
21 MEDIA_URL = 'https://media.readthedocs.org/'
22 STATIC_URL = 'https://media.readthedocs.org/static/'
23 ADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'
24 SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
25
26 HAYSTACK_CONNECTIONS = {
27 'default': {
28 'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
29 'URL': 'http://odin:8983/solr',
30 }
31 }
32
33 CACHES = {
34 'default': {
35 'BACKEND': 'redis_cache.RedisCache',
36 'LOCATION': 'localhost:6379',
37 'PREFIX': 'docs',
38 'OPTIONS': {
39 'DB': 1,
40 'PARSER_CLASS': 'redis.connection.HiredisParser'
41 },
42 },
43 }
44
45 # Elasticsearch settings.
46 ES_HOSTS = ['backup:9200', 'db:9200']
47 ES_DEFAULT_NUM_REPLICAS = 1
48 ES_DEFAULT_NUM_SHARDS = 5
49
50 SLUMBER_API_HOST = 'https://readthedocs.org'
51 WEBSOCKET_HOST = 'websocket.readthedocs.org:8088'
52
53 PRODUCTION_DOMAIN = 'readthedocs.org'
54 USE_SUBDOMAIN = True
55 NGINX_X_ACCEL_REDIRECT = True
56
57 SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
58
59 # Lock builds for 10 minutes
60 REPO_LOCK_SECONDS = 300
61
62 # Don't re-confirm existing accounts
63 ACCOUNT_EMAIL_VERIFICATION = 'none'
64
65 FILE_SYNCER = 'privacy.backends.syncers.DoubleRemotePuller'
66
67 # set GitHub scope
68 SOCIALACCOUNT_PROVIDERS = {
69 'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}
70 }
71
72 if not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):
73 try:
74 from local_settings import * # noqa
75 except ImportError:
76 pass
77
[end of readthedocs/settings/postgres.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/readthedocs/settings/postgres.py b/readthedocs/settings/postgres.py
--- a/readthedocs/settings/postgres.py
+++ b/readthedocs/settings/postgres.py
@@ -69,6 +69,9 @@
'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}
}
+# allauth settings
+ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'
+
if not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):
try:
from local_settings import * # noqa
| {"golden_diff": "diff --git a/readthedocs/settings/postgres.py b/readthedocs/settings/postgres.py\n--- a/readthedocs/settings/postgres.py\n+++ b/readthedocs/settings/postgres.py\n@@ -69,6 +69,9 @@\n 'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}\n }\n \n+# allauth settings\n+ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'\n+\n if not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):\n try:\n from local_settings import * # noqa\n", "issue": "Social Network Login Failure\nWhen clicking on connect GitHub on the [social accounts page](https://readthedocs.org/accounts/social/connections/?) I get a message:\n\n> An error occurred while attempting to login via your social network account.\n\nThere's a `?` in the url. Could that be a hint? Is it missing some request arguments? If I omit it, the bug persists.\n\nCheers!\n\n", "before_files": [{"content": "import os\n\nfrom .base import * # noqa\n\n\nDATABASES = {\n 'default': {\n 'ENGINE': 'django.db.backends.postgresql_psycopg2',\n 'NAME': 'docs',\n 'USER': 'postgres', # Not used with sqlite3.\n 'PASSWORD': '',\n 'HOST': '10.177.73.97',\n 'PORT': '',\n }\n}\n\nDEBUG = False\nTEMPLATE_DEBUG = False\nCELERY_ALWAYS_EAGER = False\n\nMEDIA_URL = 'https://media.readthedocs.org/'\nSTATIC_URL = 'https://media.readthedocs.org/static/'\nADMIN_MEDIA_PREFIX = MEDIA_URL + 'admin/'\nSESSION_ENGINE = \"django.contrib.sessions.backends.cached_db\"\n\nHAYSTACK_CONNECTIONS = {\n 'default': {\n 'ENGINE': 'haystack.backends.solr_backend.SolrEngine',\n 'URL': 'http://odin:8983/solr',\n }\n}\n\nCACHES = {\n 'default': {\n 'BACKEND': 'redis_cache.RedisCache',\n 'LOCATION': 'localhost:6379',\n 'PREFIX': 'docs',\n 'OPTIONS': {\n 'DB': 1,\n 'PARSER_CLASS': 'redis.connection.HiredisParser'\n },\n },\n}\n\n# Elasticsearch settings.\nES_HOSTS = ['backup:9200', 'db:9200']\nES_DEFAULT_NUM_REPLICAS = 1\nES_DEFAULT_NUM_SHARDS = 5\n\nSLUMBER_API_HOST = 'https://readthedocs.org'\nWEBSOCKET_HOST = 'websocket.readthedocs.org:8088'\n\nPRODUCTION_DOMAIN = 'readthedocs.org'\nUSE_SUBDOMAIN = True\nNGINX_X_ACCEL_REDIRECT = True\n\nSECURE_PROXY_SSL_HEADER = (\"HTTP_X_FORWARDED_PROTO\", \"https\")\n\n# Lock builds for 10 minutes\nREPO_LOCK_SECONDS = 300\n\n# Don't re-confirm existing accounts\nACCOUNT_EMAIL_VERIFICATION = 'none'\n\nFILE_SYNCER = 'privacy.backends.syncers.DoubleRemotePuller'\n\n# set GitHub scope\nSOCIALACCOUNT_PROVIDERS = {\n 'github': {'SCOPE': ['user:email', 'read:org', 'admin:repo_hook', 'repo:status']}\n}\n\nif not os.environ.get('DJANGO_SETTINGS_SKIP_LOCAL', False):\n try:\n from local_settings import * # noqa\n except ImportError:\n pass\n", "path": "readthedocs/settings/postgres.py"}]} | 1,294 | 125 |
gh_patches_debug_16013 | rasdani/github-patches | git_diff | pyro-ppl__pyro-3167 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
bug with OneCycleLR on Apple Silicon
### Guidelines
**NOTE:** Issues are for bugs and feature requests only. If you have a question about using Pyro or general modeling questions, please post it on the [forum](https://forum.pyro.ai/).
If you would like to address any minor bugs in the documentation or source, please feel free to contribute a Pull Request without creating an issue first.
Please tag the issue appropriately in the title e.g. [bug], [feature request], [discussion], etc.
Please provide the following details:
--------------------------------------------------------------------------------------------------
### Issue Description
Provide a brief description of the issue.
### Environment
For any bugs, please provide the following:
- OS and python version.
- PyTorch version, or if relevant, output of `pip freeze`.
- Pyro version: output of `python -c 'import pyro; print pyro.__version__'`
### Code Snippet
Provide any relevant code snippets and commands run to replicate the issue.
</issue>
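The issue body is an unfilled template, but the patch recorded later in this entry points at the underlying breakage: newer torch exposes the scheduler base class as `LRScheduler` rather than `_LRScheduler`. A hedged, standalone way to resolve the base class across torch versions:

```python
import torch

# Prefer the public name used by newer torch; fall back for older releases.
_scheduler_base = getattr(
    torch.optim.lr_scheduler, "LRScheduler",
    torch.optim.lr_scheduler._LRScheduler,
)

print(issubclass(torch.optim.lr_scheduler.OneCycleLR, _scheduler_base))  # True
```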
<code>
[start of pyro/optim/pytorch_optimizers.py]
1 # Copyright (c) 2017-2019 Uber Technologies, Inc.
2 # SPDX-License-Identifier: Apache-2.0
3
4 import torch
5
6 from pyro.optim import PyroOptim
7 from pyro.optim.lr_scheduler import PyroLRScheduler
8
9 __all__ = []
10 # Programmatically load all optimizers from PyTorch.
11 for _name, _Optim in torch.optim.__dict__.items():
12 if not isinstance(_Optim, type):
13 continue
14 if not issubclass(_Optim, torch.optim.Optimizer):
15 continue
16 if _Optim is torch.optim.Optimizer:
17 continue
18 if _Optim is torch.optim.LBFGS:
19 # XXX LBFGS is not supported for SVI yet
20 continue
21
22 _PyroOptim = (
23 lambda _Optim: lambda optim_args, clip_args=None: PyroOptim(
24 _Optim, optim_args, clip_args
25 )
26 )(_Optim)
27 _PyroOptim.__name__ = _name
28 _PyroOptim.__doc__ = "Wraps :class:`torch.optim.{}` with :class:`~pyro.optim.optim.PyroOptim`.".format(
29 _name
30 )
31
32 locals()[_name] = _PyroOptim
33 __all__.append(_name)
34 del _PyroOptim
35
36 # Load all schedulers from PyTorch
37 for _name, _Optim in torch.optim.lr_scheduler.__dict__.items():
38 if not isinstance(_Optim, type):
39 continue
40 if (
41 not issubclass(_Optim, torch.optim.lr_scheduler._LRScheduler)
42 and _name != "ReduceLROnPlateau"
43 ):
44 continue
45 if _Optim is torch.optim.Optimizer:
46 continue
47
48 _PyroOptim = (
49 lambda _Optim: lambda optim_args, clip_args=None: PyroLRScheduler(
50 _Optim, optim_args, clip_args
51 )
52 )(_Optim)
53 _PyroOptim.__name__ = _name
54 _PyroOptim.__doc__ = (
55 "Wraps :class:`torch.optim.{}` with ".format(_name)
56 + ":class:`~pyro.optim.lr_scheduler.PyroLRScheduler`."
57 )
58
59 locals()[_name] = _PyroOptim
60 __all__.append(_name)
61 del _PyroOptim
62
[end of pyro/optim/pytorch_optimizers.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pyro/optim/pytorch_optimizers.py b/pyro/optim/pytorch_optimizers.py
--- a/pyro/optim/pytorch_optimizers.py
+++ b/pyro/optim/pytorch_optimizers.py
@@ -34,13 +34,16 @@
del _PyroOptim
# Load all schedulers from PyTorch
+# breaking change in torch >= 1.14: LRScheduler is new base class
+if hasattr(torch.optim.lr_scheduler, "LRScheduler"):
+ _torch_scheduler_base = torch.optim.lr_scheduler.LRScheduler
+else: # for torch < 1.13, _LRScheduler is base class
+ _torch_scheduler_base = torch.optim.lr_scheduler._LRScheduler
+
for _name, _Optim in torch.optim.lr_scheduler.__dict__.items():
if not isinstance(_Optim, type):
continue
- if (
- not issubclass(_Optim, torch.optim.lr_scheduler._LRScheduler)
- and _name != "ReduceLROnPlateau"
- ):
+ if not issubclass(_Optim, _torch_scheduler_base) and _name != "ReduceLROnPlateau":
continue
if _Optim is torch.optim.Optimizer:
continue
| {"golden_diff": "diff --git a/pyro/optim/pytorch_optimizers.py b/pyro/optim/pytorch_optimizers.py\n--- a/pyro/optim/pytorch_optimizers.py\n+++ b/pyro/optim/pytorch_optimizers.py\n@@ -34,13 +34,16 @@\n del _PyroOptim\n \n # Load all schedulers from PyTorch\n+# breaking change in torch >= 1.14: LRScheduler is new base class\n+if hasattr(torch.optim.lr_scheduler, \"LRScheduler\"):\n+ _torch_scheduler_base = torch.optim.lr_scheduler.LRScheduler\n+else: # for torch < 1.13, _LRScheduler is base class\n+ _torch_scheduler_base = torch.optim.lr_scheduler._LRScheduler\n+\n for _name, _Optim in torch.optim.lr_scheduler.__dict__.items():\n if not isinstance(_Optim, type):\n continue\n- if (\n- not issubclass(_Optim, torch.optim.lr_scheduler._LRScheduler)\n- and _name != \"ReduceLROnPlateau\"\n- ):\n+ if not issubclass(_Optim, _torch_scheduler_base) and _name != \"ReduceLROnPlateau\":\n continue\n if _Optim is torch.optim.Optimizer:\n continue\n", "issue": "bug with OneCycleLR on Apple Silicone \n### Guidelines\r\n\r\n**NOTE:** Issues are for bugs and feature requests only. If you have a question about using Pyro or general modeling questions, please post it on the [forum](https://forum.pyro.ai/).\r\n\r\nIf you would like to address any minor bugs in the documentation or source, please feel free to contribute a Pull Request without creating an issue first. \r\n\r\nPlease tag the issue appropriately in the title e.g. [bug], [feature request], [discussion], etc.\r\n\r\nPlease provide the following details:\r\n--------------------------------------------------------------------------------------------------\r\n### Issue Description\r\nProvide a brief description of the issue.\r\n\r\n### Environment\r\nFor any bugs, please provide the following:\r\n - OS and python version.\r\n - PyTorch version, or if relevant, output of `pip freeze`.\r\n - Pyro version: output of `python -c 'import pyro; print pyro.__version__'`\r\n\r\n### Code Snippet\r\nProvide any relevant code snippets and commands run to replicate the issue.\r\n\n", "before_files": [{"content": "# Copyright (c) 2017-2019 Uber Technologies, Inc.\n# SPDX-License-Identifier: Apache-2.0\n\nimport torch\n\nfrom pyro.optim import PyroOptim\nfrom pyro.optim.lr_scheduler import PyroLRScheduler\n\n__all__ = []\n# Programmatically load all optimizers from PyTorch.\nfor _name, _Optim in torch.optim.__dict__.items():\n if not isinstance(_Optim, type):\n continue\n if not issubclass(_Optim, torch.optim.Optimizer):\n continue\n if _Optim is torch.optim.Optimizer:\n continue\n if _Optim is torch.optim.LBFGS:\n # XXX LBFGS is not supported for SVI yet\n continue\n\n _PyroOptim = (\n lambda _Optim: lambda optim_args, clip_args=None: PyroOptim(\n _Optim, optim_args, clip_args\n )\n )(_Optim)\n _PyroOptim.__name__ = _name\n _PyroOptim.__doc__ = \"Wraps :class:`torch.optim.{}` with :class:`~pyro.optim.optim.PyroOptim`.\".format(\n _name\n )\n\n locals()[_name] = _PyroOptim\n __all__.append(_name)\n del _PyroOptim\n\n# Load all schedulers from PyTorch\nfor _name, _Optim in torch.optim.lr_scheduler.__dict__.items():\n if not isinstance(_Optim, type):\n continue\n if (\n not issubclass(_Optim, torch.optim.lr_scheduler._LRScheduler)\n and _name != \"ReduceLROnPlateau\"\n ):\n continue\n if _Optim is torch.optim.Optimizer:\n continue\n\n _PyroOptim = (\n lambda _Optim: lambda optim_args, clip_args=None: PyroLRScheduler(\n _Optim, optim_args, clip_args\n )\n )(_Optim)\n _PyroOptim.__name__ = _name\n _PyroOptim.__doc__ = (\n \"Wraps :class:`torch.optim.{}` with 
\".format(_name)\n + \":class:`~pyro.optim.lr_scheduler.PyroLRScheduler`.\"\n )\n\n locals()[_name] = _PyroOptim\n __all__.append(_name)\n del _PyroOptim\n", "path": "pyro/optim/pytorch_optimizers.py"}]} | 1,400 | 285 |
gh_patches_debug_33404 | rasdani/github-patches | git_diff | aws__aws-sam-cli-1348 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
sam 0.19.0 crashes on trying to create .aws-sam directory when not permitted
### Description
While running `sam validate` from a freshly built docker image, it crashes while trying to create the `.aws-sam` config directory.
### Steps to reproduce
In our CI pipeline, we run `sam` from within a docker container built according to this `Dockerfile`:
```
FROM alpine:3.9
RUN apk add --no-cache \
build-base \
python \
python-dev \
py-pip \
groff \
docker && \
pip install awscli aws-sam-cli
```
AWS credentials are passed in as environment variables, volumes mounted from the Jenkins workspace into the docker image, and then we simply run:
`sam validate -t ./.../template.yaml`
### Observed result
Logs from the Jenkins build:
```
$ docker run -t -d -u 3000:100 --network=host -w /data/hudson/workspace/loper-portal_feature_jenkinstest -v /data/hudson/workspace/loper-portal_feature_jenkinstest:/data/hudson/workspace/loper-portal_feature_jenkinstest:rw,z -v /data/hudson/workspace/loper-portal_feature_jenkinstest@tmp:/data/hudson/workspace/loper-portal_feature_jenkinstest@tmp:rw,z -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** xxxx-aws-sam-cli cat
$ docker top 04fa30b17ceb0ae6d45b66190f32f4bad8dedd57386352a31e61f0da6ac18aa2 -eo pid,comm
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withCredentials
Masking supported pattern matches of $AWS_ACCESS_KEY_ID or $AWS_SECRET_ACCESS_KEY
[Pipeline] {
[Pipeline] stage
[Pipeline] { (Validate CloudFormation template)
[Pipeline] sh
+ sam validate --debug -t cloudformation/template.yaml
Traceback (most recent call last):
File "/usr/bin/sam", line 11, in <module>
sys.exit(cli())
File "/usr/lib/python2.7/site-packages/click/core.py", line 722, in __call__
return self.main(*args, **kwargs)
File "/usr/lib/python2.7/site-packages/click/core.py", line 697, in main
rv = self.invoke(ctx)
File "/usr/lib/python2.7/site-packages/click/core.py", line 1063, in invoke
Command.invoke(self, ctx)
File "/usr/lib/python2.7/site-packages/click/core.py", line 895, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/lib/python2.7/site-packages/click/core.py", line 535, in invoke
return callback(*args, **kwargs)
File "/usr/lib/python2.7/site-packages/click/decorators.py", line 64, in new_func
return ctx.invoke(f, obj, *args[1:], **kwargs)
File "/usr/lib/python2.7/site-packages/click/core.py", line 535, in invoke
return callback(*args, **kwargs)
File "/usr/lib/python2.7/site-packages/samcli/cli/main.py", line 83, in cli
if global_cfg.telemetry_enabled is None:
File "/usr/lib/python2.7/site-packages/samcli/cli/global_config.py", line 113, in telemetry_enabled
self._telemetry_enabled = self._get_value(TELEMETRY_ENABLED_KEY)
File "/usr/lib/python2.7/site-packages/samcli/cli/global_config.py", line 145, in _get_value
cfg_path = self._get_config_file_path(CONFIG_FILENAME)
File "/usr/lib/python2.7/site-packages/samcli/cli/global_config.py", line 170, in _get_config_file_path
self._create_dir()
File "/usr/lib/python2.7/site-packages/samcli/cli/global_config.py", line 167, in _create_dir
self.config_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
File "/usr/lib/python2.7/site-packages/pathlib2/__init__.py", line 1540, in mkdir
_try_except_filenotfounderror(_try_func, _exc_func)
File "/usr/lib/python2.7/site-packages/pathlib2/__init__.py", line 113, in _try_except_filenotfounderror
try_func()
File "/usr/lib/python2.7/site-packages/pathlib2/__init__.py", line 1531, in _try_func
self._accessor.mkdir(self, mode)
File "/usr/lib/python2.7/site-packages/pathlib2/__init__.py", line 585, in wrapped
return strfunc(str(pathobj), *args)
OSError: [Errno 13] Permission denied: '/.aws-sam'
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
```
### Expected result
A non-crashing sam :)
### Additional environment details (Ex: Windows, Mac, Amazon Linux etc)
1. OS: alpine linux, docker container
2. `sam --version`: 0.19.0
### Others
This was added in the 0.19.0 Telemetry release, where in commit dbd534a `samcli/cli/global_config.py` was added, that tries to write the config dir if it doesn't exist.
Actually we should've pinned the versions used in our `Dockerfile`. Not pinning it had us miss the addition of the Telemetry feature in 0.19.0. Although I'd love to contribute, enabling Telemetry would undoubtedly go against some company policy. There _is_ the message in the first run of the new version (if it was able to write the config dir), but in our setup that's quite easy to miss. Basically I'm advocating an opt-in instead
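
For illustration, here is a minimal sketch of the defensive behaviour I'm describing — treat a config directory that cannot be created as "no config" rather than crashing. The function name and exact fallback are my own assumptions, not the project's API; only `metadata.json`, the `0o700` mode and the `telemetryEnabled` key come from `global_config.py` below.

```python
# Hypothetical sketch only: default telemetry to "off" when the config
# directory is unusable (e.g. OSError / Errno 13 inside a locked-down CI container).
import json
import logging
from pathlib import Path

LOG = logging.getLogger(__name__)


def read_telemetry_flag(config_dir: Path) -> bool:
    """Return the telemetry flag, or False when the config dir cannot be used."""
    try:
        config_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
        cfg = config_dir / "metadata.json"
        if not cfg.exists():
            return False
        return bool(json.loads(cfg.read_text()).get("telemetryEnabled"))
    except (OSError, IOError, ValueError) as exc:
        LOG.debug("Config dir unusable, treating telemetry as disabled", exc_info=exc)
        return False
```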
</issue>
<code>
[start of samcli/cli/global_config.py]
1 """
2 Provides global configuration helpers.
3 """
4
5 import json
6 import logging
7 import uuid
8 import os
9
10 import click
11
12 try:
13 from pathlib import Path
14 except ImportError: # pragma: no cover
15 from pathlib2 import Path # pragma: no cover
16
17 LOG = logging.getLogger(__name__)
18
19 CONFIG_FILENAME = "metadata.json"
20 INSTALLATION_ID_KEY = "installationId"
21 TELEMETRY_ENABLED_KEY = "telemetryEnabled"
22
23
24 class GlobalConfig(object):
25 """
26 Contains helper methods for global configuration files and values. Handles
27 configuration file creation, updates, and fetching in a platform-neutral way.
28
29 Generally uses '~/.aws-sam/' or 'C:\\Users\\<user>\\AppData\\Roaming\\AWS SAM' as
30 the base directory, depending on platform.
31 """
32
33 def __init__(self, config_dir=None, installation_id=None, telemetry_enabled=None):
34 """
35 Initializes the class, with options provided to assist with testing.
36
37 :param config_dir: Optional, overrides the default config directory path.
38 :param installation_id: Optional, will use this installation id rather than checking config values.
39 """
40 self._config_dir = config_dir
41 self._installation_id = installation_id
42 self._telemetry_enabled = telemetry_enabled
43
44 @property
45 def config_dir(self):
46 if not self._config_dir:
47 # Internal Environment variable to customize SAM CLI App Dir. Currently used only by integ tests.
48 app_dir = os.getenv("__SAM_CLI_APP_DIR")
49 self._config_dir = Path(app_dir) if app_dir else Path(click.get_app_dir('AWS SAM', force_posix=True))
50
51 return Path(self._config_dir)
52
53 @property
54 def installation_id(self):
55 """
56 Returns the installation UUID for this AWS SAM CLI installation. If the
57 installation id has not yet been set, it will be set before returning.
58
59 Examples
60 --------
61
62 >>> gc = GlobalConfig()
63 >>> gc.installation_id
64 "7b7d4db7-2f54-45ba-bf2f-a2cbc9e74a34"
65
66 >>> gc = GlobalConfig()
67 >>> gc.installation_id
68 None
69
70 Returns
71 -------
72 A string containing the installation UUID, or None in case of an error.
73 """
74 if self._installation_id:
75 return self._installation_id
76 try:
77 self._installation_id = self._get_or_set_uuid(INSTALLATION_ID_KEY)
78 return self._installation_id
79 except (ValueError, IOError):
80 return None
81
82 @property
83 def telemetry_enabled(self):
84 """
85 Check if telemetry is enabled for this installation. Default value of
86 False. It first tries to get value from SAM_CLI_TELEMETRY environment variable. If its not set,
87 then it fetches the value from config file.
88
89 To enable telemetry, set SAM_CLI_TELEMETRY environment variable equal to integer 1 or string '1'.
90 All other values including words like 'True', 'true', 'false', 'False', 'abcd' etc will disable Telemetry
91
92 Examples
93 --------
94
95 >>> gc = GlobalConfig()
96 >>> gc.telemetry_enabled
97 True
98
99 Returns
100 -------
101 Boolean flag value. True if telemetry is enabled for this installation,
102 False otherwise.
103 """
104 if self._telemetry_enabled is not None:
105 return self._telemetry_enabled
106
107 # If environment variable is set, its value takes precedence over the value from config file.
108 env_name = "SAM_CLI_TELEMETRY"
109 if env_name in os.environ:
110 return os.getenv(env_name) in ('1', 1)
111
112 try:
113 self._telemetry_enabled = self._get_value(TELEMETRY_ENABLED_KEY)
114 return self._telemetry_enabled
115 except (ValueError, IOError) as ex:
116 LOG.debug("Error when retrieving telemetry_enabled flag", exc_info=ex)
117 return False
118
119 @telemetry_enabled.setter
120 def telemetry_enabled(self, value):
121 """
122 Sets the telemetry_enabled flag to the provided boolean value.
123
124 Examples
125 --------
126 >>> gc = GlobalConfig()
127 >>> gc.telemetry_enabled
128 False
129 >>> gc.telemetry_enabled = True
130 >>> gc.telemetry_enabled
131 True
132
133 Raises
134 ------
135 IOError
136 If there are errors opening or writing to the global config file.
137
138 JSONDecodeError
139 If the config file exists, and is not valid JSON.
140 """
141 self._set_value("telemetryEnabled", value)
142 self._telemetry_enabled = value
143
144 def _get_value(self, key):
145 cfg_path = self._get_config_file_path(CONFIG_FILENAME)
146 if not cfg_path.exists():
147 return None
148 with open(str(cfg_path)) as fp:
149 body = fp.read()
150 json_body = json.loads(body)
151 return json_body.get(key)
152
153 def _set_value(self, key, value):
154 cfg_path = self._get_config_file_path(CONFIG_FILENAME)
155 if not cfg_path.exists():
156 return self._set_json_cfg(cfg_path, key, value)
157 with open(str(cfg_path)) as fp:
158 body = fp.read()
159 try:
160 json_body = json.loads(body)
161 except ValueError as ex:
162 LOG.debug("Failed to decode JSON in {cfg_path}", exc_info=ex)
163 raise ex
164 return self._set_json_cfg(cfg_path, key, value, json_body)
165
166 def _create_dir(self):
167 self.config_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
168
169 def _get_config_file_path(self, filename):
170 self._create_dir()
171 filepath = self.config_dir.joinpath(filename)
172 return filepath
173
174 def _get_or_set_uuid(self, key):
175 """
176 Special logic method for when we want a UUID to always be present, this
177 method behaves as a getter with side effects. Essentially, if the value
178 is not present, we will set it with a generated UUID.
179
180 If we have multiple such values in the future, a possible refactor is
181 to just be _get_or_set_value, where we also take a default value as a
182 parameter.
183 """
184 cfg_value = self._get_value(key)
185 if cfg_value is not None:
186 return cfg_value
187 return self._set_value(key, str(uuid.uuid4()))
188
189 def _set_json_cfg(self, filepath, key, value, json_body=None):
190 """
191 Special logic method to add a value to a JSON configuration file. This
192 method will write a new version of the file in question, so it will
193 either write a new file with only the first config value, or if a JSON
194 body is provided, it will upsert starting from that JSON body.
195 """
196 json_body = json_body or {}
197 json_body[key] = value
198 file_body = json.dumps(json_body, indent=4) + "\n"
199 try:
200 with open(str(filepath), 'w') as f:
201 f.write(file_body)
202 except IOError as ex:
203 LOG.debug("Error writing to {filepath}", exc_info=ex)
204 raise ex
205 return value
206
[end of samcli/cli/global_config.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/samcli/cli/global_config.py b/samcli/cli/global_config.py
--- a/samcli/cli/global_config.py
+++ b/samcli/cli/global_config.py
@@ -47,7 +47,6 @@
# Internal Environment variable to customize SAM CLI App Dir. Currently used only by integ tests.
app_dir = os.getenv("__SAM_CLI_APP_DIR")
self._config_dir = Path(app_dir) if app_dir else Path(click.get_app_dir('AWS SAM', force_posix=True))
-
return Path(self._config_dir)
@property
@@ -76,7 +75,7 @@
try:
self._installation_id = self._get_or_set_uuid(INSTALLATION_ID_KEY)
return self._installation_id
- except (ValueError, IOError):
+ except (ValueError, IOError, OSError):
return None
@property
@@ -112,7 +111,7 @@
try:
self._telemetry_enabled = self._get_value(TELEMETRY_ENABLED_KEY)
return self._telemetry_enabled
- except (ValueError, IOError) as ex:
+ except (ValueError, IOError, OSError) as ex:
LOG.debug("Error when retrieving telemetry_enabled flag", exc_info=ex)
return False
@@ -164,6 +163,10 @@
return self._set_json_cfg(cfg_path, key, value, json_body)
def _create_dir(self):
+ """
+ Creates configuration directory if it does not already exist, otherwise does nothing.
+ May raise an OSError if we do not have permissions to create the directory.
+ """
self.config_dir.mkdir(mode=0o700, parents=True, exist_ok=True)
def _get_config_file_path(self, filename):
| {"golden_diff": "diff --git a/samcli/cli/global_config.py b/samcli/cli/global_config.py\n--- a/samcli/cli/global_config.py\n+++ b/samcli/cli/global_config.py\n@@ -47,7 +47,6 @@\n # Internal Environment variable to customize SAM CLI App Dir. Currently used only by integ tests.\n app_dir = os.getenv(\"__SAM_CLI_APP_DIR\")\n self._config_dir = Path(app_dir) if app_dir else Path(click.get_app_dir('AWS SAM', force_posix=True))\n-\n return Path(self._config_dir)\n \n @property\n@@ -76,7 +75,7 @@\n try:\n self._installation_id = self._get_or_set_uuid(INSTALLATION_ID_KEY)\n return self._installation_id\n- except (ValueError, IOError):\n+ except (ValueError, IOError, OSError):\n return None\n \n @property\n@@ -112,7 +111,7 @@\n try:\n self._telemetry_enabled = self._get_value(TELEMETRY_ENABLED_KEY)\n return self._telemetry_enabled\n- except (ValueError, IOError) as ex:\n+ except (ValueError, IOError, OSError) as ex:\n LOG.debug(\"Error when retrieving telemetry_enabled flag\", exc_info=ex)\n return False\n \n@@ -164,6 +163,10 @@\n return self._set_json_cfg(cfg_path, key, value, json_body)\n \n def _create_dir(self):\n+ \"\"\"\n+ Creates configuration directory if it does not already exist, otherwise does nothing.\n+ May raise an OSError if we do not have permissions to create the directory.\n+ \"\"\"\n self.config_dir.mkdir(mode=0o700, parents=True, exist_ok=True)\n \n def _get_config_file_path(self, filename):\n", "issue": "sam 0.19.0 crashes on trying to create .aws-sam directory when not permitted\n### Description\r\n\r\nWhile running `sam validate` from a freshly built docker image, it crashes while trying to create the `.aws-sam` config directory.\r\n\r\n### Steps to reproduce\r\n\r\nIn our CI pipeline, we run `sam` from within a docker container built according to this `Dockerfile`:\r\n\r\n```\r\nFROM alpine:3.9\r\n\r\nRUN apk add --no-cache \\\r\n build-base \\\r\n python \\\r\n python-dev \\\r\n py-pip \\\r\n groff \\\r\n docker && \\\r\n pip install awscli aws-sam-cli\r\n```\r\n\r\nAWS credentials are passed in as environment variables, volumes mounted from the Jenkins workspace into the docker image, and then we simply run:\r\n\r\n`sam validate -t ./.../template.yaml`\r\n\r\n### Observed result\r\n\r\nLogs from the Jenkins build:\r\n\r\n```\r\n$ docker run -t -d -u 3000:100 --network=host -w /data/hudson/workspace/loper-portal_feature_jenkinstest -v /data/hudson/workspace/loper-portal_feature_jenkinstest:/data/hudson/workspace/loper-portal_feature_jenkinstest:rw,z -v /data/hudson/workspace/loper-portal_feature_jenkinstest@tmp:/data/hudson/workspace/loper-portal_feature_jenkinstest@tmp:rw,z -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** -e ******** xxxx-aws-sam-cli cat\r\n$ docker top 04fa30b17ceb0ae6d45b66190f32f4bad8dedd57386352a31e61f0da6ac18aa2 -eo pid,comm\r\n[Pipeline] {\r\n[Pipeline] withEnv\r\n[Pipeline] {\r\n[Pipeline] withCredentials\r\nMasking supported pattern matches of $AWS_ACCESS_KEY_ID or $AWS_SECRET_ACCESS_KEY\r\n[Pipeline] {\r\n[Pipeline] stage\r\n[Pipeline] { (Validate CloudFormation template)\r\n[Pipeline] sh\r\n+ sam validate --debug -t cloudformation/template.yaml\r\nTraceback (most recent call last):\r\n File \"/usr/bin/sam\", line 11, in <module>\r\n sys.exit(cli())\r\n File 
\"/usr/lib/python2.7/site-packages/click/core.py\", line 722, in __call__\r\n return self.main(*args, **kwargs)\r\n File \"/usr/lib/python2.7/site-packages/click/core.py\", line 697, in main\r\n rv = self.invoke(ctx)\r\n File \"/usr/lib/python2.7/site-packages/click/core.py\", line 1063, in invoke\r\n Command.invoke(self, ctx)\r\n File \"/usr/lib/python2.7/site-packages/click/core.py\", line 895, in invoke\r\n return ctx.invoke(self.callback, **ctx.params)\r\n File \"/usr/lib/python2.7/site-packages/click/core.py\", line 535, in invoke\r\n return callback(*args, **kwargs)\r\n File \"/usr/lib/python2.7/site-packages/click/decorators.py\", line 64, in new_func\r\n return ctx.invoke(f, obj, *args[1:], **kwargs)\r\n File \"/usr/lib/python2.7/site-packages/click/core.py\", line 535, in invoke\r\n return callback(*args, **kwargs)\r\n File \"/usr/lib/python2.7/site-packages/samcli/cli/main.py\", line 83, in cli\r\n if global_cfg.telemetry_enabled is None:\r\n File \"/usr/lib/python2.7/site-packages/samcli/cli/global_config.py\", line 113, in telemetry_enabled\r\n self._telemetry_enabled = self._get_value(TELEMETRY_ENABLED_KEY)\r\n File \"/usr/lib/python2.7/site-packages/samcli/cli/global_config.py\", line 145, in _get_value\r\n cfg_path = self._get_config_file_path(CONFIG_FILENAME)\r\n File \"/usr/lib/python2.7/site-packages/samcli/cli/global_config.py\", line 170, in _get_config_file_path\r\n self._create_dir()\r\n File \"/usr/lib/python2.7/site-packages/samcli/cli/global_config.py\", line 167, in _create_dir\r\n self.config_dir.mkdir(mode=0o700, parents=True, exist_ok=True)\r\n File \"/usr/lib/python2.7/site-packages/pathlib2/__init__.py\", line 1540, in mkdir\r\n _try_except_filenotfounderror(_try_func, _exc_func)\r\n File \"/usr/lib/python2.7/site-packages/pathlib2/__init__.py\", line 113, in _try_except_filenotfounderror\r\n try_func()\r\n File \"/usr/lib/python2.7/site-packages/pathlib2/__init__.py\", line 1531, in _try_func\r\n self._accessor.mkdir(self, mode)\r\n File \"/usr/lib/python2.7/site-packages/pathlib2/__init__.py\", line 585, in wrapped\r\n return strfunc(str(pathobj), *args)\r\nOSError: [Errno 13] Permission denied: '/.aws-sam'\r\n[Pipeline] }\r\n[Pipeline] // stage\r\n[Pipeline] }\r\n[Pipeline] // withCredentials\r\n[Pipeline] }\r\n[Pipeline] // withEnv\r\n[Pipeline] }\r\n```\r\n### Expected result\r\n\r\nA non-crashing sam :)\r\n\r\n### Additional environment details (Ex: Windows, Mac, Amazon Linux etc)\r\n\r\n1. OS: alpine linux, docker container\r\n2. `sam --version`: 0.19.0\r\n\r\n### Others\r\n\r\nThis was added in the 0.19.0 Telemetry release, where in commit dbd534a `samcli/cli/global_config.py` was added, that tries to write the config dir if it doesn't exist. \r\n\r\nActually we should've pinned the versions used in our `Dockerfile`. Not pinning it had us miss the addition of the Telemetry feature in 0.19.0. Although I'd love to contribute, enabling Telemetry would undoubtedly go against some company policy. There _is_ the message in the first run of the new version (if it was able to write the config dir), but in our setup that's quite easy to miss. 
Basically I'm advocating an opt-in instead\r\n\n", "before_files": [{"content": "\"\"\"\nProvides global configuration helpers.\n\"\"\"\n\nimport json\nimport logging\nimport uuid\nimport os\n\nimport click\n\ntry:\n from pathlib import Path\nexcept ImportError: # pragma: no cover\n from pathlib2 import Path # pragma: no cover\n\nLOG = logging.getLogger(__name__)\n\nCONFIG_FILENAME = \"metadata.json\"\nINSTALLATION_ID_KEY = \"installationId\"\nTELEMETRY_ENABLED_KEY = \"telemetryEnabled\"\n\n\nclass GlobalConfig(object):\n \"\"\"\n Contains helper methods for global configuration files and values. Handles\n configuration file creation, updates, and fetching in a platform-neutral way.\n\n Generally uses '~/.aws-sam/' or 'C:\\\\Users\\\\<user>\\\\AppData\\\\Roaming\\\\AWS SAM' as\n the base directory, depending on platform.\n \"\"\"\n\n def __init__(self, config_dir=None, installation_id=None, telemetry_enabled=None):\n \"\"\"\n Initializes the class, with options provided to assist with testing.\n\n :param config_dir: Optional, overrides the default config directory path.\n :param installation_id: Optional, will use this installation id rather than checking config values.\n \"\"\"\n self._config_dir = config_dir\n self._installation_id = installation_id\n self._telemetry_enabled = telemetry_enabled\n\n @property\n def config_dir(self):\n if not self._config_dir:\n # Internal Environment variable to customize SAM CLI App Dir. Currently used only by integ tests.\n app_dir = os.getenv(\"__SAM_CLI_APP_DIR\")\n self._config_dir = Path(app_dir) if app_dir else Path(click.get_app_dir('AWS SAM', force_posix=True))\n\n return Path(self._config_dir)\n\n @property\n def installation_id(self):\n \"\"\"\n Returns the installation UUID for this AWS SAM CLI installation. If the\n installation id has not yet been set, it will be set before returning.\n\n Examples\n --------\n\n >>> gc = GlobalConfig()\n >>> gc.installation_id\n \"7b7d4db7-2f54-45ba-bf2f-a2cbc9e74a34\"\n\n >>> gc = GlobalConfig()\n >>> gc.installation_id\n None\n\n Returns\n -------\n A string containing the installation UUID, or None in case of an error.\n \"\"\"\n if self._installation_id:\n return self._installation_id\n try:\n self._installation_id = self._get_or_set_uuid(INSTALLATION_ID_KEY)\n return self._installation_id\n except (ValueError, IOError):\n return None\n\n @property\n def telemetry_enabled(self):\n \"\"\"\n Check if telemetry is enabled for this installation. Default value of\n False. It first tries to get value from SAM_CLI_TELEMETRY environment variable. If its not set,\n then it fetches the value from config file.\n\n To enable telemetry, set SAM_CLI_TELEMETRY environment variable equal to integer 1 or string '1'.\n All other values including words like 'True', 'true', 'false', 'False', 'abcd' etc will disable Telemetry\n\n Examples\n --------\n\n >>> gc = GlobalConfig()\n >>> gc.telemetry_enabled\n True\n\n Returns\n -------\n Boolean flag value. 
True if telemetry is enabled for this installation,\n False otherwise.\n \"\"\"\n if self._telemetry_enabled is not None:\n return self._telemetry_enabled\n\n # If environment variable is set, its value takes precedence over the value from config file.\n env_name = \"SAM_CLI_TELEMETRY\"\n if env_name in os.environ:\n return os.getenv(env_name) in ('1', 1)\n\n try:\n self._telemetry_enabled = self._get_value(TELEMETRY_ENABLED_KEY)\n return self._telemetry_enabled\n except (ValueError, IOError) as ex:\n LOG.debug(\"Error when retrieving telemetry_enabled flag\", exc_info=ex)\n return False\n\n @telemetry_enabled.setter\n def telemetry_enabled(self, value):\n \"\"\"\n Sets the telemetry_enabled flag to the provided boolean value.\n\n Examples\n --------\n >>> gc = GlobalConfig()\n >>> gc.telemetry_enabled\n False\n >>> gc.telemetry_enabled = True\n >>> gc.telemetry_enabled\n True\n\n Raises\n ------\n IOError\n If there are errors opening or writing to the global config file.\n\n JSONDecodeError\n If the config file exists, and is not valid JSON.\n \"\"\"\n self._set_value(\"telemetryEnabled\", value)\n self._telemetry_enabled = value\n\n def _get_value(self, key):\n cfg_path = self._get_config_file_path(CONFIG_FILENAME)\n if not cfg_path.exists():\n return None\n with open(str(cfg_path)) as fp:\n body = fp.read()\n json_body = json.loads(body)\n return json_body.get(key)\n\n def _set_value(self, key, value):\n cfg_path = self._get_config_file_path(CONFIG_FILENAME)\n if not cfg_path.exists():\n return self._set_json_cfg(cfg_path, key, value)\n with open(str(cfg_path)) as fp:\n body = fp.read()\n try:\n json_body = json.loads(body)\n except ValueError as ex:\n LOG.debug(\"Failed to decode JSON in {cfg_path}\", exc_info=ex)\n raise ex\n return self._set_json_cfg(cfg_path, key, value, json_body)\n\n def _create_dir(self):\n self.config_dir.mkdir(mode=0o700, parents=True, exist_ok=True)\n\n def _get_config_file_path(self, filename):\n self._create_dir()\n filepath = self.config_dir.joinpath(filename)\n return filepath\n\n def _get_or_set_uuid(self, key):\n \"\"\"\n Special logic method for when we want a UUID to always be present, this\n method behaves as a getter with side effects. Essentially, if the value\n is not present, we will set it with a generated UUID.\n\n If we have multiple such values in the future, a possible refactor is\n to just be _get_or_set_value, where we also take a default value as a\n parameter.\n \"\"\"\n cfg_value = self._get_value(key)\n if cfg_value is not None:\n return cfg_value\n return self._set_value(key, str(uuid.uuid4()))\n\n def _set_json_cfg(self, filepath, key, value, json_body=None):\n \"\"\"\n Special logic method to add a value to a JSON configuration file. This\n method will write a new version of the file in question, so it will\n either write a new file with only the first config value, or if a JSON\n body is provided, it will upsert starting from that JSON body.\n \"\"\"\n json_body = json_body or {}\n json_body[key] = value\n file_body = json.dumps(json_body, indent=4) + \"\\n\"\n try:\n with open(str(filepath), 'w') as f:\n f.write(file_body)\n except IOError as ex:\n LOG.debug(\"Error writing to {filepath}\", exc_info=ex)\n raise ex\n return value\n", "path": "samcli/cli/global_config.py"}]} | 4,042 | 394 |
gh_patches_debug_22001 | rasdani/github-patches | git_diff | nautobot__nautobot-5264 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Job Buttons do not commit changes in their job.
<!--
NOTE: IF YOUR ISSUE DOES NOT FOLLOW THIS TEMPLATE, IT WILL BE CLOSED.
This form is only for reporting reproducible bugs. If you need assistance
with Nautobot installation, or if you have a general question, please start a
discussion instead: https://github.com/nautobot/nautobot/discussions
Please describe the environment in which you are running Nautobot. Be sure
that you are running an unmodified instance of the latest stable release
before submitting a bug report, and that any plugins have been disabled.
-->
### Environment
* Nautobot version (Docker tag too if applicable): 1.6.11
* Python version: 3.11
* Database platform, version: MySQL
* Middleware(s): NA
<!--
Describe in detail the exact steps that someone else can take to reproduce
this bug using the current stable release of Nautobot. Begin with the
creation of any necessary database objects and call out every operation
being performed explicitly. If reporting a bug in the REST API, be sure to
reconstruct the raw HTTP request(s) being made: Don't rely on a client
library such as pynautobot.
-->
### Steps to Reproduce
1. Create a JobButtonReceiver, set it to commit by default.
2. Create a Job Button and assign to a ContentType.
3. Click the Job Button on the assigned Content Type.
<!-- What did you expect to happen? -->
### Expected Behavior
The job runs and commits the changes.
<!-- What happened instead? -->
### Observed Behavior
The Job runs and the database changes are reverted.
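
Judging from the form template in `templatetags/job_buttons.py` below, the POST generated by the button never carries a commit flag, so the run presumably falls back to a dry run. A minimal sketch of the extra hidden input that would express "commit" — the `_commit` field name is an assumption about what the job-run view checks, not something I've confirmed:

```python
# Hypothetical sketch: hidden-inputs fragment with an explicit commit flag.
# All field names except "_commit" are copied from job_buttons.py.
HIDDEN_INPUTS = """
<input type="hidden" name="csrfmiddlewaretoken" value="{csrf_token}">
<input type="hidden" name="object_pk" value="{object_pk}">
<input type="hidden" name="object_model_name" value="{object_model_name}">
<input type="hidden" name="_schedule_type" value="immediately">
<input type="hidden" name="_return_url" value="{redirect_path}">
<input type="hidden" name="_commit" value="on">
"""
```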
</issue>
<code>
[start of nautobot/extras/templatetags/job_buttons.py]
1 from collections import OrderedDict
2
3 from django import template
4 from django.contrib.contenttypes.models import ContentType
5 from django.urls import reverse
6 from django.utils.html import format_html
7 from django.utils.safestring import mark_safe
8
9 from nautobot.extras.models import Job, JobButton
10 from nautobot.utilities.utils import render_jinja2
11
12
13 register = template.Library()
14
15 GROUP_DROPDOWN = """
16 <div class="btn-group">
17 <button type="button" class="btn btn-sm btn-{group_button_class} dropdown-toggle" data-toggle="dropdown">
18 {group_name} <span class="caret"></span>
19 </button>
20 <ul class="dropdown-menu pull-right">
21 {grouped_buttons}
22 </ul>
23 </div>
24 """
25
26 HIDDEN_INPUTS = """
27 <input type="hidden" name="csrfmiddlewaretoken" value="{csrf_token}">
28 <input type="hidden" name="object_pk" value="{object_pk}">
29 <input type="hidden" name="object_model_name" value="{object_model_name}">
30 <input type="hidden" name="_schedule_type" value="immediately">
31 <input type="hidden" name="_return_url" value="{redirect_path}">
32 """
33
34 NO_CONFIRM_BUTTON = """
35 <button type="submit" form="form_id_{button_id}" class="btn btn-sm btn-{button_class}" {disabled}>{button_text}</button>
36 """
37
38 NO_CONFIRM_FORM = """
39 <form id="form_id_{button_id}" action="{button_url}" method="post" class="form">
40 {hidden_inputs}
41 </form>
42 """
43
44 CONFIRM_BUTTON = """
45 <button type="button" class="btn btn-sm btn-{button_class}" data-toggle="modal" data-target="#confirm_modal_id_{button_id}" {disabled}>
46 {button_text}
47 </button>
48 """
49
50 CONFIRM_MODAL = """
51 <div class="modal fade" id="confirm_modal_id_{button_id}" tabindex="-1" role="dialog" aria-labelledby="confirm_modal_label_{button_id}">
52 <div class="modal-dialog" role="document">
53 <div class="modal-content">
54 <div class="modal-header">
55 <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">×</span></button>
56 <h4 class="modal-title" id="confirm_modal_label_{button_id}">Confirmation</h4>
57 </div>
58 <form id="form_id_{button_id}" action="{button_url}" method="post" class="form">
59 <div class="modal-body">
60 {hidden_inputs}
61 Run Job <strong>'{job}'</strong> with object <strong>'{object}'</strong>?
62 </div>
63 <div class="modal-footer">
64 <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
65 <button type="submit" class="btn btn-primary">Confirm</button>
66 </div>
67 </form>
68 </div>
69 </div>
70 </div>
71 """
72
73 SAFE_EMPTY_STR = mark_safe("") # noqa: S308
74
75
76 def _render_job_button_for_obj(job_button, obj, context, content_type):
77 """
78 Helper method for job_buttons templatetag to reduce repetition of code.
79
80 Returns:
81 (str, str): (button_html, form_html)
82 """
83 # Pass select context data when rendering the JobButton text as Jinja2
84 button_context = {
85 "obj": obj,
86 "debug": context.get("debug", False), # django.template.context_processors.debug
87 "request": context["request"], # django.template.context_processors.request
88 "user": context["user"], # django.contrib.auth.context_processors.auth
89 "perms": context["perms"], # django.contrib.auth.context_processors.auth
90 }
91 try:
92 text_rendered = render_jinja2(job_button.text, button_context)
93 except Exception as exc:
94 return (
95 format_html(
96 '<a class="btn btn-sm btn-{}" disabled="disabled" title="{}"><i class="mdi mdi-alert"></i> {}</a>\n',
97 "default" if not job_button.group_name else "link",
98 exc,
99 job_button.name,
100 ),
101 SAFE_EMPTY_STR,
102 )
103
104 if not text_rendered:
105 return (SAFE_EMPTY_STR, SAFE_EMPTY_STR)
106
107 # Disable buttons if the user doesn't have permission to run the underlying Job.
108 has_run_perm = Job.objects.check_perms(context["user"], instance=job_button.job, action="run")
109 hidden_inputs = format_html(
110 HIDDEN_INPUTS,
111 csrf_token=context["csrf_token"],
112 object_pk=obj.pk,
113 object_model_name=f"{content_type.app_label}.{content_type.model}",
114 redirect_path=context["request"].path,
115 )
116 template_args = {
117 "button_id": job_button.pk,
118 "button_text": text_rendered,
119 "button_class": job_button.button_class if not job_button.group_name else "link",
120 "button_url": reverse("extras:job_run", kwargs={"slug": job_button.job.slug}),
121 "object": obj,
122 "job": job_button.job,
123 "hidden_inputs": hidden_inputs,
124 "disabled": "" if has_run_perm else "disabled",
125 }
126
127 if job_button.confirmation:
128 return (
129 format_html(CONFIRM_BUTTON, **template_args),
130 format_html(CONFIRM_MODAL, **template_args),
131 )
132 else:
133 return (
134 format_html(NO_CONFIRM_BUTTON, **template_args),
135 format_html(NO_CONFIRM_FORM, **template_args),
136 )
137
138
139 @register.simple_tag(takes_context=True)
140 def job_buttons(context, obj):
141 """
142 Render all applicable job buttons for the given object.
143 """
144 content_type = ContentType.objects.get_for_model(obj)
145 # We will enforce "run" permission later in deciding which buttons to show as disabled.
146 buttons = JobButton.objects.filter(content_types=content_type)
147 if not buttons:
148 return SAFE_EMPTY_STR
149
150 buttons_html = forms_html = SAFE_EMPTY_STR
151 group_names = OrderedDict()
152
153 for jb in buttons:
154 # Organize job buttons by group for later processing
155 if jb.group_name:
156 group_names.setdefault(jb.group_name, []).append(jb)
157
158 # Render and add non-grouped buttons
159 else:
160 button_html, form_html = _render_job_button_for_obj(jb, obj, context, content_type)
161 buttons_html += button_html
162 forms_html += form_html
163
164 # Add grouped buttons to template
165 for group_name, buttons in group_names.items():
166 group_button_class = buttons[0].button_class
167
168 buttons_rendered = SAFE_EMPTY_STR
169
170 for jb in buttons:
171 # Render grouped buttons as list items
172 button_html, form_html = _render_job_button_for_obj(jb, obj, context, content_type)
173 buttons_rendered += format_html("<li>{}</li>", button_html)
174 forms_html += form_html
175
176 if buttons_rendered:
177 buttons_html += format_html(
178 GROUP_DROPDOWN,
179 group_button_class=group_button_class,
180 group_name=group_name,
181 grouped_buttons=buttons_rendered,
182 )
183
184 # We want all of the buttons first and then any modals and forms so the buttons render properly
185 return buttons_html + forms_html
186
[end of nautobot/extras/templatetags/job_buttons.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/nautobot/extras/templatetags/job_buttons.py b/nautobot/extras/templatetags/job_buttons.py
--- a/nautobot/extras/templatetags/job_buttons.py
+++ b/nautobot/extras/templatetags/job_buttons.py
@@ -29,6 +29,7 @@
<input type="hidden" name="object_model_name" value="{object_model_name}">
<input type="hidden" name="_schedule_type" value="immediately">
<input type="hidden" name="_return_url" value="{redirect_path}">
+<input type="hidden" name="_commit" value="on">
"""
NO_CONFIRM_BUTTON = """
@@ -170,8 +171,10 @@
for jb in buttons:
# Render grouped buttons as list items
button_html, form_html = _render_job_button_for_obj(jb, obj, context, content_type)
- buttons_rendered += format_html("<li>{}</li>", button_html)
- forms_html += form_html
+ if button_html:
+ buttons_rendered += format_html("<li>{}</li>", button_html)
+ if form_html:
+ forms_html += form_html
if buttons_rendered:
buttons_html += format_html(
| {"golden_diff": "diff --git a/nautobot/extras/templatetags/job_buttons.py b/nautobot/extras/templatetags/job_buttons.py\n--- a/nautobot/extras/templatetags/job_buttons.py\n+++ b/nautobot/extras/templatetags/job_buttons.py\n@@ -29,6 +29,7 @@\n <input type=\"hidden\" name=\"object_model_name\" value=\"{object_model_name}\">\n <input type=\"hidden\" name=\"_schedule_type\" value=\"immediately\">\n <input type=\"hidden\" name=\"_return_url\" value=\"{redirect_path}\">\n+<input type=\"hidden\" name=\"_commit\" value=\"on\">\n \"\"\"\n \n NO_CONFIRM_BUTTON = \"\"\"\n@@ -170,8 +171,10 @@\n for jb in buttons:\n # Render grouped buttons as list items\n button_html, form_html = _render_job_button_for_obj(jb, obj, context, content_type)\n- buttons_rendered += format_html(\"<li>{}</li>\", button_html)\n- forms_html += form_html\n+ if button_html:\n+ buttons_rendered += format_html(\"<li>{}</li>\", button_html)\n+ if form_html:\n+ forms_html += form_html\n \n if buttons_rendered:\n buttons_html += format_html(\n", "issue": "Job Buttons do not commit changes in their job.\n<!--\r\n NOTE: IF YOUR ISSUE DOES NOT FOLLOW THIS TEMPLATE, IT WILL BE CLOSED.\r\n\r\n This form is only for reporting reproducible bugs. If you need assistance\r\n with Nautobot installation, or if you have a general question, please start a\r\n discussion instead: https://github.com/nautobot/nautobot/discussions\r\n\r\n Please describe the environment in which you are running Nautobot. Be sure\r\n that you are running an unmodified instance of the latest stable release\r\n before submitting a bug report, and that any plugins have been disabled.\r\n-->\r\n### Environment\r\n* Nautobot version (Docker tag too if applicable): 1.6.11\r\n* Python version: 3.11\r\n* Database platform, version: MySQL\r\n* Middleware(s): NA\r\n\r\n<!--\r\n Describe in detail the exact steps that someone else can take to reproduce\r\n this bug using the current stable release of Nautobot. Begin with the\r\n creation of any necessary database objects and call out every operation\r\n being performed explicitly. If reporting a bug in the REST API, be sure to\r\n reconstruct the raw HTTP request(s) being made: Don't rely on a client\r\n library such as pynautobot.\r\n-->\r\n### Steps to Reproduce\r\n1. Create JobButtonReciever, set it to commit by default.\r\n2. Create a Job Button and assign to a ContentType.\r\n3. Click the Job Button on the assigned Content Type.\r\n\r\n<!-- What did you expect to happen? -->\r\n### Expected Behavior\r\n\r\nThe job runs and commits the changes.\r\n\r\n<!-- What happened instead? 
-->\r\n### Observed Behavior\r\n\r\nThe Job runs and the database changes are reverted.\r\n\r\n\n", "before_files": [{"content": "from collections import OrderedDict\n\nfrom django import template\nfrom django.contrib.contenttypes.models import ContentType\nfrom django.urls import reverse\nfrom django.utils.html import format_html\nfrom django.utils.safestring import mark_safe\n\nfrom nautobot.extras.models import Job, JobButton\nfrom nautobot.utilities.utils import render_jinja2\n\n\nregister = template.Library()\n\nGROUP_DROPDOWN = \"\"\"\n<div class=\"btn-group\">\n <button type=\"button\" class=\"btn btn-sm btn-{group_button_class} dropdown-toggle\" data-toggle=\"dropdown\">\n {group_name} <span class=\"caret\"></span>\n </button>\n <ul class=\"dropdown-menu pull-right\">\n {grouped_buttons}\n </ul>\n</div>\n\"\"\"\n\nHIDDEN_INPUTS = \"\"\"\n<input type=\"hidden\" name=\"csrfmiddlewaretoken\" value=\"{csrf_token}\">\n<input type=\"hidden\" name=\"object_pk\" value=\"{object_pk}\">\n<input type=\"hidden\" name=\"object_model_name\" value=\"{object_model_name}\">\n<input type=\"hidden\" name=\"_schedule_type\" value=\"immediately\">\n<input type=\"hidden\" name=\"_return_url\" value=\"{redirect_path}\">\n\"\"\"\n\nNO_CONFIRM_BUTTON = \"\"\"\n<button type=\"submit\" form=\"form_id_{button_id}\" class=\"btn btn-sm btn-{button_class}\" {disabled}>{button_text}</button>\n\"\"\"\n\nNO_CONFIRM_FORM = \"\"\"\n<form id=\"form_id_{button_id}\" action=\"{button_url}\" method=\"post\" class=\"form\">\n {hidden_inputs}\n</form>\n\"\"\"\n\nCONFIRM_BUTTON = \"\"\"\n<button type=\"button\" class=\"btn btn-sm btn-{button_class}\" data-toggle=\"modal\" data-target=\"#confirm_modal_id_{button_id}\" {disabled}>\n {button_text}\n</button>\n\"\"\"\n\nCONFIRM_MODAL = \"\"\"\n<div class=\"modal fade\" id=\"confirm_modal_id_{button_id}\" tabindex=\"-1\" role=\"dialog\" aria-labelledby=\"confirm_modal_label_{button_id}\">\n <div class=\"modal-dialog\" role=\"document\">\n <div class=\"modal-content\">\n <div class=\"modal-header\">\n <button type=\"button\" class=\"close\" data-dismiss=\"modal\" aria-label=\"Close\"><span aria-hidden=\"true\">×</span></button>\n <h4 class=\"modal-title\" id=\"confirm_modal_label_{button_id}\">Confirmation</h4>\n </div>\n <form id=\"form_id_{button_id}\" action=\"{button_url}\" method=\"post\" class=\"form\">\n <div class=\"modal-body\">\n {hidden_inputs}\n Run Job <strong>'{job}'</strong> with object <strong>'{object}'</strong>?\n </div>\n <div class=\"modal-footer\">\n <button type=\"button\" class=\"btn btn-default\" data-dismiss=\"modal\">Cancel</button>\n <button type=\"submit\" class=\"btn btn-primary\">Confirm</button>\n </div>\n </form>\n </div>\n </div>\n</div>\n\"\"\"\n\nSAFE_EMPTY_STR = mark_safe(\"\") # noqa: S308\n\n\ndef _render_job_button_for_obj(job_button, obj, context, content_type):\n \"\"\"\n Helper method for job_buttons templatetag to reduce repetition of code.\n\n Returns:\n (str, str): (button_html, form_html)\n \"\"\"\n # Pass select context data when rendering the JobButton text as Jinja2\n button_context = {\n \"obj\": obj,\n \"debug\": context.get(\"debug\", False), # django.template.context_processors.debug\n \"request\": context[\"request\"], # django.template.context_processors.request\n \"user\": context[\"user\"], # django.contrib.auth.context_processors.auth\n \"perms\": context[\"perms\"], # django.contrib.auth.context_processors.auth\n }\n try:\n text_rendered = render_jinja2(job_button.text, button_context)\n except Exception as exc:\n return 
(\n format_html(\n '<a class=\"btn btn-sm btn-{}\" disabled=\"disabled\" title=\"{}\"><i class=\"mdi mdi-alert\"></i> {}</a>\\n',\n \"default\" if not job_button.group_name else \"link\",\n exc,\n job_button.name,\n ),\n SAFE_EMPTY_STR,\n )\n\n if not text_rendered:\n return (SAFE_EMPTY_STR, SAFE_EMPTY_STR)\n\n # Disable buttons if the user doesn't have permission to run the underlying Job.\n has_run_perm = Job.objects.check_perms(context[\"user\"], instance=job_button.job, action=\"run\")\n hidden_inputs = format_html(\n HIDDEN_INPUTS,\n csrf_token=context[\"csrf_token\"],\n object_pk=obj.pk,\n object_model_name=f\"{content_type.app_label}.{content_type.model}\",\n redirect_path=context[\"request\"].path,\n )\n template_args = {\n \"button_id\": job_button.pk,\n \"button_text\": text_rendered,\n \"button_class\": job_button.button_class if not job_button.group_name else \"link\",\n \"button_url\": reverse(\"extras:job_run\", kwargs={\"slug\": job_button.job.slug}),\n \"object\": obj,\n \"job\": job_button.job,\n \"hidden_inputs\": hidden_inputs,\n \"disabled\": \"\" if has_run_perm else \"disabled\",\n }\n\n if job_button.confirmation:\n return (\n format_html(CONFIRM_BUTTON, **template_args),\n format_html(CONFIRM_MODAL, **template_args),\n )\n else:\n return (\n format_html(NO_CONFIRM_BUTTON, **template_args),\n format_html(NO_CONFIRM_FORM, **template_args),\n )\n\n\[email protected]_tag(takes_context=True)\ndef job_buttons(context, obj):\n \"\"\"\n Render all applicable job buttons for the given object.\n \"\"\"\n content_type = ContentType.objects.get_for_model(obj)\n # We will enforce \"run\" permission later in deciding which buttons to show as disabled.\n buttons = JobButton.objects.filter(content_types=content_type)\n if not buttons:\n return SAFE_EMPTY_STR\n\n buttons_html = forms_html = SAFE_EMPTY_STR\n group_names = OrderedDict()\n\n for jb in buttons:\n # Organize job buttons by group for later processing\n if jb.group_name:\n group_names.setdefault(jb.group_name, []).append(jb)\n\n # Render and add non-grouped buttons\n else:\n button_html, form_html = _render_job_button_for_obj(jb, obj, context, content_type)\n buttons_html += button_html\n forms_html += form_html\n\n # Add grouped buttons to template\n for group_name, buttons in group_names.items():\n group_button_class = buttons[0].button_class\n\n buttons_rendered = SAFE_EMPTY_STR\n\n for jb in buttons:\n # Render grouped buttons as list items\n button_html, form_html = _render_job_button_for_obj(jb, obj, context, content_type)\n buttons_rendered += format_html(\"<li>{}</li>\", button_html)\n forms_html += form_html\n\n if buttons_rendered:\n buttons_html += format_html(\n GROUP_DROPDOWN,\n group_button_class=group_button_class,\n group_name=group_name,\n grouped_buttons=buttons_rendered,\n )\n\n # We want all of the buttons first and then any modals and forms so the buttons render properly\n return buttons_html + forms_html\n", "path": "nautobot/extras/templatetags/job_buttons.py"}]} | 2,975 | 276 |
gh_patches_debug_67407 | rasdani/github-patches | git_diff | mitmproxy__mitmproxy-1711 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
pathoc does not accept `:pa,f` to pause forever at end of message
##### Steps to reproduce the problem:
`pathoc www.example.com 'get:/:pa,f'`
##### What is the expected behavior?
Send request, but pause forever after sending.
##### What went wrong?
I get a stack trace with "a float is required".
```
$ pathoc www.example.com 'get:/:pa,f'
08-09-16 16:59:41: >> 'GET':/:pa,f
Traceback (most recent call last):
File "/usr/local/bin/pathoc", line 11, in <module>
sys.exit(go_pathoc())
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc_cmdline.py", line 226, in go_pathoc
pathoc.main(args)
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py", line 522, in main
ret = p.request(spec)
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py", line 452, in request
return self.http(r)
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py", line 432, in http
return resp
File "/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py", line 411, in http
req = language.serve(r, self.wfile, self.settings)
File "/usr/local/lib/python2.7/dist-packages/pathod/language/__init__.py", line 105, in serve
disconnect = writer.write_values(fp, vals, actions[:])
File "/usr/local/lib/python2.7/dist-packages/pathod/language/writer.py", line 61, in write_values
time.sleep(a[2])
TypeError: a float is required
```
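
Looking at `writer.py` (included below), the in-stream pause branch already translates the literal `"f"` into a long sleep via the module's `FOREVER` constant, but the trailing-actions loop that the traceback points at (line 61) hands the raw `"f"` straight to `time.sleep()`. A minimal sketch of the mapping I would expect there — assuming `FOREVER` is the intended substitute, as it is for mid-stream pauses:

```python
import time

# Same sentinel writer.py already defines: ~1 year, well under time.sleep's limit.
FOREVER = 60 * 60 * 24 * 365


def sleep_for(pause_arg):
    """Sleep for the given pause argument; the literal "f" means 'forever'."""
    time.sleep(FOREVER if pause_arg == "f" else pause_arg)
```

Using that helper (or the equivalent inline conditional) in the `while actions:` remainder loop should make `:pa,f` behave like the other pause positions.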
##### Any other comments? What have you tried so far?
All other combinations of pause flags work as expected:
```
$ pathoc www.example.com 'get:/:p2,5'
08-09-16 17:05:07: >> 'GET':/:p2,5
<< 200 OK: 1270 bytes
$ pathoc www.example.com 'get:/:pr,5'
08-09-16 17:05:21: >> 'GET':/:pr,5
<< 200 OK: 1270 bytes
$ pathoc www.example.com 'get:/:pa,5'
08-09-16 17:05:41: >> 'GET':/:pa,5
<< 200 OK: 1270 bytes
$ pathoc www.example.com 'get:/:p2,f'
^C08-09-16 17:04:46: >> 'GET':/:p2,f
$ pathoc www.example.com 'get:/:pr,f'
^C08-09-16 17:04:55: >> 'GET':/:pr,f
```
---
pathoc version: 0.17
Operating System: Debian Linux 8.5 "Jessie" x64
</issue>
<code>
[start of pathod/language/writer.py]
1 import time
2 from mitmproxy import exceptions
3
4 BLOCKSIZE = 1024
5 # It's not clear what the upper limit for time.sleep is. It's lower than the
6 # maximum int or float. 1 year should do.
7 FOREVER = 60 * 60 * 24 * 365
8
9
10 def send_chunk(fp, val, blocksize, start, end):
11 """
12 (start, end): Inclusive lower bound, exclusive upper bound.
13 """
14 for i in range(start, end, blocksize):
15 fp.write(
16 val[i:min(i + blocksize, end)]
17 )
18 return end - start
19
20
21 def write_values(fp, vals, actions, sofar=0, blocksize=BLOCKSIZE):
22 """
23 vals: A list of values, which may be strings or Value objects.
24
25 actions: A list of (offset, action, arg) tuples. Action may be "inject",
26 "pause" or "disconnect".
27
28 Both vals and actions are in reverse order, with the first items last.
29
30 Return True if connection should disconnect.
31 """
32 sofar = 0
33 try:
34 while vals:
35 v = vals.pop()
36 offset = 0
37 while actions and actions[-1][0] < (sofar + len(v)):
38 a = actions.pop()
39 offset += send_chunk(
40 fp,
41 v,
42 blocksize,
43 offset,
44 a[0] - sofar - offset
45 )
46 if a[1] == "pause":
47 time.sleep(
48 FOREVER if a[2] == "f" else a[2]
49 )
50 elif a[1] == "disconnect":
51 return True
52 elif a[1] == "inject":
53 send_chunk(fp, a[2], blocksize, 0, len(a[2]))
54 send_chunk(fp, v, blocksize, offset, len(v))
55 sofar += len(v)
56 # Remainders
57 while actions:
58 a = actions.pop()
59 if a[1] == "pause":
60 time.sleep(a[2])
61 elif a[1] == "disconnect":
62 return True
63 elif a[1] == "inject":
64 send_chunk(fp, a[2], blocksize, 0, len(a[2]))
65 except exceptions.TcpDisconnect: # pragma: no cover
66 return True
67
[end of pathod/language/writer.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pathod/language/writer.py b/pathod/language/writer.py
--- a/pathod/language/writer.py
+++ b/pathod/language/writer.py
@@ -57,7 +57,9 @@
while actions:
a = actions.pop()
if a[1] == "pause":
- time.sleep(a[2])
+ time.sleep(
+ FOREVER if a[2] == "f" else a[2]
+ )
elif a[1] == "disconnect":
return True
elif a[1] == "inject":
| {"golden_diff": "diff --git a/pathod/language/writer.py b/pathod/language/writer.py\n--- a/pathod/language/writer.py\n+++ b/pathod/language/writer.py\n@@ -57,7 +57,9 @@\n while actions:\n a = actions.pop()\n if a[1] == \"pause\":\n- time.sleep(a[2])\n+ time.sleep(\n+ FOREVER if a[2] == \"f\" else a[2]\n+ )\n elif a[1] == \"disconnect\":\n return True\n elif a[1] == \"inject\":\n", "issue": "pathoc does not accept `:pa,f` to pause forever at end of message\n##### Steps to reproduce the problem:\n\n`pathoc www.example.com 'get:/:pa,f'`\n##### What is the expected behavior?\n\nSend request, but pause forever after sending.\n##### What went wrong?\n\nI get a stack trace with \"a float is required\".\n\n```\n$ pathoc www.example.com 'get:/:pa,f'\n08-09-16 16:59:41: >> 'GET':/:pa,f\nTraceback (most recent call last):\n File \"/usr/local/bin/pathoc\", line 11, in <module>\n sys.exit(go_pathoc())\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc_cmdline.py\", line 226, in go_pathoc\n pathoc.main(args)\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py\", line 522, in main\n ret = p.request(spec)\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py\", line 452, in request\n return self.http(r)\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py\", line 432, in http\n return resp\n File \"/usr/local/lib/python2.7/dist-packages/pathod/pathoc.py\", line 411, in http\n req = language.serve(r, self.wfile, self.settings)\n File \"/usr/local/lib/python2.7/dist-packages/pathod/language/__init__.py\", line 105, in serve\n disconnect = writer.write_values(fp, vals, actions[:])\n File \"/usr/local/lib/python2.7/dist-packages/pathod/language/writer.py\", line 61, in write_values\n time.sleep(a[2])\nTypeError: a float is required\n```\n##### Any other comments? What have you tried so far?\n\nAll other combinations of pause flags work as expected:\n\n```\n$ pathoc www.example.com 'get:/:p2,5'\n08-09-16 17:05:07: >> 'GET':/:p2,5\n<< 200 OK: 1270 bytes\n$ pathoc www.example.com 'get:/:pr,5'\n08-09-16 17:05:21: >> 'GET':/:pr,5\n<< 200 OK: 1270 bytes\n$ pathoc www.example.com 'get:/:pa,5'\n08-09-16 17:05:41: >> 'GET':/:pa,5\n<< 200 OK: 1270 bytes\n$ pathoc www.example.com 'get:/:p2,f'\n^C08-09-16 17:04:46: >> 'GET':/:p2,f\n$ pathoc www.example.com 'get:/:pr,f'\n^C08-09-16 17:04:55: >> 'GET':/:pr,f\n```\n\n---\n\npathoc version: 0.17\nOperating System: Debian Linux 8.5 \"Jessie\" x64\n\n", "before_files": [{"content": "import time\nfrom mitmproxy import exceptions\n\nBLOCKSIZE = 1024\n# It's not clear what the upper limit for time.sleep is. It's lower than the\n# maximum int or float. 1 year should do.\nFOREVER = 60 * 60 * 24 * 365\n\n\ndef send_chunk(fp, val, blocksize, start, end):\n \"\"\"\n (start, end): Inclusive lower bound, exclusive upper bound.\n \"\"\"\n for i in range(start, end, blocksize):\n fp.write(\n val[i:min(i + blocksize, end)]\n )\n return end - start\n\n\ndef write_values(fp, vals, actions, sofar=0, blocksize=BLOCKSIZE):\n \"\"\"\n vals: A list of values, which may be strings or Value objects.\n\n actions: A list of (offset, action, arg) tuples. 
Action may be \"inject\",\n \"pause\" or \"disconnect\".\n\n Both vals and actions are in reverse order, with the first items last.\n\n Return True if connection should disconnect.\n \"\"\"\n sofar = 0\n try:\n while vals:\n v = vals.pop()\n offset = 0\n while actions and actions[-1][0] < (sofar + len(v)):\n a = actions.pop()\n offset += send_chunk(\n fp,\n v,\n blocksize,\n offset,\n a[0] - sofar - offset\n )\n if a[1] == \"pause\":\n time.sleep(\n FOREVER if a[2] == \"f\" else a[2]\n )\n elif a[1] == \"disconnect\":\n return True\n elif a[1] == \"inject\":\n send_chunk(fp, a[2], blocksize, 0, len(a[2]))\n send_chunk(fp, v, blocksize, offset, len(v))\n sofar += len(v)\n # Remainders\n while actions:\n a = actions.pop()\n if a[1] == \"pause\":\n time.sleep(a[2])\n elif a[1] == \"disconnect\":\n return True\n elif a[1] == \"inject\":\n send_chunk(fp, a[2], blocksize, 0, len(a[2]))\n except exceptions.TcpDisconnect: # pragma: no cover\n return True\n", "path": "pathod/language/writer.py"}]} | 1,890 | 126 |
gh_patches_debug_10220 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-391 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Missing ABC for Metric
[Missing ABC](https://github.com/open-telemetry/opentelemetry-python/blob/master/opentelemetry-api/src/opentelemetry/metrics/__init__.py#L90) for `Metric` seems strange to me.
I guess in PR #311 and Issue #66 it was settled to add ABCs for all interfaces.
Maybe it should look like:
```python
class Metric(abc.ABC):
"""...
"""
@abc.abstractmethod
def get_handle(self, label_set: LabelSet) -> "object":
"""...
"""
```
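
For completeness, a slightly fuller sketch of what this would imply for the concrete classes — purely illustrative; only the names and the `DefaultMetricHandle` return value are taken from `metrics/__init__.py` below:

```python
import abc


class LabelSet(abc.ABC):
    """A canonicalized set of labels (as in the existing module)."""


class DefaultMetricHandle:
    """The default MetricHandle, used when no implementation is available."""


class Metric(abc.ABC):
    """Base class for various types of metrics."""

    @abc.abstractmethod
    def get_handle(self, label_set: LabelSet) -> "object":
        """Gets a handle, used for repeated-use of metrics instruments."""


class DefaultMetric(Metric):
    """Default Metric; must now explicitly implement the abstract method."""

    def get_handle(self, label_set: LabelSet) -> "DefaultMetricHandle":
        return DefaultMetricHandle()
```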
</issue>
<code>
[start of opentelemetry-api/src/opentelemetry/metrics/__init__.py]
1 # Copyright 2019, OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """
16 The OpenTelemetry metrics API describes the classes used to report raw
17 measurements, as well as metrics with known aggregation and labels.
18
19 The `Meter` class is used to construct `Metric` s to record raw statistics
20 as well as metrics with predefined aggregation.
21
22 See the `metrics api`_ spec for terminology and context clarification.
23
24 .. _metrics api:
25 https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/api-metrics.md
26
27
28 """
29 import abc
30 from typing import Callable, Dict, Optional, Sequence, Tuple, Type, TypeVar
31
32 from opentelemetry.util import loader
33
34 ValueT = TypeVar("ValueT", int, float)
35
36
37 class DefaultMetricHandle:
38 """The default MetricHandle.
39
40 Used when no MetricHandle implementation is available.
41 """
42
43
44 class CounterHandle:
45 def add(self, value: ValueT) -> None:
46 """Increases the value of the handle by ``value``.
47
48 Args:
49 value: The value to record to the handle.
50 """
51
52
53 class GaugeHandle:
54 def set(self, value: ValueT) -> None:
55 """Sets the current value of the handle to ``value``.
56
57 Args:
58 value: The value to record to the handle.
59 """
60
61
62 class MeasureHandle:
63 def record(self, value: ValueT) -> None:
64 """Records the given ``value`` to this handle.
65
66 Args:
67 value: The value to record to the handle.
68 """
69
70
71 class LabelSet(abc.ABC):
72 """A canonicalized set of labels useful for preaggregation
73
74 Re-usable LabelSet objects provide a potential optimization for scenarios
75 where handles might not be effective. For example, if the LabelSet will be
76 re-used but only used once per metrics, handles do not offer any
77 optimization. It may best to pre-compute a canonicalized LabelSet once and
78 re-use it with the direct calling convention. LabelSets are immutable and
79 should be opaque in implementation.
80 """
81
82
83 class DefaultLabelSet(LabelSet):
84 """The default LabelSet.
85
86 Used when no LabelSet implementation is available.
87 """
88
89
90 class Metric:
91 """Base class for various types of metrics.
92
93 Metric class that inherit from this class are specialized with the type of
94 handle that the metric holds.
95 """
96
97 def get_handle(self, label_set: LabelSet) -> "object":
98 """Gets a handle, used for repeated-use of metrics instruments.
99
100 Handles are useful to reduce the cost of repeatedly recording a metric
101 with a pre-defined set of label values. All metric kinds (counter,
102 gauge, measure) support declaring a set of required label keys. The
103 values corresponding to these keys should be specified in every handle.
104 "Unspecified" label values, in cases where a handle is requested but
105 a value was not provided are permitted.
106
107 Args:
108 label_set: `LabelSet` to associate with the returned handle.
109 """
110
111
112 class DefaultMetric(Metric):
113 """The default Metric used when no Metric implementation is available."""
114
115 def get_handle(self, label_set: LabelSet) -> "DefaultMetricHandle":
116 """Gets a `DefaultMetricHandle`.
117
118 Args:
119 label_set: `LabelSet` to associate with the returned handle.
120 """
121 return DefaultMetricHandle()
122
123
124 class Counter(Metric):
125 """A counter type metric that expresses the computation of a sum."""
126
127 def get_handle(self, label_set: LabelSet) -> "CounterHandle":
128 """Gets a `CounterHandle`."""
129 return CounterHandle()
130
131 def add(self, label_set: LabelSet, value: ValueT) -> None:
132 """Increases the value of the counter by ``value``.
133
134 Args:
135 label_set: `LabelSet` to associate with the returned handle.
136 value: The value to add to the counter metric.
137 """
138
139
140 class Gauge(Metric):
141 """A gauge type metric that expresses a pre-calculated value.
142
143 Gauge metrics have a value that is either ``Set`` by explicit
144 instrumentation or observed through a callback. This kind of metric
145 should be used when the metric cannot be expressed as a sum or because
146 the measurement interval is arbitrary.
147 """
148
149 def get_handle(self, label_set: LabelSet) -> "GaugeHandle":
150 """Gets a `GaugeHandle`."""
151 return GaugeHandle()
152
153 def set(self, label_set: LabelSet, value: ValueT) -> None:
154 """Sets the value of the gauge to ``value``.
155
156 Args:
157 label_set: `LabelSet` to associate with the returned handle.
158 value: The value to set the gauge metric to.
159 """
160
161
162 class Measure(Metric):
163 """A measure type metric that represent raw stats that are recorded.
164
165 Measure metrics represent raw statistics that are recorded. By
166 default, measure metrics can accept both positive and negatives.
167 Negative inputs will be discarded when monotonic is True.
168 """
169
170 def get_handle(self, label_set: LabelSet) -> "MeasureHandle":
171 """Gets a `MeasureHandle` with a float value."""
172 return MeasureHandle()
173
174 def record(self, label_set: LabelSet, value: ValueT) -> None:
175 """Records the ``value`` to the measure.
176
177 Args:
178 label_set: `LabelSet` to associate with the returned handle.
179 value: The value to record to this measure metric.
180 """
181
182
183 MetricT = TypeVar("MetricT", Counter, Gauge, Measure)
184
185
186 # pylint: disable=unused-argument
187 class Meter(abc.ABC):
188 """An interface to allow the recording of metrics.
189
190 `Metric` s are used for recording pre-defined aggregation (gauge and
191 counter), or raw values (measure) in which the aggregation and labels
192 for the exported metric are deferred.
193 """
194
195 @abc.abstractmethod
196 def record_batch(
197 self,
198 label_set: LabelSet,
199 record_tuples: Sequence[Tuple["Metric", ValueT]],
200 ) -> None:
201 """Atomically records a batch of `Metric` and value pairs.
202
203 Allows the functionality of acting upon multiple metrics with
204 a single API call. Implementations should find metric and handles that
205 match the key-value pairs in the label tuples.
206
207 Args:
208 label_set: The `LabelSet` associated with all measurements in
209 the batch. A measurement is a tuple, representing the `Metric`
210 being recorded and the corresponding value to record.
211 record_tuples: A sequence of pairs of `Metric` s and the
212 corresponding value to record for that metric.
213 """
214
215 @abc.abstractmethod
216 def create_metric(
217 self,
218 name: str,
219 description: str,
220 unit: str,
221 value_type: Type[ValueT],
222 metric_type: Type[MetricT],
223 label_keys: Sequence[str] = (),
224 enabled: bool = True,
225 monotonic: bool = False,
226 ) -> "Metric":
227 """Creates a ``metric_kind`` metric with type ``value_type``.
228
229 Args:
230 name: The name of the metric.
231 description: Human-readable description of the metric.
232 unit: Unit of the metric values.
233 value_type: The type of values being recorded by the metric.
234 metric_type: The type of metric being created.
235 label_keys: The keys for the labels with dynamic values.
236 enabled: Whether to report the metric by default.
237 monotonic: Whether to only allow non-negative values.
238
239 Returns: A new ``metric_type`` metric with values of ``value_type``.
240 """
241
242 @abc.abstractmethod
243 def get_label_set(self, labels: Dict[str, str]) -> "LabelSet":
244 """Gets a `LabelSet` with the given labels.
245
246 Args:
247 labels: A dictionary representing label key to label value pairs.
248
249 Returns: A `LabelSet` object canonicalized using the given input.
250 """
251
252
253 class DefaultMeter(Meter):
254 """The default Meter used when no Meter implementation is available."""
255
256 def record_batch(
257 self,
258 label_set: LabelSet,
259 record_tuples: Sequence[Tuple["Metric", ValueT]],
260 ) -> None:
261 pass
262
263 def create_metric(
264 self,
265 name: str,
266 description: str,
267 unit: str,
268 value_type: Type[ValueT],
269 metric_type: Type[MetricT],
270 label_keys: Sequence[str] = (),
271 enabled: bool = True,
272 monotonic: bool = False,
273 ) -> "Metric":
274 # pylint: disable=no-self-use
275 return DefaultMetric()
276
277 def get_label_set(self, labels: Dict[str, str]) -> "LabelSet":
278 # pylint: disable=no-self-use
279 return DefaultLabelSet()
280
281
282 # Once https://github.com/python/mypy/issues/7092 is resolved,
283 # the following type definition should be replaced with
284 # from opentelemetry.util.loader import ImplementationFactory
285 ImplementationFactory = Callable[[Type[Meter]], Optional[Meter]]
286
287 _METER = None
288 _METER_FACTORY = None
289
290
291 def meter() -> Meter:
292 """Gets the current global :class:`~.Meter` object.
293
294 If there isn't one set yet, a default will be loaded.
295 """
296 global _METER, _METER_FACTORY # pylint:disable=global-statement
297
298 if _METER is None:
299 # pylint:disable=protected-access
300 _METER = loader._load_impl(DefaultMeter, _METER_FACTORY)
301 del _METER_FACTORY
302
303 return _METER
304
305
306 def set_preferred_meter_implementation(factory: ImplementationFactory) -> None:
307 """Set the factory to be used to create the meter.
308
309 See :mod:`opentelemetry.util.loader` for details.
310
311 This function may not be called after a meter is already loaded.
312
313 Args:
314 factory: Callback that should create a new :class:`Meter` instance.
315 """
316 global _METER, _METER_FACTORY # pylint:disable=global-statement
317
318 if _METER:
319 raise RuntimeError("Meter already loaded.")
320
321 _METER_FACTORY = factory
322
[end of opentelemetry-api/src/opentelemetry/metrics/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/opentelemetry-api/src/opentelemetry/metrics/__init__.py b/opentelemetry-api/src/opentelemetry/metrics/__init__.py
--- a/opentelemetry-api/src/opentelemetry/metrics/__init__.py
+++ b/opentelemetry-api/src/opentelemetry/metrics/__init__.py
@@ -87,13 +87,14 @@
"""
-class Metric:
+class Metric(abc.ABC):
"""Base class for various types of metrics.
Metric class that inherit from this class are specialized with the type of
handle that the metric holds.
"""
+ @abc.abstractmethod
def get_handle(self, label_set: LabelSet) -> "object":
"""Gets a handle, used for repeated-use of metrics instruments.
| {"golden_diff": "diff --git a/opentelemetry-api/src/opentelemetry/metrics/__init__.py b/opentelemetry-api/src/opentelemetry/metrics/__init__.py\n--- a/opentelemetry-api/src/opentelemetry/metrics/__init__.py\n+++ b/opentelemetry-api/src/opentelemetry/metrics/__init__.py\n@@ -87,13 +87,14 @@\n \"\"\"\n \n \n-class Metric:\n+class Metric(abc.ABC):\n \"\"\"Base class for various types of metrics.\n \n Metric class that inherit from this class are specialized with the type of\n handle that the metric holds.\n \"\"\"\n \n+ @abc.abstractmethod\n def get_handle(self, label_set: LabelSet) -> \"object\":\n \"\"\"Gets a handle, used for repeated-use of metrics instruments.\n", "issue": "Missing ABC for Metric\n[Missing ABC](https://github.com/open-telemetry/opentelemetry-python/blob/master/opentelemetry-api/src/opentelemetry/metrics/__init__.py#L90) for `Metric` seems strange to me.\r\n I guess in the PR #311 and Issue #66 settle to add ABC for all interfaces.\r\n\r\nMaybe it should look like:\r\n```python\r\n\r\nclass Metric(abc.ABC):\r\n \"\"\"...\r\n \"\"\"\r\n\r\n @abc.abstractmethod\r\n def get_handle(self, label_set: LabelSet) -> \"object\":\r\n \"\"\"...\r\n \"\"\"\r\n\r\n\r\n```\nMissing ABC for Metric\n[Missing ABC](https://github.com/open-telemetry/opentelemetry-python/blob/master/opentelemetry-api/src/opentelemetry/metrics/__init__.py#L90) for `Metric` seems strange to me.\r\n I guess in the PR #311 and Issue #66 settle to add ABC for all interfaces.\r\n\r\nMaybe it should look like:\r\n```python\r\n\r\nclass Metric(abc.ABC):\r\n \"\"\"...\r\n \"\"\"\r\n\r\n @abc.abstractmethod\r\n def get_handle(self, label_set: LabelSet) -> \"object\":\r\n \"\"\"...\r\n \"\"\"\r\n\r\n\r\n```\n", "before_files": [{"content": "# Copyright 2019, OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"\nThe OpenTelemetry metrics API describes the classes used to report raw\nmeasurements, as well as metrics with known aggregation and labels.\n\nThe `Meter` class is used to construct `Metric` s to record raw statistics\nas well as metrics with predefined aggregation.\n\nSee the `metrics api`_ spec for terminology and context clarification.\n\n.. 
_metrics api:\n https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/api-metrics.md\n\n\n\"\"\"\nimport abc\nfrom typing import Callable, Dict, Optional, Sequence, Tuple, Type, TypeVar\n\nfrom opentelemetry.util import loader\n\nValueT = TypeVar(\"ValueT\", int, float)\n\n\nclass DefaultMetricHandle:\n \"\"\"The default MetricHandle.\n\n Used when no MetricHandle implementation is available.\n \"\"\"\n\n\nclass CounterHandle:\n def add(self, value: ValueT) -> None:\n \"\"\"Increases the value of the handle by ``value``.\n\n Args:\n value: The value to record to the handle.\n \"\"\"\n\n\nclass GaugeHandle:\n def set(self, value: ValueT) -> None:\n \"\"\"Sets the current value of the handle to ``value``.\n\n Args:\n value: The value to record to the handle.\n \"\"\"\n\n\nclass MeasureHandle:\n def record(self, value: ValueT) -> None:\n \"\"\"Records the given ``value`` to this handle.\n\n Args:\n value: The value to record to the handle.\n \"\"\"\n\n\nclass LabelSet(abc.ABC):\n \"\"\"A canonicalized set of labels useful for preaggregation\n\n Re-usable LabelSet objects provide a potential optimization for scenarios\n where handles might not be effective. For example, if the LabelSet will be\n re-used but only used once per metrics, handles do not offer any\n optimization. It may best to pre-compute a canonicalized LabelSet once and\n re-use it with the direct calling convention. LabelSets are immutable and\n should be opaque in implementation.\n \"\"\"\n\n\nclass DefaultLabelSet(LabelSet):\n \"\"\"The default LabelSet.\n\n Used when no LabelSet implementation is available.\n \"\"\"\n\n\nclass Metric:\n \"\"\"Base class for various types of metrics.\n\n Metric class that inherit from this class are specialized with the type of\n handle that the metric holds.\n \"\"\"\n\n def get_handle(self, label_set: LabelSet) -> \"object\":\n \"\"\"Gets a handle, used for repeated-use of metrics instruments.\n\n Handles are useful to reduce the cost of repeatedly recording a metric\n with a pre-defined set of label values. All metric kinds (counter,\n gauge, measure) support declaring a set of required label keys. The\n values corresponding to these keys should be specified in every handle.\n \"Unspecified\" label values, in cases where a handle is requested but\n a value was not provided are permitted.\n\n Args:\n label_set: `LabelSet` to associate with the returned handle.\n \"\"\"\n\n\nclass DefaultMetric(Metric):\n \"\"\"The default Metric used when no Metric implementation is available.\"\"\"\n\n def get_handle(self, label_set: LabelSet) -> \"DefaultMetricHandle\":\n \"\"\"Gets a `DefaultMetricHandle`.\n\n Args:\n label_set: `LabelSet` to associate with the returned handle.\n \"\"\"\n return DefaultMetricHandle()\n\n\nclass Counter(Metric):\n \"\"\"A counter type metric that expresses the computation of a sum.\"\"\"\n\n def get_handle(self, label_set: LabelSet) -> \"CounterHandle\":\n \"\"\"Gets a `CounterHandle`.\"\"\"\n return CounterHandle()\n\n def add(self, label_set: LabelSet, value: ValueT) -> None:\n \"\"\"Increases the value of the counter by ``value``.\n\n Args:\n label_set: `LabelSet` to associate with the returned handle.\n value: The value to add to the counter metric.\n \"\"\"\n\n\nclass Gauge(Metric):\n \"\"\"A gauge type metric that expresses a pre-calculated value.\n\n Gauge metrics have a value that is either ``Set`` by explicit\n instrumentation or observed through a callback. 
This kind of metric\n should be used when the metric cannot be expressed as a sum or because\n the measurement interval is arbitrary.\n \"\"\"\n\n def get_handle(self, label_set: LabelSet) -> \"GaugeHandle\":\n \"\"\"Gets a `GaugeHandle`.\"\"\"\n return GaugeHandle()\n\n def set(self, label_set: LabelSet, value: ValueT) -> None:\n \"\"\"Sets the value of the gauge to ``value``.\n\n Args:\n label_set: `LabelSet` to associate with the returned handle.\n value: The value to set the gauge metric to.\n \"\"\"\n\n\nclass Measure(Metric):\n \"\"\"A measure type metric that represent raw stats that are recorded.\n\n Measure metrics represent raw statistics that are recorded. By\n default, measure metrics can accept both positive and negatives.\n Negative inputs will be discarded when monotonic is True.\n \"\"\"\n\n def get_handle(self, label_set: LabelSet) -> \"MeasureHandle\":\n \"\"\"Gets a `MeasureHandle` with a float value.\"\"\"\n return MeasureHandle()\n\n def record(self, label_set: LabelSet, value: ValueT) -> None:\n \"\"\"Records the ``value`` to the measure.\n\n Args:\n label_set: `LabelSet` to associate with the returned handle.\n value: The value to record to this measure metric.\n \"\"\"\n\n\nMetricT = TypeVar(\"MetricT\", Counter, Gauge, Measure)\n\n\n# pylint: disable=unused-argument\nclass Meter(abc.ABC):\n \"\"\"An interface to allow the recording of metrics.\n\n `Metric` s are used for recording pre-defined aggregation (gauge and\n counter), or raw values (measure) in which the aggregation and labels\n for the exported metric are deferred.\n \"\"\"\n\n @abc.abstractmethod\n def record_batch(\n self,\n label_set: LabelSet,\n record_tuples: Sequence[Tuple[\"Metric\", ValueT]],\n ) -> None:\n \"\"\"Atomically records a batch of `Metric` and value pairs.\n\n Allows the functionality of acting upon multiple metrics with\n a single API call. Implementations should find metric and handles that\n match the key-value pairs in the label tuples.\n\n Args:\n label_set: The `LabelSet` associated with all measurements in\n the batch. 
A measurement is a tuple, representing the `Metric`\n being recorded and the corresponding value to record.\n record_tuples: A sequence of pairs of `Metric` s and the\n corresponding value to record for that metric.\n \"\"\"\n\n @abc.abstractmethod\n def create_metric(\n self,\n name: str,\n description: str,\n unit: str,\n value_type: Type[ValueT],\n metric_type: Type[MetricT],\n label_keys: Sequence[str] = (),\n enabled: bool = True,\n monotonic: bool = False,\n ) -> \"Metric\":\n \"\"\"Creates a ``metric_kind`` metric with type ``value_type``.\n\n Args:\n name: The name of the metric.\n description: Human-readable description of the metric.\n unit: Unit of the metric values.\n value_type: The type of values being recorded by the metric.\n metric_type: The type of metric being created.\n label_keys: The keys for the labels with dynamic values.\n enabled: Whether to report the metric by default.\n monotonic: Whether to only allow non-negative values.\n\n Returns: A new ``metric_type`` metric with values of ``value_type``.\n \"\"\"\n\n @abc.abstractmethod\n def get_label_set(self, labels: Dict[str, str]) -> \"LabelSet\":\n \"\"\"Gets a `LabelSet` with the given labels.\n\n Args:\n labels: A dictionary representing label key to label value pairs.\n\n Returns: A `LabelSet` object canonicalized using the given input.\n \"\"\"\n\n\nclass DefaultMeter(Meter):\n \"\"\"The default Meter used when no Meter implementation is available.\"\"\"\n\n def record_batch(\n self,\n label_set: LabelSet,\n record_tuples: Sequence[Tuple[\"Metric\", ValueT]],\n ) -> None:\n pass\n\n def create_metric(\n self,\n name: str,\n description: str,\n unit: str,\n value_type: Type[ValueT],\n metric_type: Type[MetricT],\n label_keys: Sequence[str] = (),\n enabled: bool = True,\n monotonic: bool = False,\n ) -> \"Metric\":\n # pylint: disable=no-self-use\n return DefaultMetric()\n\n def get_label_set(self, labels: Dict[str, str]) -> \"LabelSet\":\n # pylint: disable=no-self-use\n return DefaultLabelSet()\n\n\n# Once https://github.com/python/mypy/issues/7092 is resolved,\n# the following type definition should be replaced with\n# from opentelemetry.util.loader import ImplementationFactory\nImplementationFactory = Callable[[Type[Meter]], Optional[Meter]]\n\n_METER = None\n_METER_FACTORY = None\n\n\ndef meter() -> Meter:\n \"\"\"Gets the current global :class:`~.Meter` object.\n\n If there isn't one set yet, a default will be loaded.\n \"\"\"\n global _METER, _METER_FACTORY # pylint:disable=global-statement\n\n if _METER is None:\n # pylint:disable=protected-access\n _METER = loader._load_impl(DefaultMeter, _METER_FACTORY)\n del _METER_FACTORY\n\n return _METER\n\n\ndef set_preferred_meter_implementation(factory: ImplementationFactory) -> None:\n \"\"\"Set the factory to be used to create the meter.\n\n See :mod:`opentelemetry.util.loader` for details.\n\n This function may not be called after a meter is already loaded.\n\n Args:\n factory: Callback that should create a new :class:`Meter` instance.\n \"\"\"\n global _METER, _METER_FACTORY # pylint:disable=global-statement\n\n if _METER:\n raise RuntimeError(\"Meter already loaded.\")\n\n _METER_FACTORY = factory\n", "path": "opentelemetry-api/src/opentelemetry/metrics/__init__.py"}]} | 3,978 | 169 |
gh_patches_debug_16454 | rasdani/github-patches | git_diff | huggingface__text-generation-inference-1414 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Local Install: No rule to make target 'install-flash-attention-v2'
### System Info
**Release:** v1.3.4, main branch
**Target**: x86_64-unknown-linux-gnu
**Cargo version**: 1.70.0
**Commit sha**: 630800eed37b15c4b0c9eb8e6ab47212026720f7
**Docker label**: N/A
**nvidia-smi**: Wed Jan 3 18:13:50 2024
```
Mon Jan 8 15:13:03 2024
+---------------------------------------------------------------------------------------+
| NVIDIA-SMI 535.54.03 Driver Version: 535.54.03 CUDA Version: 12.2 |
|-----------------------------------------+----------------------+----------------------+
| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |
| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |
| | | MIG M. |
|=========================================+======================+======================|
| 0 NVIDIA GeForce RTX 4090 On | 00000000:01:00.0 Off | Off |
| 0% 40C P8 26W / 450W | 3MiB / 24564MiB | 0% Default |
| | | N/A |
+-----------------------------------------+----------------------+----------------------+
| 1 NVIDIA GeForce RTX 4090 On | 00000000:02:00.0 Off | Off |
| 0% 46C P8 26W / 450W | 3MiB / 24564MiB | 0% Default |
| | | N/A |
+-----------------------------------------+----------------------+----------------------+
+---------------------------------------------------------------------------------------+
| Processes: |
| GPU GI CI PID Type Process name GPU Memory |
| ID ID Usage |
|=======================================================================================|
| No running processes found |
+---------------------------------------------------------------------------------------+
```
### Information
- [ ] Docker
- [X] The CLI directly
### Tasks
- [X] An officially supported command
- [ ] My own modifications
### Reproduction
Followed steps found within the local install instructions (https://github.com/huggingface/text-generation-inference#local-install), in addition to a couple undocumented make steps
1. Fork & pull from main branch, v1.3.4
2. Install and test Rust: 1.70.0 (90c541806 2023-05-31)
3. Install & test Conda: 23.11.0
5. Install and test libprotoc: 3.21.12
6. Execute `export BUILD_EXTENSIONS=True`
7. Execute `make install`
8. Execute `cd server && make install-vllm-cuda && cd ..`
9. Update source code for paged attention [as described here](https://github.com/huggingface/text-generation-inference/pull/1386/commits/ad7f8396732c12368c7961e15fe2287f1aa42db3)
10. Execute `cd server && make install-flash-attention && cd ..`
11. Execute `make run-falcon-7b-instruct`, which yields the following message:
_Flash Attention V2 is not installed.
Use the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) or install flash attention v2 with `cd server && make install install-flash-attention-v2`_
12. Execute `cd server && make install install-flash-attention-v2`, which yields the following error message:
_make: *** No rule to make target 'install-flash-attention-v2'. Stop._
### Expected behavior
Make target should be found and executed. Solution below.
**SOLUTION**:
The file that issues the warning, `server/text_generation_server/utils/flash_attn.py`, is missing the proper make target suffix. It should be:
`make install install-flash-attention-v2-cuda` or `install-flash-attention-v2-rocm`
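
As a rough sketch only (relying on the `IS_CUDA_SYSTEM` / `IS_ROCM_SYSTEM` flags the module already imports), the warning could derive the platform suffix before raising:

```python
# Sketch of the suggested message fix for the except branch in flash_attn.py;
# only the two imported flags are assumed to exist, the rest is illustrative.
from text_generation_server.utils.import_utils import IS_CUDA_SYSTEM, IS_ROCM_SYSTEM

architecture_suffix = ""
if IS_CUDA_SYSTEM:
    architecture_suffix = "-cuda"
elif IS_ROCM_SYSTEM:
    architecture_suffix = "-rocm"
raise ImportError(
    "Flash Attention V2 is not installed.\n"
    "Use the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) "
    "or install flash attention v2 with "
    f"`cd server && make install install-flash-attention-v2{architecture_suffix}`"
)
```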
</issue>
<code>
[start of server/text_generation_server/utils/flash_attn.py]
1 import os
2 import torch
3
4 from loguru import logger
5
6 from text_generation_server.utils.import_utils import IS_CUDA_SYSTEM, IS_ROCM_SYSTEM
7
8 if os.getenv("USE_FLASH_ATTENTION", "").lower() == "false":
9 raise ImportError("`USE_FLASH_ATTENTION` is false.")
10
11 if not torch.cuda.is_available():
12 raise ImportError("CUDA is not available")
13
14 major, minor = torch.cuda.get_device_capability()
15 is_sm75 = major == 7 and minor == 5
16 is_sm8x = major == 8 and minor >= 0
17 is_sm90 = major == 9 and minor == 0
18
19 HAS_FLASH_ATTN = False
20 HAS_FLASH_ATTN_V2_CUDA = False
21 HAS_FLASH_ATTN_V2_ROCM = False
22 try:
23 try:
24 import flash_attn_2_cuda
25 except ImportError:
26 raise ImportError(
27 "Flash Attention V2 is not installed.\n"
28 "Use the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) "
29 "or install flash attention v2 with `cd server && make install install-flash-attention-v2`"
30 )
31 if not (is_sm8x or is_sm90):
32 raise ImportError(
33 f"GPU with CUDA capability {major} {minor} is not supported for "
34 "Flash Attention V2"
35 )
36 HAS_FLASH_ATTN_V2_CUDA = IS_CUDA_SYSTEM
37 HAS_FLASH_ATTN_V2_ROCM = IS_ROCM_SYSTEM
38 except ImportError as e:
39 try:
40 import flash_attn_cuda
41 except ImportError:
42 raise ImportError(
43 "Flash Attention is not installed.\n"
44 "Use the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) "
45 "or install flash attention with `cd server && make install install-flash-attention`"
46 ) from e
47
48 if IS_CUDA_SYSTEM and not (is_sm75 or is_sm8x or is_sm90):
49 raise ImportError(
50 f"GPU with CUDA capability {major} {minor} is not supported"
51 ) from e
52 elif IS_ROCM_SYSTEM:
53 for idx in range(torch.cuda.device_count()):
54 if "MI210" not in torch.cuda.get_device_name(
55 idx
56 ) and "MI250" not in torch.cuda.get_device_name(idx):
57 raise ImportError(
58 f"AMD GPU {torch.cuda.get_device_name(idx)} does not support flash-attention"
59 )
60
61 logger.warning(f"Unable to use Flash Attention V2: {e}")
62 HAS_FLASH_ATTN = True
63
64
65 def attention(
66 q,
67 k,
68 v,
69 out,
70 cu_seqlens,
71 max_s,
72 softmax_scale,
73 window_size_left=-1,
74 ):
75 if window_size_left <= 0 and window_size_left != -1:
76 raise ValueError("`window_size_left` must be > 0 or -1")
77
78 if HAS_FLASH_ATTN_V2_CUDA:
79 return flash_attn_2_cuda.varlen_fwd(
80 q,
81 k,
82 v,
83 out,
84 cu_seqlens,
85 cu_seqlens,
86 max_s,
87 max_s,
88 0.0,
89 softmax_scale,
90 False,
91 True,
92 window_size_left,
93 0,
94 False,
95 None,
96 )
97 elif HAS_FLASH_ATTN_V2_ROCM:
98 if window_size_left != -1:
99 raise ValueError(
100 f"RoCm version of Flash Attention v2 does not support window attention (window_size_left != -1, got window_size_left={window_size_left})."
101 )
102
103 # RoCm flash API does not take the window_size_left and window_size_right arguments.
104 return flash_attn_2_cuda.varlen_fwd(
105 q,
106 k,
107 v,
108 out,
109 cu_seqlens,
110 cu_seqlens,
111 max_s,
112 max_s,
113 0.0,
114 softmax_scale,
115 False,
116 True,
117 False,
118 None,
119 )
120 elif HAS_FLASH_ATTN:
121 if window_size_left != -1:
122 raise NotImplementedError(
123 "window_size_left is only available with flash attn v2"
124 )
125
126 # Flash attention v1 requires q, k and v to have the same number of heads
127 if k.shape[1] != q.shape[1]:
128 # MQA expand
129 if k.shape[1] == 1:
130 k = k.expand(-1, q.shape[1], -1)
131 # Grouped attention reshape
132 else:
133 original_shape = k.shape
134 k = (
135 k.unsqueeze(2)
136 .expand(-1, -1, q.shape[1] // k.shape[1], -1)
137 .reshape(original_shape[0], -1, original_shape[2])
138 )
139 if v.shape[1] != q.shape[1]:
140 # MQA expand
141 if v.shape[1] == 1:
142 v = v.expand(-1, q.shape[1], -1)
143 # Grouped attention reshape
144 else:
145 original_shape = v.shape
146 v = (
147 v.unsqueeze(2)
148 .expand(-1, -1, q.shape[1] // v.shape[1], -1)
149 .reshape(original_shape[0], -1, original_shape[2])
150 )
151
152 return flash_attn_cuda.fwd(
153 q,
154 k,
155 v,
156 out,
157 cu_seqlens,
158 cu_seqlens,
159 max_s,
160 max_s,
161 0.0,
162 softmax_scale,
163 False,
164 True,
165 False,
166 0,
167 None,
168 )
169
170 raise NotImplementedError("flash attention is not installed")
171
[end of server/text_generation_server/utils/flash_attn.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/server/text_generation_server/utils/flash_attn.py b/server/text_generation_server/utils/flash_attn.py
--- a/server/text_generation_server/utils/flash_attn.py
+++ b/server/text_generation_server/utils/flash_attn.py
@@ -23,10 +23,15 @@
try:
import flash_attn_2_cuda
except ImportError:
+ architecture_suffix = ""
+ if IS_CUDA_SYSTEM:
+ architecture_suffix = "-cuda"
+ elif IS_ROCM_SYSTEM:
+ architecture_suffix = "-rocm"
raise ImportError(
"Flash Attention V2 is not installed.\n"
"Use the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) "
- "or install flash attention v2 with `cd server && make install install-flash-attention-v2`"
+ f"or install flash attention v2 with `cd server && make install install-flash-attention-v2{architecture_suffix}`"
)
if not (is_sm8x or is_sm90):
raise ImportError(
| {"golden_diff": "diff --git a/server/text_generation_server/utils/flash_attn.py b/server/text_generation_server/utils/flash_attn.py\n--- a/server/text_generation_server/utils/flash_attn.py\n+++ b/server/text_generation_server/utils/flash_attn.py\n@@ -23,10 +23,15 @@\n try:\n import flash_attn_2_cuda\n except ImportError:\n+ architecture_suffix = \"\"\n+ if IS_CUDA_SYSTEM:\n+ architecture_suffix = \"-cuda\"\n+ elif IS_ROCM_SYSTEM:\n+ architecture_suffix = \"-rocm\"\n raise ImportError(\n \"Flash Attention V2 is not installed.\\n\"\n \"Use the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) \"\n- \"or install flash attention v2 with `cd server && make install install-flash-attention-v2`\"\n+ f\"or install flash attention v2 with `cd server && make install install-flash-attention-v2{architecture_suffix}`\"\n )\n if not (is_sm8x or is_sm90):\n raise ImportError(\n", "issue": "Local Install: No rule to make target 'install-flash-attention-v2'\n### System Info\r\n\r\n**Release:** v1.3.4, main branch\r\n**Target**: x86_64-unknown-linux-gnu\r\n**Cargo version**: 1.70.0\r\n**Commit sha**: 630800eed37b15c4b0c9eb8e6ab47212026720f7\r\n**Docker label**: N/A\r\n**nvidia-smi**: Wed Jan 3 18:13:50 2024\r\n \r\n```\r\nMon Jan 8 15:13:03 2024\r\n+---------------------------------------------------------------------------------------+\r\n| NVIDIA-SMI 535.54.03 Driver Version: 535.54.03 CUDA Version: 12.2 |\r\n|-----------------------------------------+----------------------+----------------------+\r\n| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |\r\n| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\r\n| | | MIG M. |\r\n|=========================================+======================+======================|\r\n| 0 NVIDIA GeForce RTX 4090 On | 00000000:01:00.0 Off | Off |\r\n| 0% 40C P8 26W / 450W | 3MiB / 24564MiB | 0% Default |\r\n| | | N/A |\r\n+-----------------------------------------+----------------------+----------------------+\r\n| 1 NVIDIA GeForce RTX 4090 On | 00000000:02:00.0 Off | Off |\r\n| 0% 46C P8 26W / 450W | 3MiB / 24564MiB | 0% Default |\r\n| | | N/A |\r\n+-----------------------------------------+----------------------+----------------------+\r\n\r\n+---------------------------------------------------------------------------------------+\r\n| Processes: |\r\n| GPU GI CI PID Type Process name GPU Memory |\r\n| ID ID Usage |\r\n|=======================================================================================|\r\n| No running processes found |\r\n+---------------------------------------------------------------------------------------+\r\n```\r\n\r\n### Information\r\n\r\n- [ ] Docker\r\n- [X] The CLI directly\r\n\r\n### Tasks\r\n\r\n- [X] An officially supported command\r\n- [ ] My own modifications\r\n\r\n### Reproduction\r\n\r\nFollowed steps found within the local install instructions (https://github.com/huggingface/text-generation-inference#local-install), in addition to a couple undocumented make steps \r\n\r\n1. Fork & pull from main branch, v1.3.4\r\n2. Install and test Rust: 1.70.0 (90c541806 2023-05-31)\r\n3. Install & test Conda: 23.11.0\r\n5. Install and test libprotoc: 3.21.12\r\n6. Execute `export BUILD_EXTENSIONS=True`\r\n7. Execute `make install`\r\n8. Execute `cd server && make install-vllm-cuda && cd ..`\r\n9. Update source code for paged attention [as described here](https://github.com/huggingface/text-generation-inference/pull/1386/commits/ad7f8396732c12368c7961e15fe2287f1aa42db3)\r\n10. 
Execute `cd server && make install-flash-attention && cd ..`\r\n11. Execute `make run-falcon-7b-instruct`, which yields the following message:\r\n\r\n_Flash Attention V2 is not installed.\r\nUse the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) or install flash attention v2 with `cd server && make install install-flash-attention-v2`_\r\n\r\n12. Execute `cd server && make install install-flash-attention-v2`, which yields the following error message:\r\n\r\n_make: *** No rule to make target 'install-flash-attention-v2'. Stop._\r\n\r\n### Expected behavior\r\n\r\nMake target should be found and executed. Solution below.\r\n\r\n**SOLUTION**: \r\n\r\nThe file that issues the warning, `server/text_generation_server/utils/flash_attn.py`, is missing the proper make target suffix. It should be: \r\n\r\n`make install install-flash-attention-v2-cuda` or `install-flash-attention-v2-rocm`\n", "before_files": [{"content": "import os\nimport torch\n\nfrom loguru import logger\n\nfrom text_generation_server.utils.import_utils import IS_CUDA_SYSTEM, IS_ROCM_SYSTEM\n\nif os.getenv(\"USE_FLASH_ATTENTION\", \"\").lower() == \"false\":\n raise ImportError(\"`USE_FLASH_ATTENTION` is false.\")\n\nif not torch.cuda.is_available():\n raise ImportError(\"CUDA is not available\")\n\nmajor, minor = torch.cuda.get_device_capability()\nis_sm75 = major == 7 and minor == 5\nis_sm8x = major == 8 and minor >= 0\nis_sm90 = major == 9 and minor == 0\n\nHAS_FLASH_ATTN = False\nHAS_FLASH_ATTN_V2_CUDA = False\nHAS_FLASH_ATTN_V2_ROCM = False\ntry:\n try:\n import flash_attn_2_cuda\n except ImportError:\n raise ImportError(\n \"Flash Attention V2 is not installed.\\n\"\n \"Use the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) \"\n \"or install flash attention v2 with `cd server && make install install-flash-attention-v2`\"\n )\n if not (is_sm8x or is_sm90):\n raise ImportError(\n f\"GPU with CUDA capability {major} {minor} is not supported for \"\n \"Flash Attention V2\"\n )\n HAS_FLASH_ATTN_V2_CUDA = IS_CUDA_SYSTEM\n HAS_FLASH_ATTN_V2_ROCM = IS_ROCM_SYSTEM\nexcept ImportError as e:\n try:\n import flash_attn_cuda\n except ImportError:\n raise ImportError(\n \"Flash Attention is not installed.\\n\"\n \"Use the official Docker image (ghcr.io/huggingface/text-generation-inference:latest) \"\n \"or install flash attention with `cd server && make install install-flash-attention`\"\n ) from e\n\n if IS_CUDA_SYSTEM and not (is_sm75 or is_sm8x or is_sm90):\n raise ImportError(\n f\"GPU with CUDA capability {major} {minor} is not supported\"\n ) from e\n elif IS_ROCM_SYSTEM:\n for idx in range(torch.cuda.device_count()):\n if \"MI210\" not in torch.cuda.get_device_name(\n idx\n ) and \"MI250\" not in torch.cuda.get_device_name(idx):\n raise ImportError(\n f\"AMD GPU {torch.cuda.get_device_name(idx)} does not support flash-attention\"\n )\n\n logger.warning(f\"Unable to use Flash Attention V2: {e}\")\n HAS_FLASH_ATTN = True\n\n\ndef attention(\n q,\n k,\n v,\n out,\n cu_seqlens,\n max_s,\n softmax_scale,\n window_size_left=-1,\n):\n if window_size_left <= 0 and window_size_left != -1:\n raise ValueError(\"`window_size_left` must be > 0 or -1\")\n\n if HAS_FLASH_ATTN_V2_CUDA:\n return flash_attn_2_cuda.varlen_fwd(\n q,\n k,\n v,\n out,\n cu_seqlens,\n cu_seqlens,\n max_s,\n max_s,\n 0.0,\n softmax_scale,\n False,\n True,\n window_size_left,\n 0,\n False,\n None,\n )\n elif HAS_FLASH_ATTN_V2_ROCM:\n if window_size_left != -1:\n raise ValueError(\n f\"RoCm version of Flash 
Attention v2 does not support window attention (window_size_left != -1, got window_size_left={window_size_left}).\"\n )\n\n # RoCm flash API does not take the window_size_left and window_size_right arguments.\n return flash_attn_2_cuda.varlen_fwd(\n q,\n k,\n v,\n out,\n cu_seqlens,\n cu_seqlens,\n max_s,\n max_s,\n 0.0,\n softmax_scale,\n False,\n True,\n False,\n None,\n )\n elif HAS_FLASH_ATTN:\n if window_size_left != -1:\n raise NotImplementedError(\n \"window_size_left is only available with flash attn v2\"\n )\n\n # Flash attention v1 requires q, k and v to have the same number of heads\n if k.shape[1] != q.shape[1]:\n # MQA expand\n if k.shape[1] == 1:\n k = k.expand(-1, q.shape[1], -1)\n # Grouped attention reshape\n else:\n original_shape = k.shape\n k = (\n k.unsqueeze(2)\n .expand(-1, -1, q.shape[1] // k.shape[1], -1)\n .reshape(original_shape[0], -1, original_shape[2])\n )\n if v.shape[1] != q.shape[1]:\n # MQA expand\n if v.shape[1] == 1:\n v = v.expand(-1, q.shape[1], -1)\n # Grouped attention reshape\n else:\n original_shape = v.shape\n v = (\n v.unsqueeze(2)\n .expand(-1, -1, q.shape[1] // v.shape[1], -1)\n .reshape(original_shape[0], -1, original_shape[2])\n )\n\n return flash_attn_cuda.fwd(\n q,\n k,\n v,\n out,\n cu_seqlens,\n cu_seqlens,\n max_s,\n max_s,\n 0.0,\n softmax_scale,\n False,\n True,\n False,\n 0,\n None,\n )\n\n raise NotImplementedError(\"flash attention is not installed\")\n", "path": "server/text_generation_server/utils/flash_attn.py"}]} | 3,195 | 230 |
gh_patches_debug_22811 | rasdani/github-patches | git_diff | goauthentik__authentik-9441 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Redis (Unix) Socket?
**Describe your question/**
Is it possible to run an authentik docker container (server and worker) with a redis unix socket rather than tcp?
Directly overwriting redis variables via user_settings.py doesn't appear to modify anything despite @BeryJu suggesting in [pull request #4036](https://github.com/goauthentik/authentik/pull/4036) it should be viable.
**Relevant infos**
Docker compose:
version: '3.4'
services:
  server:
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:latest
    restart: unless-stopped
    command: server
    environment:
      AUTHENTIK_POSTGRESQL__HOST: ${PG_HOST}
      AUTHENTIK_POSTGRESQL__USER: ${PG_USER}
      AUTHENTIK_POSTGRESQL__NAME: ${PG_NAME}
      AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
      AUTHENTIK_REDIS__HOST: ${REDIS_HOST}
      AUTHENTIK_SECRET_KEY: ${AUTHENTIK_SECRET_KEY}
      AUTHENTIK_LISTEN__HTTP: ${AUTHENTIK_LISTEN__HTTP}
      AUTHENTIK_LISTEN__HTTPS: ${AUTHENTIK_LISTEN__HTTPS}
    volumes:
      - ./media:/media
      - ./custom-templates:/templates
      - /run/redis/redis-server.sock:/run/redis/redis-server.sock
      - /var/run/postgresql/.s.PGSQL.5432:/var/run/postgresql/.s.PGSQL.5432
      - ./user_settings.py:/data/user_settings.py
    ports:
      - "${AUTHENTIK_PORT_HTTP}:${AUTHENTIK_PORT_HTTP}"
      - "${AUTHENTIK_PORT_HTTPS}:${AUTHENTIK_PORT_HTTPS}"
  worker:
    image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:latest
    restart: unless-stopped
    command: worker
    environment:
      AUTHENTIK_POSTGRESQL__HOST: ${PG_HOST}
      AUTHENTIK_POSTGRESQL__USER: ${PG_USER}
      AUTHENTIK_POSTGRESQL__NAME: ${PG_NAME}
      AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}
      AUTHENTIK_REDIS__HOST: ${REDIS_HOST}
      AUTHENTIK_SECRET_KEY: ${AUTHENTIK_SECRET_KEY}
    user: authentik
    volumes:
      - ./media:/media
      - ./certs:/certs
      - ./custom-templates:/templates
      - /run/docker.sock:/var/run/docker.sock
      - /run/redis/redis-server.sock:/run/redis/redis-server.sock
      - /var/run/postgresql/.s.PGSQL.5432:/var/run/postgresql/.s.PGSQL.5432
      - ./user_settings.py:/data/user_settings.py
I didn't include the definition for ${REDIS_HOST} as sockets cannot be passed via environmental variables per [pull 4036](https://github.com/goauthentik/authentik/pull/4036).
**Logs**
DBG undefined | event=Loaded config logger=authentik.lib.config timestamp=1679734632.5913134 file=/authentik/lib/default.yml
DBG undefined | event=Loaded environment variables logger=authentik.lib.config timestamp=1679734632.5915806 count=7
INF undefined | event=Starting authentik bootstrap logger=authentik.lib.config timestamp=1679734632.59165
INF undefined | event=PostgreSQL connection successful logger=authentik.lib.config timestamp=1679734632.5988
INF undefined | event=Redis Connection failed, retrying... (Error 99 connecting to localhost:6379. Cannot assign requested address.) logger=authentik.lib.config timestamp=1679734633.600902 redis_url=redis://:@localhost:6379/0
**Version and Deployment (please complete the following information):**
- authentik version: 2023.3
- Deployment: docker-compose
**Additional context**
docker exec -it authentik-stack-server-1 bash
authentik@283a42f97c09:/$ cd /data
authentik@283a42f97c09:/data$ ls -la
total 27
drwxr-xr-x 2 root root 3 Mar 25 08:24 .
drwxr-xr-x 33 root root 39 Mar 25 08:24 ..
-rw-rw-r-- 1 1022 _runit-log 16463 Mar 25 08:45 user_settings.py
authentik@283a42f97c09:/data$ cat user_settings.py | grep redis
REDIS_PROTOCOL_PREFIX = "redis://"
if CONFIG.y_bool("redis.tls", False):
REDIS_PROTOCOL_PREFIX = "rediss://"
REDIS_CELERY_TLS_REQUIREMENTS = f"?ssl_cert_reqs={CONFIG.y('redis.tls_reqs')}"
_redis_url = (
f"{quote_plus(CONFIG.y('redis.password'))}@{quote_plus(CONFIG.y('redis.host'))}:"
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": 'unix://run/redis/redis-server.sock',
"TIMEOUT": int(CONFIG.y("redis.cache_timeout", 300)),
"OPTIONS": {"CLIENT_CLASS": "django_redis.client.DefaultClient"},
"BACKEND": "channels_redis.core.RedisChannelLayer",
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": 'unix://run/redis/redis-server.sock',
CELERY_BROKER_URL = 'redis+socket:///run/redis/redis-server.sock'
CELERY_RESULT_BACKEND = 'redis+socket:///run/redis/redis-server.sock'
"redis": "WARNING",
authentik@283a42f97c09:/data$
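
For illustration, redis-py itself accepts a `unix://` URL, so the client is not the limitation once the bootstrap check is handed a socket path (the path below is the one mounted in the compose file above):

```python
# Minimal check, assuming the redis-py unix:// URL scheme; db 0 is a guess.
from redis import Redis

redis = Redis.from_url("unix:///run/redis/redis-server.sock?db=0")
print(redis.ping())  # True if the socket is reachable
```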
</issue>
<code>
[start of lifecycle/wait_for_db.py]
1 #!/usr/bin/env python
2 """This file needs to be run from the root of the project to correctly
3 import authentik. This is done by the dockerfile."""
4 from sys import exit as sysexit
5 from time import sleep
6 from urllib.parse import quote_plus
7
8 from psycopg import OperationalError, connect
9 from redis import Redis
10 from redis.exceptions import RedisError
11
12 from authentik.lib.config import CONFIG, redis_url
13
14
15 def check_postgres():
16 while True:
17 try:
18 conn = connect(
19 dbname=CONFIG.get("postgresql.name"),
20 user=CONFIG.get("postgresql.user"),
21 password=CONFIG.get("postgresql.password"),
22 host=CONFIG.get("postgresql.host"),
23 port=CONFIG.get_int("postgresql.port"),
24 sslmode=CONFIG.get("postgresql.sslmode"),
25 sslrootcert=CONFIG.get("postgresql.sslrootcert"),
26 sslcert=CONFIG.get("postgresql.sslcert"),
27 sslkey=CONFIG.get("postgresql.sslkey"),
28 )
29 conn.cursor()
30 break
31 except OperationalError as exc:
32 sleep(1)
33 CONFIG.log("info", f"PostgreSQL connection failed, retrying... ({exc})")
34 CONFIG.log("info", "PostgreSQL connection successful")
35
36
37 def check_redis():
38 url = redis_url(CONFIG.get("redis.db"))
39 while True:
40 try:
41 redis = Redis.from_url(url)
42 redis.ping()
43 break
44 except RedisError as exc:
45 sleep(1)
46 sanitized_url = url.replace(quote_plus(CONFIG.get("redis.password")), "******")
47 CONFIG.log(
48 "info", f"Redis Connection failed, retrying... ({exc})", redis_url=sanitized_url
49 )
50 CONFIG.log("info", "Redis Connection successful")
51
52
53 def wait_for_db():
54 CONFIG.log("info", "Starting authentik bootstrap")
55 # Sanity check, ensure SECRET_KEY is set before we even check for database connectivity
56 if CONFIG.get("secret_key") is None or len(CONFIG.get("secret_key")) == 0:
57 CONFIG.log("info", "----------------------------------------------------------------------")
58 CONFIG.log("info", "Secret key missing, check https://goauthentik.io/docs/installation/.")
59 CONFIG.log("info", "----------------------------------------------------------------------")
60 sysexit(1)
61 check_postgres()
62 check_redis()
63 CONFIG.log("info", "Finished authentik bootstrap")
64
65
66 if __name__ == "__main__":
67 wait_for_db()
68
[end of lifecycle/wait_for_db.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/lifecycle/wait_for_db.py b/lifecycle/wait_for_db.py
--- a/lifecycle/wait_for_db.py
+++ b/lifecycle/wait_for_db.py
@@ -3,7 +3,6 @@
import authentik. This is done by the dockerfile."""
from sys import exit as sysexit
from time import sleep
-from urllib.parse import quote_plus
from psycopg import OperationalError, connect
from redis import Redis
@@ -35,7 +34,7 @@
def check_redis():
- url = redis_url(CONFIG.get("redis.db"))
+ url = CONFIG.get("cache.url") or redis_url(CONFIG.get("redis.db"))
while True:
try:
redis = Redis.from_url(url)
@@ -43,10 +42,7 @@
break
except RedisError as exc:
sleep(1)
- sanitized_url = url.replace(quote_plus(CONFIG.get("redis.password")), "******")
- CONFIG.log(
- "info", f"Redis Connection failed, retrying... ({exc})", redis_url=sanitized_url
- )
+ CONFIG.log("info", f"Redis Connection failed, retrying... ({exc})")
CONFIG.log("info", "Redis Connection successful")
| {"golden_diff": "diff --git a/lifecycle/wait_for_db.py b/lifecycle/wait_for_db.py\n--- a/lifecycle/wait_for_db.py\n+++ b/lifecycle/wait_for_db.py\n@@ -3,7 +3,6 @@\n import authentik. This is done by the dockerfile.\"\"\"\n from sys import exit as sysexit\n from time import sleep\n-from urllib.parse import quote_plus\n \n from psycopg import OperationalError, connect\n from redis import Redis\n@@ -35,7 +34,7 @@\n \n \n def check_redis():\n- url = redis_url(CONFIG.get(\"redis.db\"))\n+ url = CONFIG.get(\"cache.url\") or redis_url(CONFIG.get(\"redis.db\"))\n while True:\n try:\n redis = Redis.from_url(url)\n@@ -43,10 +42,7 @@\n break\n except RedisError as exc:\n sleep(1)\n- sanitized_url = url.replace(quote_plus(CONFIG.get(\"redis.password\")), \"******\")\n- CONFIG.log(\n- \"info\", f\"Redis Connection failed, retrying... ({exc})\", redis_url=sanitized_url\n- )\n+ CONFIG.log(\"info\", f\"Redis Connection failed, retrying... ({exc})\")\n CONFIG.log(\"info\", \"Redis Connection successful\")\n", "issue": "Redis (Unix) Socket?\n**Describe your question/**\r\nIs it possible to run an authentik docker container (server and worker) with a redis unix socket rather than tcp?\r\nDirectly overwriting redis variables via user_settings.py doesn't appear to modify anything despite @BeryJu suggesting in [pull request #4036](https://github.com/goauthentik/authentik/pull/4036) it should be viable.\r\n\r\n**Relevant infos**\r\nDocker compose:\r\nversion: '3.4'\r\n\r\nservices:\r\n server:\r\n image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:latest\r\n restart: unless-stopped\r\n command: server\r\n environment:\r\n AUTHENTIK_POSTGRESQL__HOST: ${PG_HOST}\r\n AUTHENTIK_POSTGRESQL__USER: ${PG_USER}\r\n AUTHENTIK_POSTGRESQL__NAME: ${PG_NAME}\r\n AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}\r\n AUTHENTIK_REDIS__HOST: ${REDIS_HOST}\r\n AUTHENTIK_SECRET_KEY: ${AUTHENTIK_SECRET_KEY}\r\n AUTHENTIK_LISTEN__HTTP: ${AUTHENTIK_LISTEN__HTTP}\r\n AUTHENTIK_LISTEN__HTTPS: ${AUTHENTIK_LISTEN__HTTPS}\r\n volumes:\r\n - ./media:/media\r\n - ./custom-templates:/templates\r\n - /run/redis/redis-server.sock:/run/redis/redis-server.sock\r\n - /var/run/postgresql/.s.PGSQL.5432:/var/run/postgresql/.s.PGSQL.5432\r\n - ./user_settings.py:/data/user_settings.py\r\n ports:\r\n - \"${AUTHENTIK_PORT_HTTP}:${AUTHENTIK_PORT_HTTP}\"\r\n - \"${AUTHENTIK_PORT_HTTPS}:${AUTHENTIK_PORT_HTTPS}\"\r\n worker:\r\n image: ${AUTHENTIK_IMAGE:-ghcr.io/goauthentik/server}:latest\r\n restart: unless-stopped\r\n command: worker\r\n environment:\r\n AUTHENTIK_POSTGRESQL__HOST: ${PG_HOST}\r\n AUTHENTIK_POSTGRESQL__USER: ${PG_USER}\r\n AUTHENTIK_POSTGRESQL__NAME: ${PG_NAME}\r\n AUTHENTIK_POSTGRESQL__PASSWORD: ${PG_PASS}\r\n AUTHENTIK_REDIS__HOST: ${REDIS_HOST}\r\n AUTHENTIK_SECRET_KEY: ${AUTHENTIK_SECRET_KEY}\r\n user: authentik\r\n volumes:\r\n - ./media:/media\r\n - ./certs:/certs\r\n - ./custom-templates:/templates\r\n - /run/docker.sock:/var/run/docker.sock\r\n - /run/redis/redis-server.sock:/run/redis/redis-server.sock\r\n - /var/run/postgresql/.s.PGSQL.5432:/var/run/postgresql/.s.PGSQL.5432\r\n - ./user_settings.py:/data/user_settings.py\r\n\r\nI didn't include the definition for ${REDIS_HOST} as sockets cannot be passed via environmental variables per [pull 4036](https://github.com/goauthentik/authentik/pull/4036).\r\n\r\n**Logs**\r\nDBG undefined | event=Loaded config logger=authentik.lib.config timestamp=1679734632.5913134 file=/authentik/lib/default.yml \r\nDBG undefined | event=Loaded environment variables logger=authentik.lib.config 
timestamp=1679734632.5915806 count=7 \r\nINF undefined | event=Starting authentik bootstrap logger=authentik.lib.config timestamp=1679734632.59165 \r\nINF undefined | event=PostgreSQL connection successful logger=authentik.lib.config timestamp=1679734632.5988 \r\nINF undefined | event=Redis Connection failed, retrying... (Error 99 connecting to localhost:6379. Cannot assign requested address.) logger=authentik.lib.config timestamp=1679734633.600902 redis_url=redis://:@localhost:6379/0 \r\n\r\n**Version and Deployment (please complete the following information):**\r\n - authentik version: 2023.3\r\n - Deployment: docker-compose\r\n\r\n**Additional context**\r\ndocker exec -it authentik-stack-server-1 bash\r\nauthentik@283a42f97c09:/$ cd /data\r\nauthentik@283a42f97c09:/data$ ls -la\r\ntotal 27\r\ndrwxr-xr-x 2 root root 3 Mar 25 08:24 .\r\ndrwxr-xr-x 33 root root 39 Mar 25 08:24 ..\r\n-rw-rw-r-- 1 1022 _runit-log 16463 Mar 25 08:45 user_settings.py\r\nauthentik@283a42f97c09:/data$ cat user_settings.py | grep redis\r\nREDIS_PROTOCOL_PREFIX = \"redis://\"\r\nif CONFIG.y_bool(\"redis.tls\", False):\r\n REDIS_PROTOCOL_PREFIX = \"rediss://\"\r\n REDIS_CELERY_TLS_REQUIREMENTS = f\"?ssl_cert_reqs={CONFIG.y('redis.tls_reqs')}\"\r\n_redis_url = (\r\n f\"{quote_plus(CONFIG.y('redis.password'))}@{quote_plus(CONFIG.y('redis.host'))}:\"\r\n \"BACKEND\": \"django_redis.cache.RedisCache\",\r\n \"LOCATION\": 'unix://run/redis/redis-server.sock',\r\n \"TIMEOUT\": int(CONFIG.y(\"redis.cache_timeout\", 300)),\r\n \"OPTIONS\": {\"CLIENT_CLASS\": \"django_redis.client.DefaultClient\"},\r\n \"BACKEND\": \"channels_redis.core.RedisChannelLayer\",\r\n \"BACKEND\": \"django_redis.cache.RedisCache\",\r\n \"LOCATION\": 'unix://run/redis/redis-server.sock',\r\nCELERY_BROKER_URL = 'redis+socket:///run/redis/redis-server.sock'\r\nCELERY_RESULT_BACKEND = 'redis+socket:///run/redis/redis-server.sock'\r\n \"redis\": \"WARNING\",\r\nauthentik@283a42f97c09:/data$ \n", "before_files": [{"content": "#!/usr/bin/env python\n\"\"\"This file needs to be run from the root of the project to correctly\nimport authentik. This is done by the dockerfile.\"\"\"\nfrom sys import exit as sysexit\nfrom time import sleep\nfrom urllib.parse import quote_plus\n\nfrom psycopg import OperationalError, connect\nfrom redis import Redis\nfrom redis.exceptions import RedisError\n\nfrom authentik.lib.config import CONFIG, redis_url\n\n\ndef check_postgres():\n while True:\n try:\n conn = connect(\n dbname=CONFIG.get(\"postgresql.name\"),\n user=CONFIG.get(\"postgresql.user\"),\n password=CONFIG.get(\"postgresql.password\"),\n host=CONFIG.get(\"postgresql.host\"),\n port=CONFIG.get_int(\"postgresql.port\"),\n sslmode=CONFIG.get(\"postgresql.sslmode\"),\n sslrootcert=CONFIG.get(\"postgresql.sslrootcert\"),\n sslcert=CONFIG.get(\"postgresql.sslcert\"),\n sslkey=CONFIG.get(\"postgresql.sslkey\"),\n )\n conn.cursor()\n break\n except OperationalError as exc:\n sleep(1)\n CONFIG.log(\"info\", f\"PostgreSQL connection failed, retrying... ({exc})\")\n CONFIG.log(\"info\", \"PostgreSQL connection successful\")\n\n\ndef check_redis():\n url = redis_url(CONFIG.get(\"redis.db\"))\n while True:\n try:\n redis = Redis.from_url(url)\n redis.ping()\n break\n except RedisError as exc:\n sleep(1)\n sanitized_url = url.replace(quote_plus(CONFIG.get(\"redis.password\")), \"******\")\n CONFIG.log(\n \"info\", f\"Redis Connection failed, retrying... 
({exc})\", redis_url=sanitized_url\n )\n CONFIG.log(\"info\", \"Redis Connection successful\")\n\n\ndef wait_for_db():\n CONFIG.log(\"info\", \"Starting authentik bootstrap\")\n # Sanity check, ensure SECRET_KEY is set before we even check for database connectivity\n if CONFIG.get(\"secret_key\") is None or len(CONFIG.get(\"secret_key\")) == 0:\n CONFIG.log(\"info\", \"----------------------------------------------------------------------\")\n CONFIG.log(\"info\", \"Secret key missing, check https://goauthentik.io/docs/installation/.\")\n CONFIG.log(\"info\", \"----------------------------------------------------------------------\")\n sysexit(1)\n check_postgres()\n check_redis()\n CONFIG.log(\"info\", \"Finished authentik bootstrap\")\n\n\nif __name__ == \"__main__\":\n wait_for_db()\n", "path": "lifecycle/wait_for_db.py"}]} | 2,532 | 266 |
gh_patches_debug_6997 | rasdani/github-patches | git_diff | mkdocs__mkdocs-1878 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Theme files override docs_dir files on Windows
As investigated and discussed in https://github.com/squidfunk/mkdocs-material/issues/1282, custom files in docs_dir are overridden by theme files. 
For example, if you use the default MkDocs theme and create a custom `favicon.ico` in `docs/img`, the custom icon does not appear in the output. It is overwritten by the theme's own favicon.
The issue was already brought up in https://github.com/mkdocs/mkdocs/issues/1671 and fixed by https://github.com/mkdocs/mkdocs/pull/1672.
However, the fix (https://github.com/mkdocs/mkdocs/pull/1672) **only works on Unix systems (including Mac). It does not work on Windows.**
Windows handles path names differently, so I suppose the issue must be caused by that. See especially the use of `os.path` in `files.py` (https://github.com/mkdocs/mkdocs/commit/f8ac3c7ee).
I had a similar issue in mkdocs-exclude, see here: https://github.com/apenwarr/mkdocs-exclude/issues/2
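To make the suspected mismatch concrete, here is a minimal sketch (illustrative only, not MkDocs code; `ntpath` stands in for `os.path` on Windows so it runs on any platform). `Files.src_paths` keys are stored after `os.path.normpath`, which uses backslashes on Windows, while Jinja's `env.list_templates()` reports forward-slash paths, so the `path not in self` check in `add_files_from_theme` never matches nested files:

```python
import ntpath  # behaves like os.path does on Windows, importable everywhere

# Keys in Files.src_paths are normalized with os.path.normpath -> backslashes on Windows
src_paths = {ntpath.normpath("img/favicon.ico"): "<File for docs/img/favicon.ico>"}

# Jinja's env.list_templates() always yields forward-slash paths
template_path = "img/favicon.ico"

print(template_path in src_paths)                   # False -> docs_dir file is not seen, theme file wins
print(ntpath.normpath(template_path) in src_paths)  # True once the lookup path is normalized as well
```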
</issue>
<code>
[start of mkdocs/structure/files.py]
1 # coding: utf-8
2
3 from __future__ import unicode_literals
4 import fnmatch
5 import os
6 import logging
7 from functools import cmp_to_key
8
9 from mkdocs import utils
10
11
12 log = logging.getLogger(__name__)
13 log.addFilter(utils.warning_filter)
14
15
16 class Files(object):
17 """ A collection of File objects. """
18 def __init__(self, files):
19 self._files = files
20 self.src_paths = {file.src_path: file for file in files}
21
22 def __iter__(self):
23 return iter(self._files)
24
25 def __len__(self):
26 return len(self._files)
27
28 def __contains__(self, path):
29 return path in self.src_paths
30
31 def get_file_from_path(self, path):
32 """ Return a File instance with File.src_path equal to path. """
33 return self.src_paths.get(os.path.normpath(path))
34
35 def append(self, file):
36 """ Append file to Files collection. """
37 self._files.append(file)
38 self.src_paths[file.src_path] = file
39
40 def copy_static_files(self, dirty=False):
41 """ Copy static files from source to destination. """
42 for file in self:
43 if not file.is_documentation_page():
44 file.copy_file(dirty)
45
46 def documentation_pages(self):
47 """ Return iterable of all Markdown page file objects. """
48 return [file for file in self if file.is_documentation_page()]
49
50 def static_pages(self):
51 """ Return iterable of all static page file objects. """
52 return [file for file in self if file.is_static_page()]
53
54 def media_files(self):
55 """ Return iterable of all file objects which are not documentation or static pages. """
56 return [file for file in self if file.is_media_file()]
57
58 def javascript_files(self):
59 """ Return iterable of all javascript file objects. """
60 return [file for file in self if file.is_javascript()]
61
62 def css_files(self):
63 """ Return iterable of all CSS file objects. """
64 return [file for file in self if file.is_css()]
65
66 def add_files_from_theme(self, env, config):
67 """ Retrieve static files from Jinja environment and add to collection. """
68 def filter(name):
69 patterns = ['.*', '*.py', '*.pyc', '*.html', '*readme*', 'mkdocs_theme.yml']
70 patterns.extend('*{0}'.format(x) for x in utils.markdown_extensions)
71 patterns.extend(config['theme'].static_templates)
72 for pattern in patterns:
73 if fnmatch.fnmatch(name.lower(), pattern):
74 return False
75 return True
76 for path in env.list_templates(filter_func=filter):
77 # Theme files do not override docs_dir files
78 if path not in self:
79 for dir in config['theme'].dirs:
80 # Find the first theme dir which contains path
81 if os.path.isfile(os.path.join(dir, path)):
82 self.append(File(path, dir, config['site_dir'], config['use_directory_urls']))
83 break
84
85
86 class File(object):
87 """
88 A MkDocs File object.
89
90 Points to the source and destination locations of a file.
91
92 The `path` argument must be a path that exists relative to `src_dir`.
93
94 The `src_dir` and `dest_dir` must be absolute paths on the local file system.
95
96 The `use_directory_urls` argument controls how destination paths are generated. If `False`, a Markdown file is
97 mapped to an HTML file of the same name (the file extension is changed to `.html`). If True, a Markdown file is
98 mapped to an HTML index file (`index.html`) nested in a directory using the "name" of the file in `path`. The
99 `use_directory_urls` argument has no effect on non-Markdown files.
100
101 File objects have the following properties, which are Unicode strings:
102
103 File.src_path
104 The pure path of the source file relative to the source directory.
105
106 File.abs_src_path
107 The absolute concrete path of the source file.
108
109 File.dest_path
110 The pure path of the destination file relative to the destination directory.
111
112 File.abs_dest_path
113 The absolute concrete path of the destination file.
114
115 File.url
116 The url of the destination file relative to the destination directory as a string.
117 """
118 def __init__(self, path, src_dir, dest_dir, use_directory_urls):
119 self.page = None
120 self.src_path = os.path.normpath(path)
121 self.abs_src_path = os.path.normpath(os.path.join(src_dir, self.src_path))
122 self.name = self._get_stem()
123 self.dest_path = self._get_dest_path(use_directory_urls)
124 self.abs_dest_path = os.path.normpath(os.path.join(dest_dir, self.dest_path))
125 self.url = self._get_url(use_directory_urls)
126
127 def __eq__(self, other):
128
129 def sub_dict(d):
130 return dict((key, value) for key, value in d.items() if key in ['src_path', 'abs_src_path', 'url'])
131
132 return (isinstance(other, self.__class__) and sub_dict(self.__dict__) == sub_dict(other.__dict__))
133
134 def __ne__(self, other):
135 return not self.__eq__(other)
136
137 def _get_stem(self):
138 """ Return the name of the file without it's extension. """
139 filename = os.path.basename(self.src_path)
140 stem, ext = os.path.splitext(filename)
141 return 'index' if stem in ('index', 'README') else stem
142
143 def _get_dest_path(self, use_directory_urls):
144 """ Return destination path based on source path. """
145 if self.is_documentation_page():
146 if use_directory_urls:
147 parent, filename = os.path.split(self.src_path)
148 if self.name == 'index':
149 # index.md or README.md => index.html
150 return os.path.join(parent, 'index.html')
151 else:
152 # foo.md => foo/index.html
153 return os.path.join(parent, self.name, 'index.html')
154 else:
155 # foo.md => foo.html
156 root, ext = os.path.splitext(self.src_path)
157 return root + '.html'
158 return self.src_path
159
160 def _get_url(self, use_directory_urls):
161 """ Return url based in destination path. """
162 url = self.dest_path.replace(os.path.sep, '/')
163 dirname, filename = os.path.split(url)
164 if use_directory_urls and filename == 'index.html':
165 if dirname == '':
166 url = '.'
167 else:
168 url = dirname + '/'
169 return utils.urlquote(url)
170
171 def url_relative_to(self, other):
172 """ Return url for file relative to other file. """
173 return utils.get_relative_url(self.url, other.url if isinstance(other, File) else other)
174
175 def copy_file(self, dirty=False):
176 """ Copy source file to destination, ensuring parent directories exist. """
177 if dirty and not self.is_modified():
178 log.debug("Skip copying unmodified file: '{}'".format(self.src_path))
179 else:
180 log.debug("Copying media file: '{}'".format(self.src_path))
181 utils.copy_file(self.abs_src_path, self.abs_dest_path)
182
183 def is_modified(self):
184 if os.path.isfile(self.abs_dest_path):
185 return os.path.getmtime(self.abs_dest_path) < os.path.getmtime(self.abs_src_path)
186 return True
187
188 def is_documentation_page(self):
189 """ Return True if file is a Markdown page. """
190 return os.path.splitext(self.src_path)[1] in utils.markdown_extensions
191
192 def is_static_page(self):
193 """ Return True if file is a static page (html, xml, json). """
194 return os.path.splitext(self.src_path)[1] in (
195 '.html',
196 '.htm',
197 '.xml',
198 '.json',
199 )
200
201 def is_media_file(self):
202 """ Return True if file is not a documentation or static page. """
203 return not (self.is_documentation_page() or self.is_static_page())
204
205 def is_javascript(self):
206 """ Return True if file is a JavaScript file. """
207 return os.path.splitext(self.src_path)[1] in (
208 '.js',
209 '.javascript',
210 )
211
212 def is_css(self):
213 """ Return True if file is a CSS file. """
214 return os.path.splitext(self.src_path)[1] in (
215 '.css',
216 )
217
218
219 def get_files(config):
220 """ Walk the `docs_dir` and return a Files collection. """
221 files = []
222 exclude = ['.*', '/templates']
223
224 for source_dir, dirnames, filenames in os.walk(config['docs_dir'], followlinks=True):
225 relative_dir = os.path.relpath(source_dir, config['docs_dir'])
226
227 for dirname in list(dirnames):
228 path = os.path.normpath(os.path.join(relative_dir, dirname))
229 # Skip any excluded directories
230 if _filter_paths(basename=dirname, path=path, is_dir=True, exclude=exclude):
231 dirnames.remove(dirname)
232 dirnames.sort()
233
234 for filename in _sort_files(filenames):
235 path = os.path.normpath(os.path.join(relative_dir, filename))
236 # Skip any excluded files
237 if _filter_paths(basename=filename, path=path, is_dir=False, exclude=exclude):
238 continue
239 # Skip README.md if an index file also exists in dir
240 if filename.lower() == 'readme.md' and 'index.md' in filenames:
241 log.warning("Both index.md and readme.md found. Skipping readme.md from {}".format(source_dir))
242 continue
243 files.append(File(path, config['docs_dir'], config['site_dir'], config['use_directory_urls']))
244
245 return Files(files)
246
247
248 def _sort_files(filenames):
249 """ Always sort `index` or `README` as first filename in list. """
250
251 def compare(x, y):
252 if x == y:
253 return 0
254 if os.path.splitext(y)[0] in ['index', 'README']:
255 return 1
256 if os.path.splitext(x)[0] in ['index', 'README'] or x < y:
257 return -1
258 return 1
259
260 return sorted(filenames, key=cmp_to_key(compare))
261
262
263 def _filter_paths(basename, path, is_dir, exclude):
264 """ .gitignore style file filtering. """
265 for item in exclude:
266 # Items ending in '/' apply only to directories.
267 if item.endswith('/') and not is_dir:
268 continue
269 # Items starting with '/' apply to the whole path.
270 # In any other cases just the basename is used.
271 match = path if item.startswith('/') else basename
272 if fnmatch.fnmatch(match, item.strip('/')):
273 return True
274 return False
275
[end of mkdocs/structure/files.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/mkdocs/structure/files.py b/mkdocs/structure/files.py
--- a/mkdocs/structure/files.py
+++ b/mkdocs/structure/files.py
@@ -75,6 +75,7 @@
return True
for path in env.list_templates(filter_func=filter):
# Theme files do not override docs_dir files
+ path = os.path.normpath(path)
if path not in self:
for dir in config['theme'].dirs:
# Find the first theme dir which contains path
| {"golden_diff": "diff --git a/mkdocs/structure/files.py b/mkdocs/structure/files.py\n--- a/mkdocs/structure/files.py\n+++ b/mkdocs/structure/files.py\n@@ -75,6 +75,7 @@\n return True\n for path in env.list_templates(filter_func=filter):\n # Theme files do not override docs_dir files\n+ path = os.path.normpath(path)\n if path not in self:\n for dir in config['theme'].dirs:\n # Find the first theme dir which contains path\n", "issue": "Theme files override docs_dir files on Windows\nAs investigated and discussed in https://github.com/squidfunk/mkdocs-material/issues/1282, custom files in docs_dir are overriden by theme files. \r\n\r\nFor example, if you use the default MkDocs theme and create a custom `favicon.ico` in `docs/img`, the custom icon does not appear in the output. It is overwritten by the theme's own favicon.\r\n\r\nThe issue was already brought up in https://github.com/mkdocs/mkdocs/issues/1671 and fixed by https://github.com/mkdocs/mkdocs/pull/1672.\r\n\r\nHowever, the fix (https://github.com/mkdocs/mkdocs/pull/1672) **only works on Unix systems (including Mac). It does not work on Windows.**\r\n\r\nWindows handles path names differently, so I suppose the issue must be caused by that. See especially the use of `os.path` in `files.py` (https://github.com/mkdocs/mkdocs/commit/f8ac3c7ee).\r\n\r\nI had a similar issue in mkdocs-exclude, see here: https://github.com/apenwarr/mkdocs-exclude/issues/2\r\n\n", "before_files": [{"content": "# coding: utf-8\n\nfrom __future__ import unicode_literals\nimport fnmatch\nimport os\nimport logging\nfrom functools import cmp_to_key\n\nfrom mkdocs import utils\n\n\nlog = logging.getLogger(__name__)\nlog.addFilter(utils.warning_filter)\n\n\nclass Files(object):\n \"\"\" A collection of File objects. \"\"\"\n def __init__(self, files):\n self._files = files\n self.src_paths = {file.src_path: file for file in files}\n\n def __iter__(self):\n return iter(self._files)\n\n def __len__(self):\n return len(self._files)\n\n def __contains__(self, path):\n return path in self.src_paths\n\n def get_file_from_path(self, path):\n \"\"\" Return a File instance with File.src_path equal to path. \"\"\"\n return self.src_paths.get(os.path.normpath(path))\n\n def append(self, file):\n \"\"\" Append file to Files collection. \"\"\"\n self._files.append(file)\n self.src_paths[file.src_path] = file\n\n def copy_static_files(self, dirty=False):\n \"\"\" Copy static files from source to destination. \"\"\"\n for file in self:\n if not file.is_documentation_page():\n file.copy_file(dirty)\n\n def documentation_pages(self):\n \"\"\" Return iterable of all Markdown page file objects. \"\"\"\n return [file for file in self if file.is_documentation_page()]\n\n def static_pages(self):\n \"\"\" Return iterable of all static page file objects. \"\"\"\n return [file for file in self if file.is_static_page()]\n\n def media_files(self):\n \"\"\" Return iterable of all file objects which are not documentation or static pages. \"\"\"\n return [file for file in self if file.is_media_file()]\n\n def javascript_files(self):\n \"\"\" Return iterable of all javascript file objects. \"\"\"\n return [file for file in self if file.is_javascript()]\n\n def css_files(self):\n \"\"\" Return iterable of all CSS file objects. \"\"\"\n return [file for file in self if file.is_css()]\n\n def add_files_from_theme(self, env, config):\n \"\"\" Retrieve static files from Jinja environment and add to collection. 
\"\"\"\n def filter(name):\n patterns = ['.*', '*.py', '*.pyc', '*.html', '*readme*', 'mkdocs_theme.yml']\n patterns.extend('*{0}'.format(x) for x in utils.markdown_extensions)\n patterns.extend(config['theme'].static_templates)\n for pattern in patterns:\n if fnmatch.fnmatch(name.lower(), pattern):\n return False\n return True\n for path in env.list_templates(filter_func=filter):\n # Theme files do not override docs_dir files\n if path not in self:\n for dir in config['theme'].dirs:\n # Find the first theme dir which contains path\n if os.path.isfile(os.path.join(dir, path)):\n self.append(File(path, dir, config['site_dir'], config['use_directory_urls']))\n break\n\n\nclass File(object):\n \"\"\"\n A MkDocs File object.\n\n Points to the source and destination locations of a file.\n\n The `path` argument must be a path that exists relative to `src_dir`.\n\n The `src_dir` and `dest_dir` must be absolute paths on the local file system.\n\n The `use_directory_urls` argument controls how destination paths are generated. If `False`, a Markdown file is\n mapped to an HTML file of the same name (the file extension is changed to `.html`). If True, a Markdown file is\n mapped to an HTML index file (`index.html`) nested in a directory using the \"name\" of the file in `path`. The\n `use_directory_urls` argument has no effect on non-Markdown files.\n\n File objects have the following properties, which are Unicode strings:\n\n File.src_path\n The pure path of the source file relative to the source directory.\n\n File.abs_src_path\n The absolute concrete path of the source file.\n\n File.dest_path\n The pure path of the destination file relative to the destination directory.\n\n File.abs_dest_path\n The absolute concrete path of the destination file.\n\n File.url\n The url of the destination file relative to the destination directory as a string.\n \"\"\"\n def __init__(self, path, src_dir, dest_dir, use_directory_urls):\n self.page = None\n self.src_path = os.path.normpath(path)\n self.abs_src_path = os.path.normpath(os.path.join(src_dir, self.src_path))\n self.name = self._get_stem()\n self.dest_path = self._get_dest_path(use_directory_urls)\n self.abs_dest_path = os.path.normpath(os.path.join(dest_dir, self.dest_path))\n self.url = self._get_url(use_directory_urls)\n\n def __eq__(self, other):\n\n def sub_dict(d):\n return dict((key, value) for key, value in d.items() if key in ['src_path', 'abs_src_path', 'url'])\n\n return (isinstance(other, self.__class__) and sub_dict(self.__dict__) == sub_dict(other.__dict__))\n\n def __ne__(self, other):\n return not self.__eq__(other)\n\n def _get_stem(self):\n \"\"\" Return the name of the file without it's extension. \"\"\"\n filename = os.path.basename(self.src_path)\n stem, ext = os.path.splitext(filename)\n return 'index' if stem in ('index', 'README') else stem\n\n def _get_dest_path(self, use_directory_urls):\n \"\"\" Return destination path based on source path. \"\"\"\n if self.is_documentation_page():\n if use_directory_urls:\n parent, filename = os.path.split(self.src_path)\n if self.name == 'index':\n # index.md or README.md => index.html\n return os.path.join(parent, 'index.html')\n else:\n # foo.md => foo/index.html\n return os.path.join(parent, self.name, 'index.html')\n else:\n # foo.md => foo.html\n root, ext = os.path.splitext(self.src_path)\n return root + '.html'\n return self.src_path\n\n def _get_url(self, use_directory_urls):\n \"\"\" Return url based in destination path. 
\"\"\"\n url = self.dest_path.replace(os.path.sep, '/')\n dirname, filename = os.path.split(url)\n if use_directory_urls and filename == 'index.html':\n if dirname == '':\n url = '.'\n else:\n url = dirname + '/'\n return utils.urlquote(url)\n\n def url_relative_to(self, other):\n \"\"\" Return url for file relative to other file. \"\"\"\n return utils.get_relative_url(self.url, other.url if isinstance(other, File) else other)\n\n def copy_file(self, dirty=False):\n \"\"\" Copy source file to destination, ensuring parent directories exist. \"\"\"\n if dirty and not self.is_modified():\n log.debug(\"Skip copying unmodified file: '{}'\".format(self.src_path))\n else:\n log.debug(\"Copying media file: '{}'\".format(self.src_path))\n utils.copy_file(self.abs_src_path, self.abs_dest_path)\n\n def is_modified(self):\n if os.path.isfile(self.abs_dest_path):\n return os.path.getmtime(self.abs_dest_path) < os.path.getmtime(self.abs_src_path)\n return True\n\n def is_documentation_page(self):\n \"\"\" Return True if file is a Markdown page. \"\"\"\n return os.path.splitext(self.src_path)[1] in utils.markdown_extensions\n\n def is_static_page(self):\n \"\"\" Return True if file is a static page (html, xml, json). \"\"\"\n return os.path.splitext(self.src_path)[1] in (\n '.html',\n '.htm',\n '.xml',\n '.json',\n )\n\n def is_media_file(self):\n \"\"\" Return True if file is not a documentation or static page. \"\"\"\n return not (self.is_documentation_page() or self.is_static_page())\n\n def is_javascript(self):\n \"\"\" Return True if file is a JavaScript file. \"\"\"\n return os.path.splitext(self.src_path)[1] in (\n '.js',\n '.javascript',\n )\n\n def is_css(self):\n \"\"\" Return True if file is a CSS file. \"\"\"\n return os.path.splitext(self.src_path)[1] in (\n '.css',\n )\n\n\ndef get_files(config):\n \"\"\" Walk the `docs_dir` and return a Files collection. \"\"\"\n files = []\n exclude = ['.*', '/templates']\n\n for source_dir, dirnames, filenames in os.walk(config['docs_dir'], followlinks=True):\n relative_dir = os.path.relpath(source_dir, config['docs_dir'])\n\n for dirname in list(dirnames):\n path = os.path.normpath(os.path.join(relative_dir, dirname))\n # Skip any excluded directories\n if _filter_paths(basename=dirname, path=path, is_dir=True, exclude=exclude):\n dirnames.remove(dirname)\n dirnames.sort()\n\n for filename in _sort_files(filenames):\n path = os.path.normpath(os.path.join(relative_dir, filename))\n # Skip any excluded files\n if _filter_paths(basename=filename, path=path, is_dir=False, exclude=exclude):\n continue\n # Skip README.md if an index file also exists in dir\n if filename.lower() == 'readme.md' and 'index.md' in filenames:\n log.warning(\"Both index.md and readme.md found. Skipping readme.md from {}\".format(source_dir))\n continue\n files.append(File(path, config['docs_dir'], config['site_dir'], config['use_directory_urls']))\n\n return Files(files)\n\n\ndef _sort_files(filenames):\n \"\"\" Always sort `index` or `README` as first filename in list. \"\"\"\n\n def compare(x, y):\n if x == y:\n return 0\n if os.path.splitext(y)[0] in ['index', 'README']:\n return 1\n if os.path.splitext(x)[0] in ['index', 'README'] or x < y:\n return -1\n return 1\n\n return sorted(filenames, key=cmp_to_key(compare))\n\n\ndef _filter_paths(basename, path, is_dir, exclude):\n \"\"\" .gitignore style file filtering. 
\"\"\"\n for item in exclude:\n # Items ending in '/' apply only to directories.\n if item.endswith('/') and not is_dir:\n continue\n # Items starting with '/' apply to the whole path.\n # In any other cases just the basename is used.\n match = path if item.startswith('/') else basename\n if fnmatch.fnmatch(match, item.strip('/')):\n return True\n return False\n", "path": "mkdocs/structure/files.py"}]} | 3,799 | 117 |
gh_patches_debug_8792 | rasdani/github-patches | git_diff | rasterio__rasterio-401 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
rio-merge ignores --driver
Due to a [typo](https://github.com/mapbox/rasterio/blob/master/rasterio/rio/merge.py#L90):
``` python
kwargs['driver'] == driver
```
Instead of:
``` python
kwargs['driver'] = driver
```
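The consequence is easy to miss because `==` is valid syntax: the comparison result is simply discarded, so the value passed via `--driver` never reaches the output profile. A tiny self-contained illustration (plain Python, not rasterio):

```python
kwargs = {"driver": "GTiff"}   # value inherited from the first input dataset
driver = "PNG"                 # what the user asked for via --driver

kwargs["driver"] == driver     # comparison only; evaluates to False and is thrown away
print(kwargs["driver"])        # GTiff -- the requested driver is silently ignored

kwargs["driver"] = driver      # assignment is what actually updates the profile
print(kwargs["driver"])        # PNG
```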
</issue>
<code>
[start of rasterio/rio/merge.py]
1 # Merge command.
2
3
4 import logging
5 import math
6 import os.path
7 import warnings
8
9 import click
10 from cligj import files_inout_arg, format_opt
11
12 from .helpers import resolve_inout
13 from . import options
14 import rasterio
15 from rasterio.transform import Affine
16
17
18 @click.command(short_help="Merge a stack of raster datasets.")
19 @files_inout_arg
20 @options.output_opt
21 @format_opt
22 @options.bounds_opt
23 @click.option('-r', '--res', nargs=2, type=float, default=None,
24 help="Output dataset resolution: pixel width, pixel height")
25 @click.option('--nodata', type=float, default=None,
26 help="Override nodata values defined in input datasets")
27 @click.pass_context
28 def merge(ctx, files, output, driver, bounds, res, nodata):
29 """Copy valid pixels from input files to an output file.
30
31 All files must have the same number of bands, data type, and
32 coordinate reference system.
33
34 Input files are merged in their listed order using the reverse
35 painter's algorithm. If the output file exists, its values will be
36 overwritten by input values.
37
38 Geospatial bounds and resolution of a new output file in the
39 units of the input file coordinate reference system may be provided
40 and are otherwise taken from the first input file.
41 """
42 import numpy as np
43
44 verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1
45 logger = logging.getLogger('rio')
46
47 try:
48 with rasterio.drivers(CPL_DEBUG=verbosity>2):
49 output, files = resolve_inout(files=files, output=output)
50
51 with rasterio.open(files[0]) as first:
52 first_res = first.res
53 kwargs = first.meta
54 kwargs.pop('affine')
55 nodataval = first.nodatavals[0]
56 dtype = first.dtypes[0]
57
58 if os.path.exists(output):
59 # TODO: prompt user to update existing file (-i option) like:
60 # overwrite b.tif? (y/n [n]) n
61 # not overwritten
62 dst = rasterio.open(output, 'r+')
63 nodataval = dst.nodatavals[0]
64 dtype = dst.dtypes[0]
65 dest = np.zeros((dst.count,) + dst.shape, dtype=dtype)
66 else:
67 # Create new output file.
68 # Extent from option or extent of all inputs.
69 if not bounds:
70 # scan input files.
71 xs = []
72 ys = []
73 for f in files:
74 with rasterio.open(f) as src:
75 left, bottom, right, top = src.bounds
76 xs.extend([left, right])
77 ys.extend([bottom, top])
78 bounds = min(xs), min(ys), max(xs), max(ys)
79 output_transform = Affine.translation(bounds[0], bounds[3])
80
81 # Resolution/pixel size.
82 if not res:
83 res = first_res
84 output_transform *= Affine.scale(res[0], -res[1])
85
86 # Dataset shape.
87 output_width = int(math.ceil((bounds[2]-bounds[0])/res[0]))
88 output_height = int(math.ceil((bounds[3]-bounds[1])/res[1]))
89
90 kwargs['driver'] == driver
91 kwargs['transform'] = output_transform
92 kwargs['width'] = output_width
93 kwargs['height'] = output_height
94
95 logger.debug("Kwargs: %r", kwargs)
96 logger.debug("bounds: %r", bounds)
97 logger.debug("Res: %r", res)
98
99 dst = rasterio.open(output, 'w', **kwargs)
100 dest = np.zeros((first.count, output_height, output_width),
101 dtype=dtype)
102
103 logger.debug("In merge, dest shape: %r", dest.shape)
104
105 if nodata is not None:
106 nodataval = nodata
107
108 if nodataval is not None:
109 # Only fill if the nodataval is within dtype's range.
110 inrange = False
111 if np.dtype(dtype).kind in ('i', 'u'):
112 info = np.iinfo(dtype)
113 inrange = (info.min <= nodataval <= info.max)
114 elif np.dtype(dtype).kind == 'f':
115 info = np.finfo(dtype)
116 inrange = (info.min <= nodataval <= info.max)
117 if inrange:
118 dest.fill(nodataval)
119 else:
120 warnings.warn(
121 "Input file's nodata value, %s, is beyond the valid "
122 "range of its data type, %s. Consider overriding it "
123 "using the --nodata option for better results." % (
124 nodataval, dtype))
125 else:
126 nodataval = 0
127
128 dst_w, dst_s, dst_e, dst_n = dst.bounds
129
130 for fname in reversed(files):
131 with rasterio.open(fname) as src:
132 # Real World (tm) use of boundless reads.
133 # This approach uses the maximum amount of memory to solve
134 # the problem. Making it more efficient is a TODO.
135
136 # 1. Compute spatial intersection of destination
137 # and source.
138 src_w, src_s, src_e, src_n = src.bounds
139
140 int_w = src_w if src_w > dst_w else dst_w
141 int_s = src_s if src_s > dst_s else dst_s
142 int_e = src_e if src_e < dst_e else dst_e
143 int_n = src_n if src_n < dst_n else dst_n
144
145 # 2. Compute the source window.
146 src_window = src.window(int_w, int_s, int_e, int_n)
147
148 # 3. Compute the destination window.
149 dst_window = dst.window(int_w, int_s, int_e, int_n)
150
151 # 4. Initialize temp array.
152 temp = np.zeros(
153 (first.count,) + tuple(b - a for a, b in dst_window),
154 dtype=dtype)
155
156 temp = src.read(
157 out=temp,
158 window=src_window,
159 boundless=False,
160 masked=True)
161
162 # 5. Copy elements of temp into dest.
163 roff, coff = dst.index(int_w, int_n)
164 h, w = temp.shape[-2:]
165
166 region = dest[:,roff:roff+h,coff:coff+w]
167 np.copyto(region, temp,
168 where=np.logical_and(
169 region==nodataval, temp.mask==False))
170
171 if dst.mode == 'r+':
172 temp = dst.read(masked=True)
173 np.copyto(dest, temp,
174 where=np.logical_and(
175 dest==nodataval, temp.mask==False))
176
177 dst.write(dest)
178 dst.close()
179
180 except Exception:
181 logger.exception("Exception caught during processing")
182 raise click.Abort()
183
[end of rasterio/rio/merge.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/rasterio/rio/merge.py b/rasterio/rio/merge.py
--- a/rasterio/rio/merge.py
+++ b/rasterio/rio/merge.py
@@ -87,7 +87,7 @@
output_width = int(math.ceil((bounds[2]-bounds[0])/res[0]))
output_height = int(math.ceil((bounds[3]-bounds[1])/res[1]))
- kwargs['driver'] == driver
+ kwargs['driver'] = driver
kwargs['transform'] = output_transform
kwargs['width'] = output_width
kwargs['height'] = output_height
| {"golden_diff": "diff --git a/rasterio/rio/merge.py b/rasterio/rio/merge.py\n--- a/rasterio/rio/merge.py\n+++ b/rasterio/rio/merge.py\n@@ -87,7 +87,7 @@\n output_width = int(math.ceil((bounds[2]-bounds[0])/res[0]))\n output_height = int(math.ceil((bounds[3]-bounds[1])/res[1]))\n \n- kwargs['driver'] == driver\n+ kwargs['driver'] = driver\n kwargs['transform'] = output_transform\n kwargs['width'] = output_width\n kwargs['height'] = output_height\n", "issue": "rio-merge ignores --driver\nDue to a [typo](https://github.com/mapbox/rasterio/blob/master/rasterio/rio/merge.py#L90):\n\n``` python\nkwargs['driver'] == driver\n```\n\nInstead of:\n\n``` python\nkwargs['driver'] = driver\n```\n\n", "before_files": [{"content": "# Merge command.\n\n\nimport logging\nimport math\nimport os.path\nimport warnings\n\nimport click\nfrom cligj import files_inout_arg, format_opt\n\nfrom .helpers import resolve_inout\nfrom . import options\nimport rasterio\nfrom rasterio.transform import Affine\n\n\[email protected](short_help=\"Merge a stack of raster datasets.\")\n@files_inout_arg\[email protected]_opt\n@format_opt\[email protected]_opt\[email protected]('-r', '--res', nargs=2, type=float, default=None,\n help=\"Output dataset resolution: pixel width, pixel height\")\[email protected]('--nodata', type=float, default=None,\n help=\"Override nodata values defined in input datasets\")\[email protected]_context\ndef merge(ctx, files, output, driver, bounds, res, nodata):\n \"\"\"Copy valid pixels from input files to an output file.\n\n All files must have the same number of bands, data type, and\n coordinate reference system.\n\n Input files are merged in their listed order using the reverse\n painter's algorithm. If the output file exists, its values will be\n overwritten by input values.\n\n Geospatial bounds and resolution of a new output file in the\n units of the input file coordinate reference system may be provided\n and are otherwise taken from the first input file.\n \"\"\"\n import numpy as np\n\n verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 1\n logger = logging.getLogger('rio')\n\n try:\n with rasterio.drivers(CPL_DEBUG=verbosity>2):\n output, files = resolve_inout(files=files, output=output)\n\n with rasterio.open(files[0]) as first:\n first_res = first.res\n kwargs = first.meta\n kwargs.pop('affine')\n nodataval = first.nodatavals[0]\n dtype = first.dtypes[0]\n\n if os.path.exists(output):\n # TODO: prompt user to update existing file (-i option) like:\n # overwrite b.tif? 
(y/n [n]) n\n # not overwritten\n dst = rasterio.open(output, 'r+')\n nodataval = dst.nodatavals[0]\n dtype = dst.dtypes[0]\n dest = np.zeros((dst.count,) + dst.shape, dtype=dtype)\n else:\n # Create new output file.\n # Extent from option or extent of all inputs.\n if not bounds:\n # scan input files.\n xs = []\n ys = []\n for f in files:\n with rasterio.open(f) as src:\n left, bottom, right, top = src.bounds\n xs.extend([left, right])\n ys.extend([bottom, top])\n bounds = min(xs), min(ys), max(xs), max(ys)\n output_transform = Affine.translation(bounds[0], bounds[3])\n\n # Resolution/pixel size.\n if not res:\n res = first_res\n output_transform *= Affine.scale(res[0], -res[1])\n\n # Dataset shape.\n output_width = int(math.ceil((bounds[2]-bounds[0])/res[0]))\n output_height = int(math.ceil((bounds[3]-bounds[1])/res[1]))\n\n kwargs['driver'] == driver\n kwargs['transform'] = output_transform\n kwargs['width'] = output_width\n kwargs['height'] = output_height\n\n logger.debug(\"Kwargs: %r\", kwargs)\n logger.debug(\"bounds: %r\", bounds)\n logger.debug(\"Res: %r\", res)\n\n dst = rasterio.open(output, 'w', **kwargs)\n dest = np.zeros((first.count, output_height, output_width),\n dtype=dtype)\n\n logger.debug(\"In merge, dest shape: %r\", dest.shape)\n\n if nodata is not None:\n nodataval = nodata\n\n if nodataval is not None:\n # Only fill if the nodataval is within dtype's range.\n inrange = False\n if np.dtype(dtype).kind in ('i', 'u'):\n info = np.iinfo(dtype)\n inrange = (info.min <= nodataval <= info.max)\n elif np.dtype(dtype).kind == 'f':\n info = np.finfo(dtype)\n inrange = (info.min <= nodataval <= info.max)\n if inrange:\n dest.fill(nodataval)\n else:\n warnings.warn(\n \"Input file's nodata value, %s, is beyond the valid \"\n \"range of its data type, %s. Consider overriding it \"\n \"using the --nodata option for better results.\" % (\n nodataval, dtype))\n else:\n nodataval = 0\n\n dst_w, dst_s, dst_e, dst_n = dst.bounds\n\n for fname in reversed(files):\n with rasterio.open(fname) as src:\n # Real World (tm) use of boundless reads.\n # This approach uses the maximum amount of memory to solve\n # the problem. Making it more efficient is a TODO.\n\n # 1. Compute spatial intersection of destination\n # and source.\n src_w, src_s, src_e, src_n = src.bounds\n\n int_w = src_w if src_w > dst_w else dst_w\n int_s = src_s if src_s > dst_s else dst_s\n int_e = src_e if src_e < dst_e else dst_e\n int_n = src_n if src_n < dst_n else dst_n\n\n # 2. Compute the source window.\n src_window = src.window(int_w, int_s, int_e, int_n)\n\n # 3. Compute the destination window.\n dst_window = dst.window(int_w, int_s, int_e, int_n)\n\n # 4. Initialize temp array.\n temp = np.zeros(\n (first.count,) + tuple(b - a for a, b in dst_window),\n dtype=dtype)\n\n temp = src.read(\n out=temp,\n window=src_window,\n boundless=False,\n masked=True)\n\n # 5. Copy elements of temp into dest.\n roff, coff = dst.index(int_w, int_n)\n h, w = temp.shape[-2:]\n\n region = dest[:,roff:roff+h,coff:coff+w]\n np.copyto(region, temp,\n where=np.logical_and(\n region==nodataval, temp.mask==False))\n\n if dst.mode == 'r+':\n temp = dst.read(masked=True)\n np.copyto(dest, temp,\n where=np.logical_and(\n dest==nodataval, temp.mask==False))\n\n dst.write(dest)\n dst.close()\n\n except Exception:\n logger.exception(\"Exception caught during processing\")\n raise click.Abort()\n", "path": "rasterio/rio/merge.py"}]} | 2,529 | 142 |
gh_patches_debug_18789 | rasdani/github-patches | git_diff | numpy__numpy-14207 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Wrong capitalization on bitgenerator MT19937
Wrong capitalization of MT19937 in default argument values of functions in numpy.random._pickle.py
### Reproducing code example:
```python
import numpy as np
np.random._pickle.__randomstate_ctor() # => error
np.random._pickle.__randomstate_ctor('mt19937') # => error
np.random._pickle.__randomstate_ctor('MT19937') # => works
```
### Error message:
*** ValueError: mt19937 is not a known BitGenerator module.
### Numpy/Python version information:
numpy 1.17.0; Python 3.6.8 (default, Jan 14 2019, 11:02:34)
[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]]
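For reference, a stripped-down sketch of the failing lookup (a simplification for illustration, not the actual numpy source): the registry keys are exact-case, so a lowercase default can never match and the pickling helpers fail when called with no argument.

```python
# Simplified stand-in for numpy.random._pickle (illustration only)
BitGenerators = {"MT19937": "MT19937", "PCG64": "PCG64", "Philox": "Philox", "SFC64": "SFC64"}

def randomstate_ctor(bit_generator_name="mt19937"):      # lowercase default, as currently written
    if bit_generator_name not in BitGenerators:           # "mt19937" is not an exact-case key
        raise ValueError(str(bit_generator_name) + " is not a known BitGenerator module.")
    return BitGenerators[bit_generator_name]

randomstate_ctor("MT19937")   # works
# randomstate_ctor()          # raises ValueError -- the default itself is unusable
```

The most direct remedy would be to spell the defaults `'MT19937'` so they match the registry keys; a case-insensitive lookup could also work, but would have to account for mixed-case names such as `'Philox'`.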
</issue>
<code>
[start of numpy/random/_pickle.py]
1 from .mtrand import RandomState
2 from .philox import Philox
3 from .pcg64 import PCG64
4 from .sfc64 import SFC64
5
6 from .generator import Generator
7 from .mt19937 import MT19937
8
9 BitGenerators = {'MT19937': MT19937,
10 'PCG64': PCG64,
11 'Philox': Philox,
12 'SFC64': SFC64,
13 }
14
15
16 def __generator_ctor(bit_generator_name='mt19937'):
17 """
18 Pickling helper function that returns a Generator object
19
20 Parameters
21 ----------
22 bit_generator_name: str
23 String containing the core BitGenerator
24
25 Returns
26 -------
27 rg: Generator
28 Generator using the named core BitGenerator
29 """
30 if bit_generator_name in BitGenerators:
31 bit_generator = BitGenerators[bit_generator_name]
32 else:
33 raise ValueError(str(bit_generator_name) + ' is not a known '
34 'BitGenerator module.')
35
36 return Generator(bit_generator())
37
38
39 def __bit_generator_ctor(bit_generator_name='mt19937'):
40 """
41 Pickling helper function that returns a bit generator object
42
43 Parameters
44 ----------
45 bit_generator_name: str
46 String containing the name of the BitGenerator
47
48 Returns
49 -------
50 bit_generator: BitGenerator
51 BitGenerator instance
52 """
53 if bit_generator_name in BitGenerators:
54 bit_generator = BitGenerators[bit_generator_name]
55 else:
56 raise ValueError(str(bit_generator_name) + ' is not a known '
57 'BitGenerator module.')
58
59 return bit_generator()
60
61
62 def __randomstate_ctor(bit_generator_name='mt19937'):
63 """
64 Pickling helper function that returns a legacy RandomState-like object
65
66 Parameters
67 ----------
68 bit_generator_name: str
69 String containing the core BitGenerator
70
71 Returns
72 -------
73 rs: RandomState
74 Legacy RandomState using the named core BitGenerator
75 """
76 if bit_generator_name in BitGenerators:
77 bit_generator = BitGenerators[bit_generator_name]
78 else:
79 raise ValueError(str(bit_generator_name) + ' is not a known '
80 'BitGenerator module.')
81
82 return RandomState(bit_generator())
83
[end of numpy/random/_pickle.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/numpy/random/_pickle.py b/numpy/random/_pickle.py
--- a/numpy/random/_pickle.py
+++ b/numpy/random/_pickle.py
@@ -13,7 +13,7 @@
}
-def __generator_ctor(bit_generator_name='mt19937'):
+def __generator_ctor(bit_generator_name='MT19937'):
"""
Pickling helper function that returns a Generator object
@@ -36,7 +36,7 @@
return Generator(bit_generator())
-def __bit_generator_ctor(bit_generator_name='mt19937'):
+def __bit_generator_ctor(bit_generator_name='MT19937'):
"""
Pickling helper function that returns a bit generator object
@@ -59,7 +59,7 @@
return bit_generator()
-def __randomstate_ctor(bit_generator_name='mt19937'):
+def __randomstate_ctor(bit_generator_name='MT19937'):
"""
Pickling helper function that returns a legacy RandomState-like object
| {"golden_diff": "diff --git a/numpy/random/_pickle.py b/numpy/random/_pickle.py\n--- a/numpy/random/_pickle.py\n+++ b/numpy/random/_pickle.py\n@@ -13,7 +13,7 @@\n }\n \n \n-def __generator_ctor(bit_generator_name='mt19937'):\n+def __generator_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a Generator object\n \n@@ -36,7 +36,7 @@\n return Generator(bit_generator())\n \n \n-def __bit_generator_ctor(bit_generator_name='mt19937'):\n+def __bit_generator_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a bit generator object\n \n@@ -59,7 +59,7 @@\n return bit_generator()\n \n \n-def __randomstate_ctor(bit_generator_name='mt19937'):\n+def __randomstate_ctor(bit_generator_name='MT19937'):\n \"\"\"\n Pickling helper function that returns a legacy RandomState-like object\n", "issue": "Wrong capitalization on bitgenerator MT19937\nWrong capitalization of MT19937 in default argument values of functions in numpy.random._pickle.py\r\n\r\n### Reproducing code example:\r\n\r\n```python\r\nimport numpy as np\r\nnp.random._pickle.__randomstate_ctor() # => error\r\nnp.random._pickle.__randomstate_ctor('mt19937') # => error\r\nnp.random._pickle.__randomstate_ctor('MT19937') # => works\r\n```\r\n\r\n### Error message:\r\n\r\n*** ValueError: mt19937 is not a known BitGenerator module.\r\n\r\n### Numpy/Python version information:\r\n\r\n1.17.0 3.6.8 (default, Jan 14 2019, 11:02:34) \r\n[GCC 8.0.1 20180414 (experimental) [trunk revision 259383]]\r\n\n", "before_files": [{"content": "from .mtrand import RandomState\nfrom .philox import Philox\nfrom .pcg64 import PCG64\nfrom .sfc64 import SFC64\n\nfrom .generator import Generator\nfrom .mt19937 import MT19937\n\nBitGenerators = {'MT19937': MT19937,\n 'PCG64': PCG64,\n 'Philox': Philox,\n 'SFC64': SFC64,\n }\n\n\ndef __generator_ctor(bit_generator_name='mt19937'):\n \"\"\"\n Pickling helper function that returns a Generator object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the core BitGenerator\n\n Returns\n -------\n rg: Generator\n Generator using the named core BitGenerator\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return Generator(bit_generator())\n\n\ndef __bit_generator_ctor(bit_generator_name='mt19937'):\n \"\"\"\n Pickling helper function that returns a bit generator object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the name of the BitGenerator\n\n Returns\n -------\n bit_generator: BitGenerator\n BitGenerator instance\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return bit_generator()\n\n\ndef __randomstate_ctor(bit_generator_name='mt19937'):\n \"\"\"\n Pickling helper function that returns a legacy RandomState-like object\n\n Parameters\n ----------\n bit_generator_name: str\n String containing the core BitGenerator\n\n Returns\n -------\n rs: RandomState\n Legacy RandomState using the named core BitGenerator\n \"\"\"\n if bit_generator_name in BitGenerators:\n bit_generator = BitGenerators[bit_generator_name]\n else:\n raise ValueError(str(bit_generator_name) + ' is not a known '\n 'BitGenerator module.')\n\n return RandomState(bit_generator())\n", "path": "numpy/random/_pickle.py"}]} | 1,400 | 228 |
gh_patches_debug_10624 | rasdani/github-patches | git_diff | mlcommons__GaNDLF-429 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Add an error check for compute dimensions not supported by GaNDLF
### > I think the error message is caused by the yaml file. My input image size is 128 x 128.
>
**> This is the error message:**
```
(venv_gandlf) C:\Users\pc\GaNDLF>python gandlf_run -c ./images_and_labels/model.yaml -i ./images_and_labels/train.csv -m ./images_and_labels/model -t True -d cpu
WARNING: Initializing 'norm_type' as 'batch'
WARNING: Initializing 'verbose' as False
WARNING: Initializing 'medcam_enabled' as False
WARNING: Initializing 'save_training' as False
WARNING: Initializing 'save_output' as False
WARNING: Initializing 'in_memory' as False
WARNING: Initializing 'pin_memory_dataloader' as False
WARNING: Initializing 'enable_padding' as False
WARNING: Initializing 'scaling_factor' as 1
WARNING: Initializing 'clip_grad' as None
WARNING: Initializing 'track_memory_usage' as False
WARNING: Initializing 'print_rgb_label_warning' as True
WARNING: Initializing 'data_postprocessing' as {}
WARNING: Initializing 'grid_aggregator_overlap' as crop
WARNING: Initializing 'clip_mode' as None
WARNING: Setting default step_size to: 0.02
Trying to resume training without changing any parameters from previous run.
Using previously saved parameter file ./images_and_labels/model\parameters.pkl
Using previously saved testing data ./images_and_labels/model\testing_0\data_testing.pkl
Using previously saved training+validation data ./images_and_labels/model\testing_0\data_trainAndVal.pkl
Number of channels : 1
Constructing queue for train data: 100%|████████████████████████| 643/643 [00:02<00:00, 216.35it/s]
C:\Users\pc\anaconda3\envs\venv_gandlf\lib\site-packages\torch\utils\data\dataloader.py:477: UserWarning: This DataLoader will create 16 worker processes in total. Our suggested max number of worker in current system is 4 (`cpuset` is not taken into account), which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.
cpuset_checked))
Calculating weights
Constructing queue for penalty data: 100%|██████████████████████| 643/643 [00:02<00:00, 241.46it/s]
Looping over training data for penalty calculation: 100%|███████| 643/643 [00:05<00:00, 121.90it/s]
Constructing queue for validation data: 100%|███████████████████| 161/161 [00:01<00:00, 124.04it/s]
All Keys : ['subject_id', '1', 'spacing', 'label', 'path_to_metadata']
Traceback (most recent call last):
File "gandlf_run", line 116, in <module>
args.reset,
File "C:\Users\pc\GaNDLF\GANDLF\cli\main_run.py", line 91, in main_run
reset=reset,
File "C:\Users\pc\GaNDLF\GANDLF\training_manager.py", line 254, in TrainingManager
testing_data=testingData,
File "C:\Users\pc\GaNDLF\GANDLF\compute\training_loop.py", line 239, in training_loop
) = create_pytorch_objects(params, training_data, validation_data, device)
File "C:\Users\pc\GaNDLF\GANDLF\compute\generic.py", line 69, in create_pytorch_objects
model = get_model(parameters)
File "C:\Users\pc\GaNDLF\GANDLF\models\__init__.py", line 106, in get_model
return global_models_dict[params["model"]["architecture"]](parameters=params)
File "C:\Users\pc\GaNDLF\GANDLF\models\unet.py", line 41, in __init__
conv=self.Conv,
File "C:\Users\pc\anaconda3\envs\venv_gandlf\lib\site-packages\torch\nn\modules\module.py", line 948, in __getattr__
type(self).__name__, name))
AttributeError: 'unet' object has no attribute 'Conv'
```
**> And this is my yaml file**
[model.pdf](https://github.com/CBICA/GaNDLF/files/8631955/model.pdf)
_Originally posted by @HILALOZTEMEL in https://github.com/CBICA/GaNDLF/discussions/427_
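Given the `ModelBase` code included below, the `AttributeError: 'unet' object has no attribute 'Conv'` appears to arise only when `model.dimension` in the YAML is neither 2 nor 3: both branches that define `self.Conv` are skipped and the constructor silently continues. A hedged sketch of the kind of early check the issue title asks for (illustrative only, not the project's actual patch):

```python
def check_dimensionality(n_dimensions):
    """Illustrative guard: fail fast with an actionable message for unsupported dimensions."""
    if n_dimensions not in (2, 3):
        raise ValueError(
            "GaNDLF only supports 2D and 3D computations; got dimension="
            + str(n_dimensions)
        )

check_dimensionality(2)      # fine
# check_dimensionality(128)  # ValueError with a clear message, instead of the later
#                            # "'unet' object has no attribute 'Conv'"
```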
</issue>
<code>
[start of GANDLF/models/modelBase.py]
1 # -*- coding: utf-8 -*-
2 """All Models in GANDLF are to be derived from this base class code."""
3
4 import torch
5 import torch.nn as nn
6 import torch.nn.functional as F
7
8 from GANDLF.utils import get_linear_interpolation_mode
9 from GANDLF.utils.modelbase import get_modelbase_final_layer
10 from GANDLF.models.seg_modules.average_pool import (
11 GlobalAveragePooling3D,
12 GlobalAveragePooling2D,
13 )
14
15
16 class ModelBase(nn.Module):
17 """
18 This is the base model class that all other architectures will need to derive from
19 """
20
21 def __init__(self, parameters):
22 """
23 This defines all defaults that the model base uses
24
25 Args:
26 parameters (dict): This is a dictionary of all parameters that are needed for the model.
27 """
28 super(ModelBase, self).__init__()
29 self.model_name = parameters["model"]["architecture"]
30 self.n_dimensions = parameters["model"]["dimension"]
31 self.n_channels = parameters["model"]["num_channels"]
32 if "num_classes" in parameters["model"]:
33 self.n_classes = parameters["model"]["num_classes"]
34 else:
35 self.n_classes = len(parameters["model"]["class_list"])
36 self.base_filters = parameters["model"]["base_filters"]
37 self.norm_type = parameters["model"]["norm_type"]
38 self.patch_size = parameters["patch_size"]
39 self.batch_size = parameters["batch_size"]
40 self.amp = parameters["model"]["amp"]
41 self.final_convolution_layer = self.get_final_layer(
42 parameters["model"]["final_layer"]
43 )
44
45 self.linear_interpolation_mode = get_linear_interpolation_mode(
46 self.n_dimensions
47 )
48
49 self.sigmoid_input_multiplier = parameters["model"].get(
50 "sigmoid_input_multiplier", 1.0
51 )
52
53 # based on dimensionality, the following need to defined:
54 # convolution, batch_norm, instancenorm, dropout
55 if self.n_dimensions == 2:
56 self.Conv = nn.Conv2d
57 self.ConvTranspose = nn.ConvTranspose2d
58 self.InstanceNorm = nn.InstanceNorm2d
59 self.Dropout = nn.Dropout2d
60 self.BatchNorm = nn.BatchNorm2d
61 self.MaxPool = nn.MaxPool2d
62 self.AvgPool = nn.AvgPool2d
63 self.AdaptiveAvgPool = nn.AdaptiveAvgPool2d
64 self.AdaptiveMaxPool = nn.AdaptiveMaxPool2d
65 self.GlobalAvgPool = GlobalAveragePooling2D
66 self.Norm = self.get_norm_type(self.norm_type.lower(), self.n_dimensions)
67
68 elif self.n_dimensions == 3:
69 self.Conv = nn.Conv3d
70 self.ConvTranspose = nn.ConvTranspose3d
71 self.InstanceNorm = nn.InstanceNorm3d
72 self.Dropout = nn.Dropout3d
73 self.BatchNorm = nn.BatchNorm3d
74 self.MaxPool = nn.MaxPool3d
75 self.AvgPool = nn.AvgPool3d
76 self.AdaptiveAvgPool = nn.AdaptiveAvgPool3d
77 self.AdaptiveMaxPool = nn.AdaptiveMaxPool3d
78 self.GlobalAvgPool = GlobalAveragePooling3D
79 self.Norm = self.get_norm_type(self.norm_type.lower(), self.n_dimensions)
80
81 def get_final_layer(self, final_convolution_layer):
82 return get_modelbase_final_layer(final_convolution_layer)
83
84 def get_norm_type(self, norm_type, dimensions):
85 """
86 This function gets the normalization type for the model.
87
88 Args:
89 norm_type (str): Normalization type as a string.
90 dimensions (str): The dimensionality of the model.
91
92 Returns:
93 _InstanceNorm or _BatchNorm: The normalization type for the model.
94 """
95 if dimensions == 3:
96 if norm_type == "batch":
97 norm_type = nn.BatchNorm3d
98 elif norm_type == "instance":
99 norm_type = nn.InstanceNorm3d
100 else:
101 norm_type = None
102 elif dimensions == 2:
103 if norm_type == "batch":
104 norm_type = nn.BatchNorm2d
105 elif norm_type == "instance":
106 norm_type = nn.InstanceNorm2d
107 else:
108 norm_type = None
109
110 return norm_type
111
[end of GANDLF/models/modelBase.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/GANDLF/models/modelBase.py b/GANDLF/models/modelBase.py
--- a/GANDLF/models/modelBase.py
+++ b/GANDLF/models/modelBase.py
@@ -78,6 +78,13 @@
self.GlobalAvgPool = GlobalAveragePooling3D
self.Norm = self.get_norm_type(self.norm_type.lower(), self.n_dimensions)
+ else:
+ raise ValueError(
+ "GaNDLF only supports 2D and 3D computations. {}D computations are not currently supported".format(
+ self.n_dimensions
+ )
+ )
+
def get_final_layer(self, final_convolution_layer):
return get_modelbase_final_layer(final_convolution_layer)
| {"golden_diff": "diff --git a/GANDLF/models/modelBase.py b/GANDLF/models/modelBase.py\n--- a/GANDLF/models/modelBase.py\n+++ b/GANDLF/models/modelBase.py\n@@ -78,6 +78,13 @@\n self.GlobalAvgPool = GlobalAveragePooling3D\n self.Norm = self.get_norm_type(self.norm_type.lower(), self.n_dimensions)\n \n+ else:\n+ raise ValueError(\n+ \"GaNDLF only supports 2D and 3D computations. {}D computations are not currently supported\".format(\n+ self.n_dimensions\n+ )\n+ )\n+\n def get_final_layer(self, final_convolution_layer):\n return get_modelbase_final_layer(final_convolution_layer)\n", "issue": "Add an error check for compute dimensions not supported by GaNDLF\n### > I think the error message is caused by the yaml file. My input image size is 128 x 128 . \r\n> \r\n **> this is error mesage :**\r\n\r\n`(venv_gandlf) C:\\Users\\pc\\GaNDLF>python gandlf_run -c ./images_and_labels/model.yaml -i ./images_and_labels/train.csv -m ./images_and_labels/model -t True -d cpu\r\nWARNING: Initializing 'norm_type' as 'batch'\r\nWARNING: Initializing 'verbose' as False\r\nWARNING: Initializing 'medcam_enabled' as False\r\nWARNING: Initializing 'save_training' as False\r\nWARNING: Initializing 'save_output' as False\r\nWARNING: Initializing 'in_memory' as False\r\nWARNING: Initializing 'pin_memory_dataloader' as False\r\nWARNING: Initializing 'enable_padding' as False\r\nWARNING: Initializing 'scaling_factor' as 1\r\nWARNING: Initializing 'clip_grad' as None\r\nWARNING: Initializing 'track_memory_usage' as False\r\nWARNING: Initializing 'print_rgb_label_warning' as True\r\nWARNING: Initializing 'data_postprocessing' as {}\r\nWARNING: Initializing 'grid_aggregator_overlap' as crop\r\nWARNING: Initializing 'clip_mode' as None\r\nWARNING: Setting default step_size to: 0.02\r\nTrying to resume training without changing any parameters from previous run.\r\nUsing previously saved parameter file ./images_and_labels/model\\parameters.pkl\r\nUsing previously saved testing data ./images_and_labels/model\\testing_0\\data_testing.pkl\r\nUsing previously saved training+validation data ./images_and_labels/model\\testing_0\\data_trainAndVal.pkl\r\nNumber of channels : 1\r\nConstructing queue for train data: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 643/643 [00:02<00:00, 216.35it/s]\r\nC:\\Users\\pc\\anaconda3\\envs\\venv_gandlf\\lib\\site-packages\\torch\\utils\\data\\dataloader.py:477: UserWarning: This DataLoader will create 16 worker processes in total. Our suggested max number of worker in current system is 4 (`cpuset` is not taken into account), which is smaller than what this DataLoader is going to create. 
Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\r\n cpuset_checked))\r\nCalculating weights\r\nConstructing queue for penalty data: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 643/643 [00:02<00:00, 241.46it/s]\r\nLooping over training data for penalty calculation: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 643/643 [00:05<00:00, 121.90it/s]\r\nConstructing queue for validation data: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 161/161 [00:01<00:00, 124.04it/s]\r\nAll Keys : ['subject_id', '1', 'spacing', 'label', 'path_to_metadata']\r\nTraceback (most recent call last):\r\n File \"gandlf_run\", line 116, in <module>\r\n args.reset,\r\n File \"C:\\Users\\pc\\GaNDLF\\GANDLF\\cli\\main_run.py\", line 91, in main_run\r\n reset=reset,\r\n File \"C:\\Users\\pc\\GaNDLF\\GANDLF\\training_manager.py\", line 254, in TrainingManager\r\n testing_data=testingData,\r\n File \"C:\\Users\\pc\\GaNDLF\\GANDLF\\compute\\training_loop.py\", line 239, in training_loop\r\n ) = create_pytorch_objects(params, training_data, validation_data, device)\r\n File \"C:\\Users\\pc\\GaNDLF\\GANDLF\\compute\\generic.py\", line 69, in create_pytorch_objects\r\n model = get_model(parameters)\r\n File \"C:\\Users\\pc\\GaNDLF\\GANDLF\\models\\__init__.py\", line 106, in get_model\r\n return global_models_dict[params[\"model\"][\"architecture\"]](parameters=params)\r\n File \"C:\\Users\\pc\\GaNDLF\\GANDLF\\models\\unet.py\", line 41, in __init__\r\n conv=self.Conv,\r\n File \"C:\\Users\\pc\\anaconda3\\envs\\venv_gandlf\\lib\\site-packages\\torch\\nn\\modules\\module.py\", line 948, in __getattr__\r\n type(self).__name__, name))\r\nAttributeError: 'unet' object has no attribute 'Conv'`\r\n\r\n\r\n**> And this is my yaml file**\r\n\r\n\r\n[model.pdf](https://github.com/CBICA/GaNDLF/files/8631955/model.pdf)\r\n\r\n_Originally posted by @HILALOZTEMEL in https://github.com/CBICA/GaNDLF/discussions/427_\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\"\"\"All Models in GANDLF are to be derived from this base class code.\"\"\"\n\nimport torch\nimport torch.nn as nn\nimport torch.nn.functional as F\n\nfrom GANDLF.utils import get_linear_interpolation_mode\nfrom GANDLF.utils.modelbase import get_modelbase_final_layer\nfrom GANDLF.models.seg_modules.average_pool import (\n GlobalAveragePooling3D,\n GlobalAveragePooling2D,\n)\n\n\nclass ModelBase(nn.Module):\n \"\"\"\n This is the base model class that all other architectures will need to derive from\n \"\"\"\n\n def __init__(self, parameters):\n \"\"\"\n This defines all defaults that the model base uses\n\n Args:\n parameters (dict): This is a dictionary of all parameters that are needed for the model.\n \"\"\"\n super(ModelBase, self).__init__()\n self.model_name = parameters[\"model\"][\"architecture\"]\n self.n_dimensions = parameters[\"model\"][\"dimension\"]\n self.n_channels = parameters[\"model\"][\"num_channels\"]\n if \"num_classes\" in parameters[\"model\"]:\n self.n_classes = parameters[\"model\"][\"num_classes\"]\n else:\n self.n_classes = len(parameters[\"model\"][\"class_list\"])\n self.base_filters = parameters[\"model\"][\"base_filters\"]\n self.norm_type = parameters[\"model\"][\"norm_type\"]\n self.patch_size = parameters[\"patch_size\"]\n self.batch_size = 
parameters[\"batch_size\"]\n self.amp = parameters[\"model\"][\"amp\"]\n self.final_convolution_layer = self.get_final_layer(\n parameters[\"model\"][\"final_layer\"]\n )\n\n self.linear_interpolation_mode = get_linear_interpolation_mode(\n self.n_dimensions\n )\n\n self.sigmoid_input_multiplier = parameters[\"model\"].get(\n \"sigmoid_input_multiplier\", 1.0\n )\n\n # based on dimensionality, the following need to defined:\n # convolution, batch_norm, instancenorm, dropout\n if self.n_dimensions == 2:\n self.Conv = nn.Conv2d\n self.ConvTranspose = nn.ConvTranspose2d\n self.InstanceNorm = nn.InstanceNorm2d\n self.Dropout = nn.Dropout2d\n self.BatchNorm = nn.BatchNorm2d\n self.MaxPool = nn.MaxPool2d\n self.AvgPool = nn.AvgPool2d\n self.AdaptiveAvgPool = nn.AdaptiveAvgPool2d\n self.AdaptiveMaxPool = nn.AdaptiveMaxPool2d\n self.GlobalAvgPool = GlobalAveragePooling2D\n self.Norm = self.get_norm_type(self.norm_type.lower(), self.n_dimensions)\n\n elif self.n_dimensions == 3:\n self.Conv = nn.Conv3d\n self.ConvTranspose = nn.ConvTranspose3d\n self.InstanceNorm = nn.InstanceNorm3d\n self.Dropout = nn.Dropout3d\n self.BatchNorm = nn.BatchNorm3d\n self.MaxPool = nn.MaxPool3d\n self.AvgPool = nn.AvgPool3d\n self.AdaptiveAvgPool = nn.AdaptiveAvgPool3d\n self.AdaptiveMaxPool = nn.AdaptiveMaxPool3d\n self.GlobalAvgPool = GlobalAveragePooling3D\n self.Norm = self.get_norm_type(self.norm_type.lower(), self.n_dimensions)\n\n def get_final_layer(self, final_convolution_layer):\n return get_modelbase_final_layer(final_convolution_layer)\n\n def get_norm_type(self, norm_type, dimensions):\n \"\"\"\n This function gets the normalization type for the model.\n\n Args:\n norm_type (str): Normalization type as a string.\n dimensions (str): The dimensionality of the model.\n\n Returns:\n _InstanceNorm or _BatchNorm: The normalization type for the model.\n \"\"\"\n if dimensions == 3:\n if norm_type == \"batch\":\n norm_type = nn.BatchNorm3d\n elif norm_type == \"instance\":\n norm_type = nn.InstanceNorm3d\n else:\n norm_type = None\n elif dimensions == 2:\n if norm_type == \"batch\":\n norm_type = nn.BatchNorm2d\n elif norm_type == \"instance\":\n norm_type = nn.InstanceNorm2d\n else:\n norm_type = None\n\n return norm_type\n", "path": "GANDLF/models/modelBase.py"}]} | 2,789 | 157 |
gh_patches_debug_23308 | rasdani/github-patches | git_diff | pyro-ppl__numpyro-912 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
ZeroDivisionError when running SVI with num_steps < 20
Reproducible code
```python
import jax
import numpyro
def model():
pass
def guide():
pass
svi = numpyro.infer.SVI(model, guide, numpyro.optim.Adam(1), numpyro.infer.Trace_ELBO())
svi.run(jax.random.PRNGKey(0), 10)
```
Proposed fix: setting `batch = max(num_steps // 20, 1)` in [SVI.run](https://github.com/pyro-ppl/numpyro/blob/aee945895dff9ffb6a12637a9bfbddb1d8c20363/numpyro/infer/svi.py#L179) to guarantee that `batch` is positive
</issue>
<code>
[start of numpyro/infer/svi.py]
1 # Copyright Contributors to the Pyro project.
2 # SPDX-License-Identifier: Apache-2.0
3
4 from functools import namedtuple, partial
5
6 import tqdm
7
8 from jax import jit, lax, random
9 import jax.numpy as jnp
10
11 from numpyro.distributions import constraints
12 from numpyro.distributions.transforms import biject_to
13 from numpyro.handlers import replay, seed, trace
14 from numpyro.infer.util import transform_fn
15
16 SVIState = namedtuple('SVIState', ['optim_state', 'rng_key'])
17 """
18 A :func:`~collections.namedtuple` consisting of the following fields:
19 - **optim_state** - current optimizer's state.
20 - **rng_key** - random number generator seed used for the iteration.
21 """
22
23
24 SVIRunResult = namedtuple('SVIRunResult', ['params', 'losses'])
25 """
26 A :func:`~collections.namedtuple` consisting of the following fields:
27 - **params** - the optimized parameters.
28 - **losses** - the losses collected at every step.
29 """
30
31
32 def _apply_loss_fn(loss_fn, rng_key, constrain_fn, model, guide,
33 args, kwargs, static_kwargs, params):
34 return loss_fn(rng_key, constrain_fn(params), model, guide, *args, **kwargs, **static_kwargs)
35
36
37 class SVI(object):
38 """
39 Stochastic Variational Inference given an ELBO loss objective.
40
41 **References**
42
43 1. *SVI Part I: An Introduction to Stochastic Variational Inference in Pyro*,
44 (http://pyro.ai/examples/svi_part_i.html)
45
46 **Example:**
47
48 .. doctest::
49
50 >>> from jax import random
51 >>> import jax.numpy as jnp
52 >>> import numpyro
53 >>> import numpyro.distributions as dist
54 >>> from numpyro.distributions import constraints
55 >>> from numpyro.infer import SVI, Trace_ELBO
56
57 >>> def model(data):
58 ... f = numpyro.sample("latent_fairness", dist.Beta(10, 10))
59 ... with numpyro.plate("N", data.shape[0]):
60 ... numpyro.sample("obs", dist.Bernoulli(f), obs=data)
61
62 >>> def guide(data):
63 ... alpha_q = numpyro.param("alpha_q", 15., constraint=constraints.positive)
64 ... beta_q = numpyro.param("beta_q", lambda rng_key: random.exponential(rng_key),
65 ... constraint=constraints.positive)
66 ... numpyro.sample("latent_fairness", dist.Beta(alpha_q, beta_q))
67
68 >>> data = jnp.concatenate([jnp.ones(6), jnp.zeros(4)])
69 >>> optimizer = numpyro.optim.Adam(step_size=0.0005)
70 >>> svi = SVI(model, guide, optimizer, loss=Trace_ELBO())
71 >>> svi_result = svi.run(random.PRNGKey(0), 2000, data)
72 >>> params = svi_result.params
73 >>> inferred_mean = params["alpha_q"] / (params["alpha_q"] + params["beta_q"])
74
75 :param model: Python callable with Pyro primitives for the model.
76 :param guide: Python callable with Pyro primitives for the guide
77 (recognition network).
78 :param optim: an instance of :class:`~numpyro.optim._NumpyroOptim`.
79 :param loss: ELBO loss, i.e. negative Evidence Lower Bound, to minimize.
80 :param static_kwargs: static arguments for the model / guide, i.e. arguments
81 that remain constant during fitting.
82 :return: tuple of `(init_fn, update_fn, evaluate)`.
83 """
84 def __init__(self, model, guide, optim, loss, **static_kwargs):
85 self.model = model
86 self.guide = guide
87 self.loss = loss
88 self.optim = optim
89 self.static_kwargs = static_kwargs
90 self.constrain_fn = None
91
92 def init(self, rng_key, *args, **kwargs):
93 """
94 Gets the initial SVI state.
95
96 :param jax.random.PRNGKey rng_key: random number generator seed.
97 :param args: arguments to the model / guide (these can possibly vary during
98 the course of fitting).
99 :param kwargs: keyword arguments to the model / guide (these can possibly vary
100 during the course of fitting).
101 :return: the initial :data:`SVIState`
102 """
103 rng_key, model_seed, guide_seed = random.split(rng_key, 3)
104 model_init = seed(self.model, model_seed)
105 guide_init = seed(self.guide, guide_seed)
106 guide_trace = trace(guide_init).get_trace(*args, **kwargs, **self.static_kwargs)
107 model_trace = trace(replay(model_init, guide_trace)).get_trace(*args, **kwargs, **self.static_kwargs)
108 params = {}
109 inv_transforms = {}
110 # NB: params in model_trace will be overwritten by params in guide_trace
111 for site in list(model_trace.values()) + list(guide_trace.values()):
112 if site['type'] == 'param':
113 constraint = site['kwargs'].pop('constraint', constraints.real)
114 transform = biject_to(constraint)
115 inv_transforms[site['name']] = transform
116 params[site['name']] = transform.inv(site['value'])
117
118 self.constrain_fn = partial(transform_fn, inv_transforms)
119 return SVIState(self.optim.init(params), rng_key)
120
121 def get_params(self, svi_state):
122 """
123 Gets values at `param` sites of the `model` and `guide`.
124
125 :param svi_state: current state of SVI.
126 :return: the corresponding parameters
127 """
128 params = self.constrain_fn(self.optim.get_params(svi_state.optim_state))
129 return params
130
131 def update(self, svi_state, *args, **kwargs):
132 """
133 Take a single step of SVI (possibly on a batch / minibatch of data),
134 using the optimizer.
135
136 :param svi_state: current state of SVI.
137 :param args: arguments to the model / guide (these can possibly vary during
138 the course of fitting).
139 :param kwargs: keyword arguments to the model / guide (these can possibly vary
140 during the course of fitting).
141 :return: tuple of `(svi_state, loss)`.
142 """
143 rng_key, rng_key_step = random.split(svi_state.rng_key)
144 loss_fn = partial(_apply_loss_fn, self.loss.loss, rng_key_step, self.constrain_fn, self.model,
145 self.guide, args, kwargs, self.static_kwargs)
146 loss_val, optim_state = self.optim.eval_and_update(loss_fn, svi_state.optim_state)
147 return SVIState(optim_state, rng_key), loss_val
148
149 def run(self, rng_key, num_steps, *args, progress_bar=True, **kwargs):
150 """
151 (EXPERIMENTAL INTERFACE) Run SVI with `num_steps` iterations, then return
152 the optimized parameters and the stacked losses at every step. If `num_steps`
153 is large, setting `progress_bar=False` can make the run faster.
154
155 .. note:: For a complex training process (e.g. the one requires early stopping,
156 epoch training, varying args/kwargs,...), we recommend to use the more
157 flexible methods :meth:`init`, :meth:`update`, :meth:`evaluate` to
158 customize your training procedure.
159
160 :param jax.random.PRNGKey rng_key: random number generator seed.
161 :param int num_steps: the number of optimization steps.
162 :param args: arguments to the model / guide
163 :param bool progress_bar: Whether to enable progress bar updates. Defaults to
164 ``True``.
165 :param kwargs: keyword arguments to the model / guide
166 :return: a namedtuple with fields `params` and `losses` where `params`
167 holds the optimized values at :class:`numpyro.param` sites,
168 and `losses` is the collected loss during the process.
169 :rtype: SVIRunResult
170 """
171 def body_fn(svi_state, carry):
172 svi_state, loss = self.update(svi_state, *args, **kwargs)
173 return svi_state, loss
174
175 svi_state = self.init(rng_key, *args, **kwargs)
176 if progress_bar:
177 losses = []
178 with tqdm.trange(1, num_steps + 1) as t:
179 batch = num_steps // 20
180 for i in t:
181 svi_state, loss = jit(body_fn)(svi_state, None)
182 if i % batch == 0:
183 avg_loss = sum(losses[i-batch:]) / batch
184 t.set_postfix_str("init loss: {:.4f}, avg. loss [{}-{}]: {:.4f}"
185 .format(losses[0], i - batch + 1, i, avg_loss),
186 refresh=False)
187 losses.append(loss)
188 losses = jnp.stack(losses)
189 else:
190 svi_state, losses = lax.scan(body_fn, svi_state, None, length=num_steps)
191
192 return SVIRunResult(self.get_params(svi_state), losses)
193
194 def evaluate(self, svi_state, *args, **kwargs):
195 """
196 Take a single step of SVI (possibly on a batch / minibatch of data).
197
198 :param svi_state: current state of SVI.
199 :param args: arguments to the model / guide (these can possibly vary during
200 the course of fitting).
201 :param kwargs: keyword arguments to the model / guide.
202 :return: evaluate ELBO loss given the current parameter values
203 (held within `svi_state.optim_state`).
204 """
205 # we split to have the same seed as `update_fn` given an svi_state
206 _, rng_key_eval = random.split(svi_state.rng_key)
207 params = self.get_params(svi_state)
208 return self.loss.loss(rng_key_eval, params, self.model, self.guide,
209 *args, **kwargs, **self.static_kwargs)
210
[end of numpyro/infer/svi.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/numpyro/infer/svi.py b/numpyro/infer/svi.py
--- a/numpyro/infer/svi.py
+++ b/numpyro/infer/svi.py
@@ -176,15 +176,15 @@
if progress_bar:
losses = []
with tqdm.trange(1, num_steps + 1) as t:
- batch = num_steps // 20
+ batch = max(num_steps // 20, 1)
for i in t:
svi_state, loss = jit(body_fn)(svi_state, None)
+ losses.append(loss)
if i % batch == 0:
avg_loss = sum(losses[i-batch:]) / batch
t.set_postfix_str("init loss: {:.4f}, avg. loss [{}-{}]: {:.4f}"
.format(losses[0], i - batch + 1, i, avg_loss),
refresh=False)
- losses.append(loss)
losses = jnp.stack(losses)
else:
svi_state, losses = lax.scan(body_fn, svi_state, None, length=num_steps)
| {"golden_diff": "diff --git a/numpyro/infer/svi.py b/numpyro/infer/svi.py\n--- a/numpyro/infer/svi.py\n+++ b/numpyro/infer/svi.py\n@@ -176,15 +176,15 @@\n if progress_bar:\n losses = []\n with tqdm.trange(1, num_steps + 1) as t:\n- batch = num_steps // 20\n+ batch = max(num_steps // 20, 1)\n for i in t:\n svi_state, loss = jit(body_fn)(svi_state, None)\n+ losses.append(loss)\n if i % batch == 0:\n avg_loss = sum(losses[i-batch:]) / batch\n t.set_postfix_str(\"init loss: {:.4f}, avg. loss [{}-{}]: {:.4f}\"\n .format(losses[0], i - batch + 1, i, avg_loss),\n refresh=False)\n- losses.append(loss)\n losses = jnp.stack(losses)\n else:\n svi_state, losses = lax.scan(body_fn, svi_state, None, length=num_steps)\n", "issue": "ZeroDivisionError when running SVI with num_steps < 20\nReproducible code\r\n```python\r\nimport jax\r\nimport numpyro\r\n\r\ndef model():\r\n pass\r\n\r\ndef guide():\r\n pass\r\n\r\nsvi = numpyro.infer.SVI(model, guide, numpyro.optim.Adam(1), numpyro.infer.Trace_ELBO())\r\nsvi.run(jax.random.PRNGKey(0), 10)\r\n```\r\n\r\nProposed fix: setting `batch = max(num_steps // 20, 1)` in [SVI.run](https://github.com/pyro-ppl/numpyro/blob/aee945895dff9ffb6a12637a9bfbddb1d8c20363/numpyro/infer/svi.py#L179) to guarantee that `batch` is positive\n", "before_files": [{"content": "# Copyright Contributors to the Pyro project.\n# SPDX-License-Identifier: Apache-2.0\n\nfrom functools import namedtuple, partial\n\nimport tqdm\n\nfrom jax import jit, lax, random\nimport jax.numpy as jnp\n\nfrom numpyro.distributions import constraints\nfrom numpyro.distributions.transforms import biject_to\nfrom numpyro.handlers import replay, seed, trace\nfrom numpyro.infer.util import transform_fn\n\nSVIState = namedtuple('SVIState', ['optim_state', 'rng_key'])\n\"\"\"\nA :func:`~collections.namedtuple` consisting of the following fields:\n - **optim_state** - current optimizer's state.\n - **rng_key** - random number generator seed used for the iteration.\n\"\"\"\n\n\nSVIRunResult = namedtuple('SVIRunResult', ['params', 'losses'])\n\"\"\"\nA :func:`~collections.namedtuple` consisting of the following fields:\n - **params** - the optimized parameters.\n - **losses** - the losses collected at every step.\n\"\"\"\n\n\ndef _apply_loss_fn(loss_fn, rng_key, constrain_fn, model, guide,\n args, kwargs, static_kwargs, params):\n return loss_fn(rng_key, constrain_fn(params), model, guide, *args, **kwargs, **static_kwargs)\n\n\nclass SVI(object):\n \"\"\"\n Stochastic Variational Inference given an ELBO loss objective.\n\n **References**\n\n 1. *SVI Part I: An Introduction to Stochastic Variational Inference in Pyro*,\n (http://pyro.ai/examples/svi_part_i.html)\n\n **Example:**\n\n .. doctest::\n\n >>> from jax import random\n >>> import jax.numpy as jnp\n >>> import numpyro\n >>> import numpyro.distributions as dist\n >>> from numpyro.distributions import constraints\n >>> from numpyro.infer import SVI, Trace_ELBO\n\n >>> def model(data):\n ... f = numpyro.sample(\"latent_fairness\", dist.Beta(10, 10))\n ... with numpyro.plate(\"N\", data.shape[0]):\n ... numpyro.sample(\"obs\", dist.Bernoulli(f), obs=data)\n\n >>> def guide(data):\n ... alpha_q = numpyro.param(\"alpha_q\", 15., constraint=constraints.positive)\n ... beta_q = numpyro.param(\"beta_q\", lambda rng_key: random.exponential(rng_key),\n ... constraint=constraints.positive)\n ... 
numpyro.sample(\"latent_fairness\", dist.Beta(alpha_q, beta_q))\n\n >>> data = jnp.concatenate([jnp.ones(6), jnp.zeros(4)])\n >>> optimizer = numpyro.optim.Adam(step_size=0.0005)\n >>> svi = SVI(model, guide, optimizer, loss=Trace_ELBO())\n >>> svi_result = svi.run(random.PRNGKey(0), 2000, data)\n >>> params = svi_result.params\n >>> inferred_mean = params[\"alpha_q\"] / (params[\"alpha_q\"] + params[\"beta_q\"])\n\n :param model: Python callable with Pyro primitives for the model.\n :param guide: Python callable with Pyro primitives for the guide\n (recognition network).\n :param optim: an instance of :class:`~numpyro.optim._NumpyroOptim`.\n :param loss: ELBO loss, i.e. negative Evidence Lower Bound, to minimize.\n :param static_kwargs: static arguments for the model / guide, i.e. arguments\n that remain constant during fitting.\n :return: tuple of `(init_fn, update_fn, evaluate)`.\n \"\"\"\n def __init__(self, model, guide, optim, loss, **static_kwargs):\n self.model = model\n self.guide = guide\n self.loss = loss\n self.optim = optim\n self.static_kwargs = static_kwargs\n self.constrain_fn = None\n\n def init(self, rng_key, *args, **kwargs):\n \"\"\"\n Gets the initial SVI state.\n\n :param jax.random.PRNGKey rng_key: random number generator seed.\n :param args: arguments to the model / guide (these can possibly vary during\n the course of fitting).\n :param kwargs: keyword arguments to the model / guide (these can possibly vary\n during the course of fitting).\n :return: the initial :data:`SVIState`\n \"\"\"\n rng_key, model_seed, guide_seed = random.split(rng_key, 3)\n model_init = seed(self.model, model_seed)\n guide_init = seed(self.guide, guide_seed)\n guide_trace = trace(guide_init).get_trace(*args, **kwargs, **self.static_kwargs)\n model_trace = trace(replay(model_init, guide_trace)).get_trace(*args, **kwargs, **self.static_kwargs)\n params = {}\n inv_transforms = {}\n # NB: params in model_trace will be overwritten by params in guide_trace\n for site in list(model_trace.values()) + list(guide_trace.values()):\n if site['type'] == 'param':\n constraint = site['kwargs'].pop('constraint', constraints.real)\n transform = biject_to(constraint)\n inv_transforms[site['name']] = transform\n params[site['name']] = transform.inv(site['value'])\n\n self.constrain_fn = partial(transform_fn, inv_transforms)\n return SVIState(self.optim.init(params), rng_key)\n\n def get_params(self, svi_state):\n \"\"\"\n Gets values at `param` sites of the `model` and `guide`.\n\n :param svi_state: current state of SVI.\n :return: the corresponding parameters\n \"\"\"\n params = self.constrain_fn(self.optim.get_params(svi_state.optim_state))\n return params\n\n def update(self, svi_state, *args, **kwargs):\n \"\"\"\n Take a single step of SVI (possibly on a batch / minibatch of data),\n using the optimizer.\n\n :param svi_state: current state of SVI.\n :param args: arguments to the model / guide (these can possibly vary during\n the course of fitting).\n :param kwargs: keyword arguments to the model / guide (these can possibly vary\n during the course of fitting).\n :return: tuple of `(svi_state, loss)`.\n \"\"\"\n rng_key, rng_key_step = random.split(svi_state.rng_key)\n loss_fn = partial(_apply_loss_fn, self.loss.loss, rng_key_step, self.constrain_fn, self.model,\n self.guide, args, kwargs, self.static_kwargs)\n loss_val, optim_state = self.optim.eval_and_update(loss_fn, svi_state.optim_state)\n return SVIState(optim_state, rng_key), loss_val\n\n def run(self, rng_key, num_steps, *args, progress_bar=True, 
**kwargs):\n \"\"\"\n (EXPERIMENTAL INTERFACE) Run SVI with `num_steps` iterations, then return\n the optimized parameters and the stacked losses at every step. If `num_steps`\n is large, setting `progress_bar=False` can make the run faster.\n\n .. note:: For a complex training process (e.g. the one requires early stopping,\n epoch training, varying args/kwargs,...), we recommend to use the more\n flexible methods :meth:`init`, :meth:`update`, :meth:`evaluate` to\n customize your training procedure.\n\n :param jax.random.PRNGKey rng_key: random number generator seed.\n :param int num_steps: the number of optimization steps.\n :param args: arguments to the model / guide\n :param bool progress_bar: Whether to enable progress bar updates. Defaults to\n ``True``.\n :param kwargs: keyword arguments to the model / guide\n :return: a namedtuple with fields `params` and `losses` where `params`\n holds the optimized values at :class:`numpyro.param` sites,\n and `losses` is the collected loss during the process.\n :rtype: SVIRunResult\n \"\"\"\n def body_fn(svi_state, carry):\n svi_state, loss = self.update(svi_state, *args, **kwargs)\n return svi_state, loss\n\n svi_state = self.init(rng_key, *args, **kwargs)\n if progress_bar:\n losses = []\n with tqdm.trange(1, num_steps + 1) as t:\n batch = num_steps // 20\n for i in t:\n svi_state, loss = jit(body_fn)(svi_state, None)\n if i % batch == 0:\n avg_loss = sum(losses[i-batch:]) / batch\n t.set_postfix_str(\"init loss: {:.4f}, avg. loss [{}-{}]: {:.4f}\"\n .format(losses[0], i - batch + 1, i, avg_loss),\n refresh=False)\n losses.append(loss)\n losses = jnp.stack(losses)\n else:\n svi_state, losses = lax.scan(body_fn, svi_state, None, length=num_steps)\n\n return SVIRunResult(self.get_params(svi_state), losses)\n\n def evaluate(self, svi_state, *args, **kwargs):\n \"\"\"\n Take a single step of SVI (possibly on a batch / minibatch of data).\n\n :param svi_state: current state of SVI.\n :param args: arguments to the model / guide (these can possibly vary during\n the course of fitting).\n :param kwargs: keyword arguments to the model / guide.\n :return: evaluate ELBO loss given the current parameter values\n (held within `svi_state.optim_state`).\n \"\"\"\n # we split to have the same seed as `update_fn` given an svi_state\n _, rng_key_eval = random.split(svi_state.rng_key)\n params = self.get_params(svi_state)\n return self.loss.loss(rng_key_eval, params, self.model, self.guide,\n *args, **kwargs, **self.static_kwargs)\n", "path": "numpyro/infer/svi.py"}]} | 3,423 | 254 |
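The root cause in the record above is easy to reproduce in isolation: for `num_steps < 20` the progress-bar batch size floors to zero, so the `i % batch` check divides by zero. A minimal sketch of the failure mode and of the `max(..., 1)` guard from the golden diff, with the loop body reduced to the reporting check itself:

```python
num_steps = 10

batch = num_steps // 20          # == 0 when num_steps < 20
# `i % batch` would raise ZeroDivisionError on the first iteration.

batch = max(num_steps // 20, 1)  # guarded as in the patch: always >= 1
losses = []
for i in range(1, num_steps + 1):
    losses.append(float(i))      # stand-in for the real SVI loss value
    if i % batch == 0:           # safe now: reports every `batch` steps
        avg_loss = sum(losses[i - batch:]) / batch
```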
gh_patches_debug_25352 | rasdani/github-patches | git_diff | OpenCTI-Platform__connectors-975 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
[VXVault] Connector imports garbage data (HTML tags and blank events)
## Description
The VXVault connector is importing garbage data as indicators and observables, including events like `<pre>`, </pre>` and blank entities.
## Environment
1. OS (where OpenCTI server runs): Docker on Ubuntu 20
2. OpenCTI version: 5.5.2
3. OpenCTI client: connectors/python
## Expected Output
Connector should download the URLs from the VXVault threat feed here: http://vxvault.net/URL_List.php and import them into OpenCTI as Observables and Indicators.
## Actual Output
The connector does import the URLs, but it fails to parse the page correctly and imports some of the HTML tags as well by accident, leading to garbage data like Indicators with names and values as just `<pre>`
## Additional information
I discussed this and a couple of other related issues with the VXVault connector + data with Samuel Hassine in the slack channel: https://filigran-community.slack.com/archives/CHZC2D38C/p1673599524232109
There are three related issues - I will raise a GH issue for two of them and then submit a proposed PR to fix both:
1 - Connector imports garbage data (HTML tags)
2 - Connector leaves trailing white-space at the end of Indicator/Observable name and value (Raised as #974)
3 - VXVault entities do not show up in bulk search, which is caused by issue 2, as bulk-search only does an exact match currently and can't match on the trailing `\n` characters. The developers are aware of this, so I will not raise an issue for it.
</issue>
<code>
[start of external-import/vxvault/src/vxvault.py]
1 import os
2 import ssl
3 import sys
4 import time
5 import urllib.request
6 from datetime import datetime
7
8 import certifi
9 import yaml
10 from pycti import OpenCTIConnectorHelper, get_config_variable
11 from stix2 import TLP_WHITE, URL, Bundle, ExternalReference
12
13
14 class VXVault:
15 def __init__(self):
16 # Instantiate the connector helper from config
17 config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/config.yml"
18 config = (
19 yaml.load(open(config_file_path), Loader=yaml.FullLoader)
20 if os.path.isfile(config_file_path)
21 else {}
22 )
23 self.helper = OpenCTIConnectorHelper(config)
24 # Extra config
25 self.vxvault_url = get_config_variable(
26 "VXVAULT_URL", ["vxvault", "url"], config
27 )
28 self.vxvault_interval = get_config_variable(
29 "VXVAULT_INTERVAL", ["vxvault", "interval"], config, True
30 )
31 self.create_indicators = get_config_variable(
32 "VXVAULT_CREATE_INDICATORS",
33 ["vxvault", "create_indicators"],
34 config,
35 False,
36 True,
37 )
38 self.update_existing_data = get_config_variable(
39 "CONNECTOR_UPDATE_EXISTING_DATA",
40 ["connector", "update_existing_data"],
41 config,
42 )
43 self.identity = self.helper.api.identity.create(
44 type="Organization",
45 name="VX Vault",
46 description="VX Vault is providing URLs of potential malicious payload.",
47 )
48
49 def get_interval(self):
50 return int(self.vxvault_interval) * 60 * 60 * 24
51
52 def next_run(self, seconds):
53 return
54
55 def run(self):
56 self.helper.log_info("Fetching VXVault dataset...")
57 while True:
58 try:
59 # Get the current timestamp and check
60 timestamp = int(time.time())
61 current_state = self.helper.get_state()
62 if current_state is not None and "last_run" in current_state:
63 last_run = current_state["last_run"]
64 self.helper.log_info(
65 "Connector last run: "
66 + datetime.utcfromtimestamp(last_run).strftime(
67 "%Y-%m-%d %H:%M:%S"
68 )
69 )
70 else:
71 last_run = None
72 self.helper.log_info("Connector has never run")
73 # If the last_run is more than interval-1 day
74 if last_run is None or (
75 (timestamp - last_run)
76 > ((int(self.vxvault_interval) - 1) * 60 * 60 * 24)
77 ):
78 self.helper.log_info("Connector will run!")
79 now = datetime.utcfromtimestamp(timestamp)
80 friendly_name = "VXVault run @ " + now.strftime("%Y-%m-%d %H:%M:%S")
81 work_id = self.helper.api.work.initiate_work(
82 self.helper.connect_id, friendly_name
83 )
84 try:
85 response = urllib.request.urlopen(
86 self.vxvault_url,
87 context=ssl.create_default_context(cafile=certifi.where()),
88 )
89 image = response.read()
90 with open(
91 os.path.dirname(os.path.abspath(__file__)) + "/data.txt",
92 "wb",
93 ) as file:
94 file.write(image)
95 count = 0
96 bundle_objects = []
97 with open(
98 os.path.dirname(os.path.abspath(__file__)) + "/data.txt"
99 ) as fp:
100 for line in fp:
101 count += 1
102 if count <= 3:
103 continue
104 external_reference = ExternalReference(
105 source_name="VX Vault",
106 url="http://vxvault.net",
107 description="VX Vault repository URL",
108 )
109 stix_observable = URL(
110 value=line,
111 object_marking_refs=[TLP_WHITE],
112 custom_properties={
113 "description": "VX Vault URL",
114 "x_opencti_score": 80,
115 "created_by_ref": self.identity["standard_id"],
116 "x_opencti_create_indicator": self.create_indicators,
117 "external_references": [external_reference],
118 },
119 )
120 bundle_objects.append(stix_observable)
121 bundle = Bundle(
122 objects=bundle_objects, allow_custom=True
123 ).serialize()
124 self.helper.send_stix2_bundle(
125 bundle,
126 update=self.update_existing_data,
127 work_id=work_id,
128 )
129 if os.path.exists(
130 os.path.dirname(os.path.abspath(__file__)) + "/data.txt"
131 ):
132 os.remove(
133 os.path.dirname(os.path.abspath(__file__)) + "/data.txt"
134 )
135 except Exception as e:
136 self.helper.log_error(str(e))
137 # Store the current timestamp as a last run
138 message = "Connector successfully run, storing last_run as " + str(
139 timestamp
140 )
141 self.helper.log_info(message)
142 self.helper.set_state({"last_run": timestamp})
143 self.helper.api.work.to_processed(work_id, message)
144 self.helper.log_info(
145 "Last_run stored, next run in: "
146 + str(round(self.get_interval() / 60 / 60 / 24, 2))
147 + " days"
148 )
149 else:
150 new_interval = self.get_interval() - (timestamp - last_run)
151 self.helper.log_info(
152 "Connector will not run, next run in: "
153 + str(round(new_interval / 60 / 60 / 24, 2))
154 + " days"
155 )
156 except (KeyboardInterrupt, SystemExit):
157 self.helper.log_info("Connector stop")
158 sys.exit(0)
159 except Exception as e:
160 self.helper.log_error(str(e))
161
162 if self.helper.connect_run_and_terminate:
163 self.helper.log_info("Connector stop")
164 sys.exit(0)
165
166 time.sleep(60)
167
168
169 if __name__ == "__main__":
170 try:
171 VXVaultConnector = VXVault()
172 VXVaultConnector.run()
173 except Exception as e:
174 print(e)
175 time.sleep(10)
176 sys.exit(0)
177
[end of external-import/vxvault/src/vxvault.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/external-import/vxvault/src/vxvault.py b/external-import/vxvault/src/vxvault.py
--- a/external-import/vxvault/src/vxvault.py
+++ b/external-import/vxvault/src/vxvault.py
@@ -1,4 +1,5 @@
import os
+import re
import ssl
import sys
import time
@@ -7,6 +8,7 @@
import certifi
import yaml
+
from pycti import OpenCTIConnectorHelper, get_config_variable
from stix2 import TLP_WHITE, URL, Bundle, ExternalReference
@@ -101,6 +103,13 @@
count += 1
if count <= 3:
continue
+ line=line.strip()
+ matchHtmlTag = re.search(r'^<\/?\w+>', line)
+ if matchHtmlTag:
+ continue
+ matchBlankLine = re.search(r'^\s*$', line)
+ if matchBlankLine:
+ continue
external_reference = ExternalReference(
source_name="VX Vault",
url="http://vxvault.net",
| {"golden_diff": "diff --git a/external-import/vxvault/src/vxvault.py b/external-import/vxvault/src/vxvault.py\n--- a/external-import/vxvault/src/vxvault.py\n+++ b/external-import/vxvault/src/vxvault.py\n@@ -1,4 +1,5 @@\n import os\n+import re\n import ssl\n import sys\n import time\n@@ -7,6 +8,7 @@\n \n import certifi\n import yaml\n+\n from pycti import OpenCTIConnectorHelper, get_config_variable\n from stix2 import TLP_WHITE, URL, Bundle, ExternalReference\n \n@@ -101,6 +103,13 @@\n count += 1\n if count <= 3:\n continue\n+ line=line.strip()\n+ matchHtmlTag = re.search(r'^<\\/?\\w+>', line)\n+ if matchHtmlTag:\n+ continue\n+ matchBlankLine = re.search(r'^\\s*$', line)\n+ if matchBlankLine:\n+ continue\n external_reference = ExternalReference(\n source_name=\"VX Vault\",\n url=\"http://vxvault.net\",\n", "issue": "[VXVault] Connector imports garbage data (HTML tags and blank events)\n## Description\r\n\r\nThe VXVault connector is importing garbage data as indicators and observables, including events like `<pre>`, </pre>` and blank entities. \r\n\r\n## Environment\r\n\r\n1. OS (where OpenCTI server runs): Docker on Ubuntu 20 \r\n2. OpenCTI version: 5.5.2\r\n3. OpenCTI client: connectors/python\r\n\r\n## Expected Output\r\nConnector should download the URLs from the VXVault threat feed here: http://vxvault.net/URL_List.php and import them into OpenCTI as Observables and Indicators.\r\n\r\n## Actual Output\r\nThe connector does import the URLs, but it fails to parse the page correctly and imports some of the HTML tags as well by accident, leading to garbage data like Indicators with names and values as just `<pre>`\r\n\r\n## Additional information\r\nI discussed this and a couple of other related issues with the VXVault connector + data with Samuel Hassine in the slack channel: https://filigran-community.slack.com/archives/CHZC2D38C/p1673599524232109\r\n\r\nThere are three related issues - I will raise a GH issue for two of them and then submit a proposed PR to fix both:\r\n1 - Connector imports garbage data (HTML tags)\r\n2 - Connector leaves trailing white-space at the end of Indicator/Observable name and value (Raised as #974)\r\n3 - VXVault entities do not show up in bulk search, which is caused by issue 2, as bulk-search only does an exact match currently and can't match on the trailing `\\n` characters. 
The developers are aware of this, so I will not raise an issue for it.\n", "before_files": [{"content": "import os\nimport ssl\nimport sys\nimport time\nimport urllib.request\nfrom datetime import datetime\n\nimport certifi\nimport yaml\nfrom pycti import OpenCTIConnectorHelper, get_config_variable\nfrom stix2 import TLP_WHITE, URL, Bundle, ExternalReference\n\n\nclass VXVault:\n def __init__(self):\n # Instantiate the connector helper from config\n config_file_path = os.path.dirname(os.path.abspath(__file__)) + \"/config.yml\"\n config = (\n yaml.load(open(config_file_path), Loader=yaml.FullLoader)\n if os.path.isfile(config_file_path)\n else {}\n )\n self.helper = OpenCTIConnectorHelper(config)\n # Extra config\n self.vxvault_url = get_config_variable(\n \"VXVAULT_URL\", [\"vxvault\", \"url\"], config\n )\n self.vxvault_interval = get_config_variable(\n \"VXVAULT_INTERVAL\", [\"vxvault\", \"interval\"], config, True\n )\n self.create_indicators = get_config_variable(\n \"VXVAULT_CREATE_INDICATORS\",\n [\"vxvault\", \"create_indicators\"],\n config,\n False,\n True,\n )\n self.update_existing_data = get_config_variable(\n \"CONNECTOR_UPDATE_EXISTING_DATA\",\n [\"connector\", \"update_existing_data\"],\n config,\n )\n self.identity = self.helper.api.identity.create(\n type=\"Organization\",\n name=\"VX Vault\",\n description=\"VX Vault is providing URLs of potential malicious payload.\",\n )\n\n def get_interval(self):\n return int(self.vxvault_interval) * 60 * 60 * 24\n\n def next_run(self, seconds):\n return\n\n def run(self):\n self.helper.log_info(\"Fetching VXVault dataset...\")\n while True:\n try:\n # Get the current timestamp and check\n timestamp = int(time.time())\n current_state = self.helper.get_state()\n if current_state is not None and \"last_run\" in current_state:\n last_run = current_state[\"last_run\"]\n self.helper.log_info(\n \"Connector last run: \"\n + datetime.utcfromtimestamp(last_run).strftime(\n \"%Y-%m-%d %H:%M:%S\"\n )\n )\n else:\n last_run = None\n self.helper.log_info(\"Connector has never run\")\n # If the last_run is more than interval-1 day\n if last_run is None or (\n (timestamp - last_run)\n > ((int(self.vxvault_interval) - 1) * 60 * 60 * 24)\n ):\n self.helper.log_info(\"Connector will run!\")\n now = datetime.utcfromtimestamp(timestamp)\n friendly_name = \"VXVault run @ \" + now.strftime(\"%Y-%m-%d %H:%M:%S\")\n work_id = self.helper.api.work.initiate_work(\n self.helper.connect_id, friendly_name\n )\n try:\n response = urllib.request.urlopen(\n self.vxvault_url,\n context=ssl.create_default_context(cafile=certifi.where()),\n )\n image = response.read()\n with open(\n os.path.dirname(os.path.abspath(__file__)) + \"/data.txt\",\n \"wb\",\n ) as file:\n file.write(image)\n count = 0\n bundle_objects = []\n with open(\n os.path.dirname(os.path.abspath(__file__)) + \"/data.txt\"\n ) as fp:\n for line in fp:\n count += 1\n if count <= 3:\n continue\n external_reference = ExternalReference(\n source_name=\"VX Vault\",\n url=\"http://vxvault.net\",\n description=\"VX Vault repository URL\",\n )\n stix_observable = URL(\n value=line,\n object_marking_refs=[TLP_WHITE],\n custom_properties={\n \"description\": \"VX Vault URL\",\n \"x_opencti_score\": 80,\n \"created_by_ref\": self.identity[\"standard_id\"],\n \"x_opencti_create_indicator\": self.create_indicators,\n \"external_references\": [external_reference],\n },\n )\n bundle_objects.append(stix_observable)\n bundle = Bundle(\n objects=bundle_objects, allow_custom=True\n ).serialize()\n 
self.helper.send_stix2_bundle(\n bundle,\n update=self.update_existing_data,\n work_id=work_id,\n )\n if os.path.exists(\n os.path.dirname(os.path.abspath(__file__)) + \"/data.txt\"\n ):\n os.remove(\n os.path.dirname(os.path.abspath(__file__)) + \"/data.txt\"\n )\n except Exception as e:\n self.helper.log_error(str(e))\n # Store the current timestamp as a last run\n message = \"Connector successfully run, storing last_run as \" + str(\n timestamp\n )\n self.helper.log_info(message)\n self.helper.set_state({\"last_run\": timestamp})\n self.helper.api.work.to_processed(work_id, message)\n self.helper.log_info(\n \"Last_run stored, next run in: \"\n + str(round(self.get_interval() / 60 / 60 / 24, 2))\n + \" days\"\n )\n else:\n new_interval = self.get_interval() - (timestamp - last_run)\n self.helper.log_info(\n \"Connector will not run, next run in: \"\n + str(round(new_interval / 60 / 60 / 24, 2))\n + \" days\"\n )\n except (KeyboardInterrupt, SystemExit):\n self.helper.log_info(\"Connector stop\")\n sys.exit(0)\n except Exception as e:\n self.helper.log_error(str(e))\n\n if self.helper.connect_run_and_terminate:\n self.helper.log_info(\"Connector stop\")\n sys.exit(0)\n\n time.sleep(60)\n\n\nif __name__ == \"__main__\":\n try:\n VXVaultConnector = VXVault()\n VXVaultConnector.run()\n except Exception as e:\n print(e)\n time.sleep(10)\n sys.exit(0)\n", "path": "external-import/vxvault/src/vxvault.py"}]} | 2,621 | 248 |
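The patch in the record above filters the VXVault feed line by line: strip whitespace, skip bare HTML tags such as `<pre>`/`</pre>`, and skip blank lines. A self-contained sketch of that filtering, reusing the same regular expressions as the golden diff; the function name and sample data are illustrative.

```python
import re


def iter_feed_urls(lines):
    """Yield only real URLs from a VXVault-style URL_List.php dump."""
    for line in lines:
        line = line.strip()
        if re.search(r"^<\/?\w+>", line):  # bare opening/closing HTML tag
            continue
        if re.search(r"^\s*$", line):      # blank line
            continue
        yield line


sample = ["<pre>", "", "http://example.com/payload.exe", "</pre>"]
print(list(iter_feed_urls(sample)))  # ['http://example.com/payload.exe']
```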
gh_patches_debug_31177 | rasdani/github-patches | git_diff | pypi__warehouse-9341 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Reset password doesn't work if user doesn't have verified/primary email address.
**Describe the bug**
Password reset is not working. I get the email, follow the link, enter the password twice, submit the form, and get back a page with no error messages. But the page title is "Error processing form -- Log in".
**Expected behavior**
I expected a positive confirmation that my password was reset, and to be able to login with the new password. Neither one happens.
**To Reproduce**
1. Start at `https://pypi.org/account/login/`
2. Click "Forgot password?" link
3. Enter the email address associated with my PyPI account (gerg.ward at gmail dot com)
4. Check my inbox and the email is already there.
5. Click the link: `https://pypi.org/account/reset-password/?token=.eJw1...`
6. Generate a new password outside my browser.
7. Copy/paste the new password into both password fields; text changes to "Passwords match" and the "Reset password" button is enabled.
8. Click "Reset password"
9. Receive the same "Reset password" form again, but with positive confirmation and no error messages. Only the page title gives a clue: "Error processing form – Reset your password · PyPI". When I try to login with the new password (again, copy/pasted), it does not work.
I'll attach HTML and screenshot.
**My Platform**
Firefox 87.0 on Ubuntu 20.04.2.
Same behaviour with Chromium.
No HTTP proxy.
**Additional context**
</issue>
<code>
[start of warehouse/admin/views/users.py]
1 # Licensed under the Apache License, Version 2.0 (the "License");
2 # you may not use this file except in compliance with the License.
3 # You may obtain a copy of the License at
4 #
5 # http://www.apache.org/licenses/LICENSE-2.0
6 #
7 # Unless required by applicable law or agreed to in writing, software
8 # distributed under the License is distributed on an "AS IS" BASIS,
9 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 # See the License for the specific language governing permissions and
11 # limitations under the License.
12
13 import shlex
14
15 import wtforms
16 import wtforms.fields.html5
17
18 from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage
19 from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther
20 from pyramid.view import view_config
21 from sqlalchemy import or_
22 from sqlalchemy.orm import joinedload
23 from sqlalchemy.orm.exc import NoResultFound
24
25 from warehouse import forms
26 from warehouse.accounts.interfaces import IUserService
27 from warehouse.accounts.models import DisableReason, Email, User
28 from warehouse.email import send_password_compromised_email
29 from warehouse.packaging.models import JournalEntry, Project, Role
30 from warehouse.utils.paginate import paginate_url_factory
31
32
33 @view_config(
34 route_name="admin.user.list",
35 renderer="admin/users/list.html",
36 permission="moderator",
37 uses_session=True,
38 )
39 def user_list(request):
40 q = request.params.get("q")
41
42 try:
43 page_num = int(request.params.get("page", 1))
44 except ValueError:
45 raise HTTPBadRequest("'page' must be an integer.") from None
46
47 users_query = request.db.query(User).order_by(User.username)
48
49 if q:
50 terms = shlex.split(q)
51
52 filters = []
53 for term in terms:
54 if ":" in term:
55 field, value = term.split(":", 1)
56 if field.lower() == "email":
57 filters.append(User.emails.any(Email.email.ilike(value)))
58 else:
59 filters.append(User.username.ilike(term))
60
61 users_query = users_query.filter(or_(*filters))
62
63 users = SQLAlchemyORMPage(
64 users_query,
65 page=page_num,
66 items_per_page=25,
67 url_maker=paginate_url_factory(request),
68 )
69
70 return {"users": users, "query": q}
71
72
73 class EmailForm(forms.Form):
74
75 email = wtforms.fields.html5.EmailField(
76 validators=[wtforms.validators.DataRequired()]
77 )
78 primary = wtforms.fields.BooleanField()
79 verified = wtforms.fields.BooleanField()
80 public = wtforms.fields.BooleanField()
81
82
83 class UserForm(forms.Form):
84
85 name = wtforms.StringField(
86 validators=[wtforms.validators.Optional(), wtforms.validators.Length(max=100)]
87 )
88
89 is_active = wtforms.fields.BooleanField()
90 is_superuser = wtforms.fields.BooleanField()
91 is_moderator = wtforms.fields.BooleanField()
92 is_psf_staff = wtforms.fields.BooleanField()
93
94 prohibit_password_reset = wtforms.fields.BooleanField()
95
96 emails = wtforms.fields.FieldList(wtforms.fields.FormField(EmailForm))
97
98
99 @view_config(
100 route_name="admin.user.detail",
101 renderer="admin/users/detail.html",
102 permission="moderator",
103 request_method="GET",
104 uses_session=True,
105 require_csrf=True,
106 require_methods=False,
107 )
108 @view_config(
109 route_name="admin.user.detail",
110 renderer="admin/users/detail.html",
111 permission="admin",
112 request_method="POST",
113 uses_session=True,
114 require_csrf=True,
115 require_methods=False,
116 )
117 def user_detail(request):
118 try:
119 user = (
120 request.db.query(User).filter(User.id == request.matchdict["user_id"]).one()
121 )
122 except NoResultFound:
123 raise HTTPNotFound
124
125 roles = (
126 request.db.query(Role)
127 .join(User)
128 .filter(Role.user == user)
129 .order_by(Role.role_name, Role.project_id)
130 .all()
131 )
132
133 form = UserForm(request.POST if request.method == "POST" else None, user)
134
135 if request.method == "POST" and form.validate():
136 form.populate_obj(user)
137 return HTTPSeeOther(location=request.current_route_path())
138
139 return {"user": user, "form": form, "roles": roles, "add_email_form": EmailForm()}
140
141
142 @view_config(
143 route_name="admin.user.add_email",
144 require_methods=["POST"],
145 permission="admin",
146 uses_session=True,
147 require_csrf=True,
148 )
149 def user_add_email(request):
150 user = request.db.query(User).get(request.matchdict["user_id"])
151 form = EmailForm(request.POST)
152
153 if form.validate():
154 email = Email(
155 email=form.email.data,
156 user=user,
157 primary=form.primary.data,
158 verified=form.verified.data,
159 public=form.public.data,
160 )
161 request.db.add(email)
162 request.session.flash(
163 f"Added email for user {user.username!r}", queue="success"
164 )
165
166 return HTTPSeeOther(request.route_path("admin.user.detail", user_id=user.id))
167
168
169 @view_config(
170 route_name="admin.user.delete",
171 require_methods=["POST"],
172 permission="admin",
173 uses_session=True,
174 require_csrf=True,
175 )
176 def user_delete(request):
177 user = request.db.query(User).get(request.matchdict["user_id"])
178
179 if user.username != request.params.get("username"):
180 request.session.flash("Wrong confirmation input", queue="error")
181 return HTTPSeeOther(request.route_path("admin.user.detail", user_id=user.id))
182
183 # Delete all the user's projects
184 projects = request.db.query(Project).filter(
185 Project.name.in_(
186 request.db.query(Project.name)
187 .join(Role.project)
188 .filter(Role.user == user)
189 .subquery()
190 )
191 )
192 for project in projects:
193 request.db.add(
194 JournalEntry(
195 name=project.name,
196 action="remove project",
197 submitted_by=request.user,
198 submitted_from=request.remote_addr,
199 )
200 )
201 projects.delete(synchronize_session=False)
202
203 # Update all journals to point to `deleted-user` instead
204 deleted_user = request.db.query(User).filter(User.username == "deleted-user").one()
205
206 journals = (
207 request.db.query(JournalEntry)
208 .options(joinedload("submitted_by"))
209 .filter(JournalEntry.submitted_by == user)
210 .all()
211 )
212
213 for journal in journals:
214 journal.submitted_by = deleted_user
215
216 # Delete the user
217 request.db.delete(user)
218 request.db.add(
219 JournalEntry(
220 name=f"user:{user.username}",
221 action="nuke user",
222 submitted_by=request.user,
223 submitted_from=request.remote_addr,
224 )
225 )
226 request.session.flash(f"Nuked user {user.username!r}", queue="success")
227 return HTTPSeeOther(request.route_path("admin.user.list"))
228
229
230 @view_config(
231 route_name="admin.user.reset_password",
232 require_methods=["POST"],
233 permission="admin",
234 has_translations=True,
235 uses_session=True,
236 require_csrf=True,
237 )
238 def user_reset_password(request):
239 user = request.db.query(User).get(request.matchdict["user_id"])
240
241 if user.username != request.params.get("username"):
242 request.session.flash("Wrong confirmation input", queue="error")
243 return HTTPSeeOther(request.route_path("admin.user.detail", user_id=user.id))
244
245 login_service = request.find_service(IUserService, context=None)
246 send_password_compromised_email(request, user)
247 login_service.disable_password(user.id, reason=DisableReason.CompromisedPassword)
248
249 request.session.flash(f"Reset password for {user.username!r}", queue="success")
250 return HTTPSeeOther(request.route_path("admin.user.detail", user_id=user.id))
251
[end of warehouse/admin/views/users.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/warehouse/admin/views/users.py b/warehouse/admin/views/users.py
--- a/warehouse/admin/views/users.py
+++ b/warehouse/admin/views/users.py
@@ -14,6 +14,7 @@
import wtforms
import wtforms.fields.html5
+import wtforms.validators
from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage
from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther
@@ -95,6 +96,14 @@
emails = wtforms.fields.FieldList(wtforms.fields.FormField(EmailForm))
+ def validate_emails(self, field):
+ # If there's no email on the account, it's ok. Otherwise, ensure
+ # we have 1 primary email.
+ if field.data and len([1 for email in field.data if email["primary"]]) != 1:
+ raise wtforms.validators.ValidationError(
+ "There must be exactly one primary email"
+ )
+
@view_config(
route_name="admin.user.detail",
@@ -134,6 +143,7 @@
if request.method == "POST" and form.validate():
form.populate_obj(user)
+ request.session.flash(f"User {user.username!r} updated", queue="success")
return HTTPSeeOther(location=request.current_route_path())
return {"user": user, "form": form, "roles": roles, "add_email_form": EmailForm()}
@@ -151,6 +161,11 @@
form = EmailForm(request.POST)
if form.validate():
+
+ if form.primary.data:
+ for other in user.emails:
+ other.primary = False
+
email = Email(
email=form.email.data,
user=user,
| {"golden_diff": "diff --git a/warehouse/admin/views/users.py b/warehouse/admin/views/users.py\n--- a/warehouse/admin/views/users.py\n+++ b/warehouse/admin/views/users.py\n@@ -14,6 +14,7 @@\n \n import wtforms\n import wtforms.fields.html5\n+import wtforms.validators\n \n from paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage\n from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther\n@@ -95,6 +96,14 @@\n \n emails = wtforms.fields.FieldList(wtforms.fields.FormField(EmailForm))\n \n+ def validate_emails(self, field):\n+ # If there's no email on the account, it's ok. Otherwise, ensure\n+ # we have 1 primary email.\n+ if field.data and len([1 for email in field.data if email[\"primary\"]]) != 1:\n+ raise wtforms.validators.ValidationError(\n+ \"There must be exactly one primary email\"\n+ )\n+\n \n @view_config(\n route_name=\"admin.user.detail\",\n@@ -134,6 +143,7 @@\n \n if request.method == \"POST\" and form.validate():\n form.populate_obj(user)\n+ request.session.flash(f\"User {user.username!r} updated\", queue=\"success\")\n return HTTPSeeOther(location=request.current_route_path())\n \n return {\"user\": user, \"form\": form, \"roles\": roles, \"add_email_form\": EmailForm()}\n@@ -151,6 +161,11 @@\n form = EmailForm(request.POST)\n \n if form.validate():\n+\n+ if form.primary.data:\n+ for other in user.emails:\n+ other.primary = False\n+\n email = Email(\n email=form.email.data,\n user=user,\n", "issue": "Reset password doesn't work if user doesn't have verified/primary email address.\n**Describe the bug**\r\n\r\nPassword reset is not working. I get the email, follow the link, enter the password twice, submit the form, and get back a page with no error messages. But the page title is \"Error processing form -- Log in\".\r\n\r\n**Expected behavior**\r\n\r\nI expected a positive confirmation that my password was reset, and to be able to login with the new password. Neither one happens.\r\n\r\n**To Reproduce**\r\n\r\n1. Start at `https://pypi.org/account/login/`\r\n2. Click \"Forgot password?\" link\r\n3. Enter the email address associated with my PyPI account (gerg.ward at gmail dot com)\r\n4. Check my inbox and the email is already there.\r\n5. Click the link: `https://pypi.org/account/reset-password/?token=.eJw1...`\r\n6. Generate a new password outside my browser.\r\n7. Copy/paste the new password into both password fields; text changes to \"Passwords match\" and the \"Reset password\" button is enabled.\r\n8. Click \"Reset password\"\r\n9. Receive the same \"Reset password\" form again, but with positive confirmation and no error messages. Only the page title gives a clue: \"Error processing form \u2013 Reset your password \u00b7 PyPI\". 
When I try to login with the new password (again, copy/pasted), it does not work.\r\n \r\nI'll attach HTML and screenshot.\r\n\r\n**My Platform**\r\n\r\nFirefox 87.0 on Ubuntu 20.04.2.\r\n\r\nSame behaviour with Chromium.\r\n\r\nNo HTTP proxy.\r\n\r\n**Additional context**\r\n\n", "before_files": [{"content": "# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport shlex\n\nimport wtforms\nimport wtforms.fields.html5\n\nfrom paginate_sqlalchemy import SqlalchemyOrmPage as SQLAlchemyORMPage\nfrom pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound, HTTPSeeOther\nfrom pyramid.view import view_config\nfrom sqlalchemy import or_\nfrom sqlalchemy.orm import joinedload\nfrom sqlalchemy.orm.exc import NoResultFound\n\nfrom warehouse import forms\nfrom warehouse.accounts.interfaces import IUserService\nfrom warehouse.accounts.models import DisableReason, Email, User\nfrom warehouse.email import send_password_compromised_email\nfrom warehouse.packaging.models import JournalEntry, Project, Role\nfrom warehouse.utils.paginate import paginate_url_factory\n\n\n@view_config(\n route_name=\"admin.user.list\",\n renderer=\"admin/users/list.html\",\n permission=\"moderator\",\n uses_session=True,\n)\ndef user_list(request):\n q = request.params.get(\"q\")\n\n try:\n page_num = int(request.params.get(\"page\", 1))\n except ValueError:\n raise HTTPBadRequest(\"'page' must be an integer.\") from None\n\n users_query = request.db.query(User).order_by(User.username)\n\n if q:\n terms = shlex.split(q)\n\n filters = []\n for term in terms:\n if \":\" in term:\n field, value = term.split(\":\", 1)\n if field.lower() == \"email\":\n filters.append(User.emails.any(Email.email.ilike(value)))\n else:\n filters.append(User.username.ilike(term))\n\n users_query = users_query.filter(or_(*filters))\n\n users = SQLAlchemyORMPage(\n users_query,\n page=page_num,\n items_per_page=25,\n url_maker=paginate_url_factory(request),\n )\n\n return {\"users\": users, \"query\": q}\n\n\nclass EmailForm(forms.Form):\n\n email = wtforms.fields.html5.EmailField(\n validators=[wtforms.validators.DataRequired()]\n )\n primary = wtforms.fields.BooleanField()\n verified = wtforms.fields.BooleanField()\n public = wtforms.fields.BooleanField()\n\n\nclass UserForm(forms.Form):\n\n name = wtforms.StringField(\n validators=[wtforms.validators.Optional(), wtforms.validators.Length(max=100)]\n )\n\n is_active = wtforms.fields.BooleanField()\n is_superuser = wtforms.fields.BooleanField()\n is_moderator = wtforms.fields.BooleanField()\n is_psf_staff = wtforms.fields.BooleanField()\n\n prohibit_password_reset = wtforms.fields.BooleanField()\n\n emails = wtforms.fields.FieldList(wtforms.fields.FormField(EmailForm))\n\n\n@view_config(\n route_name=\"admin.user.detail\",\n renderer=\"admin/users/detail.html\",\n permission=\"moderator\",\n request_method=\"GET\",\n uses_session=True,\n require_csrf=True,\n require_methods=False,\n)\n@view_config(\n route_name=\"admin.user.detail\",\n renderer=\"admin/users/detail.html\",\n permission=\"admin\",\n 
request_method=\"POST\",\n uses_session=True,\n require_csrf=True,\n require_methods=False,\n)\ndef user_detail(request):\n try:\n user = (\n request.db.query(User).filter(User.id == request.matchdict[\"user_id\"]).one()\n )\n except NoResultFound:\n raise HTTPNotFound\n\n roles = (\n request.db.query(Role)\n .join(User)\n .filter(Role.user == user)\n .order_by(Role.role_name, Role.project_id)\n .all()\n )\n\n form = UserForm(request.POST if request.method == \"POST\" else None, user)\n\n if request.method == \"POST\" and form.validate():\n form.populate_obj(user)\n return HTTPSeeOther(location=request.current_route_path())\n\n return {\"user\": user, \"form\": form, \"roles\": roles, \"add_email_form\": EmailForm()}\n\n\n@view_config(\n route_name=\"admin.user.add_email\",\n require_methods=[\"POST\"],\n permission=\"admin\",\n uses_session=True,\n require_csrf=True,\n)\ndef user_add_email(request):\n user = request.db.query(User).get(request.matchdict[\"user_id\"])\n form = EmailForm(request.POST)\n\n if form.validate():\n email = Email(\n email=form.email.data,\n user=user,\n primary=form.primary.data,\n verified=form.verified.data,\n public=form.public.data,\n )\n request.db.add(email)\n request.session.flash(\n f\"Added email for user {user.username!r}\", queue=\"success\"\n )\n\n return HTTPSeeOther(request.route_path(\"admin.user.detail\", user_id=user.id))\n\n\n@view_config(\n route_name=\"admin.user.delete\",\n require_methods=[\"POST\"],\n permission=\"admin\",\n uses_session=True,\n require_csrf=True,\n)\ndef user_delete(request):\n user = request.db.query(User).get(request.matchdict[\"user_id\"])\n\n if user.username != request.params.get(\"username\"):\n request.session.flash(\"Wrong confirmation input\", queue=\"error\")\n return HTTPSeeOther(request.route_path(\"admin.user.detail\", user_id=user.id))\n\n # Delete all the user's projects\n projects = request.db.query(Project).filter(\n Project.name.in_(\n request.db.query(Project.name)\n .join(Role.project)\n .filter(Role.user == user)\n .subquery()\n )\n )\n for project in projects:\n request.db.add(\n JournalEntry(\n name=project.name,\n action=\"remove project\",\n submitted_by=request.user,\n submitted_from=request.remote_addr,\n )\n )\n projects.delete(synchronize_session=False)\n\n # Update all journals to point to `deleted-user` instead\n deleted_user = request.db.query(User).filter(User.username == \"deleted-user\").one()\n\n journals = (\n request.db.query(JournalEntry)\n .options(joinedload(\"submitted_by\"))\n .filter(JournalEntry.submitted_by == user)\n .all()\n )\n\n for journal in journals:\n journal.submitted_by = deleted_user\n\n # Delete the user\n request.db.delete(user)\n request.db.add(\n JournalEntry(\n name=f\"user:{user.username}\",\n action=\"nuke user\",\n submitted_by=request.user,\n submitted_from=request.remote_addr,\n )\n )\n request.session.flash(f\"Nuked user {user.username!r}\", queue=\"success\")\n return HTTPSeeOther(request.route_path(\"admin.user.list\"))\n\n\n@view_config(\n route_name=\"admin.user.reset_password\",\n require_methods=[\"POST\"],\n permission=\"admin\",\n has_translations=True,\n uses_session=True,\n require_csrf=True,\n)\ndef user_reset_password(request):\n user = request.db.query(User).get(request.matchdict[\"user_id\"])\n\n if user.username != request.params.get(\"username\"):\n request.session.flash(\"Wrong confirmation input\", queue=\"error\")\n return HTTPSeeOther(request.route_path(\"admin.user.detail\", user_id=user.id))\n\n login_service = 
request.find_service(IUserService, context=None)\n send_password_compromised_email(request, user)\n login_service.disable_password(user.id, reason=DisableReason.CompromisedPassword)\n\n request.session.flash(f\"Reset password for {user.username!r}\", queue=\"success\")\n return HTTPSeeOther(request.route_path(\"admin.user.detail\", user_id=user.id))\n", "path": "warehouse/admin/views/users.py"}]} | 3,175 | 386 |
gh_patches_debug_4272 | rasdani/github-patches | git_diff | akvo__akvo-rsr-3260 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Remove scrollbars when they are not necessary
The home page shows a scroll bar even when there is nothing to scroll


</issue>
<code>
[start of akvo/rest/views/organisation.py]
1 # -*- coding: utf-8 -*-
2
3 # Akvo RSR is covered by the GNU Affero General Public License.
4 # See more details in the license.txt file located at the root folder of the Akvo RSR module.
5 # For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
6
7 from django.conf import settings
8 from django.db.models import Q
9 from django.utils import six
10 from rest_framework.decorators import api_view
11 from rest_framework.exceptions import ParseError
12 from rest_framework.parsers import JSONParser
13 from rest_framework.response import Response
14 from rest_framework_xml.parsers import XMLParser
15 from rest_framework_xml.compat import etree
16
17 from akvo.rest.views.utils import int_or_none, get_qs_elements_for_page
18 from akvo.rsr.filters import location_choices, get_m49_filter
19 from akvo.rsr.models import Project, Organisation, Country
20 from akvo.rsr.views.utils import apply_keywords, org_projects
21 from ..serializers import OrganisationSerializer, OrganisationDirectorySerializer
22 from ..viewsets import BaseRSRViewSet
23
24
25 class AkvoOrganisationParser(XMLParser):
26 def parse(self, stream, media_type=None, parser_context=None):
27 assert etree, 'XMLParser requires defusedxml to be installed'
28
29 parser_context = parser_context or {}
30 encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
31 parser = etree.DefusedXMLParser(encoding=encoding)
32 try:
33 tree = etree.parse(stream, parser=parser, forbid_dtd=True)
34 except (etree.ParseError, ValueError) as exc:
35 raise ParseError('XML parse error - %s' % six.text_type(exc))
36 return self.organisation_data_from_etree(tree.getroot())
37
38 def organisation_data_from_etree(self, tree):
39 def find_text(tree, str):
40 element = tree.find(str)
41 if element is None:
42 return ''
43 return element.text.strip() if element.text else ""
44
45 def location_data(location_tree):
46 if location_tree is None:
47 return []
48 iso_code = find_text(location_tree, 'iso_code').lower()
49 country, created = Country.objects.get_or_create(**Country.fields_from_iso_code(iso_code))
50 country = country.id
51 latitude = find_text(location_tree, 'latitude') or 0
52 longitude = find_text(location_tree, 'longitude') or 0
53 primary = True
54 return [dict(latitude=latitude, longitude=longitude, country=country, primary=primary)]
55
56 long_name = find_text(tree, 'name')
57 name = long_name[:25]
58 description = find_text(tree, 'description')
59 url = find_text(tree, 'url')
60 iati_type = find_text(tree, 'iati_organisation_type')
61 new_organisation_type = int(iati_type) if iati_type else 22
62 organisation_type = Organisation.org_type_from_iati_type(new_organisation_type)
63 locations = location_data(tree.find('location/object'))
64 return dict(
65 name=name, long_name=long_name, description=description, url=url,
66 organisation_type=organisation_type, new_organisation_type=new_organisation_type,
67 locations=locations
68 )
69
70
71 class OrganisationViewSet(BaseRSRViewSet):
72 """
73 API endpoint that allows organisations to be viewed or edited.
74 """
75 queryset = Organisation.objects.all()
76 serializer_class = OrganisationSerializer
77 parser_classes = (AkvoOrganisationParser, JSONParser,)
78
79
80 @api_view(['GET'])
81 def organisation_directory(request):
82 """REST view for the update directory."""
83
84 page = request.rsr_page
85 all_organisations = Organisation.objects.all() if not page else _page_organisations(page)
86
87 # Filter updates based on query parameters
88 filter_, text_filter = _create_filters_query(request)
89 organisations = (
90 all_organisations.filter(filter_).distinct() if filter_ is not None else all_organisations
91 )
92 organisations_text_filtered = (
93 organisations.filter(text_filter) if text_filter is not None else organisations
94 )
95 if organisations_text_filtered.exists():
96 organisations = organisations_text_filtered
97
98 # Get the relevant data for typeaheads based on filtered organisations (minus
99 # text filtering, if no organisations were found)
100 locations = [
101 {'id': choice[0], 'name': choice[1]}
102 for choice in location_choices(organisations)
103 ]
104
105 display_organisations = get_qs_elements_for_page(organisations_text_filtered, request)
106
107 # Get related objects of page at once
108 response = {
109 'project_count': all_organisations.count(),
110 'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,
111 'location': locations,
112 }
113 return Response(response)
114
115
116 def _public_projects():
117 """Return all public projects."""
118 return Project.objects.public().published().select_related('partners')
119
120
121 def _page_organisations(page):
122 """Dig out the list or organisations to use."""
123 projects = org_projects(page.organisation) if page.partner_projects else _public_projects()
124 keyword_projects = apply_keywords(page, projects)
125 return keyword_projects.all_partners()
126
127
128 def _create_filters_query(request):
129 """Returns a Q object expression based on query parameters."""
130 location_param = int_or_none(request.GET.get('location'))
131 title_or_subtitle_param = request.GET.get('title_or_subtitle')
132
133 location_filter = (
134 get_m49_filter(location_param, use_recipient_country=False) if location_param else None
135 )
136 title_filter = (
137 Q(name__icontains=title_or_subtitle_param) |
138 Q(long_name__icontains=title_or_subtitle_param)
139 ) if title_or_subtitle_param else None
140 all_filters = [
141 location_filter,
142 ]
143 filters = filter(None, all_filters)
144 return reduce(lambda x, y: x & y, filters) if filters else None, title_filter
145
[end of akvo/rest/views/organisation.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/akvo/rest/views/organisation.py b/akvo/rest/views/organisation.py
--- a/akvo/rest/views/organisation.py
+++ b/akvo/rest/views/organisation.py
@@ -106,7 +106,7 @@
# Get related objects of page at once
response = {
- 'project_count': all_organisations.count(),
+ 'project_count': organisations_text_filtered.count(),
'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,
'location': locations,
}
| {"golden_diff": "diff --git a/akvo/rest/views/organisation.py b/akvo/rest/views/organisation.py\n--- a/akvo/rest/views/organisation.py\n+++ b/akvo/rest/views/organisation.py\n@@ -106,7 +106,7 @@\n \n # Get related objects of page at once\n response = {\n- 'project_count': all_organisations.count(),\n+ 'project_count': organisations_text_filtered.count(),\n 'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,\n 'location': locations,\n }\n", "issue": "Remove scrollbars when they are not necessary\nThe home page shows a scroll bar even when there is nothing to scroll\r\n\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Akvo RSR is covered by the GNU Affero General Public License.\n# See more details in the license.txt file located at the root folder of the Akvo RSR module.\n# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.\n\nfrom django.conf import settings\nfrom django.db.models import Q\nfrom django.utils import six\nfrom rest_framework.decorators import api_view\nfrom rest_framework.exceptions import ParseError\nfrom rest_framework.parsers import JSONParser\nfrom rest_framework.response import Response\nfrom rest_framework_xml.parsers import XMLParser\nfrom rest_framework_xml.compat import etree\n\nfrom akvo.rest.views.utils import int_or_none, get_qs_elements_for_page\nfrom akvo.rsr.filters import location_choices, get_m49_filter\nfrom akvo.rsr.models import Project, Organisation, Country\nfrom akvo.rsr.views.utils import apply_keywords, org_projects\nfrom ..serializers import OrganisationSerializer, OrganisationDirectorySerializer\nfrom ..viewsets import BaseRSRViewSet\n\n\nclass AkvoOrganisationParser(XMLParser):\n def parse(self, stream, media_type=None, parser_context=None):\n assert etree, 'XMLParser requires defusedxml to be installed'\n\n parser_context = parser_context or {}\n encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)\n parser = etree.DefusedXMLParser(encoding=encoding)\n try:\n tree = etree.parse(stream, parser=parser, forbid_dtd=True)\n except (etree.ParseError, ValueError) as exc:\n raise ParseError('XML parse error - %s' % six.text_type(exc))\n return self.organisation_data_from_etree(tree.getroot())\n\n def organisation_data_from_etree(self, tree):\n def find_text(tree, str):\n element = tree.find(str)\n if element is None:\n return ''\n return element.text.strip() if element.text else \"\"\n\n def location_data(location_tree):\n if location_tree is None:\n return []\n iso_code = find_text(location_tree, 'iso_code').lower()\n country, created = Country.objects.get_or_create(**Country.fields_from_iso_code(iso_code))\n country = country.id\n latitude = find_text(location_tree, 'latitude') or 0\n longitude = find_text(location_tree, 'longitude') or 0\n primary = True\n return [dict(latitude=latitude, longitude=longitude, country=country, primary=primary)]\n\n long_name = find_text(tree, 'name')\n name = long_name[:25]\n description = find_text(tree, 'description')\n url = find_text(tree, 'url')\n iati_type = find_text(tree, 'iati_organisation_type')\n new_organisation_type = int(iati_type) if iati_type else 22\n organisation_type = Organisation.org_type_from_iati_type(new_organisation_type)\n locations = location_data(tree.find('location/object'))\n return dict(\n name=name, long_name=long_name, description=description, url=url,\n organisation_type=organisation_type, new_organisation_type=new_organisation_type,\n locations=locations\n 
)\n\n\nclass OrganisationViewSet(BaseRSRViewSet):\n \"\"\"\n API endpoint that allows organisations to be viewed or edited.\n \"\"\"\n queryset = Organisation.objects.all()\n serializer_class = OrganisationSerializer\n parser_classes = (AkvoOrganisationParser, JSONParser,)\n\n\n@api_view(['GET'])\ndef organisation_directory(request):\n \"\"\"REST view for the update directory.\"\"\"\n\n page = request.rsr_page\n all_organisations = Organisation.objects.all() if not page else _page_organisations(page)\n\n # Filter updates based on query parameters\n filter_, text_filter = _create_filters_query(request)\n organisations = (\n all_organisations.filter(filter_).distinct() if filter_ is not None else all_organisations\n )\n organisations_text_filtered = (\n organisations.filter(text_filter) if text_filter is not None else organisations\n )\n if organisations_text_filtered.exists():\n organisations = organisations_text_filtered\n\n # Get the relevant data for typeaheads based on filtered organisations (minus\n # text filtering, if no organisations were found)\n locations = [\n {'id': choice[0], 'name': choice[1]}\n for choice in location_choices(organisations)\n ]\n\n display_organisations = get_qs_elements_for_page(organisations_text_filtered, request)\n\n # Get related objects of page at once\n response = {\n 'project_count': all_organisations.count(),\n 'projects': OrganisationDirectorySerializer(display_organisations, many=True).data,\n 'location': locations,\n }\n return Response(response)\n\n\ndef _public_projects():\n \"\"\"Return all public projects.\"\"\"\n return Project.objects.public().published().select_related('partners')\n\n\ndef _page_organisations(page):\n \"\"\"Dig out the list or organisations to use.\"\"\"\n projects = org_projects(page.organisation) if page.partner_projects else _public_projects()\n keyword_projects = apply_keywords(page, projects)\n return keyword_projects.all_partners()\n\n\ndef _create_filters_query(request):\n \"\"\"Returns a Q object expression based on query parameters.\"\"\"\n location_param = int_or_none(request.GET.get('location'))\n title_or_subtitle_param = request.GET.get('title_or_subtitle')\n\n location_filter = (\n get_m49_filter(location_param, use_recipient_country=False) if location_param else None\n )\n title_filter = (\n Q(name__icontains=title_or_subtitle_param) |\n Q(long_name__icontains=title_or_subtitle_param)\n ) if title_or_subtitle_param else None\n all_filters = [\n location_filter,\n ]\n filters = filter(None, all_filters)\n return reduce(lambda x, y: x & y, filters) if filters else None, title_filter\n", "path": "akvo/rest/views/organisation.py"}]} | 2,245 | 121 |
gh_patches_debug_23524 | rasdani/github-patches | git_diff | pypa__setuptools-2316 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Reconsidering distutils replacement strategy
I first expressed concerns about the fact that `distutils` needs to be imported *after* `setuptools` in [this post](https://github.com/pypa/setuptools/pull/2143#issuecomment-637586551), and I did not realize that we had not adequately addressed this before the `distutils` adoption. I think we should re-consider the way this is done, to avoid any sort of requirement on the sort order.
Currently, when you `import setuptools`, [`distutils` is injected into `sys.modules`](https://github.com/pypa/setuptools/blob/37d81f4ce8f08c4baf44b6ff0f3f1bd3f6b2a127/setuptools/distutils_patch.py#L48-L61).
The problem here is that it requires importing `setuptools` before `distutils`, which most people will *not* be doing right now (by almost all common import sort orders, `distutils` comes first).
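A minimal sketch of the ordering hazard, assuming the bundled `distutils` is enabled (this is an illustration only, not the actual patch code from `distutils_patch` / `_distutils_hack`):

```python
# Illustration of the import-order hazard; simplified, not real Setuptools code.
import distutils.core    # binds the stdlib distutils first
import setuptools        # replaces 'distutils' in sys.modules with the bundled copy
import distutils.core    # re-imports now resolve to setuptools._distutils

# Anything that kept a reference from the first import can end up mixing
# objects from two different distutils implementations.
```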
@jaraco [explains why it's done this way here](https://github.com/pypa/setuptools/issues/2230#issuecomment-657327796):
> I did consider another approach, where Setuptools could add a .pth file that would import setuptools.distutils_patch. Such an approach would always happen earlier and so would not be subject to the race that's happening here, but it would also happen whether or not setuptools was imported (on any invocation of Python in that environment).
I think one preferred solution would be for us to install a `distutils` package that would be imported before `distutils`. The problem is that in the standard `sys.path`, `site-packages` comes *after* the standard library. Since anything we do here will be a tremendous hack, I'm thinking that maybe the most surgical way to accomplish this is to do something like this:
1. Create a `setuptools._distutils/import_hack/distutils/__init__.py` (where there's no `__init__.py` in `import_hack`) that invokes `import setuptools.distutils_patch`.
2. Add a `.pth` file that injects `$SITEPACKAGES/setuptools/_import_hack/` into `sys.path` *before* the standard library (both pieces are sketched just below).
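For concreteness, the two pieces might look roughly like this; the file names and the `_import_hack` layout are the hypothetical ones from the proposal above, not anything Setuptools actually ships:

```python
# setuptools/_import_hack/distutils/__init__.py  (hypothetical shadowing package)
# With its parent directory placed on sys.path ahead of the standard library,
# `import distutils` resolves here and applies the patch before anything else.
import setuptools.distutils_patch  # noqa: F401

# The companion .pth file in site-packages could then be a single line;
# site.py executes .pth lines that start with "import" at interpreter startup:
#
#   import os, site, sys; sys.path.insert(0, os.path.join(site.getsitepackages()[0], 'setuptools', '_import_hack'))
```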
Another option I think we should consider is stepping up the timetable on providing everything `distutils` provides directly from the `setuptools` namespace, [as I described here](https://github.com/pypa/setuptools/pull/2143#issuecomment-637586551):
> I personally would prefer it if we made the setuptools namespace the canonical namespace, and have distutils just import symbols from setuptools and possibly wrap them in deprecation warnings.
Right now, there are things that you *must* import from `distutils`, and so we can't say, "You can avoid an issue with the sort order by just not importing `distutils`". If we provide everything that `distutils` provides in the `setuptools` namespace, we can just say, "Just use the `setuptools` version and you won't have this problem."
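A sketch of what that re-export shim could look like for attribute-style access, assuming PEP 562's module-level `__getattr__` and the bundled `setuptools._distutils` already referenced in this thread (illustrative only; submodule imports such as `distutils.core` would still need forwarding modules of their own):

```python
# Hypothetical distutils/__init__.py that forwards to the setuptools-provided copy.
import warnings

import setuptools._distutils as _impl


def __getattr__(name):
    # PEP 562: called for any attribute not found in this module directly.
    warnings.warn(
        "direct use of distutils.{} is deprecated; "
        "use the setuptools equivalent instead".format(name),
        DeprecationWarning,
        stacklevel=2,
    )
    return getattr(_impl, name)
```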
Personally, I think we should do a combination of both — make it so importing `distutils` still works in whatever order you do it in *and* move everything to the canonical `setuptools` namespace. That will make it easier to actively deprecate direct use of `distutils`, and pave the way for us eventually removing the need for the `.pth` file (though the fastest road to not having the `.pth` file will be via removing `distutils` from the standard library).
</issue>
<code>
[start of _distutils_hack/__init__.py]
1 import sys
2 import os
3 import re
4 import importlib
5 import warnings
6
7
8 is_pypy = '__pypy__' in sys.builtin_module_names
9
10
11 def warn_distutils_present():
12 if 'distutils' not in sys.modules:
13 return
14 if is_pypy and sys.version_info < (3, 7):
15 # PyPy for 3.6 unconditionally imports distutils, so bypass the warning
16 # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
17 return
18 warnings.warn(
19 "Distutils was imported before Setuptools. This usage is discouraged "
20 "and may exhibit undesirable behaviors or errors. Please use "
21 "Setuptools' objects directly or at least import Setuptools first.")
22
23
24 def clear_distutils():
25 if 'distutils' not in sys.modules:
26 return
27 warnings.warn("Setuptools is replacing distutils.")
28 mods = [name for name in sys.modules if re.match(r'distutils\b', name)]
29 for name in mods:
30 del sys.modules[name]
31
32
33 def enabled():
34 """
35 Allow selection of distutils by environment variable.
36 """
37 which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')
38 return which == 'local'
39
40
41 def ensure_local_distutils():
42 clear_distutils()
43 distutils = importlib.import_module('setuptools._distutils')
44 distutils.__name__ = 'distutils'
45 sys.modules['distutils'] = distutils
46
47 # sanity check that submodules load as expected
48 core = importlib.import_module('distutils.core')
49 assert '_distutils' in core.__file__, core.__file__
50
51
52 def do_override():
53 """
54 Ensure that the local copy of distutils is preferred over stdlib.
55
56 See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
57 for more motivation.
58 """
59 warn_distutils_present()
60 if enabled():
61 ensure_local_distutils()
62
63
64 class DistutilsMetaFinder:
65 def find_spec(self, fullname, path, target=None):
66 if path is not None or fullname != "distutils":
67 return None
68
69 return self.get_distutils_spec()
70
71 def get_distutils_spec(self):
72 import importlib.util
73
74 class DistutilsLoader(importlib.util.abc.Loader):
75
76 def create_module(self, spec):
77 return importlib.import_module('._distutils', 'setuptools')
78
79 def exec_module(self, module):
80 pass
81
82 return importlib.util.spec_from_loader('distutils', DistutilsLoader())
83
84
85 DISTUTILS_FINDER = DistutilsMetaFinder()
86
87
88 def add_shim():
89 sys.meta_path.insert(0, DISTUTILS_FINDER)
90
91
92 def remove_shim():
93 try:
94 sys.meta_path.remove(DISTUTILS_FINDER)
95 except ValueError:
96 pass
97
[end of _distutils_hack/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py
--- a/_distutils_hack/__init__.py
+++ b/_distutils_hack/__init__.py
@@ -16,9 +16,12 @@
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
return
warnings.warn(
- "Distutils was imported before Setuptools. This usage is discouraged "
- "and may exhibit undesirable behaviors or errors. Please use "
- "Setuptools' objects directly or at least import Setuptools first.")
+ "Distutils was imported before Setuptools, but importing Setuptools "
+ "also replaces the `distutils` module in `sys.modules`. This may lead "
+ "to undesirable behaviors or errors. To avoid these issues, avoid "
+ "using distutils directly, ensure that setuptools is installed in the "
+ "traditional way (e.g. not an editable install), and/or make sure that "
+ "setuptools is always imported before distutils.")
def clear_distutils():
@@ -56,8 +59,8 @@
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
- warn_distutils_present()
if enabled():
+ warn_distutils_present()
ensure_local_distutils()
| {"golden_diff": "diff --git a/_distutils_hack/__init__.py b/_distutils_hack/__init__.py\n--- a/_distutils_hack/__init__.py\n+++ b/_distutils_hack/__init__.py\n@@ -16,9 +16,12 @@\n # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250\n return\n warnings.warn(\n- \"Distutils was imported before Setuptools. This usage is discouraged \"\n- \"and may exhibit undesirable behaviors or errors. Please use \"\n- \"Setuptools' objects directly or at least import Setuptools first.\")\n+ \"Distutils was imported before Setuptools, but importing Setuptools \"\n+ \"also replaces the `distutils` module in `sys.modules`. This may lead \"\n+ \"to undesirable behaviors or errors. To avoid these issues, avoid \"\n+ \"using distutils directly, ensure that setuptools is installed in the \"\n+ \"traditional way (e.g. not an editable install), and/or make sure that \"\n+ \"setuptools is always imported before distutils.\")\n \n \n def clear_distutils():\n@@ -56,8 +59,8 @@\n See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401\n for more motivation.\n \"\"\"\n- warn_distutils_present()\n if enabled():\n+ warn_distutils_present()\n ensure_local_distutils()\n", "issue": "Reconsidering distutils replacement strategy\nI first expressed concerns about the fact that `distutils` needs to be imported *after* `setuptools` in [this post](https://github.com/pypa/setuptools/pull/2143#issuecomment-637586551), and I did not realize that we had not adequately addressed this before the `distutils` adoption. I think we should re-consider the way this is done, to avoid any sort of requirement on the sort order.\r\n\r\nCurrently, when you `import setuptools`, [`distutils` is injected into `sys.modules`](https://github.com/pypa/setuptools/blob/37d81f4ce8f08c4baf44b6ff0f3f1bd3f6b2a127/setuptools/distutils_patch.py#L48-L61).\r\n\r\nThe problem here is that it requires importing `setuptools` before `distutils`, which most people will *not* be doing right now (by almost all common import sort orders, `distutils` comes first).\r\n\r\n@jaraco [explains why it's done this way here](https://github.com/pypa/setuptools/issues/2230#issuecomment-657327796):\r\n\r\n> I did consider another approach, where Setuptools could add a .pth file that would import setuptools.distutils_patch. Such an approach would always happen earlier and so would not be subject to the race that's happening here, but it would also happen whether or not setuptools was imported (on any invocation of Python in that environment).\r\n\r\nI think one preferred solution would be for us to install a `distutils` package that would be imported before `distutils`. The problem is that in the standard `sys.path`, `site-packages` comes *after* the standard library. Since anything we do here will be a tremendous hack, I'm thinking that maybe the most surgical way to accomplish this is to do something like this:\r\n\r\n1. Create a `setuptools._distutils/import_hack/distutils/__init__.py` (where there's no `__init__.py` in `import_hack`) that invokes `import setuptools.distutils_patch`.\r\n2. 
Add a `.pth` file that injects `$SITEPACKAGES/setuptools/_import_hack/` into `sys.path` *before* the standard library.\r\n\r\nAnother option I think we should consider is stepping up the timetable on providing everything `distutils` provides directly from the `setuptools` namespace, [as I described here](https://github.com/pypa/setuptools/pull/2143#issuecomment-637586551):\r\n\r\n> I personally would prefer it if we made the setuptools namespace the canonical namespace, and have distutils just import symbols from setuptools and possibly wrap them in deprecation warnings.\r\n\r\nRight now, there are things that you *must* import from `distutils`, and so we can't say, \"You can avoid an issue with the sort order by just not importing `distutils`\". If we provide everything that `distutils` provides in the `setuptools` namespace, we can just say, \"Just use the `setuptools` version and you won't have this problem.\"\r\n\r\nPersonally, I think we should do a combination of both \u2014 make it so importing `distutils` still works in whatever order you do it in *and* move everything to the canonical `setuptools` namespace. That will make it easier to actively deprecate direct use of `distutils`, and pave the way for us eventually removing the need for the `.pth` file (though the fastest road to not having the `.pth` file will be via removing `distutils` from the standard library).\n", "before_files": [{"content": "import sys\nimport os\nimport re\nimport importlib\nimport warnings\n\n\nis_pypy = '__pypy__' in sys.builtin_module_names\n\n\ndef warn_distutils_present():\n if 'distutils' not in sys.modules:\n return\n if is_pypy and sys.version_info < (3, 7):\n # PyPy for 3.6 unconditionally imports distutils, so bypass the warning\n # https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250\n return\n warnings.warn(\n \"Distutils was imported before Setuptools. This usage is discouraged \"\n \"and may exhibit undesirable behaviors or errors. 
Please use \"\n \"Setuptools' objects directly or at least import Setuptools first.\")\n\n\ndef clear_distutils():\n if 'distutils' not in sys.modules:\n return\n warnings.warn(\"Setuptools is replacing distutils.\")\n mods = [name for name in sys.modules if re.match(r'distutils\\b', name)]\n for name in mods:\n del sys.modules[name]\n\n\ndef enabled():\n \"\"\"\n Allow selection of distutils by environment variable.\n \"\"\"\n which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'stdlib')\n return which == 'local'\n\n\ndef ensure_local_distutils():\n clear_distutils()\n distutils = importlib.import_module('setuptools._distutils')\n distutils.__name__ = 'distutils'\n sys.modules['distutils'] = distutils\n\n # sanity check that submodules load as expected\n core = importlib.import_module('distutils.core')\n assert '_distutils' in core.__file__, core.__file__\n\n\ndef do_override():\n \"\"\"\n Ensure that the local copy of distutils is preferred over stdlib.\n\n See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401\n for more motivation.\n \"\"\"\n warn_distutils_present()\n if enabled():\n ensure_local_distutils()\n\n\nclass DistutilsMetaFinder:\n def find_spec(self, fullname, path, target=None):\n if path is not None or fullname != \"distutils\":\n return None\n\n return self.get_distutils_spec()\n\n def get_distutils_spec(self):\n import importlib.util\n\n class DistutilsLoader(importlib.util.abc.Loader):\n\n def create_module(self, spec):\n return importlib.import_module('._distutils', 'setuptools')\n\n def exec_module(self, module):\n pass\n\n return importlib.util.spec_from_loader('distutils', DistutilsLoader())\n\n\nDISTUTILS_FINDER = DistutilsMetaFinder()\n\n\ndef add_shim():\n sys.meta_path.insert(0, DISTUTILS_FINDER)\n\n\ndef remove_shim():\n try:\n sys.meta_path.remove(DISTUTILS_FINDER)\n except ValueError:\n pass\n", "path": "_distutils_hack/__init__.py"}]} | 2,197 | 360 |
gh_patches_debug_2666 | rasdani/github-patches | git_diff | netbox-community__netbox-14935 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Typo in DataSourceBulkEditForm
### Deployment Type
Self-hosted
### NetBox Version
v3.7.1
### Python Version
3.8
### Steps to Reproduce
"lavel" is defined as "Enforce unique space", but I think the correct definition is "Enabled".
https://github.com/netbox-community/netbox/blob/487f1ccfde26ef3c1f8a28089826acc0cd6fadb2/netbox/core/forms/bulk_edit.py#L21-L25
- Add a new data source

- Editing 1 Data Sources

### Expected Behavior
Enabled
### Observed Behavior
Enforce unique space
</issue>
<code>
[start of netbox/core/forms/bulk_edit.py]
1 from django import forms
2 from django.utils.translation import gettext_lazy as _
3
4 from core.models import *
5 from netbox.forms import NetBoxModelBulkEditForm
6 from netbox.utils import get_data_backend_choices
7 from utilities.forms.fields import CommentField
8 from utilities.forms.widgets import BulkEditNullBooleanSelect
9
10 __all__ = (
11 'DataSourceBulkEditForm',
12 )
13
14
15 class DataSourceBulkEditForm(NetBoxModelBulkEditForm):
16 type = forms.ChoiceField(
17 label=_('Type'),
18 choices=get_data_backend_choices,
19 required=False
20 )
21 enabled = forms.NullBooleanField(
22 required=False,
23 widget=BulkEditNullBooleanSelect(),
24 label=_('Enforce unique space')
25 )
26 description = forms.CharField(
27 label=_('Description'),
28 max_length=200,
29 required=False
30 )
31 comments = CommentField()
32 parameters = forms.JSONField(
33 label=_('Parameters'),
34 required=False
35 )
36 ignore_rules = forms.CharField(
37 label=_('Ignore rules'),
38 required=False,
39 widget=forms.Textarea()
40 )
41
42 model = DataSource
43 fieldsets = (
44 (None, ('type', 'enabled', 'description', 'comments', 'parameters', 'ignore_rules')),
45 )
46 nullable_fields = (
47 'description', 'description', 'parameters', 'comments', 'parameters', 'ignore_rules',
48 )
49
[end of netbox/core/forms/bulk_edit.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/netbox/core/forms/bulk_edit.py b/netbox/core/forms/bulk_edit.py
--- a/netbox/core/forms/bulk_edit.py
+++ b/netbox/core/forms/bulk_edit.py
@@ -21,7 +21,7 @@
enabled = forms.NullBooleanField(
required=False,
widget=BulkEditNullBooleanSelect(),
- label=_('Enforce unique space')
+ label=_('Enabled')
)
description = forms.CharField(
label=_('Description'),
| {"golden_diff": "diff --git a/netbox/core/forms/bulk_edit.py b/netbox/core/forms/bulk_edit.py\n--- a/netbox/core/forms/bulk_edit.py\n+++ b/netbox/core/forms/bulk_edit.py\n@@ -21,7 +21,7 @@\n enabled = forms.NullBooleanField(\n required=False,\n widget=BulkEditNullBooleanSelect(),\n- label=_('Enforce unique space')\n+ label=_('Enabled')\n )\n description = forms.CharField(\n label=_('Description'),\n", "issue": "Typo in DataSourceBulkEditForm\n### Deployment Type\n\nSelf-hosted\n\n### NetBox Version\n\nv3.7.1\n\n### Python Version\n\n3.8\n\n### Steps to Reproduce\n\n\"lavel\" is defined as \"Enforce unique space\", but I think the correct definition is \"Enabled\".\r\n\r\nhttps://github.com/netbox-community/netbox/blob/487f1ccfde26ef3c1f8a28089826acc0cd6fadb2/netbox/core/forms/bulk_edit.py#L21-L25\r\n\r\n- Add a new data source\r\n\r\n\r\n- Editing 1 Data Sources\r\n\r\n\n\n### Expected Behavior\n\nEnabled\n\n### Observed Behavior\n\nEnforce unique space\n", "before_files": [{"content": "from django import forms\nfrom django.utils.translation import gettext_lazy as _\n\nfrom core.models import *\nfrom netbox.forms import NetBoxModelBulkEditForm\nfrom netbox.utils import get_data_backend_choices\nfrom utilities.forms.fields import CommentField\nfrom utilities.forms.widgets import BulkEditNullBooleanSelect\n\n__all__ = (\n 'DataSourceBulkEditForm',\n)\n\n\nclass DataSourceBulkEditForm(NetBoxModelBulkEditForm):\n type = forms.ChoiceField(\n label=_('Type'),\n choices=get_data_backend_choices,\n required=False\n )\n enabled = forms.NullBooleanField(\n required=False,\n widget=BulkEditNullBooleanSelect(),\n label=_('Enforce unique space')\n )\n description = forms.CharField(\n label=_('Description'),\n max_length=200,\n required=False\n )\n comments = CommentField()\n parameters = forms.JSONField(\n label=_('Parameters'),\n required=False\n )\n ignore_rules = forms.CharField(\n label=_('Ignore rules'),\n required=False,\n widget=forms.Textarea()\n )\n\n model = DataSource\n fieldsets = (\n (None, ('type', 'enabled', 'description', 'comments', 'parameters', 'ignore_rules')),\n )\n nullable_fields = (\n 'description', 'description', 'parameters', 'comments', 'parameters', 'ignore_rules',\n )\n", "path": "netbox/core/forms/bulk_edit.py"}]} | 1,201 | 105 |
gh_patches_debug_32942 | rasdani/github-patches | git_diff | genialis__resolwe-316 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Data.create returns bad message when missing ADD permission on collection
If a user tries to run a process inside a collection, but doesn't have `ADD` permission, the api returns `{"detail":"Not found."}`
</issue>
<code>
[start of resolwe/flow/views/mixins.py]
1 """Mixins used in Resolwe Viewsets."""
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 from django.db import IntegrityError, transaction
5
6 from guardian.shortcuts import assign_perm
7 from rest_framework import mixins, status
8 from rest_framework.decorators import list_route
9 from rest_framework.response import Response
10
11 from resolwe.flow.models import DescriptorSchema
12 from resolwe.permissions.shortcuts import get_objects_for_user
13
14
15 class ResolweCreateModelMixin(mixins.CreateModelMixin):
16 """Mixin to support creating new `Resolwe` models.
17
18 Extends `django_rest_framework`'s class `CreateModelMixin` with:
19
20 * append user's id from request to posted data as `contributor`
21 key
22 * catch `IntegrityError`s, so we can return HTTP status 409
23 instead of raising error
24
25 """
26
27 def create(self, request, *args, **kwargs):
28 """Create a resource."""
29 ds_slug = request.data.get('descriptor_schema', None)
30 if ds_slug:
31 ds_query = DescriptorSchema.objects.filter(slug=ds_slug)
32 ds_query = get_objects_for_user(request.user, 'view_descriptorschema', ds_query)
33 try:
34 request.data['descriptor_schema'] = ds_query.latest().pk
35 except DescriptorSchema.DoesNotExist:
36 return Response(
37 {'descriptor_schema': [
38 'Invalid descriptor_schema slug "{}" - object does not exist.'.format(ds_slug)]},
39 status=status.HTTP_400_BAD_REQUEST)
40
41 request.data['contributor'] = request.user.pk
42 try:
43 return super(ResolweCreateModelMixin, self).create(request, *args, **kwargs)
44
45 except IntegrityError as ex:
46 return Response({u'error': str(ex)}, status=status.HTTP_409_CONFLICT)
47
48 def perform_create(self, serializer):
49 """Create a resource."""
50 with transaction.atomic():
51 instance = serializer.save()
52
53 # Assign all permissions to the object contributor.
54 for permission in list(zip(*instance._meta.permissions))[0]: # pylint: disable=protected-access
55 assign_perm(permission, instance.contributor, instance)
56
57
58 class ResolweUpdateModelMixin(mixins.UpdateModelMixin):
59 """Mixin to support updating `Resolwe` models.
60
61 Extends `django_rest_framework`'s class `UpdateModelMixin` with:
62
63 * translate `descriptor_schema` field from DescriptorSchema's
64 slug to its id and return 400 error Response if it doesn't
65 exists
66
67 """
68
69 def update(self, request, *args, **kwargs):
70 """Update a resource."""
71 ds_slug = request.data.get('descriptor_schema', None)
72 if ds_slug:
73 ds_query = DescriptorSchema.objects.filter(slug=ds_slug)
74 ds_query = get_objects_for_user(request.user, 'view_descriptorschema', ds_query)
75 try:
76 request.data['descriptor_schema'] = ds_query.latest().pk
77 except DescriptorSchema.DoesNotExist:
78 return Response(
79 {'descriptor_schema': [
80 'Invalid descriptor_schema slug "{}" - object does not exist.'.format(ds_slug)]},
81 status=status.HTTP_400_BAD_REQUEST)
82
83 return super(ResolweUpdateModelMixin, self).update(request, *args, **kwargs)
84
85
86 class ResolweCheckSlugMixin(object):
87 """Slug validation."""
88
89 @list_route(methods=[u'get'])
90 def slug_exists(self, request):
91 """Check if given url slug exists.
92
93 Check if slug given in query parameter ``name`` exists. Return
94 ``True`` if slug already exists and ``False`` otherwise.
95
96 """
97 if not request.user.is_authenticated():
98 return Response(status=status.HTTP_401_UNAUTHORIZED)
99
100 if 'name' not in request.query_params:
101 return Response({'error': 'Query parameter `name` must be given.'},
102 status=status.HTTP_400_BAD_REQUEST)
103
104 queryset = self.get_queryset()
105 slug_name = request.query_params['name']
106 return Response(queryset.filter(slug__iexact=slug_name).exists())
107
[end of resolwe/flow/views/mixins.py]
[start of resolwe/flow/views/data.py]
1 """Data viewset."""
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 from django.db import transaction
5
6 from guardian.shortcuts import assign_perm
7 from rest_framework import exceptions, mixins, status, viewsets
8 from rest_framework.decorators import list_route
9 from rest_framework.response import Response
10
11 from resolwe.flow.filters import DataFilter
12 from resolwe.flow.managers import manager
13 from resolwe.flow.models import Collection, Data, Process
14 from resolwe.flow.serializers import DataSerializer
15 from resolwe.flow.utils import dict_dot, get_data_checksum, iterate_schema
16 from resolwe.permissions.loader import get_permissions_class
17 from resolwe.permissions.mixins import ResolwePermissionsMixin
18 from resolwe.permissions.shortcuts import get_objects_for_user
19
20 from .mixins import ResolweCheckSlugMixin, ResolweCreateModelMixin, ResolweUpdateModelMixin
21
22
23 class DataViewSet(ResolweCreateModelMixin,
24 mixins.RetrieveModelMixin,
25 ResolweUpdateModelMixin,
26 mixins.DestroyModelMixin,
27 mixins.ListModelMixin,
28 ResolwePermissionsMixin,
29 ResolweCheckSlugMixin,
30 viewsets.GenericViewSet):
31 """API view for :class:`Data` objects."""
32
33 queryset = Data.objects.all().prefetch_related('process', 'descriptor_schema', 'contributor')
34 serializer_class = DataSerializer
35 permission_classes = (get_permissions_class(),)
36 filter_class = DataFilter
37 ordering_fields = ('id', 'created', 'modified', 'started', 'finished', 'name')
38 ordering = ('id',)
39
40 def create(self, request, *args, **kwargs):
41 """Create a resource."""
42 collections = request.data.get('collections', [])
43
44 # check that user has permissions on all collections that Data
45 # object will be added to
46 for collection_id in collections:
47 try:
48 collection = Collection.objects.get(pk=collection_id)
49 except Collection.DoesNotExist:
50 return Response({'collections': ['Invalid pk "{}" - object does not exist.'.format(collection_id)]},
51 status=status.HTTP_400_BAD_REQUEST)
52
53 if not request.user.has_perm('add_collection', obj=collection):
54 if request.user.is_authenticated():
55 raise exceptions.PermissionDenied
56 else:
57 raise exceptions.NotFound
58
59 # translate processe's slug to id
60 process_slug = request.data.get('process', None)
61 process_query = Process.objects.filter(slug=process_slug)
62 process_query = get_objects_for_user(request.user, 'view_process', process_query)
63 try:
64 process = process_query.latest()
65 except Process.DoesNotExist:
66 return Response({'process': ['Invalid process slug "{}" - object does not exist.'.format(process_slug)]},
67 status=status.HTTP_400_BAD_REQUEST)
68 request.data['process'] = process.pk
69
70 # perform "get_or_create" if requested - return existing object
71 # if found
72 if kwargs.pop('get_or_create', False):
73 process_input = request.data.get('input', {})
74
75 # use default values if they are not given
76 for field_schema, fields, path in iterate_schema(process_input, process.input_schema):
77 if 'default' in field_schema and field_schema['name'] not in fields:
78 dict_dot(process_input, path, field_schema['default'])
79
80 checksum = get_data_checksum(process_input, process.slug, process.version)
81 data_qs = Data.objects.filter(
82 checksum=checksum,
83 process__persistence__in=[Process.PERSISTENCE_CACHED, Process.PERSISTENCE_TEMP],
84 )
85 data_qs = get_objects_for_user(request.user, 'view_data', data_qs)
86 if data_qs.exists():
87 data = data_qs.order_by('created').last()
88 serializer = self.get_serializer(data)
89 return Response(serializer.data)
90
91 # create the objects
92 resp = super(DataViewSet, self).create(request, *args, **kwargs)
93
94 # run manager
95 manager.communicate()
96
97 return resp
98
99 @list_route(methods=[u'post'])
100 def get_or_create(self, request, *args, **kwargs):
101 """Get ``Data`` object if similar already exists, otherwise create it."""
102 kwargs['get_or_create'] = True
103 return self.create(request, *args, **kwargs)
104
105 def perform_create(self, serializer):
106 """Create a resource."""
107 with transaction.atomic():
108 instance = serializer.save()
109
110 # Assign all permissions to the object contributor.
111 for permission in list(zip(*instance._meta.permissions))[0]: # pylint: disable=protected-access
112 assign_perm(permission, instance.contributor, instance)
113
114 # Assign data object to all specified collections.
115 collections = self.request.data.get('collections', [])
116 for c in collections:
117 collection = Collection.objects.get(pk=c)
118 collection.data.add(instance)
119
[end of resolwe/flow/views/data.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/resolwe/flow/views/data.py b/resolwe/flow/views/data.py
--- a/resolwe/flow/views/data.py
+++ b/resolwe/flow/views/data.py
@@ -51,10 +51,14 @@
status=status.HTTP_400_BAD_REQUEST)
if not request.user.has_perm('add_collection', obj=collection):
- if request.user.is_authenticated():
- raise exceptions.PermissionDenied
+ if request.user.has_perm('view_collection', obj=collection):
+ raise exceptions.PermissionDenied(
+ "You don't have `ADD` permission on collection (id: {}).".format(collection_id)
+ )
else:
- raise exceptions.NotFound
+ raise exceptions.NotFound(
+ "Collection not found (id: {}).".format(collection_id)
+ )
# translate processe's slug to id
process_slug = request.data.get('process', None)
diff --git a/resolwe/flow/views/mixins.py b/resolwe/flow/views/mixins.py
--- a/resolwe/flow/views/mixins.py
+++ b/resolwe/flow/views/mixins.py
@@ -4,6 +4,7 @@
from django.db import IntegrityError, transaction
from guardian.shortcuts import assign_perm
+from guardian.utils import get_anonymous_user
from rest_framework import mixins, status
from rest_framework.decorators import list_route
from rest_framework.response import Response
@@ -38,7 +39,11 @@
'Invalid descriptor_schema slug "{}" - object does not exist.'.format(ds_slug)]},
status=status.HTTP_400_BAD_REQUEST)
- request.data['contributor'] = request.user.pk
+ if request.user.is_anonymous():
+ request.data['contributor'] = get_anonymous_user().pk
+ else:
+ request.data['contributor'] = request.user.pk
+
try:
return super(ResolweCreateModelMixin, self).create(request, *args, **kwargs)
| {"golden_diff": "diff --git a/resolwe/flow/views/data.py b/resolwe/flow/views/data.py\n--- a/resolwe/flow/views/data.py\n+++ b/resolwe/flow/views/data.py\n@@ -51,10 +51,14 @@\n status=status.HTTP_400_BAD_REQUEST)\n \n if not request.user.has_perm('add_collection', obj=collection):\n- if request.user.is_authenticated():\n- raise exceptions.PermissionDenied\n+ if request.user.has_perm('view_collection', obj=collection):\n+ raise exceptions.PermissionDenied(\n+ \"You don't have `ADD` permission on collection (id: {}).\".format(collection_id)\n+ )\n else:\n- raise exceptions.NotFound\n+ raise exceptions.NotFound(\n+ \"Collection not found (id: {}).\".format(collection_id)\n+ )\n \n # translate processe's slug to id\n process_slug = request.data.get('process', None)\ndiff --git a/resolwe/flow/views/mixins.py b/resolwe/flow/views/mixins.py\n--- a/resolwe/flow/views/mixins.py\n+++ b/resolwe/flow/views/mixins.py\n@@ -4,6 +4,7 @@\n from django.db import IntegrityError, transaction\n \n from guardian.shortcuts import assign_perm\n+from guardian.utils import get_anonymous_user\n from rest_framework import mixins, status\n from rest_framework.decorators import list_route\n from rest_framework.response import Response\n@@ -38,7 +39,11 @@\n 'Invalid descriptor_schema slug \"{}\" - object does not exist.'.format(ds_slug)]},\n status=status.HTTP_400_BAD_REQUEST)\n \n- request.data['contributor'] = request.user.pk\n+ if request.user.is_anonymous():\n+ request.data['contributor'] = get_anonymous_user().pk\n+ else:\n+ request.data['contributor'] = request.user.pk\n+\n try:\n return super(ResolweCreateModelMixin, self).create(request, *args, **kwargs)\n", "issue": "Data.create returns bad message when missing ADD permission on collection\nIf a user tries to run a process inside a collection, but doesn't have `ADD` permission, the api returns `{\"detail\":\"Not found.\"}`\n", "before_files": [{"content": "\"\"\"Mixins used in Resolwe Viewsets.\"\"\"\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom django.db import IntegrityError, transaction\n\nfrom guardian.shortcuts import assign_perm\nfrom rest_framework import mixins, status\nfrom rest_framework.decorators import list_route\nfrom rest_framework.response import Response\n\nfrom resolwe.flow.models import DescriptorSchema\nfrom resolwe.permissions.shortcuts import get_objects_for_user\n\n\nclass ResolweCreateModelMixin(mixins.CreateModelMixin):\n \"\"\"Mixin to support creating new `Resolwe` models.\n\n Extends `django_rest_framework`'s class `CreateModelMixin` with:\n\n * append user's id from request to posted data as `contributor`\n key\n * catch `IntegrityError`s, so we can return HTTP status 409\n instead of raising error\n\n \"\"\"\n\n def create(self, request, *args, **kwargs):\n \"\"\"Create a resource.\"\"\"\n ds_slug = request.data.get('descriptor_schema', None)\n if ds_slug:\n ds_query = DescriptorSchema.objects.filter(slug=ds_slug)\n ds_query = get_objects_for_user(request.user, 'view_descriptorschema', ds_query)\n try:\n request.data['descriptor_schema'] = ds_query.latest().pk\n except DescriptorSchema.DoesNotExist:\n return Response(\n {'descriptor_schema': [\n 'Invalid descriptor_schema slug \"{}\" - object does not exist.'.format(ds_slug)]},\n status=status.HTTP_400_BAD_REQUEST)\n\n request.data['contributor'] = request.user.pk\n try:\n return super(ResolweCreateModelMixin, self).create(request, *args, **kwargs)\n\n except IntegrityError as ex:\n return Response({u'error': str(ex)}, 
status=status.HTTP_409_CONFLICT)\n\n def perform_create(self, serializer):\n \"\"\"Create a resource.\"\"\"\n with transaction.atomic():\n instance = serializer.save()\n\n # Assign all permissions to the object contributor.\n for permission in list(zip(*instance._meta.permissions))[0]: # pylint: disable=protected-access\n assign_perm(permission, instance.contributor, instance)\n\n\nclass ResolweUpdateModelMixin(mixins.UpdateModelMixin):\n \"\"\"Mixin to support updating `Resolwe` models.\n\n Extends `django_rest_framework`'s class `UpdateModelMixin` with:\n\n * translate `descriptor_schema` field from DescriptorSchema's\n slug to its id and return 400 error Response if it doesn't\n exists\n\n \"\"\"\n\n def update(self, request, *args, **kwargs):\n \"\"\"Update a resource.\"\"\"\n ds_slug = request.data.get('descriptor_schema', None)\n if ds_slug:\n ds_query = DescriptorSchema.objects.filter(slug=ds_slug)\n ds_query = get_objects_for_user(request.user, 'view_descriptorschema', ds_query)\n try:\n request.data['descriptor_schema'] = ds_query.latest().pk\n except DescriptorSchema.DoesNotExist:\n return Response(\n {'descriptor_schema': [\n 'Invalid descriptor_schema slug \"{}\" - object does not exist.'.format(ds_slug)]},\n status=status.HTTP_400_BAD_REQUEST)\n\n return super(ResolweUpdateModelMixin, self).update(request, *args, **kwargs)\n\n\nclass ResolweCheckSlugMixin(object):\n \"\"\"Slug validation.\"\"\"\n\n @list_route(methods=[u'get'])\n def slug_exists(self, request):\n \"\"\"Check if given url slug exists.\n\n Check if slug given in query parameter ``name`` exists. Return\n ``True`` if slug already exists and ``False`` otherwise.\n\n \"\"\"\n if not request.user.is_authenticated():\n return Response(status=status.HTTP_401_UNAUTHORIZED)\n\n if 'name' not in request.query_params:\n return Response({'error': 'Query parameter `name` must be given.'},\n status=status.HTTP_400_BAD_REQUEST)\n\n queryset = self.get_queryset()\n slug_name = request.query_params['name']\n return Response(queryset.filter(slug__iexact=slug_name).exists())\n", "path": "resolwe/flow/views/mixins.py"}, {"content": "\"\"\"Data viewset.\"\"\"\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom django.db import transaction\n\nfrom guardian.shortcuts import assign_perm\nfrom rest_framework import exceptions, mixins, status, viewsets\nfrom rest_framework.decorators import list_route\nfrom rest_framework.response import Response\n\nfrom resolwe.flow.filters import DataFilter\nfrom resolwe.flow.managers import manager\nfrom resolwe.flow.models import Collection, Data, Process\nfrom resolwe.flow.serializers import DataSerializer\nfrom resolwe.flow.utils import dict_dot, get_data_checksum, iterate_schema\nfrom resolwe.permissions.loader import get_permissions_class\nfrom resolwe.permissions.mixins import ResolwePermissionsMixin\nfrom resolwe.permissions.shortcuts import get_objects_for_user\n\nfrom .mixins import ResolweCheckSlugMixin, ResolweCreateModelMixin, ResolweUpdateModelMixin\n\n\nclass DataViewSet(ResolweCreateModelMixin,\n mixins.RetrieveModelMixin,\n ResolweUpdateModelMixin,\n mixins.DestroyModelMixin,\n mixins.ListModelMixin,\n ResolwePermissionsMixin,\n ResolweCheckSlugMixin,\n viewsets.GenericViewSet):\n \"\"\"API view for :class:`Data` objects.\"\"\"\n\n queryset = Data.objects.all().prefetch_related('process', 'descriptor_schema', 'contributor')\n serializer_class = DataSerializer\n permission_classes = (get_permissions_class(),)\n filter_class = DataFilter\n 
ordering_fields = ('id', 'created', 'modified', 'started', 'finished', 'name')\n ordering = ('id',)\n\n def create(self, request, *args, **kwargs):\n \"\"\"Create a resource.\"\"\"\n collections = request.data.get('collections', [])\n\n # check that user has permissions on all collections that Data\n # object will be added to\n for collection_id in collections:\n try:\n collection = Collection.objects.get(pk=collection_id)\n except Collection.DoesNotExist:\n return Response({'collections': ['Invalid pk \"{}\" - object does not exist.'.format(collection_id)]},\n status=status.HTTP_400_BAD_REQUEST)\n\n if not request.user.has_perm('add_collection', obj=collection):\n if request.user.is_authenticated():\n raise exceptions.PermissionDenied\n else:\n raise exceptions.NotFound\n\n # translate processe's slug to id\n process_slug = request.data.get('process', None)\n process_query = Process.objects.filter(slug=process_slug)\n process_query = get_objects_for_user(request.user, 'view_process', process_query)\n try:\n process = process_query.latest()\n except Process.DoesNotExist:\n return Response({'process': ['Invalid process slug \"{}\" - object does not exist.'.format(process_slug)]},\n status=status.HTTP_400_BAD_REQUEST)\n request.data['process'] = process.pk\n\n # perform \"get_or_create\" if requested - return existing object\n # if found\n if kwargs.pop('get_or_create', False):\n process_input = request.data.get('input', {})\n\n # use default values if they are not given\n for field_schema, fields, path in iterate_schema(process_input, process.input_schema):\n if 'default' in field_schema and field_schema['name'] not in fields:\n dict_dot(process_input, path, field_schema['default'])\n\n checksum = get_data_checksum(process_input, process.slug, process.version)\n data_qs = Data.objects.filter(\n checksum=checksum,\n process__persistence__in=[Process.PERSISTENCE_CACHED, Process.PERSISTENCE_TEMP],\n )\n data_qs = get_objects_for_user(request.user, 'view_data', data_qs)\n if data_qs.exists():\n data = data_qs.order_by('created').last()\n serializer = self.get_serializer(data)\n return Response(serializer.data)\n\n # create the objects\n resp = super(DataViewSet, self).create(request, *args, **kwargs)\n\n # run manager\n manager.communicate()\n\n return resp\n\n @list_route(methods=[u'post'])\n def get_or_create(self, request, *args, **kwargs):\n \"\"\"Get ``Data`` object if similar already exists, otherwise create it.\"\"\"\n kwargs['get_or_create'] = True\n return self.create(request, *args, **kwargs)\n\n def perform_create(self, serializer):\n \"\"\"Create a resource.\"\"\"\n with transaction.atomic():\n instance = serializer.save()\n\n # Assign all permissions to the object contributor.\n for permission in list(zip(*instance._meta.permissions))[0]: # pylint: disable=protected-access\n assign_perm(permission, instance.contributor, instance)\n\n # Assign data object to all specified collections.\n collections = self.request.data.get('collections', [])\n for c in collections:\n collection = Collection.objects.get(pk=c)\n collection.data.add(instance)\n", "path": "resolwe/flow/views/data.py"}]} | 2,974 | 439 |
gh_patches_debug_63309 | rasdani/github-patches | git_diff | scikit-hep__pyhf-924 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Remove cloudpickle constraints when updating to TensorFlow Probability v0.11
# Description
Once TensorFlow Probability `v0.11.0` is released there will no longer be the need for PR #915, and so that should be reverted.
Related Issues: #815
</issue>
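For context, a minimal sketch of what the `tensorflow` extra could look like once the temporary pin is reverted; the `~=0.11` specifier is an assumption for illustration, not necessarily what the maintainers will ship:

```python
# Hypothetical extras_require entry after dropping the temporary cloudpickle pin.
# The "~=0.11" bound is an assumption for illustration only.
extras_require = {
    'tensorflow': [
        'tensorflow~=2.0',
        'tensorflow-probability~=0.11',  # no cloudpickle workaround needed
    ],
}
```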
<code>
[start of setup.py]
1 from setuptools import setup
2
3 extras_require = {
4 'tensorflow': [
5 'tensorflow~=2.0',
6 'tensorflow-probability~=0.8',
7 'cloudpickle!=1.5.0', # TODO: Temp patch until tfp v0.11
8 ],
9 'torch': ['torch~=1.2'],
10 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],
11 'xmlio': ['uproot'],
12 'minuit': ['iminuit'],
13 }
14 extras_require['backends'] = sorted(
15 set(
16 extras_require['tensorflow']
17 + extras_require['torch']
18 + extras_require['jax']
19 + extras_require['minuit']
20 )
21 )
22 extras_require['contrib'] = sorted(set(['matplotlib']))
23 extras_require['lint'] = sorted(set(['pyflakes', 'black']))
24
25 extras_require['test'] = sorted(
26 set(
27 extras_require['backends']
28 + extras_require['xmlio']
29 + extras_require['contrib']
30 + [
31 'pytest~=3.5',
32 'pytest-cov>=2.5.1',
33 'pytest-mock',
34 'pytest-benchmark[histogram]',
35 'pytest-console-scripts',
36 'pytest-mpl',
37 'pydocstyle',
38 'coverage>=4.0', # coveralls
39 'papermill~=2.0',
40 'nteract-scrapbook~=0.2',
41 'jupyter',
42 'uproot~=3.3',
43 'graphviz',
44 'jsonpatch',
45 ]
46 )
47 )
48 extras_require['docs'] = sorted(
49 set(
50 [
51 'sphinx~=3.0.0', # Sphinx v3.1.X regressions break docs
52 'sphinxcontrib-bibtex',
53 'sphinx-click',
54 'sphinx_rtd_theme',
55 'nbsphinx',
56 'ipywidgets',
57 'sphinx-issues',
58 'sphinx-copybutton>0.2.9',
59 ]
60 )
61 )
62 extras_require['develop'] = sorted(
63 set(
64 extras_require['docs']
65 + extras_require['lint']
66 + extras_require['test']
67 + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'check-manifest', 'twine']
68 )
69 )
70 extras_require['complete'] = sorted(set(sum(extras_require.values(), [])))
71
72
73 setup(
74 extras_require=extras_require,
75 use_scm_version=lambda: {'local_scheme': lambda version: ''},
76 )
77
[end of setup.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -3,8 +3,7 @@
extras_require = {
'tensorflow': [
'tensorflow~=2.0',
- 'tensorflow-probability~=0.8',
- 'cloudpickle!=1.5.0', # TODO: Temp patch until tfp v0.11
+ 'tensorflow-probability~=0.10', # TODO: Temp patch until tfp v0.11
],
'torch': ['torch~=1.2'],
'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -3,8 +3,7 @@\n extras_require = {\n 'tensorflow': [\n 'tensorflow~=2.0',\n- 'tensorflow-probability~=0.8',\n- 'cloudpickle!=1.5.0', # TODO: Temp patch until tfp v0.11\n+ 'tensorflow-probability~=0.10', # TODO: Temp patch until tfp v0.11\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n", "issue": "Remove cloudpickle constraints when updating to TensorFlow Probability v0.11\n# Description\r\n\r\nOnce TensorFlow Probability `v0.11.0` is released there will no longer be the need for PR #915, and so that should be reverted.\r\n\r\nRelated Issues: #815 \r\n\n", "before_files": [{"content": "from setuptools import setup\n\nextras_require = {\n 'tensorflow': [\n 'tensorflow~=2.0',\n 'tensorflow-probability~=0.8',\n 'cloudpickle!=1.5.0', # TODO: Temp patch until tfp v0.11\n ],\n 'torch': ['torch~=1.2'],\n 'jax': ['jax~=0.1,>0.1.51', 'jaxlib~=0.1,>0.1.33'],\n 'xmlio': ['uproot'],\n 'minuit': ['iminuit'],\n}\nextras_require['backends'] = sorted(\n set(\n extras_require['tensorflow']\n + extras_require['torch']\n + extras_require['jax']\n + extras_require['minuit']\n )\n)\nextras_require['contrib'] = sorted(set(['matplotlib']))\nextras_require['lint'] = sorted(set(['pyflakes', 'black']))\n\nextras_require['test'] = sorted(\n set(\n extras_require['backends']\n + extras_require['xmlio']\n + extras_require['contrib']\n + [\n 'pytest~=3.5',\n 'pytest-cov>=2.5.1',\n 'pytest-mock',\n 'pytest-benchmark[histogram]',\n 'pytest-console-scripts',\n 'pytest-mpl',\n 'pydocstyle',\n 'coverage>=4.0', # coveralls\n 'papermill~=2.0',\n 'nteract-scrapbook~=0.2',\n 'jupyter',\n 'uproot~=3.3',\n 'graphviz',\n 'jsonpatch',\n ]\n )\n)\nextras_require['docs'] = sorted(\n set(\n [\n 'sphinx~=3.0.0', # Sphinx v3.1.X regressions break docs\n 'sphinxcontrib-bibtex',\n 'sphinx-click',\n 'sphinx_rtd_theme',\n 'nbsphinx',\n 'ipywidgets',\n 'sphinx-issues',\n 'sphinx-copybutton>0.2.9',\n ]\n )\n)\nextras_require['develop'] = sorted(\n set(\n extras_require['docs']\n + extras_require['lint']\n + extras_require['test']\n + ['nbdime', 'bumpversion', 'ipython', 'pre-commit', 'check-manifest', 'twine']\n )\n)\nextras_require['complete'] = sorted(set(sum(extras_require.values(), [])))\n\n\nsetup(\n extras_require=extras_require,\n use_scm_version=lambda: {'local_scheme': lambda version: ''},\n)\n", "path": "setup.py"}]} | 1,291 | 162 |
gh_patches_debug_29296 | rasdani/github-patches | git_diff | ethereum__consensus-specs-1156 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Use custom types for container definitions
This is the "successor" to #695 which was recently closed for staleness. I'm intending to address this alongside a rewrite of #917.
</issue>
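A rough sketch of the kind of type preamble the spec builder could emit once custom types are handled consistently; the byte aliases mirror what `build_spec.py` already generates, but the exact alias set and ordering here are assumptions for illustration, not normative output:

```python
from typing import NewType
from eth2spec.utils.ssz.ssz_typing import BytesN

# Byte aliases first (as the builder already generates), then domain aliases,
# then the int-backed NewTypes. Names below are illustrative only.
Bytes32 = BytesN[32]
Bytes48 = BytesN[48]
Bytes96 = BytesN[96]
Hash = Bytes32
BLSPubkey = Bytes48
BLSSignature = Bytes96
Slot = NewType('Slot', int)
Epoch = NewType('Epoch', int)
```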
<code>
[start of scripts/build_spec.py]
1 import re
2 from function_puller import (
3 get_spec,
4 SpecObject,
5 )
6 from argparse import ArgumentParser
7 from typing import (
8 Dict,
9 List,
10 Optional,
11 )
12
13
14 PHASE0_IMPORTS = '''from typing import (
15 Any,
16 Dict,
17 List,
18 NewType,
19 Tuple,
20 )
21
22 from eth2spec.utils.ssz.ssz_impl import (
23 hash_tree_root,
24 signing_root,
25 )
26 from eth2spec.utils.ssz.ssz_typing import (
27 # unused: uint8, uint16, uint32, uint128, uint256,
28 uint64, Container, Vector, BytesN
29 )
30 from eth2spec.utils.bls import (
31 bls_aggregate_pubkeys,
32 bls_verify,
33 bls_verify_multiple,
34 )
35 # Note: 'int' type defaults to being interpreted as a uint64 by SSZ implementation.
36
37 from eth2spec.utils.hash_function import hash
38 '''
39 PHASE1_IMPORTS = '''from typing import (
40 Any,
41 Dict,
42 List,
43 NewType,
44 Tuple,
45 )
46
47 from eth2spec.utils.ssz.ssz_impl import (
48 hash_tree_root,
49 signing_root,
50 serialize,
51 is_empty,
52 )
53 from eth2spec.utils.ssz.ssz_typing import (
54 # unused: uint8, uint16, uint32, uint128, uint256,
55 uint64, Container, Vector, BytesN
56 )
57 from eth2spec.utils.bls import (
58 bls_aggregate_pubkeys,
59 bls_verify,
60 bls_verify_multiple,
61 )
62
63 from eth2spec.utils.hash_function import hash
64 '''
65 NEW_TYPES = {
66 'Slot': 'int',
67 'Epoch': 'int',
68 'Shard': 'int',
69 'ValidatorIndex': 'int',
70 'Gwei': 'int',
71 }
72 BYTE_TYPES = [4, 32, 48, 96]
73 SUNDRY_FUNCTIONS = '''
74 def get_ssz_type_by_name(name: str) -> Container:
75 return globals()[name]
76
77
78 # Monkey patch validator compute committee code
79 _compute_committee = compute_committee
80 committee_cache = {}
81
82
83 def compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:
84 param_hash = (hash_tree_root(indices), seed, index, count)
85
86 if param_hash in committee_cache:
87 return committee_cache[param_hash]
88 else:
89 ret = _compute_committee(indices, seed, index, count)
90 committee_cache[param_hash] = ret
91 return ret
92
93
94 # Monkey patch hash cache
95 _hash = hash
96 hash_cache = {}
97
98
99 def hash(x):
100 if x in hash_cache:
101 return hash_cache[x]
102 else:
103 ret = _hash(x)
104 hash_cache[x] = ret
105 return ret
106
107
108 # Access to overwrite spec constants based on configuration
109 def apply_constants_preset(preset: Dict[str, Any]):
110 global_vars = globals()
111 for k, v in preset.items():
112 global_vars[k] = v
113
114 # Deal with derived constants
115 global_vars['GENESIS_EPOCH'] = slot_to_epoch(GENESIS_SLOT)
116
117 # Initialize SSZ types again, to account for changed lengths
118 init_SSZ_types()
119 '''
120
121
122 def objects_to_spec(functions: Dict[str, str],
123 constants: Dict[str, str],
124 ssz_objects: Dict[str, str],
125 inserts: Dict[str, str],
126 imports: Dict[str, str],
127 new_types: Dict[str, str],
128 byte_types: List[int],
129 ) -> str:
130 """
131 Given all the objects that constitute a spec, combine them into a single pyfile.
132 """
133 new_type_definitions = \
134 '\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in new_types.items()])
135 new_type_definitions += '\n' + '\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in byte_types])
136 functions_spec = '\n\n'.join(functions.values())
137 constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))
138 ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values())
139 ssz_objects_reinitialization_spec = (
140 'def init_SSZ_types():\n global_vars = globals()\n\n '
141 + '\n\n '.join([re.sub(r'(?!\n\n)\n', r'\n ', value[:-1]) for value in ssz_objects.values()])
142 + '\n\n'
143 + '\n'.join(map(lambda x: ' global_vars[\'%s\'] = %s' % (x, x), ssz_objects.keys()))
144 )
145 spec = (
146 imports
147 + '\n' + new_type_definitions
148 + '\n\n' + constants_spec
149 + '\n\n\n' + ssz_objects_instantiation_spec
150 + '\n\n' + functions_spec
151 + '\n' + SUNDRY_FUNCTIONS
152 + '\n\n' + ssz_objects_reinitialization_spec
153 + '\n'
154 )
155 # Handle @inserts
156 for key, value in inserts.items():
157 spec = re.sub('[ ]*# %s\\n' % key, value, spec)
158 return spec
159
160
161 def combine_functions(old_functions: Dict[str, str], new_functions: Dict[str, str]) -> Dict[str, str]:
162 for key, value in new_functions.items():
163 old_functions[key] = value
164 return old_functions
165
166
167 def combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, str]) -> Dict[str, str]:
168 for key, value in new_constants.items():
169 old_constants[key] = value
170 return old_constants
171
172
173 def dependency_order_ssz_objects(objects: Dict[str, str]) -> None:
174 """
175 Determines which SSZ Object is depenedent on which other and orders them appropriately
176 """
177 items = list(objects.items())
178 for key, value in items:
179 dependencies = re.findall(r'(: [A-Z][\w[]*)', value)
180 dependencies = map(lambda x: re.sub(r'\W|Vector|List|Container|uint\d+|Bytes\d+|bytes', '', x), dependencies)
181 for dep in dependencies:
182 if dep in NEW_TYPES or len(dep) == 0:
183 continue
184 key_list = list(objects.keys())
185 for item in [dep, key] + key_list[key_list.index(dep)+1:]:
186 objects[item] = objects.pop(item)
187
188
189 def combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]) -> Dict[str, str]:
190 """
191 Takes in old spec and new spec ssz objects, combines them,
192 and returns the newer versions of the objects in dependency order.
193 """
194 for key, value in new_objects.items():
195 if key in old_objects:
196 # remove trailing newline
197 old_objects[key] = old_objects[key]
198 # remove leading variable name
199 value = re.sub(r'^class [\w]*\(Container\):\n', '', value)
200 old_objects[key] = old_objects.get(key, '') + value
201 dependency_order_ssz_objects(old_objects)
202 return old_objects
203
204
205 # inserts are handeled the same way as functions
206 combine_inserts = combine_functions
207
208
209 def combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:
210 """
211 Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.
212 """
213 functions0, constants0, ssz_objects0, inserts0 = spec0
214 functions1, constants1, ssz_objects1, inserts1 = spec1
215 functions = combine_functions(functions0, functions1)
216 constants = combine_constants(constants0, constants1)
217 ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1)
218 inserts = combine_inserts(inserts0, inserts1)
219 return functions, constants, ssz_objects, inserts
220
221
222 def build_phase0_spec(sourcefile: str, outfile: str=None) -> Optional[str]:
223 functions, constants, ssz_objects, inserts = get_spec(sourcefile)
224 spec = objects_to_spec(functions, constants, ssz_objects, inserts, PHASE0_IMPORTS, NEW_TYPES, BYTE_TYPES)
225 if outfile is not None:
226 with open(outfile, 'w') as out:
227 out.write(spec)
228 return spec
229
230
231 def build_phase1_spec(phase0_sourcefile: str,
232 phase1_custody_sourcefile: str,
233 phase1_shard_sourcefile: str,
234 outfile: str=None) -> Optional[str]:
235 phase0_spec = get_spec(phase0_sourcefile)
236 phase1_custody = get_spec(phase1_custody_sourcefile)
237 phase1_shard_data = get_spec(phase1_shard_sourcefile)
238 spec_objects = phase0_spec
239 for value in [phase1_custody, phase1_shard_data]:
240 spec_objects = combine_spec_objects(spec_objects, value)
241 spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS, NEW_TYPES, BYTE_TYPES)
242 if outfile is not None:
243 with open(outfile, 'w') as out:
244 out.write(spec)
245 return spec
246
247
248 if __name__ == '__main__':
249 description = '''
250 Build the specs from the md docs.
251 If building phase 0:
252 1st argument is input spec.md
253 2nd argument is output spec.py
254
255 If building phase 1:
256 1st argument is input spec_phase0.md
257 2nd argument is input spec_phase1_custody.md
258 3rd argument is input spec_phase1_shard_data.md
259 4th argument is output spec.py
260 '''
261 parser = ArgumentParser(description=description)
262 parser.add_argument("-p", "--phase", dest="phase", type=int, default=0, help="Build for phase #")
263 parser.add_argument(dest="files", help="Input and output files", nargs="+")
264
265 args = parser.parse_args()
266 if args.phase == 0:
267 if len(args.files) == 2:
268 build_phase0_spec(*args.files)
269 else:
270 print(" Phase 0 requires an output as well as an input file.")
271 elif args.phase == 1:
272 if len(args.files) == 4:
273 build_phase1_spec(*args.files)
274 else:
275 print(" Phase 1 requires an output as well as 3 input files (phase0.md and phase1.md, phase1.md)")
276 else:
277 print("Invalid phase: {0}".format(args.phase))
278
[end of scripts/build_spec.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/scripts/build_spec.py b/scripts/build_spec.py
--- a/scripts/build_spec.py
+++ b/scripts/build_spec.py
@@ -130,9 +130,10 @@
"""
Given all the objects that constitute a spec, combine them into a single pyfile.
"""
- new_type_definitions = \
- '\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in new_types.items()])
- new_type_definitions += '\n' + '\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in byte_types])
+ new_type_definitions = '\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in byte_types])
+ new_type_definitions += '\n' + '\n'.join(['Hash = Bytes32', 'BLSPubkey = Bytes48', 'BLSSignature = Bytes96'])
+ new_type_definitions += \
+ '\n' + '\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in new_types.items()])
functions_spec = '\n\n'.join(functions.values())
constants_spec = '\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))
ssz_objects_instantiation_spec = '\n\n'.join(ssz_objects.values())
@@ -177,7 +178,7 @@
items = list(objects.items())
for key, value in items:
dependencies = re.findall(r'(: [A-Z][\w[]*)', value)
- dependencies = map(lambda x: re.sub(r'\W|Vector|List|Container|uint\d+|Bytes\d+|bytes', '', x), dependencies)
+ dependencies = map(lambda x: re.sub(r'\W|Vector|List|Container|Hash|BLSPubkey|BLSSignature|uint\d+|Bytes\d+|bytes', '', x), dependencies)
for dep in dependencies:
if dep in NEW_TYPES or len(dep) == 0:
continue
| {"golden_diff": "diff --git a/scripts/build_spec.py b/scripts/build_spec.py\n--- a/scripts/build_spec.py\n+++ b/scripts/build_spec.py\n@@ -130,9 +130,10 @@\n \"\"\"\n Given all the objects that constitute a spec, combine them into a single pyfile.\n \"\"\"\n- new_type_definitions = \\\n- '\\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in new_types.items()])\n- new_type_definitions += '\\n' + '\\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in byte_types])\n+ new_type_definitions = '\\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in byte_types])\n+ new_type_definitions += '\\n' + '\\n'.join(['Hash = Bytes32', 'BLSPubkey = Bytes48', 'BLSSignature = Bytes96'])\n+ new_type_definitions += \\\n+ '\\n' + '\\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in new_types.items()])\n functions_spec = '\\n\\n'.join(functions.values())\n constants_spec = '\\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))\n ssz_objects_instantiation_spec = '\\n\\n'.join(ssz_objects.values())\n@@ -177,7 +178,7 @@\n items = list(objects.items())\n for key, value in items:\n dependencies = re.findall(r'(: [A-Z][\\w[]*)', value)\n- dependencies = map(lambda x: re.sub(r'\\W|Vector|List|Container|uint\\d+|Bytes\\d+|bytes', '', x), dependencies)\n+ dependencies = map(lambda x: re.sub(r'\\W|Vector|List|Container|Hash|BLSPubkey|BLSSignature|uint\\d+|Bytes\\d+|bytes', '', x), dependencies)\n for dep in dependencies:\n if dep in NEW_TYPES or len(dep) == 0:\n continue\n", "issue": "Use custom types for container definitions\nThis is the \"successor\" to #695 which was recently closed for staleness. I'm intending to address this alongside a rewrite of #917.\n", "before_files": [{"content": "import re\nfrom function_puller import (\n get_spec,\n SpecObject,\n)\nfrom argparse import ArgumentParser\nfrom typing import (\n Dict,\n List,\n Optional,\n)\n\n\nPHASE0_IMPORTS = '''from typing import (\n Any,\n Dict,\n List,\n NewType,\n Tuple,\n)\n\nfrom eth2spec.utils.ssz.ssz_impl import (\n hash_tree_root,\n signing_root,\n)\nfrom eth2spec.utils.ssz.ssz_typing import (\n # unused: uint8, uint16, uint32, uint128, uint256,\n uint64, Container, Vector, BytesN\n)\nfrom eth2spec.utils.bls import (\n bls_aggregate_pubkeys,\n bls_verify,\n bls_verify_multiple,\n)\n# Note: 'int' type defaults to being interpreted as a uint64 by SSZ implementation.\n\nfrom eth2spec.utils.hash_function import hash\n'''\nPHASE1_IMPORTS = '''from typing import (\n Any,\n Dict,\n List,\n NewType,\n Tuple,\n)\n\nfrom eth2spec.utils.ssz.ssz_impl import (\n hash_tree_root,\n signing_root,\n serialize,\n is_empty,\n)\nfrom eth2spec.utils.ssz.ssz_typing import (\n # unused: uint8, uint16, uint32, uint128, uint256,\n uint64, Container, Vector, BytesN\n)\nfrom eth2spec.utils.bls import (\n bls_aggregate_pubkeys,\n bls_verify,\n bls_verify_multiple,\n)\n\nfrom eth2spec.utils.hash_function import hash\n'''\nNEW_TYPES = {\n 'Slot': 'int',\n 'Epoch': 'int',\n 'Shard': 'int',\n 'ValidatorIndex': 'int',\n 'Gwei': 'int',\n}\nBYTE_TYPES = [4, 32, 48, 96]\nSUNDRY_FUNCTIONS = '''\ndef get_ssz_type_by_name(name: str) -> Container:\n return globals()[name]\n\n\n# Monkey patch validator compute committee code\n_compute_committee = compute_committee\ncommittee_cache = {}\n\n\ndef compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:\n param_hash = (hash_tree_root(indices), seed, index, count)\n\n if param_hash in committee_cache:\n return 
committee_cache[param_hash]\n else:\n ret = _compute_committee(indices, seed, index, count)\n committee_cache[param_hash] = ret\n return ret\n\n\n# Monkey patch hash cache\n_hash = hash\nhash_cache = {}\n\n\ndef hash(x):\n if x in hash_cache:\n return hash_cache[x]\n else:\n ret = _hash(x)\n hash_cache[x] = ret\n return ret\n\n\n# Access to overwrite spec constants based on configuration\ndef apply_constants_preset(preset: Dict[str, Any]):\n global_vars = globals()\n for k, v in preset.items():\n global_vars[k] = v\n\n # Deal with derived constants\n global_vars['GENESIS_EPOCH'] = slot_to_epoch(GENESIS_SLOT)\n\n # Initialize SSZ types again, to account for changed lengths\n init_SSZ_types()\n'''\n\n\ndef objects_to_spec(functions: Dict[str, str],\n constants: Dict[str, str],\n ssz_objects: Dict[str, str],\n inserts: Dict[str, str],\n imports: Dict[str, str],\n new_types: Dict[str, str],\n byte_types: List[int],\n ) -> str:\n \"\"\"\n Given all the objects that constitute a spec, combine them into a single pyfile.\n \"\"\"\n new_type_definitions = \\\n '\\n'.join(['''%s = NewType('%s', %s)''' % (key, key, value) for key, value in new_types.items()])\n new_type_definitions += '\\n' + '\\n'.join(['Bytes%s = BytesN[%s]' % (n, n) for n in byte_types])\n functions_spec = '\\n\\n'.join(functions.values())\n constants_spec = '\\n'.join(map(lambda x: '%s = %s' % (x, constants[x]), constants))\n ssz_objects_instantiation_spec = '\\n\\n'.join(ssz_objects.values())\n ssz_objects_reinitialization_spec = (\n 'def init_SSZ_types():\\n global_vars = globals()\\n\\n '\n + '\\n\\n '.join([re.sub(r'(?!\\n\\n)\\n', r'\\n ', value[:-1]) for value in ssz_objects.values()])\n + '\\n\\n'\n + '\\n'.join(map(lambda x: ' global_vars[\\'%s\\'] = %s' % (x, x), ssz_objects.keys()))\n )\n spec = (\n imports\n + '\\n' + new_type_definitions\n + '\\n\\n' + constants_spec\n + '\\n\\n\\n' + ssz_objects_instantiation_spec\n + '\\n\\n' + functions_spec\n + '\\n' + SUNDRY_FUNCTIONS\n + '\\n\\n' + ssz_objects_reinitialization_spec\n + '\\n'\n )\n # Handle @inserts\n for key, value in inserts.items():\n spec = re.sub('[ ]*# %s\\\\n' % key, value, spec)\n return spec\n\n\ndef combine_functions(old_functions: Dict[str, str], new_functions: Dict[str, str]) -> Dict[str, str]:\n for key, value in new_functions.items():\n old_functions[key] = value\n return old_functions\n\n\ndef combine_constants(old_constants: Dict[str, str], new_constants: Dict[str, str]) -> Dict[str, str]:\n for key, value in new_constants.items():\n old_constants[key] = value\n return old_constants\n\n\ndef dependency_order_ssz_objects(objects: Dict[str, str]) -> None:\n \"\"\"\n Determines which SSZ Object is depenedent on which other and orders them appropriately\n \"\"\"\n items = list(objects.items())\n for key, value in items:\n dependencies = re.findall(r'(: [A-Z][\\w[]*)', value)\n dependencies = map(lambda x: re.sub(r'\\W|Vector|List|Container|uint\\d+|Bytes\\d+|bytes', '', x), dependencies)\n for dep in dependencies:\n if dep in NEW_TYPES or len(dep) == 0:\n continue\n key_list = list(objects.keys())\n for item in [dep, key] + key_list[key_list.index(dep)+1:]:\n objects[item] = objects.pop(item)\n\n\ndef combine_ssz_objects(old_objects: Dict[str, str], new_objects: Dict[str, str]) -> Dict[str, str]:\n \"\"\"\n Takes in old spec and new spec ssz objects, combines them,\n and returns the newer versions of the objects in dependency order.\n \"\"\"\n for key, value in new_objects.items():\n if key in old_objects:\n # remove trailing newline\n 
old_objects[key] = old_objects[key]\n # remove leading variable name\n value = re.sub(r'^class [\\w]*\\(Container\\):\\n', '', value)\n old_objects[key] = old_objects.get(key, '') + value\n dependency_order_ssz_objects(old_objects)\n return old_objects\n\n\n# inserts are handeled the same way as functions\ncombine_inserts = combine_functions\n\n\ndef combine_spec_objects(spec0: SpecObject, spec1: SpecObject) -> SpecObject:\n \"\"\"\n Takes in two spec variants (as tuples of their objects) and combines them using the appropriate combiner function.\n \"\"\"\n functions0, constants0, ssz_objects0, inserts0 = spec0\n functions1, constants1, ssz_objects1, inserts1 = spec1\n functions = combine_functions(functions0, functions1)\n constants = combine_constants(constants0, constants1)\n ssz_objects = combine_ssz_objects(ssz_objects0, ssz_objects1)\n inserts = combine_inserts(inserts0, inserts1)\n return functions, constants, ssz_objects, inserts\n\n\ndef build_phase0_spec(sourcefile: str, outfile: str=None) -> Optional[str]:\n functions, constants, ssz_objects, inserts = get_spec(sourcefile)\n spec = objects_to_spec(functions, constants, ssz_objects, inserts, PHASE0_IMPORTS, NEW_TYPES, BYTE_TYPES)\n if outfile is not None:\n with open(outfile, 'w') as out:\n out.write(spec)\n return spec\n\n\ndef build_phase1_spec(phase0_sourcefile: str,\n phase1_custody_sourcefile: str,\n phase1_shard_sourcefile: str,\n outfile: str=None) -> Optional[str]:\n phase0_spec = get_spec(phase0_sourcefile)\n phase1_custody = get_spec(phase1_custody_sourcefile)\n phase1_shard_data = get_spec(phase1_shard_sourcefile)\n spec_objects = phase0_spec\n for value in [phase1_custody, phase1_shard_data]:\n spec_objects = combine_spec_objects(spec_objects, value)\n spec = objects_to_spec(*spec_objects, PHASE1_IMPORTS, NEW_TYPES, BYTE_TYPES)\n if outfile is not None:\n with open(outfile, 'w') as out:\n out.write(spec)\n return spec\n\n\nif __name__ == '__main__':\n description = '''\nBuild the specs from the md docs.\nIf building phase 0:\n 1st argument is input spec.md\n 2nd argument is output spec.py\n\nIf building phase 1:\n 1st argument is input spec_phase0.md\n 2nd argument is input spec_phase1_custody.md\n 3rd argument is input spec_phase1_shard_data.md\n 4th argument is output spec.py\n'''\n parser = ArgumentParser(description=description)\n parser.add_argument(\"-p\", \"--phase\", dest=\"phase\", type=int, default=0, help=\"Build for phase #\")\n parser.add_argument(dest=\"files\", help=\"Input and output files\", nargs=\"+\")\n\n args = parser.parse_args()\n if args.phase == 0:\n if len(args.files) == 2:\n build_phase0_spec(*args.files)\n else:\n print(\" Phase 0 requires an output as well as an input file.\")\n elif args.phase == 1:\n if len(args.files) == 4:\n build_phase1_spec(*args.files)\n else:\n print(\" Phase 1 requires an output as well as 3 input files (phase0.md and phase1.md, phase1.md)\")\n else:\n print(\"Invalid phase: {0}\".format(args.phase))\n", "path": "scripts/build_spec.py"}]} | 3,623 | 476 |
gh_patches_debug_1435 | rasdani/github-patches | git_diff | keras-team__keras-1039 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
keras.utils.visualize_util
line 9: if type(model) == Sequential
Global name Sequential is not defined
line 25 elif type(model) == Graph:
Global name Graph is not defined
</issue>
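The undefined names point to a missing import; a minimal sketch of the obvious remedy, assuming `Sequential` and `Graph` are both importable from `keras.models` in the targeted Keras version:

```python
# Sketch only: with this import at the top of visualize_util.py, the
# `type(model) == Sequential` / `type(model) == Graph` checks can resolve.
import pydot
from keras.models import Sequential, Graph
```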
<code>
[start of keras/utils/visualize_util.py]
1 import pydot
2 # old pydot will not work with python3, must use one
3 # that works with python3 such as pydot2 or pydot
4
5
6 def plot(model, to_file='model.png'):
7
8 graph = pydot.Dot(graph_type='digraph')
9 if type(model) == Sequential:
10 previous_node = None
11 written_nodes = []
12 n = 1
13 for node in model.get_config()['layers']:
14 # append number in case layers have same name to differentiate
15 if (node['name'] + str(n)) in written_nodes:
16 n += 1
17 current_node = pydot.Node(node['name'] + str(n))
18 written_nodes.append(node['name'] + str(n))
19 graph.add_node(current_node)
20 if previous_node:
21 graph.add_edge(pydot.Edge(previous_node, current_node))
22 previous_node = current_node
23 graph.write_png(to_file)
24
25 elif type(model) == Graph:
26 # don't need to append number for names since all nodes labeled
27 for input_node in model.input_config:
28 graph.add_node(pydot.Node(input_node['name']))
29
30 # intermediate and output nodes have input defined
31 for layer_config in [model.node_config, model.output_config]:
32 for node in layer_config:
33 graph.add_node(pydot.Node(node['name']))
34 # possible to have multiple 'inputs' vs 1 'input'
35 if node['inputs']:
36 for e in node['inputs']:
37 graph.add_edge(pydot.Edge(e, node['name']))
38 else:
39 graph.add_edge(pydot.Edge(node['input'], node['name']))
40
41 graph.write_png(to_file)
42
[end of keras/utils/visualize_util.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/keras/utils/visualize_util.py b/keras/utils/visualize_util.py
--- a/keras/utils/visualize_util.py
+++ b/keras/utils/visualize_util.py
@@ -1,7 +1,7 @@
import pydot
# old pydot will not work with python3, must use one
# that works with python3 such as pydot2 or pydot
-
+from keras.models import Sequential, Graph
def plot(model, to_file='model.png'):
| {"golden_diff": "diff --git a/keras/utils/visualize_util.py b/keras/utils/visualize_util.py\n--- a/keras/utils/visualize_util.py\n+++ b/keras/utils/visualize_util.py\n@@ -1,7 +1,7 @@\n import pydot\n # old pydot will not work with python3, must use one\n # that works with python3 such as pydot2 or pydot\n-\n+from keras.models import Sequential, Graph\n \n def plot(model, to_file='model.png'):\n", "issue": "keras.utils.visualize_util\nline 9: if type(model) == Sequential\nGlobal name Sequential is not defined\nline 25 elif type(model) == Graph:\nGlobal name Graph is not defined\n\n", "before_files": [{"content": "import pydot\n# old pydot will not work with python3, must use one\n# that works with python3 such as pydot2 or pydot\n\n\ndef plot(model, to_file='model.png'):\n\n graph = pydot.Dot(graph_type='digraph')\n if type(model) == Sequential:\n previous_node = None\n written_nodes = []\n n = 1\n for node in model.get_config()['layers']:\n # append number in case layers have same name to differentiate\n if (node['name'] + str(n)) in written_nodes:\n n += 1\n current_node = pydot.Node(node['name'] + str(n))\n written_nodes.append(node['name'] + str(n))\n graph.add_node(current_node)\n if previous_node:\n graph.add_edge(pydot.Edge(previous_node, current_node))\n previous_node = current_node\n graph.write_png(to_file)\n\n elif type(model) == Graph:\n # don't need to append number for names since all nodes labeled\n for input_node in model.input_config:\n graph.add_node(pydot.Node(input_node['name']))\n\n # intermediate and output nodes have input defined\n for layer_config in [model.node_config, model.output_config]:\n for node in layer_config:\n graph.add_node(pydot.Node(node['name']))\n # possible to have multiple 'inputs' vs 1 'input'\n if node['inputs']:\n for e in node['inputs']:\n graph.add_edge(pydot.Edge(e, node['name']))\n else:\n graph.add_edge(pydot.Edge(node['input'], node['name']))\n\n graph.write_png(to_file)\n", "path": "keras/utils/visualize_util.py"}]} | 1,010 | 112 |
gh_patches_debug_25466 | rasdani/github-patches | git_diff | sublimelsp__LSP-1865 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Unwanted splitting on dot of the setting keys
**Describe the bug**
In LSP-yaml, the user can specify schema mapping like so:
```
yaml.schemas: {
"https://json.schemastore.org/composer": "/*"
}
```
This doesn't work in ST because LSP splits the URL key on every dot, creating an object that the server doesn't understand.
**Expected behavior**
Should send the value of the `yaml.schemas` object without altering it
**Additional context**
I'm not sure how VSCode manages to handle it but I would guess it is somewhat taking into consideration the schema itself. We could try to check that first.
</issue>
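To make the failure mode concrete, here is a small illustration in plain Python (not LSP code) of what happens when a URL-valued key is pushed through dot-splitting:

```python
# Illustration only: dot-splitting mangles URL keys inside nested settings.
settings = {"yaml.schemas": {"https://json.schemastore.org/composer": "/*"}}

flattened_path = "yaml.schemas." + next(iter(settings["yaml.schemas"]))
print(flattened_path.split('.'))
# ['yaml', 'schemas', 'https://json', 'schemastore', 'org/composer']
# -> the server receives a nested object instead of the original URL key
```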
<code>
[start of plugin/core/collections.py]
1 """
2 Module with additional collections.
3 """
4 from .typing import Optional, Dict, Any, Generator
5 from copy import deepcopy
6 import sublime
7
8
9 class DottedDict:
10
11 __slots__ = ('_d',)
12
13 def __init__(self, d: Optional[Dict[str, Any]] = None) -> None:
14 """
15 Construct a DottedDict, optionally from an existing dictionary.
16
17 :param d: An existing dictionary.
18 """
19 self._d = {} # type: Dict[str, Any]
20 if d is not None:
21 self.update(d)
22
23 @classmethod
24 def from_base_and_override(cls, base: "DottedDict", override: Optional[Dict[str, Any]]) -> "DottedDict":
25 result = DottedDict(base.copy())
26 if override:
27 result.update(override)
28 return result
29
30 def get(self, path: Optional[str] = None) -> Any:
31 """
32 Get a value from the dictionary.
33
34 :param path: The path, e.g. foo.bar.baz, or None.
35
36 :returns: The value stored at the path, or None if it doesn't exist.
37 Note that this cannot distinguish between None values and
38 paths that don't exist. If the path is None, returns the
39 entire dictionary.
40 """
41 if path is None:
42 return self._d
43 current = self._d # type: Any
44 keys = path.split('.')
45 for key in keys:
46 if isinstance(current, dict):
47 current = current.get(key)
48 else:
49 return None
50 return current
51
52 def walk(self, path: str) -> Generator[Any, None, None]:
53 current = self._d # type: Any
54 keys = path.split('.')
55 for key in keys:
56 if isinstance(current, dict):
57 current = current.get(key)
58 yield current
59 else:
60 yield None
61 return
62
63 def set(self, path: str, value: Any) -> None:
64 """
65 Set a value in the dictionary.
66
67 :param path: The path, e.g. foo.bar.baz
68 :param value: The value
69 """
70 current = self._d
71 keys = path.split('.')
72 for i in range(0, len(keys) - 1):
73 key = keys[i]
74 next_current = current.get(key)
75 if not isinstance(next_current, dict):
76 next_current = {}
77 current[key] = next_current
78 current = next_current
79 current[keys[-1]] = value
80
81 def remove(self, path: str) -> None:
82 """
83 Remove a key from the dictionary.
84
85 :param path: The path, e.g. foo.bar.baz
86 """
87 current = self._d
88 keys = path.split('.')
89 for i in range(0, len(keys) - 1):
90 key = keys[i]
91 next_current = current.get(key)
92 if not isinstance(next_current, dict):
93 return
94 current = next_current
95 current.pop(keys[-1], None)
96
97 def copy(self, path: Optional[str] = None) -> Any:
98 """
99 Get a copy of the value from the dictionary or copy of whole dictionary.
100
101 :param path: The path, e.g. foo.bar.baz, or None.
102
103 :returns: A copy of the value stored at the path, or None if it doesn't exist.
104 Note that this cannot distinguish between None values and
105 paths that don't exist. If the path is None, returns a copy of the
106 entire dictionary.
107 """
108 return deepcopy(self.get(path))
109
110 def __bool__(self) -> bool:
111 """
112 If this collection has at least one key-value pair, return True, else return False.
113 """
114 return bool(self._d)
115
116 def __contains__(self, path: str) -> bool:
117 value = self.get(path)
118 return value is not None and value is not False
119
120 def clear(self) -> None:
121 """
122 Remove all key-value pairs.
123 """
124 self._d.clear()
125
126 def assign(self, d: Dict[str, Any]) -> None:
127 """
128 Overwrites the old stored dictionary with a fresh new dictionary.
129
130 :param d: The new dictionary to store
131 """
132 self._d = d
133
134 def update(self, d: Dict[str, Any]) -> None:
135 """
136 Overwrite and/or add new key-value pairs to the collection.
137
138 :param d: The overriding dictionary. Can contain nested dictionaries.
139 """
140 for key, value in d.items():
141 if isinstance(value, dict):
142 self._update_recursive(value, key)
143 else:
144 self.set(key, value)
145
146 def get_resolved(self, variables: Dict[str, str]) -> Dict[str, Any]:
147 """
148 Resolve a DottedDict that may potentially contain template variables like $folder.
149
150 :param variables: The variables
151
152 :returns: A copy of the underlying dictionary, but with the variables replaced
153 """
154 return sublime.expand_variables(self._d, variables)
155
156 def _update_recursive(self, current: Dict[str, Any], prefix: str) -> None:
157 if not current:
158 return self.set(prefix, current)
159 for key, value in current.items():
160 path = "{}.{}".format(prefix, key)
161 if isinstance(value, dict):
162 self._update_recursive(value, path)
163 else:
164 self.set(path, value)
165
166 def __repr__(self) -> str:
167 return "{}({})".format(self.__class__.__name__, repr(self._d))
168
169 def __eq__(self, other: Any) -> bool:
170 if not isinstance(other, DottedDict):
171 return False
172 return self._d == other._d
173
[end of plugin/core/collections.py]
</code>
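For orientation, a hedged sketch of the behaviour a fix could aim for: `update()` treats only top-level keys as dotted paths and stores dictionary values verbatim, so URL keys inside a value survive untouched. This assumes a non-recursive `update()` and is not the shipped implementation listed above:

```python
# Sketch of intended post-fix behaviour, using the DottedDict class above
# (assumes update() no longer recurses into dict values and simply calls
# self.set(key, value) for each top-level key).
dd = DottedDict()
dd.update({"yaml.schemas": {"https://json.schemastore.org/composer": "/*"}})
assert dd.get("yaml.schemas") == {"https://json.schemastore.org/composer": "/*"}
```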
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/plugin/core/collections.py b/plugin/core/collections.py
--- a/plugin/core/collections.py
+++ b/plugin/core/collections.py
@@ -135,13 +135,10 @@
"""
Overwrite and/or add new key-value pairs to the collection.
- :param d: The overriding dictionary. Can contain nested dictionaries.
+ :param d: The overriding dictionary. Keys must be in the new-style dotted format.
"""
for key, value in d.items():
- if isinstance(value, dict):
- self._update_recursive(value, key)
- else:
- self.set(key, value)
+ self.set(key, value)
def get_resolved(self, variables: Dict[str, str]) -> Dict[str, Any]:
"""
@@ -153,16 +150,6 @@
"""
return sublime.expand_variables(self._d, variables)
- def _update_recursive(self, current: Dict[str, Any], prefix: str) -> None:
- if not current:
- return self.set(prefix, current)
- for key, value in current.items():
- path = "{}.{}".format(prefix, key)
- if isinstance(value, dict):
- self._update_recursive(value, path)
- else:
- self.set(path, value)
-
def __repr__(self) -> str:
return "{}({})".format(self.__class__.__name__, repr(self._d))
| {"golden_diff": "diff --git a/plugin/core/collections.py b/plugin/core/collections.py\n--- a/plugin/core/collections.py\n+++ b/plugin/core/collections.py\n@@ -135,13 +135,10 @@\n \"\"\"\n Overwrite and/or add new key-value pairs to the collection.\n \n- :param d: The overriding dictionary. Can contain nested dictionaries.\n+ :param d: The overriding dictionary. Keys must be in the new-style dotted format.\n \"\"\"\n for key, value in d.items():\n- if isinstance(value, dict):\n- self._update_recursive(value, key)\n- else:\n- self.set(key, value)\n+ self.set(key, value)\n \n def get_resolved(self, variables: Dict[str, str]) -> Dict[str, Any]:\n \"\"\"\n@@ -153,16 +150,6 @@\n \"\"\"\n return sublime.expand_variables(self._d, variables)\n \n- def _update_recursive(self, current: Dict[str, Any], prefix: str) -> None:\n- if not current:\n- return self.set(prefix, current)\n- for key, value in current.items():\n- path = \"{}.{}\".format(prefix, key)\n- if isinstance(value, dict):\n- self._update_recursive(value, path)\n- else:\n- self.set(path, value)\n-\n def __repr__(self) -> str:\n return \"{}({})\".format(self.__class__.__name__, repr(self._d))\n", "issue": "Unwanted splitting on dot of the setting keys\n**Describe the bug**\r\n\r\nIn LSP-yaml, the user can specify schema mapping like so:\r\n\r\n```\r\nyaml.schemas: {\r\n \"https://json.schemastore.org/composer\": \"/*\"\r\n}\r\n```\r\n\r\nThis doesn't work in ST because LSP splits the URL key on every dot, creating an object that the server doesn't understand.\r\n\r\n**Expected behavior**\r\n\r\nShould send the value of the `yaml.schemas` object without altering it\r\n\r\n**Additional context**\r\n\r\nI'm not sure how VSCode manages to handle it but I would guess it is somewhat taking into consideration the schema itself. We could try to check that first.\n", "before_files": [{"content": "\"\"\"\nModule with additional collections.\n\"\"\"\nfrom .typing import Optional, Dict, Any, Generator\nfrom copy import deepcopy\nimport sublime\n\n\nclass DottedDict:\n\n __slots__ = ('_d',)\n\n def __init__(self, d: Optional[Dict[str, Any]] = None) -> None:\n \"\"\"\n Construct a DottedDict, optionally from an existing dictionary.\n\n :param d: An existing dictionary.\n \"\"\"\n self._d = {} # type: Dict[str, Any]\n if d is not None:\n self.update(d)\n\n @classmethod\n def from_base_and_override(cls, base: \"DottedDict\", override: Optional[Dict[str, Any]]) -> \"DottedDict\":\n result = DottedDict(base.copy())\n if override:\n result.update(override)\n return result\n\n def get(self, path: Optional[str] = None) -> Any:\n \"\"\"\n Get a value from the dictionary.\n\n :param path: The path, e.g. foo.bar.baz, or None.\n\n :returns: The value stored at the path, or None if it doesn't exist.\n Note that this cannot distinguish between None values and\n paths that don't exist. If the path is None, returns the\n entire dictionary.\n \"\"\"\n if path is None:\n return self._d\n current = self._d # type: Any\n keys = path.split('.')\n for key in keys:\n if isinstance(current, dict):\n current = current.get(key)\n else:\n return None\n return current\n\n def walk(self, path: str) -> Generator[Any, None, None]:\n current = self._d # type: Any\n keys = path.split('.')\n for key in keys:\n if isinstance(current, dict):\n current = current.get(key)\n yield current\n else:\n yield None\n return\n\n def set(self, path: str, value: Any) -> None:\n \"\"\"\n Set a value in the dictionary.\n\n :param path: The path, e.g. 
foo.bar.baz\n :param value: The value\n \"\"\"\n current = self._d\n keys = path.split('.')\n for i in range(0, len(keys) - 1):\n key = keys[i]\n next_current = current.get(key)\n if not isinstance(next_current, dict):\n next_current = {}\n current[key] = next_current\n current = next_current\n current[keys[-1]] = value\n\n def remove(self, path: str) -> None:\n \"\"\"\n Remove a key from the dictionary.\n\n :param path: The path, e.g. foo.bar.baz\n \"\"\"\n current = self._d\n keys = path.split('.')\n for i in range(0, len(keys) - 1):\n key = keys[i]\n next_current = current.get(key)\n if not isinstance(next_current, dict):\n return\n current = next_current\n current.pop(keys[-1], None)\n\n def copy(self, path: Optional[str] = None) -> Any:\n \"\"\"\n Get a copy of the value from the dictionary or copy of whole dictionary.\n\n :param path: The path, e.g. foo.bar.baz, or None.\n\n :returns: A copy of the value stored at the path, or None if it doesn't exist.\n Note that this cannot distinguish between None values and\n paths that don't exist. If the path is None, returns a copy of the\n entire dictionary.\n \"\"\"\n return deepcopy(self.get(path))\n\n def __bool__(self) -> bool:\n \"\"\"\n If this collection has at least one key-value pair, return True, else return False.\n \"\"\"\n return bool(self._d)\n\n def __contains__(self, path: str) -> bool:\n value = self.get(path)\n return value is not None and value is not False\n\n def clear(self) -> None:\n \"\"\"\n Remove all key-value pairs.\n \"\"\"\n self._d.clear()\n\n def assign(self, d: Dict[str, Any]) -> None:\n \"\"\"\n Overwrites the old stored dictionary with a fresh new dictionary.\n\n :param d: The new dictionary to store\n \"\"\"\n self._d = d\n\n def update(self, d: Dict[str, Any]) -> None:\n \"\"\"\n Overwrite and/or add new key-value pairs to the collection.\n\n :param d: The overriding dictionary. Can contain nested dictionaries.\n \"\"\"\n for key, value in d.items():\n if isinstance(value, dict):\n self._update_recursive(value, key)\n else:\n self.set(key, value)\n\n def get_resolved(self, variables: Dict[str, str]) -> Dict[str, Any]:\n \"\"\"\n Resolve a DottedDict that may potentially contain template variables like $folder.\n\n :param variables: The variables\n\n :returns: A copy of the underlying dictionary, but with the variables replaced\n \"\"\"\n return sublime.expand_variables(self._d, variables)\n\n def _update_recursive(self, current: Dict[str, Any], prefix: str) -> None:\n if not current:\n return self.set(prefix, current)\n for key, value in current.items():\n path = \"{}.{}\".format(prefix, key)\n if isinstance(value, dict):\n self._update_recursive(value, path)\n else:\n self.set(path, value)\n\n def __repr__(self) -> str:\n return \"{}({})\".format(self.__class__.__name__, repr(self._d))\n\n def __eq__(self, other: Any) -> bool:\n if not isinstance(other, DottedDict):\n return False\n return self._d == other._d\n", "path": "plugin/core/collections.py"}]} | 2,350 | 323 |
gh_patches_debug_6143 | rasdani/github-patches | git_diff | getsentry__sentry-python-2371 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
KeyError 'https,http' in async handling 1.31.0
### How do you use Sentry?
Self-hosted/on-premise
### Version
1.31.0
### Steps to Reproduce
I'm running sentry and uvicorn. We also have django-channels. On upgrading to 1.31.0 I'm getting the following:
```
Traceback (most recent call last):
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/uvicorn/protocols/http/httptools_impl.py", line 426, in run_asgi
result = await app( # type: ignore[func-returns-value]
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/uvicorn/middleware/proxy_headers.py", line 84, in __call__
return await self.app(scope, receive, send)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/channels/routing.py", line 62, in __call__
return await application(scope, receive, send)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/django/asgi.py", line 40, in sentry_patched_asgi_handler
return await middleware(scope, receive, send)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py", line 146, in _run_asgi3
return await self._run_app(scope, receive, send, asgi_version=3)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py", line 178, in _run_app
) = self._get_transaction_name_and_source(
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py", line 293, in _get_transaction_name_and_source
name = _get_url(asgi_scope, "http" if ty == "http" else "ws", host=None)
File "/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/_asgi_common.py", line 46, in _get_url
default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
KeyError: 'https,http'
```
I did not research further but it seems to be caused by the recent overhaul of the code (#2335)
### Expected Result
No exception
### Actual Result
.
</issue>
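A small repro of the failure outside of Sentry, in plain Python: the malformed scheme string is simply not a key of the default-port table, so indexing raises while a `.get()` lookup would fall through:

```python
# Illustration only: 'https,http' is not a key of the port table.
scheme = "https,http"  # malformed scheme seen in the reported ASGI scope
default_ports = {"http": 80, "https": 443, "ws": 80, "wss": 443}

try:
    default_ports[scheme]
except KeyError as err:
    print(f"KeyError: {err}")        # KeyError: 'https,http'

print(default_ports.get(scheme))     # None -> callers can handle it gracefully
```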
<code>
[start of sentry_sdk/integrations/_asgi_common.py]
1 import urllib
2
3 from sentry_sdk.hub import _should_send_default_pii
4 from sentry_sdk.integrations._wsgi_common import _filter_headers
5 from sentry_sdk._types import TYPE_CHECKING
6
7 if TYPE_CHECKING:
8 from typing import Any
9 from typing import Dict
10 from typing import Optional
11 from typing_extensions import Literal
12
13
14 def _get_headers(asgi_scope):
15 # type: (Any) -> Dict[str, str]
16 """
17 Extract headers from the ASGI scope, in the format that the Sentry protocol expects.
18 """
19 headers = {} # type: Dict[str, str]
20 for raw_key, raw_value in asgi_scope["headers"]:
21 key = raw_key.decode("latin-1")
22 value = raw_value.decode("latin-1")
23 if key in headers:
24 headers[key] = headers[key] + ", " + value
25 else:
26 headers[key] = value
27
28 return headers
29
30
31 def _get_url(asgi_scope, default_scheme, host):
32 # type: (Dict[str, Any], Literal["ws", "http"], Optional[str]) -> str
33 """
34 Extract URL from the ASGI scope, without also including the querystring.
35 """
36 scheme = asgi_scope.get("scheme", default_scheme)
37
38 server = asgi_scope.get("server", None)
39 path = asgi_scope.get("root_path", "") + asgi_scope.get("path", "")
40
41 if host:
42 return "%s://%s%s" % (scheme, host, path)
43
44 if server is not None:
45 host, port = server
46 default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
47 if port != default_port:
48 return "%s://%s:%s%s" % (scheme, host, port, path)
49 return "%s://%s%s" % (scheme, host, path)
50 return path
51
52
53 def _get_query(asgi_scope):
54 # type: (Any) -> Any
55 """
56 Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.
57 """
58 qs = asgi_scope.get("query_string")
59 if not qs:
60 return None
61 return urllib.parse.unquote(qs.decode("latin-1"))
62
63
64 def _get_ip(asgi_scope):
65 # type: (Any) -> str
66 """
67 Extract IP Address from the ASGI scope based on request headers with fallback to scope client.
68 """
69 headers = _get_headers(asgi_scope)
70 try:
71 return headers["x-forwarded-for"].split(",")[0].strip()
72 except (KeyError, IndexError):
73 pass
74
75 try:
76 return headers["x-real-ip"]
77 except KeyError:
78 pass
79
80 return asgi_scope.get("client")[0]
81
82
83 def _get_request_data(asgi_scope):
84 # type: (Any) -> Dict[str, Any]
85 """
86 Returns data related to the HTTP request from the ASGI scope.
87 """
88 request_data = {} # type: Dict[str, Any]
89 ty = asgi_scope["type"]
90 if ty in ("http", "websocket"):
91 request_data["method"] = asgi_scope.get("method")
92
93 request_data["headers"] = headers = _filter_headers(_get_headers(asgi_scope))
94 request_data["query_string"] = _get_query(asgi_scope)
95
96 request_data["url"] = _get_url(
97 asgi_scope, "http" if ty == "http" else "ws", headers.get("host")
98 )
99
100 client = asgi_scope.get("client")
101 if client and _should_send_default_pii():
102 request_data["env"] = {"REMOTE_ADDR": _get_ip(asgi_scope)}
103
104 return request_data
105
[end of sentry_sdk/integrations/_asgi_common.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py
--- a/sentry_sdk/integrations/_asgi_common.py
+++ b/sentry_sdk/integrations/_asgi_common.py
@@ -43,7 +43,7 @@
if server is not None:
host, port = server
- default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
+ default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}.get(scheme)
if port != default_port:
return "%s://%s:%s%s" % (scheme, host, port, path)
return "%s://%s%s" % (scheme, host, path)
| {"golden_diff": "diff --git a/sentry_sdk/integrations/_asgi_common.py b/sentry_sdk/integrations/_asgi_common.py\n--- a/sentry_sdk/integrations/_asgi_common.py\n+++ b/sentry_sdk/integrations/_asgi_common.py\n@@ -43,7 +43,7 @@\n \n if server is not None:\n host, port = server\n- default_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}[scheme]\n+ default_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}.get(scheme)\n if port != default_port:\n return \"%s://%s:%s%s\" % (scheme, host, port, path)\n return \"%s://%s%s\" % (scheme, host, path)\n", "issue": "KeyError 'https,http' in async handling 1.31.0\n### How do you use Sentry?\n\nSelf-hosted/on-premise\n\n### Version\n\n1.31.0\n\n### Steps to Reproduce\n\nI'm running sentry and uvicorn. We also have django-channels. On upgrading to 1.31.0 I'm getting the following:\r\n\r\n```\r\nTraceback (most recent call last):\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/uvicorn/protocols/http/httptools_impl.py\", line 426, in run_asgi\r\nresult = await app( # type: ignore[func-returns-value]\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/uvicorn/middleware/proxy_headers.py\", line 84, in __call__\r\nreturn await self.app(scope, receive, send)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/channels/routing.py\", line 62, in __call__\r\nreturn await application(scope, receive, send)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/django/asgi.py\", line 40, in sentry_patched_asgi_handler\r\nreturn await middleware(scope, receive, send)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py\", line 146, in _run_asgi3\r\nreturn await self._run_app(scope, receive, send, asgi_version=3)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py\", line 178, in _run_app\r\n) = self._get_transaction_name_and_source(\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/asgi.py\", line 293, in _get_transaction_name_and_source\r\nname = _get_url(asgi_scope, \"http\" if ty == \"http\" else \"ws\", host=None)\r\nFile \"/home/vcap/deps/0/python/lib/python3.9/site-packages/sentry_sdk/integrations/_asgi_common.py\", line 46, in _get_url\r\ndefault_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}[scheme]\r\nKeyError: 'https,http'\r\n```\r\n\r\nI did not research further but it seems to be caused by the recent overhaul of the code (#2335)\n\n### Expected Result\n\nNo exception\n\n### Actual Result\n\n.\n", "before_files": [{"content": "import urllib\n\nfrom sentry_sdk.hub import _should_send_default_pii\nfrom sentry_sdk.integrations._wsgi_common import _filter_headers\nfrom sentry_sdk._types import TYPE_CHECKING\n\nif TYPE_CHECKING:\n from typing import Any\n from typing import Dict\n from typing import Optional\n from typing_extensions import Literal\n\n\ndef _get_headers(asgi_scope):\n # type: (Any) -> Dict[str, str]\n \"\"\"\n Extract headers from the ASGI scope, in the format that the Sentry protocol expects.\n \"\"\"\n headers = {} # type: Dict[str, str]\n for raw_key, raw_value in asgi_scope[\"headers\"]:\n key = raw_key.decode(\"latin-1\")\n value = raw_value.decode(\"latin-1\")\n if key in headers:\n headers[key] = headers[key] + \", \" + value\n else:\n headers[key] = value\n\n return headers\n\n\ndef _get_url(asgi_scope, default_scheme, host):\n # type: (Dict[str, Any], Literal[\"ws\", \"http\"], Optional[str]) -> str\n 
\"\"\"\n Extract URL from the ASGI scope, without also including the querystring.\n \"\"\"\n scheme = asgi_scope.get(\"scheme\", default_scheme)\n\n server = asgi_scope.get(\"server\", None)\n path = asgi_scope.get(\"root_path\", \"\") + asgi_scope.get(\"path\", \"\")\n\n if host:\n return \"%s://%s%s\" % (scheme, host, path)\n\n if server is not None:\n host, port = server\n default_port = {\"http\": 80, \"https\": 443, \"ws\": 80, \"wss\": 443}[scheme]\n if port != default_port:\n return \"%s://%s:%s%s\" % (scheme, host, port, path)\n return \"%s://%s%s\" % (scheme, host, path)\n return path\n\n\ndef _get_query(asgi_scope):\n # type: (Any) -> Any\n \"\"\"\n Extract querystring from the ASGI scope, in the format that the Sentry protocol expects.\n \"\"\"\n qs = asgi_scope.get(\"query_string\")\n if not qs:\n return None\n return urllib.parse.unquote(qs.decode(\"latin-1\"))\n\n\ndef _get_ip(asgi_scope):\n # type: (Any) -> str\n \"\"\"\n Extract IP Address from the ASGI scope based on request headers with fallback to scope client.\n \"\"\"\n headers = _get_headers(asgi_scope)\n try:\n return headers[\"x-forwarded-for\"].split(\",\")[0].strip()\n except (KeyError, IndexError):\n pass\n\n try:\n return headers[\"x-real-ip\"]\n except KeyError:\n pass\n\n return asgi_scope.get(\"client\")[0]\n\n\ndef _get_request_data(asgi_scope):\n # type: (Any) -> Dict[str, Any]\n \"\"\"\n Returns data related to the HTTP request from the ASGI scope.\n \"\"\"\n request_data = {} # type: Dict[str, Any]\n ty = asgi_scope[\"type\"]\n if ty in (\"http\", \"websocket\"):\n request_data[\"method\"] = asgi_scope.get(\"method\")\n\n request_data[\"headers\"] = headers = _filter_headers(_get_headers(asgi_scope))\n request_data[\"query_string\"] = _get_query(asgi_scope)\n\n request_data[\"url\"] = _get_url(\n asgi_scope, \"http\" if ty == \"http\" else \"ws\", headers.get(\"host\")\n )\n\n client = asgi_scope.get(\"client\")\n if client and _should_send_default_pii():\n request_data[\"env\"] = {\"REMOTE_ADDR\": _get_ip(asgi_scope)}\n\n return request_data\n", "path": "sentry_sdk/integrations/_asgi_common.py"}]} | 2,160 | 204 |
gh_patches_debug_19457 | rasdani/github-patches | git_diff | yt-project__yt-3955 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
BUG: yt crashes on startup if the config file is invalid toml
@cphyc originally noted this problem in #3831
```bash
echo "! this is invalid" > yt.toml
python -c "import yt"
```
fails with the backtrace
```backtrace
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/home/XXX/Documents/prog/yt/yt/__init__.py", line 13, in <module>
import yt.utilities.physical_constants as physical_constants
File "/home/XXX/Documents/prog/yt/yt/utilities/physical_constants.py", line 3, in <module>
from yt.units.yt_array import YTQuantity
File "/home/XXX/Documents/prog/yt/yt/units/yt_array.py", line 3, in <module>
from yt.funcs import array_like_field # NOQA: F401
File "/home/XXX/Documents/prog/yt/yt/funcs.py", line 33, in <module>
from yt.utilities.logger import ytLogger as mylog
File "/home/XXX/Documents/prog/yt/yt/utilities/logger.py", line 4, in <module>
from yt.config import ytcfg
File "/home/XXX/Documents/prog/yt/yt/config.py", line 215, in <module>
ytcfg.read(_local_config_file)
File "/home/XXX/Documents/prog/yt/yt/config.py", line 154, in read
data = tomllib.load(fh)
File "/home/XXX/anaconda3/envs/py39/lib/python3.9/site-packages/tomli/_parser.py", line 66, in load
return loads(s, parse_float=parse_float)
File "/home/XXX/anaconda3/envs/py39/lib/python3.9/site-packages/tomli/_parser.py", line 116, in loads
raise suffixed_err(src, pos, "Invalid statement")
tomli.TOMLDecodeError: Invalid statement (at line 1, column 1)
```
_Originally posted by @cphyc in https://github.com/yt-project/yt/pull/3831#pullrequestreview-898674860_
We could embed this step in a `try/except` block and raise a warning instead of an error (still keeping the critical information from the original error).
</issue>
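A minimal sketch of the guard suggested above, assuming the `tomli as tomllib` alias that `yt/utilities/configure.py` already uses; the warning wording is illustrative only:

```python
import warnings

import tomli as tomllib  # yt aliases tomli as tomllib on Python < 3.11


def load_config_file(fname):
    """Parse a yt.toml file, warning instead of crashing on invalid TOML."""
    try:
        with open(fname, "rb") as fh:
            return tomllib.load(fh)
    except tomllib.TOMLDecodeError as exc:
        # Keep the critical information from the original error, but let the import proceed.
        warnings.warn(f"Could not load configuration file {fname} (invalid TOML: {exc})")
        return None
```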
<code>
[start of yt/utilities/configure.py]
1 import os
2 from typing import Callable, List
3
4 # TODO: import tomllib from the standard library instead in Python >= 3.11
5 import tomli as tomllib
6 import tomli_w
7 from more_itertools import always_iterable
8
9 from yt.utilities.configuration_tree import ConfigLeaf, ConfigNode
10
11 configuration_callbacks: List[Callable[["YTConfig"], None]] = []
12
13
14 def config_dir():
15 config_root = os.environ.get(
16 "XDG_CONFIG_HOME", os.path.join(os.path.expanduser("~"), ".config")
17 )
18 conf_dir = os.path.join(config_root, "yt")
19 return conf_dir
20
21
22 class YTConfig:
23 def __init__(self, defaults=None):
24 if defaults is None:
25 defaults = {}
26 self.config_root = ConfigNode(None)
27
28 def get(self, section, *keys, callback=None):
29 node_or_leaf = self.config_root.get(section, *keys)
30 if isinstance(node_or_leaf, ConfigLeaf):
31 if callback is not None:
32 return callback(node_or_leaf)
33 return node_or_leaf.value
34 return node_or_leaf
35
36 def get_most_specific(self, section, *keys, **kwargs):
37 use_fallback = "fallback" in kwargs
38 fallback = kwargs.pop("fallback", None)
39 try:
40 return self.config_root.get_deepest_leaf(section, *keys)
41 except KeyError as err:
42 if use_fallback:
43 return fallback
44 else:
45 raise err
46
47 def update(self, new_values, metadata=None):
48 if metadata is None:
49 metadata = {}
50 self.config_root.update(new_values, metadata)
51
52 def has_section(self, section):
53 try:
54 self.config_root.get_child(section)
55 return True
56 except KeyError:
57 return False
58
59 def add_section(self, section):
60 self.config_root.add_child(section)
61
62 def remove_section(self, section):
63 if self.has_section(section):
64 self.config_root.remove_child(section)
65 return True
66 else:
67 return False
68
69 def set(self, *args, metadata=None):
70 section, *keys, value = args
71 if metadata is None:
72 metadata = {"source": "runtime"}
73 self.config_root.upsert_from_list(
74 [section] + list(keys), value, extra_data=metadata
75 )
76
77 def remove(self, *args):
78 self.config_root.pop_leaf(args)
79
80 def read(self, file_names):
81 file_names_read = []
82 for fname in always_iterable(file_names):
83 if not os.path.exists(fname):
84 continue
85 metadata = {"source": f"file: {fname}"}
86 with open(fname, "rb") as fh:
87 data = tomllib.load(fh)
88 self.update(data, metadata=metadata)
89 file_names_read.append(fname)
90
91 return file_names_read
92
93 def write(self, file_handler):
94 value = self.config_root.as_dict()
95 config_as_str = tomli_w.dumps(value)
96
97 try:
98 # Assuming file_handler has a write attribute
99 file_handler.write(config_as_str)
100 except AttributeError:
101 # Otherwise we expect a path to a file
102 with open(file_handler, mode="w") as fh:
103 fh.write(config_as_str)
104
105 @staticmethod
106 def get_global_config_file():
107 return os.path.join(config_dir(), "yt.toml")
108
109 @staticmethod
110 def get_local_config_file():
111 return os.path.join(os.path.abspath(os.curdir), "yt.toml")
112
113 def __setitem__(self, args, value):
114 section, *keys = always_iterable(args)
115 self.set(section, *keys, value, metadata=None)
116
117 def __getitem__(self, key):
118 section, *keys = always_iterable(key)
119 return self.get(section, *keys)
120
121 def __contains__(self, item):
122 return item in self.config_root
123
124 # Add support for IPython rich display
125 # see https://ipython.readthedocs.io/en/stable/config/integrating.html
126 def _repr_json_(self):
127 return self.config_root._repr_json_()
128
129
130 CONFIG = YTConfig()
131
132
133 def _cast_bool_helper(value):
134 if value == "True":
135 return True
136 elif value == "False":
137 return False
138 else:
139 raise ValueError("Cannot safely cast to bool")
140
141
142 def _expand_all(s):
143 return os.path.expandvars(os.path.expanduser(s))
144
145
146 def _cast_value_helper(value, types=(_cast_bool_helper, int, float, _expand_all)):
147 for t in types:
148 try:
149 retval = t(value)
150 return retval
151 except ValueError:
152 pass
153
154
155 def get_config(section, option):
156 *option_path, option_name = option.split(".")
157 return CONFIG.get(section, *option_path, option_name)
158
159
160 def set_config(section, option, value, config_file):
161 if not CONFIG.has_section(section):
162 CONFIG.add_section(section)
163
164 option_path = option.split(".")
165 CONFIG.set(section, *option_path, _cast_value_helper(value))
166 write_config(config_file)
167
168
169 def write_config(config_file):
170 CONFIG.write(config_file)
171
172
173 def rm_config(section, option, config_file):
174 option_path = option.split(".")
175 CONFIG.remove(section, *option_path)
176 write_config(config_file)
177
[end of yt/utilities/configure.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/yt/utilities/configure.py b/yt/utilities/configure.py
--- a/yt/utilities/configure.py
+++ b/yt/utilities/configure.py
@@ -1,4 +1,5 @@
import os
+import warnings
from typing import Callable, List
# TODO: import tomllib from the standard library instead in Python >= 3.11
@@ -83,10 +84,16 @@
if not os.path.exists(fname):
continue
metadata = {"source": f"file: {fname}"}
- with open(fname, "rb") as fh:
- data = tomllib.load(fh)
- self.update(data, metadata=metadata)
- file_names_read.append(fname)
+ try:
+ with open(fname, "rb") as fh:
+ data = tomllib.load(fh)
+ except tomllib.TOMLDecodeError as exc:
+ warnings.warn(
+ f"Could not load configuration file {fname} (invalid TOML: {exc})"
+ )
+ else:
+ self.update(data, metadata=metadata)
+ file_names_read.append(fname)
return file_names_read
| {"golden_diff": "diff --git a/yt/utilities/configure.py b/yt/utilities/configure.py\n--- a/yt/utilities/configure.py\n+++ b/yt/utilities/configure.py\n@@ -1,4 +1,5 @@\n import os\n+import warnings\n from typing import Callable, List\n \n # TODO: import tomllib from the standard library instead in Python >= 3.11\n@@ -83,10 +84,16 @@\n if not os.path.exists(fname):\n continue\n metadata = {\"source\": f\"file: {fname}\"}\n- with open(fname, \"rb\") as fh:\n- data = tomllib.load(fh)\n- self.update(data, metadata=metadata)\n- file_names_read.append(fname)\n+ try:\n+ with open(fname, \"rb\") as fh:\n+ data = tomllib.load(fh)\n+ except tomllib.TOMLDecodeError as exc:\n+ warnings.warn(\n+ f\"Could not load configuration file {fname} (invalid TOML: {exc})\"\n+ )\n+ else:\n+ self.update(data, metadata=metadata)\n+ file_names_read.append(fname)\n \n return file_names_read\n", "issue": "BUG: yt crashes on startup if the config file is invalid toml\n@cphyc originally noted this problem in #3831\r\n```bash\r\necho \"! this is invalid\" > yt.toml \r\npython -c \"import yt\"\r\n```\r\nfails with the backtrace\r\n```backtrace\r\nTraceback (most recent call last):\r\n File \"<string>\", line 1, in <module>\r\n File \"/home/XXX/Documents/prog/yt/yt/__init__.py\", line 13, in <module>\r\n import yt.utilities.physical_constants as physical_constants\r\n File \"/home/XXX/Documents/prog/yt/yt/utilities/physical_constants.py\", line 3, in <module>\r\n from yt.units.yt_array import YTQuantity\r\n File \"/home/XXX/Documents/prog/yt/yt/units/yt_array.py\", line 3, in <module>\r\n from yt.funcs import array_like_field # NOQA: F401\r\n File \"/home/XXX/Documents/prog/yt/yt/funcs.py\", line 33, in <module>\r\n from yt.utilities.logger import ytLogger as mylog\r\n File \"/home/XXX/Documents/prog/yt/yt/utilities/logger.py\", line 4, in <module>\r\n from yt.config import ytcfg\r\n File \"/home/XXX/Documents/prog/yt/yt/config.py\", line 215, in <module>\r\n ytcfg.read(_local_config_file)\r\n File \"/home/XXX/Documents/prog/yt/yt/config.py\", line 154, in read\r\n data = tomllib.load(fh)\r\n File \"/home/XXX/anaconda3/envs/py39/lib/python3.9/site-packages/tomli/_parser.py\", line 66, in load\r\n return loads(s, parse_float=parse_float)\r\n File \"/home/XXX/anaconda3/envs/py39/lib/python3.9/site-packages/tomli/_parser.py\", line 116, in loads\r\n raise suffixed_err(src, pos, \"Invalid statement\")\r\ntomli.TOMLDecodeError: Invalid statement (at line 1, column 1)\r\n```\r\n\r\n_Originally posted by @cphyc in https://github.com/yt-project/yt/pull/3831#pullrequestreview-898674860_\r\n\r\nWe could embed this step in a `try/except` block and raise a warning instead of an error (still keeping the critical information from the original error).\n", "before_files": [{"content": "import os\nfrom typing import Callable, List\n\n# TODO: import tomllib from the standard library instead in Python >= 3.11\nimport tomli as tomllib\nimport tomli_w\nfrom more_itertools import always_iterable\n\nfrom yt.utilities.configuration_tree import ConfigLeaf, ConfigNode\n\nconfiguration_callbacks: List[Callable[[\"YTConfig\"], None]] = []\n\n\ndef config_dir():\n config_root = os.environ.get(\n \"XDG_CONFIG_HOME\", os.path.join(os.path.expanduser(\"~\"), \".config\")\n )\n conf_dir = os.path.join(config_root, \"yt\")\n return conf_dir\n\n\nclass YTConfig:\n def __init__(self, defaults=None):\n if defaults is None:\n defaults = {}\n self.config_root = ConfigNode(None)\n\n def get(self, section, *keys, callback=None):\n node_or_leaf = 
self.config_root.get(section, *keys)\n if isinstance(node_or_leaf, ConfigLeaf):\n if callback is not None:\n return callback(node_or_leaf)\n return node_or_leaf.value\n return node_or_leaf\n\n def get_most_specific(self, section, *keys, **kwargs):\n use_fallback = \"fallback\" in kwargs\n fallback = kwargs.pop(\"fallback\", None)\n try:\n return self.config_root.get_deepest_leaf(section, *keys)\n except KeyError as err:\n if use_fallback:\n return fallback\n else:\n raise err\n\n def update(self, new_values, metadata=None):\n if metadata is None:\n metadata = {}\n self.config_root.update(new_values, metadata)\n\n def has_section(self, section):\n try:\n self.config_root.get_child(section)\n return True\n except KeyError:\n return False\n\n def add_section(self, section):\n self.config_root.add_child(section)\n\n def remove_section(self, section):\n if self.has_section(section):\n self.config_root.remove_child(section)\n return True\n else:\n return False\n\n def set(self, *args, metadata=None):\n section, *keys, value = args\n if metadata is None:\n metadata = {\"source\": \"runtime\"}\n self.config_root.upsert_from_list(\n [section] + list(keys), value, extra_data=metadata\n )\n\n def remove(self, *args):\n self.config_root.pop_leaf(args)\n\n def read(self, file_names):\n file_names_read = []\n for fname in always_iterable(file_names):\n if not os.path.exists(fname):\n continue\n metadata = {\"source\": f\"file: {fname}\"}\n with open(fname, \"rb\") as fh:\n data = tomllib.load(fh)\n self.update(data, metadata=metadata)\n file_names_read.append(fname)\n\n return file_names_read\n\n def write(self, file_handler):\n value = self.config_root.as_dict()\n config_as_str = tomli_w.dumps(value)\n\n try:\n # Assuming file_handler has a write attribute\n file_handler.write(config_as_str)\n except AttributeError:\n # Otherwise we expect a path to a file\n with open(file_handler, mode=\"w\") as fh:\n fh.write(config_as_str)\n\n @staticmethod\n def get_global_config_file():\n return os.path.join(config_dir(), \"yt.toml\")\n\n @staticmethod\n def get_local_config_file():\n return os.path.join(os.path.abspath(os.curdir), \"yt.toml\")\n\n def __setitem__(self, args, value):\n section, *keys = always_iterable(args)\n self.set(section, *keys, value, metadata=None)\n\n def __getitem__(self, key):\n section, *keys = always_iterable(key)\n return self.get(section, *keys)\n\n def __contains__(self, item):\n return item in self.config_root\n\n # Add support for IPython rich display\n # see https://ipython.readthedocs.io/en/stable/config/integrating.html\n def _repr_json_(self):\n return self.config_root._repr_json_()\n\n\nCONFIG = YTConfig()\n\n\ndef _cast_bool_helper(value):\n if value == \"True\":\n return True\n elif value == \"False\":\n return False\n else:\n raise ValueError(\"Cannot safely cast to bool\")\n\n\ndef _expand_all(s):\n return os.path.expandvars(os.path.expanduser(s))\n\n\ndef _cast_value_helper(value, types=(_cast_bool_helper, int, float, _expand_all)):\n for t in types:\n try:\n retval = t(value)\n return retval\n except ValueError:\n pass\n\n\ndef get_config(section, option):\n *option_path, option_name = option.split(\".\")\n return CONFIG.get(section, *option_path, option_name)\n\n\ndef set_config(section, option, value, config_file):\n if not CONFIG.has_section(section):\n CONFIG.add_section(section)\n\n option_path = option.split(\".\")\n CONFIG.set(section, *option_path, _cast_value_helper(value))\n write_config(config_file)\n\n\ndef write_config(config_file):\n 
CONFIG.write(config_file)\n\n\ndef rm_config(section, option, config_file):\n option_path = option.split(\".\")\n CONFIG.remove(section, *option_path)\n write_config(config_file)\n", "path": "yt/utilities/configure.py"}]} | 2,637 | 257 |
gh_patches_debug_13968 | rasdani/github-patches | git_diff | Kinto__kinto-367 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
kinto init should install postgresql dependencies
</issue>
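A sketch of one way `kinto init` could handle this after writing the config file; the `cliquet[postgresql]` extra is an assumption about where the PostgreSQL driver is declared:

```python
import subprocess
import sys


def ensure_postgresql_backend():
    """Install the PostgreSQL client libraries if they are not importable yet."""
    try:
        import psycopg2  # noqa
    except ImportError:
        # Assumed package extra; the postgresql backend needs psycopg2 at runtime.
        subprocess.check_call(
            [sys.executable, "-m", "pip", "install", "cliquet[postgresql]"]
        )
```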
<code>
[start of kinto/__main__.py]
1 from __future__ import print_function
2 import argparse
3 import os
4 import sys
5 from six.moves import input
6 from cliquet.scripts import cliquet
7 from pyramid.scripts import pserve
8 from pyramid.paster import bootstrap
9
10 from kinto.config import init
11
12 CONFIG_FILE = 'config/kinto.ini'
13
14
15 def main(args=None):
16 """The main routine."""
17 if args is None:
18 args = sys.argv[1:]
19
20 parser = argparse.ArgumentParser(description="Kinto commands")
21 parser.add_argument('--ini',
22 help='Application configuration file',
23 dest='ini_file',
24 required=False,
25 default=CONFIG_FILE)
26 parser.add_argument('--backend',
27 help='Specify backend',
28 dest='backend',
29 required=False,
30 default=None)
31
32 subparsers = parser.add_subparsers(title='subcommands',
33 description='valid subcommands',
34 help='init/start/migrate')
35
36 parser_init = subparsers.add_parser('init')
37 parser_init.set_defaults(which='init')
38
39 parser_migrate = subparsers.add_parser('migrate')
40 parser_migrate.set_defaults(which='migrate')
41
42 parser_start = subparsers.add_parser('start')
43 parser_start.set_defaults(which='start')
44
45 args = vars(parser.parse_args())
46 config_file = args['ini_file']
47
48 if args['which'] == 'init':
49 if os.path.exists(config_file):
50 print("%s already exist." % config_file, file=sys.stderr)
51 sys.exit(1)
52
53 backend = args['backend']
54 if not backend:
55 while True:
56 prompt = ("Which backend to use? "
57 "(1 - postgresql, 2 - redis, default - memory) ")
58 answer = input(prompt).strip()
59 try:
60 backends = {"1": "postgresql", "2": "redis", "": "memory"}
61 backend = backends[answer]
62 break
63 except KeyError:
64 pass
65
66 init(config_file, backend)
67
68 elif args['which'] == 'migrate':
69 env = bootstrap(config_file)
70 cliquet.init_schema(env)
71
72 elif args['which'] == 'start':
73 pserve_argv = ['pserve', config_file, '--reload']
74 pserve.main(pserve_argv)
75
76
77 if __name__ == "__main__":
78 main()
79
[end of kinto/__main__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/kinto/__main__.py b/kinto/__main__.py
--- a/kinto/__main__.py
+++ b/kinto/__main__.py
@@ -2,6 +2,8 @@
import argparse
import os
import sys
+
+import pip
from six.moves import input
from cliquet.scripts import cliquet
from pyramid.scripts import pserve
@@ -65,6 +67,13 @@
init(config_file, backend)
+ # Install postgresql libraries if necessary
+ if backend == "postgresql":
+ try:
+ import psycopg2 # NOQA
+ except ImportError:
+ pip.main(['install', "cliquet[postgresql]"])
+
elif args['which'] == 'migrate':
env = bootstrap(config_file)
cliquet.init_schema(env)
| {"golden_diff": "diff --git a/kinto/__main__.py b/kinto/__main__.py\n--- a/kinto/__main__.py\n+++ b/kinto/__main__.py\n@@ -2,6 +2,8 @@\n import argparse\n import os\n import sys\n+\n+import pip\n from six.moves import input\n from cliquet.scripts import cliquet\n from pyramid.scripts import pserve\n@@ -65,6 +67,13 @@\n \n init(config_file, backend)\n \n+ # Install postgresql libraries if necessary\n+ if backend == \"postgresql\":\n+ try:\n+ import psycopg2 # NOQA\n+ except ImportError:\n+ pip.main(['install', \"cliquet[postgresql]\"])\n+\n elif args['which'] == 'migrate':\n env = bootstrap(config_file)\n cliquet.init_schema(env)\n", "issue": "kinto init should install postgresql dependencies\n\n", "before_files": [{"content": "from __future__ import print_function\nimport argparse\nimport os\nimport sys\nfrom six.moves import input\nfrom cliquet.scripts import cliquet\nfrom pyramid.scripts import pserve\nfrom pyramid.paster import bootstrap\n\nfrom kinto.config import init\n\nCONFIG_FILE = 'config/kinto.ini'\n\n\ndef main(args=None):\n \"\"\"The main routine.\"\"\"\n if args is None:\n args = sys.argv[1:]\n\n parser = argparse.ArgumentParser(description=\"Kinto commands\")\n parser.add_argument('--ini',\n help='Application configuration file',\n dest='ini_file',\n required=False,\n default=CONFIG_FILE)\n parser.add_argument('--backend',\n help='Specify backend',\n dest='backend',\n required=False,\n default=None)\n\n subparsers = parser.add_subparsers(title='subcommands',\n description='valid subcommands',\n help='init/start/migrate')\n\n parser_init = subparsers.add_parser('init')\n parser_init.set_defaults(which='init')\n\n parser_migrate = subparsers.add_parser('migrate')\n parser_migrate.set_defaults(which='migrate')\n\n parser_start = subparsers.add_parser('start')\n parser_start.set_defaults(which='start')\n\n args = vars(parser.parse_args())\n config_file = args['ini_file']\n\n if args['which'] == 'init':\n if os.path.exists(config_file):\n print(\"%s already exist.\" % config_file, file=sys.stderr)\n sys.exit(1)\n\n backend = args['backend']\n if not backend:\n while True:\n prompt = (\"Which backend to use? \"\n \"(1 - postgresql, 2 - redis, default - memory) \")\n answer = input(prompt).strip()\n try:\n backends = {\"1\": \"postgresql\", \"2\": \"redis\", \"\": \"memory\"}\n backend = backends[answer]\n break\n except KeyError:\n pass\n\n init(config_file, backend)\n\n elif args['which'] == 'migrate':\n env = bootstrap(config_file)\n cliquet.init_schema(env)\n\n elif args['which'] == 'start':\n pserve_argv = ['pserve', config_file, '--reload']\n pserve.main(pserve_argv)\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "kinto/__main__.py"}]} | 1,184 | 183 |
gh_patches_debug_33610 | rasdani/github-patches | git_diff | litestar-org__litestar-183 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
`from starlite import *` broken if `testing` extra not installed
This is only an issue on main, not in any release.
When I want to try a library out, I'll install it into a fresh env, run the Python REPL, do `from lib import *`, and have a play around. If just doing that raised an error, it would freak me out a little about the lib.
Possible solution:
- remove `.testing` imports from `starlite.__all__`
- add deprecation warning for top-level `.testing` imports
- remove `if TYPE_CHECKING` too? May as well if we are doing the above, I think?
Refs: #174 #130
</issue>
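A rough sketch of the deprecation-warning idea using the module-level `__getattr__` (PEP 562) hook that already backs the lazy imports; the exact message is illustrative:

```python
import warnings
from typing import Any

_deprecated_imports = {"TestClient", "create_test_client", "create_test_request"}


def __getattr__(name: str) -> Any:
    """Resolve testing helpers lazily and warn that top-level imports are deprecated."""
    if name not in _deprecated_imports:
        raise AttributeError(f"Module {__package__} has no attribute {name}")

    warnings.warn(
        f"Importing {name} from {__package__} is deprecated, "
        f"use `from starlite.testing import {name}` instead",
        DeprecationWarning,
        stacklevel=2,
    )

    from . import testing

    attr = globals()[name] = getattr(testing, name)
    return attr
```

With the testing names also dropped from `__all__`, `from starlite import *` no longer needs the `testing` extra, while an explicit `from starlite import TestClient` keeps working and emits a `DeprecationWarning`.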
<code>
[start of starlite/__init__.py]
1 from typing import TYPE_CHECKING, Any
2
3 from starlite.datastructures import File, Redirect, State, Stream, Template
4
5 from .app import Starlite
6 from .config import (
7 CacheConfig,
8 CORSConfig,
9 OpenAPIConfig,
10 StaticFilesConfig,
11 TemplateConfig,
12 )
13 from .connection import Request, WebSocket
14 from .controller import Controller
15 from .dto import DTOFactory
16 from .enums import (
17 HttpMethod,
18 MediaType,
19 OpenAPIMediaType,
20 RequestEncodingType,
21 ScopeType,
22 )
23 from .exceptions import (
24 HTTPException,
25 ImproperlyConfiguredException,
26 InternalServerException,
27 MissingDependencyException,
28 NotAuthorizedException,
29 NotFoundException,
30 PermissionDeniedException,
31 ServiceUnavailableException,
32 StarLiteException,
33 ValidationException,
34 )
35 from .handlers import (
36 ASGIRouteHandler,
37 BaseRouteHandler,
38 HTTPRouteHandler,
39 WebsocketRouteHandler,
40 asgi,
41 delete,
42 get,
43 patch,
44 post,
45 put,
46 route,
47 websocket,
48 )
49 from .logging import LoggingConfig, QueueListenerHandler
50 from .middleware import AbstractAuthenticationMiddleware, AuthenticationResult
51 from .openapi.controller import OpenAPIController
52 from .params import Body, Dependency, Parameter
53 from .plugins import PluginProtocol
54 from .provide import Provide
55 from .response import Response
56 from .router import Router
57 from .routes import BaseRoute, HTTPRoute, WebSocketRoute
58 from .types import MiddlewareProtocol, Partial, ResponseHeader
59
60 if TYPE_CHECKING:
61 from .testing import TestClient, create_test_client, create_test_request
62
63
64 __all__ = [
65 "ASGIRouteHandler",
66 "AbstractAuthenticationMiddleware",
67 "AuthenticationResult",
68 "BaseRoute",
69 "BaseRouteHandler",
70 "Body",
71 "CORSConfig",
72 "CacheConfig",
73 "Controller",
74 "Dependency",
75 "DTOFactory",
76 "File",
77 "HTTPException",
78 "HTTPRoute",
79 "HTTPRouteHandler",
80 "HttpMethod",
81 "ImproperlyConfiguredException",
82 "InternalServerException",
83 "LoggingConfig",
84 "MediaType",
85 "MiddlewareProtocol",
86 "MissingDependencyException",
87 "NotAuthorizedException",
88 "NotFoundException",
89 "OpenAPIConfig",
90 "OpenAPIController",
91 "OpenAPIMediaType",
92 "Parameter",
93 "Partial",
94 "PermissionDeniedException",
95 "PluginProtocol",
96 "Provide",
97 "QueueListenerHandler",
98 "Redirect",
99 "Request",
100 "RequestEncodingType",
101 "Response",
102 "ResponseHeader",
103 "Router",
104 "ScopeType",
105 "ServiceUnavailableException",
106 "StarLiteException",
107 "Starlite",
108 "State",
109 "StaticFilesConfig",
110 "Stream",
111 "Template",
112 "TemplateConfig",
113 "TestClient",
114 "ValidationException",
115 "WebSocket",
116 "WebSocketRoute",
117 "WebsocketRouteHandler",
118 "asgi",
119 "create_test_client",
120 "create_test_request",
121 "delete",
122 "get",
123 "patch",
124 "post",
125 "put",
126 "route",
127 "websocket",
128 ]
129
130 _dynamic_imports = {"TestClient", "create_test_client", "create_test_request"}
131
132
133 # pylint: disable=import-outside-toplevel
134 def __getattr__(name: str) -> Any:
135 """Provide lazy importing as per https://peps.python.org/pep-0562/"""
136 if name not in _dynamic_imports:
137 raise AttributeError(f"Module {__package__} has no attribute {name}")
138
139 from . import testing
140
141 attr = globals()[name] = getattr(testing, name)
142 return attr
143
[end of starlite/__init__.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/starlite/__init__.py b/starlite/__init__.py
--- a/starlite/__init__.py
+++ b/starlite/__init__.py
@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Any
+from typing import Any
from starlite.datastructures import File, Redirect, State, Stream, Template
@@ -57,10 +57,6 @@
from .routes import BaseRoute, HTTPRoute, WebSocketRoute
from .types import MiddlewareProtocol, Partial, ResponseHeader
-if TYPE_CHECKING:
- from .testing import TestClient, create_test_client, create_test_request
-
-
__all__ = [
"ASGIRouteHandler",
"AbstractAuthenticationMiddleware",
@@ -110,14 +106,11 @@
"Stream",
"Template",
"TemplateConfig",
- "TestClient",
"ValidationException",
"WebSocket",
"WebSocketRoute",
"WebsocketRouteHandler",
"asgi",
- "create_test_client",
- "create_test_request",
"delete",
"get",
"patch",
@@ -127,15 +120,24 @@
"websocket",
]
-_dynamic_imports = {"TestClient", "create_test_client", "create_test_request"}
+
+_deprecated_imports = {"TestClient", "create_test_client", "create_test_request"}
# pylint: disable=import-outside-toplevel
def __getattr__(name: str) -> Any:
"""Provide lazy importing as per https://peps.python.org/pep-0562/"""
- if name not in _dynamic_imports:
+ if name not in _deprecated_imports:
raise AttributeError(f"Module {__package__} has no attribute {name}")
+ import warnings
+
+ warnings.warn(
+ f"Importing {name} from {__package__} is deprecated, use `from startlite.testing import {name}` instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
from . import testing
attr = globals()[name] = getattr(testing, name)
| {"golden_diff": "diff --git a/starlite/__init__.py b/starlite/__init__.py\n--- a/starlite/__init__.py\n+++ b/starlite/__init__.py\n@@ -1,4 +1,4 @@\n-from typing import TYPE_CHECKING, Any\n+from typing import Any\n \n from starlite.datastructures import File, Redirect, State, Stream, Template\n \n@@ -57,10 +57,6 @@\n from .routes import BaseRoute, HTTPRoute, WebSocketRoute\n from .types import MiddlewareProtocol, Partial, ResponseHeader\n \n-if TYPE_CHECKING:\n- from .testing import TestClient, create_test_client, create_test_request\n-\n-\n __all__ = [\n \"ASGIRouteHandler\",\n \"AbstractAuthenticationMiddleware\",\n@@ -110,14 +106,11 @@\n \"Stream\",\n \"Template\",\n \"TemplateConfig\",\n- \"TestClient\",\n \"ValidationException\",\n \"WebSocket\",\n \"WebSocketRoute\",\n \"WebsocketRouteHandler\",\n \"asgi\",\n- \"create_test_client\",\n- \"create_test_request\",\n \"delete\",\n \"get\",\n \"patch\",\n@@ -127,15 +120,24 @@\n \"websocket\",\n ]\n \n-_dynamic_imports = {\"TestClient\", \"create_test_client\", \"create_test_request\"}\n+\n+_deprecated_imports = {\"TestClient\", \"create_test_client\", \"create_test_request\"}\n \n \n # pylint: disable=import-outside-toplevel\n def __getattr__(name: str) -> Any:\n \"\"\"Provide lazy importing as per https://peps.python.org/pep-0562/\"\"\"\n- if name not in _dynamic_imports:\n+ if name not in _deprecated_imports:\n raise AttributeError(f\"Module {__package__} has no attribute {name}\")\n \n+ import warnings\n+\n+ warnings.warn(\n+ f\"Importing {name} from {__package__} is deprecated, use `from startlite.testing import {name}` instead\",\n+ DeprecationWarning,\n+ stacklevel=2,\n+ )\n+\n from . import testing\n \n attr = globals()[name] = getattr(testing, name)\n", "issue": "`from starlite import *` broken if `testing` extra not installed\nThis is only an issue on main, not in any release.\r\n\r\nWhen I want to try a library out, I'll install it into a fresh env, run python repl and do `from lib import *` and have a play around. If just doing that raised an error it would freak me out a little about the lib.\r\n\r\nPossible solution:\r\n- remove `.testing` imports from `starlite.__all__`\r\n- add deprecation warning for top-level `.testing` imports\r\n- remove `if TYPE_CHECKING` too? 
May as well if we are doing the above, I think?\r\n\r\nRefs: #174 #130 \n", "before_files": [{"content": "from typing import TYPE_CHECKING, Any\n\nfrom starlite.datastructures import File, Redirect, State, Stream, Template\n\nfrom .app import Starlite\nfrom .config import (\n CacheConfig,\n CORSConfig,\n OpenAPIConfig,\n StaticFilesConfig,\n TemplateConfig,\n)\nfrom .connection import Request, WebSocket\nfrom .controller import Controller\nfrom .dto import DTOFactory\nfrom .enums import (\n HttpMethod,\n MediaType,\n OpenAPIMediaType,\n RequestEncodingType,\n ScopeType,\n)\nfrom .exceptions import (\n HTTPException,\n ImproperlyConfiguredException,\n InternalServerException,\n MissingDependencyException,\n NotAuthorizedException,\n NotFoundException,\n PermissionDeniedException,\n ServiceUnavailableException,\n StarLiteException,\n ValidationException,\n)\nfrom .handlers import (\n ASGIRouteHandler,\n BaseRouteHandler,\n HTTPRouteHandler,\n WebsocketRouteHandler,\n asgi,\n delete,\n get,\n patch,\n post,\n put,\n route,\n websocket,\n)\nfrom .logging import LoggingConfig, QueueListenerHandler\nfrom .middleware import AbstractAuthenticationMiddleware, AuthenticationResult\nfrom .openapi.controller import OpenAPIController\nfrom .params import Body, Dependency, Parameter\nfrom .plugins import PluginProtocol\nfrom .provide import Provide\nfrom .response import Response\nfrom .router import Router\nfrom .routes import BaseRoute, HTTPRoute, WebSocketRoute\nfrom .types import MiddlewareProtocol, Partial, ResponseHeader\n\nif TYPE_CHECKING:\n from .testing import TestClient, create_test_client, create_test_request\n\n\n__all__ = [\n \"ASGIRouteHandler\",\n \"AbstractAuthenticationMiddleware\",\n \"AuthenticationResult\",\n \"BaseRoute\",\n \"BaseRouteHandler\",\n \"Body\",\n \"CORSConfig\",\n \"CacheConfig\",\n \"Controller\",\n \"Dependency\",\n \"DTOFactory\",\n \"File\",\n \"HTTPException\",\n \"HTTPRoute\",\n \"HTTPRouteHandler\",\n \"HttpMethod\",\n \"ImproperlyConfiguredException\",\n \"InternalServerException\",\n \"LoggingConfig\",\n \"MediaType\",\n \"MiddlewareProtocol\",\n \"MissingDependencyException\",\n \"NotAuthorizedException\",\n \"NotFoundException\",\n \"OpenAPIConfig\",\n \"OpenAPIController\",\n \"OpenAPIMediaType\",\n \"Parameter\",\n \"Partial\",\n \"PermissionDeniedException\",\n \"PluginProtocol\",\n \"Provide\",\n \"QueueListenerHandler\",\n \"Redirect\",\n \"Request\",\n \"RequestEncodingType\",\n \"Response\",\n \"ResponseHeader\",\n \"Router\",\n \"ScopeType\",\n \"ServiceUnavailableException\",\n \"StarLiteException\",\n \"Starlite\",\n \"State\",\n \"StaticFilesConfig\",\n \"Stream\",\n \"Template\",\n \"TemplateConfig\",\n \"TestClient\",\n \"ValidationException\",\n \"WebSocket\",\n \"WebSocketRoute\",\n \"WebsocketRouteHandler\",\n \"asgi\",\n \"create_test_client\",\n \"create_test_request\",\n \"delete\",\n \"get\",\n \"patch\",\n \"post\",\n \"put\",\n \"route\",\n \"websocket\",\n]\n\n_dynamic_imports = {\"TestClient\", \"create_test_client\", \"create_test_request\"}\n\n\n# pylint: disable=import-outside-toplevel\ndef __getattr__(name: str) -> Any:\n \"\"\"Provide lazy importing as per https://peps.python.org/pep-0562/\"\"\"\n if name not in _dynamic_imports:\n raise AttributeError(f\"Module {__package__} has no attribute {name}\")\n\n from . import testing\n\n attr = globals()[name] = getattr(testing, name)\n return attr\n", "path": "starlite/__init__.py"}]} | 1,787 | 471 |
gh_patches_debug_29079 | rasdani/github-patches | git_diff | pulp__pulpcore-133 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Cleanup old publications - general cleanup
**Is your feature request related to a problem? Please describe.**
We do a lot of different new repos and publications - and also delete them after a certain time.
Now I see that the publications persist.
That means that in normal handling we have hundreds of publications.
But we don't know where they belong, because there is no name tag.
**Describe the solution you'd like**
Maybe there is a possibility to remove old publications that have no link to a repository or distribution.
**Describe alternatives you've considered**
No idea, I am just thinking about a cleanup job.
</issue>
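A sketch of what the requested cleanup could look like with the Django ORM; the reverse accessor name below is an assumption and would need to match the actual `related_name` on the distribution-to-publication foreign key:

```python
from pulpcore.app import models


def delete_orphaned_publications():
    """Delete completed publications that no distribution currently serves (sketch only)."""
    orphans = models.Publication.objects.filter(
        complete=True,
        publicationdistribution__isnull=True,  # assumed default reverse accessor name
    )
    orphans.delete()
```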
<code>
[start of pulpcore/app/serializers/publication.py]
1 from gettext import gettext as _
2
3 from django.core import validators
4 from django.db.models import Q
5
6 from rest_framework import serializers
7 from rest_framework.validators import UniqueValidator
8
9 from pulpcore.app import models
10 from pulpcore.app.serializers import (
11 BaseURLField,
12 DetailIdentityField,
13 DetailRelatedField,
14 NestedRelatedField,
15 RelatedField,
16 MasterModelSerializer,
17 ModelSerializer,
18 validate_unknown_fields,
19 )
20
21
22 class PublicationSerializer(MasterModelSerializer):
23 _href = DetailIdentityField()
24 repository_version = NestedRelatedField(
25 view_name='versions-detail',
26 lookup_field='number',
27 parent_lookup_kwargs={'repository_pk': 'repository__pk'},
28 queryset=models.RepositoryVersion.objects.all(),
29 required=False,
30 )
31 repository = serializers.HyperlinkedRelatedField(
32 help_text=_('A URI of the repository to be published.'),
33 required=False,
34 label=_('Repository'),
35 queryset=models.Repository.objects.all(),
36 view_name='repositories-detail',
37 )
38
39 def validate(self, data):
40 if hasattr(self, 'initial_data'):
41 validate_unknown_fields(self.initial_data, self.fields)
42
43 repository = data.pop('repository', None) # not an actual field on publication
44 repository_version = data.get('repository_version')
45 if not repository and not repository_version:
46 raise serializers.ValidationError(
47 _("Either the 'repository' or 'repository_version' need to be specified"))
48 elif not repository and repository_version:
49 return data
50 elif repository and not repository_version:
51 version = models.RepositoryVersion.latest(repository)
52 if version:
53 new_data = {'repository_version': version}
54 new_data.update(data)
55 return new_data
56 else:
57 raise serializers.ValidationError(
58 detail=_('Repository has no version available to create Publication from'))
59 raise serializers.ValidationError(
60 _("Either the 'repository' or 'repository_version' need to be specified "
61 "but not both.")
62 )
63
64 class Meta:
65 abstract = True
66 model = models.Publication
67 fields = MasterModelSerializer.Meta.fields + (
68 'publisher',
69 'repository_version',
70 'repository'
71 )
72
73
74 class ContentGuardSerializer(MasterModelSerializer):
75 _href = DetailIdentityField()
76
77 name = serializers.CharField(
78 help_text=_('The unique name.')
79 )
80 description = serializers.CharField(
81 help_text=_('An optional description.'),
82 allow_blank=True,
83 required=False
84 )
85
86 class Meta:
87 model = models.ContentGuard
88 fields = MasterModelSerializer.Meta.fields + (
89 'name',
90 'description'
91 )
92
93
94 class BaseDistributionSerializer(MasterModelSerializer):
95 _href = DetailIdentityField()
96 base_path = serializers.CharField(
97 help_text=_('The base (relative) path component of the published url. Avoid paths that \
98 overlap with other distribution base paths (e.g. "foo" and "foo/bar")'),
99 validators=[validators.MaxLengthValidator(
100 models.BaseDistribution._meta.get_field('base_path').max_length,
101 message=_('`base_path` length must be less than {} characters').format(
102 models.BaseDistribution._meta.get_field('base_path').max_length
103 )),
104 UniqueValidator(queryset=models.BaseDistribution.objects.all()),
105 ]
106 )
107 base_url = BaseURLField(
108 source='base_path', read_only=True,
109 help_text=_('The URL for accessing the publication as defined by this distribution.')
110 )
111 content_guard = DetailRelatedField(
112 required=False,
113 help_text=_('An optional content-guard.'),
114 queryset=models.ContentGuard.objects.all(),
115 allow_null=True
116 )
117 name = serializers.CharField(
118 help_text=_('A unique name. Ex, `rawhide` and `stable`.'),
119 validators=[validators.MaxLengthValidator(
120 models.BaseDistribution._meta.get_field('name').max_length,
121 message=_('`name` length must be less than {} characters').format(
122 models.BaseDistribution._meta.get_field('name').max_length
123 )),
124 UniqueValidator(queryset=models.BaseDistribution.objects.all())]
125 )
126 remote = DetailRelatedField(
127 required=False,
128 help_text=_('Remote that can be used to fetch content when using pull-through caching.'),
129 queryset=models.Remote.objects.all(),
130 allow_null=True
131 )
132
133 class Meta:
134 abstract = True
135 model = models.BaseDistribution
136 fields = ModelSerializer.Meta.fields + (
137 'base_path',
138 'base_url',
139 'content_guard',
140 'name',
141 'remote',
142 )
143
144 def _validate_path_overlap(self, path):
145 # look for any base paths nested in path
146 search = path.split("/")[0]
147 q = Q(base_path=search)
148 for subdir in path.split("/")[1:]:
149 search = "/".join((search, subdir))
150 q |= Q(base_path=search)
151
152 # look for any base paths that nest path
153 q |= Q(base_path__startswith='{}/'.format(path))
154 qs = models.BaseDistribution.objects.filter(q)
155
156 if self.instance is not None:
157 qs = qs.exclude(pk=self.instance.pk)
158
159 match = qs.first()
160 if match:
161 raise serializers.ValidationError(detail=_("Overlaps with existing distribution '"
162 "{}'").format(match.name))
163
164 return path
165
166 def validate_base_path(self, path):
167 self._validate_relative_path(path)
168 return self._validate_path_overlap(path)
169
170
171 class PublicationDistributionSerializer(BaseDistributionSerializer):
172 publication = DetailRelatedField(
173 required=False,
174 help_text=_('Publication to be served'),
175 queryset=models.Publication.objects.exclude(complete=False),
176 allow_null=True
177 )
178
179 class Meta:
180 abstract = True
181 fields = BaseDistributionSerializer.Meta.fields + (
182 'publication',
183 )
184
185
186 class RepositoryVersionDistributionSerializer(BaseDistributionSerializer):
187 repository = RelatedField(
188 required=False,
189 help_text=_('The latest RepositoryVersion for this Repository will be served.'),
190 queryset=models.Repository.objects.all(),
191 view_name='repositories-detail',
192 allow_null=True
193 )
194 repository_version = NestedRelatedField(
195 required=False,
196 help_text=_('RepositoryVersion to be served'),
197 queryset=models.RepositoryVersion.objects.exclude(complete=False),
198 view_name='versions-detail',
199 allow_null=True,
200 lookup_field='number',
201 parent_lookup_kwargs={'repository_pk': 'repository__pk'},
202 )
203
204 class Meta:
205 abstract = True
206 fields = BaseDistributionSerializer.Meta.fields + (
207 'repository',
208 'repository_version',
209 )
210
211 def validate(self, data):
212 super().validate(data)
213
214 if 'repository' in data and 'repository_version' in data:
215 msg = _("The attributes 'repository' and 'repository_version' must be used"
216 "exclusively.")
217 raise serializers.ValidationError(msg)
218
219 return data
220
[end of pulpcore/app/serializers/publication.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/pulpcore/app/serializers/publication.py b/pulpcore/app/serializers/publication.py
--- a/pulpcore/app/serializers/publication.py
+++ b/pulpcore/app/serializers/publication.py
@@ -92,6 +92,22 @@
class BaseDistributionSerializer(MasterModelSerializer):
+ """
+ The Serializer for the BaseDistribution model.
+
+ The serializer deliberately omits the "remote" field, which is used for
+ pull-through caching only. Plugins implementing pull-through caching will
+ have to add the field in their derived serializer class like this::
+
+ remote = DetailRelatedField(
+ required=False,
+ help_text=_('Remote that can be used to fetch content when using pull-through caching.'),
+ queryset=models.Remote.objects.all(),
+ allow_null=True
+ )
+
+ """
+
_href = DetailIdentityField()
base_path = serializers.CharField(
help_text=_('The base (relative) path component of the published url. Avoid paths that \
@@ -123,12 +139,6 @@
)),
UniqueValidator(queryset=models.BaseDistribution.objects.all())]
)
- remote = DetailRelatedField(
- required=False,
- help_text=_('Remote that can be used to fetch content when using pull-through caching.'),
- queryset=models.Remote.objects.all(),
- allow_null=True
- )
class Meta:
abstract = True
@@ -138,7 +148,6 @@
'base_url',
'content_guard',
'name',
- 'remote',
)
def _validate_path_overlap(self, path):
| {"golden_diff": "diff --git a/pulpcore/app/serializers/publication.py b/pulpcore/app/serializers/publication.py\n--- a/pulpcore/app/serializers/publication.py\n+++ b/pulpcore/app/serializers/publication.py\n@@ -92,6 +92,22 @@\n \n \n class BaseDistributionSerializer(MasterModelSerializer):\n+ \"\"\"\n+ The Serializer for the BaseDistribution model.\n+\n+ The serializer deliberately omits the \"remote\" field, which is used for\n+ pull-through caching only. Plugins implementing pull-through caching will\n+ have to add the field in their derived serializer class like this::\n+\n+ remote = DetailRelatedField(\n+ required=False,\n+ help_text=_('Remote that can be used to fetch content when using pull-through caching.'),\n+ queryset=models.Remote.objects.all(),\n+ allow_null=True\n+ )\n+\n+ \"\"\"\n+\n _href = DetailIdentityField()\n base_path = serializers.CharField(\n help_text=_('The base (relative) path component of the published url. Avoid paths that \\\n@@ -123,12 +139,6 @@\n )),\n UniqueValidator(queryset=models.BaseDistribution.objects.all())]\n )\n- remote = DetailRelatedField(\n- required=False,\n- help_text=_('Remote that can be used to fetch content when using pull-through caching.'),\n- queryset=models.Remote.objects.all(),\n- allow_null=True\n- )\n \n class Meta:\n abstract = True\n@@ -138,7 +148,6 @@\n 'base_url',\n 'content_guard',\n 'name',\n- 'remote',\n )\n \n def _validate_path_overlap(self, path):\n", "issue": "Cleanup old publications - general cleanup\n**Is your feature request related to a problem? Please describe.**\r\nWe do a lot of different new repos and publications - and also delete them after certain time.\r\nNow i see, the publications resist.\r\nMeans, in normal handling we have hundreds of publications.\r\nBut, we don't know where they do belong to, becuase there is no name tag. \r\n\r\n**Describe the solution you'd like**\r\nMay be there is a possibility to remove old publications with no belonging or link to a repository or distribution. 
\r\n\r\n**Describe alternatives you've considered**\r\nNo idea, i just think about an clenaup job.\r\n\r\n\n", "before_files": [{"content": "from gettext import gettext as _\n\nfrom django.core import validators\nfrom django.db.models import Q\n\nfrom rest_framework import serializers\nfrom rest_framework.validators import UniqueValidator\n\nfrom pulpcore.app import models\nfrom pulpcore.app.serializers import (\n BaseURLField,\n DetailIdentityField,\n DetailRelatedField,\n NestedRelatedField,\n RelatedField,\n MasterModelSerializer,\n ModelSerializer,\n validate_unknown_fields,\n)\n\n\nclass PublicationSerializer(MasterModelSerializer):\n _href = DetailIdentityField()\n repository_version = NestedRelatedField(\n view_name='versions-detail',\n lookup_field='number',\n parent_lookup_kwargs={'repository_pk': 'repository__pk'},\n queryset=models.RepositoryVersion.objects.all(),\n required=False,\n )\n repository = serializers.HyperlinkedRelatedField(\n help_text=_('A URI of the repository to be published.'),\n required=False,\n label=_('Repository'),\n queryset=models.Repository.objects.all(),\n view_name='repositories-detail',\n )\n\n def validate(self, data):\n if hasattr(self, 'initial_data'):\n validate_unknown_fields(self.initial_data, self.fields)\n\n repository = data.pop('repository', None) # not an actual field on publication\n repository_version = data.get('repository_version')\n if not repository and not repository_version:\n raise serializers.ValidationError(\n _(\"Either the 'repository' or 'repository_version' need to be specified\"))\n elif not repository and repository_version:\n return data\n elif repository and not repository_version:\n version = models.RepositoryVersion.latest(repository)\n if version:\n new_data = {'repository_version': version}\n new_data.update(data)\n return new_data\n else:\n raise serializers.ValidationError(\n detail=_('Repository has no version available to create Publication from'))\n raise serializers.ValidationError(\n _(\"Either the 'repository' or 'repository_version' need to be specified \"\n \"but not both.\")\n )\n\n class Meta:\n abstract = True\n model = models.Publication\n fields = MasterModelSerializer.Meta.fields + (\n 'publisher',\n 'repository_version',\n 'repository'\n )\n\n\nclass ContentGuardSerializer(MasterModelSerializer):\n _href = DetailIdentityField()\n\n name = serializers.CharField(\n help_text=_('The unique name.')\n )\n description = serializers.CharField(\n help_text=_('An optional description.'),\n allow_blank=True,\n required=False\n )\n\n class Meta:\n model = models.ContentGuard\n fields = MasterModelSerializer.Meta.fields + (\n 'name',\n 'description'\n )\n\n\nclass BaseDistributionSerializer(MasterModelSerializer):\n _href = DetailIdentityField()\n base_path = serializers.CharField(\n help_text=_('The base (relative) path component of the published url. Avoid paths that \\\n overlap with other distribution base paths (e.g. 
\"foo\" and \"foo/bar\")'),\n validators=[validators.MaxLengthValidator(\n models.BaseDistribution._meta.get_field('base_path').max_length,\n message=_('`base_path` length must be less than {} characters').format(\n models.BaseDistribution._meta.get_field('base_path').max_length\n )),\n UniqueValidator(queryset=models.BaseDistribution.objects.all()),\n ]\n )\n base_url = BaseURLField(\n source='base_path', read_only=True,\n help_text=_('The URL for accessing the publication as defined by this distribution.')\n )\n content_guard = DetailRelatedField(\n required=False,\n help_text=_('An optional content-guard.'),\n queryset=models.ContentGuard.objects.all(),\n allow_null=True\n )\n name = serializers.CharField(\n help_text=_('A unique name. Ex, `rawhide` and `stable`.'),\n validators=[validators.MaxLengthValidator(\n models.BaseDistribution._meta.get_field('name').max_length,\n message=_('`name` length must be less than {} characters').format(\n models.BaseDistribution._meta.get_field('name').max_length\n )),\n UniqueValidator(queryset=models.BaseDistribution.objects.all())]\n )\n remote = DetailRelatedField(\n required=False,\n help_text=_('Remote that can be used to fetch content when using pull-through caching.'),\n queryset=models.Remote.objects.all(),\n allow_null=True\n )\n\n class Meta:\n abstract = True\n model = models.BaseDistribution\n fields = ModelSerializer.Meta.fields + (\n 'base_path',\n 'base_url',\n 'content_guard',\n 'name',\n 'remote',\n )\n\n def _validate_path_overlap(self, path):\n # look for any base paths nested in path\n search = path.split(\"/\")[0]\n q = Q(base_path=search)\n for subdir in path.split(\"/\")[1:]:\n search = \"/\".join((search, subdir))\n q |= Q(base_path=search)\n\n # look for any base paths that nest path\n q |= Q(base_path__startswith='{}/'.format(path))\n qs = models.BaseDistribution.objects.filter(q)\n\n if self.instance is not None:\n qs = qs.exclude(pk=self.instance.pk)\n\n match = qs.first()\n if match:\n raise serializers.ValidationError(detail=_(\"Overlaps with existing distribution '\"\n \"{}'\").format(match.name))\n\n return path\n\n def validate_base_path(self, path):\n self._validate_relative_path(path)\n return self._validate_path_overlap(path)\n\n\nclass PublicationDistributionSerializer(BaseDistributionSerializer):\n publication = DetailRelatedField(\n required=False,\n help_text=_('Publication to be served'),\n queryset=models.Publication.objects.exclude(complete=False),\n allow_null=True\n )\n\n class Meta:\n abstract = True\n fields = BaseDistributionSerializer.Meta.fields + (\n 'publication',\n )\n\n\nclass RepositoryVersionDistributionSerializer(BaseDistributionSerializer):\n repository = RelatedField(\n required=False,\n help_text=_('The latest RepositoryVersion for this Repository will be served.'),\n queryset=models.Repository.objects.all(),\n view_name='repositories-detail',\n allow_null=True\n )\n repository_version = NestedRelatedField(\n required=False,\n help_text=_('RepositoryVersion to be served'),\n queryset=models.RepositoryVersion.objects.exclude(complete=False),\n view_name='versions-detail',\n allow_null=True,\n lookup_field='number',\n parent_lookup_kwargs={'repository_pk': 'repository__pk'},\n )\n\n class Meta:\n abstract = True\n fields = BaseDistributionSerializer.Meta.fields + (\n 'repository',\n 'repository_version',\n )\n\n def validate(self, data):\n super().validate(data)\n\n if 'repository' in data and 'repository_version' in data:\n msg = _(\"The attributes 'repository' and 'repository_version' must 
be used\"\n \"exclusively.\")\n raise serializers.ValidationError(msg)\n\n return data\n", "path": "pulpcore/app/serializers/publication.py"}]} | 2,636 | 362 |
gh_patches_debug_38574 | rasdani/github-patches | git_diff | electricitymaps__electricitymaps-contrib-1305 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
CL-SIC parser returns wrong datetime
CL-SIC parser returns datetime of 0018 instead of 2018, as illustrated below:
`datetime: 0018-02-18 (06:43:00.000) CET`
`production: {"unknown":0,"coal":2206.6,"hydro":2416.39660,"wind":309.314,....}`
`updatedAt: 2018-02-20 (23:40:06.979) CET`
@systemcatch any idea?
</issue>
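One plausible explanation (an assumption, not verified against the CL-SIC parser source): a two-digit year from the upstream data being fed straight into a `datetime`, which is perfectly legal and yields year 18, rendered as 0018:

```python
from datetime import datetime

year = int("18")  # e.g. a two-digit year taken from the source data
dt = datetime(year, 2, 18, 6, 43)
print(dt.isoformat())  # 0018-02-18T06:43:00
```

If that is the cause, the parse site needs to add the century (e.g. `2000 + year`) before building the datetime.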
<code>
[start of parsers/lib/quality.py]
1 import datetime
2 import warnings
3
4 import arrow
5
6
7 class ValidationError(ValueError):
8 pass
9
10
11 def validate_consumption(obj, zone_key):
12 # Data quality check
13 if obj['consumption'] is not None and obj['consumption'] < 0:
14 raise ValidationError('%s: consumption has negative value '
15 '%s' % (zone_key, obj['consumption']))
16
17
18 def validate_exchange(item, k):
19 if item.get('sortedZoneKeys', None) != k:
20 raise ValidationError("Sorted country codes %s and %s don't "
21 "match" % (item.get('sortedZoneKeys', None), k))
22 if 'datetime' not in item:
23 raise ValidationError('datetime was not returned for %s' % k)
24 if type(item['datetime']) != datetime.datetime:
25 raise ValidationError('datetime %s is not valid for %s' %
26 (item['datetime'], k))
27 data_time = arrow.get(item['datetime'])
28 if data_time > arrow.now():
29 raise ValidationError("Data from %s can't be in the future, data was "
30 "%s, now is %s" % (k, data_time, arrow.now()))
31 if data_time.year < 2000:
32 raise ValidationError("Data from %s can't be before year 2000, it was "
33 "%s" % (k, data_time))
34
35
36 def validate_production(obj, zone_key):
37 if 'datetime' not in obj:
38 raise ValidationError(
39 'datetime was not returned for %s' % zone_key)
40 if 'countryCode' in obj:
41 warnings.warn('object has field `countryCode`. It should have '
42 '`zoneKey` instead. In {}'.format(obj))
43 if 'zoneKey' not in obj and 'countryCode' not in obj:
44 raise ValidationError('zoneKey was not returned for %s' % zone_key)
45 if not isinstance(obj['datetime'], datetime.datetime):
46 raise ValidationError('datetime %s is not valid for %s' %
47 (obj['datetime'], zone_key))
48 if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:
49 raise ValidationError("Zone keys %s and %s don't match in %s" %
50 (obj.get('zoneKey', None), zone_key, obj))
51 data_time = arrow.get(obj['datetime'])
52 arrow_now = arrow.utcnow()
53 if data_time > arrow_now:
54 raise ValidationError(
55 "Data from %s can't be in the future, data was %s, now is "
56 "%s" % (zone_key, data_time, arrow_now))
57
58 if ((obj.get('production', {}).get('unknown', None) is None and
59 obj.get('production', {}).get('coal', None) is None and
60 obj.get('production', {}).get('oil', None) is None and
61 obj.get('production', {}).get('gas', None) is None and zone_key
62 not in ['CH', 'NO', 'AUS-TAS', 'DK-BHM', 'US-NEISO'])):
63 raise ValidationError(
64 "Coal or oil or unknown production value is required for"
65 " %s" % zone_key)
66 for k, v in obj['production'].items():
67 if v is None:
68 continue
69 if v < 0:
70 raise ValidationError('%s: key %s has negative value %s' %
71 (zone_key, k, v))
72
[end of parsers/lib/quality.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/parsers/lib/quality.py b/parsers/lib/quality.py
--- a/parsers/lib/quality.py
+++ b/parsers/lib/quality.py
@@ -8,11 +8,26 @@
pass
+def validate_reasonable_time(item, k):
+ data_time = arrow.get(item['datetime'])
+ if data_time.year < 2000:
+ raise ValidationError("Data from %s can't be before year 2000, it was "
+ "%s" % (k, data_time))
+
+ arrow_now = arrow.utcnow()
+ if data_time > arrow_now:
+ raise ValidationError(
+ "Data from %s can't be in the future, data was %s, now is "
+ "%s" % (k, data_time, arrow_now))
+
+
+
def validate_consumption(obj, zone_key):
# Data quality check
if obj['consumption'] is not None and obj['consumption'] < 0:
raise ValidationError('%s: consumption has negative value '
'%s' % (zone_key, obj['consumption']))
+ validate_reasonable_time(obj, zone_key)
def validate_exchange(item, k):
@@ -24,13 +39,7 @@
if type(item['datetime']) != datetime.datetime:
raise ValidationError('datetime %s is not valid for %s' %
(item['datetime'], k))
- data_time = arrow.get(item['datetime'])
- if data_time > arrow.now():
- raise ValidationError("Data from %s can't be in the future, data was "
- "%s, now is %s" % (k, data_time, arrow.now()))
- if data_time.year < 2000:
- raise ValidationError("Data from %s can't be before year 2000, it was "
- "%s" % (k, data_time))
+ validate_reasonable_time(item, k)
def validate_production(obj, zone_key):
@@ -48,12 +57,6 @@
if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:
raise ValidationError("Zone keys %s and %s don't match in %s" %
(obj.get('zoneKey', None), zone_key, obj))
- data_time = arrow.get(obj['datetime'])
- arrow_now = arrow.utcnow()
- if data_time > arrow_now:
- raise ValidationError(
- "Data from %s can't be in the future, data was %s, now is "
- "%s" % (zone_key, data_time, arrow_now))
if ((obj.get('production', {}).get('unknown', None) is None and
obj.get('production', {}).get('coal', None) is None and
@@ -69,3 +72,4 @@
if v < 0:
raise ValidationError('%s: key %s has negative value %s' %
(zone_key, k, v))
+ validate_reasonable_time(obj, zone_key)
| {"golden_diff": "diff --git a/parsers/lib/quality.py b/parsers/lib/quality.py\n--- a/parsers/lib/quality.py\n+++ b/parsers/lib/quality.py\n@@ -8,11 +8,26 @@\n pass\n \n \n+def validate_reasonable_time(item, k):\n+ data_time = arrow.get(item['datetime'])\n+ if data_time.year < 2000:\n+ raise ValidationError(\"Data from %s can't be before year 2000, it was \"\n+ \"%s\" % (k, data_time))\n+\n+ arrow_now = arrow.utcnow()\n+ if data_time > arrow_now:\n+ raise ValidationError(\n+ \"Data from %s can't be in the future, data was %s, now is \"\n+ \"%s\" % (k, data_time, arrow_now))\n+\n+\n+\n def validate_consumption(obj, zone_key):\n # Data quality check\n if obj['consumption'] is not None and obj['consumption'] < 0:\n raise ValidationError('%s: consumption has negative value '\n '%s' % (zone_key, obj['consumption']))\n+ validate_reasonable_time(obj, zone_key)\n \n \n def validate_exchange(item, k):\n@@ -24,13 +39,7 @@\n if type(item['datetime']) != datetime.datetime:\n raise ValidationError('datetime %s is not valid for %s' %\n (item['datetime'], k))\n- data_time = arrow.get(item['datetime'])\n- if data_time > arrow.now():\n- raise ValidationError(\"Data from %s can't be in the future, data was \"\n- \"%s, now is %s\" % (k, data_time, arrow.now()))\n- if data_time.year < 2000:\n- raise ValidationError(\"Data from %s can't be before year 2000, it was \"\n- \"%s\" % (k, data_time))\n+ validate_reasonable_time(item, k)\n \n \n def validate_production(obj, zone_key):\n@@ -48,12 +57,6 @@\n if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:\n raise ValidationError(\"Zone keys %s and %s don't match in %s\" %\n (obj.get('zoneKey', None), zone_key, obj))\n- data_time = arrow.get(obj['datetime'])\n- arrow_now = arrow.utcnow()\n- if data_time > arrow_now:\n- raise ValidationError(\n- \"Data from %s can't be in the future, data was %s, now is \"\n- \"%s\" % (zone_key, data_time, arrow_now))\n \n if ((obj.get('production', {}).get('unknown', None) is None and\n obj.get('production', {}).get('coal', None) is None and\n@@ -69,3 +72,4 @@\n if v < 0:\n raise ValidationError('%s: key %s has negative value %s' %\n (zone_key, k, v))\n+ validate_reasonable_time(obj, zone_key)\n", "issue": "CL-SIC parser returns wrong datetime\nCL-SIC parser returns datetime of 0018 instead of 2018, as illustrated below:\r\n\r\n`datetime: 0018-02-18 (06:43:00.000) CET`\r\n`production: {\"unknown\":0,\"coal\":2206.6,\"hydro\":2416.39660,\"wind\":309.314,....}`\r\n`updatedAt: 2018-02-20 (23:40:06.979) CET`\r\n\r\n@systemcatch any idea?\r\n\n", "before_files": [{"content": "import datetime\nimport warnings\n\nimport arrow\n\n\nclass ValidationError(ValueError):\n pass\n\n\ndef validate_consumption(obj, zone_key):\n # Data quality check\n if obj['consumption'] is not None and obj['consumption'] < 0:\n raise ValidationError('%s: consumption has negative value '\n '%s' % (zone_key, obj['consumption']))\n\n\ndef validate_exchange(item, k):\n if item.get('sortedZoneKeys', None) != k:\n raise ValidationError(\"Sorted country codes %s and %s don't \"\n \"match\" % (item.get('sortedZoneKeys', None), k))\n if 'datetime' not in item:\n raise ValidationError('datetime was not returned for %s' % k)\n if type(item['datetime']) != datetime.datetime:\n raise ValidationError('datetime %s is not valid for %s' %\n (item['datetime'], k))\n data_time = arrow.get(item['datetime'])\n if data_time > arrow.now():\n raise ValidationError(\"Data from %s can't be in the future, data was \"\n \"%s, now is %s\" % (k, data_time, 
arrow.now()))\n if data_time.year < 2000:\n raise ValidationError(\"Data from %s can't be before year 2000, it was \"\n \"%s\" % (k, data_time))\n\n\ndef validate_production(obj, zone_key):\n if 'datetime' not in obj:\n raise ValidationError(\n 'datetime was not returned for %s' % zone_key)\n if 'countryCode' in obj:\n warnings.warn('object has field `countryCode`. It should have '\n '`zoneKey` instead. In {}'.format(obj))\n if 'zoneKey' not in obj and 'countryCode' not in obj:\n raise ValidationError('zoneKey was not returned for %s' % zone_key)\n if not isinstance(obj['datetime'], datetime.datetime):\n raise ValidationError('datetime %s is not valid for %s' %\n (obj['datetime'], zone_key))\n if (obj.get('zoneKey', None) or obj.get('countryCode', None)) != zone_key:\n raise ValidationError(\"Zone keys %s and %s don't match in %s\" %\n (obj.get('zoneKey', None), zone_key, obj))\n data_time = arrow.get(obj['datetime'])\n arrow_now = arrow.utcnow()\n if data_time > arrow_now:\n raise ValidationError(\n \"Data from %s can't be in the future, data was %s, now is \"\n \"%s\" % (zone_key, data_time, arrow_now))\n\n if ((obj.get('production', {}).get('unknown', None) is None and\n obj.get('production', {}).get('coal', None) is None and\n obj.get('production', {}).get('oil', None) is None and\n obj.get('production', {}).get('gas', None) is None and zone_key\n not in ['CH', 'NO', 'AUS-TAS', 'DK-BHM', 'US-NEISO'])):\n raise ValidationError(\n \"Coal or oil or unknown production value is required for\"\n \" %s\" % zone_key)\n for k, v in obj['production'].items():\n if v is None:\n continue\n if v < 0:\n raise ValidationError('%s: key %s has negative value %s' %\n (zone_key, k, v))\n", "path": "parsers/lib/quality.py"}]} | 1,553 | 670 |
gh_patches_debug_1852 | rasdani/github-patches | git_diff | jazzband__pip-tools-488 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Providing a source file which does not have an extension writes to a .txt file in current dir
If you happen to use an extensionless filename as the source requirements file, pip-compile will deduce the wrong output filename and generate a ".txt" file (a relative path, and therefore in the current working directory).
##### Environment Versions
1. OS Type: should be cross-platform, but observed on Windows
1. Python version: `$ python -V` 2.7.8
1. pip version: `$ pip --version` 9.0.1
1. pip-tools version: `$ pip-compile --version` 1.8.1rc2
##### Steps to replicate
1. echo "request" > no_extension
2. pip-compile no_extension
##### Expected result
We should error out because it is difficult to deduce a new name if there is no extension to remove.
##### Actual result
A .txt file is generated in the current directory.
</issue>
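
The string handling behind this is easy to see in isolation; a minimal sketch, using the `no_extension` file name from the reproduction above (nothing here is pip-tools-specific):

```python
>>> "no_extension".rpartition('.')       # separator not found: the base name comes back empty
('', '', 'no_extension')
>>> '' + '.txt'                          # so the derived output file is a bare ".txt"
'.txt'
>>> "requirements.in".rpartition('.')    # the case the code was written for
('requirements', '.', 'in')
>>> "no_extension".rsplit('.', 1)[0]     # keeps the whole name when there is no dot (used by the fix below)
'no_extension'
```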
<code>
[start of piptools/scripts/compile.py]
1 # coding: utf-8
2 from __future__ import (absolute_import, division, print_function,
3 unicode_literals)
4
5 import optparse
6 import os
7 import sys
8 import tempfile
9
10 import pip
11 from pip.req import InstallRequirement, parse_requirements
12
13 from .. import click
14 from ..exceptions import PipToolsError
15 from ..logging import log
16 from ..repositories import LocalRequirementsRepository, PyPIRepository
17 from ..resolver import Resolver
18 from ..utils import (assert_compatible_pip_version, is_pinned_requirement,
19 key_from_req, dedup)
20 from ..writer import OutputWriter
21
22 # Make sure we're using a compatible version of pip
23 assert_compatible_pip_version()
24
25 DEFAULT_REQUIREMENTS_FILE = 'requirements.in'
26
27
28 class PipCommand(pip.basecommand.Command):
29 name = 'PipCommand'
30
31
32 @click.command()
33 @click.version_option()
34 @click.option('-v', '--verbose', is_flag=True, help="Show more output")
35 @click.option('-n', '--dry-run', is_flag=True, help="Only show what would happen, don't change anything")
36 @click.option('-p', '--pre', is_flag=True, default=None, help="Allow resolving to prereleases (default is not)")
37 @click.option('-r', '--rebuild', is_flag=True, help="Clear any caches upfront, rebuild from scratch")
38 @click.option('-f', '--find-links', multiple=True, help="Look for archives in this directory or on this HTML page", envvar='PIP_FIND_LINKS') # noqa
39 @click.option('-i', '--index-url', help="Change index URL (defaults to PyPI)", envvar='PIP_INDEX_URL')
40 @click.option('--extra-index-url', multiple=True, help="Add additional index URL to search", envvar='PIP_EXTRA_INDEX_URL') # noqa
41 @click.option('--client-cert', help="Path to SSL client certificate, a single file containing the private key and the certificate in PEM format.") # noqa
42 @click.option('--trusted-host', multiple=True, envvar='PIP_TRUSTED_HOST',
43 help="Mark this host as trusted, even though it does not have "
44 "valid or any HTTPS.")
45 @click.option('--header/--no-header', is_flag=True, default=True,
46 help="Add header to generated file")
47 @click.option('--index/--no-index', is_flag=True, default=True,
48 help="Add index URL to generated file")
49 @click.option('--emit-trusted-host/--no-emit-trusted-host', is_flag=True,
50 default=True, help="Add trusted host option to generated file")
51 @click.option('--annotate/--no-annotate', is_flag=True, default=True,
52 help="Annotate results, indicating where dependencies come from")
53 @click.option('-U', '--upgrade', is_flag=True, default=False,
54 help='Try to upgrade all dependencies to their latest versions')
55 @click.option('-P', '--upgrade-package', 'upgrade_packages', nargs=1, multiple=True,
56 help="Specify particular packages to upgrade.")
57 @click.option('-o', '--output-file', nargs=1, type=str, default=None,
58 help=('Output file name. Required if more than one input file is given. '
59 'Will be derived from input file otherwise.'))
60 @click.option('--allow-unsafe', is_flag=True, default=False,
61 help="Pin packages considered unsafe: pip, setuptools & distribute")
62 @click.option('--generate-hashes', is_flag=True, default=False,
63 help="Generate pip 8 style hashes in the resulting requirements file.")
64 @click.option('--max-rounds', default=10,
65 help="Maximum number of rounds before resolving the requirements aborts.")
66 @click.argument('src_files', nargs=-1, type=click.Path(exists=True, allow_dash=True))
67 def cli(verbose, dry_run, pre, rebuild, find_links, index_url, extra_index_url,
68 client_cert, trusted_host, header, index, emit_trusted_host, annotate,
69 upgrade, upgrade_packages, output_file, allow_unsafe, generate_hashes,
70 src_files, max_rounds):
71 """Compiles requirements.txt from requirements.in specs."""
72 log.verbose = verbose
73
74 if len(src_files) == 0:
75 if os.path.exists(DEFAULT_REQUIREMENTS_FILE):
76 src_files = (DEFAULT_REQUIREMENTS_FILE,)
77 elif os.path.exists('setup.py'):
78 src_files = ('setup.py',)
79 if not output_file:
80 output_file = 'requirements.txt'
81 else:
82 raise click.BadParameter(("If you do not specify an input file, "
83 "the default is {} or setup.py").format(DEFAULT_REQUIREMENTS_FILE))
84
85 if len(src_files) == 1 and src_files[0] == '-':
86 if not output_file:
87 raise click.BadParameter('--output-file is required if input is from stdin')
88
89 if len(src_files) > 1 and not output_file:
90 raise click.BadParameter('--output-file is required if two or more input files are given.')
91
92 if output_file:
93 dst_file = output_file
94 else:
95 base_name, _, _ = src_files[0].rpartition('.')
96 dst_file = base_name + '.txt'
97
98 if upgrade and upgrade_packages:
99 raise click.BadParameter('Only one of --upgrade or --upgrade-package can be provided as an argument.')
100
101 ###
102 # Setup
103 ###
104
105 pip_command = get_pip_command()
106
107 pip_args = []
108 if find_links:
109 for link in find_links:
110 pip_args.extend(['-f', link])
111 if index_url:
112 pip_args.extend(['-i', index_url])
113 if extra_index_url:
114 for extra_index in extra_index_url:
115 pip_args.extend(['--extra-index-url', extra_index])
116 if client_cert:
117 pip_args.extend(['--client-cert', client_cert])
118 if pre:
119 pip_args.extend(['--pre'])
120 if trusted_host:
121 for host in trusted_host:
122 pip_args.extend(['--trusted-host', host])
123
124 pip_options, _ = pip_command.parse_args(pip_args)
125
126 session = pip_command._build_session(pip_options)
127 repository = PyPIRepository(pip_options, session)
128
129 # Pre-parse the inline package upgrade specs: they should take precedence
130 # over the stuff in the requirements files
131 upgrade_packages = [InstallRequirement.from_line(pkg)
132 for pkg in upgrade_packages]
133
134 # Proxy with a LocalRequirementsRepository if --upgrade is not specified
135 # (= default invocation)
136 if not (upgrade or upgrade_packages) and os.path.exists(dst_file):
137 ireqs = parse_requirements(dst_file, finder=repository.finder, session=repository.session, options=pip_options)
138 existing_pins = {key_from_req(ireq.req): ireq for ireq in ireqs if is_pinned_requirement(ireq)}
139 repository = LocalRequirementsRepository(existing_pins, repository)
140
141 log.debug('Using indexes:')
142 # remove duplicate index urls before processing
143 repository.finder.index_urls = list(dedup(repository.finder.index_urls))
144 for index_url in repository.finder.index_urls:
145 log.debug(' {}'.format(index_url))
146
147 if repository.finder.find_links:
148 log.debug('')
149 log.debug('Configuration:')
150 for find_link in repository.finder.find_links:
151 log.debug(' -f {}'.format(find_link))
152
153 ###
154 # Parsing/collecting initial requirements
155 ###
156
157 constraints = []
158 for src_file in src_files:
159 is_setup_file = os.path.basename(src_file) == 'setup.py'
160 if is_setup_file or src_file == '-':
161 # pip requires filenames and not files. Since we want to support
162 # piping from stdin, we need to briefly save the input from stdin
163 # to a temporary file and have pip read that. also used for
164 # reading requirements from install_requires in setup.py.
165 tmpfile = tempfile.NamedTemporaryFile(mode='wt', delete=False)
166 if is_setup_file:
167 from distutils.core import run_setup
168 dist = run_setup(src_file)
169 tmpfile.write('\n'.join(dist.install_requires))
170 else:
171 tmpfile.write(sys.stdin.read())
172 tmpfile.flush()
173 constraints.extend(parse_requirements(
174 tmpfile.name, finder=repository.finder, session=repository.session, options=pip_options))
175 else:
176 constraints.extend(parse_requirements(
177 src_file, finder=repository.finder, session=repository.session, options=pip_options))
178
179 # Check the given base set of constraints first
180 Resolver.check_constraints(constraints)
181
182 try:
183 resolver = Resolver(constraints, repository, prereleases=pre,
184 clear_caches=rebuild, allow_unsafe=allow_unsafe)
185 results = resolver.resolve(max_rounds=max_rounds)
186 if generate_hashes:
187 hashes = resolver.resolve_hashes(results)
188 else:
189 hashes = None
190 except PipToolsError as e:
191 log.error(str(e))
192 sys.exit(2)
193
194 log.debug('')
195
196 ##
197 # Output
198 ##
199
200 # Compute reverse dependency annotations statically, from the
201 # dependency cache that the resolver has populated by now.
202 #
203 # TODO (1a): reverse deps for any editable package are lost
204 # what SHOULD happen is that they are cached in memory, just
205 # not persisted to disk!
206 #
207 # TODO (1b): perhaps it's easiest if the dependency cache has an API
208 # that could take InstallRequirements directly, like:
209 #
210 # cache.set(ireq, ...)
211 #
212 # then, when ireq is editable, it would store in
213 #
214 # editables[egg_name][link_without_fragment] = deps
215 # editables['pip-tools']['git+...ols.git@future'] = {'click>=3.0', 'six'}
216 #
217 # otherwise:
218 #
219 # self[as_name_version_tuple(ireq)] = {'click>=3.0', 'six'}
220 #
221 reverse_dependencies = None
222 if annotate:
223 reverse_dependencies = resolver.reverse_dependencies(results)
224
225 writer = OutputWriter(src_files, dst_file, dry_run=dry_run,
226 emit_header=header, emit_index=index,
227 emit_trusted_host=emit_trusted_host,
228 annotate=annotate,
229 generate_hashes=generate_hashes,
230 default_index_url=repository.DEFAULT_INDEX_URL,
231 index_urls=repository.finder.index_urls,
232 trusted_hosts=pip_options.trusted_hosts,
233 format_control=repository.finder.format_control)
234 writer.write(results=results,
235 reverse_dependencies=reverse_dependencies,
236 primary_packages={key_from_req(ireq.req) for ireq in constraints if not ireq.constraint},
237 markers={key_from_req(ireq.req): ireq.markers
238 for ireq in constraints if ireq.markers},
239 hashes=hashes)
240
241 if dry_run:
242 log.warning('Dry-run, so nothing updated.')
243
244
245 def get_pip_command():
246 # Use pip's parser for pip.conf management and defaults.
247 # General options (find_links, index_url, extra_index_url, trusted_host,
248 # and pre) are defered to pip.
249 pip_command = PipCommand()
250 index_opts = pip.cmdoptions.make_option_group(
251 pip.cmdoptions.index_group,
252 pip_command.parser,
253 )
254 pip_command.parser.insert_option_group(0, index_opts)
255 pip_command.parser.add_option(optparse.Option('--pre', action='store_true', default=False))
256
257 return pip_command
258
[end of piptools/scripts/compile.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/piptools/scripts/compile.py b/piptools/scripts/compile.py
--- a/piptools/scripts/compile.py
+++ b/piptools/scripts/compile.py
@@ -92,7 +92,7 @@
if output_file:
dst_file = output_file
else:
- base_name, _, _ = src_files[0].rpartition('.')
+ base_name = src_files[0].rsplit('.', 1)[0]
dst_file = base_name + '.txt'
if upgrade and upgrade_packages:
| {"golden_diff": "diff --git a/piptools/scripts/compile.py b/piptools/scripts/compile.py\n--- a/piptools/scripts/compile.py\n+++ b/piptools/scripts/compile.py\n@@ -92,7 +92,7 @@\n if output_file:\n dst_file = output_file\n else:\n- base_name, _, _ = src_files[0].rpartition('.')\n+ base_name = src_files[0].rsplit('.', 1)[0]\n dst_file = base_name + '.txt'\n \n if upgrade and upgrade_packages:\n", "issue": "Providing a source file which does not have an extension writes to a .txt file in current dir\nIf you happen to use an extensionless filename as a source of requirements in, pip-compile will deduce the wrong output filename and generate a \".txt\" file (relative file, thus at current working dir).\r\n\r\n##### Environment Versions\r\n\r\n1. OS Type Should be cross platform, but observed on Windows\r\n1. Python version: `$ python -V` 2.7.8\r\n1. pip version: `$ pip --version` 9.0.1\r\n1. pip-tools version: `$ pip-compile --version` 1.8.1rc2\r\n\r\n##### Steps to replicate\r\n\r\n1. echo \"request\" > no_extension\r\n2. pip-compile no_extension\r\n\r\n##### Expected result\r\n\r\nWe should error out because it is difficult to deduce a new name if there is no extension to remove.\r\n\r\n##### Actual result\r\n\r\na .txt file is generated at the current directory.\r\n\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import (absolute_import, division, print_function,\n unicode_literals)\n\nimport optparse\nimport os\nimport sys\nimport tempfile\n\nimport pip\nfrom pip.req import InstallRequirement, parse_requirements\n\nfrom .. import click\nfrom ..exceptions import PipToolsError\nfrom ..logging import log\nfrom ..repositories import LocalRequirementsRepository, PyPIRepository\nfrom ..resolver import Resolver\nfrom ..utils import (assert_compatible_pip_version, is_pinned_requirement,\n key_from_req, dedup)\nfrom ..writer import OutputWriter\n\n# Make sure we're using a compatible version of pip\nassert_compatible_pip_version()\n\nDEFAULT_REQUIREMENTS_FILE = 'requirements.in'\n\n\nclass PipCommand(pip.basecommand.Command):\n name = 'PipCommand'\n\n\[email protected]()\[email protected]_option()\[email protected]('-v', '--verbose', is_flag=True, help=\"Show more output\")\[email protected]('-n', '--dry-run', is_flag=True, help=\"Only show what would happen, don't change anything\")\[email protected]('-p', '--pre', is_flag=True, default=None, help=\"Allow resolving to prereleases (default is not)\")\[email protected]('-r', '--rebuild', is_flag=True, help=\"Clear any caches upfront, rebuild from scratch\")\[email protected]('-f', '--find-links', multiple=True, help=\"Look for archives in this directory or on this HTML page\", envvar='PIP_FIND_LINKS') # noqa\[email protected]('-i', '--index-url', help=\"Change index URL (defaults to PyPI)\", envvar='PIP_INDEX_URL')\[email protected]('--extra-index-url', multiple=True, help=\"Add additional index URL to search\", envvar='PIP_EXTRA_INDEX_URL') # noqa\[email protected]('--client-cert', help=\"Path to SSL client certificate, a single file containing the private key and the certificate in PEM format.\") # noqa\[email protected]('--trusted-host', multiple=True, envvar='PIP_TRUSTED_HOST',\n help=\"Mark this host as trusted, even though it does not have \"\n \"valid or any HTTPS.\")\[email protected]('--header/--no-header', is_flag=True, default=True,\n help=\"Add header to generated file\")\[email protected]('--index/--no-index', is_flag=True, default=True,\n help=\"Add index URL to generated file\")\[email 
protected]('--emit-trusted-host/--no-emit-trusted-host', is_flag=True,\n default=True, help=\"Add trusted host option to generated file\")\[email protected]('--annotate/--no-annotate', is_flag=True, default=True,\n help=\"Annotate results, indicating where dependencies come from\")\[email protected]('-U', '--upgrade', is_flag=True, default=False,\n help='Try to upgrade all dependencies to their latest versions')\[email protected]('-P', '--upgrade-package', 'upgrade_packages', nargs=1, multiple=True,\n help=\"Specify particular packages to upgrade.\")\[email protected]('-o', '--output-file', nargs=1, type=str, default=None,\n help=('Output file name. Required if more than one input file is given. '\n 'Will be derived from input file otherwise.'))\[email protected]('--allow-unsafe', is_flag=True, default=False,\n help=\"Pin packages considered unsafe: pip, setuptools & distribute\")\[email protected]('--generate-hashes', is_flag=True, default=False,\n help=\"Generate pip 8 style hashes in the resulting requirements file.\")\[email protected]('--max-rounds', default=10,\n help=\"Maximum number of rounds before resolving the requirements aborts.\")\[email protected]('src_files', nargs=-1, type=click.Path(exists=True, allow_dash=True))\ndef cli(verbose, dry_run, pre, rebuild, find_links, index_url, extra_index_url,\n client_cert, trusted_host, header, index, emit_trusted_host, annotate,\n upgrade, upgrade_packages, output_file, allow_unsafe, generate_hashes,\n src_files, max_rounds):\n \"\"\"Compiles requirements.txt from requirements.in specs.\"\"\"\n log.verbose = verbose\n\n if len(src_files) == 0:\n if os.path.exists(DEFAULT_REQUIREMENTS_FILE):\n src_files = (DEFAULT_REQUIREMENTS_FILE,)\n elif os.path.exists('setup.py'):\n src_files = ('setup.py',)\n if not output_file:\n output_file = 'requirements.txt'\n else:\n raise click.BadParameter((\"If you do not specify an input file, \"\n \"the default is {} or setup.py\").format(DEFAULT_REQUIREMENTS_FILE))\n\n if len(src_files) == 1 and src_files[0] == '-':\n if not output_file:\n raise click.BadParameter('--output-file is required if input is from stdin')\n\n if len(src_files) > 1 and not output_file:\n raise click.BadParameter('--output-file is required if two or more input files are given.')\n\n if output_file:\n dst_file = output_file\n else:\n base_name, _, _ = src_files[0].rpartition('.')\n dst_file = base_name + '.txt'\n\n if upgrade and upgrade_packages:\n raise click.BadParameter('Only one of --upgrade or --upgrade-package can be provided as an argument.')\n\n ###\n # Setup\n ###\n\n pip_command = get_pip_command()\n\n pip_args = []\n if find_links:\n for link in find_links:\n pip_args.extend(['-f', link])\n if index_url:\n pip_args.extend(['-i', index_url])\n if extra_index_url:\n for extra_index in extra_index_url:\n pip_args.extend(['--extra-index-url', extra_index])\n if client_cert:\n pip_args.extend(['--client-cert', client_cert])\n if pre:\n pip_args.extend(['--pre'])\n if trusted_host:\n for host in trusted_host:\n pip_args.extend(['--trusted-host', host])\n\n pip_options, _ = pip_command.parse_args(pip_args)\n\n session = pip_command._build_session(pip_options)\n repository = PyPIRepository(pip_options, session)\n\n # Pre-parse the inline package upgrade specs: they should take precedence\n # over the stuff in the requirements files\n upgrade_packages = [InstallRequirement.from_line(pkg)\n for pkg in upgrade_packages]\n\n # Proxy with a LocalRequirementsRepository if --upgrade is not specified\n # (= default invocation)\n if 
not (upgrade or upgrade_packages) and os.path.exists(dst_file):\n ireqs = parse_requirements(dst_file, finder=repository.finder, session=repository.session, options=pip_options)\n existing_pins = {key_from_req(ireq.req): ireq for ireq in ireqs if is_pinned_requirement(ireq)}\n repository = LocalRequirementsRepository(existing_pins, repository)\n\n log.debug('Using indexes:')\n # remove duplicate index urls before processing\n repository.finder.index_urls = list(dedup(repository.finder.index_urls))\n for index_url in repository.finder.index_urls:\n log.debug(' {}'.format(index_url))\n\n if repository.finder.find_links:\n log.debug('')\n log.debug('Configuration:')\n for find_link in repository.finder.find_links:\n log.debug(' -f {}'.format(find_link))\n\n ###\n # Parsing/collecting initial requirements\n ###\n\n constraints = []\n for src_file in src_files:\n is_setup_file = os.path.basename(src_file) == 'setup.py'\n if is_setup_file or src_file == '-':\n # pip requires filenames and not files. Since we want to support\n # piping from stdin, we need to briefly save the input from stdin\n # to a temporary file and have pip read that. also used for\n # reading requirements from install_requires in setup.py.\n tmpfile = tempfile.NamedTemporaryFile(mode='wt', delete=False)\n if is_setup_file:\n from distutils.core import run_setup\n dist = run_setup(src_file)\n tmpfile.write('\\n'.join(dist.install_requires))\n else:\n tmpfile.write(sys.stdin.read())\n tmpfile.flush()\n constraints.extend(parse_requirements(\n tmpfile.name, finder=repository.finder, session=repository.session, options=pip_options))\n else:\n constraints.extend(parse_requirements(\n src_file, finder=repository.finder, session=repository.session, options=pip_options))\n\n # Check the given base set of constraints first\n Resolver.check_constraints(constraints)\n\n try:\n resolver = Resolver(constraints, repository, prereleases=pre,\n clear_caches=rebuild, allow_unsafe=allow_unsafe)\n results = resolver.resolve(max_rounds=max_rounds)\n if generate_hashes:\n hashes = resolver.resolve_hashes(results)\n else:\n hashes = None\n except PipToolsError as e:\n log.error(str(e))\n sys.exit(2)\n\n log.debug('')\n\n ##\n # Output\n ##\n\n # Compute reverse dependency annotations statically, from the\n # dependency cache that the resolver has populated by now.\n #\n # TODO (1a): reverse deps for any editable package are lost\n # what SHOULD happen is that they are cached in memory, just\n # not persisted to disk!\n #\n # TODO (1b): perhaps it's easiest if the dependency cache has an API\n # that could take InstallRequirements directly, like:\n #\n # cache.set(ireq, ...)\n #\n # then, when ireq is editable, it would store in\n #\n # editables[egg_name][link_without_fragment] = deps\n # editables['pip-tools']['git+...ols.git@future'] = {'click>=3.0', 'six'}\n #\n # otherwise:\n #\n # self[as_name_version_tuple(ireq)] = {'click>=3.0', 'six'}\n #\n reverse_dependencies = None\n if annotate:\n reverse_dependencies = resolver.reverse_dependencies(results)\n\n writer = OutputWriter(src_files, dst_file, dry_run=dry_run,\n emit_header=header, emit_index=index,\n emit_trusted_host=emit_trusted_host,\n annotate=annotate,\n generate_hashes=generate_hashes,\n default_index_url=repository.DEFAULT_INDEX_URL,\n index_urls=repository.finder.index_urls,\n trusted_hosts=pip_options.trusted_hosts,\n format_control=repository.finder.format_control)\n writer.write(results=results,\n reverse_dependencies=reverse_dependencies,\n 
primary_packages={key_from_req(ireq.req) for ireq in constraints if not ireq.constraint},\n markers={key_from_req(ireq.req): ireq.markers\n for ireq in constraints if ireq.markers},\n hashes=hashes)\n\n if dry_run:\n log.warning('Dry-run, so nothing updated.')\n\n\ndef get_pip_command():\n # Use pip's parser for pip.conf management and defaults.\n # General options (find_links, index_url, extra_index_url, trusted_host,\n # and pre) are defered to pip.\n pip_command = PipCommand()\n index_opts = pip.cmdoptions.make_option_group(\n pip.cmdoptions.index_group,\n pip_command.parser,\n )\n pip_command.parser.insert_option_group(0, index_opts)\n pip_command.parser.add_option(optparse.Option('--pre', action='store_true', default=False))\n\n return pip_command\n", "path": "piptools/scripts/compile.py"}]} | 3,836 | 119 |
gh_patches_debug_10762 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-contrib-363 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
opentelemetry-instrumentation-grpc fails if client channel is used as a context manager
**Describe your environment**
Verified on python 3.8.6 with opentelemetry 0.17b0
**Steps to reproduce**
Use a gRPC channel as a context manager.
For instance, this happens if you add instrumentation to the [gRPC helloworld example client](https://github.com/grpc/grpc/tree/026046d84301999936a759c1779669c04c6ffaaa/examples/python/helloworld):
```py
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter client."""
from __future__ import print_function
import logging
import grpc
from opentelemetry import trace
from opentelemetry.instrumentation.grpc import GrpcInstrumentorClient
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import (
ConsoleSpanExporter,
SimpleExportSpanProcessor,
)
import helloworld_pb2
import helloworld_pb2_grpc
trace.set_tracer_provider(TracerProvider())
trace.get_tracer_provider().add_span_processor(
SimpleExportSpanProcessor(ConsoleSpanExporter())
)
GrpcInstrumentorClient().instrument()
def run():
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('localhost:50051') as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
print("Greeter client received: " + response.message)
if __name__ == '__main__':
logging.basicConfig()
run()
```
**What is the expected behavior?**
The channel should be usable as normal, except now with instrumentation
**What is the actual behavior?**
```
❯ python greeter_client.py
Traceback (most recent call last):
File "greeter_client.py", line 52, in <module>
run()
File "greeter_client.py", line 44, in run
with grpc.insecure_channel('localhost:50051') as channel:
File "/home/isobel/workspace/tmp/otlp/grpc/lib/python3.8/site-packages/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py", line 235, in __enter__
raise NotImplementedError()
NotImplementedError
```
**Additional context**
LMK and I can make a PR
</issue>
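
For context (an editorial sketch, not the shipped implementation): the wrapper channel only needs to delegate the context-manager protocol to behave like a plain `grpc.Channel`, which is exactly what the diff further down this entry does. The excerpt below shows just the two methods as they would read inside the existing `_InterceptorChannel` class.

```python
import grpc

class _InterceptorChannel(grpc.Channel):
    # ...__init__, the multi-callable factories and close() stay as they are...

    def __enter__(self):
        # entering the context hands back the instrumented channel itself
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # leaving the context closes the wrapped channel, matching grpc.Channel
        self.close()
```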
<code>
[start of instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py]
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # pylint:disable=relative-beyond-top-level
16 # pylint:disable=arguments-differ
17 # pylint:disable=no-member
18 # pylint:disable=signature-differs
19
20 """Implementation of gRPC Python interceptors."""
21
22
23 import collections
24
25 import grpc
26
27 from opentelemetry.instrumentation.grpc import grpcext
28
29
30 class _UnaryClientInfo(
31 collections.namedtuple("_UnaryClientInfo", ("full_method", "timeout"))
32 ):
33 pass
34
35
36 class _StreamClientInfo(
37 collections.namedtuple(
38 "_StreamClientInfo",
39 ("full_method", "is_client_stream", "is_server_stream", "timeout"),
40 )
41 ):
42 pass
43
44
45 class _InterceptorUnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
46 def __init__(self, method, base_callable, interceptor):
47 self._method = method
48 self._base_callable = base_callable
49 self._interceptor = interceptor
50
51 def __call__(self, request, timeout=None, metadata=None, credentials=None):
52 def invoker(request, metadata):
53 return self._base_callable(request, timeout, metadata, credentials)
54
55 client_info = _UnaryClientInfo(self._method, timeout)
56 return self._interceptor.intercept_unary(
57 request, metadata, client_info, invoker
58 )
59
60 def with_call(
61 self, request, timeout=None, metadata=None, credentials=None
62 ):
63 def invoker(request, metadata):
64 return self._base_callable.with_call(
65 request, timeout, metadata, credentials
66 )
67
68 client_info = _UnaryClientInfo(self._method, timeout)
69 return self._interceptor.intercept_unary(
70 request, metadata, client_info, invoker
71 )
72
73 def future(self, request, timeout=None, metadata=None, credentials=None):
74 def invoker(request, metadata):
75 return self._base_callable.future(
76 request, timeout, metadata, credentials
77 )
78
79 client_info = _UnaryClientInfo(self._method, timeout)
80 return self._interceptor.intercept_unary(
81 request, metadata, client_info, invoker
82 )
83
84
85 class _InterceptorUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
86 def __init__(self, method, base_callable, interceptor):
87 self._method = method
88 self._base_callable = base_callable
89 self._interceptor = interceptor
90
91 def __call__(self, request, timeout=None, metadata=None, credentials=None):
92 def invoker(request, metadata):
93 return self._base_callable(request, timeout, metadata, credentials)
94
95 client_info = _StreamClientInfo(self._method, False, True, timeout)
96 return self._interceptor.intercept_stream(
97 request, metadata, client_info, invoker
98 )
99
100
101 class _InterceptorStreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
102 def __init__(self, method, base_callable, interceptor):
103 self._method = method
104 self._base_callable = base_callable
105 self._interceptor = interceptor
106
107 def __call__(
108 self, request_iterator, timeout=None, metadata=None, credentials=None
109 ):
110 def invoker(request_iterator, metadata):
111 return self._base_callable(
112 request_iterator, timeout, metadata, credentials
113 )
114
115 client_info = _StreamClientInfo(self._method, True, False, timeout)
116 return self._interceptor.intercept_stream(
117 request_iterator, metadata, client_info, invoker
118 )
119
120 def with_call(
121 self, request_iterator, timeout=None, metadata=None, credentials=None
122 ):
123 def invoker(request_iterator, metadata):
124 return self._base_callable.with_call(
125 request_iterator, timeout, metadata, credentials
126 )
127
128 client_info = _StreamClientInfo(self._method, True, False, timeout)
129 return self._interceptor.intercept_stream(
130 request_iterator, metadata, client_info, invoker
131 )
132
133 def future(
134 self, request_iterator, timeout=None, metadata=None, credentials=None
135 ):
136 def invoker(request_iterator, metadata):
137 return self._base_callable.future(
138 request_iterator, timeout, metadata, credentials
139 )
140
141 client_info = _StreamClientInfo(self._method, True, False, timeout)
142 return self._interceptor.intercept_stream(
143 request_iterator, metadata, client_info, invoker
144 )
145
146
147 class _InterceptorStreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
148 def __init__(self, method, base_callable, interceptor):
149 self._method = method
150 self._base_callable = base_callable
151 self._interceptor = interceptor
152
153 def __call__(
154 self, request_iterator, timeout=None, metadata=None, credentials=None
155 ):
156 def invoker(request_iterator, metadata):
157 return self._base_callable(
158 request_iterator, timeout, metadata, credentials
159 )
160
161 client_info = _StreamClientInfo(self._method, True, True, timeout)
162 return self._interceptor.intercept_stream(
163 request_iterator, metadata, client_info, invoker
164 )
165
166
167 class _InterceptorChannel(grpc.Channel):
168 def __init__(self, channel, interceptor):
169 self._channel = channel
170 self._interceptor = interceptor
171
172 def subscribe(self, *args, **kwargs):
173 self._channel.subscribe(*args, **kwargs)
174
175 def unsubscribe(self, *args, **kwargs):
176 self._channel.unsubscribe(*args, **kwargs)
177
178 def unary_unary(
179 self, method, request_serializer=None, response_deserializer=None
180 ):
181 base_callable = self._channel.unary_unary(
182 method, request_serializer, response_deserializer
183 )
184 if isinstance(self._interceptor, grpcext.UnaryClientInterceptor):
185 return _InterceptorUnaryUnaryMultiCallable(
186 method, base_callable, self._interceptor
187 )
188 return base_callable
189
190 def unary_stream(
191 self, method, request_serializer=None, response_deserializer=None
192 ):
193 base_callable = self._channel.unary_stream(
194 method, request_serializer, response_deserializer
195 )
196 if isinstance(self._interceptor, grpcext.StreamClientInterceptor):
197 return _InterceptorUnaryStreamMultiCallable(
198 method, base_callable, self._interceptor
199 )
200 return base_callable
201
202 def stream_unary(
203 self, method, request_serializer=None, response_deserializer=None
204 ):
205 base_callable = self._channel.stream_unary(
206 method, request_serializer, response_deserializer
207 )
208 if isinstance(self._interceptor, grpcext.StreamClientInterceptor):
209 return _InterceptorStreamUnaryMultiCallable(
210 method, base_callable, self._interceptor
211 )
212 return base_callable
213
214 def stream_stream(
215 self, method, request_serializer=None, response_deserializer=None
216 ):
217 base_callable = self._channel.stream_stream(
218 method, request_serializer, response_deserializer
219 )
220 if isinstance(self._interceptor, grpcext.StreamClientInterceptor):
221 return _InterceptorStreamStreamMultiCallable(
222 method, base_callable, self._interceptor
223 )
224 return base_callable
225
226 def close(self):
227 if not hasattr(self._channel, "close"):
228 raise RuntimeError(
229 "close() is not supported with the installed version of grpcio"
230 )
231 self._channel.close()
232
233 def __enter__(self):
234 """Enters the runtime context related to the channel object."""
235 raise NotImplementedError()
236
237 def __exit__(self, exc_type, exc_val, exc_tb):
238 """Exits the runtime context related to the channel object."""
239 raise NotImplementedError()
240
241
242 def intercept_channel(channel, *interceptors):
243 result = channel
244 for interceptor in interceptors:
245 if not isinstance(
246 interceptor, grpcext.UnaryClientInterceptor
247 ) and not isinstance(interceptor, grpcext.StreamClientInterceptor):
248 raise TypeError(
249 "interceptor must be either a "
250 "grpcext.UnaryClientInterceptor or a "
251 "grpcext.StreamClientInterceptor"
252 )
253 result = _InterceptorChannel(result, interceptor)
254 return result
255
[end of instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py b/instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py
--- a/instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py
+++ b/instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py
@@ -232,11 +232,11 @@
def __enter__(self):
"""Enters the runtime context related to the channel object."""
- raise NotImplementedError()
+ return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Exits the runtime context related to the channel object."""
- raise NotImplementedError()
+ self.close()
def intercept_channel(channel, *interceptors):
| {"golden_diff": "diff --git a/instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py b/instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py\n--- a/instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py\n+++ b/instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py\n@@ -232,11 +232,11 @@\n \n def __enter__(self):\n \"\"\"Enters the runtime context related to the channel object.\"\"\"\n- raise NotImplementedError()\n+ return self\n \n def __exit__(self, exc_type, exc_val, exc_tb):\n \"\"\"Exits the runtime context related to the channel object.\"\"\"\n- raise NotImplementedError()\n+ self.close()\n \n \n def intercept_channel(channel, *interceptors):\n", "issue": "opentelemetry-instrumentation-grpc fails if client channel is used as a context manager\n**Describe your environment**\r\n\r\nVerified on python 3.8.6 with opentelemetry 0.17b0\r\n\r\n**Steps to reproduce**\r\n\r\nUse a gRPC channel as a context manager.\r\n\r\nFor instance, this happens if you add instrumentation to the [gRPC helloworld example client](https://github.com/grpc/grpc/tree/026046d84301999936a759c1779669c04c6ffaaa/examples/python/helloworld):\r\n\r\n```py\r\n# Copyright 2015 gRPC authors.\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\"\"\"The Python implementation of the GRPC helloworld.Greeter client.\"\"\"\r\n\r\nfrom __future__ import print_function\r\nimport logging\r\n\r\nimport grpc\r\n\r\nfrom opentelemetry import trace\r\nfrom opentelemetry.instrumentation.grpc import GrpcInstrumentorClient\r\nfrom opentelemetry.sdk.trace import TracerProvider\r\nfrom opentelemetry.sdk.trace.export import (\r\n ConsoleSpanExporter,\r\n SimpleExportSpanProcessor,\r\n)\r\n\r\nimport helloworld_pb2\r\nimport helloworld_pb2_grpc\r\n\r\ntrace.set_tracer_provider(TracerProvider())\r\ntrace.get_tracer_provider().add_span_processor(\r\n SimpleExportSpanProcessor(ConsoleSpanExporter())\r\n)\r\n\r\nGrpcInstrumentorClient().instrument()\r\n\r\n\r\ndef run():\r\n # NOTE(gRPC Python Team): .close() is possible on a channel and should be\r\n # used in circumstances in which the with statement does not fit the needs\r\n # of the code.\r\n with grpc.insecure_channel('localhost:50051') as channel:\r\n stub = helloworld_pb2_grpc.GreeterStub(channel)\r\n response = stub.SayHello(helloworld_pb2.HelloRequest(name='you'))\r\n print(\"Greeter client received: \" + response.message)\r\n\r\n\r\nif __name__ == '__main__':\r\n logging.basicConfig()\r\n run()\r\n```\r\n\r\n**What is the expected behavior?**\r\n\r\nThe channel should be usable as normal, except now with instrumentation\r\n\r\n**What is the actual behavior?**\r\n\r\n```\r\n\u276f python greeter_client.py\r\nTraceback (most recent call last):\r\n File \"greeter_client.py\", line 52, in <module>\r\n run()\r\n File 
\"greeter_client.py\", line 44, in run\r\n with grpc.insecure_channel('localhost:50051') as channel:\r\n File \"/home/isobel/workspace/tmp/otlp/grpc/lib/python3.8/site-packages/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py\", line 235, in __enter__\r\n raise NotImplementedError()\r\nNotImplementedError\r\n```\r\n\r\n**Additional context**\r\n\r\nLMK and I can make a PR\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# pylint:disable=relative-beyond-top-level\n# pylint:disable=arguments-differ\n# pylint:disable=no-member\n# pylint:disable=signature-differs\n\n\"\"\"Implementation of gRPC Python interceptors.\"\"\"\n\n\nimport collections\n\nimport grpc\n\nfrom opentelemetry.instrumentation.grpc import grpcext\n\n\nclass _UnaryClientInfo(\n collections.namedtuple(\"_UnaryClientInfo\", (\"full_method\", \"timeout\"))\n):\n pass\n\n\nclass _StreamClientInfo(\n collections.namedtuple(\n \"_StreamClientInfo\",\n (\"full_method\", \"is_client_stream\", \"is_server_stream\", \"timeout\"),\n )\n):\n pass\n\n\nclass _InterceptorUnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):\n def __init__(self, method, base_callable, interceptor):\n self._method = method\n self._base_callable = base_callable\n self._interceptor = interceptor\n\n def __call__(self, request, timeout=None, metadata=None, credentials=None):\n def invoker(request, metadata):\n return self._base_callable(request, timeout, metadata, credentials)\n\n client_info = _UnaryClientInfo(self._method, timeout)\n return self._interceptor.intercept_unary(\n request, metadata, client_info, invoker\n )\n\n def with_call(\n self, request, timeout=None, metadata=None, credentials=None\n ):\n def invoker(request, metadata):\n return self._base_callable.with_call(\n request, timeout, metadata, credentials\n )\n\n client_info = _UnaryClientInfo(self._method, timeout)\n return self._interceptor.intercept_unary(\n request, metadata, client_info, invoker\n )\n\n def future(self, request, timeout=None, metadata=None, credentials=None):\n def invoker(request, metadata):\n return self._base_callable.future(\n request, timeout, metadata, credentials\n )\n\n client_info = _UnaryClientInfo(self._method, timeout)\n return self._interceptor.intercept_unary(\n request, metadata, client_info, invoker\n )\n\n\nclass _InterceptorUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):\n def __init__(self, method, base_callable, interceptor):\n self._method = method\n self._base_callable = base_callable\n self._interceptor = interceptor\n\n def __call__(self, request, timeout=None, metadata=None, credentials=None):\n def invoker(request, metadata):\n return self._base_callable(request, timeout, metadata, credentials)\n\n client_info = _StreamClientInfo(self._method, False, True, timeout)\n return self._interceptor.intercept_stream(\n request, metadata, client_info, invoker\n )\n\n\nclass _InterceptorStreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):\n def __init__(self, 
method, base_callable, interceptor):\n self._method = method\n self._base_callable = base_callable\n self._interceptor = interceptor\n\n def __call__(\n self, request_iterator, timeout=None, metadata=None, credentials=None\n ):\n def invoker(request_iterator, metadata):\n return self._base_callable(\n request_iterator, timeout, metadata, credentials\n )\n\n client_info = _StreamClientInfo(self._method, True, False, timeout)\n return self._interceptor.intercept_stream(\n request_iterator, metadata, client_info, invoker\n )\n\n def with_call(\n self, request_iterator, timeout=None, metadata=None, credentials=None\n ):\n def invoker(request_iterator, metadata):\n return self._base_callable.with_call(\n request_iterator, timeout, metadata, credentials\n )\n\n client_info = _StreamClientInfo(self._method, True, False, timeout)\n return self._interceptor.intercept_stream(\n request_iterator, metadata, client_info, invoker\n )\n\n def future(\n self, request_iterator, timeout=None, metadata=None, credentials=None\n ):\n def invoker(request_iterator, metadata):\n return self._base_callable.future(\n request_iterator, timeout, metadata, credentials\n )\n\n client_info = _StreamClientInfo(self._method, True, False, timeout)\n return self._interceptor.intercept_stream(\n request_iterator, metadata, client_info, invoker\n )\n\n\nclass _InterceptorStreamStreamMultiCallable(grpc.StreamStreamMultiCallable):\n def __init__(self, method, base_callable, interceptor):\n self._method = method\n self._base_callable = base_callable\n self._interceptor = interceptor\n\n def __call__(\n self, request_iterator, timeout=None, metadata=None, credentials=None\n ):\n def invoker(request_iterator, metadata):\n return self._base_callable(\n request_iterator, timeout, metadata, credentials\n )\n\n client_info = _StreamClientInfo(self._method, True, True, timeout)\n return self._interceptor.intercept_stream(\n request_iterator, metadata, client_info, invoker\n )\n\n\nclass _InterceptorChannel(grpc.Channel):\n def __init__(self, channel, interceptor):\n self._channel = channel\n self._interceptor = interceptor\n\n def subscribe(self, *args, **kwargs):\n self._channel.subscribe(*args, **kwargs)\n\n def unsubscribe(self, *args, **kwargs):\n self._channel.unsubscribe(*args, **kwargs)\n\n def unary_unary(\n self, method, request_serializer=None, response_deserializer=None\n ):\n base_callable = self._channel.unary_unary(\n method, request_serializer, response_deserializer\n )\n if isinstance(self._interceptor, grpcext.UnaryClientInterceptor):\n return _InterceptorUnaryUnaryMultiCallable(\n method, base_callable, self._interceptor\n )\n return base_callable\n\n def unary_stream(\n self, method, request_serializer=None, response_deserializer=None\n ):\n base_callable = self._channel.unary_stream(\n method, request_serializer, response_deserializer\n )\n if isinstance(self._interceptor, grpcext.StreamClientInterceptor):\n return _InterceptorUnaryStreamMultiCallable(\n method, base_callable, self._interceptor\n )\n return base_callable\n\n def stream_unary(\n self, method, request_serializer=None, response_deserializer=None\n ):\n base_callable = self._channel.stream_unary(\n method, request_serializer, response_deserializer\n )\n if isinstance(self._interceptor, grpcext.StreamClientInterceptor):\n return _InterceptorStreamUnaryMultiCallable(\n method, base_callable, self._interceptor\n )\n return base_callable\n\n def stream_stream(\n self, method, request_serializer=None, response_deserializer=None\n ):\n base_callable = 
self._channel.stream_stream(\n method, request_serializer, response_deserializer\n )\n if isinstance(self._interceptor, grpcext.StreamClientInterceptor):\n return _InterceptorStreamStreamMultiCallable(\n method, base_callable, self._interceptor\n )\n return base_callable\n\n def close(self):\n if not hasattr(self._channel, \"close\"):\n raise RuntimeError(\n \"close() is not supported with the installed version of grpcio\"\n )\n self._channel.close()\n\n def __enter__(self):\n \"\"\"Enters the runtime context related to the channel object.\"\"\"\n raise NotImplementedError()\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n \"\"\"Exits the runtime context related to the channel object.\"\"\"\n raise NotImplementedError()\n\n\ndef intercept_channel(channel, *interceptors):\n result = channel\n for interceptor in interceptors:\n if not isinstance(\n interceptor, grpcext.UnaryClientInterceptor\n ) and not isinstance(interceptor, grpcext.StreamClientInterceptor):\n raise TypeError(\n \"interceptor must be either a \"\n \"grpcext.UnaryClientInterceptor or a \"\n \"grpcext.StreamClientInterceptor\"\n )\n result = _InterceptorChannel(result, interceptor)\n return result\n", "path": "instrumentation/opentelemetry-instrumentation-grpc/src/opentelemetry/instrumentation/grpc/grpcext/_interceptor.py"}]} | 3,750 | 208 |
gh_patches_debug_10020 | rasdani/github-patches | git_diff | onnx__sklearn-onnx-440 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
SelectFromModel converted model with threshold such that no features selected give error
SelectFromModel model in Scikit with threshold such that no features are selected returns an array of shape (M, 0), where M is the number of test instances; the onnx converter, however, can't handle it.
RuntimeError: [ONNXRuntimeError] : 1 : GENERAL ERROR : Load model from pca1.onnx failed:[ShapeInferenceError] Shape input must be a one-dimensional tensor.
</issue>
<code>
[start of skl2onnx/operator_converters/feature_selection.py]
1 # -------------------------------------------------------------------------
2 # Copyright (c) Microsoft Corporation. All rights reserved.
3 # Licensed under the MIT License. See License.txt in the project root for
4 # license information.
5 # --------------------------------------------------------------------------
6
7 from ..proto import onnx_proto
8 from ..common._apply_operation import apply_cast
9 from ..common._registration import register_converter
10 from ..common.data_types import FloatTensorType, FloatType
11
12
13 def convert_sklearn_feature_selection(scope, operator, container):
14 op = operator.raw_operator
15 # Get indices of the features selected
16 index = op.get_support(indices=True)
17 needs_cast = not isinstance(operator.inputs[0].type,
18 (FloatTensorType, FloatType))
19 if needs_cast:
20 output_name = scope.get_unique_variable_name('output')
21 else:
22 output_name = operator.outputs[0].full_name
23
24 if index.any():
25 column_indices_name = scope.get_unique_variable_name('column_indices')
26
27 container.add_initializer(column_indices_name,
28 onnx_proto.TensorProto.INT64,
29 [len(index)], index)
30
31 container.add_node(
32 'ArrayFeatureExtractor',
33 [operator.inputs[0].full_name, column_indices_name],
34 output_name, op_domain='ai.onnx.ml',
35 name=scope.get_unique_operator_name('ArrayFeatureExtractor'))
36 else:
37 container.add_node('ConstantOfShape', operator.inputs[0].full_name,
38 output_name, op_version=9)
39 if needs_cast:
40 apply_cast(scope, output_name, operator.outputs[0].full_name,
41 container, to=onnx_proto.TensorProto.FLOAT)
42
43
44 register_converter('SklearnGenericUnivariateSelect',
45 convert_sklearn_feature_selection)
46 register_converter('SklearnRFE', convert_sklearn_feature_selection)
47 register_converter('SklearnRFECV', convert_sklearn_feature_selection)
48 register_converter('SklearnSelectFdr', convert_sklearn_feature_selection)
49 register_converter('SklearnSelectFpr', convert_sklearn_feature_selection)
50 register_converter('SklearnSelectFromModel', convert_sklearn_feature_selection)
51 register_converter('SklearnSelectFwe', convert_sklearn_feature_selection)
52 register_converter('SklearnSelectKBest', convert_sklearn_feature_selection)
53 register_converter('SklearnSelectPercentile',
54 convert_sklearn_feature_selection)
55 register_converter('SklearnVarianceThreshold',
56 convert_sklearn_feature_selection)
57
[end of skl2onnx/operator_converters/feature_selection.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/skl2onnx/operator_converters/feature_selection.py b/skl2onnx/operator_converters/feature_selection.py
--- a/skl2onnx/operator_converters/feature_selection.py
+++ b/skl2onnx/operator_converters/feature_selection.py
@@ -14,6 +14,11 @@
op = operator.raw_operator
# Get indices of the features selected
index = op.get_support(indices=True)
+ if len(index) == 0:
+ raise RuntimeError(
+ "Model '{}' did not select any feature. "
+ "This model cannot be converted into ONNX."
+ "".format(op.__class__.__name__))
needs_cast = not isinstance(operator.inputs[0].type,
(FloatTensorType, FloatType))
if needs_cast:
| {"golden_diff": "diff --git a/skl2onnx/operator_converters/feature_selection.py b/skl2onnx/operator_converters/feature_selection.py\n--- a/skl2onnx/operator_converters/feature_selection.py\n+++ b/skl2onnx/operator_converters/feature_selection.py\n@@ -14,6 +14,11 @@\n op = operator.raw_operator\n # Get indices of the features selected\n index = op.get_support(indices=True)\n+ if len(index) == 0:\n+ raise RuntimeError(\n+ \"Model '{}' did not select any feature. \"\n+ \"This model cannot be converted into ONNX.\"\n+ \"\".format(op.__class__.__name__))\n needs_cast = not isinstance(operator.inputs[0].type,\n (FloatTensorType, FloatType))\n if needs_cast:\n", "issue": "SelectFromModel converted model with threshold such that no features selected give error\nSelectFromModel model in Scikit with threshold such that no features are selected returns an array of shape (M, 0), where M is the number of test instances, onnx converter however can't handle it.\r\n\r\nRuntimeError: [ONNXRuntimeError] : 1 : GENERAL ERROR : Load model from pca1.onnx failed:[ShapeInferenceError] Shape input must be a one-dimensional tensor. \n", "before_files": [{"content": "# -------------------------------------------------------------------------\n# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See License.txt in the project root for\n# license information.\n# --------------------------------------------------------------------------\n\nfrom ..proto import onnx_proto\nfrom ..common._apply_operation import apply_cast\nfrom ..common._registration import register_converter\nfrom ..common.data_types import FloatTensorType, FloatType\n\n\ndef convert_sklearn_feature_selection(scope, operator, container):\n op = operator.raw_operator\n # Get indices of the features selected\n index = op.get_support(indices=True)\n needs_cast = not isinstance(operator.inputs[0].type,\n (FloatTensorType, FloatType))\n if needs_cast:\n output_name = scope.get_unique_variable_name('output')\n else:\n output_name = operator.outputs[0].full_name\n\n if index.any():\n column_indices_name = scope.get_unique_variable_name('column_indices')\n\n container.add_initializer(column_indices_name,\n onnx_proto.TensorProto.INT64,\n [len(index)], index)\n\n container.add_node(\n 'ArrayFeatureExtractor',\n [operator.inputs[0].full_name, column_indices_name],\n output_name, op_domain='ai.onnx.ml',\n name=scope.get_unique_operator_name('ArrayFeatureExtractor'))\n else:\n container.add_node('ConstantOfShape', operator.inputs[0].full_name,\n output_name, op_version=9)\n if needs_cast:\n apply_cast(scope, output_name, operator.outputs[0].full_name,\n container, to=onnx_proto.TensorProto.FLOAT)\n\n\nregister_converter('SklearnGenericUnivariateSelect',\n convert_sklearn_feature_selection)\nregister_converter('SklearnRFE', convert_sklearn_feature_selection)\nregister_converter('SklearnRFECV', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFdr', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFpr', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFromModel', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectFwe', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectKBest', convert_sklearn_feature_selection)\nregister_converter('SklearnSelectPercentile',\n convert_sklearn_feature_selection)\nregister_converter('SklearnVarianceThreshold',\n convert_sklearn_feature_selection)\n", "path": 
"skl2onnx/operator_converters/feature_selection.py"}]} | 1,235 | 174 |
gh_patches_debug_42030 | rasdani/github-patches | git_diff | ManageIQ__integration_tests-3352 | You will be provided with a partial code base and an issue statement explaining a problem to resolve.
<issue>
Check if we have fallback if no trackerbot URL specified
We had some issues that people with no trackerbot URL in config were not able to run tests:
- Investigate what is happening
- Add fallback (do not check templates at all? Load them from all providers?)
</issue>
<code>
[start of fixtures/provider.py]
1 """``setup_provider`` fixture
2
3 In test modules paramatrized with :py:func:`utils.testgen.provider_by_type` (should be
4 just about any module that needs a provider to run its tests), this fixture will set up
5 the single provider needed to run that test.
6
7 If the provider setup fails, this fixture will record that failure and skip future tests
8 using the provider.
9
10 """
11 import pytest
12
13 from fixtures.artifactor_plugin import art_client, get_test_idents
14 from fixtures.templateloader import TEMPLATES
15 from utils import providers
16 from utils.log import logger
17
18 # failed provider tracking for _setup_provider_fixture
19 _failed_providers = set()
20
21
22 def _setup_provider(provider_key, request=None):
23 def skip(provider_key, previous_fail=False):
24 if request:
25 node = request.node
26 name, location = get_test_idents(node)
27 skip_data = {'type': 'provider', 'reason': provider_key}
28 art_client.fire_hook('skip_test', test_location=location, test_name=name,
29 skip_data=skip_data)
30 if previous_fail:
31 raise pytest.skip('Provider {} failed to set up previously in another test, '
32 'skipping test'.format(provider_key))
33 else:
34 raise pytest.skip('Provider {} failed to set up this time, '
35 'skipping test'.format(provider_key))
36 # This function is dynamically "fixturized" to setup up a specific provider,
37 # optionally skipping the provider setup if that provider has previously failed.
38 if provider_key in _failed_providers:
39 skip(provider_key, previous_fail=True)
40
41 try:
42 providers.setup_provider(provider_key)
43 except Exception as ex:
44 logger.error('Error setting up provider {}'.format(provider_key))
45 logger.exception(ex)
46 _failed_providers.add(provider_key)
47 skip(provider_key)
48
49
50 @pytest.fixture(scope='function')
51 def setup_provider(request, provider):
52 """Function-scoped fixture to set up a provider"""
53 _setup_provider(provider.key, request)
54
55
56 @pytest.fixture(scope='module')
57 def setup_provider_modscope(request, provider):
58 """Function-scoped fixture to set up a provider"""
59 _setup_provider(provider.key, request)
60
61
62 @pytest.fixture(scope='class')
63 def setup_provider_clsscope(request, provider):
64 """Module-scoped fixture to set up a provider"""
65 _setup_provider(provider.key, request)
66
67
68 @pytest.fixture
69 def setup_provider_funcscope(request, provider):
70 """Function-scoped fixture to set up a provider
71
72 Note:
73
74 While there are cases where this is useful, provider fixtures should
75 be module-scoped the majority of the time.
76
77 """
78 _setup_provider(provider.key, request)
79
80
81 @pytest.fixture(scope="session")
82 def any_provider_session():
83 providers.clear_providers() # To make it clean
84 providers.setup_a_provider(validate=True, check_existing=True)
85
86
87 @pytest.fixture(scope="function")
88 def template(template_location, provider):
89 if template_location is not None:
90 o = provider.data
91 try:
92 for field in template_location:
93 o = o[field]
94 except (IndexError, KeyError):
95 logger.info("Cannot apply %s to %s in the template specification, ignoring.",
96 repr(field), repr(o))
97 else:
98 if not isinstance(o, basestring):
99 raise ValueError("{} is not a string! (for template)".format(repr(o)))
100 templates = TEMPLATES.get(provider.key, None)
101 if templates is not None:
102 if o in templates:
103 return o
104 logger.info(
105 "Wanted template {} on {} but it is not there!\n".format(o, provider.key))
106 pytest.skip('Template not available')
107
108
109 def _small_template(provider):
110 template = provider.data.get('small_template', None)
111 if template:
112 templates = TEMPLATES.get(provider.key, None)
113 if templates is not None:
114 if template in templates:
115 return template
116 logger.info(
117 "Wanted template {} on {} but it is not there!\n".format(template, provider.key))
118 pytest.skip('Template not available')
119
120
121 @pytest.fixture(scope="function")
122 def small_template(provider):
123 return _small_template(provider)
124
125
126 @pytest.fixture(scope="module")
127 def small_template_modscope(provider):
128 return _small_template(provider)
129
130
131 @pytest.fixture(scope="function")
132 def full_template(provider):
133 template = provider.data.get('full_template', {})
134 if template:
135 templates = TEMPLATES.get(provider.key, None)
136 if templates is not None:
137 if template['name'] in templates:
138 return template
139 logger.info(
140 "Wanted template {} on {} but it is not there!\n".format(template, provider.key))
141 pytest.skip('Template not available')
142
143
144 @pytest.fixture(scope="function")
145 def provisioning(provider):
146 return provider.data['provisioning']
147
[end of fixtures/provider.py]
</code>
I need you to solve this issue by generating a single patch file that I can apply directly to this repository using git apply. Please respond with a single patch file in the following format.
<patch>
diff --git a/file.py b/file.py
--- a/file.py
+++ b/file.py
@@ -1,27 +1,35 @@
def euclidean(a, b):
- while b:
- a, b = b, a % b
- return a
+ if b == 0:
+ return a
+ return euclidean(b, a % b)
def bresenham(x0, y0, x1, y1):
points = []
dx = abs(x1 - x0)
dy = abs(y1 - y0)
- sx = 1 if x0 < x1 else -1
- sy = 1 if y0 < y1 else -1
- err = dx - dy
+ x, y = x0, y0
+ sx = -1 if x0 > x1 else 1
+ sy = -1 if y0 > y1 else 1
- while True:
- points.append((x0, y0))
- if x0 == x1 and y0 == y1:
- break
- e2 = 2 * err
- if e2 > -dy:
- err -= dy
- x0 += sx
- if e2 < dx:
- err += dx
- y0 += sy
+ if dx > dy:
+ err = dx / 2.0
+ while x != x1:
+ points.append((x, y))
+ err -= dy
+ if err < 0:
+ y += sy
+ err += dx
+ x += sx
+ else:
+ err = dy / 2.0
+ while y != y1:
+ points.append((x, y))
+ err -= dx
+ if err < 0:
+ x += sx
+ err += dy
+ y += sy
+
+ points.append((x, y))
return points
</patch> | diff --git a/fixtures/provider.py b/fixtures/provider.py
--- a/fixtures/provider.py
+++ b/fixtures/provider.py
@@ -9,6 +9,7 @@
"""
import pytest
+import six
from fixtures.artifactor_plugin import art_client, get_test_idents
from fixtures.templateloader import TEMPLATES
@@ -41,7 +42,7 @@
try:
providers.setup_provider(provider_key)
except Exception as ex:
- logger.error('Error setting up provider {}'.format(provider_key))
+ logger.error('Error setting up provider %s', provider_key)
logger.exception(ex)
_failed_providers.add(provider_key)
skip(provider_key)
@@ -92,29 +93,35 @@
for field in template_location:
o = o[field]
except (IndexError, KeyError):
- logger.info("Cannot apply %s to %s in the template specification, ignoring.",
- repr(field), repr(o))
+ logger.info("Cannot apply %r to %r in the template specification, ignoring.", field, o)
else:
- if not isinstance(o, basestring):
- raise ValueError("{} is not a string! (for template)".format(repr(o)))
+ if not isinstance(o, six.string_types):
+ raise ValueError("{!r} is not a string! (for template)".format(o))
+ if not TEMPLATES:
+ # There is nothing in TEMPLATES, that means no trackerbot URL and no data pulled.
+ # This should normally not constitute an issue so continue.
+ return o
templates = TEMPLATES.get(provider.key, None)
if templates is not None:
if o in templates:
return o
- logger.info(
- "Wanted template {} on {} but it is not there!\n".format(o, provider.key))
+ logger.info("Wanted template %s on %s but it is not there!", o, provider.key)
pytest.skip('Template not available')
def _small_template(provider):
template = provider.data.get('small_template', None)
if template:
+ if not TEMPLATES:
+ # Same as couple of lines above
+ return template
templates = TEMPLATES.get(provider.key, None)
if templates is not None:
if template in templates:
return template
- logger.info(
- "Wanted template {} on {} but it is not there!\n".format(template, provider.key))
+ else:
+ pytest.skip('No small_template for provider {}'.format(provider.key))
+ logger.info("Wanted template %s on %s but it is not there!", template, provider.key)
pytest.skip('Template not available')
@@ -132,12 +139,16 @@
def full_template(provider):
template = provider.data.get('full_template', {})
if template:
+ if not TEMPLATES:
+ # Same as couple of lines above
+ return template
templates = TEMPLATES.get(provider.key, None)
if templates is not None:
if template['name'] in templates:
return template
- logger.info(
- "Wanted template {} on {} but it is not there!\n".format(template, provider.key))
+ else:
+ pytest.skip('No full_template for provider {}'.format(provider.key))
+ logger.info("Wanted template %s on %s but it is not there!", template, provider.key)
pytest.skip('Template not available')
| {"golden_diff": "diff --git a/fixtures/provider.py b/fixtures/provider.py\n--- a/fixtures/provider.py\n+++ b/fixtures/provider.py\n@@ -9,6 +9,7 @@\n \n \"\"\"\n import pytest\n+import six\n \n from fixtures.artifactor_plugin import art_client, get_test_idents\n from fixtures.templateloader import TEMPLATES\n@@ -41,7 +42,7 @@\n try:\n providers.setup_provider(provider_key)\n except Exception as ex:\n- logger.error('Error setting up provider {}'.format(provider_key))\n+ logger.error('Error setting up provider %s', provider_key)\n logger.exception(ex)\n _failed_providers.add(provider_key)\n skip(provider_key)\n@@ -92,29 +93,35 @@\n for field in template_location:\n o = o[field]\n except (IndexError, KeyError):\n- logger.info(\"Cannot apply %s to %s in the template specification, ignoring.\",\n- repr(field), repr(o))\n+ logger.info(\"Cannot apply %r to %r in the template specification, ignoring.\", field, o)\n else:\n- if not isinstance(o, basestring):\n- raise ValueError(\"{} is not a string! (for template)\".format(repr(o)))\n+ if not isinstance(o, six.string_types):\n+ raise ValueError(\"{!r} is not a string! (for template)\".format(o))\n+ if not TEMPLATES:\n+ # There is nothing in TEMPLATES, that means no trackerbot URL and no data pulled.\n+ # This should normally not constitute an issue so continue.\n+ return o\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if o in templates:\n return o\n- logger.info(\n- \"Wanted template {} on {} but it is not there!\\n\".format(o, provider.key))\n+ logger.info(\"Wanted template %s on %s but it is not there!\", o, provider.key)\n pytest.skip('Template not available')\n \n \n def _small_template(provider):\n template = provider.data.get('small_template', None)\n if template:\n+ if not TEMPLATES:\n+ # Same as couple of lines above\n+ return template\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template in templates:\n return template\n- logger.info(\n- \"Wanted template {} on {} but it is not there!\\n\".format(template, provider.key))\n+ else:\n+ pytest.skip('No small_template for provider {}'.format(provider.key))\n+ logger.info(\"Wanted template %s on %s but it is not there!\", template, provider.key)\n pytest.skip('Template not available')\n \n \n@@ -132,12 +139,16 @@\n def full_template(provider):\n template = provider.data.get('full_template', {})\n if template:\n+ if not TEMPLATES:\n+ # Same as couple of lines above\n+ return template\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template['name'] in templates:\n return template\n- logger.info(\n- \"Wanted template {} on {} but it is not there!\\n\".format(template, provider.key))\n+ else:\n+ pytest.skip('No full_template for provider {}'.format(provider.key))\n+ logger.info(\"Wanted template %s on %s but it is not there!\", template, provider.key)\n pytest.skip('Template not available')\n", "issue": "Check if we have fallback if no trackerbot URL specified\nWe had some issues htat people with no trackerbot URL in config were not able to run test:\n- Investigate what is happening\n- Add fallback (do not check templates at all? 
Load them from all providers?)\n\n", "before_files": [{"content": "\"\"\"``setup_provider`` fixture\n\nIn test modules paramatrized with :py:func:`utils.testgen.provider_by_type` (should be\njust about any module that needs a provider to run its tests), this fixture will set up\nthe single provider needed to run that test.\n\nIf the provider setup fails, this fixture will record that failure and skip future tests\nusing the provider.\n\n\"\"\"\nimport pytest\n\nfrom fixtures.artifactor_plugin import art_client, get_test_idents\nfrom fixtures.templateloader import TEMPLATES\nfrom utils import providers\nfrom utils.log import logger\n\n# failed provider tracking for _setup_provider_fixture\n_failed_providers = set()\n\n\ndef _setup_provider(provider_key, request=None):\n def skip(provider_key, previous_fail=False):\n if request:\n node = request.node\n name, location = get_test_idents(node)\n skip_data = {'type': 'provider', 'reason': provider_key}\n art_client.fire_hook('skip_test', test_location=location, test_name=name,\n skip_data=skip_data)\n if previous_fail:\n raise pytest.skip('Provider {} failed to set up previously in another test, '\n 'skipping test'.format(provider_key))\n else:\n raise pytest.skip('Provider {} failed to set up this time, '\n 'skipping test'.format(provider_key))\n # This function is dynamically \"fixturized\" to setup up a specific provider,\n # optionally skipping the provider setup if that provider has previously failed.\n if provider_key in _failed_providers:\n skip(provider_key, previous_fail=True)\n\n try:\n providers.setup_provider(provider_key)\n except Exception as ex:\n logger.error('Error setting up provider {}'.format(provider_key))\n logger.exception(ex)\n _failed_providers.add(provider_key)\n skip(provider_key)\n\n\[email protected](scope='function')\ndef setup_provider(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected](scope='module')\ndef setup_provider_modscope(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected](scope='class')\ndef setup_provider_clsscope(request, provider):\n \"\"\"Module-scoped fixture to set up a provider\"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected]\ndef setup_provider_funcscope(request, provider):\n \"\"\"Function-scoped fixture to set up a provider\n\n Note:\n\n While there are cases where this is useful, provider fixtures should\n be module-scoped the majority of the time.\n\n \"\"\"\n _setup_provider(provider.key, request)\n\n\[email protected](scope=\"session\")\ndef any_provider_session():\n providers.clear_providers() # To make it clean\n providers.setup_a_provider(validate=True, check_existing=True)\n\n\[email protected](scope=\"function\")\ndef template(template_location, provider):\n if template_location is not None:\n o = provider.data\n try:\n for field in template_location:\n o = o[field]\n except (IndexError, KeyError):\n logger.info(\"Cannot apply %s to %s in the template specification, ignoring.\",\n repr(field), repr(o))\n else:\n if not isinstance(o, basestring):\n raise ValueError(\"{} is not a string! 
(for template)\".format(repr(o)))\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if o in templates:\n return o\n logger.info(\n \"Wanted template {} on {} but it is not there!\\n\".format(o, provider.key))\n pytest.skip('Template not available')\n\n\ndef _small_template(provider):\n template = provider.data.get('small_template', None)\n if template:\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template in templates:\n return template\n logger.info(\n \"Wanted template {} on {} but it is not there!\\n\".format(template, provider.key))\n pytest.skip('Template not available')\n\n\[email protected](scope=\"function\")\ndef small_template(provider):\n return _small_template(provider)\n\n\[email protected](scope=\"module\")\ndef small_template_modscope(provider):\n return _small_template(provider)\n\n\[email protected](scope=\"function\")\ndef full_template(provider):\n template = provider.data.get('full_template', {})\n if template:\n templates = TEMPLATES.get(provider.key, None)\n if templates is not None:\n if template['name'] in templates:\n return template\n logger.info(\n \"Wanted template {} on {} but it is not there!\\n\".format(template, provider.key))\n pytest.skip('Template not available')\n\n\[email protected](scope=\"function\")\ndef provisioning(provider):\n return provider.data['provisioning']\n", "path": "fixtures/provider.py"}]} | 1,953 | 756 |