| Column | Type | Value profile |
| --- | --- | --- |
| problem_id | string | lengths 18-21 |
| source | string | 1 distinct value |
| task_type | string | 1 distinct value |
| in_source_id | string | lengths 13-54 |
| prompt | string | lengths 1.28k-64.2k |
| golden_diff | string | lengths 166-811 |
| verification_info | string | lengths 604-118k |
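Each record below lists these fields in that order, separated by pipes: the problem id, the source dataset, the task type, the upstream issue id, the full prompt given to the model, the reference (golden) diff, and a JSON verification blob. As a minimal, hypothetical sketch of how such records could be loaded and inspected with the Hugging Face `datasets` library; the repository id is taken from the `source` column of the records, while the split name is an assumption:

```python
import json

from datasets import load_dataset

# Repository id comes from the "source" column of the records below;
# the split name "train" is an assumption, not something this page confirms.
ds = load_dataset("rasdani/github-patches", split="train")

record = ds[0]
print(record["problem_id"])    # e.g. "gh_patches_debug_100"
print(record["in_source_id"])  # e.g. "feast-dev__feast-3501"
print(record["golden_diff"])   # reference patch in git diff format

# verification_info is a JSON string holding the issue text plus the
# before/after contents of the touched files.
info = json.loads(record["verification_info"])
print(sorted(info.keys()))     # e.g. ['after_files', 'before_files', 'golden_diff', 'issue']
```

The two records reproduced on this page (feast-dev__feast-3501 and CiviWiki__OpenCiviWiki-1042) follow exactly this layout.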
gh_patches_debug_100 | rasdani/github-patches | git_diff | feast-dev__feast-3501 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Greate Expectations version a bit outdated
## Expected Behavior
GX version works well with >=0.15.41
## Current Behavior
It got pinned between 0.14 and 0.15
### Specifications
- Version: 0.29.0
## Possible Solution
Bump GX version
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright 2019 The Feast Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # https://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 import copy
15 import glob
16 import json
17 import os
18 import pathlib
19 import re
20 import shutil
21 import subprocess
22 import sys
23 from distutils.cmd import Command
24 from distutils.dir_util import copy_tree
25 from pathlib import Path
26 from subprocess import CalledProcessError
27
28 from setuptools import Extension, find_packages
29
30 try:
31 from setuptools import setup
32 from setuptools.command.build_ext import build_ext as _build_ext
33 from setuptools.command.build_py import build_py
34 from setuptools.command.develop import develop
35 from setuptools.command.install import install
36
37 except ImportError:
38 from distutils.command.build_ext import build_ext as _build_ext
39 from distutils.command.build_py import build_py
40 from distutils.core import setup
41
42 NAME = "feast"
43 DESCRIPTION = "Python SDK for Feast"
44 URL = "https://github.com/feast-dev/feast"
45 AUTHOR = "Feast"
46 REQUIRES_PYTHON = ">=3.8.0"
47
48 REQUIRED = [
49 "click>=7.0.0,<9.0.0",
50 "colorama>=0.3.9,<1",
51 "dill~=0.3.0",
52 "fastavro>=1.1.0,<2",
53 "grpcio>=1.47.0,<2",
54 "grpcio-reflection>=1.47.0,<2",
55 "Jinja2>=2,<4",
56 "jsonschema",
57 "mmh3",
58 "numpy>=1.22,<3",
59 "pandas>=1.4.3,<2",
60 "pandavro~=1.5.0", # For some reason pandavro higher than 1.5.* only support pandas less than 1.3.
61 "protobuf<5,>3.20",
62 "proto-plus>=1.20.0,<2",
63 "pyarrow>=4,<9",
64 "pydantic>=1,<2",
65 "pygments>=2.12.0,<3",
66 "PyYAML>=5.4.0,<7",
67 "requests",
68 "SQLAlchemy[mypy]>1,<2",
69 "tabulate>=0.8.0,<1",
70 "tenacity>=7,<9",
71 "toml>=0.10.0,<1",
72 "tqdm>=4,<5",
73 "typeguard",
74 "fastapi>=0.68.0,<1",
75 "uvicorn[standard]>=0.14.0,<1",
76 "dask>=2021.1.0",
77 "bowler", # Needed for automatic repo upgrades
78 "httpx>=0.23.3", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656).
79 ]
80
81 GCP_REQUIRED = [
82 "google-api-core>=1.23.0,<3",
83 "googleapis-common-protos>=1.52.0,<2",
84 "google-cloud-bigquery[pandas]>=2,<4",
85 "google-cloud-bigquery-storage >= 2.0.0,<3",
86 "google-cloud-datastore>=2.1.0,<3",
87 "google-cloud-storage>=1.34.0,<3",
88 "google-cloud-bigtable>=2.11.0,<3",
89 ]
90
91 REDIS_REQUIRED = [
92 "redis==4.2.2",
93 "hiredis>=2.0.0,<3",
94 ]
95
96 AWS_REQUIRED = ["boto3>=1.17.0,<=1.20.23", "docker>=5.0.2", "s3fs>=0.4.0,<=2022.01.0"]
97
98 BYTEWAX_REQUIRED = ["bytewax==0.13.1", "docker>=5.0.2", "kubernetes<=20.13.0"]
99
100 SNOWFLAKE_REQUIRED = [
101 "snowflake-connector-python[pandas]>=2.7.3,<3",
102 # `pyOpenSSL==22.1.0` requires `cryptography<39,>=38.0.0`, which is incompatible
103 # with `snowflake-connector-python[pandas]==2.8.0`, which depends on
104 # `cryptography<37.0.0,>=3.1.0`.
105 "pyOpenSSL<22.1.0",
106 ]
107
108 SPARK_REQUIRED = [
109 "pyspark>=3.0.0,<4",
110 ]
111
112 TRINO_REQUIRED = [
113 "trino>=0.305.0,<0.400.0", "regex"
114 ]
115
116 POSTGRES_REQUIRED = [
117 "psycopg2-binary>=2.8.3,<3",
118 ]
119
120 MYSQL_REQUIRED = ["mysqlclient", "pymysql", "types-PyMySQL"]
121
122 HBASE_REQUIRED = [
123 "happybase>=1.2.0,<3",
124 ]
125
126 CASSANDRA_REQUIRED = [
127 "cassandra-driver>=3.24.0,<4",
128 ]
129
130 GE_REQUIRED = ["great_expectations>=0.14.0,<0.15.0"]
131
132 GO_REQUIRED = [
133 "cffi~=1.15.0",
134 ]
135
136 AZURE_REQUIRED = [
137 "azure-storage-blob>=0.37.0",
138 "azure-identity>=1.6.1",
139 "SQLAlchemy>=1.4.19",
140 "pyodbc>=4.0.30",
141 "pymssql",
142 ]
143
144 ROCKSET_REQUIRED = [
145 "rockset>=1.0.3",
146 ]
147
148 CI_REQUIRED = (
149 [
150 "build",
151 "cryptography>=35.0,<36",
152 "flake8",
153 "black>=22.6.0,<23",
154 "isort>=5,<6",
155 "grpcio-tools>=1.47.0",
156 "grpcio-testing>=1.47.0",
157 "minio==7.1.0",
158 "mock==2.0.0",
159 "moto<4",
160 "mypy>=0.981,<0.990",
161 "mypy-protobuf==3.1",
162 "avro==1.10.0",
163 "gcsfs>=0.4.0,<=2022.01.0",
164 "urllib3>=1.25.4,<2",
165 "psutil==5.9.0",
166 "py>=1.11.0", # https://github.com/pytest-dev/pytest/issues/10420
167 "pytest>=6.0.0,<8",
168 "pytest-cov",
169 "pytest-xdist",
170 "pytest-benchmark>=3.4.1,<4",
171 "pytest-lazy-fixture==0.6.3",
172 "pytest-timeout==1.4.2",
173 "pytest-ordering~=0.6.0",
174 "pytest-mock==1.10.4",
175 "Sphinx>4.0.0,<7",
176 "testcontainers>=3.5,<4",
177 "adlfs==0.5.9",
178 "firebase-admin>=5.2.0,<6",
179 "pre-commit",
180 "assertpy==1.1",
181 "pip-tools",
182 "pybindgen",
183 "types-protobuf~=3.19.22",
184 "types-python-dateutil",
185 "types-pytz",
186 "types-PyYAML",
187 "types-redis",
188 "types-requests",
189 "types-setuptools",
190 "types-tabulate",
191 ]
192 + GCP_REQUIRED
193 + REDIS_REQUIRED
194 + AWS_REQUIRED
195 + BYTEWAX_REQUIRED
196 + SNOWFLAKE_REQUIRED
197 + SPARK_REQUIRED
198 + POSTGRES_REQUIRED
199 + MYSQL_REQUIRED
200 + TRINO_REQUIRED
201 + GE_REQUIRED
202 + HBASE_REQUIRED
203 + CASSANDRA_REQUIRED
204 + AZURE_REQUIRED
205 + ROCKSET_REQUIRED
206 )
207
208
209 # rtd builds fail because of mysql not being installed in their environment.
210 # We can add mysql there, but it's not strictly needed. This will be faster for builds.
211 DOCS_REQUIRED = CI_REQUIRED.copy()
212 for _r in MYSQL_REQUIRED:
213 DOCS_REQUIRED.remove(_r)
214
215 DEV_REQUIRED = ["mypy-protobuf==3.1", "grpcio-testing~=1.0"] + CI_REQUIRED
216
217 # Get git repo root directory
218 repo_root = str(pathlib.Path(__file__).resolve().parent)
219
220 # README file from Feast repo root directory
221 README_FILE = os.path.join(repo_root, "README.md")
222 with open(README_FILE, "r", encoding="utf8") as f:
223 LONG_DESCRIPTION = f.read()
224
225 # Add Support for parsing tags that have a prefix containing '/' (ie 'sdk/go') to setuptools_scm.
226 # Regex modified from default tag regex in:
227 # https://github.com/pypa/setuptools_scm/blob/2a1b46d38fb2b8aeac09853e660bcd0d7c1bc7be/src/setuptools_scm/config.py#L9
228 TAG_REGEX = re.compile(
229 r"^(?:[\/\w-]+)?(?P<version>[vV]?\d+(?:\.\d+){0,2}[^\+]*)(?:\+.*)?$"
230 )
231
232 # Only set use_scm_version if git executable exists (setting this variable causes pip to use git under the hood)
233 if shutil.which("git"):
234 use_scm_version = {"root": ".", "relative_to": __file__, "tag_regex": TAG_REGEX}
235 else:
236 use_scm_version = None
237
238 PROTO_SUBDIRS = ["core", "serving", "types", "storage"]
239 PYTHON_CODE_PREFIX = "sdk/python"
240
241
242 class BuildPythonProtosCommand(Command):
243 description = "Builds the proto files into Python files."
244 user_options = [
245 ("inplace", "i", "Write generated proto files to source directory."),
246 ]
247
248 def initialize_options(self):
249 self.python_protoc = [
250 sys.executable,
251 "-m",
252 "grpc_tools.protoc",
253 ] # find_executable("protoc")
254 self.proto_folder = os.path.join(repo_root, "protos")
255 self.sub_folders = PROTO_SUBDIRS
256 self.build_lib = None
257 self.inplace = 0
258
259 def finalize_options(self):
260 self.set_undefined_options("build", ("build_lib", "build_lib"))
261
262 @property
263 def python_folder(self):
264 if self.inplace:
265 return os.path.join(
266 os.path.dirname(__file__) or os.getcwd(), "sdk/python/feast/protos"
267 )
268
269 return os.path.join(self.build_lib, "feast/protos")
270
271 def _generate_python_protos(self, path: str):
272 proto_files = glob.glob(os.path.join(self.proto_folder, path))
273 Path(self.python_folder).mkdir(parents=True, exist_ok=True)
274 subprocess.check_call(
275 self.python_protoc
276 + [
277 "-I",
278 self.proto_folder,
279 "--python_out",
280 self.python_folder,
281 "--grpc_python_out",
282 self.python_folder,
283 "--mypy_out",
284 self.python_folder,
285 ]
286 + proto_files
287 )
288
289 def run(self):
290 for sub_folder in self.sub_folders:
291 self._generate_python_protos(f"feast/{sub_folder}/*.proto")
292 # We need the __init__ files for each of the generated subdirs
293 # so that they are regular packages, and don't need the `--namespace-packages` flags
294 # when being typechecked using mypy.
295 with open(f"{self.python_folder}/feast/{sub_folder}/__init__.py", "w"):
296 pass
297
298 with open(f"{self.python_folder}/__init__.py", "w"):
299 pass
300 with open(f"{self.python_folder}/feast/__init__.py", "w"):
301 pass
302
303 for path in Path(self.python_folder).rglob("*.py"):
304 for folder in self.sub_folders:
305 # Read in the file
306 with open(path, "r") as file:
307 filedata = file.read()
308
309 # Replace the target string
310 filedata = filedata.replace(
311 f"from feast.{folder}", f"from feast.protos.feast.{folder}"
312 )
313
314 # Write the file out again
315 with open(path, "w") as file:
316 file.write(filedata)
317
318
319 def _generate_path_with_gopath():
320 go_path = subprocess.check_output(["go", "env", "GOPATH"]).decode("utf-8")
321 go_path = go_path.strip()
322 path_val = os.getenv("PATH")
323 path_val = f"{path_val}:{go_path}/bin"
324
325 return path_val
326
327
328 def _ensure_go_and_proto_toolchain():
329 try:
330 version = subprocess.check_output(["go", "version"])
331 except Exception as e:
332 raise RuntimeError("Unable to find go toolchain") from e
333
334 semver_string = re.search(r"go[\S]+", str(version)).group().lstrip("go")
335 parts = semver_string.split(".")
336 if not (int(parts[0]) >= 1 and int(parts[1]) >= 16):
337 raise RuntimeError(f"Go compiler too old; expected 1.16+ found {semver_string}")
338
339 path_val = _generate_path_with_gopath()
340
341 try:
342 subprocess.check_call(["protoc-gen-go", "--version"], env={"PATH": path_val})
343 subprocess.check_call(
344 ["protoc-gen-go-grpc", "--version"], env={"PATH": path_val}
345 )
346 except Exception as e:
347 raise RuntimeError("Unable to find go/grpc extensions for protoc") from e
348
349
350 class BuildGoProtosCommand(Command):
351 description = "Builds the proto files into Go files."
352 user_options = []
353
354 def initialize_options(self):
355 self.go_protoc = [
356 sys.executable,
357 "-m",
358 "grpc_tools.protoc",
359 ] # find_executable("protoc")
360 self.proto_folder = os.path.join(repo_root, "protos")
361 self.go_folder = os.path.join(repo_root, "go/protos")
362 self.sub_folders = PROTO_SUBDIRS
363 self.path_val = _generate_path_with_gopath()
364
365 def finalize_options(self):
366 pass
367
368 def _generate_go_protos(self, path: str):
369 proto_files = glob.glob(os.path.join(self.proto_folder, path))
370
371 try:
372 subprocess.check_call(
373 self.go_protoc
374 + [
375 "-I",
376 self.proto_folder,
377 "--go_out",
378 self.go_folder,
379 "--go_opt=module=github.com/feast-dev/feast/go/protos",
380 "--go-grpc_out",
381 self.go_folder,
382 "--go-grpc_opt=module=github.com/feast-dev/feast/go/protos",
383 ]
384 + proto_files,
385 env={"PATH": self.path_val},
386 )
387 except CalledProcessError as e:
388 print(f"Stderr: {e.stderr}")
389 print(f"Stdout: {e.stdout}")
390
391 def run(self):
392 go_dir = Path(repo_root) / "go" / "protos"
393 go_dir.mkdir(exist_ok=True)
394 for sub_folder in self.sub_folders:
395 self._generate_go_protos(f"feast/{sub_folder}/*.proto")
396
397
398 class BuildCommand(build_py):
399 """Custom build command."""
400
401 def run(self):
402 self.run_command("build_python_protos")
403 if os.getenv("COMPILE_GO", "false").lower() == "true":
404 _ensure_go_and_proto_toolchain()
405 self.run_command("build_go_protos")
406
407 self.run_command("build_ext")
408 build_py.run(self)
409
410
411 class DevelopCommand(develop):
412 """Custom develop command."""
413
414 def run(self):
415 self.reinitialize_command("build_python_protos", inplace=1)
416 self.run_command("build_python_protos")
417 if os.getenv("COMPILE_GO", "false").lower() == "true":
418 _ensure_go_and_proto_toolchain()
419 self.run_command("build_go_protos")
420
421 develop.run(self)
422
423
424 class build_ext(_build_ext):
425 def finalize_options(self) -> None:
426 super().finalize_options()
427 if os.getenv("COMPILE_GO", "false").lower() == "false":
428 self.extensions = [e for e in self.extensions if not self._is_go_ext(e)]
429
430 def _is_go_ext(self, ext: Extension):
431 return any(
432 source.endswith(".go") or source.startswith("github")
433 for source in ext.sources
434 )
435
436 def build_extension(self, ext: Extension):
437 print(f"Building extension {ext}")
438 if not self._is_go_ext(ext):
439 # the base class may mutate `self.compiler`
440 compiler = copy.deepcopy(self.compiler)
441 self.compiler, compiler = compiler, self.compiler
442 try:
443 return _build_ext.build_extension(self, ext)
444 finally:
445 self.compiler, compiler = compiler, self.compiler
446
447 bin_path = _generate_path_with_gopath()
448 go_env = json.loads(
449 subprocess.check_output(["go", "env", "-json"]).decode("utf-8").strip()
450 )
451
452 print(f"Go env: {go_env}")
453 print(f"CWD: {os.getcwd()}")
454
455 destination = os.path.dirname(os.path.abspath(self.get_ext_fullpath(ext.name)))
456 subprocess.check_call(
457 ["go", "install", "golang.org/x/tools/cmd/goimports"],
458 env={"PATH": bin_path, **go_env},
459 )
460 subprocess.check_call(
461 ["go", "get", "github.com/go-python/[email protected]"],
462 env={"PATH": bin_path, **go_env},
463 )
464 subprocess.check_call(
465 ["go", "install", "github.com/go-python/gopy"],
466 env={"PATH": bin_path, **go_env},
467 )
468 subprocess.check_call(
469 [
470 "gopy",
471 "build",
472 "-output",
473 destination,
474 "-vm",
475 sys.executable,
476 "--build-tags",
477 "cgo,ccalloc",
478 "--dynamic-link=True",
479 "-no-make",
480 *ext.sources,
481 ],
482 env={
483 "PATH": bin_path,
484 "CGO_LDFLAGS_ALLOW": ".*",
485 **go_env,
486 },
487 )
488
489 def copy_extensions_to_source(self):
490 build_py = self.get_finalized_command("build_py")
491 for ext in self.extensions:
492 fullname = self.get_ext_fullname(ext.name)
493 modpath = fullname.split(".")
494 package = ".".join(modpath[:-1])
495 package_dir = build_py.get_package_dir(package)
496
497 src_dir = dest_dir = package_dir
498
499 if src_dir.startswith(PYTHON_CODE_PREFIX):
500 src_dir = package_dir[len(PYTHON_CODE_PREFIX) :]
501 src_dir = src_dir.lstrip("/")
502
503 src_dir = os.path.join(self.build_lib, src_dir)
504
505 # copy whole directory
506 print(f"Copying from {src_dir} to {dest_dir}")
507 copy_tree(src_dir, dest_dir)
508
509
510 setup(
511 name=NAME,
512 author=AUTHOR,
513 description=DESCRIPTION,
514 long_description=LONG_DESCRIPTION,
515 long_description_content_type="text/markdown",
516 python_requires=REQUIRES_PYTHON,
517 url=URL,
518 packages=find_packages(
519 where=PYTHON_CODE_PREFIX, exclude=("java", "infra", "sdk/python/tests", "ui")
520 ),
521 package_dir={"": PYTHON_CODE_PREFIX},
522 install_requires=REQUIRED,
523 # https://stackoverflow.com/questions/28509965/setuptools-development-requirements
524 # Install dev requirements with: pip install -e .[dev]
525 extras_require={
526 "dev": DEV_REQUIRED,
527 "ci": CI_REQUIRED,
528 "gcp": GCP_REQUIRED,
529 "aws": AWS_REQUIRED,
530 "bytewax": BYTEWAX_REQUIRED,
531 "redis": REDIS_REQUIRED,
532 "snowflake": SNOWFLAKE_REQUIRED,
533 "spark": SPARK_REQUIRED,
534 "trino": TRINO_REQUIRED,
535 "postgres": POSTGRES_REQUIRED,
536 "azure": AZURE_REQUIRED,
537 "mysql": MYSQL_REQUIRED,
538 "ge": GE_REQUIRED,
539 "hbase": HBASE_REQUIRED,
540 "go": GO_REQUIRED,
541 "docs": DOCS_REQUIRED,
542 "cassandra": CASSANDRA_REQUIRED,
543 },
544 include_package_data=True,
545 license="Apache",
546 classifiers=[
547 # Trove classifiers
548 # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
549 "License :: OSI Approved :: Apache Software License",
550 "Programming Language :: Python",
551 "Programming Language :: Python :: 3",
552 "Programming Language :: Python :: 3.7",
553 ],
554 entry_points={"console_scripts": ["feast=feast.cli:cli"]},
555 use_scm_version=use_scm_version,
556 setup_requires=[
557 "setuptools_scm",
558 "grpcio>=1.47.0",
559 "grpcio-tools>=1.47.0",
560 "mypy-protobuf==3.1",
561 "pybindgen==0.22.0",
562 ],
563 cmdclass={
564 "build_python_protos": BuildPythonProtosCommand,
565 "build_go_protos": BuildGoProtosCommand,
566 "build_py": BuildCommand,
567 "develop": DevelopCommand,
568 "build_ext": build_ext,
569 },
570 ext_modules=[
571 Extension(
572 "feast.embedded_go.lib._embedded",
573 ["github.com/feast-dev/feast/go/embedded"],
574 )
575 ],
576 )
577
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -127,7 +127,7 @@
"cassandra-driver>=3.24.0,<4",
]
-GE_REQUIRED = ["great_expectations>=0.14.0,<0.15.0"]
+GE_REQUIRED = ["great_expectations>=0.15.41,<0.16.0"]
GO_REQUIRED = [
"cffi~=1.15.0",
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -127,7 +127,7 @@\n \"cassandra-driver>=3.24.0,<4\",\n ]\n \n-GE_REQUIRED = [\"great_expectations>=0.14.0,<0.15.0\"]\n+GE_REQUIRED = [\"great_expectations>=0.15.41,<0.16.0\"]\n \n GO_REQUIRED = [\n \"cffi~=1.15.0\",\n", "issue": "Greate Expectations version a bit outdated\n## Expected Behavior \r\nGX version works well with >=0.15.41\r\n\r\n## Current Behavior\r\nIt got pinned between 0.14 and 0.15\r\n\r\n\r\n### Specifications\r\n\r\n- Version: 0.29.0\r\n\r\n## Possible Solution\r\nBump GX version\r\n\n", "before_files": [{"content": "# Copyright 2019 The Feast Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport copy\nimport glob\nimport json\nimport os\nimport pathlib\nimport re\nimport shutil\nimport subprocess\nimport sys\nfrom distutils.cmd import Command\nfrom distutils.dir_util import copy_tree\nfrom pathlib import Path\nfrom subprocess import CalledProcessError\n\nfrom setuptools import Extension, find_packages\n\ntry:\n from setuptools import setup\n from setuptools.command.build_ext import build_ext as _build_ext\n from setuptools.command.build_py import build_py\n from setuptools.command.develop import develop\n from setuptools.command.install import install\n\nexcept ImportError:\n from distutils.command.build_ext import build_ext as _build_ext\n from distutils.command.build_py import build_py\n from distutils.core import setup\n\nNAME = \"feast\"\nDESCRIPTION = \"Python SDK for Feast\"\nURL = \"https://github.com/feast-dev/feast\"\nAUTHOR = \"Feast\"\nREQUIRES_PYTHON = \">=3.8.0\"\n\nREQUIRED = [\n \"click>=7.0.0,<9.0.0\",\n \"colorama>=0.3.9,<1\",\n \"dill~=0.3.0\",\n \"fastavro>=1.1.0,<2\",\n \"grpcio>=1.47.0,<2\",\n \"grpcio-reflection>=1.47.0,<2\",\n \"Jinja2>=2,<4\",\n \"jsonschema\",\n \"mmh3\",\n \"numpy>=1.22,<3\",\n \"pandas>=1.4.3,<2\",\n \"pandavro~=1.5.0\", # For some reason pandavro higher than 1.5.* only support pandas less than 1.3.\n \"protobuf<5,>3.20\",\n \"proto-plus>=1.20.0,<2\",\n \"pyarrow>=4,<9\",\n \"pydantic>=1,<2\",\n \"pygments>=2.12.0,<3\",\n \"PyYAML>=5.4.0,<7\",\n \"requests\",\n \"SQLAlchemy[mypy]>1,<2\",\n \"tabulate>=0.8.0,<1\",\n \"tenacity>=7,<9\",\n \"toml>=0.10.0,<1\",\n \"tqdm>=4,<5\",\n \"typeguard\",\n \"fastapi>=0.68.0,<1\",\n \"uvicorn[standard]>=0.14.0,<1\",\n \"dask>=2021.1.0\",\n \"bowler\", # Needed for automatic repo upgrades\n \"httpx>=0.23.3\", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656).\n]\n\nGCP_REQUIRED = [\n \"google-api-core>=1.23.0,<3\",\n \"googleapis-common-protos>=1.52.0,<2\",\n \"google-cloud-bigquery[pandas]>=2,<4\",\n \"google-cloud-bigquery-storage >= 2.0.0,<3\",\n \"google-cloud-datastore>=2.1.0,<3\",\n \"google-cloud-storage>=1.34.0,<3\",\n \"google-cloud-bigtable>=2.11.0,<3\",\n]\n\nREDIS_REQUIRED = [\n \"redis==4.2.2\",\n \"hiredis>=2.0.0,<3\",\n]\n\nAWS_REQUIRED = [\"boto3>=1.17.0,<=1.20.23\", \"docker>=5.0.2\", 
\"s3fs>=0.4.0,<=2022.01.0\"]\n\nBYTEWAX_REQUIRED = [\"bytewax==0.13.1\", \"docker>=5.0.2\", \"kubernetes<=20.13.0\"]\n\nSNOWFLAKE_REQUIRED = [\n \"snowflake-connector-python[pandas]>=2.7.3,<3\",\n # `pyOpenSSL==22.1.0` requires `cryptography<39,>=38.0.0`, which is incompatible\n # with `snowflake-connector-python[pandas]==2.8.0`, which depends on\n # `cryptography<37.0.0,>=3.1.0`.\n \"pyOpenSSL<22.1.0\",\n]\n\nSPARK_REQUIRED = [\n \"pyspark>=3.0.0,<4\",\n]\n\nTRINO_REQUIRED = [\n \"trino>=0.305.0,<0.400.0\", \"regex\"\n]\n\nPOSTGRES_REQUIRED = [\n \"psycopg2-binary>=2.8.3,<3\",\n]\n\nMYSQL_REQUIRED = [\"mysqlclient\", \"pymysql\", \"types-PyMySQL\"]\n\nHBASE_REQUIRED = [\n \"happybase>=1.2.0,<3\",\n]\n\nCASSANDRA_REQUIRED = [\n \"cassandra-driver>=3.24.0,<4\",\n]\n\nGE_REQUIRED = [\"great_expectations>=0.14.0,<0.15.0\"]\n\nGO_REQUIRED = [\n \"cffi~=1.15.0\",\n]\n\nAZURE_REQUIRED = [\n \"azure-storage-blob>=0.37.0\",\n \"azure-identity>=1.6.1\",\n \"SQLAlchemy>=1.4.19\",\n \"pyodbc>=4.0.30\",\n \"pymssql\",\n]\n\nROCKSET_REQUIRED = [\n \"rockset>=1.0.3\",\n]\n\nCI_REQUIRED = (\n [\n \"build\",\n \"cryptography>=35.0,<36\",\n \"flake8\",\n \"black>=22.6.0,<23\",\n \"isort>=5,<6\",\n \"grpcio-tools>=1.47.0\",\n \"grpcio-testing>=1.47.0\",\n \"minio==7.1.0\",\n \"mock==2.0.0\",\n \"moto<4\",\n \"mypy>=0.981,<0.990\",\n \"mypy-protobuf==3.1\",\n \"avro==1.10.0\",\n \"gcsfs>=0.4.0,<=2022.01.0\",\n \"urllib3>=1.25.4,<2\",\n \"psutil==5.9.0\",\n \"py>=1.11.0\", # https://github.com/pytest-dev/pytest/issues/10420\n \"pytest>=6.0.0,<8\",\n \"pytest-cov\",\n \"pytest-xdist\",\n \"pytest-benchmark>=3.4.1,<4\",\n \"pytest-lazy-fixture==0.6.3\",\n \"pytest-timeout==1.4.2\",\n \"pytest-ordering~=0.6.0\",\n \"pytest-mock==1.10.4\",\n \"Sphinx>4.0.0,<7\",\n \"testcontainers>=3.5,<4\",\n \"adlfs==0.5.9\",\n \"firebase-admin>=5.2.0,<6\",\n \"pre-commit\",\n \"assertpy==1.1\",\n \"pip-tools\",\n \"pybindgen\",\n \"types-protobuf~=3.19.22\",\n \"types-python-dateutil\",\n \"types-pytz\",\n \"types-PyYAML\",\n \"types-redis\",\n \"types-requests\",\n \"types-setuptools\",\n \"types-tabulate\",\n ]\n + GCP_REQUIRED\n + REDIS_REQUIRED\n + AWS_REQUIRED\n + BYTEWAX_REQUIRED\n + SNOWFLAKE_REQUIRED\n + SPARK_REQUIRED\n + POSTGRES_REQUIRED\n + MYSQL_REQUIRED\n + TRINO_REQUIRED\n + GE_REQUIRED\n + HBASE_REQUIRED\n + CASSANDRA_REQUIRED\n + AZURE_REQUIRED\n + ROCKSET_REQUIRED\n)\n\n\n# rtd builds fail because of mysql not being installed in their environment.\n# We can add mysql there, but it's not strictly needed. 
This will be faster for builds.\nDOCS_REQUIRED = CI_REQUIRED.copy()\nfor _r in MYSQL_REQUIRED:\n DOCS_REQUIRED.remove(_r)\n\nDEV_REQUIRED = [\"mypy-protobuf==3.1\", \"grpcio-testing~=1.0\"] + CI_REQUIRED\n\n# Get git repo root directory\nrepo_root = str(pathlib.Path(__file__).resolve().parent)\n\n# README file from Feast repo root directory\nREADME_FILE = os.path.join(repo_root, \"README.md\")\nwith open(README_FILE, \"r\", encoding=\"utf8\") as f:\n LONG_DESCRIPTION = f.read()\n\n# Add Support for parsing tags that have a prefix containing '/' (ie 'sdk/go') to setuptools_scm.\n# Regex modified from default tag regex in:\n# https://github.com/pypa/setuptools_scm/blob/2a1b46d38fb2b8aeac09853e660bcd0d7c1bc7be/src/setuptools_scm/config.py#L9\nTAG_REGEX = re.compile(\n r\"^(?:[\\/\\w-]+)?(?P<version>[vV]?\\d+(?:\\.\\d+){0,2}[^\\+]*)(?:\\+.*)?$\"\n)\n\n# Only set use_scm_version if git executable exists (setting this variable causes pip to use git under the hood)\nif shutil.which(\"git\"):\n use_scm_version = {\"root\": \".\", \"relative_to\": __file__, \"tag_regex\": TAG_REGEX}\nelse:\n use_scm_version = None\n\nPROTO_SUBDIRS = [\"core\", \"serving\", \"types\", \"storage\"]\nPYTHON_CODE_PREFIX = \"sdk/python\"\n\n\nclass BuildPythonProtosCommand(Command):\n description = \"Builds the proto files into Python files.\"\n user_options = [\n (\"inplace\", \"i\", \"Write generated proto files to source directory.\"),\n ]\n\n def initialize_options(self):\n self.python_protoc = [\n sys.executable,\n \"-m\",\n \"grpc_tools.protoc\",\n ] # find_executable(\"protoc\")\n self.proto_folder = os.path.join(repo_root, \"protos\")\n self.sub_folders = PROTO_SUBDIRS\n self.build_lib = None\n self.inplace = 0\n\n def finalize_options(self):\n self.set_undefined_options(\"build\", (\"build_lib\", \"build_lib\"))\n\n @property\n def python_folder(self):\n if self.inplace:\n return os.path.join(\n os.path.dirname(__file__) or os.getcwd(), \"sdk/python/feast/protos\"\n )\n\n return os.path.join(self.build_lib, \"feast/protos\")\n\n def _generate_python_protos(self, path: str):\n proto_files = glob.glob(os.path.join(self.proto_folder, path))\n Path(self.python_folder).mkdir(parents=True, exist_ok=True)\n subprocess.check_call(\n self.python_protoc\n + [\n \"-I\",\n self.proto_folder,\n \"--python_out\",\n self.python_folder,\n \"--grpc_python_out\",\n self.python_folder,\n \"--mypy_out\",\n self.python_folder,\n ]\n + proto_files\n )\n\n def run(self):\n for sub_folder in self.sub_folders:\n self._generate_python_protos(f\"feast/{sub_folder}/*.proto\")\n # We need the __init__ files for each of the generated subdirs\n # so that they are regular packages, and don't need the `--namespace-packages` flags\n # when being typechecked using mypy.\n with open(f\"{self.python_folder}/feast/{sub_folder}/__init__.py\", \"w\"):\n pass\n\n with open(f\"{self.python_folder}/__init__.py\", \"w\"):\n pass\n with open(f\"{self.python_folder}/feast/__init__.py\", \"w\"):\n pass\n\n for path in Path(self.python_folder).rglob(\"*.py\"):\n for folder in self.sub_folders:\n # Read in the file\n with open(path, \"r\") as file:\n filedata = file.read()\n\n # Replace the target string\n filedata = filedata.replace(\n f\"from feast.{folder}\", f\"from feast.protos.feast.{folder}\"\n )\n\n # Write the file out again\n with open(path, \"w\") as file:\n file.write(filedata)\n\n\ndef _generate_path_with_gopath():\n go_path = subprocess.check_output([\"go\", \"env\", \"GOPATH\"]).decode(\"utf-8\")\n go_path = go_path.strip()\n path_val = 
os.getenv(\"PATH\")\n path_val = f\"{path_val}:{go_path}/bin\"\n\n return path_val\n\n\ndef _ensure_go_and_proto_toolchain():\n try:\n version = subprocess.check_output([\"go\", \"version\"])\n except Exception as e:\n raise RuntimeError(\"Unable to find go toolchain\") from e\n\n semver_string = re.search(r\"go[\\S]+\", str(version)).group().lstrip(\"go\")\n parts = semver_string.split(\".\")\n if not (int(parts[0]) >= 1 and int(parts[1]) >= 16):\n raise RuntimeError(f\"Go compiler too old; expected 1.16+ found {semver_string}\")\n\n path_val = _generate_path_with_gopath()\n\n try:\n subprocess.check_call([\"protoc-gen-go\", \"--version\"], env={\"PATH\": path_val})\n subprocess.check_call(\n [\"protoc-gen-go-grpc\", \"--version\"], env={\"PATH\": path_val}\n )\n except Exception as e:\n raise RuntimeError(\"Unable to find go/grpc extensions for protoc\") from e\n\n\nclass BuildGoProtosCommand(Command):\n description = \"Builds the proto files into Go files.\"\n user_options = []\n\n def initialize_options(self):\n self.go_protoc = [\n sys.executable,\n \"-m\",\n \"grpc_tools.protoc\",\n ] # find_executable(\"protoc\")\n self.proto_folder = os.path.join(repo_root, \"protos\")\n self.go_folder = os.path.join(repo_root, \"go/protos\")\n self.sub_folders = PROTO_SUBDIRS\n self.path_val = _generate_path_with_gopath()\n\n def finalize_options(self):\n pass\n\n def _generate_go_protos(self, path: str):\n proto_files = glob.glob(os.path.join(self.proto_folder, path))\n\n try:\n subprocess.check_call(\n self.go_protoc\n + [\n \"-I\",\n self.proto_folder,\n \"--go_out\",\n self.go_folder,\n \"--go_opt=module=github.com/feast-dev/feast/go/protos\",\n \"--go-grpc_out\",\n self.go_folder,\n \"--go-grpc_opt=module=github.com/feast-dev/feast/go/protos\",\n ]\n + proto_files,\n env={\"PATH\": self.path_val},\n )\n except CalledProcessError as e:\n print(f\"Stderr: {e.stderr}\")\n print(f\"Stdout: {e.stdout}\")\n\n def run(self):\n go_dir = Path(repo_root) / \"go\" / \"protos\"\n go_dir.mkdir(exist_ok=True)\n for sub_folder in self.sub_folders:\n self._generate_go_protos(f\"feast/{sub_folder}/*.proto\")\n\n\nclass BuildCommand(build_py):\n \"\"\"Custom build command.\"\"\"\n\n def run(self):\n self.run_command(\"build_python_protos\")\n if os.getenv(\"COMPILE_GO\", \"false\").lower() == \"true\":\n _ensure_go_and_proto_toolchain()\n self.run_command(\"build_go_protos\")\n\n self.run_command(\"build_ext\")\n build_py.run(self)\n\n\nclass DevelopCommand(develop):\n \"\"\"Custom develop command.\"\"\"\n\n def run(self):\n self.reinitialize_command(\"build_python_protos\", inplace=1)\n self.run_command(\"build_python_protos\")\n if os.getenv(\"COMPILE_GO\", \"false\").lower() == \"true\":\n _ensure_go_and_proto_toolchain()\n self.run_command(\"build_go_protos\")\n\n develop.run(self)\n\n\nclass build_ext(_build_ext):\n def finalize_options(self) -> None:\n super().finalize_options()\n if os.getenv(\"COMPILE_GO\", \"false\").lower() == \"false\":\n self.extensions = [e for e in self.extensions if not self._is_go_ext(e)]\n\n def _is_go_ext(self, ext: Extension):\n return any(\n source.endswith(\".go\") or source.startswith(\"github\")\n for source in ext.sources\n )\n\n def build_extension(self, ext: Extension):\n print(f\"Building extension {ext}\")\n if not self._is_go_ext(ext):\n # the base class may mutate `self.compiler`\n compiler = copy.deepcopy(self.compiler)\n self.compiler, compiler = compiler, self.compiler\n try:\n return _build_ext.build_extension(self, ext)\n finally:\n self.compiler, compiler = 
compiler, self.compiler\n\n bin_path = _generate_path_with_gopath()\n go_env = json.loads(\n subprocess.check_output([\"go\", \"env\", \"-json\"]).decode(\"utf-8\").strip()\n )\n\n print(f\"Go env: {go_env}\")\n print(f\"CWD: {os.getcwd()}\")\n\n destination = os.path.dirname(os.path.abspath(self.get_ext_fullpath(ext.name)))\n subprocess.check_call(\n [\"go\", \"install\", \"golang.org/x/tools/cmd/goimports\"],\n env={\"PATH\": bin_path, **go_env},\n )\n subprocess.check_call(\n [\"go\", \"get\", \"github.com/go-python/[email protected]\"],\n env={\"PATH\": bin_path, **go_env},\n )\n subprocess.check_call(\n [\"go\", \"install\", \"github.com/go-python/gopy\"],\n env={\"PATH\": bin_path, **go_env},\n )\n subprocess.check_call(\n [\n \"gopy\",\n \"build\",\n \"-output\",\n destination,\n \"-vm\",\n sys.executable,\n \"--build-tags\",\n \"cgo,ccalloc\",\n \"--dynamic-link=True\",\n \"-no-make\",\n *ext.sources,\n ],\n env={\n \"PATH\": bin_path,\n \"CGO_LDFLAGS_ALLOW\": \".*\",\n **go_env,\n },\n )\n\n def copy_extensions_to_source(self):\n build_py = self.get_finalized_command(\"build_py\")\n for ext in self.extensions:\n fullname = self.get_ext_fullname(ext.name)\n modpath = fullname.split(\".\")\n package = \".\".join(modpath[:-1])\n package_dir = build_py.get_package_dir(package)\n\n src_dir = dest_dir = package_dir\n\n if src_dir.startswith(PYTHON_CODE_PREFIX):\n src_dir = package_dir[len(PYTHON_CODE_PREFIX) :]\n src_dir = src_dir.lstrip(\"/\")\n\n src_dir = os.path.join(self.build_lib, src_dir)\n\n # copy whole directory\n print(f\"Copying from {src_dir} to {dest_dir}\")\n copy_tree(src_dir, dest_dir)\n\n\nsetup(\n name=NAME,\n author=AUTHOR,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n long_description_content_type=\"text/markdown\",\n python_requires=REQUIRES_PYTHON,\n url=URL,\n packages=find_packages(\n where=PYTHON_CODE_PREFIX, exclude=(\"java\", \"infra\", \"sdk/python/tests\", \"ui\")\n ),\n package_dir={\"\": PYTHON_CODE_PREFIX},\n install_requires=REQUIRED,\n # https://stackoverflow.com/questions/28509965/setuptools-development-requirements\n # Install dev requirements with: pip install -e .[dev]\n extras_require={\n \"dev\": DEV_REQUIRED,\n \"ci\": CI_REQUIRED,\n \"gcp\": GCP_REQUIRED,\n \"aws\": AWS_REQUIRED,\n \"bytewax\": BYTEWAX_REQUIRED,\n \"redis\": REDIS_REQUIRED,\n \"snowflake\": SNOWFLAKE_REQUIRED,\n \"spark\": SPARK_REQUIRED,\n \"trino\": TRINO_REQUIRED,\n \"postgres\": POSTGRES_REQUIRED,\n \"azure\": AZURE_REQUIRED,\n \"mysql\": MYSQL_REQUIRED,\n \"ge\": GE_REQUIRED,\n \"hbase\": HBASE_REQUIRED,\n \"go\": GO_REQUIRED,\n \"docs\": DOCS_REQUIRED,\n \"cassandra\": CASSANDRA_REQUIRED,\n },\n include_package_data=True,\n license=\"Apache\",\n classifiers=[\n # Trove classifiers\n # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n ],\n entry_points={\"console_scripts\": [\"feast=feast.cli:cli\"]},\n use_scm_version=use_scm_version,\n setup_requires=[\n \"setuptools_scm\",\n \"grpcio>=1.47.0\",\n \"grpcio-tools>=1.47.0\",\n \"mypy-protobuf==3.1\",\n \"pybindgen==0.22.0\",\n ],\n cmdclass={\n \"build_python_protos\": BuildPythonProtosCommand,\n \"build_go_protos\": BuildGoProtosCommand,\n \"build_py\": BuildCommand,\n \"develop\": DevelopCommand,\n \"build_ext\": build_ext,\n },\n ext_modules=[\n Extension(\n \"feast.embedded_go.lib._embedded\",\n 
[\"github.com/feast-dev/feast/go/embedded\"],\n )\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright 2019 The Feast Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nimport copy\nimport glob\nimport json\nimport os\nimport pathlib\nimport re\nimport shutil\nimport subprocess\nimport sys\nfrom distutils.cmd import Command\nfrom distutils.dir_util import copy_tree\nfrom pathlib import Path\nfrom subprocess import CalledProcessError\n\nfrom setuptools import Extension, find_packages\n\ntry:\n from setuptools import setup\n from setuptools.command.build_ext import build_ext as _build_ext\n from setuptools.command.build_py import build_py\n from setuptools.command.develop import develop\n from setuptools.command.install import install\n\nexcept ImportError:\n from distutils.command.build_ext import build_ext as _build_ext\n from distutils.command.build_py import build_py\n from distutils.core import setup\n\nNAME = \"feast\"\nDESCRIPTION = \"Python SDK for Feast\"\nURL = \"https://github.com/feast-dev/feast\"\nAUTHOR = \"Feast\"\nREQUIRES_PYTHON = \">=3.8.0\"\n\nREQUIRED = [\n \"click>=7.0.0,<9.0.0\",\n \"colorama>=0.3.9,<1\",\n \"dill~=0.3.0\",\n \"fastavro>=1.1.0,<2\",\n \"grpcio>=1.47.0,<2\",\n \"grpcio-reflection>=1.47.0,<2\",\n \"Jinja2>=2,<4\",\n \"jsonschema\",\n \"mmh3\",\n \"numpy>=1.22,<3\",\n \"pandas>=1.4.3,<2\",\n \"pandavro~=1.5.0\", # For some reason pandavro higher than 1.5.* only support pandas less than 1.3.\n \"protobuf<5,>3.20\",\n \"proto-plus>=1.20.0,<2\",\n \"pyarrow>=4,<9\",\n \"pydantic>=1,<2\",\n \"pygments>=2.12.0,<3\",\n \"PyYAML>=5.4.0,<7\",\n \"requests\",\n \"SQLAlchemy[mypy]>1,<2\",\n \"tabulate>=0.8.0,<1\",\n \"tenacity>=7,<9\",\n \"toml>=0.10.0,<1\",\n \"tqdm>=4,<5\",\n \"typeguard\",\n \"fastapi>=0.68.0,<1\",\n \"uvicorn[standard]>=0.14.0,<1\",\n \"dask>=2021.1.0\",\n \"bowler\", # Needed for automatic repo upgrades\n \"httpx>=0.23.3\", # FastAPI does not correctly pull starlette dependency on httpx see thread(https://github.com/tiangolo/fastapi/issues/5656).\n]\n\nGCP_REQUIRED = [\n \"google-api-core>=1.23.0,<3\",\n \"googleapis-common-protos>=1.52.0,<2\",\n \"google-cloud-bigquery[pandas]>=2,<4\",\n \"google-cloud-bigquery-storage >= 2.0.0,<3\",\n \"google-cloud-datastore>=2.1.0,<3\",\n \"google-cloud-storage>=1.34.0,<3\",\n \"google-cloud-bigtable>=2.11.0,<3\",\n]\n\nREDIS_REQUIRED = [\n \"redis==4.2.2\",\n \"hiredis>=2.0.0,<3\",\n]\n\nAWS_REQUIRED = [\"boto3>=1.17.0,<=1.20.23\", \"docker>=5.0.2\", \"s3fs>=0.4.0,<=2022.01.0\"]\n\nBYTEWAX_REQUIRED = [\"bytewax==0.13.1\", \"docker>=5.0.2\", \"kubernetes<=20.13.0\"]\n\nSNOWFLAKE_REQUIRED = [\n \"snowflake-connector-python[pandas]>=2.7.3,<3\",\n # `pyOpenSSL==22.1.0` requires `cryptography<39,>=38.0.0`, which is incompatible\n # with `snowflake-connector-python[pandas]==2.8.0`, which depends on\n # `cryptography<37.0.0,>=3.1.0`.\n \"pyOpenSSL<22.1.0\",\n]\n\nSPARK_REQUIRED = [\n \"pyspark>=3.0.0,<4\",\n]\n\nTRINO_REQUIRED = [\n \"trino>=0.305.0,<0.400.0\", 
\"regex\"\n]\n\nPOSTGRES_REQUIRED = [\n \"psycopg2-binary>=2.8.3,<3\",\n]\n\nMYSQL_REQUIRED = [\"mysqlclient\", \"pymysql\", \"types-PyMySQL\"]\n\nHBASE_REQUIRED = [\n \"happybase>=1.2.0,<3\",\n]\n\nCASSANDRA_REQUIRED = [\n \"cassandra-driver>=3.24.0,<4\",\n]\n\nGE_REQUIRED = [\"great_expectations>=0.15.41,<0.16.0\"]\n\nGO_REQUIRED = [\n \"cffi~=1.15.0\",\n]\n\nAZURE_REQUIRED = [\n \"azure-storage-blob>=0.37.0\",\n \"azure-identity>=1.6.1\",\n \"SQLAlchemy>=1.4.19\",\n \"pyodbc>=4.0.30\",\n \"pymssql\",\n]\n\nROCKSET_REQUIRED = [\n \"rockset>=1.0.3\",\n]\n\nCI_REQUIRED = (\n [\n \"build\",\n \"cryptography>=35.0,<36\",\n \"flake8\",\n \"black>=22.6.0,<23\",\n \"isort>=5,<6\",\n \"grpcio-tools>=1.47.0\",\n \"grpcio-testing>=1.47.0\",\n \"minio==7.1.0\",\n \"mock==2.0.0\",\n \"moto<4\",\n \"mypy>=0.981,<0.990\",\n \"mypy-protobuf==3.1\",\n \"avro==1.10.0\",\n \"gcsfs>=0.4.0,<=2022.01.0\",\n \"urllib3>=1.25.4,<2\",\n \"psutil==5.9.0\",\n \"py>=1.11.0\", # https://github.com/pytest-dev/pytest/issues/10420\n \"pytest>=6.0.0,<8\",\n \"pytest-cov\",\n \"pytest-xdist\",\n \"pytest-benchmark>=3.4.1,<4\",\n \"pytest-lazy-fixture==0.6.3\",\n \"pytest-timeout==1.4.2\",\n \"pytest-ordering~=0.6.0\",\n \"pytest-mock==1.10.4\",\n \"Sphinx>4.0.0,<7\",\n \"testcontainers>=3.5,<4\",\n \"adlfs==0.5.9\",\n \"firebase-admin>=5.2.0,<6\",\n \"pre-commit\",\n \"assertpy==1.1\",\n \"pip-tools\",\n \"pybindgen\",\n \"types-protobuf~=3.19.22\",\n \"types-python-dateutil\",\n \"types-pytz\",\n \"types-PyYAML\",\n \"types-redis\",\n \"types-requests\",\n \"types-setuptools\",\n \"types-tabulate\",\n ]\n + GCP_REQUIRED\n + REDIS_REQUIRED\n + AWS_REQUIRED\n + BYTEWAX_REQUIRED\n + SNOWFLAKE_REQUIRED\n + SPARK_REQUIRED\n + POSTGRES_REQUIRED\n + MYSQL_REQUIRED\n + TRINO_REQUIRED\n + GE_REQUIRED\n + HBASE_REQUIRED\n + CASSANDRA_REQUIRED\n + AZURE_REQUIRED\n + ROCKSET_REQUIRED\n)\n\n\n# rtd builds fail because of mysql not being installed in their environment.\n# We can add mysql there, but it's not strictly needed. 
This will be faster for builds.\nDOCS_REQUIRED = CI_REQUIRED.copy()\nfor _r in MYSQL_REQUIRED:\n DOCS_REQUIRED.remove(_r)\n\nDEV_REQUIRED = [\"mypy-protobuf==3.1\", \"grpcio-testing~=1.0\"] + CI_REQUIRED\n\n# Get git repo root directory\nrepo_root = str(pathlib.Path(__file__).resolve().parent)\n\n# README file from Feast repo root directory\nREADME_FILE = os.path.join(repo_root, \"README.md\")\nwith open(README_FILE, \"r\", encoding=\"utf8\") as f:\n LONG_DESCRIPTION = f.read()\n\n# Add Support for parsing tags that have a prefix containing '/' (ie 'sdk/go') to setuptools_scm.\n# Regex modified from default tag regex in:\n# https://github.com/pypa/setuptools_scm/blob/2a1b46d38fb2b8aeac09853e660bcd0d7c1bc7be/src/setuptools_scm/config.py#L9\nTAG_REGEX = re.compile(\n r\"^(?:[\\/\\w-]+)?(?P<version>[vV]?\\d+(?:\\.\\d+){0,2}[^\\+]*)(?:\\+.*)?$\"\n)\n\n# Only set use_scm_version if git executable exists (setting this variable causes pip to use git under the hood)\nif shutil.which(\"git\"):\n use_scm_version = {\"root\": \".\", \"relative_to\": __file__, \"tag_regex\": TAG_REGEX}\nelse:\n use_scm_version = None\n\nPROTO_SUBDIRS = [\"core\", \"serving\", \"types\", \"storage\"]\nPYTHON_CODE_PREFIX = \"sdk/python\"\n\n\nclass BuildPythonProtosCommand(Command):\n description = \"Builds the proto files into Python files.\"\n user_options = [\n (\"inplace\", \"i\", \"Write generated proto files to source directory.\"),\n ]\n\n def initialize_options(self):\n self.python_protoc = [\n sys.executable,\n \"-m\",\n \"grpc_tools.protoc\",\n ] # find_executable(\"protoc\")\n self.proto_folder = os.path.join(repo_root, \"protos\")\n self.sub_folders = PROTO_SUBDIRS\n self.build_lib = None\n self.inplace = 0\n\n def finalize_options(self):\n self.set_undefined_options(\"build\", (\"build_lib\", \"build_lib\"))\n\n @property\n def python_folder(self):\n if self.inplace:\n return os.path.join(\n os.path.dirname(__file__) or os.getcwd(), \"sdk/python/feast/protos\"\n )\n\n return os.path.join(self.build_lib, \"feast/protos\")\n\n def _generate_python_protos(self, path: str):\n proto_files = glob.glob(os.path.join(self.proto_folder, path))\n Path(self.python_folder).mkdir(parents=True, exist_ok=True)\n subprocess.check_call(\n self.python_protoc\n + [\n \"-I\",\n self.proto_folder,\n \"--python_out\",\n self.python_folder,\n \"--grpc_python_out\",\n self.python_folder,\n \"--mypy_out\",\n self.python_folder,\n ]\n + proto_files\n )\n\n def run(self):\n for sub_folder in self.sub_folders:\n self._generate_python_protos(f\"feast/{sub_folder}/*.proto\")\n # We need the __init__ files for each of the generated subdirs\n # so that they are regular packages, and don't need the `--namespace-packages` flags\n # when being typechecked using mypy.\n with open(f\"{self.python_folder}/feast/{sub_folder}/__init__.py\", \"w\"):\n pass\n\n with open(f\"{self.python_folder}/__init__.py\", \"w\"):\n pass\n with open(f\"{self.python_folder}/feast/__init__.py\", \"w\"):\n pass\n\n for path in Path(self.python_folder).rglob(\"*.py\"):\n for folder in self.sub_folders:\n # Read in the file\n with open(path, \"r\") as file:\n filedata = file.read()\n\n # Replace the target string\n filedata = filedata.replace(\n f\"from feast.{folder}\", f\"from feast.protos.feast.{folder}\"\n )\n\n # Write the file out again\n with open(path, \"w\") as file:\n file.write(filedata)\n\n\ndef _generate_path_with_gopath():\n go_path = subprocess.check_output([\"go\", \"env\", \"GOPATH\"]).decode(\"utf-8\")\n go_path = go_path.strip()\n path_val = 
os.getenv(\"PATH\")\n path_val = f\"{path_val}:{go_path}/bin\"\n\n return path_val\n\n\ndef _ensure_go_and_proto_toolchain():\n try:\n version = subprocess.check_output([\"go\", \"version\"])\n except Exception as e:\n raise RuntimeError(\"Unable to find go toolchain\") from e\n\n semver_string = re.search(r\"go[\\S]+\", str(version)).group().lstrip(\"go\")\n parts = semver_string.split(\".\")\n if not (int(parts[0]) >= 1 and int(parts[1]) >= 16):\n raise RuntimeError(f\"Go compiler too old; expected 1.16+ found {semver_string}\")\n\n path_val = _generate_path_with_gopath()\n\n try:\n subprocess.check_call([\"protoc-gen-go\", \"--version\"], env={\"PATH\": path_val})\n subprocess.check_call(\n [\"protoc-gen-go-grpc\", \"--version\"], env={\"PATH\": path_val}\n )\n except Exception as e:\n raise RuntimeError(\"Unable to find go/grpc extensions for protoc\") from e\n\n\nclass BuildGoProtosCommand(Command):\n description = \"Builds the proto files into Go files.\"\n user_options = []\n\n def initialize_options(self):\n self.go_protoc = [\n sys.executable,\n \"-m\",\n \"grpc_tools.protoc\",\n ] # find_executable(\"protoc\")\n self.proto_folder = os.path.join(repo_root, \"protos\")\n self.go_folder = os.path.join(repo_root, \"go/protos\")\n self.sub_folders = PROTO_SUBDIRS\n self.path_val = _generate_path_with_gopath()\n\n def finalize_options(self):\n pass\n\n def _generate_go_protos(self, path: str):\n proto_files = glob.glob(os.path.join(self.proto_folder, path))\n\n try:\n subprocess.check_call(\n self.go_protoc\n + [\n \"-I\",\n self.proto_folder,\n \"--go_out\",\n self.go_folder,\n \"--go_opt=module=github.com/feast-dev/feast/go/protos\",\n \"--go-grpc_out\",\n self.go_folder,\n \"--go-grpc_opt=module=github.com/feast-dev/feast/go/protos\",\n ]\n + proto_files,\n env={\"PATH\": self.path_val},\n )\n except CalledProcessError as e:\n print(f\"Stderr: {e.stderr}\")\n print(f\"Stdout: {e.stdout}\")\n\n def run(self):\n go_dir = Path(repo_root) / \"go\" / \"protos\"\n go_dir.mkdir(exist_ok=True)\n for sub_folder in self.sub_folders:\n self._generate_go_protos(f\"feast/{sub_folder}/*.proto\")\n\n\nclass BuildCommand(build_py):\n \"\"\"Custom build command.\"\"\"\n\n def run(self):\n self.run_command(\"build_python_protos\")\n if os.getenv(\"COMPILE_GO\", \"false\").lower() == \"true\":\n _ensure_go_and_proto_toolchain()\n self.run_command(\"build_go_protos\")\n\n self.run_command(\"build_ext\")\n build_py.run(self)\n\n\nclass DevelopCommand(develop):\n \"\"\"Custom develop command.\"\"\"\n\n def run(self):\n self.reinitialize_command(\"build_python_protos\", inplace=1)\n self.run_command(\"build_python_protos\")\n if os.getenv(\"COMPILE_GO\", \"false\").lower() == \"true\":\n _ensure_go_and_proto_toolchain()\n self.run_command(\"build_go_protos\")\n\n develop.run(self)\n\n\nclass build_ext(_build_ext):\n def finalize_options(self) -> None:\n super().finalize_options()\n if os.getenv(\"COMPILE_GO\", \"false\").lower() == \"false\":\n self.extensions = [e for e in self.extensions if not self._is_go_ext(e)]\n\n def _is_go_ext(self, ext: Extension):\n return any(\n source.endswith(\".go\") or source.startswith(\"github\")\n for source in ext.sources\n )\n\n def build_extension(self, ext: Extension):\n print(f\"Building extension {ext}\")\n if not self._is_go_ext(ext):\n # the base class may mutate `self.compiler`\n compiler = copy.deepcopy(self.compiler)\n self.compiler, compiler = compiler, self.compiler\n try:\n return _build_ext.build_extension(self, ext)\n finally:\n self.compiler, compiler = 
compiler, self.compiler\n\n bin_path = _generate_path_with_gopath()\n go_env = json.loads(\n subprocess.check_output([\"go\", \"env\", \"-json\"]).decode(\"utf-8\").strip()\n )\n\n print(f\"Go env: {go_env}\")\n print(f\"CWD: {os.getcwd()}\")\n\n destination = os.path.dirname(os.path.abspath(self.get_ext_fullpath(ext.name)))\n subprocess.check_call(\n [\"go\", \"install\", \"golang.org/x/tools/cmd/goimports\"],\n env={\"PATH\": bin_path, **go_env},\n )\n subprocess.check_call(\n [\"go\", \"get\", \"github.com/go-python/[email protected]\"],\n env={\"PATH\": bin_path, **go_env},\n )\n subprocess.check_call(\n [\"go\", \"install\", \"github.com/go-python/gopy\"],\n env={\"PATH\": bin_path, **go_env},\n )\n subprocess.check_call(\n [\n \"gopy\",\n \"build\",\n \"-output\",\n destination,\n \"-vm\",\n sys.executable,\n \"--build-tags\",\n \"cgo,ccalloc\",\n \"--dynamic-link=True\",\n \"-no-make\",\n *ext.sources,\n ],\n env={\n \"PATH\": bin_path,\n \"CGO_LDFLAGS_ALLOW\": \".*\",\n **go_env,\n },\n )\n\n def copy_extensions_to_source(self):\n build_py = self.get_finalized_command(\"build_py\")\n for ext in self.extensions:\n fullname = self.get_ext_fullname(ext.name)\n modpath = fullname.split(\".\")\n package = \".\".join(modpath[:-1])\n package_dir = build_py.get_package_dir(package)\n\n src_dir = dest_dir = package_dir\n\n if src_dir.startswith(PYTHON_CODE_PREFIX):\n src_dir = package_dir[len(PYTHON_CODE_PREFIX) :]\n src_dir = src_dir.lstrip(\"/\")\n\n src_dir = os.path.join(self.build_lib, src_dir)\n\n # copy whole directory\n print(f\"Copying from {src_dir} to {dest_dir}\")\n copy_tree(src_dir, dest_dir)\n\n\nsetup(\n name=NAME,\n author=AUTHOR,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n long_description_content_type=\"text/markdown\",\n python_requires=REQUIRES_PYTHON,\n url=URL,\n packages=find_packages(\n where=PYTHON_CODE_PREFIX, exclude=(\"java\", \"infra\", \"sdk/python/tests\", \"ui\")\n ),\n package_dir={\"\": PYTHON_CODE_PREFIX},\n install_requires=REQUIRED,\n # https://stackoverflow.com/questions/28509965/setuptools-development-requirements\n # Install dev requirements with: pip install -e .[dev]\n extras_require={\n \"dev\": DEV_REQUIRED,\n \"ci\": CI_REQUIRED,\n \"gcp\": GCP_REQUIRED,\n \"aws\": AWS_REQUIRED,\n \"bytewax\": BYTEWAX_REQUIRED,\n \"redis\": REDIS_REQUIRED,\n \"snowflake\": SNOWFLAKE_REQUIRED,\n \"spark\": SPARK_REQUIRED,\n \"trino\": TRINO_REQUIRED,\n \"postgres\": POSTGRES_REQUIRED,\n \"azure\": AZURE_REQUIRED,\n \"mysql\": MYSQL_REQUIRED,\n \"ge\": GE_REQUIRED,\n \"hbase\": HBASE_REQUIRED,\n \"go\": GO_REQUIRED,\n \"docs\": DOCS_REQUIRED,\n \"cassandra\": CASSANDRA_REQUIRED,\n },\n include_package_data=True,\n license=\"Apache\",\n classifiers=[\n # Trove classifiers\n # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers\n \"License :: OSI Approved :: Apache Software License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.7\",\n ],\n entry_points={\"console_scripts\": [\"feast=feast.cli:cli\"]},\n use_scm_version=use_scm_version,\n setup_requires=[\n \"setuptools_scm\",\n \"grpcio>=1.47.0\",\n \"grpcio-tools>=1.47.0\",\n \"mypy-protobuf==3.1\",\n \"pybindgen==0.22.0\",\n ],\n cmdclass={\n \"build_python_protos\": BuildPythonProtosCommand,\n \"build_go_protos\": BuildGoProtosCommand,\n \"build_py\": BuildCommand,\n \"develop\": DevelopCommand,\n \"build_ext\": build_ext,\n },\n ext_modules=[\n Extension(\n \"feast.embedded_go.lib._embedded\",\n 
[\"github.com/feast-dev/feast/go/embedded\"],\n )\n ],\n)\n", "path": "setup.py"}]} |
gh_patches_debug_101 | rasdani/github-patches | git_diff | CiviWiki__OpenCiviWiki-1042 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
{FEAT}: Automated testing with actions.
### Idea summary
Usage of GitHub actions.
### Further details
We can use GitHub Actions to check/test the code that is being pushed upstream via PRs and it can be tested before merging automatically (Technically it is Continuous Integration).
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `project/accounts/models.py`
Content:
```
1 from django.contrib.auth.models import AbstractUser
2 import os
3 import io
4 from django.core.files.storage import default_storage
5 from django.conf import settings
6 from django.db import models
7 from PIL import Image, ImageOps
8 from django.core.files.uploadedfile import InMemoryUploadedFile
9
10 from taggit.managers import TaggableManager
11
12 from api.models.category import Category
13 from common.utils import PathAndRename
14
15
16 class User(AbstractUser):
17 """
18 A new custom User model for any functionality needed in the future. Extending AbstractUser
19 allows for adding new fields to the user model as needed.
20 """
21
22 class Meta:
23 db_table = "users"
24
25
26 # Image manipulation constants
27 PROFILE_IMG_SIZE = (171, 171)
28 PROFILE_IMG_THUMB_SIZE = (40, 40)
29 WHITE_BG = (255, 255, 255)
30
31
32 class ProfileManager(models.Manager):
33 def summarize(self, profile):
34 from api.models.civi import Civi
35
36 data = {
37 "username": profile.user.username,
38 "first_name": profile.first_name,
39 "last_name": profile.last_name,
40 "about_me": profile.about_me,
41 "history": [
42 Civi.objects.serialize(c)
43 for c in Civi.objects.filter(author_id=profile.id).order_by("-created")
44 ],
45 "profile_image": profile.profile_image_url,
46 "followers": self.followers(profile),
47 "following": self.following(profile),
48 }
49 return data
50
51 def chip_summarize(self, profile):
52 data = {
53 "username": profile.user.username,
54 "first_name": profile.first_name,
55 "last_name": profile.last_name,
56 "profile_image": profile.profile_image_url,
57 }
58 return data
59
60 def card_summarize(self, profile, request_profile):
61 # Length at which to truncate 'about me' text
62 about_me_truncate_length = 150
63
64 # If 'about me' text is longer than 150 characters... add elipsis (truncate)
65 ellipsis_if_too_long = (
66 "" if len(profile.about_me) <= about_me_truncate_length else "..."
67 )
68
69 data = {
70 "id": profile.user.id,
71 "username": profile.user.username,
72 "first_name": profile.first_name,
73 "last_name": profile.last_name,
74 "about_me": profile.about_me[:about_me_truncate_length] + ellipsis_if_too_long,
75 "profile_image": profile.profile_image_url,
76 "follow_state": True
77 if profile in request_profile.following.all()
78 else False,
79 "request_profile": request_profile.first_name,
80 }
81 return data
82
83 def followers(self, profile):
84 return [self.chip_summarize(follower) for follower in profile.followers.all()]
85
86 def following(self, profile):
87 return [self.chip_summarize(following) for following in profile.following.all()]
88
89
90 profile_upload_path = PathAndRename("")
91
92
93 class Profile(models.Model):
94 user = models.ForeignKey(User, on_delete=models.CASCADE)
95 first_name = models.CharField(max_length=63, blank=False)
96 last_name = models.CharField(max_length=63, blank=False)
97 about_me = models.CharField(max_length=511, blank=True)
98
99 categories = models.ManyToManyField(
100 Category, related_name="user_categories", symmetrical=False
101 )
102 tags = TaggableManager()
103
104 followers = models.ManyToManyField(
105 "self", related_name="follower", symmetrical=False
106 )
107 following = models.ManyToManyField(
108 "self", related_name="followings", symmetrical=False
109 )
110
111 is_verified = models.BooleanField(default=False)
112 full_profile = models.BooleanField(default=False)
113
114 objects = ProfileManager()
115 profile_image = models.ImageField(
116 upload_to=profile_upload_path, blank=True, null=True
117 )
118 profile_image_thumb = models.ImageField(
119 upload_to=profile_upload_path, blank=True, null=True
120 )
121
122 @property
123 def full_name(self):
124 """Returns the person's full name."""
125
126 return f"{self.first_name} {self.last_name}"
127
128 @property
129 def profile_image_url(self):
130 """Return placeholder profile image if user didn't upload one"""
131
132 if self.profile_image:
133 file_exists = default_storage.exists(
134 os.path.join(settings.MEDIA_ROOT, self.profile_image.name)
135 )
136 if file_exists:
137 return self.profile_image.url
138
139 return "/static/img/no_image_md.png"
140
141 @property
142 def profile_image_thumb_url(self):
143 """Return placeholder profile image if user didn't upload one"""
144
145 if self.profile_image_thumb:
146 file_exists = default_storage.exists(
147 os.path.join(settings.MEDIA_ROOT, self.profile_image_thumb.name)
148 )
149 if file_exists:
150 return self.profile_image_thumb.url
151
152 return "/static/img/no_image_md.png"
153
154 def __init__(self, *args, **kwargs):
155 super(Profile, self).__init__(*args, **kwargs)
156
157 def save(self, *args, **kwargs):
158 """ Image crop/resize and thumbnail creation """
159
160 # New Profile image --
161 if self.profile_image:
162 self.resize_profile_image()
163
164 self.full_profile = self.is_full_profile()
165
166 super(Profile, self).save(*args, **kwargs)
167
168 def resize_profile_image(self):
169 """
170 Resizes and crops the user uploaded image and creates a thumbnail version of it
171 """
172 profile_image_field = self.profile_image
173 image_file = io.StringIO(profile_image_field.read())
174 profile_image = Image.open(image_file)
175 profile_image.load()
176
177 # Resize image
178 profile_image = ImageOps.fit(
179 profile_image, PROFILE_IMG_SIZE, Image.ANTIALIAS, centering=(0.5, 0.5)
180 )
181
182 # Convert to JPG image format with white background
183 if profile_image.mode not in ("L", "RGB"):
184 white_bg_img = Image.new("RGB", PROFILE_IMG_SIZE, WHITE_BG)
185 white_bg_img.paste(profile_image, mask=profile_image.split()[3])
186 profile_image = white_bg_img
187
188 # Save new cropped image
189 tmp_image_file = io.StringIO()
190 profile_image.save(tmp_image_file, "JPEG", quality=90)
191 tmp_image_file.seek(0)
192 self.profile_image = InMemoryUploadedFile(
193 tmp_image_file,
194 "ImageField",
195 self.profile_image.name,
196 "image/jpeg",
197 tmp_image_file.len,
198 None,
199 )
200 # Make a Thumbnail Image for the new resized image
201 thumb_image = profile_image.copy()
202 thumb_image.thumbnail(PROFILE_IMG_THUMB_SIZE, resample=Image.ANTIALIAS)
203 tmp_image_file = io.StringIO()
204 thumb_image.save(tmp_image_file, "JPEG", quality=90)
205 tmp_image_file.seek(0)
206 self.profile_image_thumb = InMemoryUploadedFile(
207 tmp_image_file,
208 "ImageField",
209 self.profile_image.name,
210 "image/jpeg",
211 tmp_image_file.len,
212 None,
213 )
214
215 def is_full_profile(self):
216 if self.first_name and self.last_name:
217 return True
218 else:
219 return False
220
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/project/accounts/models.py b/project/accounts/models.py
--- a/project/accounts/models.py
+++ b/project/accounts/models.py
@@ -9,7 +9,7 @@
from taggit.managers import TaggableManager
-from api.models.category import Category
+from api.models import Category
from common.utils import PathAndRename
| {"golden_diff": "diff --git a/project/accounts/models.py b/project/accounts/models.py\n--- a/project/accounts/models.py\n+++ b/project/accounts/models.py\n@@ -9,7 +9,7 @@\n \n from taggit.managers import TaggableManager\n \n-from api.models.category import Category\n+from api.models import Category\n from common.utils import PathAndRename\n", "issue": "{FEAT}: Automated testing with actions.\n### Idea summary\n\nUsage of GitHub actions.\n\n### Further details\n\nWe can use GitHub Actions to check/test the code that is being pushed upstream via PRs and it can be tested before merging automatically (Technically it is Continuous Integration).\n", "before_files": [{"content": "from django.contrib.auth.models import AbstractUser\nimport os\nimport io\nfrom django.core.files.storage import default_storage\nfrom django.conf import settings\nfrom django.db import models\nfrom PIL import Image, ImageOps\nfrom django.core.files.uploadedfile import InMemoryUploadedFile\n\nfrom taggit.managers import TaggableManager\n\nfrom api.models.category import Category\nfrom common.utils import PathAndRename\n\n\nclass User(AbstractUser):\n \"\"\"\n A new custom User model for any functionality needed in the future. Extending AbstractUser\n allows for adding new fields to the user model as needed.\n \"\"\"\n\n class Meta:\n db_table = \"users\"\n\n\n# Image manipulation constants\nPROFILE_IMG_SIZE = (171, 171)\nPROFILE_IMG_THUMB_SIZE = (40, 40)\nWHITE_BG = (255, 255, 255)\n\n\nclass ProfileManager(models.Manager):\n def summarize(self, profile):\n from api.models.civi import Civi\n\n data = {\n \"username\": profile.user.username,\n \"first_name\": profile.first_name,\n \"last_name\": profile.last_name,\n \"about_me\": profile.about_me,\n \"history\": [\n Civi.objects.serialize(c)\n for c in Civi.objects.filter(author_id=profile.id).order_by(\"-created\")\n ],\n \"profile_image\": profile.profile_image_url,\n \"followers\": self.followers(profile),\n \"following\": self.following(profile),\n }\n return data\n\n def chip_summarize(self, profile):\n data = {\n \"username\": profile.user.username,\n \"first_name\": profile.first_name,\n \"last_name\": profile.last_name,\n \"profile_image\": profile.profile_image_url,\n }\n return data\n\n def card_summarize(self, profile, request_profile):\n # Length at which to truncate 'about me' text\n about_me_truncate_length = 150\n\n # If 'about me' text is longer than 150 characters... 
add elipsis (truncate)\n ellipsis_if_too_long = (\n \"\" if len(profile.about_me) <= about_me_truncate_length else \"...\"\n )\n\n data = {\n \"id\": profile.user.id,\n \"username\": profile.user.username,\n \"first_name\": profile.first_name,\n \"last_name\": profile.last_name,\n \"about_me\": profile.about_me[:about_me_truncate_length] + ellipsis_if_too_long,\n \"profile_image\": profile.profile_image_url,\n \"follow_state\": True\n if profile in request_profile.following.all()\n else False,\n \"request_profile\": request_profile.first_name,\n }\n return data\n\n def followers(self, profile):\n return [self.chip_summarize(follower) for follower in profile.followers.all()]\n\n def following(self, profile):\n return [self.chip_summarize(following) for following in profile.following.all()]\n\n\nprofile_upload_path = PathAndRename(\"\")\n\n\nclass Profile(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n first_name = models.CharField(max_length=63, blank=False)\n last_name = models.CharField(max_length=63, blank=False)\n about_me = models.CharField(max_length=511, blank=True)\n\n categories = models.ManyToManyField(\n Category, related_name=\"user_categories\", symmetrical=False\n )\n tags = TaggableManager()\n\n followers = models.ManyToManyField(\n \"self\", related_name=\"follower\", symmetrical=False\n )\n following = models.ManyToManyField(\n \"self\", related_name=\"followings\", symmetrical=False\n )\n\n is_verified = models.BooleanField(default=False)\n full_profile = models.BooleanField(default=False)\n\n objects = ProfileManager()\n profile_image = models.ImageField(\n upload_to=profile_upload_path, blank=True, null=True\n )\n profile_image_thumb = models.ImageField(\n upload_to=profile_upload_path, blank=True, null=True\n )\n\n @property\n def full_name(self):\n \"\"\"Returns the person's full name.\"\"\"\n\n return f\"{self.first_name} {self.last_name}\"\n\n @property\n def profile_image_url(self):\n \"\"\"Return placeholder profile image if user didn't upload one\"\"\"\n\n if self.profile_image:\n file_exists = default_storage.exists(\n os.path.join(settings.MEDIA_ROOT, self.profile_image.name)\n )\n if file_exists:\n return self.profile_image.url\n\n return \"/static/img/no_image_md.png\"\n\n @property\n def profile_image_thumb_url(self):\n \"\"\"Return placeholder profile image if user didn't upload one\"\"\"\n\n if self.profile_image_thumb:\n file_exists = default_storage.exists(\n os.path.join(settings.MEDIA_ROOT, self.profile_image_thumb.name)\n )\n if file_exists:\n return self.profile_image_thumb.url\n\n return \"/static/img/no_image_md.png\"\n\n def __init__(self, *args, **kwargs):\n super(Profile, self).__init__(*args, **kwargs)\n\n def save(self, *args, **kwargs):\n \"\"\" Image crop/resize and thumbnail creation \"\"\"\n\n # New Profile image --\n if self.profile_image:\n self.resize_profile_image()\n\n self.full_profile = self.is_full_profile()\n\n super(Profile, self).save(*args, **kwargs)\n\n def resize_profile_image(self):\n \"\"\"\n Resizes and crops the user uploaded image and creates a thumbnail version of it\n \"\"\"\n profile_image_field = self.profile_image\n image_file = io.StringIO(profile_image_field.read())\n profile_image = Image.open(image_file)\n profile_image.load()\n\n # Resize image\n profile_image = ImageOps.fit(\n profile_image, PROFILE_IMG_SIZE, Image.ANTIALIAS, centering=(0.5, 0.5)\n )\n\n # Convert to JPG image format with white background\n if profile_image.mode not in (\"L\", \"RGB\"):\n white_bg_img = 
Image.new(\"RGB\", PROFILE_IMG_SIZE, WHITE_BG)\n white_bg_img.paste(profile_image, mask=profile_image.split()[3])\n profile_image = white_bg_img\n\n # Save new cropped image\n tmp_image_file = io.StringIO()\n profile_image.save(tmp_image_file, \"JPEG\", quality=90)\n tmp_image_file.seek(0)\n self.profile_image = InMemoryUploadedFile(\n tmp_image_file,\n \"ImageField\",\n self.profile_image.name,\n \"image/jpeg\",\n tmp_image_file.len,\n None,\n )\n # Make a Thumbnail Image for the new resized image\n thumb_image = profile_image.copy()\n thumb_image.thumbnail(PROFILE_IMG_THUMB_SIZE, resample=Image.ANTIALIAS)\n tmp_image_file = io.StringIO()\n thumb_image.save(tmp_image_file, \"JPEG\", quality=90)\n tmp_image_file.seek(0)\n self.profile_image_thumb = InMemoryUploadedFile(\n tmp_image_file,\n \"ImageField\",\n self.profile_image.name,\n \"image/jpeg\",\n tmp_image_file.len,\n None,\n )\n\n def is_full_profile(self):\n if self.first_name and self.last_name:\n return True\n else:\n return False\n", "path": "project/accounts/models.py"}], "after_files": [{"content": "from django.contrib.auth.models import AbstractUser\nimport os\nimport io\nfrom django.core.files.storage import default_storage\nfrom django.conf import settings\nfrom django.db import models\nfrom PIL import Image, ImageOps\nfrom django.core.files.uploadedfile import InMemoryUploadedFile\n\nfrom taggit.managers import TaggableManager\n\nfrom api.models import Category\nfrom common.utils import PathAndRename\n\n\nclass User(AbstractUser):\n \"\"\"\n A new custom User model for any functionality needed in the future. Extending AbstractUser\n allows for adding new fields to the user model as needed.\n \"\"\"\n\n class Meta:\n db_table = \"users\"\n\n\n# Image manipulation constants\nPROFILE_IMG_SIZE = (171, 171)\nPROFILE_IMG_THUMB_SIZE = (40, 40)\nWHITE_BG = (255, 255, 255)\n\n\nclass ProfileManager(models.Manager):\n def summarize(self, profile):\n from api.models.civi import Civi\n\n data = {\n \"username\": profile.user.username,\n \"first_name\": profile.first_name,\n \"last_name\": profile.last_name,\n \"about_me\": profile.about_me,\n \"history\": [\n Civi.objects.serialize(c)\n for c in Civi.objects.filter(author_id=profile.id).order_by(\"-created\")\n ],\n \"profile_image\": profile.profile_image_url,\n \"followers\": self.followers(profile),\n \"following\": self.following(profile),\n }\n return data\n\n def chip_summarize(self, profile):\n data = {\n \"username\": profile.user.username,\n \"first_name\": profile.first_name,\n \"last_name\": profile.last_name,\n \"profile_image\": profile.profile_image_url,\n }\n return data\n\n def card_summarize(self, profile, request_profile):\n # Length at which to truncate 'about me' text\n about_me_truncate_length = 150\n\n # If 'about me' text is longer than 150 characters... 
add elipsis (truncate)\n ellipsis_if_too_long = (\n \"\" if len(profile.about_me) <= about_me_truncate_length else \"...\"\n )\n\n data = {\n \"id\": profile.user.id,\n \"username\": profile.user.username,\n \"first_name\": profile.first_name,\n \"last_name\": profile.last_name,\n \"about_me\": profile.about_me[:about_me_truncate_length] + ellipsis_if_too_long,\n \"profile_image\": profile.profile_image_url,\n \"follow_state\": True\n if profile in request_profile.following.all()\n else False,\n \"request_profile\": request_profile.first_name,\n }\n return data\n\n def followers(self, profile):\n return [self.chip_summarize(follower) for follower in profile.followers.all()]\n\n def following(self, profile):\n return [self.chip_summarize(following) for following in profile.following.all()]\n\n\nprofile_upload_path = PathAndRename(\"\")\n\n\nclass Profile(models.Model):\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n first_name = models.CharField(max_length=63, blank=False)\n last_name = models.CharField(max_length=63, blank=False)\n about_me = models.CharField(max_length=511, blank=True)\n\n categories = models.ManyToManyField(\n Category, related_name=\"user_categories\", symmetrical=False\n )\n tags = TaggableManager()\n\n followers = models.ManyToManyField(\n \"self\", related_name=\"follower\", symmetrical=False\n )\n following = models.ManyToManyField(\n \"self\", related_name=\"followings\", symmetrical=False\n )\n\n is_verified = models.BooleanField(default=False)\n full_profile = models.BooleanField(default=False)\n\n objects = ProfileManager()\n profile_image = models.ImageField(\n upload_to=profile_upload_path, blank=True, null=True\n )\n profile_image_thumb = models.ImageField(\n upload_to=profile_upload_path, blank=True, null=True\n )\n\n @property\n def full_name(self):\n \"\"\"Returns the person's full name.\"\"\"\n\n return f\"{self.first_name} {self.last_name}\"\n\n @property\n def profile_image_url(self):\n \"\"\"Return placeholder profile image if user didn't upload one\"\"\"\n\n if self.profile_image:\n file_exists = default_storage.exists(\n os.path.join(settings.MEDIA_ROOT, self.profile_image.name)\n )\n if file_exists:\n return self.profile_image.url\n\n return \"/static/img/no_image_md.png\"\n\n @property\n def profile_image_thumb_url(self):\n \"\"\"Return placeholder profile image if user didn't upload one\"\"\"\n\n if self.profile_image_thumb:\n file_exists = default_storage.exists(\n os.path.join(settings.MEDIA_ROOT, self.profile_image_thumb.name)\n )\n if file_exists:\n return self.profile_image_thumb.url\n\n return \"/static/img/no_image_md.png\"\n\n def __init__(self, *args, **kwargs):\n super(Profile, self).__init__(*args, **kwargs)\n\n def save(self, *args, **kwargs):\n \"\"\" Image crop/resize and thumbnail creation \"\"\"\n\n # New Profile image --\n if self.profile_image:\n self.resize_profile_image()\n\n self.full_profile = self.is_full_profile()\n\n super(Profile, self).save(*args, **kwargs)\n\n def resize_profile_image(self):\n \"\"\"\n Resizes and crops the user uploaded image and creates a thumbnail version of it\n \"\"\"\n profile_image_field = self.profile_image\n image_file = io.StringIO(profile_image_field.read())\n profile_image = Image.open(image_file)\n profile_image.load()\n\n # Resize image\n profile_image = ImageOps.fit(\n profile_image, PROFILE_IMG_SIZE, Image.ANTIALIAS, centering=(0.5, 0.5)\n )\n\n # Convert to JPG image format with white background\n if profile_image.mode not in (\"L\", \"RGB\"):\n white_bg_img = 
Image.new(\"RGB\", PROFILE_IMG_SIZE, WHITE_BG)\n white_bg_img.paste(profile_image, mask=profile_image.split()[3])\n profile_image = white_bg_img\n\n # Save new cropped image\n tmp_image_file = io.StringIO()\n profile_image.save(tmp_image_file, \"JPEG\", quality=90)\n tmp_image_file.seek(0)\n self.profile_image = InMemoryUploadedFile(\n tmp_image_file,\n \"ImageField\",\n self.profile_image.name,\n \"image/jpeg\",\n tmp_image_file.len,\n None,\n )\n # Make a Thumbnail Image for the new resized image\n thumb_image = profile_image.copy()\n thumb_image.thumbnail(PROFILE_IMG_THUMB_SIZE, resample=Image.ANTIALIAS)\n tmp_image_file = io.StringIO()\n thumb_image.save(tmp_image_file, \"JPEG\", quality=90)\n tmp_image_file.seek(0)\n self.profile_image_thumb = InMemoryUploadedFile(\n tmp_image_file,\n \"ImageField\",\n self.profile_image.name,\n \"image/jpeg\",\n tmp_image_file.len,\n None,\n )\n\n def is_full_profile(self):\n if self.first_name and self.last_name:\n return True\n else:\n return False\n", "path": "project/accounts/models.py"}]} |
gh_patches_debug_102 | rasdani/github-patches | git_diff | python-discord__site-1232 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Show font awesome icons in development
<details><summary> Old issue description</summary>
The site uses premium icons, but not all icons we use are premium.
Sometimes it is helpful for us to get a sense of how the icons look like, for free icons, when developing locally. This is especially so in PRs that involve colors or adding icons, such as #1210. My workaround is to change the `fontawesome_token` setting myself, but this is annoying when having to commit multiple changes together (especially when a change in settings.py is involved as well) -- to stage by-patch.
I propose that we add a `FONTAWESOME_TOKEN` to the envs, and then use that value in `settings.py` with the fallback being our actual font awesome token for production.
Files to be modified:
- `settings.py`
- Docs for contributing to site
- Example `.env`
- Instructions to how to create your kit on font awesome
---
</details>
**Problem**: We've surpassed the usage limits for the font awesome kit we've set in settings.py, and we need a way for icons to show up consistently in both production and local environments.
**Solutions**: Starting from lemon's first comment.
Further explanation on wookie's suggestion to use the font awesome django plugin:
https://discord.com/channels/267624335836053506/635950537262759947/1202436500856512583
> I meant <https://fontawesome.com/docs/web/use-with/python-django>, which would be a drop in replacement. It just bundles all the js/css/fonts into Django's static files so all that stuff would be served by us and we don't need to worry about kits/tokens/their cdn limits etc. (though the django plugin isn't really necessary for that, it could be better to just copy the files we need into the repo ourself)
>
> Trying to manage each icon we want individually sounds like a bit more of a pain, the current way the icons are packaged using css and fonts is quite convenient, so I'm not sure if it would be worth adding complication just for a bit of a performance increase. I don't know exactly how that would work though so I can't really tell.
--- END ISSUE ---
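
For orientation, the workaround floated in the old issue description — reading the kit id from a `FONTAWESOME_TOKEN` environment variable with the production value as a fallback — would only touch the `BULMA_SETTINGS` block shown later in `pydis_site/settings.py`. A minimal sketch of that idea, assuming the module's existing django-environ `env` object; the variable name comes from the issue proposal and is not an existing setting:

```python
import environ

# Sketch only — mirrors the FONTAWESOME_TOKEN idea from the old issue
# description; it is not the change that was ultimately merged.

# 1) extend the existing django-environ declaration near the top of settings.py
env = environ.Env(
    # ...existing entries such as DEBUG, SITE_DSN, etc. ...
    FONTAWESOME_TOKEN=(str, "ff22cb6f41"),  # fall back to the production kit id
)

# 2) read the value inside BULMA_SETTINGS instead of hard-coding it
BULMA_SETTINGS = {
    # ...existing "variables", "dark_variables" and "extensions" keys...
    "fontawesome_token": env("FONTAWESOME_TOKEN"),
}
```

Note that the accepted patch (see the golden diff further down) goes the other way and simply drops the `fontawesome_token` entry rather than making it configurable.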
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pydis_site/settings.py`
Content:
```
1 """
2 Django settings for pydis_site project.
3
4 Generated by 'django-admin startproject' using Django 2.1.
5
6 For more information on this file, see
7 https://docs.djangoproject.com/en/2.1/topics/settings/
8
9 For the full list of settings and their values, see
10 https://docs.djangoproject.com/en/2.1/ref/settings/
11 """
12
13 import logging
14 import os
15 import secrets
16 import sys
17 import warnings
18 from pathlib import Path
19 from socket import gethostbyname, gethostname
20
21 import environ
22 import sentry_sdk
23 from sentry_sdk.integrations.logging import LoggingIntegration
24 from sentry_sdk.integrations.django import DjangoIntegration
25
26 env = environ.Env(
27 DEBUG=(bool, False),
28 SITE_DSN=(str, ""),
29 BUILDING_DOCKER=(bool, False),
30 STATIC_BUILD=(bool, False),
31 GIT_SHA=(str, 'development'),
32 TIMEOUT_PERIOD=(int, 5),
33 GITHUB_TOKEN=(str, None),
34 GITHUB_APP_ID=(str, None),
35 GITHUB_APP_KEY=(str, None),
36 )
37
38 GIT_SHA = env("GIT_SHA")
39 GITHUB_API = "https://api.github.com"
40 GITHUB_TOKEN = env("GITHUB_TOKEN")
41 GITHUB_APP_ID = env("GITHUB_APP_ID")
42 GITHUB_APP_KEY = env("GITHUB_APP_KEY")
43 GITHUB_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
44 """The datetime string format GitHub uses."""
45
46 STATIC_BUILD: bool = env("STATIC_BUILD")
47
48 if GITHUB_APP_KEY and (key_file := Path(GITHUB_APP_KEY)).is_file():
49 # Allow the OAuth key to be loaded from a file
50 GITHUB_APP_KEY = key_file.read_text(encoding="utf-8")
51
52 if not STATIC_BUILD:
53 sentry_sdk.init(
54 dsn=env('SITE_DSN'),
55 integrations=[DjangoIntegration(), LoggingIntegration(level=logging.DEBUG, event_level=logging.ERROR)],
56 send_default_pii=True,
57 release=f"site@{GIT_SHA}",
58 profiles_sample_rate=1.0,
59 enable_tracing=True,
60 enable_db_query_source=True,
61 db_query_source_threshold_ms=100, # Queries slower that 100ms will include the source in the event
62 )
63
64 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
65 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
66 DEBUG = env('DEBUG')
67
68 # Quick-start development settings - unsuitable for production
69 # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
70
71 # SECURITY WARNING: keep the secret key used in production secret!
72 if DEBUG:
73 ALLOWED_HOSTS = env.list('ALLOWED_HOSTS', default=['*'])
74 SECRET_KEY = "yellow polkadot bikini" # noqa: S105
75
76 # Prevent verbose warnings emitted when passing a non-timezone aware
77 # datetime object to the database, whilst we have time zone support
78 # active. See the Django documentation for more details:
79 # https://docs.djangoproject.com/en/dev/topics/i18n/timezones/
80 warnings.filterwarnings(
81 'error', r"DateTimeField .* received a naive datetime",
82 RuntimeWarning, r'django\.db\.models\.fields',
83 )
84
85 elif 'CI' in os.environ:
86 ALLOWED_HOSTS = ['*']
87 SECRET_KEY = secrets.token_urlsafe(32)
88
89 # See above. We run with `CI=true`, but debug unset in GitHub Actions,
90 # so we also want to filter it there.
91 warnings.filterwarnings(
92 'error', r"DateTimeField .* received a naive datetime",
93 RuntimeWarning, r'django\.db\.models\.fields',
94 )
95
96 else:
97 ALLOWED_HOSTS = env.list(
98 'ALLOWED_HOSTS',
99 default=[
100 'www.pythondiscord.com',
101 'pythondiscord.com',
102 gethostname(),
103 gethostbyname(gethostname()),
104 'site.default.svc.cluster.local',
105 ],
106 )
107 SECRET_KEY = env('SECRET_KEY')
108
109 # Application definition
110 NON_STATIC_APPS = [
111 'pydis_site.apps.api',
112 'pydis_site.apps.staff',
113 ] if not STATIC_BUILD else []
114
115 INSTALLED_APPS = [
116 *NON_STATIC_APPS,
117 'pydis_site.apps.home',
118 'pydis_site.apps.resources',
119 'pydis_site.apps.content',
120 'pydis_site.apps.events',
121 'pydis_site.apps.redirect',
122
123 'django.contrib.admin',
124 'django.contrib.auth',
125 'django.contrib.contenttypes',
126 'django.contrib.sessions',
127 'django.contrib.messages',
128 'django.contrib.sites',
129 'django.contrib.staticfiles',
130
131 'django_filters',
132 'django_simple_bulma',
133 'rest_framework',
134 'rest_framework.authtoken',
135
136 'django_distill',
137 ]
138
139 if not env("BUILDING_DOCKER"):
140 INSTALLED_APPS.append("django_prometheus")
141
142 if STATIC_BUILD:
143 # The only middleware required during static builds
144 MIDDLEWARE = [
145 'django.contrib.sessions.middleware.SessionMiddleware',
146 'django.contrib.auth.middleware.AuthenticationMiddleware',
147 'django.contrib.messages.middleware.MessageMiddleware',
148 ]
149 else:
150 # Ensure that Prometheus middlewares are first and last here.
151 MIDDLEWARE = [
152 'django_prometheus.middleware.PrometheusBeforeMiddleware',
153
154 'django.middleware.security.SecurityMiddleware',
155 'whitenoise.middleware.WhiteNoiseMiddleware',
156 'django.contrib.sessions.middleware.SessionMiddleware',
157 'django.middleware.common.CommonMiddleware',
158 'django.middleware.csrf.CsrfViewMiddleware',
159 'django.contrib.auth.middleware.AuthenticationMiddleware',
160 'django.contrib.messages.middleware.MessageMiddleware',
161 'django.middleware.clickjacking.XFrameOptionsMiddleware',
162
163 'django_prometheus.middleware.PrometheusAfterMiddleware'
164 ]
165
166 ROOT_URLCONF = 'pydis_site.urls'
167
168 TEMPLATES = [
169 {
170 'BACKEND': 'django.template.backends.django.DjangoTemplates',
171 'DIRS': [os.path.join(BASE_DIR, 'pydis_site', 'templates')],
172 'APP_DIRS': True,
173 'OPTIONS': {
174 'context_processors': [
175 'django.template.context_processors.debug',
176 'django.template.context_processors.request',
177 'django.contrib.auth.context_processors.auth',
178 'django.contrib.messages.context_processors.messages',
179 "pydis_site.context_processors.git_sha_processor"
180 ],
181 },
182 },
183 ]
184
185 WSGI_APPLICATION = 'pydis_site.wsgi.application'
186
187 # Database
188 # https://docs.djangoproject.com/en/2.1/ref/settings/#databases
189
190 DATABASES = {
191 'default': env.db(),
192 'metricity': env.db('METRICITY_DB_URL'),
193 } if not STATIC_BUILD else {}
194
195 # Password validation
196 # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
197
198 AUTH_PASSWORD_VALIDATORS = [
199 {
200 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
201 },
202 {
203 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
204 },
205 {
206 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
207 },
208 {
209 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
210 },
211 ]
212
213 # Internationalization
214 # https://docs.djangoproject.com/en/2.1/topics/i18n/
215 LANGUAGE_CODE = 'en-us'
216 TIME_ZONE = 'UTC'
217 USE_I18N = True
218 USE_TZ = True
219
220 # Static files (CSS, JavaScript, Images)
221 # https://docs.djangoproject.com/en/2.1/howto/static-files/
222
223 STATIC_URL = '/static/'
224 STATICFILES_DIRS = [os.path.join(BASE_DIR, 'pydis_site', 'static')]
225 STATIC_ROOT = env('STATIC_ROOT', default='/app/staticfiles')
226
227 STATICFILES_FINDERS = [
228 'django.contrib.staticfiles.finders.FileSystemFinder',
229 'django.contrib.staticfiles.finders.AppDirectoriesFinder',
230
231 'django_simple_bulma.finders.SimpleBulmaFinder',
232 ]
233
234 if DEBUG:
235 PARENT_HOST = env('PARENT_HOST', default='pythondiscord.local:8000')
236
237 if ":" in PARENT_HOST:
238 ALLOWED_HOSTS.append(PARENT_HOST.split(":", 1)[0])
239 else:
240 ALLOWED_HOSTS.append(PARENT_HOST)
241 else:
242 PARENT_HOST = env('PARENT_HOST', default='pythondiscord.com')
243
244 # Django Model Configuration
245 DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
246
247 # Django REST framework
248 # https://www.django-rest-framework.org
249 REST_FRAMEWORK = {
250 'DEFAULT_AUTHENTICATION_CLASSES': (
251 'rest_framework.authentication.TokenAuthentication',
252 ),
253 'DEFAULT_PERMISSION_CLASSES': (
254 'rest_framework.permissions.DjangoModelPermissions',
255 ),
256 'TEST_REQUEST_DEFAULT_FORMAT': 'json'
257 }
258
259 # Logging
260 # https://docs.djangoproject.com/en/2.1/topics/logging/
261 LOGGING = {
262 'version': 1,
263 'disable_existing_loggers': False,
264 'formatters': {
265 'verbose': {
266 'format': (
267 '%(asctime)s | %(process)d:%(thread)d | %(module)s | %(levelname)-8s | %(message)s'
268 )
269 }
270 },
271 'handlers': {
272 'console': {
273 'class': 'logging.StreamHandler'
274 }
275 },
276 'loggers': {
277 'django': {
278 'handlers': ['console'],
279 'propagate': True,
280 'level': env(
281 'LOG_LEVEL',
282 default=(
283 # If there is no explicit `LOG_LEVEL` set,
284 # use `DEBUG` if we're running in debug mode but not
285 # testing. Use `ERROR` if we're running tests, else
286 # default to using `WARN`.
287 'INFO'
288 if DEBUG and 'test' not in sys.argv
289 else (
290 'ERROR'
291 if 'test' in sys.argv
292 else 'WARN'
293 )
294 )
295 )
296 }
297 }
298 }
299
300 # Custom settings for django-simple-bulma
301 BULMA_SETTINGS = {
302 "variables": {
303 "primary": "#7289DA", # PyDis blurple
304 "green": "#32ac66", # Colour picked after Discord discussion
305 "turquoise": "#7289DA", # Blurple, because Bulma uses this regardless of `primary` above
306 "blue": "#2482c1", # Colour picked after Discord discussion
307 "cyan": "#2482c1", # Colour picked after Discord discussion (matches the blue)
308 "purple": "#aa55e4", # Apparently unused, but changed for consistency
309 "red": "#d63852", # Colour picked after Discord discussion
310
311 "link": "$primary",
312
313 "dimensions": "16 24 32 48 64 96 128 256 512", # Possible image dimensions
314 "navbar-height": "4.75rem",
315 "footer-padding": "1rem 1.5rem 1rem",
316 "tooltip-max-width": "30rem",
317 },
318 "dark_variables": {
319 "primary": "#5365A4", # A darker PyDis blurple
320 "warning": "#4B4636",
321 "warning-invert": "#FFFFFF",
322 "primary-dark": "#EFF1FB", # Bulma's primary-light
323 "primary-light": "#2B3660",
324 "success-dark": "#EFFAF5", # Bulma's success-light
325 "success-light": "#214133",
326 "danger-dark": "#FEECF0", # Bulma's danger-light
327 "danger-light": "#4C1822",
328 "info-dark": "#EFF5FB", # Bulma's info-light
329 "info-light": "#254056",
330
331 "body-background-color": "#252629",
332
333 "white": "#2C2F33",
334 "white-bis": "#23272A ",
335 "white-ter": "#36393F",
336 "light": "$white",
337
338 "black": "#F7F7F7",
339 "black-bis": "#F2F2F2",
340 "black-ter": "#E6E6E6",
341 "dark": "$black",
342
343 "grey-darker": "#303032",
344
345 "text": "#F4F4F4",
346 "text-light": "#F7F7F7",
347 "text-strong": "#FEFEFE",
348
349 "link": "#99B0FF", # A brighter PyDis blurple
350 "link-hover": "#FFFFFF",
351 "link-focus": "$link-hover",
352 "link-active": "$link-hover",
353
354 "code": "#FF7990", # Adjusted to 4.5 contrast ratio per WCAG Level AA
355 "code-background": "#464951", # A graduation lighter than the default for light theme
356
357 # Same as bulma, adjusted for dark mode
358 "shadow": "0 0.5em 1em -0.125em rgba(0, 0, 0, 0.3), 0 0px 0 1px rgba(0, 0, 0, 0.13)",
359 "border": "#4E4F51",
360 "border-light": "#313233",
361
362 # Use the same sizes
363 "dimensions": "16 24 32 48 64 96 128 256 512",
364 "navbar-height": "4.75rem",
365 "footer-padding": "1rem 1.5rem 1rem",
366 "tooltip-max-width": "30rem",
367 },
368 "extensions": [
369 "bulma-dropdown",
370 "bulma-navbar-burger",
371 ],
372 "fontawesome_token": "ff22cb6f41",
373 }
374
375 # Information about site repository
376 SITE_REPOSITORY_OWNER = "python-discord"
377 SITE_REPOSITORY_NAME = "site"
378 SITE_REPOSITORY_BRANCH = "master"
379
380 # Path for events pages
381 EVENTS_PAGES_PATH = Path(BASE_DIR, "pydis_site", "templates", "events", "pages")
382
383 # Path for content pages
384 CONTENT_PAGES_PATH = Path(BASE_DIR, "pydis_site", "apps", "content", "resources")
385
386 # Path for redirection links
387 REDIRECTIONS_PATH = Path(BASE_DIR, "pydis_site", "apps", "redirect", "redirects.yaml")
388
389 # How long to wait for synchronous requests before timing out
390 TIMEOUT_PERIOD = env("TIMEOUT_PERIOD")
391
392 # Source files url for 'Edit on GitHub' link on content articles
393 CONTENT_SRC_URL = (
394 "https://github.com/python-discord/site/tree/main/pydis_site/apps/content/resources/"
395 )
396
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pydis_site/settings.py b/pydis_site/settings.py
--- a/pydis_site/settings.py
+++ b/pydis_site/settings.py
@@ -369,7 +369,6 @@
"bulma-dropdown",
"bulma-navbar-burger",
],
- "fontawesome_token": "ff22cb6f41",
}
# Information about site repository
| {"golden_diff": "diff --git a/pydis_site/settings.py b/pydis_site/settings.py\n--- a/pydis_site/settings.py\n+++ b/pydis_site/settings.py\n@@ -369,7 +369,6 @@\n \"bulma-dropdown\",\n \"bulma-navbar-burger\",\n ],\n- \"fontawesome_token\": \"ff22cb6f41\",\n }\n \n # Information about site repository\n", "issue": "Show font awesome icons in development\n<details><summary> Old issue description</summary>\r\n\r\nThe site uses premium icons, but not all icons we use are premium.\r\n\r\nSometimes it is helpful for us to get a sense of how the icons look like, for free icons, when developing locally. This is especially so in PRs that involve colors or adding icons, such as #1210. My workaround is to change the `fontawesome_token` setting myself, but this is annoying when having to commit multiple changes together (especially when a change in settings.py is involved as well) -- to stage by-patch.\r\n\r\nI propose that we add a `FONTAWESOME_TOKEN` to the envs, and then use that value in `settings.py` with the fallback being our actual font awesome token for production.\r\n\r\nFiles to be modified:\r\n- `settings.py`\r\n- Docs for contributing to site\r\n - Example `.env`\r\n - Instructions to how to create your kit on font awesome\r\n\r\n---\r\n\r\n</details>\r\n\r\n**Problem**: We've surpassed the usage limits for the font awesome kit we've set in settings.py, and we need a way for icons to show up consistently in both production and local environments.\r\n\r\n**Solutions**: Starting from lemon's first comment.\r\n\r\nFurther explanation on wookie's suggestion to use the font awesome django plugin:\r\nhttps://discord.com/channels/267624335836053506/635950537262759947/1202436500856512583\r\n> I meant <https://fontawesome.com/docs/web/use-with/python-django>, which would be a drop in replacement. It just bundles all the js/css/fonts into Django's static files so all that stuff would be served by us and we don't need to worry about kits/tokens/their cdn limits etc. (though the django plugin isn't really necessary for that, it could be better to just copy the files we need into the repo ourself)\r\n> \r\n> Trying to manage each icon we want individually sounds like a bit more of a pain, the current way the icons are packaged using css and fonts is quite convenient, so I'm not sure if it would be worth adding complication just for a bit of a performance increase. 
I don't know exactly how that would work though so I can't really tell.\n", "before_files": [{"content": "\"\"\"\nDjango settings for pydis_site project.\n\nGenerated by 'django-admin startproject' using Django 2.1.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/2.1/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/2.1/ref/settings/\n\"\"\"\n\nimport logging\nimport os\nimport secrets\nimport sys\nimport warnings\nfrom pathlib import Path\nfrom socket import gethostbyname, gethostname\n\nimport environ\nimport sentry_sdk\nfrom sentry_sdk.integrations.logging import LoggingIntegration\nfrom sentry_sdk.integrations.django import DjangoIntegration\n\nenv = environ.Env(\n DEBUG=(bool, False),\n SITE_DSN=(str, \"\"),\n BUILDING_DOCKER=(bool, False),\n STATIC_BUILD=(bool, False),\n GIT_SHA=(str, 'development'),\n TIMEOUT_PERIOD=(int, 5),\n GITHUB_TOKEN=(str, None),\n GITHUB_APP_ID=(str, None),\n GITHUB_APP_KEY=(str, None),\n)\n\nGIT_SHA = env(\"GIT_SHA\")\nGITHUB_API = \"https://api.github.com\"\nGITHUB_TOKEN = env(\"GITHUB_TOKEN\")\nGITHUB_APP_ID = env(\"GITHUB_APP_ID\")\nGITHUB_APP_KEY = env(\"GITHUB_APP_KEY\")\nGITHUB_TIMESTAMP_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\"\n\"\"\"The datetime string format GitHub uses.\"\"\"\n\nSTATIC_BUILD: bool = env(\"STATIC_BUILD\")\n\nif GITHUB_APP_KEY and (key_file := Path(GITHUB_APP_KEY)).is_file():\n # Allow the OAuth key to be loaded from a file\n GITHUB_APP_KEY = key_file.read_text(encoding=\"utf-8\")\n\nif not STATIC_BUILD:\n sentry_sdk.init(\n dsn=env('SITE_DSN'),\n integrations=[DjangoIntegration(), LoggingIntegration(level=logging.DEBUG, event_level=logging.ERROR)],\n send_default_pii=True,\n release=f\"site@{GIT_SHA}\",\n profiles_sample_rate=1.0,\n enable_tracing=True,\n enable_db_query_source=True,\n db_query_source_threshold_ms=100, # Queries slower that 100ms will include the source in the event\n )\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nDEBUG = env('DEBUG')\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nif DEBUG:\n ALLOWED_HOSTS = env.list('ALLOWED_HOSTS', default=['*'])\n SECRET_KEY = \"yellow polkadot bikini\" # noqa: S105\n\n # Prevent verbose warnings emitted when passing a non-timezone aware\n # datetime object to the database, whilst we have time zone support\n # active. See the Django documentation for more details:\n # https://docs.djangoproject.com/en/dev/topics/i18n/timezones/\n warnings.filterwarnings(\n 'error', r\"DateTimeField .* received a naive datetime\",\n RuntimeWarning, r'django\\.db\\.models\\.fields',\n )\n\nelif 'CI' in os.environ:\n ALLOWED_HOSTS = ['*']\n SECRET_KEY = secrets.token_urlsafe(32)\n\n # See above. 
We run with `CI=true`, but debug unset in GitHub Actions,\n # so we also want to filter it there.\n warnings.filterwarnings(\n 'error', r\"DateTimeField .* received a naive datetime\",\n RuntimeWarning, r'django\\.db\\.models\\.fields',\n )\n\nelse:\n ALLOWED_HOSTS = env.list(\n 'ALLOWED_HOSTS',\n default=[\n 'www.pythondiscord.com',\n 'pythondiscord.com',\n gethostname(),\n gethostbyname(gethostname()),\n 'site.default.svc.cluster.local',\n ],\n )\n SECRET_KEY = env('SECRET_KEY')\n\n# Application definition\nNON_STATIC_APPS = [\n 'pydis_site.apps.api',\n 'pydis_site.apps.staff',\n] if not STATIC_BUILD else []\n\nINSTALLED_APPS = [\n *NON_STATIC_APPS,\n 'pydis_site.apps.home',\n 'pydis_site.apps.resources',\n 'pydis_site.apps.content',\n 'pydis_site.apps.events',\n 'pydis_site.apps.redirect',\n\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.sites',\n 'django.contrib.staticfiles',\n\n 'django_filters',\n 'django_simple_bulma',\n 'rest_framework',\n 'rest_framework.authtoken',\n\n 'django_distill',\n]\n\nif not env(\"BUILDING_DOCKER\"):\n INSTALLED_APPS.append(\"django_prometheus\")\n\nif STATIC_BUILD:\n # The only middleware required during static builds\n MIDDLEWARE = [\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n ]\nelse:\n # Ensure that Prometheus middlewares are first and last here.\n MIDDLEWARE = [\n 'django_prometheus.middleware.PrometheusBeforeMiddleware',\n\n 'django.middleware.security.SecurityMiddleware',\n 'whitenoise.middleware.WhiteNoiseMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n\n 'django_prometheus.middleware.PrometheusAfterMiddleware'\n ]\n\nROOT_URLCONF = 'pydis_site.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [os.path.join(BASE_DIR, 'pydis_site', 'templates')],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n \"pydis_site.context_processors.git_sha_processor\"\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'pydis_site.wsgi.application'\n\n# Database\n# https://docs.djangoproject.com/en/2.1/ref/settings/#databases\n\nDATABASES = {\n 'default': env.db(),\n 'metricity': env.db('METRICITY_DB_URL'),\n} if not STATIC_BUILD else {}\n\n# Password validation\n# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n\n# Internationalization\n# https://docs.djangoproject.com/en/2.1/topics/i18n/\nLANGUAGE_CODE = 'en-us'\nTIME_ZONE = 'UTC'\nUSE_I18N = True\nUSE_TZ = True\n\n# Static 
files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/2.1/howto/static-files/\n\nSTATIC_URL = '/static/'\nSTATICFILES_DIRS = [os.path.join(BASE_DIR, 'pydis_site', 'static')]\nSTATIC_ROOT = env('STATIC_ROOT', default='/app/staticfiles')\n\nSTATICFILES_FINDERS = [\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n\n 'django_simple_bulma.finders.SimpleBulmaFinder',\n]\n\nif DEBUG:\n PARENT_HOST = env('PARENT_HOST', default='pythondiscord.local:8000')\n\n if \":\" in PARENT_HOST:\n ALLOWED_HOSTS.append(PARENT_HOST.split(\":\", 1)[0])\n else:\n ALLOWED_HOSTS.append(PARENT_HOST)\nelse:\n PARENT_HOST = env('PARENT_HOST', default='pythondiscord.com')\n\n# Django Model Configuration\nDEFAULT_AUTO_FIELD = \"django.db.models.AutoField\"\n\n# Django REST framework\n# https://www.django-rest-framework.org\nREST_FRAMEWORK = {\n 'DEFAULT_AUTHENTICATION_CLASSES': (\n 'rest_framework.authentication.TokenAuthentication',\n ),\n 'DEFAULT_PERMISSION_CLASSES': (\n 'rest_framework.permissions.DjangoModelPermissions',\n ),\n 'TEST_REQUEST_DEFAULT_FORMAT': 'json'\n}\n\n# Logging\n# https://docs.djangoproject.com/en/2.1/topics/logging/\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'formatters': {\n 'verbose': {\n 'format': (\n '%(asctime)s | %(process)d:%(thread)d | %(module)s | %(levelname)-8s | %(message)s'\n )\n }\n },\n 'handlers': {\n 'console': {\n 'class': 'logging.StreamHandler'\n }\n },\n 'loggers': {\n 'django': {\n 'handlers': ['console'],\n 'propagate': True,\n 'level': env(\n 'LOG_LEVEL',\n default=(\n # If there is no explicit `LOG_LEVEL` set,\n # use `DEBUG` if we're running in debug mode but not\n # testing. Use `ERROR` if we're running tests, else\n # default to using `WARN`.\n 'INFO'\n if DEBUG and 'test' not in sys.argv\n else (\n 'ERROR'\n if 'test' in sys.argv\n else 'WARN'\n )\n )\n )\n }\n }\n}\n\n# Custom settings for django-simple-bulma\nBULMA_SETTINGS = {\n \"variables\": {\n \"primary\": \"#7289DA\", # PyDis blurple\n \"green\": \"#32ac66\", # Colour picked after Discord discussion\n \"turquoise\": \"#7289DA\", # Blurple, because Bulma uses this regardless of `primary` above\n \"blue\": \"#2482c1\", # Colour picked after Discord discussion\n \"cyan\": \"#2482c1\", # Colour picked after Discord discussion (matches the blue)\n \"purple\": \"#aa55e4\", # Apparently unused, but changed for consistency\n \"red\": \"#d63852\", # Colour picked after Discord discussion\n\n \"link\": \"$primary\",\n\n \"dimensions\": \"16 24 32 48 64 96 128 256 512\", # Possible image dimensions\n \"navbar-height\": \"4.75rem\",\n \"footer-padding\": \"1rem 1.5rem 1rem\",\n \"tooltip-max-width\": \"30rem\",\n },\n \"dark_variables\": {\n \"primary\": \"#5365A4\", # A darker PyDis blurple\n \"warning\": \"#4B4636\",\n \"warning-invert\": \"#FFFFFF\",\n \"primary-dark\": \"#EFF1FB\", # Bulma's primary-light\n \"primary-light\": \"#2B3660\",\n \"success-dark\": \"#EFFAF5\", # Bulma's success-light\n \"success-light\": \"#214133\",\n \"danger-dark\": \"#FEECF0\", # Bulma's danger-light\n \"danger-light\": \"#4C1822\",\n \"info-dark\": \"#EFF5FB\", # Bulma's info-light\n \"info-light\": \"#254056\",\n\n \"body-background-color\": \"#252629\",\n\n \"white\": \"#2C2F33\",\n \"white-bis\": \"#23272A \",\n \"white-ter\": \"#36393F\",\n \"light\": \"$white\",\n\n \"black\": \"#F7F7F7\",\n \"black-bis\": \"#F2F2F2\",\n \"black-ter\": \"#E6E6E6\",\n \"dark\": \"$black\",\n\n \"grey-darker\": \"#303032\",\n\n \"text\": 
\"#F4F4F4\",\n \"text-light\": \"#F7F7F7\",\n \"text-strong\": \"#FEFEFE\",\n\n \"link\": \"#99B0FF\", # A brighter PyDis blurple\n \"link-hover\": \"#FFFFFF\",\n \"link-focus\": \"$link-hover\",\n \"link-active\": \"$link-hover\",\n\n \"code\": \"#FF7990\", # Adjusted to 4.5 contrast ratio per WCAG Level AA\n \"code-background\": \"#464951\", # A graduation lighter than the default for light theme\n\n # Same as bulma, adjusted for dark mode\n \"shadow\": \"0 0.5em 1em -0.125em rgba(0, 0, 0, 0.3), 0 0px 0 1px rgba(0, 0, 0, 0.13)\",\n \"border\": \"#4E4F51\",\n \"border-light\": \"#313233\",\n\n # Use the same sizes\n \"dimensions\": \"16 24 32 48 64 96 128 256 512\",\n \"navbar-height\": \"4.75rem\",\n \"footer-padding\": \"1rem 1.5rem 1rem\",\n \"tooltip-max-width\": \"30rem\",\n },\n \"extensions\": [\n \"bulma-dropdown\",\n \"bulma-navbar-burger\",\n ],\n \"fontawesome_token\": \"ff22cb6f41\",\n}\n\n# Information about site repository\nSITE_REPOSITORY_OWNER = \"python-discord\"\nSITE_REPOSITORY_NAME = \"site\"\nSITE_REPOSITORY_BRANCH = \"master\"\n\n# Path for events pages\nEVENTS_PAGES_PATH = Path(BASE_DIR, \"pydis_site\", \"templates\", \"events\", \"pages\")\n\n# Path for content pages\nCONTENT_PAGES_PATH = Path(BASE_DIR, \"pydis_site\", \"apps\", \"content\", \"resources\")\n\n# Path for redirection links\nREDIRECTIONS_PATH = Path(BASE_DIR, \"pydis_site\", \"apps\", \"redirect\", \"redirects.yaml\")\n\n# How long to wait for synchronous requests before timing out\nTIMEOUT_PERIOD = env(\"TIMEOUT_PERIOD\")\n\n# Source files url for 'Edit on GitHub' link on content articles\nCONTENT_SRC_URL = (\n \"https://github.com/python-discord/site/tree/main/pydis_site/apps/content/resources/\"\n)\n", "path": "pydis_site/settings.py"}], "after_files": [{"content": "\"\"\"\nDjango settings for pydis_site project.\n\nGenerated by 'django-admin startproject' using Django 2.1.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/2.1/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/2.1/ref/settings/\n\"\"\"\n\nimport logging\nimport os\nimport secrets\nimport sys\nimport warnings\nfrom pathlib import Path\nfrom socket import gethostbyname, gethostname\n\nimport environ\nimport sentry_sdk\nfrom sentry_sdk.integrations.logging import LoggingIntegration\nfrom sentry_sdk.integrations.django import DjangoIntegration\n\nenv = environ.Env(\n DEBUG=(bool, False),\n SITE_DSN=(str, \"\"),\n BUILDING_DOCKER=(bool, False),\n STATIC_BUILD=(bool, False),\n GIT_SHA=(str, 'development'),\n TIMEOUT_PERIOD=(int, 5),\n GITHUB_TOKEN=(str, None),\n GITHUB_APP_ID=(str, None),\n GITHUB_APP_KEY=(str, None),\n)\n\nGIT_SHA = env(\"GIT_SHA\")\nGITHUB_API = \"https://api.github.com\"\nGITHUB_TOKEN = env(\"GITHUB_TOKEN\")\nGITHUB_APP_ID = env(\"GITHUB_APP_ID\")\nGITHUB_APP_KEY = env(\"GITHUB_APP_KEY\")\nGITHUB_TIMESTAMP_FORMAT = \"%Y-%m-%dT%H:%M:%SZ\"\n\"\"\"The datetime string format GitHub uses.\"\"\"\n\nSTATIC_BUILD: bool = env(\"STATIC_BUILD\")\n\nif GITHUB_APP_KEY and (key_file := Path(GITHUB_APP_KEY)).is_file():\n # Allow the OAuth key to be loaded from a file\n GITHUB_APP_KEY = key_file.read_text(encoding=\"utf-8\")\n\nif not STATIC_BUILD:\n sentry_sdk.init(\n dsn=env('SITE_DSN'),\n integrations=[DjangoIntegration(), LoggingIntegration(level=logging.DEBUG, event_level=logging.ERROR)],\n send_default_pii=True,\n release=f\"site@{GIT_SHA}\",\n profiles_sample_rate=1.0,\n enable_tracing=True,\n enable_db_query_source=True,\n 
db_query_source_threshold_ms=100, # Queries slower that 100ms will include the source in the event\n )\n\n# Build paths inside the project like this: os.path.join(BASE_DIR, ...)\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\nDEBUG = env('DEBUG')\n\n# Quick-start development settings - unsuitable for production\n# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/\n\n# SECURITY WARNING: keep the secret key used in production secret!\nif DEBUG:\n ALLOWED_HOSTS = env.list('ALLOWED_HOSTS', default=['*'])\n SECRET_KEY = \"yellow polkadot bikini\" # noqa: S105\n\n # Prevent verbose warnings emitted when passing a non-timezone aware\n # datetime object to the database, whilst we have time zone support\n # active. See the Django documentation for more details:\n # https://docs.djangoproject.com/en/dev/topics/i18n/timezones/\n warnings.filterwarnings(\n 'error', r\"DateTimeField .* received a naive datetime\",\n RuntimeWarning, r'django\\.db\\.models\\.fields',\n )\n\nelif 'CI' in os.environ:\n ALLOWED_HOSTS = ['*']\n SECRET_KEY = secrets.token_urlsafe(32)\n\n # See above. We run with `CI=true`, but debug unset in GitHub Actions,\n # so we also want to filter it there.\n warnings.filterwarnings(\n 'error', r\"DateTimeField .* received a naive datetime\",\n RuntimeWarning, r'django\\.db\\.models\\.fields',\n )\n\nelse:\n ALLOWED_HOSTS = env.list(\n 'ALLOWED_HOSTS',\n default=[\n 'www.pythondiscord.com',\n 'pythondiscord.com',\n gethostname(),\n gethostbyname(gethostname()),\n 'site.default.svc.cluster.local',\n ],\n )\n SECRET_KEY = env('SECRET_KEY')\n\n# Application definition\nNON_STATIC_APPS = [\n 'pydis_site.apps.api',\n 'pydis_site.apps.staff',\n] if not STATIC_BUILD else []\n\nINSTALLED_APPS = [\n *NON_STATIC_APPS,\n 'pydis_site.apps.home',\n 'pydis_site.apps.resources',\n 'pydis_site.apps.content',\n 'pydis_site.apps.events',\n 'pydis_site.apps.redirect',\n\n 'django.contrib.admin',\n 'django.contrib.auth',\n 'django.contrib.contenttypes',\n 'django.contrib.sessions',\n 'django.contrib.messages',\n 'django.contrib.sites',\n 'django.contrib.staticfiles',\n\n 'django_filters',\n 'django_simple_bulma',\n 'rest_framework',\n 'rest_framework.authtoken',\n\n 'django_distill',\n]\n\nif not env(\"BUILDING_DOCKER\"):\n INSTALLED_APPS.append(\"django_prometheus\")\n\nif STATIC_BUILD:\n # The only middleware required during static builds\n MIDDLEWARE = [\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n ]\nelse:\n # Ensure that Prometheus middlewares are first and last here.\n MIDDLEWARE = [\n 'django_prometheus.middleware.PrometheusBeforeMiddleware',\n\n 'django.middleware.security.SecurityMiddleware',\n 'whitenoise.middleware.WhiteNoiseMiddleware',\n 'django.contrib.sessions.middleware.SessionMiddleware',\n 'django.middleware.common.CommonMiddleware',\n 'django.middleware.csrf.CsrfViewMiddleware',\n 'django.contrib.auth.middleware.AuthenticationMiddleware',\n 'django.contrib.messages.middleware.MessageMiddleware',\n 'django.middleware.clickjacking.XFrameOptionsMiddleware',\n\n 'django_prometheus.middleware.PrometheusAfterMiddleware'\n ]\n\nROOT_URLCONF = 'pydis_site.urls'\n\nTEMPLATES = [\n {\n 'BACKEND': 'django.template.backends.django.DjangoTemplates',\n 'DIRS': [os.path.join(BASE_DIR, 'pydis_site', 'templates')],\n 'APP_DIRS': True,\n 'OPTIONS': {\n 'context_processors': [\n 'django.template.context_processors.debug',\n 
'django.template.context_processors.request',\n 'django.contrib.auth.context_processors.auth',\n 'django.contrib.messages.context_processors.messages',\n \"pydis_site.context_processors.git_sha_processor\"\n ],\n },\n },\n]\n\nWSGI_APPLICATION = 'pydis_site.wsgi.application'\n\n# Database\n# https://docs.djangoproject.com/en/2.1/ref/settings/#databases\n\nDATABASES = {\n 'default': env.db(),\n 'metricity': env.db('METRICITY_DB_URL'),\n} if not STATIC_BUILD else {}\n\n# Password validation\n# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators\n\nAUTH_PASSWORD_VALIDATORS = [\n {\n 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',\n },\n {\n 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',\n },\n]\n\n# Internationalization\n# https://docs.djangoproject.com/en/2.1/topics/i18n/\nLANGUAGE_CODE = 'en-us'\nTIME_ZONE = 'UTC'\nUSE_I18N = True\nUSE_TZ = True\n\n# Static files (CSS, JavaScript, Images)\n# https://docs.djangoproject.com/en/2.1/howto/static-files/\n\nSTATIC_URL = '/static/'\nSTATICFILES_DIRS = [os.path.join(BASE_DIR, 'pydis_site', 'static')]\nSTATIC_ROOT = env('STATIC_ROOT', default='/app/staticfiles')\n\nSTATICFILES_FINDERS = [\n 'django.contrib.staticfiles.finders.FileSystemFinder',\n 'django.contrib.staticfiles.finders.AppDirectoriesFinder',\n\n 'django_simple_bulma.finders.SimpleBulmaFinder',\n]\n\nif DEBUG:\n PARENT_HOST = env('PARENT_HOST', default='pythondiscord.local:8000')\n\n if \":\" in PARENT_HOST:\n ALLOWED_HOSTS.append(PARENT_HOST.split(\":\", 1)[0])\n else:\n ALLOWED_HOSTS.append(PARENT_HOST)\nelse:\n PARENT_HOST = env('PARENT_HOST', default='pythondiscord.com')\n\n# Django Model Configuration\nDEFAULT_AUTO_FIELD = \"django.db.models.AutoField\"\n\n# Django REST framework\n# https://www.django-rest-framework.org\nREST_FRAMEWORK = {\n 'DEFAULT_AUTHENTICATION_CLASSES': (\n 'rest_framework.authentication.TokenAuthentication',\n ),\n 'DEFAULT_PERMISSION_CLASSES': (\n 'rest_framework.permissions.DjangoModelPermissions',\n ),\n 'TEST_REQUEST_DEFAULT_FORMAT': 'json'\n}\n\n# Logging\n# https://docs.djangoproject.com/en/2.1/topics/logging/\nLOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'formatters': {\n 'verbose': {\n 'format': (\n '%(asctime)s | %(process)d:%(thread)d | %(module)s | %(levelname)-8s | %(message)s'\n )\n }\n },\n 'handlers': {\n 'console': {\n 'class': 'logging.StreamHandler'\n }\n },\n 'loggers': {\n 'django': {\n 'handlers': ['console'],\n 'propagate': True,\n 'level': env(\n 'LOG_LEVEL',\n default=(\n # If there is no explicit `LOG_LEVEL` set,\n # use `DEBUG` if we're running in debug mode but not\n # testing. 
Use `ERROR` if we're running tests, else\n # default to using `WARN`.\n 'INFO'\n if DEBUG and 'test' not in sys.argv\n else (\n 'ERROR'\n if 'test' in sys.argv\n else 'WARN'\n )\n )\n )\n }\n }\n}\n\n# Custom settings for django-simple-bulma\nBULMA_SETTINGS = {\n \"variables\": {\n \"primary\": \"#7289DA\", # PyDis blurple\n \"green\": \"#32ac66\", # Colour picked after Discord discussion\n \"turquoise\": \"#7289DA\", # Blurple, because Bulma uses this regardless of `primary` above\n \"blue\": \"#2482c1\", # Colour picked after Discord discussion\n \"cyan\": \"#2482c1\", # Colour picked after Discord discussion (matches the blue)\n \"purple\": \"#aa55e4\", # Apparently unused, but changed for consistency\n \"red\": \"#d63852\", # Colour picked after Discord discussion\n\n \"link\": \"$primary\",\n\n \"dimensions\": \"16 24 32 48 64 96 128 256 512\", # Possible image dimensions\n \"navbar-height\": \"4.75rem\",\n \"footer-padding\": \"1rem 1.5rem 1rem\",\n \"tooltip-max-width\": \"30rem\",\n },\n \"dark_variables\": {\n \"primary\": \"#5365A4\", # A darker PyDis blurple\n \"warning\": \"#4B4636\",\n \"warning-invert\": \"#FFFFFF\",\n \"primary-dark\": \"#EFF1FB\", # Bulma's primary-light\n \"primary-light\": \"#2B3660\",\n \"success-dark\": \"#EFFAF5\", # Bulma's success-light\n \"success-light\": \"#214133\",\n \"danger-dark\": \"#FEECF0\", # Bulma's danger-light\n \"danger-light\": \"#4C1822\",\n \"info-dark\": \"#EFF5FB\", # Bulma's info-light\n \"info-light\": \"#254056\",\n\n \"body-background-color\": \"#252629\",\n\n \"white\": \"#2C2F33\",\n \"white-bis\": \"#23272A \",\n \"white-ter\": \"#36393F\",\n \"light\": \"$white\",\n\n \"black\": \"#F7F7F7\",\n \"black-bis\": \"#F2F2F2\",\n \"black-ter\": \"#E6E6E6\",\n \"dark\": \"$black\",\n\n \"grey-darker\": \"#303032\",\n\n \"text\": \"#F4F4F4\",\n \"text-light\": \"#F7F7F7\",\n \"text-strong\": \"#FEFEFE\",\n\n \"link\": \"#99B0FF\", # A brighter PyDis blurple\n \"link-hover\": \"#FFFFFF\",\n \"link-focus\": \"$link-hover\",\n \"link-active\": \"$link-hover\",\n\n \"code\": \"#FF7990\", # Adjusted to 4.5 contrast ratio per WCAG Level AA\n \"code-background\": \"#464951\", # A graduation lighter than the default for light theme\n\n # Same as bulma, adjusted for dark mode\n \"shadow\": \"0 0.5em 1em -0.125em rgba(0, 0, 0, 0.3), 0 0px 0 1px rgba(0, 0, 0, 0.13)\",\n \"border\": \"#4E4F51\",\n \"border-light\": \"#313233\",\n\n # Use the same sizes\n \"dimensions\": \"16 24 32 48 64 96 128 256 512\",\n \"navbar-height\": \"4.75rem\",\n \"footer-padding\": \"1rem 1.5rem 1rem\",\n \"tooltip-max-width\": \"30rem\",\n },\n \"extensions\": [\n \"bulma-dropdown\",\n \"bulma-navbar-burger\",\n ],\n}\n\n# Information about site repository\nSITE_REPOSITORY_OWNER = \"python-discord\"\nSITE_REPOSITORY_NAME = \"site\"\nSITE_REPOSITORY_BRANCH = \"master\"\n\n# Path for events pages\nEVENTS_PAGES_PATH = Path(BASE_DIR, \"pydis_site\", \"templates\", \"events\", \"pages\")\n\n# Path for content pages\nCONTENT_PAGES_PATH = Path(BASE_DIR, \"pydis_site\", \"apps\", \"content\", \"resources\")\n\n# Path for redirection links\nREDIRECTIONS_PATH = Path(BASE_DIR, \"pydis_site\", \"apps\", \"redirect\", \"redirects.yaml\")\n\n# How long to wait for synchronous requests before timing out\nTIMEOUT_PERIOD = env(\"TIMEOUT_PERIOD\")\n\n# Source files url for 'Edit on GitHub' link on content articles\nCONTENT_SRC_URL = (\n \"https://github.com/python-discord/site/tree/main/pydis_site/apps/content/resources/\"\n)\n", "path": "pydis_site/settings.py"}]} |
gh_patches_debug_103 | rasdani/github-patches | git_diff | cisagov__manage.get.gov-114 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Setup new cloud.gov prototyping org
We recently signed an agreement for a more robust prototyping org with cloud.gov that will give us the ability to have multiple spaces and invite members from DHS.
- [ ] Setup org and make sure everyone who needs access has the appropriate access
- [ ] Setup spaces `unstable` and `staging` according to rules
- [ ] Change Github actions access to CD
- [ ] Replace org and space name in settings (actions, documentation, scripts, etc.)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/registrar/config/settings.py`
Content:
```
1 """
2 Django settings for .gov registrar project.
3
4 For more information on this file, see
5 https://docs.djangoproject.com/en/4.0/topics/settings/
6
7 For the full list of settings and their values, see
8 https://docs.djangoproject.com/en/4.0/ref/settings/
9
10 IF you'd like to see all of these settings in the running app:
11
12 ```shell
13 $ docker-compose exec app python manage.py shell
14 >>> from django.conf import settings
15 >>> dir(settings)
16 ```
17
18 """
19 import environs
20 from cfenv import AppEnv
21 from pathlib import Path
22
23 # # # ###
24 # Setup code goes here #
25 # # # ###
26
27 env = environs.Env()
28
29 # Get secrets from Cloud.gov user provided service, if exists
30 # If not, get secrets from environment variables
31 key_service = AppEnv().get_service(name="getgov-credentials")
32 if key_service and key_service.credentials:
33 secret = key_service.credentials.get
34 else:
35 secret = env
36
37 # # # ###
38 # Values obtained externally #
39 # # # ###
40
41 path = Path(__file__)
42
43 env_db_url = env.dj_db_url("DATABASE_URL")
44 env_debug = env.bool("DJANGO_DEBUG", default=False)
45 env_log_level = env.str("DJANGO_LOG_LEVEL", "DEBUG")
46
47 secret_key = secret("DJANGO_SECRET_KEY")
48
49 # region: Basic Django Config-----------------------------------------------###
50
51 # Build paths inside the project like this: BASE_DIR / "subdir".
52 BASE_DIR = path.resolve().parent.parent
53
54 # SECURITY WARNING: don't run with debug turned on in production!
55 DEBUG = env_debug
56
57
58 # Applications are modular pieces of code.
59 # They are provided by Django, by third-parties, or by yourself.
60 # Installing them here makes them available for execution.
61 # Do not access INSTALLED_APPS directly. Use `django.apps.apps` instead.
62 INSTALLED_APPS = [
63 # Django automatic admin interface reads metadata
64 # from database models to provide a quick, model-centric
65 # interface where trusted users can manage content
66 "django.contrib.admin",
67 # vv Required by django.contrib.admin vv
68 # the "user" model! *\o/*
69 "django.contrib.auth",
70 # generic interface for Django models
71 "django.contrib.contenttypes",
72 # required for CSRF protection and many other things
73 "django.contrib.sessions",
74 # framework for displaying messages to the user
75 "django.contrib.messages",
76 # ^^ Required by django.contrib.admin ^^
77 # collects static files from each of your applications
78 # (and any other places you specify) into a single location
79 # that can easily be served in production
80 "django.contrib.staticfiles",
81 # let's be sure to install our own application!
82 "registrar",
83 ]
84
85 # Middleware are routines for processing web requests.
86 # Adding them here turns them "on"; Django will perform the
87 # specified routines on each incoming request and outgoing response.
88 MIDDLEWARE = [
89 # django-allow-cidr: enable use of CIDR IP ranges in ALLOWED_HOSTS
90 "allow_cidr.middleware.AllowCIDRMiddleware",
91 # provide security enhancements to the request/response cycle
92 "django.middleware.security.SecurityMiddleware",
93 # store and retrieve arbitrary data on a per-site-visitor basis
94 "django.contrib.sessions.middleware.SessionMiddleware",
95 # add a few conveniences for perfectionists, see documentation
96 "django.middleware.common.CommonMiddleware",
97 # add protection against Cross Site Request Forgeries by adding
98 # hidden form fields to POST forms and checking requests for the correct value
99 "django.middleware.csrf.CsrfViewMiddleware",
100 # add `user` (the currently-logged-in user) to incoming HttpRequest objects
101 "django.contrib.auth.middleware.AuthenticationMiddleware",
102 # provide framework for displaying messages to the user, see documentation
103 "django.contrib.messages.middleware.MessageMiddleware",
104 # provide clickjacking protection via the X-Frame-Options header
105 "django.middleware.clickjacking.XFrameOptionsMiddleware",
106 # django-csp: enable use of Content-Security-Policy header
107 "csp.middleware.CSPMiddleware",
108 ]
109
110 # application object used by Django’s built-in servers (e.g. `runserver`)
111 WSGI_APPLICATION = "registrar.config.wsgi.application"
112
113 # endregion
114 # region: Assets and HTML and Caching---------------------------------------###
115
116 # https://docs.djangoproject.com/en/4.0/howto/static-files/
117
118
119 # Caching is disabled by default.
120 # For a low to medium traffic site, caching causes more
121 # problems than it solves. Should caching be desired,
122 # a reasonable start might be:
123 # CACHES = {
124 # "default": {
125 # "BACKEND": "django.core.cache.backends.db.DatabaseCache",
126 # }
127 # }
128
129 # Absolute path to the directory where `collectstatic`
130 # will place static files for deployment.
131 # Do not use this directory for permanent storage -
132 # it is for Django!
133 STATIC_ROOT = BASE_DIR / "static"
134
135 # TODO: decide on template engine and document in ADR
136 TEMPLATES = [
137 {
138 "BACKEND": "django.template.backends.django.DjangoTemplates",
139 "DIRS": [BASE_DIR / "templates"],
140 # look for templates inside installed apps
141 # required by django-debug-toolbar
142 "APP_DIRS": True,
143 "OPTIONS": {
144 # IMPORTANT security setting: escapes HTMLEntities,
145 # helping to prevent XSS attacks
146 "autoescape": True,
147 # context processors are callables which return
148 # dicts - Django merges them into the context
149 # dictionary used to render the templates
150 "context_processors": [
151 "django.template.context_processors.debug",
152 "django.template.context_processors.request",
153 "django.contrib.auth.context_processors.auth",
154 "django.contrib.messages.context_processors.messages",
155 ],
156 },
157 },
158 ]
159
160 # endregion
161 # region: Database----------------------------------------------------------###
162
163 # Wrap each view in a transaction on the database
164 # A decorator can be used for views which have no database activity:
165 # from django.db import transaction
166 # @transaction.non_atomic_requests
167 env_db_url["ATOMIC_REQUESTS"] = True
168
169 DATABASES = {
170 # dj-database-url package takes the supplied Postgres connection string
171 # and converts it into a dictionary with the correct USER, HOST, etc
172 "default": env_db_url,
173 }
174
175 # Specify default field type to use for primary keys
176 DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
177
178 # endregion
179 # region: Email-------------------------------------------------------------###
180
181 # email address to use for various automated correspondence
182 # TODO: pick something sensible here
183 DEFAULT_FROM_EMAIL = "[email protected]"
184
185 # connect to an (external) SMTP server for sending email
186 EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
187
188 # TODO: configure these when the values are known
189 # EMAIL_HOST = ""
190 # EMAIL_HOST_PASSWORD = ""
191 # EMAIL_HOST_USER = ""
192 # EMAIL_PORT = 587
193
194 # for mail sent with mail_admins or mail_managers
195 EMAIL_SUBJECT_PREFIX = "[Attn: .gov admin] "
196
197 # use a TLS (secure) connection when talking to the SMTP server
198 # TLS generally uses port 587
199 EMAIL_USE_TLS = True
200
201 # mutually exclusive with EMAIL_USE_TLS = True
202 # SSL generally uses port 465
203 EMAIL_USE_SSL = False
204
205 # timeout in seconds for blocking operations, like the connection attempt
206 EMAIL_TIMEOUT = 30
207
208 # email address to use for sending error reports
209 SERVER_EMAIL = "[email protected]"
210
211 # endregion
212 # region: Headers-----------------------------------------------------------###
213
214 # Content-Length header is set by django.middleware.common.CommonMiddleware
215
216 # X-Frame-Options header is set by
217 # django.middleware.clickjacking.XFrameOptionsMiddleware
218 # and configured in the Security and Privacy section of this file.
219 # Strict-Transport-Security is set by django.middleware.security.SecurityMiddleware
220 # and configured in the Security and Privacy section of this file.
221
222 # prefer contents of X-Forwarded-Host header to Host header
223 # as Host header may contain a proxy rather than the actual client
224 USE_X_FORWARDED_HOST = True
225
226 # endregion
227 # region: Internationalisation----------------------------------------------###
228
229 # https://docs.djangoproject.com/en/4.0/topics/i18n/
230
231 # Charset to use for HttpResponse objects; used in Content-Type header
232 DEFAULT_CHARSET = "utf-8"
233
234 # provide fallback language if translation file is missing or
235 # user's locale is not supported - requires USE_I18N = True
236 LANGUAGE_CODE = "en-us"
237
238 # allows language cookie to be sent if the user
239 # is coming to our site from an external page.
240 LANGUAGE_COOKIE_SAMESITE = None
241
242 # only send via HTTPS connection
243 LANGUAGE_COOKIE_SECURE = True
244
245 # to display datetimes in templates
246 # and to interpret datetimes entered in forms
247 TIME_ZONE = "UTC"
248
249 # enable Django’s translation system
250 USE_I18N = True
251
252 # enable localized formatting of numbers and dates
253 USE_L10N = True
254
255 # make datetimes timezone-aware by default
256 USE_TZ = True
257
258 # endregion
259 # region: Logging-----------------------------------------------------------###
260
261 # No file logger is configured, because containerized apps
262 # do not log to the file system.
263 # TODO: Configure better logging options
264 LOGGING = {
265 "version": 1,
266 "disable_existing_loggers": False,
267 "formatters": {
268 "verbose": {
269 "format": "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] "
270 "%(message)s",
271 "datefmt": "%d/%b/%Y %H:%M:%S",
272 },
273 "simple": {
274 "format": "%(levelname)s %(message)s",
275 },
276 },
277 "handlers": {
278 "console": {
279 "level": "INFO",
280 "class": "logging.StreamHandler",
281 "formatter": "verbose",
282 },
283 },
284 "loggers": {
285 "django": {
286 "handlers": ["console"],
287 "propagate": True,
288 "level": env_log_level,
289 },
290 "django.template": {
291 "handlers": ["console"],
292 "propagate": True,
293 "level": "INFO",
294 },
295 "registrar": {
296 "handlers": ["console"],
297 "propagate": True,
298 "level": "INFO",
299 },
300 },
301 }
302
303 # endregion
304 # region: Login-------------------------------------------------------------###
305
306 # TODO: FAC example for login.gov
307 # SIMPLE_JWT = {
308 # "ALGORITHM": "RS256",
309 # "AUDIENCE": None,
310 # "ISSUER": "https://idp.int.identitysandbox.gov/",
311 # "JWK_URL": "https://idp.int.identitysandbox.gov/api/openid_connect/certs",
312 # "LEEWAY": 0,
313 # "AUTH_TOKEN_CLASSES": ("rest_framework_simplejwt.tokens.UntypedToken",),
314 # "USER_ID_CLAIM": "sub",
315 # }
316 # TOKEN_AUTH = {"TOKEN_TTL": 3600}
317
318 # endregion
319 # region: Rest Framework/API------------------------------------------------###
320
321 # Enable CORS if api is served at subdomain
322 # https://github.com/adamchainz/django-cors-headers
323 # TODO: FAC example for REST framework
324 # API_VERSION = "0"
325 # REST_FRAMEWORK = {
326 # "DEFAULT_AUTHENTICATION_CLASSES": [
327 # "rest_framework.authentication.BasicAuthentication",
328 # "users.auth.ExpiringTokenAuthentication",
329 # ],
330 # "DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
331 # "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.PageNumberPagination",
332 # "PAGE_SIZE": 10,
333 # "TEST_REQUEST_RENDERER_CLASSES": [
334 # "rest_framework.renderers.MultiPartRenderer",
335 # "rest_framework.renderers.JSONRenderer",
336 # "rest_framework.renderers.TemplateHTMLRenderer",
337 # "rest_framework.renderers.BrowsableAPIRenderer",
338 # ],
339 # "TEST_REQUEST_DEFAULT_FORMAT": "api",
340 # }
341
342 # endregion
343 # region: Routing-----------------------------------------------------------###
344
345 # ~ Set by django.middleware.common.CommonMiddleware
346 # APPEND_SLASH = True
347 # PREPEND_WWW = False
348
349 # full Python import path to the root URLconf
350 ROOT_URLCONF = "registrar.config.urls"
351
352 # URL to use when referring to static files located in STATIC_ROOT
353 # Must be relative and end with "/"
354 STATIC_URL = "public/"
355
356 # endregion
357 # region: Security and Privacy----------------------------------------------###
358
359 # SECURITY WARNING: keep the secret key used in production secret!
360 SECRET_KEY = secret_key
361
362 # Use this variable for doing SECRET_KEY rotation, see documentation
363 SECRET_KEY_FALLBACKS: "list[str]" = []
364
365 # ~ Set by django.middleware.security.SecurityMiddleware
366 # SECURE_CONTENT_TYPE_NOSNIFF = True
367 # SECURE_CROSS_ORIGIN_OPENER_POLICY = "same-origin"
368 # SECURE_REDIRECT_EXEMPT = []
369 # SECURE_REFERRER_POLICY = "same-origin"
370 # SECURE_SSL_HOST = None
371
372 # ~ Overridden from django.middleware.security.SecurityMiddleware
373 # adds the includeSubDomains directive to the HTTP Strict Transport Security header
374 SECURE_HSTS_INCLUDE_SUBDOMAINS = True
375 # adds the preload directive to the HTTP Strict Transport Security header
376 SECURE_HSTS_PRELOAD = True
377 # TODO: set this value to 31536000 (1 year) for production
378 SECURE_HSTS_SECONDS = 300
379 # redirect all non-HTTPS requests to HTTPS
380 SECURE_SSL_REDIRECT = True
381
382 # ~ Set by django.middleware.common.CommonMiddleware
383 # DISALLOWED_USER_AGENTS = []
384
385 # The host/domain names that Django can serve.
386 # This is a security measure to prevent HTTP Host header attacks,
387 # which are possible even under many seemingly-safe
388 # web server configurations.
389 ALLOWED_HOSTS = [
390 "getgov-unstable.app.cloud.gov",
391 "get.gov",
392 ]
393
394
395 # Extend ALLOWED_HOSTS.
396 # IP addresses can also be hosts, which are used by internal
397 # load balancers for health checks, etc.
398 ALLOWED_CIDR_NETS = ["10.0.0.0/8"]
399
400 # ~ Below are some protections from cross-site request forgery.
401 # This is canonically done by including a nonce value
402 # in pages sent to the user, which the user is expected
403 # to send back. The specifics of implementation are
404 # intricate and varied.
405
406 # Store the token server-side, do not send it
407 # to the user via a cookie. This means each page
408 # which requires protection must place the token
409 # in the HTML explicitly, otherwise the user will
410 # get a 403 error when they submit.
411 CSRF_USE_SESSIONS = True
412
413 # Expiry of CSRF cookie, in seconds.
414 # None means "use session-based CSRF cookies".
415 CSRF_COOKIE_AGE = None
416
417 # Prevent JavaScript from reading the CSRF cookie.
418 # Has no effect with CSRF_USE_SESSIONS = True.
419 CSRF_COOKIE_HTTPONLY = True
420
421 # Only send the cookie via HTTPS connections.
422 # Has no effect with CSRF_USE_SESSIONS = True.
423 CSRF_COOKIE_SECURE = True
424
425 # Protect from non-targeted attacks by obscuring
426 # the CSRF cookie name from the default.
427 # Has no effect with CSRF_USE_SESSIONS = True.
428 CSRF_COOKIE_NAME = "CrSiReFo"
429
430 # Prevents CSRF cookie from being sent if the user
431 # is coming to our site from an external page.
432 # Has no effect with CSRF_USE_SESSIONS = True.
433 CSRF_COOKIE_SAMESITE = "Strict"
434
435 # Change header name to match cookie name.
436 # Has no effect with CSRF_USE_SESSIONS = True.
437 CSRF_HEADER_NAME = "HTTP_X_CRSIREFO"
438
439 # Max parameters that may be received via GET or POST
440 # TODO: 1000 is the default, may need to tune upward for
441 # large DNS zone files, if records are represented by
442 # individual form fields.
443 DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
444
445 # age of session cookies, in seconds (28800 = 8 hours)
446 SESSION_COOKIE_AGE = 28800
447
448 # instruct the browser to forbid client-side JavaScript
449 # from accessing the cookie
450 SESSION_COOKIE_HTTPONLY = True
451
452 # are we a spring boot application? who knows!
453 SESSION_COOKIE_NAME = "JSESSIONID"
454
455 # Prevents session cookie from being sent if the user
456 # is coming to our site from an external page.
457 SESSION_COOKIE_SAMESITE = "Strict"
458
459 # instruct browser to only send cookie via HTTPS
460 SESSION_COOKIE_SECURE = True
461
462 # ~ Set by django.middleware.clickjacking.XFrameOptionsMiddleware
463 # prevent clickjacking by instructing the browser not to load
464 # our site within an iframe
465 # X_FRAME_OPTIONS = "Deny"
466
467 # endregion
468 # region: Testing-----------------------------------------------------------###
469
470 # Additional directories searched for fixture files.
471 # The fixtures directory of each application is searched by default.
472 # Must use unix style "/" path separators.
473 FIXTURE_DIRS: "list[str]" = []
474
475 # endregion
476
477
478 # # # ###
479 # Development settings #
480 # # # ###
481
482 if DEBUG:
483 # used by debug() context processor
484 INTERNAL_IPS = [
485 "127.0.0.1",
486 "::1",
487 ]
488
489 # allow dev laptop to connect
490 ALLOWED_HOSTS += ("localhost",)
491 SECURE_SSL_REDIRECT = False
492 SECURE_HSTS_PRELOAD = False
493
494 # discover potentially inefficient database queries
495 # TODO: use settings overrides to ensure this always is True during tests
496 INSTALLED_APPS += ("nplusone.ext.django",)
497 MIDDLEWARE += ("nplusone.ext.django.NPlusOneMiddleware",)
498 NPLUSONE_RAISE = True
499
500 # insert the amazing django-debug-toolbar
501 INSTALLED_APPS += ("debug_toolbar",)
502 MIDDLEWARE.insert(0, "debug_toolbar.middleware.DebugToolbarMiddleware")
503
504 DEBUG_TOOLBAR_CONFIG = {
505 # due to Docker, bypass Debug Toolbar's check on INTERNAL_IPS
506 "SHOW_TOOLBAR_CALLBACK": lambda _: True,
507 }
508
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/registrar/config/settings.py b/src/registrar/config/settings.py
--- a/src/registrar/config/settings.py
+++ b/src/registrar/config/settings.py
@@ -388,6 +388,7 @@
# web server configurations.
ALLOWED_HOSTS = [
"getgov-unstable.app.cloud.gov",
+ "getgov-staging.app.cloud.gov",
"get.gov",
]
| {"golden_diff": "diff --git a/src/registrar/config/settings.py b/src/registrar/config/settings.py\n--- a/src/registrar/config/settings.py\n+++ b/src/registrar/config/settings.py\n@@ -388,6 +388,7 @@\n # web server configurations.\n ALLOWED_HOSTS = [\n \"getgov-unstable.app.cloud.gov\",\n+ \"getgov-staging.app.cloud.gov\",\n \"get.gov\",\n ]\n", "issue": "Setup new cloud.gov prototyping org\nWe recently signed an agreement for a more robust prototyping org with cloud.gov that will give us the ability to have multiple spaces and invite members from DHS. \n\n- [ ] Setup org and make sure everyone who needs access has the appropriate access\n- [ ] Setup spaces `unstable` and `staging` according to rules\n- [ ] Change Github actions access to CD\n- [ ] Replace org and space name in settings (actions, documentation, scripts, etc.)\n", "before_files": [{"content": "\"\"\"\nDjango settings for .gov registrar project.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/4.0/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/4.0/ref/settings/\n\nIF you'd like to see all of these settings in the running app:\n\n```shell\n$ docker-compose exec app python manage.py shell\n>>> from django.conf import settings\n>>> dir(settings)\n```\n\n\"\"\"\nimport environs\nfrom cfenv import AppEnv\nfrom pathlib import Path\n\n# # # ###\n# Setup code goes here #\n# # # ###\n\nenv = environs.Env()\n\n# Get secrets from Cloud.gov user provided service, if exists\n# If not, get secrets from environment variables\nkey_service = AppEnv().get_service(name=\"getgov-credentials\")\nif key_service and key_service.credentials:\n secret = key_service.credentials.get\nelse:\n secret = env\n\n# # # ###\n# Values obtained externally #\n# # # ###\n\npath = Path(__file__)\n\nenv_db_url = env.dj_db_url(\"DATABASE_URL\")\nenv_debug = env.bool(\"DJANGO_DEBUG\", default=False)\nenv_log_level = env.str(\"DJANGO_LOG_LEVEL\", \"DEBUG\")\n\nsecret_key = secret(\"DJANGO_SECRET_KEY\")\n\n# region: Basic Django Config-----------------------------------------------###\n\n# Build paths inside the project like this: BASE_DIR / \"subdir\".\nBASE_DIR = path.resolve().parent.parent\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = env_debug\n\n\n# Applications are modular pieces of code.\n# They are provided by Django, by third-parties, or by yourself.\n# Installing them here makes them available for execution.\n# Do not access INSTALLED_APPS directly. Use `django.apps.apps` instead.\nINSTALLED_APPS = [\n # Django automatic admin interface reads metadata\n # from database models to provide a quick, model-centric\n # interface where trusted users can manage content\n \"django.contrib.admin\",\n # vv Required by django.contrib.admin vv\n # the \"user\" model! 
*\\o/*\n \"django.contrib.auth\",\n # generic interface for Django models\n \"django.contrib.contenttypes\",\n # required for CSRF protection and many other things\n \"django.contrib.sessions\",\n # framework for displaying messages to the user\n \"django.contrib.messages\",\n # ^^ Required by django.contrib.admin ^^\n # collects static files from each of your applications\n # (and any other places you specify) into a single location\n # that can easily be served in production\n \"django.contrib.staticfiles\",\n # let's be sure to install our own application!\n \"registrar\",\n]\n\n# Middleware are routines for processing web requests.\n# Adding them here turns them \"on\"; Django will perform the\n# specified routines on each incoming request and outgoing response.\nMIDDLEWARE = [\n # django-allow-cidr: enable use of CIDR IP ranges in ALLOWED_HOSTS\n \"allow_cidr.middleware.AllowCIDRMiddleware\",\n # provide security enhancements to the request/response cycle\n \"django.middleware.security.SecurityMiddleware\",\n # store and retrieve arbitrary data on a per-site-visitor basis\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n # add a few conveniences for perfectionists, see documentation\n \"django.middleware.common.CommonMiddleware\",\n # add protection against Cross Site Request Forgeries by adding\n # hidden form fields to POST forms and checking requests for the correct value\n \"django.middleware.csrf.CsrfViewMiddleware\",\n # add `user` (the currently-logged-in user) to incoming HttpRequest objects\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n # provide framework for displaying messages to the user, see documentation\n \"django.contrib.messages.middleware.MessageMiddleware\",\n # provide clickjacking protection via the X-Frame-Options header\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n # django-csp: enable use of Content-Security-Policy header\n \"csp.middleware.CSPMiddleware\",\n]\n\n# application object used by Django\u2019s built-in servers (e.g. `runserver`)\nWSGI_APPLICATION = \"registrar.config.wsgi.application\"\n\n# endregion\n# region: Assets and HTML and Caching---------------------------------------###\n\n# https://docs.djangoproject.com/en/4.0/howto/static-files/\n\n\n# Caching is disabled by default.\n# For a low to medium traffic site, caching causes more\n# problems than it solves. 
Should caching be desired,\n# a reasonable start might be:\n# CACHES = {\n# \"default\": {\n# \"BACKEND\": \"django.core.cache.backends.db.DatabaseCache\",\n# }\n# }\n\n# Absolute path to the directory where `collectstatic`\n# will place static files for deployment.\n# Do not use this directory for permanent storage -\n# it is for Django!\nSTATIC_ROOT = BASE_DIR / \"static\"\n\n# TODO: decide on template engine and document in ADR\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [BASE_DIR / \"templates\"],\n # look for templates inside installed apps\n # required by django-debug-toolbar\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n # IMPORTANT security setting: escapes HTMLEntities,\n # helping to prevent XSS attacks\n \"autoescape\": True,\n # context processors are callables which return\n # dicts - Django merges them into the context\n # dictionary used to render the templates\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n ],\n },\n },\n]\n\n# endregion\n# region: Database----------------------------------------------------------###\n\n# Wrap each view in a transaction on the database\n# A decorator can be used for views which have no database activity:\n# from django.db import transaction\n# @transaction.non_atomic_requests\nenv_db_url[\"ATOMIC_REQUESTS\"] = True\n\nDATABASES = {\n # dj-database-url package takes the supplied Postgres connection string\n # and converts it into a dictionary with the correct USER, HOST, etc\n \"default\": env_db_url,\n}\n\n# Specify default field type to use for primary keys\nDEFAULT_AUTO_FIELD = \"django.db.models.BigAutoField\"\n\n# endregion\n# region: Email-------------------------------------------------------------###\n\n# email address to use for various automated correspondence\n# TODO: pick something sensible here\nDEFAULT_FROM_EMAIL = \"[email protected]\"\n\n# connect to an (external) SMTP server for sending email\nEMAIL_BACKEND = \"django.core.mail.backends.smtp.EmailBackend\"\n\n# TODO: configure these when the values are known\n# EMAIL_HOST = \"\"\n# EMAIL_HOST_PASSWORD = \"\"\n# EMAIL_HOST_USER = \"\"\n# EMAIL_PORT = 587\n\n# for mail sent with mail_admins or mail_managers\nEMAIL_SUBJECT_PREFIX = \"[Attn: .gov admin] \"\n\n# use a TLS (secure) connection when talking to the SMTP server\n# TLS generally uses port 587\nEMAIL_USE_TLS = True\n\n# mutually exclusive with EMAIL_USE_TLS = True\n# SSL generally uses port 465\nEMAIL_USE_SSL = False\n\n# timeout in seconds for blocking operations, like the connection attempt\nEMAIL_TIMEOUT = 30\n\n# email address to use for sending error reports\nSERVER_EMAIL = \"[email protected]\"\n\n# endregion\n# region: Headers-----------------------------------------------------------###\n\n# Content-Length header is set by django.middleware.common.CommonMiddleware\n\n# X-Frame-Options header is set by\n# django.middleware.clickjacking.XFrameOptionsMiddleware\n# and configured in the Security and Privacy section of this file.\n# Strict-Transport-Security is set by django.middleware.security.SecurityMiddleware\n# and configured in the Security and Privacy section of this file.\n\n# prefer contents of X-Forwarded-Host header to Host header\n# as Host header may contain a proxy rather than the actual client\nUSE_X_FORWARDED_HOST = True\n\n# endregion\n# region: 
Internationalisation----------------------------------------------###\n\n# https://docs.djangoproject.com/en/4.0/topics/i18n/\n\n# Charset to use for HttpResponse objects; used in Content-Type header\nDEFAULT_CHARSET = \"utf-8\"\n\n# provide fallback language if translation file is missing or\n# user's locale is not supported - requires USE_I18N = True\nLANGUAGE_CODE = \"en-us\"\n\n# allows language cookie to be sent if the user\n# is coming to our site from an external page.\nLANGUAGE_COOKIE_SAMESITE = None\n\n# only send via HTTPS connection\nLANGUAGE_COOKIE_SECURE = True\n\n# to display datetimes in templates\n# and to interpret datetimes entered in forms\nTIME_ZONE = \"UTC\"\n\n# enable Django\u2019s translation system\nUSE_I18N = True\n\n# enable localized formatting of numbers and dates\nUSE_L10N = True\n\n# make datetimes timezone-aware by default\nUSE_TZ = True\n\n# endregion\n# region: Logging-----------------------------------------------------------###\n\n# No file logger is configured, because containerized apps\n# do not log to the file system.\n# TODO: Configure better logging options\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"formatters\": {\n \"verbose\": {\n \"format\": \"[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] \"\n \"%(message)s\",\n \"datefmt\": \"%d/%b/%Y %H:%M:%S\",\n },\n \"simple\": {\n \"format\": \"%(levelname)s %(message)s\",\n },\n },\n \"handlers\": {\n \"console\": {\n \"level\": \"INFO\",\n \"class\": \"logging.StreamHandler\",\n \"formatter\": \"verbose\",\n },\n },\n \"loggers\": {\n \"django\": {\n \"handlers\": [\"console\"],\n \"propagate\": True,\n \"level\": env_log_level,\n },\n \"django.template\": {\n \"handlers\": [\"console\"],\n \"propagate\": True,\n \"level\": \"INFO\",\n },\n \"registrar\": {\n \"handlers\": [\"console\"],\n \"propagate\": True,\n \"level\": \"INFO\",\n },\n },\n}\n\n# endregion\n# region: Login-------------------------------------------------------------###\n\n# TODO: FAC example for login.gov\n# SIMPLE_JWT = {\n# \"ALGORITHM\": \"RS256\",\n# \"AUDIENCE\": None,\n# \"ISSUER\": \"https://idp.int.identitysandbox.gov/\",\n# \"JWK_URL\": \"https://idp.int.identitysandbox.gov/api/openid_connect/certs\",\n# \"LEEWAY\": 0,\n# \"AUTH_TOKEN_CLASSES\": (\"rest_framework_simplejwt.tokens.UntypedToken\",),\n# \"USER_ID_CLAIM\": \"sub\",\n# }\n# TOKEN_AUTH = {\"TOKEN_TTL\": 3600}\n\n# endregion\n# region: Rest Framework/API------------------------------------------------###\n\n# Enable CORS if api is served at subdomain\n# https://github.com/adamchainz/django-cors-headers\n# TODO: FAC example for REST framework\n# API_VERSION = \"0\"\n# REST_FRAMEWORK = {\n# \"DEFAULT_AUTHENTICATION_CLASSES\": [\n# \"rest_framework.authentication.BasicAuthentication\",\n# \"users.auth.ExpiringTokenAuthentication\",\n# ],\n# \"DEFAULT_PERMISSION_CLASSES\": (\"rest_framework.permissions.IsAuthenticated\",),\n# \"DEFAULT_PAGINATION_CLASS\": \"rest_framework.pagination.PageNumberPagination\",\n# \"PAGE_SIZE\": 10,\n# \"TEST_REQUEST_RENDERER_CLASSES\": [\n# \"rest_framework.renderers.MultiPartRenderer\",\n# \"rest_framework.renderers.JSONRenderer\",\n# \"rest_framework.renderers.TemplateHTMLRenderer\",\n# \"rest_framework.renderers.BrowsableAPIRenderer\",\n# ],\n# \"TEST_REQUEST_DEFAULT_FORMAT\": \"api\",\n# }\n\n# endregion\n# region: Routing-----------------------------------------------------------###\n\n# ~ Set by django.middleware.common.CommonMiddleware\n# APPEND_SLASH = True\n# PREPEND_WWW = False\n\n# full Python 
import path to the root URLconf\nROOT_URLCONF = \"registrar.config.urls\"\n\n# URL to use when referring to static files located in STATIC_ROOT\n# Must be relative and end with \"/\"\nSTATIC_URL = \"public/\"\n\n# endregion\n# region: Security and Privacy----------------------------------------------###\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = secret_key\n\n# Use this variable for doing SECRET_KEY rotation, see documentation\nSECRET_KEY_FALLBACKS: \"list[str]\" = []\n\n# ~ Set by django.middleware.security.SecurityMiddleware\n# SECURE_CONTENT_TYPE_NOSNIFF = True\n# SECURE_CROSS_ORIGIN_OPENER_POLICY = \"same-origin\"\n# SECURE_REDIRECT_EXEMPT = []\n# SECURE_REFERRER_POLICY = \"same-origin\"\n# SECURE_SSL_HOST = None\n\n# ~ Overridden from django.middleware.security.SecurityMiddleware\n# adds the includeSubDomains directive to the HTTP Strict Transport Security header\nSECURE_HSTS_INCLUDE_SUBDOMAINS = True\n# adds the preload directive to the HTTP Strict Transport Security header\nSECURE_HSTS_PRELOAD = True\n# TODO: set this value to 31536000 (1 year) for production\nSECURE_HSTS_SECONDS = 300\n# redirect all non-HTTPS requests to HTTPS\nSECURE_SSL_REDIRECT = True\n\n# ~ Set by django.middleware.common.CommonMiddleware\n# DISALLOWED_USER_AGENTS = []\n\n# The host/domain names that Django can serve.\n# This is a security measure to prevent HTTP Host header attacks,\n# which are possible even under many seemingly-safe\n# web server configurations.\nALLOWED_HOSTS = [\n \"getgov-unstable.app.cloud.gov\",\n \"get.gov\",\n]\n\n\n# Extend ALLOWED_HOSTS.\n# IP addresses can also be hosts, which are used by internal\n# load balancers for health checks, etc.\nALLOWED_CIDR_NETS = [\"10.0.0.0/8\"]\n\n# ~ Below are some protections from cross-site request forgery.\n# This is canonically done by including a nonce value\n# in pages sent to the user, which the user is expected\n# to send back. The specifics of implementation are\n# intricate and varied.\n\n# Store the token server-side, do not send it\n# to the user via a cookie. 
This means each page\n# which requires protection must place the token\n# in the HTML explicitly, otherwise the user will\n# get a 403 error when they submit.\nCSRF_USE_SESSIONS = True\n\n# Expiry of CSRF cookie, in seconds.\n# None means \"use session-based CSRF cookies\".\nCSRF_COOKIE_AGE = None\n\n# Prevent JavaScript from reading the CSRF cookie.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_COOKIE_HTTPONLY = True\n\n# Only send the cookie via HTTPS connections.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_COOKIE_SECURE = True\n\n# Protect from non-targeted attacks by obscuring\n# the CSRF cookie name from the default.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_COOKIE_NAME = \"CrSiReFo\"\n\n# Prevents CSRF cookie from being sent if the user\n# is coming to our site from an external page.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_COOKIE_SAMESITE = \"Strict\"\n\n# Change header name to match cookie name.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_HEADER_NAME = \"HTTP_X_CRSIREFO\"\n\n# Max parameters that may be received via GET or POST\n# TODO: 1000 is the default, may need to tune upward for\n# large DNS zone files, if records are represented by\n# individual form fields.\nDATA_UPLOAD_MAX_NUMBER_FIELDS = 1000\n\n# age of session cookies, in seconds (28800 = 8 hours)\nSESSION_COOKIE_AGE = 28800\n\n# instruct the browser to forbid client-side JavaScript\n# from accessing the cookie\nSESSION_COOKIE_HTTPONLY = True\n\n# are we a spring boot application? who knows!\nSESSION_COOKIE_NAME = \"JSESSIONID\"\n\n# Prevents session cookie from being sent if the user\n# is coming to our site from an external page.\nSESSION_COOKIE_SAMESITE = \"Strict\"\n\n# instruct browser to only send cookie via HTTPS\nSESSION_COOKIE_SECURE = True\n\n# ~ Set by django.middleware.clickjacking.XFrameOptionsMiddleware\n# prevent clickjacking by instructing the browser not to load\n# our site within an iframe\n# X_FRAME_OPTIONS = \"Deny\"\n\n# endregion\n# region: Testing-----------------------------------------------------------###\n\n# Additional directories searched for fixture files.\n# The fixtures directory of each application is searched by default.\n# Must use unix style \"/\" path separators.\nFIXTURE_DIRS: \"list[str]\" = []\n\n# endregion\n\n\n# # # ###\n# Development settings #\n# # # ###\n\nif DEBUG:\n # used by debug() context processor\n INTERNAL_IPS = [\n \"127.0.0.1\",\n \"::1\",\n ]\n\n # allow dev laptop to connect\n ALLOWED_HOSTS += (\"localhost\",)\n SECURE_SSL_REDIRECT = False\n SECURE_HSTS_PRELOAD = False\n\n # discover potentially inefficient database queries\n # TODO: use settings overrides to ensure this always is True during tests\n INSTALLED_APPS += (\"nplusone.ext.django\",)\n MIDDLEWARE += (\"nplusone.ext.django.NPlusOneMiddleware\",)\n NPLUSONE_RAISE = True\n\n # insert the amazing django-debug-toolbar\n INSTALLED_APPS += (\"debug_toolbar\",)\n MIDDLEWARE.insert(0, \"debug_toolbar.middleware.DebugToolbarMiddleware\")\n\n DEBUG_TOOLBAR_CONFIG = {\n # due to Docker, bypass Debug Toolbar's check on INTERNAL_IPS\n \"SHOW_TOOLBAR_CALLBACK\": lambda _: True,\n }\n", "path": "src/registrar/config/settings.py"}], "after_files": [{"content": "\"\"\"\nDjango settings for .gov registrar project.\n\nFor more information on this file, see\nhttps://docs.djangoproject.com/en/4.0/topics/settings/\n\nFor the full list of settings and their values, see\nhttps://docs.djangoproject.com/en/4.0/ref/settings/\n\nIF you'd like to see all of these settings in the 
running app:\n\n```shell\n$ docker-compose exec app python manage.py shell\n>>> from django.conf import settings\n>>> dir(settings)\n```\n\n\"\"\"\nimport environs\nfrom cfenv import AppEnv\nfrom pathlib import Path\n\n# # # ###\n# Setup code goes here #\n# # # ###\n\nenv = environs.Env()\n\n# Get secrets from Cloud.gov user provided service, if exists\n# If not, get secrets from environment variables\nkey_service = AppEnv().get_service(name=\"getgov-credentials\")\nif key_service and key_service.credentials:\n secret = key_service.credentials.get\nelse:\n secret = env\n\n# # # ###\n# Values obtained externally #\n# # # ###\n\npath = Path(__file__)\n\nenv_db_url = env.dj_db_url(\"DATABASE_URL\")\nenv_debug = env.bool(\"DJANGO_DEBUG\", default=False)\nenv_log_level = env.str(\"DJANGO_LOG_LEVEL\", \"DEBUG\")\n\nsecret_key = secret(\"DJANGO_SECRET_KEY\")\n\n# region: Basic Django Config-----------------------------------------------###\n\n# Build paths inside the project like this: BASE_DIR / \"subdir\".\nBASE_DIR = path.resolve().parent.parent\n\n# SECURITY WARNING: don't run with debug turned on in production!\nDEBUG = env_debug\n\n\n# Applications are modular pieces of code.\n# They are provided by Django, by third-parties, or by yourself.\n# Installing them here makes them available for execution.\n# Do not access INSTALLED_APPS directly. Use `django.apps.apps` instead.\nINSTALLED_APPS = [\n # Django automatic admin interface reads metadata\n # from database models to provide a quick, model-centric\n # interface where trusted users can manage content\n \"django.contrib.admin\",\n # vv Required by django.contrib.admin vv\n # the \"user\" model! *\\o/*\n \"django.contrib.auth\",\n # generic interface for Django models\n \"django.contrib.contenttypes\",\n # required for CSRF protection and many other things\n \"django.contrib.sessions\",\n # framework for displaying messages to the user\n \"django.contrib.messages\",\n # ^^ Required by django.contrib.admin ^^\n # collects static files from each of your applications\n # (and any other places you specify) into a single location\n # that can easily be served in production\n \"django.contrib.staticfiles\",\n # let's be sure to install our own application!\n \"registrar\",\n]\n\n# Middleware are routines for processing web requests.\n# Adding them here turns them \"on\"; Django will perform the\n# specified routines on each incoming request and outgoing response.\nMIDDLEWARE = [\n # django-allow-cidr: enable use of CIDR IP ranges in ALLOWED_HOSTS\n \"allow_cidr.middleware.AllowCIDRMiddleware\",\n # provide security enhancements to the request/response cycle\n \"django.middleware.security.SecurityMiddleware\",\n # store and retrieve arbitrary data on a per-site-visitor basis\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n # add a few conveniences for perfectionists, see documentation\n \"django.middleware.common.CommonMiddleware\",\n # add protection against Cross Site Request Forgeries by adding\n # hidden form fields to POST forms and checking requests for the correct value\n \"django.middleware.csrf.CsrfViewMiddleware\",\n # add `user` (the currently-logged-in user) to incoming HttpRequest objects\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n # provide framework for displaying messages to the user, see documentation\n \"django.contrib.messages.middleware.MessageMiddleware\",\n # provide clickjacking protection via the X-Frame-Options header\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n # 
django-csp: enable use of Content-Security-Policy header\n \"csp.middleware.CSPMiddleware\",\n]\n\n# application object used by Django\u2019s built-in servers (e.g. `runserver`)\nWSGI_APPLICATION = \"registrar.config.wsgi.application\"\n\n# endregion\n# region: Assets and HTML and Caching---------------------------------------###\n\n# https://docs.djangoproject.com/en/4.0/howto/static-files/\n\n\n# Caching is disabled by default.\n# For a low to medium traffic site, caching causes more\n# problems than it solves. Should caching be desired,\n# a reasonable start might be:\n# CACHES = {\n# \"default\": {\n# \"BACKEND\": \"django.core.cache.backends.db.DatabaseCache\",\n# }\n# }\n\n# Absolute path to the directory where `collectstatic`\n# will place static files for deployment.\n# Do not use this directory for permanent storage -\n# it is for Django!\nSTATIC_ROOT = BASE_DIR / \"static\"\n\n# TODO: decide on template engine and document in ADR\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [BASE_DIR / \"templates\"],\n # look for templates inside installed apps\n # required by django-debug-toolbar\n \"APP_DIRS\": True,\n \"OPTIONS\": {\n # IMPORTANT security setting: escapes HTMLEntities,\n # helping to prevent XSS attacks\n \"autoescape\": True,\n # context processors are callables which return\n # dicts - Django merges them into the context\n # dictionary used to render the templates\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n ],\n },\n },\n]\n\n# endregion\n# region: Database----------------------------------------------------------###\n\n# Wrap each view in a transaction on the database\n# A decorator can be used for views which have no database activity:\n# from django.db import transaction\n# @transaction.non_atomic_requests\nenv_db_url[\"ATOMIC_REQUESTS\"] = True\n\nDATABASES = {\n # dj-database-url package takes the supplied Postgres connection string\n # and converts it into a dictionary with the correct USER, HOST, etc\n \"default\": env_db_url,\n}\n\n# Specify default field type to use for primary keys\nDEFAULT_AUTO_FIELD = \"django.db.models.BigAutoField\"\n\n# endregion\n# region: Email-------------------------------------------------------------###\n\n# email address to use for various automated correspondence\n# TODO: pick something sensible here\nDEFAULT_FROM_EMAIL = \"[email protected]\"\n\n# connect to an (external) SMTP server for sending email\nEMAIL_BACKEND = \"django.core.mail.backends.smtp.EmailBackend\"\n\n# TODO: configure these when the values are known\n# EMAIL_HOST = \"\"\n# EMAIL_HOST_PASSWORD = \"\"\n# EMAIL_HOST_USER = \"\"\n# EMAIL_PORT = 587\n\n# for mail sent with mail_admins or mail_managers\nEMAIL_SUBJECT_PREFIX = \"[Attn: .gov admin] \"\n\n# use a TLS (secure) connection when talking to the SMTP server\n# TLS generally uses port 587\nEMAIL_USE_TLS = True\n\n# mutually exclusive with EMAIL_USE_TLS = True\n# SSL generally uses port 465\nEMAIL_USE_SSL = False\n\n# timeout in seconds for blocking operations, like the connection attempt\nEMAIL_TIMEOUT = 30\n\n# email address to use for sending error reports\nSERVER_EMAIL = \"[email protected]\"\n\n# endregion\n# region: Headers-----------------------------------------------------------###\n\n# Content-Length header is set by django.middleware.common.CommonMiddleware\n\n# 
X-Frame-Options header is set by\n# django.middleware.clickjacking.XFrameOptionsMiddleware\n# and configured in the Security and Privacy section of this file.\n# Strict-Transport-Security is set by django.middleware.security.SecurityMiddleware\n# and configured in the Security and Privacy section of this file.\n\n# prefer contents of X-Forwarded-Host header to Host header\n# as Host header may contain a proxy rather than the actual client\nUSE_X_FORWARDED_HOST = True\n\n# endregion\n# region: Internationalisation----------------------------------------------###\n\n# https://docs.djangoproject.com/en/4.0/topics/i18n/\n\n# Charset to use for HttpResponse objects; used in Content-Type header\nDEFAULT_CHARSET = \"utf-8\"\n\n# provide fallback language if translation file is missing or\n# user's locale is not supported - requires USE_I18N = True\nLANGUAGE_CODE = \"en-us\"\n\n# allows language cookie to be sent if the user\n# is coming to our site from an external page.\nLANGUAGE_COOKIE_SAMESITE = None\n\n# only send via HTTPS connection\nLANGUAGE_COOKIE_SECURE = True\n\n# to display datetimes in templates\n# and to interpret datetimes entered in forms\nTIME_ZONE = \"UTC\"\n\n# enable Django\u2019s translation system\nUSE_I18N = True\n\n# enable localized formatting of numbers and dates\nUSE_L10N = True\n\n# make datetimes timezone-aware by default\nUSE_TZ = True\n\n# endregion\n# region: Logging-----------------------------------------------------------###\n\n# No file logger is configured, because containerized apps\n# do not log to the file system.\n# TODO: Configure better logging options\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"formatters\": {\n \"verbose\": {\n \"format\": \"[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] \"\n \"%(message)s\",\n \"datefmt\": \"%d/%b/%Y %H:%M:%S\",\n },\n \"simple\": {\n \"format\": \"%(levelname)s %(message)s\",\n },\n },\n \"handlers\": {\n \"console\": {\n \"level\": \"INFO\",\n \"class\": \"logging.StreamHandler\",\n \"formatter\": \"verbose\",\n },\n },\n \"loggers\": {\n \"django\": {\n \"handlers\": [\"console\"],\n \"propagate\": True,\n \"level\": env_log_level,\n },\n \"django.template\": {\n \"handlers\": [\"console\"],\n \"propagate\": True,\n \"level\": \"INFO\",\n },\n \"registrar\": {\n \"handlers\": [\"console\"],\n \"propagate\": True,\n \"level\": \"INFO\",\n },\n },\n}\n\n# endregion\n# region: Login-------------------------------------------------------------###\n\n# TODO: FAC example for login.gov\n# SIMPLE_JWT = {\n# \"ALGORITHM\": \"RS256\",\n# \"AUDIENCE\": None,\n# \"ISSUER\": \"https://idp.int.identitysandbox.gov/\",\n# \"JWK_URL\": \"https://idp.int.identitysandbox.gov/api/openid_connect/certs\",\n# \"LEEWAY\": 0,\n# \"AUTH_TOKEN_CLASSES\": (\"rest_framework_simplejwt.tokens.UntypedToken\",),\n# \"USER_ID_CLAIM\": \"sub\",\n# }\n# TOKEN_AUTH = {\"TOKEN_TTL\": 3600}\n\n# endregion\n# region: Rest Framework/API------------------------------------------------###\n\n# Enable CORS if api is served at subdomain\n# https://github.com/adamchainz/django-cors-headers\n# TODO: FAC example for REST framework\n# API_VERSION = \"0\"\n# REST_FRAMEWORK = {\n# \"DEFAULT_AUTHENTICATION_CLASSES\": [\n# \"rest_framework.authentication.BasicAuthentication\",\n# \"users.auth.ExpiringTokenAuthentication\",\n# ],\n# \"DEFAULT_PERMISSION_CLASSES\": (\"rest_framework.permissions.IsAuthenticated\",),\n# \"DEFAULT_PAGINATION_CLASS\": \"rest_framework.pagination.PageNumberPagination\",\n# \"PAGE_SIZE\": 10,\n# 
\"TEST_REQUEST_RENDERER_CLASSES\": [\n# \"rest_framework.renderers.MultiPartRenderer\",\n# \"rest_framework.renderers.JSONRenderer\",\n# \"rest_framework.renderers.TemplateHTMLRenderer\",\n# \"rest_framework.renderers.BrowsableAPIRenderer\",\n# ],\n# \"TEST_REQUEST_DEFAULT_FORMAT\": \"api\",\n# }\n\n# endregion\n# region: Routing-----------------------------------------------------------###\n\n# ~ Set by django.middleware.common.CommonMiddleware\n# APPEND_SLASH = True\n# PREPEND_WWW = False\n\n# full Python import path to the root URLconf\nROOT_URLCONF = \"registrar.config.urls\"\n\n# URL to use when referring to static files located in STATIC_ROOT\n# Must be relative and end with \"/\"\nSTATIC_URL = \"public/\"\n\n# endregion\n# region: Security and Privacy----------------------------------------------###\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = secret_key\n\n# Use this variable for doing SECRET_KEY rotation, see documentation\nSECRET_KEY_FALLBACKS: \"list[str]\" = []\n\n# ~ Set by django.middleware.security.SecurityMiddleware\n# SECURE_CONTENT_TYPE_NOSNIFF = True\n# SECURE_CROSS_ORIGIN_OPENER_POLICY = \"same-origin\"\n# SECURE_REDIRECT_EXEMPT = []\n# SECURE_REFERRER_POLICY = \"same-origin\"\n# SECURE_SSL_HOST = None\n\n# ~ Overridden from django.middleware.security.SecurityMiddleware\n# adds the includeSubDomains directive to the HTTP Strict Transport Security header\nSECURE_HSTS_INCLUDE_SUBDOMAINS = True\n# adds the preload directive to the HTTP Strict Transport Security header\nSECURE_HSTS_PRELOAD = True\n# TODO: set this value to 31536000 (1 year) for production\nSECURE_HSTS_SECONDS = 300\n# redirect all non-HTTPS requests to HTTPS\nSECURE_SSL_REDIRECT = True\n\n# ~ Set by django.middleware.common.CommonMiddleware\n# DISALLOWED_USER_AGENTS = []\n\n# The host/domain names that Django can serve.\n# This is a security measure to prevent HTTP Host header attacks,\n# which are possible even under many seemingly-safe\n# web server configurations.\nALLOWED_HOSTS = [\n \"getgov-unstable.app.cloud.gov\",\n \"getgov-staging.app.cloud.gov\",\n \"get.gov\",\n]\n\n\n# Extend ALLOWED_HOSTS.\n# IP addresses can also be hosts, which are used by internal\n# load balancers for health checks, etc.\nALLOWED_CIDR_NETS = [\"10.0.0.0/8\"]\n\n# ~ Below are some protections from cross-site request forgery.\n# This is canonically done by including a nonce value\n# in pages sent to the user, which the user is expected\n# to send back. The specifics of implementation are\n# intricate and varied.\n\n# Store the token server-side, do not send it\n# to the user via a cookie. 
This means each page\n# which requires protection must place the token\n# in the HTML explicitly, otherwise the user will\n# get a 403 error when they submit.\nCSRF_USE_SESSIONS = True\n\n# Expiry of CSRF cookie, in seconds.\n# None means \"use session-based CSRF cookies\".\nCSRF_COOKIE_AGE = None\n\n# Prevent JavaScript from reading the CSRF cookie.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_COOKIE_HTTPONLY = True\n\n# Only send the cookie via HTTPS connections.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_COOKIE_SECURE = True\n\n# Protect from non-targeted attacks by obscuring\n# the CSRF cookie name from the default.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_COOKIE_NAME = \"CrSiReFo\"\n\n# Prevents CSRF cookie from being sent if the user\n# is coming to our site from an external page.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_COOKIE_SAMESITE = \"Strict\"\n\n# Change header name to match cookie name.\n# Has no effect with CSRF_USE_SESSIONS = True.\nCSRF_HEADER_NAME = \"HTTP_X_CRSIREFO\"\n\n# Max parameters that may be received via GET or POST\n# TODO: 1000 is the default, may need to tune upward for\n# large DNS zone files, if records are represented by\n# individual form fields.\nDATA_UPLOAD_MAX_NUMBER_FIELDS = 1000\n\n# age of session cookies, in seconds (28800 = 8 hours)\nSESSION_COOKIE_AGE = 28800\n\n# instruct the browser to forbid client-side JavaScript\n# from accessing the cookie\nSESSION_COOKIE_HTTPONLY = True\n\n# are we a spring boot application? who knows!\nSESSION_COOKIE_NAME = \"JSESSIONID\"\n\n# Prevents session cookie from being sent if the user\n# is coming to our site from an external page.\nSESSION_COOKIE_SAMESITE = \"Strict\"\n\n# instruct browser to only send cookie via HTTPS\nSESSION_COOKIE_SECURE = True\n\n# ~ Set by django.middleware.clickjacking.XFrameOptionsMiddleware\n# prevent clickjacking by instructing the browser not to load\n# our site within an iframe\n# X_FRAME_OPTIONS = \"Deny\"\n\n# endregion\n# region: Testing-----------------------------------------------------------###\n\n# Additional directories searched for fixture files.\n# The fixtures directory of each application is searched by default.\n# Must use unix style \"/\" path separators.\nFIXTURE_DIRS: \"list[str]\" = []\n\n# endregion\n\n\n# # # ###\n# Development settings #\n# # # ###\n\nif DEBUG:\n # used by debug() context processor\n INTERNAL_IPS = [\n \"127.0.0.1\",\n \"::1\",\n ]\n\n # allow dev laptop to connect\n ALLOWED_HOSTS += (\"localhost\",)\n SECURE_SSL_REDIRECT = False\n SECURE_HSTS_PRELOAD = False\n\n # discover potentially inefficient database queries\n # TODO: use settings overrides to ensure this always is True during tests\n INSTALLED_APPS += (\"nplusone.ext.django\",)\n MIDDLEWARE += (\"nplusone.ext.django.NPlusOneMiddleware\",)\n NPLUSONE_RAISE = True\n\n # insert the amazing django-debug-toolbar\n INSTALLED_APPS += (\"debug_toolbar\",)\n MIDDLEWARE.insert(0, \"debug_toolbar.middleware.DebugToolbarMiddleware\")\n\n DEBUG_TOOLBAR_CONFIG = {\n # due to Docker, bypass Debug Toolbar's check on INTERNAL_IPS\n \"SHOW_TOOLBAR_CALLBACK\": lambda _: True,\n }\n", "path": "src/registrar/config/settings.py"}]} |
gh_patches_debug_104 | rasdani/github-patches | git_diff | NVIDIA__NVFlare-314 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Server admin port still vulnerable to DOS
There are 128 connections allowed, where each is limited to 512 MB; this leaves 64 GB of memory that can be acquired by a malicious actor.
There is also still the issue that it is not checking whether the socket is closed.
If I understand the idea of that port, the only data sent through it are some JSON files? I don't think that justifies such a large max size.
---
I think this is a larger problem though. Why is the socket being accessed directly? There are many similar gotchas that need to be considered when programming directly on a TCP socket, and there are many libraries that have already done the hard work of solving those problems.
gRPC is an option since it's already in your stack; Flask is an option but it doesn't match the use case too well; zeromq is an option.
--- END ISSUE ---
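As a rough sketch of the mitigation the issue is pointing at — a hard per-request size cap plus an explicit check for a closed peer — the receive loop could look something like the snippet below. The helper name `receive_bounded` and the 512 KB cap are illustrative assumptions, not the project's actual receive logic or its eventual fix.

```python
import socket

# Illustrative cap on a single admin request; the real limit should be sized
# to the JSON payloads the port actually carries, not hundreds of megabytes.
MAX_REQUEST_BYTES = 512 * 1024


def receive_bounded(sock: socket.socket, limit: int = MAX_REQUEST_BYTES) -> bytes:
    """Read from the socket until the peer closes, refusing oversized requests."""
    chunks = []
    total = 0
    while True:
        chunk = sock.recv(4096)
        if not chunk:  # an empty read means the peer closed the connection
            break
        total += len(chunk)
        if total > limit:
            raise ValueError(f"request exceeds {limit} bytes")
        chunks.append(chunk)
    return b"".join(chunks)
```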
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nvflare/fuel/hci/server/hci.py`
Content:
```
1 # Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 import logging
16 import socketserver
17 import ssl
18 import threading
19
20 from nvflare.fuel.hci.conn import Connection, receive_til_end
21 from nvflare.fuel.hci.proto import validate_proto
22 from nvflare.fuel.hci.security import get_certificate_common_name
23
24 from .reg import ServerCommandRegister
25
26 MAX_ADMIN_CONNECTIONS = 128
27
28
29 class _MsgHandler(socketserver.BaseRequestHandler):
30 """Message handler.
31
32 Used by the AdminServer to receive admin commands, validate, then process and do command through the
33 ServerCommandRegister.
34 """
35
36 connections = 0
37 lock = threading.Lock()
38
39 def __init__(self, request, client_address, server):
40 # handle() is called in the constructor so logger must be initialized first
41 self.logger = logging.getLogger(self.__class__.__name__)
42 super().__init__(request, client_address, server)
43
44 def handle(self):
45 try:
46 with _MsgHandler.lock:
47 _MsgHandler.connections += 1
48
49 self.logger.debug(f"Concurrent admin connections: {_MsgHandler.connections}")
50 if _MsgHandler.connections > MAX_ADMIN_CONNECTIONS:
51 raise ConnectionRefusedError(f"Admin connection limit ({MAX_ADMIN_CONNECTIONS}) reached")
52
53 conn = Connection(self.request, self.server)
54
55 if self.server.use_ssl:
56 cn = get_certificate_common_name(self.request.getpeercert())
57 conn.set_prop("_client_cn", cn)
58 valid = self.server.validate_client_cn(cn)
59 else:
60 valid = True
61
62 if not valid:
63 conn.append_error("authentication error")
64 else:
65 req = receive_til_end(self.request).strip()
66 command = None
67 req_json = validate_proto(req)
68 conn.request = req_json
69 if req_json is not None:
70 data = req_json["data"]
71 for item in data:
72 it = item["type"]
73 if it == "command":
74 command = item["data"]
75 break
76
77 if command is None:
78 conn.append_error("protocol violation")
79 else:
80 self.server.cmd_reg.process_command(conn, command)
81 else:
82 # not json encoded
83 conn.append_error("protocol violation")
84
85 if not conn.ended:
86 conn.close()
87 except BaseException as exc:
88 self.logger.error(f"Admin connection terminated due to exception: {str(exc)}")
89 if self.logger.getEffectiveLevel() <= logging.DEBUG:
90 self.logger.exception("Admin connection error")
91 finally:
92 with _MsgHandler.lock:
93 _MsgHandler.connections -= 1
94
95
96 def initialize_hci():
97 socketserver.TCPServer.allow_reuse_address = True
98
99
100 class AdminServer(socketserver.ThreadingTCPServer):
101 # faster re-binding
102 allow_reuse_address = True
103
104 # make this bigger than five
105 request_queue_size = 10
106
107 # kick connections when we exit
108 daemon_threads = True
109
110 def __init__(
111 self,
112 cmd_reg: ServerCommandRegister,
113 host,
114 port,
115 ca_cert=None,
116 server_cert=None,
117 server_key=None,
118 accepted_client_cns=None,
119 ):
120 """Base class of FedAdminServer to create a server that can receive commands.
121
122 Args:
123 cmd_reg: CommandRegister
124 host: the IP address of the admin server
125 port: port number of admin server
126 ca_cert: the root CA's cert file name
127 server_cert: server's cert, signed by the CA
128 server_key: server's private key file
129 accepted_client_cns: list of accepted Common Names from client, if specified
130 """
131 socketserver.TCPServer.__init__(self, (host, port), _MsgHandler, False)
132
133 self.use_ssl = False
134 if ca_cert and server_cert:
135 if accepted_client_cns:
136 assert isinstance(accepted_client_cns, list), "accepted_client_cns must be list but got {}.".format(
137 accepted_client_cns
138 )
139
140 ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
141 ctx.verify_mode = ssl.CERT_REQUIRED
142 ctx.load_verify_locations(ca_cert)
143 ctx.load_cert_chain(certfile=server_cert, keyfile=server_key)
144
145 # replace the socket with an SSL version of itself
146 self.socket = ctx.wrap_socket(self.socket, server_side=True)
147 self.use_ssl = True
148
149 # bind the socket and start the server
150 self.server_bind()
151 self.server_activate()
152
153 self._thread = None
154 self.host = host
155 self.port = port
156 self.accepted_client_cns = accepted_client_cns
157 self.cmd_reg = cmd_reg
158 cmd_reg.finalize()
159 self.logger = logging.getLogger(self.__class__.__name__)
160
161 def validate_client_cn(self, cn):
162 if self.accepted_client_cns:
163 return cn in self.accepted_client_cns
164 else:
165 return True
166
167 def stop(self):
168 self.shutdown()
169 self.cmd_reg.close()
170
171 if self._thread.is_alive():
172 self._thread.join()
173
174 self.logger.info(f"Admin Server {self.host} on Port {self.port} shutdown!")
175
176 def set_command_registry(self, cmd_reg: ServerCommandRegister):
177 if cmd_reg:
178 cmd_reg.finalize()
179
180 if self.cmd_reg:
181 self.cmd_reg.close()
182
183 self.cmd_reg = cmd_reg
184
185 def start(self):
186 if self._thread is None:
187 self._thread = threading.Thread(target=self._run, args=())
188
189 if not self._thread.is_alive():
190 self._thread.start()
191
192 def _run(self):
193 self.logger.info(f"Starting Admin Server {self.host} on Port {self.port}")
194 self.serve_forever()
195
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nvflare/fuel/hci/server/hci.py b/nvflare/fuel/hci/server/hci.py
--- a/nvflare/fuel/hci/server/hci.py
+++ b/nvflare/fuel/hci/server/hci.py
@@ -23,7 +23,7 @@
from .reg import ServerCommandRegister
-MAX_ADMIN_CONNECTIONS = 128
+MAX_ADMIN_CONNECTIONS = 16
class _MsgHandler(socketserver.BaseRequestHandler):
| {"golden_diff": "diff --git a/nvflare/fuel/hci/server/hci.py b/nvflare/fuel/hci/server/hci.py\n--- a/nvflare/fuel/hci/server/hci.py\n+++ b/nvflare/fuel/hci/server/hci.py\n@@ -23,7 +23,7 @@\n \n from .reg import ServerCommandRegister\n \n-MAX_ADMIN_CONNECTIONS = 128\n+MAX_ADMIN_CONNECTIONS = 16\n \n \n class _MsgHandler(socketserver.BaseRequestHandler):\n", "issue": "Server admin port still vulnerable to DOS\nThere are 128 connections allowed where each is limited to 512mb, this leaves 64GB of memory that can be acquired by a mal actor.\r\n\r\nThere is also still the issue where it is not checking if the socket is closed.\r\n\r\nIf I understand the idea of that port, the only data sent through it are some json files? I don't think it justifies such a large max size.\r\n\r\n---\r\n\r\nI think this is a larger problem though. Why is the the socket being accessed directly? There are many similar gotchas need to be considered when programming directly on a TCP socket and there are many libraries that have already done the hard work of solving those problems.\r\n\r\ngRPC is an option since its already in your stack, Flask is an option but it doesn't match the use case too well, zeromq is an option\n", "before_files": [{"content": "# Copyright (c) 2021-2022, NVIDIA CORPORATION. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nimport socketserver\nimport ssl\nimport threading\n\nfrom nvflare.fuel.hci.conn import Connection, receive_til_end\nfrom nvflare.fuel.hci.proto import validate_proto\nfrom nvflare.fuel.hci.security import get_certificate_common_name\n\nfrom .reg import ServerCommandRegister\n\nMAX_ADMIN_CONNECTIONS = 128\n\n\nclass _MsgHandler(socketserver.BaseRequestHandler):\n \"\"\"Message handler.\n\n Used by the AdminServer to receive admin commands, validate, then process and do command through the\n ServerCommandRegister.\n \"\"\"\n\n connections = 0\n lock = threading.Lock()\n\n def __init__(self, request, client_address, server):\n # handle() is called in the constructor so logger must be initialized first\n self.logger = logging.getLogger(self.__class__.__name__)\n super().__init__(request, client_address, server)\n\n def handle(self):\n try:\n with _MsgHandler.lock:\n _MsgHandler.connections += 1\n\n self.logger.debug(f\"Concurrent admin connections: {_MsgHandler.connections}\")\n if _MsgHandler.connections > MAX_ADMIN_CONNECTIONS:\n raise ConnectionRefusedError(f\"Admin connection limit ({MAX_ADMIN_CONNECTIONS}) reached\")\n\n conn = Connection(self.request, self.server)\n\n if self.server.use_ssl:\n cn = get_certificate_common_name(self.request.getpeercert())\n conn.set_prop(\"_client_cn\", cn)\n valid = self.server.validate_client_cn(cn)\n else:\n valid = True\n\n if not valid:\n conn.append_error(\"authentication error\")\n else:\n req = receive_til_end(self.request).strip()\n command = None\n req_json = validate_proto(req)\n conn.request = req_json\n if req_json is not None:\n data = req_json[\"data\"]\n for item in data:\n it = 
item[\"type\"]\n if it == \"command\":\n command = item[\"data\"]\n break\n\n if command is None:\n conn.append_error(\"protocol violation\")\n else:\n self.server.cmd_reg.process_command(conn, command)\n else:\n # not json encoded\n conn.append_error(\"protocol violation\")\n\n if not conn.ended:\n conn.close()\n except BaseException as exc:\n self.logger.error(f\"Admin connection terminated due to exception: {str(exc)}\")\n if self.logger.getEffectiveLevel() <= logging.DEBUG:\n self.logger.exception(\"Admin connection error\")\n finally:\n with _MsgHandler.lock:\n _MsgHandler.connections -= 1\n\n\ndef initialize_hci():\n socketserver.TCPServer.allow_reuse_address = True\n\n\nclass AdminServer(socketserver.ThreadingTCPServer):\n # faster re-binding\n allow_reuse_address = True\n\n # make this bigger than five\n request_queue_size = 10\n\n # kick connections when we exit\n daemon_threads = True\n\n def __init__(\n self,\n cmd_reg: ServerCommandRegister,\n host,\n port,\n ca_cert=None,\n server_cert=None,\n server_key=None,\n accepted_client_cns=None,\n ):\n \"\"\"Base class of FedAdminServer to create a server that can receive commands.\n\n Args:\n cmd_reg: CommandRegister\n host: the IP address of the admin server\n port: port number of admin server\n ca_cert: the root CA's cert file name\n server_cert: server's cert, signed by the CA\n server_key: server's private key file\n accepted_client_cns: list of accepted Common Names from client, if specified\n \"\"\"\n socketserver.TCPServer.__init__(self, (host, port), _MsgHandler, False)\n\n self.use_ssl = False\n if ca_cert and server_cert:\n if accepted_client_cns:\n assert isinstance(accepted_client_cns, list), \"accepted_client_cns must be list but got {}.\".format(\n accepted_client_cns\n )\n\n ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)\n ctx.verify_mode = ssl.CERT_REQUIRED\n ctx.load_verify_locations(ca_cert)\n ctx.load_cert_chain(certfile=server_cert, keyfile=server_key)\n\n # replace the socket with an SSL version of itself\n self.socket = ctx.wrap_socket(self.socket, server_side=True)\n self.use_ssl = True\n\n # bind the socket and start the server\n self.server_bind()\n self.server_activate()\n\n self._thread = None\n self.host = host\n self.port = port\n self.accepted_client_cns = accepted_client_cns\n self.cmd_reg = cmd_reg\n cmd_reg.finalize()\n self.logger = logging.getLogger(self.__class__.__name__)\n\n def validate_client_cn(self, cn):\n if self.accepted_client_cns:\n return cn in self.accepted_client_cns\n else:\n return True\n\n def stop(self):\n self.shutdown()\n self.cmd_reg.close()\n\n if self._thread.is_alive():\n self._thread.join()\n\n self.logger.info(f\"Admin Server {self.host} on Port {self.port} shutdown!\")\n\n def set_command_registry(self, cmd_reg: ServerCommandRegister):\n if cmd_reg:\n cmd_reg.finalize()\n\n if self.cmd_reg:\n self.cmd_reg.close()\n\n self.cmd_reg = cmd_reg\n\n def start(self):\n if self._thread is None:\n self._thread = threading.Thread(target=self._run, args=())\n\n if not self._thread.is_alive():\n self._thread.start()\n\n def _run(self):\n self.logger.info(f\"Starting Admin Server {self.host} on Port {self.port}\")\n self.serve_forever()\n", "path": "nvflare/fuel/hci/server/hci.py"}], "after_files": [{"content": "# Copyright (c) 2021-2022, NVIDIA CORPORATION. 
All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging\nimport socketserver\nimport ssl\nimport threading\n\nfrom nvflare.fuel.hci.conn import Connection, receive_til_end\nfrom nvflare.fuel.hci.proto import validate_proto\nfrom nvflare.fuel.hci.security import get_certificate_common_name\n\nfrom .reg import ServerCommandRegister\n\nMAX_ADMIN_CONNECTIONS = 16\n\n\nclass _MsgHandler(socketserver.BaseRequestHandler):\n \"\"\"Message handler.\n\n Used by the AdminServer to receive admin commands, validate, then process and do command through the\n ServerCommandRegister.\n \"\"\"\n\n connections = 0\n lock = threading.Lock()\n\n def __init__(self, request, client_address, server):\n # handle() is called in the constructor so logger must be initialized first\n self.logger = logging.getLogger(self.__class__.__name__)\n super().__init__(request, client_address, server)\n\n def handle(self):\n try:\n with _MsgHandler.lock:\n _MsgHandler.connections += 1\n\n self.logger.debug(f\"Concurrent admin connections: {_MsgHandler.connections}\")\n if _MsgHandler.connections > MAX_ADMIN_CONNECTIONS:\n raise ConnectionRefusedError(f\"Admin connection limit ({MAX_ADMIN_CONNECTIONS}) reached\")\n\n conn = Connection(self.request, self.server)\n\n if self.server.use_ssl:\n cn = get_certificate_common_name(self.request.getpeercert())\n conn.set_prop(\"_client_cn\", cn)\n valid = self.server.validate_client_cn(cn)\n else:\n valid = True\n\n if not valid:\n conn.append_error(\"authentication error\")\n else:\n req = receive_til_end(self.request).strip()\n command = None\n req_json = validate_proto(req)\n conn.request = req_json\n if req_json is not None:\n data = req_json[\"data\"]\n for item in data:\n it = item[\"type\"]\n if it == \"command\":\n command = item[\"data\"]\n break\n\n if command is None:\n conn.append_error(\"protocol violation\")\n else:\n self.server.cmd_reg.process_command(conn, command)\n else:\n # not json encoded\n conn.append_error(\"protocol violation\")\n\n if not conn.ended:\n conn.close()\n except BaseException as exc:\n self.logger.error(f\"Admin connection terminated due to exception: {str(exc)}\")\n if self.logger.getEffectiveLevel() <= logging.DEBUG:\n self.logger.exception(\"Admin connection error\")\n finally:\n with _MsgHandler.lock:\n _MsgHandler.connections -= 1\n\n\ndef initialize_hci():\n socketserver.TCPServer.allow_reuse_address = True\n\n\nclass AdminServer(socketserver.ThreadingTCPServer):\n # faster re-binding\n allow_reuse_address = True\n\n # make this bigger than five\n request_queue_size = 10\n\n # kick connections when we exit\n daemon_threads = True\n\n def __init__(\n self,\n cmd_reg: ServerCommandRegister,\n host,\n port,\n ca_cert=None,\n server_cert=None,\n server_key=None,\n accepted_client_cns=None,\n ):\n \"\"\"Base class of FedAdminServer to create a server that can receive commands.\n\n Args:\n cmd_reg: CommandRegister\n host: the IP address of the admin server\n port: port number of admin server\n ca_cert: the root CA's cert 
file name\n server_cert: server's cert, signed by the CA\n server_key: server's private key file\n accepted_client_cns: list of accepted Common Names from client, if specified\n \"\"\"\n socketserver.TCPServer.__init__(self, (host, port), _MsgHandler, False)\n\n self.use_ssl = False\n if ca_cert and server_cert:\n if accepted_client_cns:\n assert isinstance(accepted_client_cns, list), \"accepted_client_cns must be list but got {}.\".format(\n accepted_client_cns\n )\n\n ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)\n ctx.verify_mode = ssl.CERT_REQUIRED\n ctx.load_verify_locations(ca_cert)\n ctx.load_cert_chain(certfile=server_cert, keyfile=server_key)\n\n # replace the socket with an SSL version of itself\n self.socket = ctx.wrap_socket(self.socket, server_side=True)\n self.use_ssl = True\n\n # bind the socket and start the server\n self.server_bind()\n self.server_activate()\n\n self._thread = None\n self.host = host\n self.port = port\n self.accepted_client_cns = accepted_client_cns\n self.cmd_reg = cmd_reg\n cmd_reg.finalize()\n self.logger = logging.getLogger(self.__class__.__name__)\n\n def validate_client_cn(self, cn):\n if self.accepted_client_cns:\n return cn in self.accepted_client_cns\n else:\n return True\n\n def stop(self):\n self.shutdown()\n self.cmd_reg.close()\n\n if self._thread.is_alive():\n self._thread.join()\n\n self.logger.info(f\"Admin Server {self.host} on Port {self.port} shutdown!\")\n\n def set_command_registry(self, cmd_reg: ServerCommandRegister):\n if cmd_reg:\n cmd_reg.finalize()\n\n if self.cmd_reg:\n self.cmd_reg.close()\n\n self.cmd_reg = cmd_reg\n\n def start(self):\n if self._thread is None:\n self._thread = threading.Thread(target=self._run, args=())\n\n if not self._thread.is_alive():\n self._thread.start()\n\n def _run(self):\n self.logger.info(f\"Starting Admin Server {self.host} on Port {self.port}\")\n self.serve_forever()\n", "path": "nvflare/fuel/hci/server/hci.py"}]} |
gh_patches_debug_105 | rasdani/github-patches | git_diff | Netflix__lemur-924 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Missing 'default' rotation policy
When trying to create a certificate, the following error message is displayed:
`{"_schema":"Unable to find <class 'lemur.policies.models.RotationPolicy'> with name: default"}`
--- END ISSUE ---
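For context, the error above comes from resolving a rotation policy by name during certificate creation. A rough sketch of that lookup pattern is shown below; the function signature and parameter names are assumptions for illustration and are not necessarily Lemur's exact internals.

```python
# Illustrative sketch only; names are assumptions, not Lemur's actual API.
def get_policy_by_name(session, policy_model, name="default"):
    """Return the rotation policy with the given name, or raise if it is absent."""
    policy = (
        session.query(policy_model)
        .filter(policy_model.name == name)
        .one_or_none()
    )
    if policy is None:
        # This is the situation the issue describes: no policy named "default" exists.
        raise ValueError(f"Unable to find {policy_model} with name: {name}")
    return policy
```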
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `lemur/manage.py`
Content:
```
1 from __future__ import unicode_literals # at top of module
2
3 import os
4 import sys
5 import base64
6 import requests
7 import json
8
9 from gunicorn.config import make_settings
10
11 from cryptography.fernet import Fernet
12
13 from flask import current_app
14 from flask_script import Manager, Command, Option, prompt_pass
15 from flask_migrate import Migrate, MigrateCommand, stamp
16 from flask_script.commands import ShowUrls, Clean, Server
17
18 from lemur.sources.cli import manager as source_manager
19 from lemur.policies.cli import manager as policy_manager
20 from lemur.reporting.cli import manager as report_manager
21 from lemur.endpoints.cli import manager as endpoint_manager
22 from lemur.certificates.cli import manager as certificate_manager
23 from lemur.notifications.cli import manager as notification_manager
24
25 from lemur import database
26 from lemur.users import service as user_service
27 from lemur.roles import service as role_service
28 from lemur.policies import service as policy_service
29 from lemur.notifications import service as notification_service
30
31 from lemur.common.utils import validate_conf
32
33 from lemur import create_app
34
35 # Needed to be imported so that SQLAlchemy create_all can find our models
36 from lemur.users.models import User # noqa
37 from lemur.roles.models import Role # noqa
38 from lemur.authorities.models import Authority # noqa
39 from lemur.certificates.models import Certificate # noqa
40 from lemur.destinations.models import Destination # noqa
41 from lemur.domains.models import Domain # noqa
42 from lemur.notifications.models import Notification # noqa
43 from lemur.sources.models import Source # noqa
44 from lemur.logs.models import Log # noqa
45 from lemur.endpoints.models import Endpoint # noqa
46 from lemur.policies.models import RotationPolicy # noqa
47
48
49 manager = Manager(create_app)
50 manager.add_option('-c', '--config', dest='config')
51
52 migrate = Migrate(create_app)
53
54 REQUIRED_VARIABLES = [
55 'LEMUR_SECURITY_TEAM_EMAIL',
56 'LEMUR_DEFAULT_ORGANIZATIONAL_UNIT',
57 'LEMUR_DEFAULT_ORGANIZATION',
58 'LEMUR_DEFAULT_LOCATION',
59 'LEMUR_DEFAULT_COUNTRY',
60 'LEMUR_DEFAULT_STATE',
61 'SQLALCHEMY_DATABASE_URI'
62 ]
63
64 KEY_LENGTH = 40
65 DEFAULT_CONFIG_PATH = '~/.lemur/lemur.conf.py'
66 DEFAULT_SETTINGS = 'lemur.conf.server'
67 SETTINGS_ENVVAR = 'LEMUR_CONF'
68
69 CONFIG_TEMPLATE = """
70 # This is just Python which means you can inherit and tweak settings
71
72 import os
73 _basedir = os.path.abspath(os.path.dirname(__file__))
74
75 THREADS_PER_PAGE = 8
76
77 # General
78
79 # These will need to be set to `True` if you are developing locally
80 CORS = False
81 debug = False
82
83 # this is the secret key used by flask session management
84 SECRET_KEY = '{flask_secret_key}'
85
86 # You should consider storing these separately from your config
87 LEMUR_TOKEN_SECRET = '{secret_token}'
88 LEMUR_ENCRYPTION_KEYS = '{encryption_key}'
89
90 # List of domain regular expressions that non-admin users can issue
91 LEMUR_WHITELISTED_DOMAINS = []
92
93 # Mail Server
94
95 LEMUR_EMAIL = ''
96 LEMUR_SECURITY_TEAM_EMAIL = []
97
98 # Certificate Defaults
99
100 LEMUR_DEFAULT_COUNTRY = ''
101 LEMUR_DEFAULT_STATE = ''
102 LEMUR_DEFAULT_LOCATION = ''
103 LEMUR_DEFAULT_ORGANIZATION = ''
104 LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = ''
105
106 # Authentication Providers
107 ACTIVE_PROVIDERS = []
108
109 # Logging
110
111 LOG_LEVEL = "DEBUG"
112 LOG_FILE = "lemur.log"
113
114
115 # Database
116
117 # modify this if you are not using a local database
118 SQLALCHEMY_DATABASE_URI = 'postgresql://lemur:lemur@localhost:5432/lemur'
119
120 # AWS
121
122 #LEMUR_INSTANCE_PROFILE = 'Lemur'
123
124 # Issuers
125
126 # These will be dependent on which 3rd party that Lemur is
127 # configured to use.
128
129 # VERISIGN_URL = ''
130 # VERISIGN_PEM_PATH = ''
131 # VERISIGN_FIRST_NAME = ''
132 # VERISIGN_LAST_NAME = ''
133 # VERSIGN_EMAIL = ''
134 """
135
136
137 @MigrateCommand.command
138 def create():
139 database.db.create_all()
140 stamp(revision='head')
141
142
143 @MigrateCommand.command
144 def drop_all():
145 database.db.drop_all()
146
147
148 @manager.shell
149 def make_shell_context():
150 """
151 Creates a python REPL with several default imports
152 in the context of the current_app
153
154 :return:
155 """
156 return dict(current_app=current_app)
157
158
159 def generate_settings():
160 """
161 This command is run when ``default_path`` doesn't exist, or ``init`` is
162 run and returns a string representing the default data to put into their
163 settings file.
164 """
165 output = CONFIG_TEMPLATE.format(
166 # we use Fernet.generate_key to make sure that the key length is
167 # compatible with Fernet
168 encryption_key=Fernet.generate_key().decode('utf-8'),
169 secret_token=base64.b64encode(os.urandom(KEY_LENGTH)).decode('utf-8'),
170 flask_secret_key=base64.b64encode(os.urandom(KEY_LENGTH)).decode('utf-8'),
171 )
172
173 return output
174
175
176 class InitializeApp(Command):
177 """
178 This command will bootstrap our database with any destinations as
179 specified by our config.
180
181 Additionally a Lemur user will be created as a default user
182 and be used when certificates are discovered by Lemur.
183 """
184 option_list = (
185 Option('-p', '--password', dest='password'),
186 )
187
188 def run(self, password):
189 create()
190 user = user_service.get_by_username("lemur")
191
192 admin_role = role_service.get_by_name('admin')
193
194 if admin_role:
195 sys.stdout.write("[-] Admin role already created, skipping...!\n")
196 else:
197 # we create an admin role
198 admin_role = role_service.create('admin', description='This is the Lemur administrator role.')
199 sys.stdout.write("[+] Created 'admin' role\n")
200
201 operator_role = role_service.get_by_name('operator')
202
203 if operator_role:
204 sys.stdout.write("[-] Operator role already created, skipping...!\n")
205 else:
206 # we create an admin role
207 operator_role = role_service.create('operator', description='This is the Lemur operator role.')
208 sys.stdout.write("[+] Created 'operator' role\n")
209
210 read_only_role = role_service.get_by_name('read-only')
211
212 if read_only_role:
213 sys.stdout.write("[-] Operator role already created, skipping...!\n")
214 else:
215 # we create an admin role
216 read_only_role = role_service.create('read-only', description='This is the Lemur read only role.')
217 sys.stdout.write("[+] Created 'read-only' role\n")
218
219 if not user:
220 if not password:
221 sys.stdout.write("We need to set Lemur's password to continue!\n")
222 password = prompt_pass("Password")
223 password1 = prompt_pass("Confirm Password")
224
225 if password != password1:
226 sys.stderr.write("[!] Passwords do not match!\n")
227 sys.exit(1)
228
229 user_service.create("lemur", password, '[email protected]', True, None, [admin_role])
230 sys.stdout.write("[+] Created the user 'lemur' and granted it the 'admin' role!\n")
231
232 else:
233 sys.stdout.write("[-] Default user has already been created, skipping...!\n")
234
235 sys.stdout.write("[+] Creating expiration email notifications!\n")
236 sys.stdout.write("[!] Using {0} as specified by LEMUR_SECURITY_TEAM_EMAIL for notifications\n".format("LEMUR_SECURITY_TEAM_EMAIL"))
237
238 intervals = current_app.config.get("LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS", [])
239 sys.stdout.write(
240 "[!] Creating {num} notifications for {intervals} days as specified by LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS\n".format(
241 num=len(intervals),
242 intervals=",".join([str(x) for x in intervals])
243 )
244 )
245
246 recipients = current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL')
247 notification_service.create_default_expiration_notifications("DEFAULT_SECURITY", recipients=recipients)
248
249 days = current_app.config.get("LEMUR_DEFAULT_ROTATION_INTERVAL", 30)
250 sys.stdout.write("[+] Creating default certificate rotation policy of {days} days before issuance.\n".format(
251 days=days
252 ))
253
254 policy_service.create(days=days)
255 sys.stdout.write("[/] Done!\n")
256
257
258 class CreateUser(Command):
259 """
260 This command allows for the creation of a new user within Lemur.
261 """
262 option_list = (
263 Option('-u', '--username', dest='username', required=True),
264 Option('-e', '--email', dest='email', required=True),
265 Option('-a', '--active', dest='active', default=True),
266 Option('-r', '--roles', dest='roles', action='append', default=[])
267 )
268
269 def run(self, username, email, active, roles):
270 role_objs = []
271 for r in roles:
272 role_obj = role_service.get_by_name(r)
273 if role_obj:
274 role_objs.append(role_obj)
275 else:
276 sys.stderr.write("[!] Cannot find role {0}\n".format(r))
277 sys.exit(1)
278
279 password1 = prompt_pass("Password")
280 password2 = prompt_pass("Confirm Password")
281
282 if password1 != password2:
283 sys.stderr.write("[!] Passwords do not match!\n")
284 sys.exit(1)
285
286 user_service.create(username, password1, email, active, None, role_objs)
287 sys.stdout.write("[+] Created new user: {0}\n".format(username))
288
289
290 class ResetPassword(Command):
291 """
292 This command allows you to reset a user's password.
293 """
294 option_list = (
295 Option('-u', '--username', dest='username', required=True),
296 )
297
298 def run(self, username):
299 user = user_service.get_by_username(username)
300
301 if not user:
302 sys.stderr.write("[!] No user found for username: {0}\n".format(username))
303 sys.exit(1)
304
305 sys.stderr.write("[+] Resetting password for {0}\n".format(username))
306 password1 = prompt_pass("Password")
307 password2 = prompt_pass("Confirm Password")
308
309 if password1 != password2:
310 sys.stderr.write("[!] Passwords do not match\n")
311 sys.exit(1)
312
313 user.password = password1
314 user.hash_password()
315 database.commit()
316
317
318 class CreateRole(Command):
319 """
320 This command allows for the creation of a new role within Lemur
321 """
322 option_list = (
323 Option('-n', '--name', dest='name', required=True),
324 Option('-u', '--users', dest='users', default=[]),
325 Option('-d', '--description', dest='description', required=True)
326 )
327
328 def run(self, name, users, description):
329 user_objs = []
330 for u in users:
331 user_obj = user_service.get_by_username(u)
332 if user_obj:
333 user_objs.append(user_obj)
334 else:
335 sys.stderr.write("[!] Cannot find user {0}".format(u))
336 sys.exit(1)
337 role_service.create(name, description=description, users=users)
338 sys.stdout.write("[+] Created new role: {0}".format(name))
339
340
341 class LemurServer(Command):
342 """
343 This is the main Lemur server, it runs the flask app with gunicorn and
344 uses any configuration options passed to it.
345
346
347 You can pass all standard gunicorn flags to this command as if you were
348 running gunicorn itself.
349
350 For example:
351
352 lemur start -w 4 -b 127.0.0.0:8002
353
354 Will start gunicorn with 4 workers bound to 127.0.0.0:8002
355 """
356 description = 'Run the app within Gunicorn'
357
358 def get_options(self):
359 settings = make_settings()
360 options = []
361 for setting, klass in settings.items():
362 if klass.cli:
363 if klass.action:
364 if klass.action == 'store_const':
365 options.append(Option(*klass.cli, const=klass.const, action=klass.action))
366 else:
367 options.append(Option(*klass.cli, action=klass.action))
368 else:
369 options.append(Option(*klass.cli))
370
371 return options
372
373 def run(self, *args, **kwargs):
374 from gunicorn.app.wsgiapp import WSGIApplication
375
376 app = WSGIApplication()
377
378 # run startup tasks on a app like object
379 validate_conf(current_app, REQUIRED_VARIABLES)
380
381 app.app_uri = 'lemur:create_app(config="{0}")'.format(current_app.config.get('CONFIG_PATH'))
382
383 return app.run()
384
385
386 @manager.command
387 def create_config(config_path=None):
388 """
389 Creates a new configuration file if one does not already exist
390 """
391 if not config_path:
392 config_path = DEFAULT_CONFIG_PATH
393
394 config_path = os.path.expanduser(config_path)
395 dir = os.path.dirname(config_path)
396
397 if not os.path.exists(dir):
398 os.makedirs(dir)
399
400 config = generate_settings()
401 with open(config_path, 'w') as f:
402 f.write(config)
403
404 sys.stdout.write("[+] Created a new configuration file {0}\n".format(config_path))
405
406
407 @manager.command
408 def lock(path=None):
409 """
410 Encrypts a given path. This directory can be used to store secrets needed for normal
411 Lemur operation. This is especially useful for storing secrets needed for communication
412 with third parties (e.g. external certificate authorities).
413
414 Lemur does not assume anything about the contents of the directory and will attempt to
415 encrypt all files contained within. Currently this has only been tested against plain
416 text files.
417
418 Path defaults ~/.lemur/keys
419
420 :param: path
421 """
422 if not path:
423 path = os.path.expanduser('~/.lemur/keys')
424
425 dest_dir = os.path.join(path, "encrypted")
426 sys.stdout.write("[!] Generating a new key...\n")
427
428 key = Fernet.generate_key()
429
430 if not os.path.exists(dest_dir):
431 sys.stdout.write("[+] Creating encryption directory: {0}\n".format(dest_dir))
432 os.makedirs(dest_dir)
433
434 for root, dirs, files in os.walk(os.path.join(path, 'decrypted')):
435 for f in files:
436 source = os.path.join(root, f)
437 dest = os.path.join(dest_dir, f + ".enc")
438 with open(source, 'rb') as in_file, open(dest, 'wb') as out_file:
439 f = Fernet(key)
440 data = f.encrypt(in_file.read())
441 out_file.write(data)
442 sys.stdout.write("[+] Writing file: {0} Source: {1}\n".format(dest, source))
443
444 sys.stdout.write("[+] Keys have been encrypted with key {0}\n".format(key))
445
446
447 @manager.command
448 def unlock(path=None):
449 """
450 Decrypts all of the files in a given directory with provided password.
451 This is most commonly used during the startup sequence of Lemur
452 allowing it to go from source code to something that can communicate
453 with external services.
454
455 Path defaults ~/.lemur/keys
456
457 :param: path
458 """
459 key = prompt_pass("[!] Please enter the encryption password")
460
461 if not path:
462 path = os.path.expanduser('~/.lemur/keys')
463
464 dest_dir = os.path.join(path, "decrypted")
465 source_dir = os.path.join(path, "encrypted")
466
467 if not os.path.exists(dest_dir):
468 sys.stdout.write("[+] Creating decryption directory: {0}\n".format(dest_dir))
469 os.makedirs(dest_dir)
470
471 for root, dirs, files in os.walk(source_dir):
472 for f in files:
473 source = os.path.join(source_dir, f)
474 dest = os.path.join(dest_dir, ".".join(f.split(".")[:-1]))
475 with open(source, 'rb') as in_file, open(dest, 'wb') as out_file:
476 f = Fernet(key)
477 data = f.decrypt(in_file.read())
478 out_file.write(data)
479 sys.stdout.write("[+] Writing file: {0} Source: {1}\n".format(dest, source))
480
481 sys.stdout.write("[+] Keys have been unencrypted!\n")
482
483
484 @manager.command
485 def publish_verisign_units():
486 """
487 Simple function that queries verisign for API units and posts the mertics to
488 Atlas API for other teams to consume.
489 :return:
490 """
491 from lemur.plugins import plugins
492 v = plugins.get('verisign-issuer')
493 units = v.get_available_units()
494
495 metrics = {}
496 for item in units:
497 if item['@type'] in metrics.keys():
498 metrics[item['@type']] += int(item['@remaining'])
499 else:
500 metrics.update({item['@type']: int(item['@remaining'])})
501
502 for name, value in metrics.items():
503 metric = [
504 {
505 "timestamp": 1321351651,
506 "type": "GAUGE",
507 "name": "Symantec {0} Unit Count".format(name),
508 "tags": {},
509 "value": value
510 }
511 ]
512
513 requests.post('http://localhost:8078/metrics', data=json.dumps(metric))
514
515
516 @manager.command
517 def publish_unapproved_verisign_certificates():
518 """
519 Query the Verisign for any certificates that need to be approved.
520 :return:
521 """
522 from lemur.plugins import plugins
523 from lemur.extensions import metrics
524 v = plugins.get('verisign-issuer')
525 certs = v.get_pending_certificates()
526 metrics.send('pending_certificates', 'gauge', certs)
527
528
529 def main():
530 manager.add_command("start", LemurServer())
531 manager.add_command("runserver", Server(host='127.0.0.1', threaded=True))
532 manager.add_command("clean", Clean())
533 manager.add_command("show_urls", ShowUrls())
534 manager.add_command("db", MigrateCommand)
535 manager.add_command("init", InitializeApp())
536 manager.add_command("create_user", CreateUser())
537 manager.add_command("reset_password", ResetPassword())
538 manager.add_command("create_role", CreateRole())
539 manager.add_command("source", source_manager)
540 manager.add_command("certificate", certificate_manager)
541 manager.add_command("notify", notification_manager)
542 manager.add_command("endpoint", endpoint_manager)
543 manager.add_command("report", report_manager)
544 manager.add_command("policy", policy_manager)
545 manager.run()
546
547
548 if __name__ == "__main__":
549 main()
550
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/lemur/manage.py b/lemur/manage.py
--- a/lemur/manage.py
+++ b/lemur/manage.py
@@ -251,7 +251,7 @@
days=days
))
- policy_service.create(days=days)
+ policy_service.create(days=days, name='default')
sys.stdout.write("[/] Done!\n")
| {"golden_diff": "diff --git a/lemur/manage.py b/lemur/manage.py\n--- a/lemur/manage.py\n+++ b/lemur/manage.py\n@@ -251,7 +251,7 @@\n days=days\n ))\n \n- policy_service.create(days=days)\n+ policy_service.create(days=days, name='default')\n sys.stdout.write(\"[/] Done!\\n\")\n", "issue": "Missing 'default' rotation policy\nWhen trying to create a certificate, the error message is displayed:\r\n\r\n`{\"_schema\":\"Unable to find <class 'lemur.policies.models.RotationPolicy'> with name: default\"}`\n", "before_files": [{"content": "from __future__ import unicode_literals # at top of module\n\nimport os\nimport sys\nimport base64\nimport requests\nimport json\n\nfrom gunicorn.config import make_settings\n\nfrom cryptography.fernet import Fernet\n\nfrom flask import current_app\nfrom flask_script import Manager, Command, Option, prompt_pass\nfrom flask_migrate import Migrate, MigrateCommand, stamp\nfrom flask_script.commands import ShowUrls, Clean, Server\n\nfrom lemur.sources.cli import manager as source_manager\nfrom lemur.policies.cli import manager as policy_manager\nfrom lemur.reporting.cli import manager as report_manager\nfrom lemur.endpoints.cli import manager as endpoint_manager\nfrom lemur.certificates.cli import manager as certificate_manager\nfrom lemur.notifications.cli import manager as notification_manager\n\nfrom lemur import database\nfrom lemur.users import service as user_service\nfrom lemur.roles import service as role_service\nfrom lemur.policies import service as policy_service\nfrom lemur.notifications import service as notification_service\n\nfrom lemur.common.utils import validate_conf\n\nfrom lemur import create_app\n\n# Needed to be imported so that SQLAlchemy create_all can find our models\nfrom lemur.users.models import User # noqa\nfrom lemur.roles.models import Role # noqa\nfrom lemur.authorities.models import Authority # noqa\nfrom lemur.certificates.models import Certificate # noqa\nfrom lemur.destinations.models import Destination # noqa\nfrom lemur.domains.models import Domain # noqa\nfrom lemur.notifications.models import Notification # noqa\nfrom lemur.sources.models import Source # noqa\nfrom lemur.logs.models import Log # noqa\nfrom lemur.endpoints.models import Endpoint # noqa\nfrom lemur.policies.models import RotationPolicy # noqa\n\n\nmanager = Manager(create_app)\nmanager.add_option('-c', '--config', dest='config')\n\nmigrate = Migrate(create_app)\n\nREQUIRED_VARIABLES = [\n 'LEMUR_SECURITY_TEAM_EMAIL',\n 'LEMUR_DEFAULT_ORGANIZATIONAL_UNIT',\n 'LEMUR_DEFAULT_ORGANIZATION',\n 'LEMUR_DEFAULT_LOCATION',\n 'LEMUR_DEFAULT_COUNTRY',\n 'LEMUR_DEFAULT_STATE',\n 'SQLALCHEMY_DATABASE_URI'\n]\n\nKEY_LENGTH = 40\nDEFAULT_CONFIG_PATH = '~/.lemur/lemur.conf.py'\nDEFAULT_SETTINGS = 'lemur.conf.server'\nSETTINGS_ENVVAR = 'LEMUR_CONF'\n\nCONFIG_TEMPLATE = \"\"\"\n# This is just Python which means you can inherit and tweak settings\n\nimport os\n_basedir = os.path.abspath(os.path.dirname(__file__))\n\nTHREADS_PER_PAGE = 8\n\n# General\n\n# These will need to be set to `True` if you are developing locally\nCORS = False\ndebug = False\n\n# this is the secret key used by flask session management\nSECRET_KEY = '{flask_secret_key}'\n\n# You should consider storing these separately from your config\nLEMUR_TOKEN_SECRET = '{secret_token}'\nLEMUR_ENCRYPTION_KEYS = '{encryption_key}'\n\n# List of domain regular expressions that non-admin users can issue\nLEMUR_WHITELISTED_DOMAINS = []\n\n# Mail Server\n\nLEMUR_EMAIL = ''\nLEMUR_SECURITY_TEAM_EMAIL = []\n\n# Certificate 
Defaults\n\nLEMUR_DEFAULT_COUNTRY = ''\nLEMUR_DEFAULT_STATE = ''\nLEMUR_DEFAULT_LOCATION = ''\nLEMUR_DEFAULT_ORGANIZATION = ''\nLEMUR_DEFAULT_ORGANIZATIONAL_UNIT = ''\n\n# Authentication Providers\nACTIVE_PROVIDERS = []\n\n# Logging\n\nLOG_LEVEL = \"DEBUG\"\nLOG_FILE = \"lemur.log\"\n\n\n# Database\n\n# modify this if you are not using a local database\nSQLALCHEMY_DATABASE_URI = 'postgresql://lemur:lemur@localhost:5432/lemur'\n\n# AWS\n\n#LEMUR_INSTANCE_PROFILE = 'Lemur'\n\n# Issuers\n\n# These will be dependent on which 3rd party that Lemur is\n# configured to use.\n\n# VERISIGN_URL = ''\n# VERISIGN_PEM_PATH = ''\n# VERISIGN_FIRST_NAME = ''\n# VERISIGN_LAST_NAME = ''\n# VERSIGN_EMAIL = ''\n\"\"\"\n\n\[email protected]\ndef create():\n database.db.create_all()\n stamp(revision='head')\n\n\[email protected]\ndef drop_all():\n database.db.drop_all()\n\n\[email protected]\ndef make_shell_context():\n \"\"\"\n Creates a python REPL with several default imports\n in the context of the current_app\n\n :return:\n \"\"\"\n return dict(current_app=current_app)\n\n\ndef generate_settings():\n \"\"\"\n This command is run when ``default_path`` doesn't exist, or ``init`` is\n run and returns a string representing the default data to put into their\n settings file.\n \"\"\"\n output = CONFIG_TEMPLATE.format(\n # we use Fernet.generate_key to make sure that the key length is\n # compatible with Fernet\n encryption_key=Fernet.generate_key().decode('utf-8'),\n secret_token=base64.b64encode(os.urandom(KEY_LENGTH)).decode('utf-8'),\n flask_secret_key=base64.b64encode(os.urandom(KEY_LENGTH)).decode('utf-8'),\n )\n\n return output\n\n\nclass InitializeApp(Command):\n \"\"\"\n This command will bootstrap our database with any destinations as\n specified by our config.\n\n Additionally a Lemur user will be created as a default user\n and be used when certificates are discovered by Lemur.\n \"\"\"\n option_list = (\n Option('-p', '--password', dest='password'),\n )\n\n def run(self, password):\n create()\n user = user_service.get_by_username(\"lemur\")\n\n admin_role = role_service.get_by_name('admin')\n\n if admin_role:\n sys.stdout.write(\"[-] Admin role already created, skipping...!\\n\")\n else:\n # we create an admin role\n admin_role = role_service.create('admin', description='This is the Lemur administrator role.')\n sys.stdout.write(\"[+] Created 'admin' role\\n\")\n\n operator_role = role_service.get_by_name('operator')\n\n if operator_role:\n sys.stdout.write(\"[-] Operator role already created, skipping...!\\n\")\n else:\n # we create an admin role\n operator_role = role_service.create('operator', description='This is the Lemur operator role.')\n sys.stdout.write(\"[+] Created 'operator' role\\n\")\n\n read_only_role = role_service.get_by_name('read-only')\n\n if read_only_role:\n sys.stdout.write(\"[-] Operator role already created, skipping...!\\n\")\n else:\n # we create an admin role\n read_only_role = role_service.create('read-only', description='This is the Lemur read only role.')\n sys.stdout.write(\"[+] Created 'read-only' role\\n\")\n\n if not user:\n if not password:\n sys.stdout.write(\"We need to set Lemur's password to continue!\\n\")\n password = prompt_pass(\"Password\")\n password1 = prompt_pass(\"Confirm Password\")\n\n if password != password1:\n sys.stderr.write(\"[!] 
Passwords do not match!\\n\")\n sys.exit(1)\n\n user_service.create(\"lemur\", password, '[email protected]', True, None, [admin_role])\n sys.stdout.write(\"[+] Created the user 'lemur' and granted it the 'admin' role!\\n\")\n\n else:\n sys.stdout.write(\"[-] Default user has already been created, skipping...!\\n\")\n\n sys.stdout.write(\"[+] Creating expiration email notifications!\\n\")\n sys.stdout.write(\"[!] Using {0} as specified by LEMUR_SECURITY_TEAM_EMAIL for notifications\\n\".format(\"LEMUR_SECURITY_TEAM_EMAIL\"))\n\n intervals = current_app.config.get(\"LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS\", [])\n sys.stdout.write(\n \"[!] Creating {num} notifications for {intervals} days as specified by LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS\\n\".format(\n num=len(intervals),\n intervals=\",\".join([str(x) for x in intervals])\n )\n )\n\n recipients = current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL')\n notification_service.create_default_expiration_notifications(\"DEFAULT_SECURITY\", recipients=recipients)\n\n days = current_app.config.get(\"LEMUR_DEFAULT_ROTATION_INTERVAL\", 30)\n sys.stdout.write(\"[+] Creating default certificate rotation policy of {days} days before issuance.\\n\".format(\n days=days\n ))\n\n policy_service.create(days=days)\n sys.stdout.write(\"[/] Done!\\n\")\n\n\nclass CreateUser(Command):\n \"\"\"\n This command allows for the creation of a new user within Lemur.\n \"\"\"\n option_list = (\n Option('-u', '--username', dest='username', required=True),\n Option('-e', '--email', dest='email', required=True),\n Option('-a', '--active', dest='active', default=True),\n Option('-r', '--roles', dest='roles', action='append', default=[])\n )\n\n def run(self, username, email, active, roles):\n role_objs = []\n for r in roles:\n role_obj = role_service.get_by_name(r)\n if role_obj:\n role_objs.append(role_obj)\n else:\n sys.stderr.write(\"[!] Cannot find role {0}\\n\".format(r))\n sys.exit(1)\n\n password1 = prompt_pass(\"Password\")\n password2 = prompt_pass(\"Confirm Password\")\n\n if password1 != password2:\n sys.stderr.write(\"[!] Passwords do not match!\\n\")\n sys.exit(1)\n\n user_service.create(username, password1, email, active, None, role_objs)\n sys.stdout.write(\"[+] Created new user: {0}\\n\".format(username))\n\n\nclass ResetPassword(Command):\n \"\"\"\n This command allows you to reset a user's password.\n \"\"\"\n option_list = (\n Option('-u', '--username', dest='username', required=True),\n )\n\n def run(self, username):\n user = user_service.get_by_username(username)\n\n if not user:\n sys.stderr.write(\"[!] No user found for username: {0}\\n\".format(username))\n sys.exit(1)\n\n sys.stderr.write(\"[+] Resetting password for {0}\\n\".format(username))\n password1 = prompt_pass(\"Password\")\n password2 = prompt_pass(\"Confirm Password\")\n\n if password1 != password2:\n sys.stderr.write(\"[!] Passwords do not match\\n\")\n sys.exit(1)\n\n user.password = password1\n user.hash_password()\n database.commit()\n\n\nclass CreateRole(Command):\n \"\"\"\n This command allows for the creation of a new role within Lemur\n \"\"\"\n option_list = (\n Option('-n', '--name', dest='name', required=True),\n Option('-u', '--users', dest='users', default=[]),\n Option('-d', '--description', dest='description', required=True)\n )\n\n def run(self, name, users, description):\n user_objs = []\n for u in users:\n user_obj = user_service.get_by_username(u)\n if user_obj:\n user_objs.append(user_obj)\n else:\n sys.stderr.write(\"[!] 
Cannot find user {0}\".format(u))\n sys.exit(1)\n role_service.create(name, description=description, users=users)\n sys.stdout.write(\"[+] Created new role: {0}\".format(name))\n\n\nclass LemurServer(Command):\n \"\"\"\n This is the main Lemur server, it runs the flask app with gunicorn and\n uses any configuration options passed to it.\n\n\n You can pass all standard gunicorn flags to this command as if you were\n running gunicorn itself.\n\n For example:\n\n lemur start -w 4 -b 127.0.0.0:8002\n\n Will start gunicorn with 4 workers bound to 127.0.0.0:8002\n \"\"\"\n description = 'Run the app within Gunicorn'\n\n def get_options(self):\n settings = make_settings()\n options = []\n for setting, klass in settings.items():\n if klass.cli:\n if klass.action:\n if klass.action == 'store_const':\n options.append(Option(*klass.cli, const=klass.const, action=klass.action))\n else:\n options.append(Option(*klass.cli, action=klass.action))\n else:\n options.append(Option(*klass.cli))\n\n return options\n\n def run(self, *args, **kwargs):\n from gunicorn.app.wsgiapp import WSGIApplication\n\n app = WSGIApplication()\n\n # run startup tasks on a app like object\n validate_conf(current_app, REQUIRED_VARIABLES)\n\n app.app_uri = 'lemur:create_app(config=\"{0}\")'.format(current_app.config.get('CONFIG_PATH'))\n\n return app.run()\n\n\[email protected]\ndef create_config(config_path=None):\n \"\"\"\n Creates a new configuration file if one does not already exist\n \"\"\"\n if not config_path:\n config_path = DEFAULT_CONFIG_PATH\n\n config_path = os.path.expanduser(config_path)\n dir = os.path.dirname(config_path)\n\n if not os.path.exists(dir):\n os.makedirs(dir)\n\n config = generate_settings()\n with open(config_path, 'w') as f:\n f.write(config)\n\n sys.stdout.write(\"[+] Created a new configuration file {0}\\n\".format(config_path))\n\n\[email protected]\ndef lock(path=None):\n \"\"\"\n Encrypts a given path. This directory can be used to store secrets needed for normal\n Lemur operation. This is especially useful for storing secrets needed for communication\n with third parties (e.g. external certificate authorities).\n\n Lemur does not assume anything about the contents of the directory and will attempt to\n encrypt all files contained within. Currently this has only been tested against plain\n text files.\n\n Path defaults ~/.lemur/keys\n\n :param: path\n \"\"\"\n if not path:\n path = os.path.expanduser('~/.lemur/keys')\n\n dest_dir = os.path.join(path, \"encrypted\")\n sys.stdout.write(\"[!] 
Generating a new key...\\n\")\n\n key = Fernet.generate_key()\n\n if not os.path.exists(dest_dir):\n sys.stdout.write(\"[+] Creating encryption directory: {0}\\n\".format(dest_dir))\n os.makedirs(dest_dir)\n\n for root, dirs, files in os.walk(os.path.join(path, 'decrypted')):\n for f in files:\n source = os.path.join(root, f)\n dest = os.path.join(dest_dir, f + \".enc\")\n with open(source, 'rb') as in_file, open(dest, 'wb') as out_file:\n f = Fernet(key)\n data = f.encrypt(in_file.read())\n out_file.write(data)\n sys.stdout.write(\"[+] Writing file: {0} Source: {1}\\n\".format(dest, source))\n\n sys.stdout.write(\"[+] Keys have been encrypted with key {0}\\n\".format(key))\n\n\[email protected]\ndef unlock(path=None):\n \"\"\"\n Decrypts all of the files in a given directory with provided password.\n This is most commonly used during the startup sequence of Lemur\n allowing it to go from source code to something that can communicate\n with external services.\n\n Path defaults ~/.lemur/keys\n\n :param: path\n \"\"\"\n key = prompt_pass(\"[!] Please enter the encryption password\")\n\n if not path:\n path = os.path.expanduser('~/.lemur/keys')\n\n dest_dir = os.path.join(path, \"decrypted\")\n source_dir = os.path.join(path, \"encrypted\")\n\n if not os.path.exists(dest_dir):\n sys.stdout.write(\"[+] Creating decryption directory: {0}\\n\".format(dest_dir))\n os.makedirs(dest_dir)\n\n for root, dirs, files in os.walk(source_dir):\n for f in files:\n source = os.path.join(source_dir, f)\n dest = os.path.join(dest_dir, \".\".join(f.split(\".\")[:-1]))\n with open(source, 'rb') as in_file, open(dest, 'wb') as out_file:\n f = Fernet(key)\n data = f.decrypt(in_file.read())\n out_file.write(data)\n sys.stdout.write(\"[+] Writing file: {0} Source: {1}\\n\".format(dest, source))\n\n sys.stdout.write(\"[+] Keys have been unencrypted!\\n\")\n\n\[email protected]\ndef publish_verisign_units():\n \"\"\"\n Simple function that queries verisign for API units and posts the mertics to\n Atlas API for other teams to consume.\n :return:\n \"\"\"\n from lemur.plugins import plugins\n v = plugins.get('verisign-issuer')\n units = v.get_available_units()\n\n metrics = {}\n for item in units:\n if item['@type'] in metrics.keys():\n metrics[item['@type']] += int(item['@remaining'])\n else:\n metrics.update({item['@type']: int(item['@remaining'])})\n\n for name, value in metrics.items():\n metric = [\n {\n \"timestamp\": 1321351651,\n \"type\": \"GAUGE\",\n \"name\": \"Symantec {0} Unit Count\".format(name),\n \"tags\": {},\n \"value\": value\n }\n ]\n\n requests.post('http://localhost:8078/metrics', data=json.dumps(metric))\n\n\[email protected]\ndef publish_unapproved_verisign_certificates():\n \"\"\"\n Query the Verisign for any certificates that need to be approved.\n :return:\n \"\"\"\n from lemur.plugins import plugins\n from lemur.extensions import metrics\n v = plugins.get('verisign-issuer')\n certs = v.get_pending_certificates()\n metrics.send('pending_certificates', 'gauge', certs)\n\n\ndef main():\n manager.add_command(\"start\", LemurServer())\n manager.add_command(\"runserver\", Server(host='127.0.0.1', threaded=True))\n manager.add_command(\"clean\", Clean())\n manager.add_command(\"show_urls\", ShowUrls())\n manager.add_command(\"db\", MigrateCommand)\n manager.add_command(\"init\", InitializeApp())\n manager.add_command(\"create_user\", CreateUser())\n manager.add_command(\"reset_password\", ResetPassword())\n manager.add_command(\"create_role\", CreateRole())\n manager.add_command(\"source\", 
source_manager)\n manager.add_command(\"certificate\", certificate_manager)\n manager.add_command(\"notify\", notification_manager)\n manager.add_command(\"endpoint\", endpoint_manager)\n manager.add_command(\"report\", report_manager)\n manager.add_command(\"policy\", policy_manager)\n manager.run()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "lemur/manage.py"}], "after_files": [{"content": "from __future__ import unicode_literals # at top of module\n\nimport os\nimport sys\nimport base64\nimport requests\nimport json\n\nfrom gunicorn.config import make_settings\n\nfrom cryptography.fernet import Fernet\n\nfrom flask import current_app\nfrom flask_script import Manager, Command, Option, prompt_pass\nfrom flask_migrate import Migrate, MigrateCommand, stamp\nfrom flask_script.commands import ShowUrls, Clean, Server\n\nfrom lemur.sources.cli import manager as source_manager\nfrom lemur.policies.cli import manager as policy_manager\nfrom lemur.reporting.cli import manager as report_manager\nfrom lemur.endpoints.cli import manager as endpoint_manager\nfrom lemur.certificates.cli import manager as certificate_manager\nfrom lemur.notifications.cli import manager as notification_manager\n\nfrom lemur import database\nfrom lemur.users import service as user_service\nfrom lemur.roles import service as role_service\nfrom lemur.policies import service as policy_service\nfrom lemur.notifications import service as notification_service\n\nfrom lemur.common.utils import validate_conf\n\nfrom lemur import create_app\n\n# Needed to be imported so that SQLAlchemy create_all can find our models\nfrom lemur.users.models import User # noqa\nfrom lemur.roles.models import Role # noqa\nfrom lemur.authorities.models import Authority # noqa\nfrom lemur.certificates.models import Certificate # noqa\nfrom lemur.destinations.models import Destination # noqa\nfrom lemur.domains.models import Domain # noqa\nfrom lemur.notifications.models import Notification # noqa\nfrom lemur.sources.models import Source # noqa\nfrom lemur.logs.models import Log # noqa\nfrom lemur.endpoints.models import Endpoint # noqa\nfrom lemur.policies.models import RotationPolicy # noqa\n\n\nmanager = Manager(create_app)\nmanager.add_option('-c', '--config', dest='config')\n\nmigrate = Migrate(create_app)\n\nREQUIRED_VARIABLES = [\n 'LEMUR_SECURITY_TEAM_EMAIL',\n 'LEMUR_DEFAULT_ORGANIZATIONAL_UNIT',\n 'LEMUR_DEFAULT_ORGANIZATION',\n 'LEMUR_DEFAULT_LOCATION',\n 'LEMUR_DEFAULT_COUNTRY',\n 'LEMUR_DEFAULT_STATE',\n 'SQLALCHEMY_DATABASE_URI'\n]\n\nKEY_LENGTH = 40\nDEFAULT_CONFIG_PATH = '~/.lemur/lemur.conf.py'\nDEFAULT_SETTINGS = 'lemur.conf.server'\nSETTINGS_ENVVAR = 'LEMUR_CONF'\n\nCONFIG_TEMPLATE = \"\"\"\n# This is just Python which means you can inherit and tweak settings\n\nimport os\n_basedir = os.path.abspath(os.path.dirname(__file__))\n\nTHREADS_PER_PAGE = 8\n\n# General\n\n# These will need to be set to `True` if you are developing locally\nCORS = False\ndebug = False\n\n# this is the secret key used by flask session management\nSECRET_KEY = '{flask_secret_key}'\n\n# You should consider storing these separately from your config\nLEMUR_TOKEN_SECRET = '{secret_token}'\nLEMUR_ENCRYPTION_KEYS = '{encryption_key}'\n\n# List of domain regular expressions that non-admin users can issue\nLEMUR_WHITELISTED_DOMAINS = []\n\n# Mail Server\n\nLEMUR_EMAIL = ''\nLEMUR_SECURITY_TEAM_EMAIL = []\n\n# Certificate Defaults\n\nLEMUR_DEFAULT_COUNTRY = ''\nLEMUR_DEFAULT_STATE = ''\nLEMUR_DEFAULT_LOCATION = ''\nLEMUR_DEFAULT_ORGANIZATION = 
''\nLEMUR_DEFAULT_ORGANIZATIONAL_UNIT = ''\n\n# Authentication Providers\nACTIVE_PROVIDERS = []\n\n# Logging\n\nLOG_LEVEL = \"DEBUG\"\nLOG_FILE = \"lemur.log\"\n\n\n# Database\n\n# modify this if you are not using a local database\nSQLALCHEMY_DATABASE_URI = 'postgresql://lemur:lemur@localhost:5432/lemur'\n\n# AWS\n\n#LEMUR_INSTANCE_PROFILE = 'Lemur'\n\n# Issuers\n\n# These will be dependent on which 3rd party that Lemur is\n# configured to use.\n\n# VERISIGN_URL = ''\n# VERISIGN_PEM_PATH = ''\n# VERISIGN_FIRST_NAME = ''\n# VERISIGN_LAST_NAME = ''\n# VERSIGN_EMAIL = ''\n\"\"\"\n\n\[email protected]\ndef create():\n database.db.create_all()\n stamp(revision='head')\n\n\[email protected]\ndef drop_all():\n database.db.drop_all()\n\n\[email protected]\ndef make_shell_context():\n \"\"\"\n Creates a python REPL with several default imports\n in the context of the current_app\n\n :return:\n \"\"\"\n return dict(current_app=current_app)\n\n\ndef generate_settings():\n \"\"\"\n This command is run when ``default_path`` doesn't exist, or ``init`` is\n run and returns a string representing the default data to put into their\n settings file.\n \"\"\"\n output = CONFIG_TEMPLATE.format(\n # we use Fernet.generate_key to make sure that the key length is\n # compatible with Fernet\n encryption_key=Fernet.generate_key().decode('utf-8'),\n secret_token=base64.b64encode(os.urandom(KEY_LENGTH)).decode('utf-8'),\n flask_secret_key=base64.b64encode(os.urandom(KEY_LENGTH)).decode('utf-8'),\n )\n\n return output\n\n\nclass InitializeApp(Command):\n \"\"\"\n This command will bootstrap our database with any destinations as\n specified by our config.\n\n Additionally a Lemur user will be created as a default user\n and be used when certificates are discovered by Lemur.\n \"\"\"\n option_list = (\n Option('-p', '--password', dest='password'),\n )\n\n def run(self, password):\n create()\n user = user_service.get_by_username(\"lemur\")\n\n admin_role = role_service.get_by_name('admin')\n\n if admin_role:\n sys.stdout.write(\"[-] Admin role already created, skipping...!\\n\")\n else:\n # we create an admin role\n admin_role = role_service.create('admin', description='This is the Lemur administrator role.')\n sys.stdout.write(\"[+] Created 'admin' role\\n\")\n\n operator_role = role_service.get_by_name('operator')\n\n if operator_role:\n sys.stdout.write(\"[-] Operator role already created, skipping...!\\n\")\n else:\n # we create an admin role\n operator_role = role_service.create('operator', description='This is the Lemur operator role.')\n sys.stdout.write(\"[+] Created 'operator' role\\n\")\n\n read_only_role = role_service.get_by_name('read-only')\n\n if read_only_role:\n sys.stdout.write(\"[-] Operator role already created, skipping...!\\n\")\n else:\n # we create an admin role\n read_only_role = role_service.create('read-only', description='This is the Lemur read only role.')\n sys.stdout.write(\"[+] Created 'read-only' role\\n\")\n\n if not user:\n if not password:\n sys.stdout.write(\"We need to set Lemur's password to continue!\\n\")\n password = prompt_pass(\"Password\")\n password1 = prompt_pass(\"Confirm Password\")\n\n if password != password1:\n sys.stderr.write(\"[!] 
Passwords do not match!\\n\")\n sys.exit(1)\n\n user_service.create(\"lemur\", password, '[email protected]', True, None, [admin_role])\n sys.stdout.write(\"[+] Created the user 'lemur' and granted it the 'admin' role!\\n\")\n\n else:\n sys.stdout.write(\"[-] Default user has already been created, skipping...!\\n\")\n\n sys.stdout.write(\"[+] Creating expiration email notifications!\\n\")\n sys.stdout.write(\"[!] Using {0} as specified by LEMUR_SECURITY_TEAM_EMAIL for notifications\\n\".format(\"LEMUR_SECURITY_TEAM_EMAIL\"))\n\n intervals = current_app.config.get(\"LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS\", [])\n sys.stdout.write(\n \"[!] Creating {num} notifications for {intervals} days as specified by LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS\\n\".format(\n num=len(intervals),\n intervals=\",\".join([str(x) for x in intervals])\n )\n )\n\n recipients = current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL')\n notification_service.create_default_expiration_notifications(\"DEFAULT_SECURITY\", recipients=recipients)\n\n days = current_app.config.get(\"LEMUR_DEFAULT_ROTATION_INTERVAL\", 30)\n sys.stdout.write(\"[+] Creating default certificate rotation policy of {days} days before issuance.\\n\".format(\n days=days\n ))\n\n policy_service.create(days=days, name='default')\n sys.stdout.write(\"[/] Done!\\n\")\n\n\nclass CreateUser(Command):\n \"\"\"\n This command allows for the creation of a new user within Lemur.\n \"\"\"\n option_list = (\n Option('-u', '--username', dest='username', required=True),\n Option('-e', '--email', dest='email', required=True),\n Option('-a', '--active', dest='active', default=True),\n Option('-r', '--roles', dest='roles', action='append', default=[])\n )\n\n def run(self, username, email, active, roles):\n role_objs = []\n for r in roles:\n role_obj = role_service.get_by_name(r)\n if role_obj:\n role_objs.append(role_obj)\n else:\n sys.stderr.write(\"[!] Cannot find role {0}\\n\".format(r))\n sys.exit(1)\n\n password1 = prompt_pass(\"Password\")\n password2 = prompt_pass(\"Confirm Password\")\n\n if password1 != password2:\n sys.stderr.write(\"[!] Passwords do not match!\\n\")\n sys.exit(1)\n\n user_service.create(username, password1, email, active, None, role_objs)\n sys.stdout.write(\"[+] Created new user: {0}\\n\".format(username))\n\n\nclass ResetPassword(Command):\n \"\"\"\n This command allows you to reset a user's password.\n \"\"\"\n option_list = (\n Option('-u', '--username', dest='username', required=True),\n )\n\n def run(self, username):\n user = user_service.get_by_username(username)\n\n if not user:\n sys.stderr.write(\"[!] No user found for username: {0}\\n\".format(username))\n sys.exit(1)\n\n sys.stderr.write(\"[+] Resetting password for {0}\\n\".format(username))\n password1 = prompt_pass(\"Password\")\n password2 = prompt_pass(\"Confirm Password\")\n\n if password1 != password2:\n sys.stderr.write(\"[!] Passwords do not match\\n\")\n sys.exit(1)\n\n user.password = password1\n user.hash_password()\n database.commit()\n\n\nclass CreateRole(Command):\n \"\"\"\n This command allows for the creation of a new role within Lemur\n \"\"\"\n option_list = (\n Option('-n', '--name', dest='name', required=True),\n Option('-u', '--users', dest='users', default=[]),\n Option('-d', '--description', dest='description', required=True)\n )\n\n def run(self, name, users, description):\n user_objs = []\n for u in users:\n user_obj = user_service.get_by_username(u)\n if user_obj:\n user_objs.append(user_obj)\n else:\n sys.stderr.write(\"[!] 
Cannot find user {0}\".format(u))\n sys.exit(1)\n role_service.create(name, description=description, users=users)\n sys.stdout.write(\"[+] Created new role: {0}\".format(name))\n\n\nclass LemurServer(Command):\n \"\"\"\n This is the main Lemur server, it runs the flask app with gunicorn and\n uses any configuration options passed to it.\n\n\n You can pass all standard gunicorn flags to this command as if you were\n running gunicorn itself.\n\n For example:\n\n lemur start -w 4 -b 127.0.0.0:8002\n\n Will start gunicorn with 4 workers bound to 127.0.0.0:8002\n \"\"\"\n description = 'Run the app within Gunicorn'\n\n def get_options(self):\n settings = make_settings()\n options = []\n for setting, klass in settings.items():\n if klass.cli:\n if klass.action:\n if klass.action == 'store_const':\n options.append(Option(*klass.cli, const=klass.const, action=klass.action))\n else:\n options.append(Option(*klass.cli, action=klass.action))\n else:\n options.append(Option(*klass.cli))\n\n return options\n\n def run(self, *args, **kwargs):\n from gunicorn.app.wsgiapp import WSGIApplication\n\n app = WSGIApplication()\n\n # run startup tasks on a app like object\n validate_conf(current_app, REQUIRED_VARIABLES)\n\n app.app_uri = 'lemur:create_app(config=\"{0}\")'.format(current_app.config.get('CONFIG_PATH'))\n\n return app.run()\n\n\[email protected]\ndef create_config(config_path=None):\n \"\"\"\n Creates a new configuration file if one does not already exist\n \"\"\"\n if not config_path:\n config_path = DEFAULT_CONFIG_PATH\n\n config_path = os.path.expanduser(config_path)\n dir = os.path.dirname(config_path)\n\n if not os.path.exists(dir):\n os.makedirs(dir)\n\n config = generate_settings()\n with open(config_path, 'w') as f:\n f.write(config)\n\n sys.stdout.write(\"[+] Created a new configuration file {0}\\n\".format(config_path))\n\n\[email protected]\ndef lock(path=None):\n \"\"\"\n Encrypts a given path. This directory can be used to store secrets needed for normal\n Lemur operation. This is especially useful for storing secrets needed for communication\n with third parties (e.g. external certificate authorities).\n\n Lemur does not assume anything about the contents of the directory and will attempt to\n encrypt all files contained within. Currently this has only been tested against plain\n text files.\n\n Path defaults ~/.lemur/keys\n\n :param: path\n \"\"\"\n if not path:\n path = os.path.expanduser('~/.lemur/keys')\n\n dest_dir = os.path.join(path, \"encrypted\")\n sys.stdout.write(\"[!] 
Generating a new key...\\n\")\n\n key = Fernet.generate_key()\n\n if not os.path.exists(dest_dir):\n sys.stdout.write(\"[+] Creating encryption directory: {0}\\n\".format(dest_dir))\n os.makedirs(dest_dir)\n\n for root, dirs, files in os.walk(os.path.join(path, 'decrypted')):\n for f in files:\n source = os.path.join(root, f)\n dest = os.path.join(dest_dir, f + \".enc\")\n with open(source, 'rb') as in_file, open(dest, 'wb') as out_file:\n f = Fernet(key)\n data = f.encrypt(in_file.read())\n out_file.write(data)\n sys.stdout.write(\"[+] Writing file: {0} Source: {1}\\n\".format(dest, source))\n\n sys.stdout.write(\"[+] Keys have been encrypted with key {0}\\n\".format(key))\n\n\[email protected]\ndef unlock(path=None):\n \"\"\"\n Decrypts all of the files in a given directory with provided password.\n This is most commonly used during the startup sequence of Lemur\n allowing it to go from source code to something that can communicate\n with external services.\n\n Path defaults ~/.lemur/keys\n\n :param: path\n \"\"\"\n key = prompt_pass(\"[!] Please enter the encryption password\")\n\n if not path:\n path = os.path.expanduser('~/.lemur/keys')\n\n dest_dir = os.path.join(path, \"decrypted\")\n source_dir = os.path.join(path, \"encrypted\")\n\n if not os.path.exists(dest_dir):\n sys.stdout.write(\"[+] Creating decryption directory: {0}\\n\".format(dest_dir))\n os.makedirs(dest_dir)\n\n for root, dirs, files in os.walk(source_dir):\n for f in files:\n source = os.path.join(source_dir, f)\n dest = os.path.join(dest_dir, \".\".join(f.split(\".\")[:-1]))\n with open(source, 'rb') as in_file, open(dest, 'wb') as out_file:\n f = Fernet(key)\n data = f.decrypt(in_file.read())\n out_file.write(data)\n sys.stdout.write(\"[+] Writing file: {0} Source: {1}\\n\".format(dest, source))\n\n sys.stdout.write(\"[+] Keys have been unencrypted!\\n\")\n\n\[email protected]\ndef publish_verisign_units():\n \"\"\"\n Simple function that queries verisign for API units and posts the mertics to\n Atlas API for other teams to consume.\n :return:\n \"\"\"\n from lemur.plugins import plugins\n v = plugins.get('verisign-issuer')\n units = v.get_available_units()\n\n metrics = {}\n for item in units:\n if item['@type'] in metrics.keys():\n metrics[item['@type']] += int(item['@remaining'])\n else:\n metrics.update({item['@type']: int(item['@remaining'])})\n\n for name, value in metrics.items():\n metric = [\n {\n \"timestamp\": 1321351651,\n \"type\": \"GAUGE\",\n \"name\": \"Symantec {0} Unit Count\".format(name),\n \"tags\": {},\n \"value\": value\n }\n ]\n\n requests.post('http://localhost:8078/metrics', data=json.dumps(metric))\n\n\[email protected]\ndef publish_unapproved_verisign_certificates():\n \"\"\"\n Query the Verisign for any certificates that need to be approved.\n :return:\n \"\"\"\n from lemur.plugins import plugins\n from lemur.extensions import metrics\n v = plugins.get('verisign-issuer')\n certs = v.get_pending_certificates()\n metrics.send('pending_certificates', 'gauge', certs)\n\n\ndef main():\n manager.add_command(\"start\", LemurServer())\n manager.add_command(\"runserver\", Server(host='127.0.0.1', threaded=True))\n manager.add_command(\"clean\", Clean())\n manager.add_command(\"show_urls\", ShowUrls())\n manager.add_command(\"db\", MigrateCommand)\n manager.add_command(\"init\", InitializeApp())\n manager.add_command(\"create_user\", CreateUser())\n manager.add_command(\"reset_password\", ResetPassword())\n manager.add_command(\"create_role\", CreateRole())\n manager.add_command(\"source\", 
source_manager)\n manager.add_command(\"certificate\", certificate_manager)\n manager.add_command(\"notify\", notification_manager)\n manager.add_command(\"endpoint\", endpoint_manager)\n manager.add_command(\"report\", report_manager)\n manager.add_command(\"policy\", policy_manager)\n manager.run()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "lemur/manage.py"}]} |
gh_patches_debug_106 | rasdani/github-patches | git_diff | meltano__meltano-6488 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support Node v16
Currently building the Meltano UI with Node 16 results in a crash while building libsass. [That library is deprecated](https://sass-lang.com/blog/libsass-is-deprecated), so we should switch to using Dart-sass instead.
CC @alexmarple
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/alembic_freeze.py`
Content:
```
1 #!/usr/bin/env python3
2
3 """Script to freeze the Meltano database - executed by the Makefile."""
4
5 from __future__ import annotations
6
7 from alembic.script import ScriptDirectory
8
9 from meltano.migrations import LOCK_PATH, MIGRATION_DIR
10
11 scripts = ScriptDirectory(str(MIGRATION_DIR))
12
13 with LOCK_PATH.open("w") as lock:
14 HEAD = scripts.get_current_head()
15 lock.write(HEAD)
16
17 print(f"Meltano database frozen at {HEAD}.")
18
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scripts/alembic_freeze.py b/scripts/alembic_freeze.py
--- a/scripts/alembic_freeze.py
+++ b/scripts/alembic_freeze.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-"""Script to freeze the Meltano database - executed by the Makefile."""
+"""Script to freeze the Meltano database - executed by GitHub CI."""
from __future__ import annotations
| {"golden_diff": "diff --git a/scripts/alembic_freeze.py b/scripts/alembic_freeze.py\n--- a/scripts/alembic_freeze.py\n+++ b/scripts/alembic_freeze.py\n@@ -1,6 +1,6 @@\n #!/usr/bin/env python3\n \n-\"\"\"Script to freeze the Meltano database - executed by the Makefile.\"\"\"\n+\"\"\"Script to freeze the Meltano database - executed by GitHub CI.\"\"\"\n \n from __future__ import annotations\n", "issue": "Support Node v16\nCurrently building the Meltano UI with Node 16 results in a crash while building libsass. [That library is deprecated](https://sass-lang.com/blog/libsass-is-deprecated), so we should switch to using Dart-sass instead.\r\n\r\nCC @alexmarple \n", "before_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"Script to freeze the Meltano database - executed by the Makefile.\"\"\"\n\nfrom __future__ import annotations\n\nfrom alembic.script import ScriptDirectory\n\nfrom meltano.migrations import LOCK_PATH, MIGRATION_DIR\n\nscripts = ScriptDirectory(str(MIGRATION_DIR))\n\nwith LOCK_PATH.open(\"w\") as lock:\n HEAD = scripts.get_current_head()\n lock.write(HEAD)\n\nprint(f\"Meltano database frozen at {HEAD}.\")\n", "path": "scripts/alembic_freeze.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"Script to freeze the Meltano database - executed by GitHub CI.\"\"\"\n\nfrom __future__ import annotations\n\nfrom alembic.script import ScriptDirectory\n\nfrom meltano.migrations import LOCK_PATH, MIGRATION_DIR\n\nscripts = ScriptDirectory(str(MIGRATION_DIR))\n\nwith LOCK_PATH.open(\"w\") as lock:\n HEAD = scripts.get_current_head()\n lock.write(HEAD)\n\nprint(f\"Meltano database frozen at {HEAD}.\")\n", "path": "scripts/alembic_freeze.py"}]} |
gh_patches_debug_107 | rasdani/github-patches | git_diff | conda__conda-3931 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Regression: cannot install from explicit conda package filenames
This command used to work, but now it gives the following error/traceback:
Example: `conda install bzip2-1.0.6-vc14_3.tar.bz2 --dry-run`
```
An unexpected error has occurred.
Please consider posting the following information to the
conda GitHub issue tracker at:
https://github.com/conda/conda/issues
Current conda install:
platform : win-64
conda version : 4.2.12
conda is private : False
conda-env version : 4.2.12
conda-build version : 2.0.7
python version : 3.5.2.final.0
requests version : 2.10.0
root environment : C:\Miniconda3 (writable)
default environment : C:\Miniconda3\envs\test_conda
envs directories : C:\Miniconda3\envs
package cache : C:\Miniconda3\pkgs
channel URLs : https://repo.continuum.io/pkgs/free/win-64
https://repo.continuum.io/pkgs/free/noarch
https://repo.continuum.io/pkgs/pro/win-64
https://repo.continuum.io/pkgs/pro/noarch
https://repo.continuum.io/pkgs/msys2/win-64
https://repo.continuum.io/pkgs/msys2/noarch
config file : None
offline mode : False
`$ C:\Miniconda3\Scripts\conda-script.py install bzip2-1.0.6-vc14_3.tar.bz2 --dry-run`
Traceback (most recent call last):
File "C:\Miniconda3\lib\site-packages\conda\exceptions.py", line 479, in conda_exception_handler
return_value = func(*args, **kwargs)
File "C:\Miniconda3\lib\site-packages\conda\cli\main.py", line 145, in _main
exit_code = args.func(args, p)
File "C:\Miniconda3\lib\site-packages\conda\cli\main_install.py", line 80, in execute
install(args, parser, 'install')
File "C:\Miniconda3\lib\site-packages\conda\cli\install.py", line 209, in install
explicit(args.packages, prefix, verbose=not context.quiet)
File "C:\Miniconda3\lib\site-packages\conda\misc.py", line 66, in explicit
if not is_url(url_p):
File "C:\Miniconda3\lib\site-packages\conda\common\url.py", line 72, in is_url
p = urlparse(url)
File "C:\Miniconda3\lib\site-packages\conda\_vendor\auxlib\decorators.py", line 56, in _memoized_func
result = func(*args, **kwargs)
File "C:\Miniconda3\lib\site-packages\conda\common\url.py", line 55, in urlparse
if on_win and url.startswith('file:'):
AttributeError: 'NoneType' object has no attribute 'startswith'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conda/common/url.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 from __future__ import absolute_import, division, print_function, unicode_literals
3
4 import re
5 import socket
6 import sys
7 from getpass import getpass
8 from logging import getLogger
9 from os.path import abspath, expanduser
10
11 try:
12 # Python 3
13 from urllib.parse import (quote, quote_plus, unquote, unquote_plus, # NOQA
14 urlunparse as stdlib_urlparse, urljoin) # NOQA
15 from urllib.request import pathname2url # NOQA
16 except ImportError:
17 # Python 2
18 from urllib import quote, quote_plus, unquote, unquote_plus, pathname2url # NOQA
19 from urlparse import urlunparse as stdlib_urlparse, urljoin # NOQA
20
21 from requests.packages.urllib3.exceptions import LocationParseError
22 from requests.packages.urllib3.util.url import Url, parse_url
23
24 from .._vendor.auxlib.decorators import memoize
25
26 log = getLogger(__name__)
27
28
29 on_win = bool(sys.platform == "win32")
30
31
32 @memoize
33 def path_to_url(path):
34 path = abspath(expanduser(path))
35 url = urljoin('file:', pathname2url(path))
36 log.debug("%s converted to %s", path, url)
37 return url
38
39
40 def url_to_path(url): # NOQA
41 """Convert a file:// URL to a path."""
42 assert url.startswith('file:'), "You can only turn file: urls into filenames (not %r)" % url
43 path = url[len('file:'):].lstrip('/')
44 path = unquote(path)
45 if re.match('^([a-z])[:|]', path, re.I):
46 path = path[0] + ':' + path[2:]
47 elif not path.startswith(r'\\'):
48 # if not a Windows UNC path
49 path = '/' + path
50 return path
51
52
53 @memoize
54 def urlparse(url):
55 if on_win and url.startswith('file:'):
56 url.replace('\\', '/')
57 return parse_url(url)
58
59
60 def url_to_s3_info(url):
61 """
62 Convert a S3 url to a tuple of bucket and key
63 """
64 parsed_url = parse_url(url)
65 assert parsed_url.scheme == 's3', "You can only use s3: urls (not %r)" % url
66 bucket, key = parsed_url.host, parsed_url.path
67 return bucket, key
68
69
70 def is_url(url):
71 try:
72 p = urlparse(url)
73 return p.netloc is not None or p.scheme == "file"
74 except LocationParseError:
75 log.debug("Could not parse url ({0}).".format(url))
76 return False
77
78
79 def is_ipv4_address(string_ip):
80 """
81 Examples:
82 >>> [is_ipv4_address(ip) for ip in ('8.8.8.8', '192.168.10.10', '255.255.255.255')]
83 [True, True, True]
84 >>> [is_ipv4_address(ip) for ip in ('8.8.8', '192.168.10.10.20', '256.255.255.255', '::1')]
85 [False, False, False, False]
86 """
87 try:
88 socket.inet_aton(string_ip)
89 except socket.error:
90 return False
91 return string_ip.count('.') == 3
92
93
94 def is_ipv6_address(string_ip):
95 """
96 Examples:
97 >>> [is_ipv6_address(ip) for ip in ('::1', '2001:db8:85a3::370:7334', '1234:'*7+'1234')]
98 [True, True, True]
99 >>> [is_ipv6_address(ip) for ip in ('192.168.10.10', '1234:'*8+'1234')]
100 [False, False]
101 """
102 try:
103 socket.inet_pton(socket.AF_INET6, string_ip)
104 except socket.error:
105 return False
106 return True
107
108
109 def is_ip_address(string_ip):
110 """
111 Examples:
112 >>> is_ip_address('192.168.10.10')
113 True
114 >>> is_ip_address('::1')
115 True
116 >>> is_ip_address('www.google.com')
117 False
118 """
119 return is_ipv4_address(string_ip) or is_ipv6_address(string_ip)
120
121
122 def join(*args):
123 start = '/' if not args[0] or args[0].startswith('/') else ''
124 return start + '/'.join(y for y in (x.strip('/') for x in args if x) if y)
125
126
127 join_url = join
128
129
130 def has_scheme(value):
131 return re.match(r'[a-z][a-z0-9]{0,11}://', value)
132
133
134 def strip_scheme(url):
135 return url.split('://', 1)[-1]
136
137
138 def mask_anaconda_token(url):
139 _, token = split_anaconda_token(url)
140 return url.replace(token, "<TOKEN>", 1) if token else url
141
142
143 def split_anaconda_token(url):
144 """
145 Examples:
146 >>> split_anaconda_token("https://1.2.3.4/t/tk-123-456/path")
147 (u'https://1.2.3.4/path', u'tk-123-456')
148 >>> split_anaconda_token("https://1.2.3.4/t//path")
149 (u'https://1.2.3.4/path', u'')
150 >>> split_anaconda_token("https://some.domain/api/t/tk-123-456/path")
151 (u'https://some.domain/api/path', u'tk-123-456')
152 >>> split_anaconda_token("https://1.2.3.4/conda/t/tk-123-456/path")
153 (u'https://1.2.3.4/conda/path', u'tk-123-456')
154 >>> split_anaconda_token("https://1.2.3.4/path")
155 (u'https://1.2.3.4/path', None)
156 >>> split_anaconda_token("https://10.2.3.4:8080/conda/t/tk-123-45")
157 (u'https://10.2.3.4:8080/conda', u'tk-123-45')
158 """
159 _token_match = re.search(r'/t/([a-zA-Z0-9-]*)', url)
160 token = _token_match.groups()[0] if _token_match else None
161 cleaned_url = url.replace('/t/' + token, '', 1) if token is not None else url
162 return cleaned_url.rstrip('/'), token
163
164
165 def split_platform(url):
166 """
167
168 Examples:
169 >>> split_platform("https://1.2.3.4/t/tk-123/osx-64/path")
170 (u'https://1.2.3.4/t/tk-123/path', u'osx-64')
171
172 """
173 from conda.base.constants import PLATFORM_DIRECTORIES
174 _platform_match_regex = r'/(%s)/?' % r'|'.join(r'%s' % d for d in PLATFORM_DIRECTORIES)
175 _platform_match = re.search(_platform_match_regex, url, re.IGNORECASE)
176 platform = _platform_match.groups()[0] if _platform_match else None
177 cleaned_url = url.replace('/' + platform, '', 1) if platform is not None else url
178 return cleaned_url.rstrip('/'), platform
179
180
181 def split_package_filename(url):
182 cleaned_url, package_filename = (url.rsplit('/', 1) if url.endswith(('.tar.bz2', '.json'))
183 else (url, None))
184 return cleaned_url, package_filename
185
186
187 def split_scheme_auth_token(url):
188 if not url:
189 return None, None, None, None
190 cleaned_url, token = split_anaconda_token(url)
191 url_parts = urlparse(cleaned_url)
192 remainder_url = Url(host=url_parts.host, port=url_parts.port, path=url_parts.path,
193 query=url_parts.query).url
194 return remainder_url, url_parts.scheme, url_parts.auth, token
195
196
197 def split_conda_url_easy_parts(url):
198 # scheme, auth, token, platform, package_filename, host, port, path, query
199 cleaned_url, token = split_anaconda_token(url)
200 cleaned_url, platform = split_platform(cleaned_url)
201 cleaned_url, package_filename = split_package_filename(cleaned_url)
202
203 # TODO: split out namespace using regex
204
205 url_parts = urlparse(cleaned_url)
206
207 return (url_parts.scheme, url_parts.auth, token, platform, package_filename, url_parts.host,
208 url_parts.port, url_parts.path, url_parts.query)
209
210
211 def is_windows_path(value):
212 return re.match(r'[a-z]:[/\\]', value, re.IGNORECASE)
213
214
215 @memoize
216 def get_proxy_username_and_pass(scheme):
217 username = input("\n%s proxy username: " % scheme)
218 passwd = getpass("Password:")
219 return username, passwd
220
221
222 def add_username_and_password(url, username, password):
223 url_parts = parse_url(url)._asdict()
224 url_parts['auth'] = username + ':' + quote(password, '')
225 return Url(**url_parts).url
226
227
228 def maybe_add_auth(url, auth, force=False):
229 """add auth if the url doesn't currently have it"""
230 if not auth:
231 return url
232 url_parts = urlparse(url)._asdict()
233 if url_parts['auth'] and not force:
234 return url
235 url_parts['auth'] = auth
236 return Url(**url_parts).url
237
238
239 if __name__ == "__main__":
240 import doctest
241 doctest.testmod()
242
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conda/common/url.py b/conda/common/url.py
--- a/conda/common/url.py
+++ b/conda/common/url.py
@@ -68,6 +68,8 @@
def is_url(url):
+ if not url:
+ return False
try:
p = urlparse(url)
return p.netloc is not None or p.scheme == "file"
| {"golden_diff": "diff --git a/conda/common/url.py b/conda/common/url.py\n--- a/conda/common/url.py\n+++ b/conda/common/url.py\n@@ -68,6 +68,8 @@\n \n \n def is_url(url):\n+ if not url:\n+ return False\n try:\n p = urlparse(url)\n return p.netloc is not None or p.scheme == \"file\"\n", "issue": "Regression: cannot install from explicit conda package filenames\nThis command used to work, but now it gives the following error/traceback:\r\n\r\nExample: `conda install bzip2-1.0.6-vc14_3.tar.bz2 --dry-run`\r\n\r\n```\r\nAn unexpected error has occurred.\r\nPlease consider posting the following information to the\r\nconda GitHub issue tracker at:\r\n\r\n https://github.com/conda/conda/issues\r\n\r\n\r\n\r\nCurrent conda install:\r\n\r\n platform : win-64\r\n conda version : 4.2.12\r\n conda is private : False\r\n conda-env version : 4.2.12\r\n conda-build version : 2.0.7\r\n python version : 3.5.2.final.0\r\n requests version : 2.10.0\r\n root environment : C:\\Miniconda3 (writable)\r\n default environment : C:\\Miniconda3\\envs\\test_conda\r\n envs directories : C:\\Miniconda3\\envs\r\n package cache : C:\\Miniconda3\\pkgs\r\n channel URLs : https://repo.continuum.io/pkgs/free/win-64\r\n https://repo.continuum.io/pkgs/free/noarch\r\n https://repo.continuum.io/pkgs/pro/win-64\r\n https://repo.continuum.io/pkgs/pro/noarch\r\n https://repo.continuum.io/pkgs/msys2/win-64\r\n https://repo.continuum.io/pkgs/msys2/noarch\r\n config file : None\r\n offline mode : False\r\n\r\n\r\n\r\n`$ C:\\Miniconda3\\Scripts\\conda-script.py install bzip2-1.0.6-vc14_3.tar.bz2 --dry-run`\r\n\r\n\r\n\r\n\r\n Traceback (most recent call last):\r\n File \"C:\\Miniconda3\\lib\\site-packages\\conda\\exceptions.py\", line 479, in conda_exception_handler\r\n return_value = func(*args, **kwargs)\r\n File \"C:\\Miniconda3\\lib\\site-packages\\conda\\cli\\main.py\", line 145, in _main\r\n exit_code = args.func(args, p)\r\n File \"C:\\Miniconda3\\lib\\site-packages\\conda\\cli\\main_install.py\", line 80, in execute\r\n install(args, parser, 'install')\r\n File \"C:\\Miniconda3\\lib\\site-packages\\conda\\cli\\install.py\", line 209, in install\r\n explicit(args.packages, prefix, verbose=not context.quiet)\r\n File \"C:\\Miniconda3\\lib\\site-packages\\conda\\misc.py\", line 66, in explicit\r\n if not is_url(url_p):\r\n File \"C:\\Miniconda3\\lib\\site-packages\\conda\\common\\url.py\", line 72, in is_url\r\n p = urlparse(url)\r\n File \"C:\\Miniconda3\\lib\\site-packages\\conda\\_vendor\\auxlib\\decorators.py\", line 56, in _memoized_func\r\n result = func(*args, **kwargs)\r\n File \"C:\\Miniconda3\\lib\\site-packages\\conda\\common\\url.py\", line 55, in urlparse\r\n if on_win and url.startswith('file:'):\r\n AttributeError: 'NoneType' object has no attribute 'startswith'\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport re\nimport socket\nimport sys\nfrom getpass import getpass\nfrom logging import getLogger\nfrom os.path import abspath, expanduser\n\ntry:\n # Python 3\n from urllib.parse import (quote, quote_plus, unquote, unquote_plus, # NOQA\n urlunparse as stdlib_urlparse, urljoin) # NOQA\n from urllib.request import pathname2url # NOQA\nexcept ImportError:\n # Python 2\n from urllib import quote, quote_plus, unquote, unquote_plus, pathname2url # NOQA\n from urlparse import urlunparse as stdlib_urlparse, urljoin # NOQA\n\nfrom requests.packages.urllib3.exceptions import LocationParseError\nfrom 
requests.packages.urllib3.util.url import Url, parse_url\n\nfrom .._vendor.auxlib.decorators import memoize\n\nlog = getLogger(__name__)\n\n\non_win = bool(sys.platform == \"win32\")\n\n\n@memoize\ndef path_to_url(path):\n path = abspath(expanduser(path))\n url = urljoin('file:', pathname2url(path))\n log.debug(\"%s converted to %s\", path, url)\n return url\n\n\ndef url_to_path(url): # NOQA\n \"\"\"Convert a file:// URL to a path.\"\"\"\n assert url.startswith('file:'), \"You can only turn file: urls into filenames (not %r)\" % url\n path = url[len('file:'):].lstrip('/')\n path = unquote(path)\n if re.match('^([a-z])[:|]', path, re.I):\n path = path[0] + ':' + path[2:]\n elif not path.startswith(r'\\\\'):\n # if not a Windows UNC path\n path = '/' + path\n return path\n\n\n@memoize\ndef urlparse(url):\n if on_win and url.startswith('file:'):\n url.replace('\\\\', '/')\n return parse_url(url)\n\n\ndef url_to_s3_info(url):\n \"\"\"\n Convert a S3 url to a tuple of bucket and key\n \"\"\"\n parsed_url = parse_url(url)\n assert parsed_url.scheme == 's3', \"You can only use s3: urls (not %r)\" % url\n bucket, key = parsed_url.host, parsed_url.path\n return bucket, key\n\n\ndef is_url(url):\n try:\n p = urlparse(url)\n return p.netloc is not None or p.scheme == \"file\"\n except LocationParseError:\n log.debug(\"Could not parse url ({0}).\".format(url))\n return False\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n Examples:\n >>> [is_ipv4_address(ip) for ip in ('8.8.8.8', '192.168.10.10', '255.255.255.255')]\n [True, True, True]\n >>> [is_ipv4_address(ip) for ip in ('8.8.8', '192.168.10.10.20', '256.255.255.255', '::1')]\n [False, False, False, False]\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return string_ip.count('.') == 3\n\n\ndef is_ipv6_address(string_ip):\n \"\"\"\n Examples:\n >>> [is_ipv6_address(ip) for ip in ('::1', '2001:db8:85a3::370:7334', '1234:'*7+'1234')]\n [True, True, True]\n >>> [is_ipv6_address(ip) for ip in ('192.168.10.10', '1234:'*8+'1234')]\n [False, False]\n \"\"\"\n try:\n socket.inet_pton(socket.AF_INET6, string_ip)\n except socket.error:\n return False\n return True\n\n\ndef is_ip_address(string_ip):\n \"\"\"\n Examples:\n >>> is_ip_address('192.168.10.10')\n True\n >>> is_ip_address('::1')\n True\n >>> is_ip_address('www.google.com')\n False\n \"\"\"\n return is_ipv4_address(string_ip) or is_ipv6_address(string_ip)\n\n\ndef join(*args):\n start = '/' if not args[0] or args[0].startswith('/') else ''\n return start + '/'.join(y for y in (x.strip('/') for x in args if x) if y)\n\n\njoin_url = join\n\n\ndef has_scheme(value):\n return re.match(r'[a-z][a-z0-9]{0,11}://', value)\n\n\ndef strip_scheme(url):\n return url.split('://', 1)[-1]\n\n\ndef mask_anaconda_token(url):\n _, token = split_anaconda_token(url)\n return url.replace(token, \"<TOKEN>\", 1) if token else url\n\n\ndef split_anaconda_token(url):\n \"\"\"\n Examples:\n >>> split_anaconda_token(\"https://1.2.3.4/t/tk-123-456/path\")\n (u'https://1.2.3.4/path', u'tk-123-456')\n >>> split_anaconda_token(\"https://1.2.3.4/t//path\")\n (u'https://1.2.3.4/path', u'')\n >>> split_anaconda_token(\"https://some.domain/api/t/tk-123-456/path\")\n (u'https://some.domain/api/path', u'tk-123-456')\n >>> split_anaconda_token(\"https://1.2.3.4/conda/t/tk-123-456/path\")\n (u'https://1.2.3.4/conda/path', u'tk-123-456')\n >>> split_anaconda_token(\"https://1.2.3.4/path\")\n (u'https://1.2.3.4/path', None)\n >>> split_anaconda_token(\"https://10.2.3.4:8080/conda/t/tk-123-45\")\n 
(u'https://10.2.3.4:8080/conda', u'tk-123-45')\n \"\"\"\n _token_match = re.search(r'/t/([a-zA-Z0-9-]*)', url)\n token = _token_match.groups()[0] if _token_match else None\n cleaned_url = url.replace('/t/' + token, '', 1) if token is not None else url\n return cleaned_url.rstrip('/'), token\n\n\ndef split_platform(url):\n \"\"\"\n\n Examples:\n >>> split_platform(\"https://1.2.3.4/t/tk-123/osx-64/path\")\n (u'https://1.2.3.4/t/tk-123/path', u'osx-64')\n\n \"\"\"\n from conda.base.constants import PLATFORM_DIRECTORIES\n _platform_match_regex = r'/(%s)/?' % r'|'.join(r'%s' % d for d in PLATFORM_DIRECTORIES)\n _platform_match = re.search(_platform_match_regex, url, re.IGNORECASE)\n platform = _platform_match.groups()[0] if _platform_match else None\n cleaned_url = url.replace('/' + platform, '', 1) if platform is not None else url\n return cleaned_url.rstrip('/'), platform\n\n\ndef split_package_filename(url):\n cleaned_url, package_filename = (url.rsplit('/', 1) if url.endswith(('.tar.bz2', '.json'))\n else (url, None))\n return cleaned_url, package_filename\n\n\ndef split_scheme_auth_token(url):\n if not url:\n return None, None, None, None\n cleaned_url, token = split_anaconda_token(url)\n url_parts = urlparse(cleaned_url)\n remainder_url = Url(host=url_parts.host, port=url_parts.port, path=url_parts.path,\n query=url_parts.query).url\n return remainder_url, url_parts.scheme, url_parts.auth, token\n\n\ndef split_conda_url_easy_parts(url):\n # scheme, auth, token, platform, package_filename, host, port, path, query\n cleaned_url, token = split_anaconda_token(url)\n cleaned_url, platform = split_platform(cleaned_url)\n cleaned_url, package_filename = split_package_filename(cleaned_url)\n\n # TODO: split out namespace using regex\n\n url_parts = urlparse(cleaned_url)\n\n return (url_parts.scheme, url_parts.auth, token, platform, package_filename, url_parts.host,\n url_parts.port, url_parts.path, url_parts.query)\n\n\ndef is_windows_path(value):\n return re.match(r'[a-z]:[/\\\\]', value, re.IGNORECASE)\n\n\n@memoize\ndef get_proxy_username_and_pass(scheme):\n username = input(\"\\n%s proxy username: \" % scheme)\n passwd = getpass(\"Password:\")\n return username, passwd\n\n\ndef add_username_and_password(url, username, password):\n url_parts = parse_url(url)._asdict()\n url_parts['auth'] = username + ':' + quote(password, '')\n return Url(**url_parts).url\n\n\ndef maybe_add_auth(url, auth, force=False):\n \"\"\"add auth if the url doesn't currently have it\"\"\"\n if not auth:\n return url\n url_parts = urlparse(url)._asdict()\n if url_parts['auth'] and not force:\n return url\n url_parts['auth'] = auth\n return Url(**url_parts).url\n\n\nif __name__ == \"__main__\":\n import doctest\n doctest.testmod()\n", "path": "conda/common/url.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nimport re\nimport socket\nimport sys\nfrom getpass import getpass\nfrom logging import getLogger\nfrom os.path import abspath, expanduser\n\ntry:\n # Python 3\n from urllib.parse import (quote, quote_plus, unquote, unquote_plus, # NOQA\n urlunparse as stdlib_urlparse, urljoin) # NOQA\n from urllib.request import pathname2url # NOQA\nexcept ImportError:\n # Python 2\n from urllib import quote, quote_plus, unquote, unquote_plus, pathname2url # NOQA\n from urlparse import urlunparse as stdlib_urlparse, urljoin # NOQA\n\nfrom requests.packages.urllib3.exceptions import LocationParseError\nfrom 
requests.packages.urllib3.util.url import Url, parse_url\n\nfrom .._vendor.auxlib.decorators import memoize\n\nlog = getLogger(__name__)\n\n\non_win = bool(sys.platform == \"win32\")\n\n\n@memoize\ndef path_to_url(path):\n path = abspath(expanduser(path))\n url = urljoin('file:', pathname2url(path))\n log.debug(\"%s converted to %s\", path, url)\n return url\n\n\ndef url_to_path(url): # NOQA\n \"\"\"Convert a file:// URL to a path.\"\"\"\n assert url.startswith('file:'), \"You can only turn file: urls into filenames (not %r)\" % url\n path = url[len('file:'):].lstrip('/')\n path = unquote(path)\n if re.match('^([a-z])[:|]', path, re.I):\n path = path[0] + ':' + path[2:]\n elif not path.startswith(r'\\\\'):\n # if not a Windows UNC path\n path = '/' + path\n return path\n\n\n@memoize\ndef urlparse(url):\n if on_win and url.startswith('file:'):\n url.replace('\\\\', '/')\n return parse_url(url)\n\n\ndef url_to_s3_info(url):\n \"\"\"\n Convert a S3 url to a tuple of bucket and key\n \"\"\"\n parsed_url = parse_url(url)\n assert parsed_url.scheme == 's3', \"You can only use s3: urls (not %r)\" % url\n bucket, key = parsed_url.host, parsed_url.path\n return bucket, key\n\n\ndef is_url(url):\n if not url:\n return False\n try:\n p = urlparse(url)\n return p.netloc is not None or p.scheme == \"file\"\n except LocationParseError:\n log.debug(\"Could not parse url ({0}).\".format(url))\n return False\n\n\ndef is_ipv4_address(string_ip):\n \"\"\"\n Examples:\n >>> [is_ipv4_address(ip) for ip in ('8.8.8.8', '192.168.10.10', '255.255.255.255')]\n [True, True, True]\n >>> [is_ipv4_address(ip) for ip in ('8.8.8', '192.168.10.10.20', '256.255.255.255', '::1')]\n [False, False, False, False]\n \"\"\"\n try:\n socket.inet_aton(string_ip)\n except socket.error:\n return False\n return string_ip.count('.') == 3\n\n\ndef is_ipv6_address(string_ip):\n \"\"\"\n Examples:\n >>> [is_ipv6_address(ip) for ip in ('::1', '2001:db8:85a3::370:7334', '1234:'*7+'1234')]\n [True, True, True]\n >>> [is_ipv6_address(ip) for ip in ('192.168.10.10', '1234:'*8+'1234')]\n [False, False]\n \"\"\"\n try:\n socket.inet_pton(socket.AF_INET6, string_ip)\n except socket.error:\n return False\n return True\n\n\ndef is_ip_address(string_ip):\n \"\"\"\n Examples:\n >>> is_ip_address('192.168.10.10')\n True\n >>> is_ip_address('::1')\n True\n >>> is_ip_address('www.google.com')\n False\n \"\"\"\n return is_ipv4_address(string_ip) or is_ipv6_address(string_ip)\n\n\ndef join(*args):\n start = '/' if not args[0] or args[0].startswith('/') else ''\n return start + '/'.join(y for y in (x.strip('/') for x in args if x) if y)\n\n\njoin_url = join\n\n\ndef has_scheme(value):\n return re.match(r'[a-z][a-z0-9]{0,11}://', value)\n\n\ndef strip_scheme(url):\n return url.split('://', 1)[-1]\n\n\ndef mask_anaconda_token(url):\n _, token = split_anaconda_token(url)\n return url.replace(token, \"<TOKEN>\", 1) if token else url\n\n\ndef split_anaconda_token(url):\n \"\"\"\n Examples:\n >>> split_anaconda_token(\"https://1.2.3.4/t/tk-123-456/path\")\n (u'https://1.2.3.4/path', u'tk-123-456')\n >>> split_anaconda_token(\"https://1.2.3.4/t//path\")\n (u'https://1.2.3.4/path', u'')\n >>> split_anaconda_token(\"https://some.domain/api/t/tk-123-456/path\")\n (u'https://some.domain/api/path', u'tk-123-456')\n >>> split_anaconda_token(\"https://1.2.3.4/conda/t/tk-123-456/path\")\n (u'https://1.2.3.4/conda/path', u'tk-123-456')\n >>> split_anaconda_token(\"https://1.2.3.4/path\")\n (u'https://1.2.3.4/path', None)\n >>> 
split_anaconda_token(\"https://10.2.3.4:8080/conda/t/tk-123-45\")\n (u'https://10.2.3.4:8080/conda', u'tk-123-45')\n \"\"\"\n _token_match = re.search(r'/t/([a-zA-Z0-9-]*)', url)\n token = _token_match.groups()[0] if _token_match else None\n cleaned_url = url.replace('/t/' + token, '', 1) if token is not None else url\n return cleaned_url.rstrip('/'), token\n\n\ndef split_platform(url):\n \"\"\"\n\n Examples:\n >>> split_platform(\"https://1.2.3.4/t/tk-123/osx-64/path\")\n (u'https://1.2.3.4/t/tk-123/path', u'osx-64')\n\n \"\"\"\n from conda.base.constants import PLATFORM_DIRECTORIES\n _platform_match_regex = r'/(%s)/?' % r'|'.join(r'%s' % d for d in PLATFORM_DIRECTORIES)\n _platform_match = re.search(_platform_match_regex, url, re.IGNORECASE)\n platform = _platform_match.groups()[0] if _platform_match else None\n cleaned_url = url.replace('/' + platform, '', 1) if platform is not None else url\n return cleaned_url.rstrip('/'), platform\n\n\ndef split_package_filename(url):\n cleaned_url, package_filename = (url.rsplit('/', 1) if url.endswith(('.tar.bz2', '.json'))\n else (url, None))\n return cleaned_url, package_filename\n\n\ndef split_scheme_auth_token(url):\n if not url:\n return None, None, None, None\n cleaned_url, token = split_anaconda_token(url)\n url_parts = urlparse(cleaned_url)\n remainder_url = Url(host=url_parts.host, port=url_parts.port, path=url_parts.path,\n query=url_parts.query).url\n return remainder_url, url_parts.scheme, url_parts.auth, token\n\n\ndef split_conda_url_easy_parts(url):\n # scheme, auth, token, platform, package_filename, host, port, path, query\n cleaned_url, token = split_anaconda_token(url)\n cleaned_url, platform = split_platform(cleaned_url)\n cleaned_url, package_filename = split_package_filename(cleaned_url)\n\n # TODO: split out namespace using regex\n\n url_parts = urlparse(cleaned_url)\n\n return (url_parts.scheme, url_parts.auth, token, platform, package_filename, url_parts.host,\n url_parts.port, url_parts.path, url_parts.query)\n\n\ndef is_windows_path(value):\n return re.match(r'[a-z]:[/\\\\]', value, re.IGNORECASE)\n\n\n@memoize\ndef get_proxy_username_and_pass(scheme):\n username = input(\"\\n%s proxy username: \" % scheme)\n passwd = getpass(\"Password:\")\n return username, passwd\n\n\ndef add_username_and_password(url, username, password):\n url_parts = parse_url(url)._asdict()\n url_parts['auth'] = username + ':' + quote(password, '')\n return Url(**url_parts).url\n\n\ndef maybe_add_auth(url, auth, force=False):\n \"\"\"add auth if the url doesn't currently have it\"\"\"\n if not auth:\n return url\n url_parts = urlparse(url)._asdict()\n if url_parts['auth'] and not force:\n return url\n url_parts['auth'] = auth\n return Url(**url_parts).url\n\n\nif __name__ == \"__main__\":\n import doctest\n doctest.testmod()\n", "path": "conda/common/url.py"}]} |
gh_patches_debug_108 | rasdani/github-patches | git_diff | acl-org__acl-anthology-724 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
I08-2 front matter links to whole PDF
The link from here https://www.aclweb.org/anthology/events/ijcnlp-2008/ that is normally the front matter for volume 2 links instead to the full PDF for all of volume 2.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bin/anthology/papers.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright 2019 Marcel Bollmann <[email protected]>
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
16
17 import logging as log
18 from .utils import (
19 build_anthology_id,
20 parse_element,
21 infer_attachment_url,
22 remove_extra_whitespace,
23 is_journal,
24 is_volume_id,
25 )
26 from . import data
27
28 # For BibTeX export
29 from .formatter import bibtex_encode, bibtex_make_entry
30
31
32 class Paper:
33 def __init__(self, paper_id, ingest_date, volume, formatter):
34 self.parent_volume = volume
35 self.formatter = formatter
36 self._id = paper_id
37 self._ingest_date = ingest_date
38 self._bibkey = False
39 self.is_volume = paper_id == "0"
40
41 # initialize metadata with keys inherited from volume
42 self.attrib = {}
43 for key, value in volume.attrib.items():
44 # Only inherit 'editor' for frontmatter
45 if (key == "editor" and not self.is_volume) or key in (
46 "collection_id",
47 "booktitle",
48 "id",
49 "meta_data",
50 "meta_journal_title",
51 "meta_volume",
52 "meta_issue",
53 "sigs",
54 "venues",
55 "meta_date",
56 "url",
57 ):
58 continue
59
60 self.attrib[key] = value
61
62 def from_xml(xml_element, *args):
63 ingest_date = xml_element.get("ingest-date", data.UNKNOWN_INGEST_DATE)
64
65 # Default to paper ID "0" (for front matter)
66 paper = Paper(xml_element.get("id", "0"), ingest_date, *args)
67
68 # Set values from parsing the XML element (overwriting
69 # and changing some initialized from the volume metadata)
70 for key, value in parse_element(xml_element).items():
71 if key == "author" and "editor" in paper.attrib:
72 del paper.attrib["editor"]
73 paper.attrib[key] = value
74
75 # Frontmatter title is the volume 'booktitle'
76 if paper.is_volume:
77 paper.attrib["xml_title"] = paper.attrib["xml_booktitle"]
78 paper.attrib["xml_title"].tag = "title"
79
80 # Remove booktitle for frontmatter and journals
81 if paper.is_volume or is_journal(paper.full_id):
82 del paper.attrib["xml_booktitle"]
83
84 # Expand URLs with paper ID
85 for tag in ("revision", "erratum"):
86 if tag in paper.attrib:
87 for item in paper.attrib[tag]:
88 if not item["url"].startswith(paper.full_id):
89 log.error(
90 "{} must begin with paper ID '{}', but is '{}'".format(
91 tag, paper.full_id, item["url"]
92 )
93 )
94 item["url"] = data.ANTHOLOGY_PDF.format(item["url"])
95
96 if "attachment" in paper.attrib:
97 for item in paper.attrib["attachment"]:
98 item["url"] = infer_attachment_url(item["url"], paper.full_id)
99
100 # Explicitly construct URL of original version of the paper
101 # -- this is a bit hacky, but it's not given in the XML
102 # explicitly
103 if "revision" in paper.attrib:
104 paper.attrib["revision"].insert(
105 0,
106 {
107 "value": "{}v1".format(paper.full_id),
108 "id": "1",
109 "url": data.ANTHOLOGY_PDF.format("{}v1".format(paper.full_id)),
110 },
111 )
112
113 paper.attrib["title"] = paper.get_title("plain")
114 paper.attrib["booktitle"] = paper.get_booktitle("plain")
115
116 if "editor" in paper.attrib:
117 if paper.is_volume:
118 if "author" in paper.attrib:
119 log.warn(
120 "Paper {} has both <editor> and <author>; ignoring <author>".format(
121 paper.full_id
122 )
123 )
124 # Proceedings editors are considered authors for their front matter
125 paper.attrib["author"] = paper.attrib["editor"]
126 del paper.attrib["editor"]
127 else:
128 log.warn(
129 "Paper {} has <editor> but is not a proceedings volume; ignoring <editor>".format(
130 paper.full_id
131 )
132 )
133 if "pages" in paper.attrib:
134 if paper.attrib["pages"] is not None:
135 paper._interpret_pages()
136 else:
137 del paper.attrib["pages"]
138
139 if "author" in paper.attrib:
140 paper.attrib["author_string"] = ", ".join(
141 [x[0].full for x in paper.attrib["author"]]
142 )
143
144 paper.attrib["thumbnail"] = data.ANTHOLOGY_THUMBNAIL.format(paper.full_id)
145
146 return paper
147
148 def _interpret_pages(self):
149 """Splits up 'pages' field into first and last page, if possible.
150
151 This is used for metadata in the generated HTML."""
152 for s in ("--", "-", "–"):
153 if self.attrib["pages"].count(s) == 1:
154 self.attrib["page_first"], self.attrib["page_last"] = self.attrib[
155 "pages"
156 ].split(s)
157 self.attrib["pages"] = self.attrib["pages"].replace(s, "–")
158 return
159
160 @property
161 def ingest_date(self):
162 """Inherit publication date from parent, but self overrides. May be undefined."""
163 if self._ingest_date:
164 return self._ingest_date
165 if self.parent_volume:
166 return self.parent_volume.ingest_date
167 return data.UNKNOWN_INGEST_DATE
168
169 @property
170 def collection_id(self):
171 return self.parent_volume.collection_id
172
173 @property
174 def volume_id(self):
175 return self.parent_volume.volume_id
176
177 @property
178 def paper_id(self):
179 return self._id
180
181 @property
182 def full_id(self):
183 return self.anthology_id
184
185 @property
186 def anthology_id(self):
187 return build_anthology_id(self.collection_id, self.volume_id, self.paper_id)
188
189 @property
190 def bibkey(self):
191 if not self._bibkey:
192 self._bibkey = self.full_id # fallback
193 return self._bibkey
194
195 @bibkey.setter
196 def bibkey(self, value):
197 self._bibkey = value
198
199 @property
200 def bibtype(self):
201 if is_journal(self.full_id):
202 return "article"
203 elif self.is_volume:
204 return "proceedings"
205 else:
206 return "inproceedings"
207
208 @property
209 def parent_volume_id(self):
210 if self.parent_volume is not None:
211 return self.parent_volume.full_id
212 return None
213
214 @property
215 def has_abstract(self):
216 return "xml_abstract" in self.attrib
217
218 def get(self, name, default=None):
219 try:
220 return self.attrib[name]
221 except KeyError:
222 return default
223
224 def get_title(self, form="xml"):
225 """Returns the paper title, optionally formatting it.
226
227 Accepted formats:
228 - xml: Include any contained XML tags unchanged
229 - plain: Strip all XML tags, returning only plain text
230 - html: Convert XML tags into valid HTML tags
231 - latex: Convert XML tags into LaTeX commands
232 """
233 return self.formatter(self.get("xml_title"), form)
234
235 def get_abstract(self, form="xml"):
236 """Returns the abstract, optionally formatting it.
237
238 See `get_title()` for details.
239 """
240 return self.formatter(self.get("xml_abstract"), form, allow_url=True)
241
242 def get_booktitle(self, form="xml", default=""):
243 """Returns the booktitle, optionally formatting it.
244
245 See `get_title()` for details.
246 """
247 if "xml_booktitle" in self.attrib:
248 return self.formatter(self.get("xml_booktitle"), form)
249 elif self.parent_volume is not None:
250 return self.parent_volume.get("title")
251 else:
252 return default
253
254 def as_bibtex(self, concise=False):
255 """Return the BibTeX entry for this paper."""
256 # Build BibTeX entry
257 bibkey = self.bibkey
258 bibtype = self.bibtype
259 entries = [("title", self.get_title(form="latex"))]
260 for people in ("author", "editor"):
261 if people in self.attrib:
262 entries.append(
263 (people, " and ".join(p.as_bibtex() for p, _ in self.get(people)))
264 )
265 if is_journal(self.full_id):
266 entries.append(
267 ("journal", bibtex_encode(self.parent_volume.get("meta_journal_title")))
268 )
269 journal_volume = self.parent_volume.get(
270 "meta_volume", self.parent_volume.get("volume")
271 )
272 if journal_volume:
273 entries.append(("volume", journal_volume))
274 journal_issue = self.parent_volume.get(
275 "meta_issue", self.parent_volume.get("issue")
276 )
277 if journal_issue:
278 entries.append(("number", journal_issue))
279 else:
280 # not is_journal(self.full_id)
281 if "xml_booktitle" in self.attrib:
282 entries.append(("booktitle", self.get_booktitle(form="latex")))
283 elif bibtype != "proceedings":
284 entries.append(("booktitle", self.parent_volume.get_title(form="latex")))
285 for entry in ("month", "year", "address", "publisher", "note"):
286 if self.get(entry) is not None:
287 entries.append((entry, bibtex_encode(self.get(entry))))
288 for entry in ("url", "doi"):
289 if entry in self.attrib:
290 # don't want latex escapes such as
291 # doi = "10.1162/coli{\_}a{\_}00008",
292 entries.append((entry, self.get(entry)))
293 if "pages" in self.attrib:
294 entries.append(("pages", self.get("pages").replace("–", "--")))
295 if "xml_abstract" in self.attrib and not concise:
296 entries.append(("abstract", self.get_abstract(form="latex")))
297
298 # Serialize it
299 return bibtex_make_entry(bibkey, bibtype, entries)
300
301 def as_dict(self):
302 value = self.attrib
303 value["paper_id"] = self.paper_id
304 value["parent_volume_id"] = self.parent_volume_id
305 value["bibkey"] = self.bibkey
306 value["bibtype"] = self.bibtype
307 return value
308
309 def items(self):
310 return self.attrib.items()
311
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bin/anthology/papers.py b/bin/anthology/papers.py
--- a/bin/anthology/papers.py
+++ b/bin/anthology/papers.py
@@ -54,6 +54,7 @@
"venues",
"meta_date",
"url",
+ "pdf",
):
continue
| {"golden_diff": "diff --git a/bin/anthology/papers.py b/bin/anthology/papers.py\n--- a/bin/anthology/papers.py\n+++ b/bin/anthology/papers.py\n@@ -54,6 +54,7 @@\n \"venues\",\n \"meta_date\",\n \"url\",\n+ \"pdf\",\n ):\n continue\n", "issue": "I08-2 front matter links to whole PDF\nThe link from here https://www.aclweb.org/anthology/events/ijcnlp-2008/ that is normally the front matter for volume 2 links instead to the full PDF for all of volume 2.\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright 2019 Marcel Bollmann <[email protected]>\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging as log\nfrom .utils import (\n build_anthology_id,\n parse_element,\n infer_attachment_url,\n remove_extra_whitespace,\n is_journal,\n is_volume_id,\n)\nfrom . import data\n\n# For BibTeX export\nfrom .formatter import bibtex_encode, bibtex_make_entry\n\n\nclass Paper:\n def __init__(self, paper_id, ingest_date, volume, formatter):\n self.parent_volume = volume\n self.formatter = formatter\n self._id = paper_id\n self._ingest_date = ingest_date\n self._bibkey = False\n self.is_volume = paper_id == \"0\"\n\n # initialize metadata with keys inherited from volume\n self.attrib = {}\n for key, value in volume.attrib.items():\n # Only inherit 'editor' for frontmatter\n if (key == \"editor\" and not self.is_volume) or key in (\n \"collection_id\",\n \"booktitle\",\n \"id\",\n \"meta_data\",\n \"meta_journal_title\",\n \"meta_volume\",\n \"meta_issue\",\n \"sigs\",\n \"venues\",\n \"meta_date\",\n \"url\",\n ):\n continue\n\n self.attrib[key] = value\n\n def from_xml(xml_element, *args):\n ingest_date = xml_element.get(\"ingest-date\", data.UNKNOWN_INGEST_DATE)\n\n # Default to paper ID \"0\" (for front matter)\n paper = Paper(xml_element.get(\"id\", \"0\"), ingest_date, *args)\n\n # Set values from parsing the XML element (overwriting\n # and changing some initialized from the volume metadata)\n for key, value in parse_element(xml_element).items():\n if key == \"author\" and \"editor\" in paper.attrib:\n del paper.attrib[\"editor\"]\n paper.attrib[key] = value\n\n # Frontmatter title is the volume 'booktitle'\n if paper.is_volume:\n paper.attrib[\"xml_title\"] = paper.attrib[\"xml_booktitle\"]\n paper.attrib[\"xml_title\"].tag = \"title\"\n\n # Remove booktitle for frontmatter and journals\n if paper.is_volume or is_journal(paper.full_id):\n del paper.attrib[\"xml_booktitle\"]\n\n # Expand URLs with paper ID\n for tag in (\"revision\", \"erratum\"):\n if tag in paper.attrib:\n for item in paper.attrib[tag]:\n if not item[\"url\"].startswith(paper.full_id):\n log.error(\n \"{} must begin with paper ID '{}', but is '{}'\".format(\n tag, paper.full_id, item[\"url\"]\n )\n )\n item[\"url\"] = data.ANTHOLOGY_PDF.format(item[\"url\"])\n\n if \"attachment\" in paper.attrib:\n for item in paper.attrib[\"attachment\"]:\n item[\"url\"] = infer_attachment_url(item[\"url\"], paper.full_id)\n\n # Explicitly construct URL of original version of the paper\n # -- this is a bit 
hacky, but it's not given in the XML\n # explicitly\n if \"revision\" in paper.attrib:\n paper.attrib[\"revision\"].insert(\n 0,\n {\n \"value\": \"{}v1\".format(paper.full_id),\n \"id\": \"1\",\n \"url\": data.ANTHOLOGY_PDF.format(\"{}v1\".format(paper.full_id)),\n },\n )\n\n paper.attrib[\"title\"] = paper.get_title(\"plain\")\n paper.attrib[\"booktitle\"] = paper.get_booktitle(\"plain\")\n\n if \"editor\" in paper.attrib:\n if paper.is_volume:\n if \"author\" in paper.attrib:\n log.warn(\n \"Paper {} has both <editor> and <author>; ignoring <author>\".format(\n paper.full_id\n )\n )\n # Proceedings editors are considered authors for their front matter\n paper.attrib[\"author\"] = paper.attrib[\"editor\"]\n del paper.attrib[\"editor\"]\n else:\n log.warn(\n \"Paper {} has <editor> but is not a proceedings volume; ignoring <editor>\".format(\n paper.full_id\n )\n )\n if \"pages\" in paper.attrib:\n if paper.attrib[\"pages\"] is not None:\n paper._interpret_pages()\n else:\n del paper.attrib[\"pages\"]\n\n if \"author\" in paper.attrib:\n paper.attrib[\"author_string\"] = \", \".join(\n [x[0].full for x in paper.attrib[\"author\"]]\n )\n\n paper.attrib[\"thumbnail\"] = data.ANTHOLOGY_THUMBNAIL.format(paper.full_id)\n\n return paper\n\n def _interpret_pages(self):\n \"\"\"Splits up 'pages' field into first and last page, if possible.\n\n This is used for metadata in the generated HTML.\"\"\"\n for s in (\"--\", \"-\", \"\u2013\"):\n if self.attrib[\"pages\"].count(s) == 1:\n self.attrib[\"page_first\"], self.attrib[\"page_last\"] = self.attrib[\n \"pages\"\n ].split(s)\n self.attrib[\"pages\"] = self.attrib[\"pages\"].replace(s, \"\u2013\")\n return\n\n @property\n def ingest_date(self):\n \"\"\"Inherit publication date from parent, but self overrides. 
May be undefined.\"\"\"\n if self._ingest_date:\n return self._ingest_date\n if self.parent_volume:\n return self.parent_volume.ingest_date\n return data.UNKNOWN_INGEST_DATE\n\n @property\n def collection_id(self):\n return self.parent_volume.collection_id\n\n @property\n def volume_id(self):\n return self.parent_volume.volume_id\n\n @property\n def paper_id(self):\n return self._id\n\n @property\n def full_id(self):\n return self.anthology_id\n\n @property\n def anthology_id(self):\n return build_anthology_id(self.collection_id, self.volume_id, self.paper_id)\n\n @property\n def bibkey(self):\n if not self._bibkey:\n self._bibkey = self.full_id # fallback\n return self._bibkey\n\n @bibkey.setter\n def bibkey(self, value):\n self._bibkey = value\n\n @property\n def bibtype(self):\n if is_journal(self.full_id):\n return \"article\"\n elif self.is_volume:\n return \"proceedings\"\n else:\n return \"inproceedings\"\n\n @property\n def parent_volume_id(self):\n if self.parent_volume is not None:\n return self.parent_volume.full_id\n return None\n\n @property\n def has_abstract(self):\n return \"xml_abstract\" in self.attrib\n\n def get(self, name, default=None):\n try:\n return self.attrib[name]\n except KeyError:\n return default\n\n def get_title(self, form=\"xml\"):\n \"\"\"Returns the paper title, optionally formatting it.\n\n Accepted formats:\n - xml: Include any contained XML tags unchanged\n - plain: Strip all XML tags, returning only plain text\n - html: Convert XML tags into valid HTML tags\n - latex: Convert XML tags into LaTeX commands\n \"\"\"\n return self.formatter(self.get(\"xml_title\"), form)\n\n def get_abstract(self, form=\"xml\"):\n \"\"\"Returns the abstract, optionally formatting it.\n\n See `get_title()` for details.\n \"\"\"\n return self.formatter(self.get(\"xml_abstract\"), form, allow_url=True)\n\n def get_booktitle(self, form=\"xml\", default=\"\"):\n \"\"\"Returns the booktitle, optionally formatting it.\n\n See `get_title()` for details.\n \"\"\"\n if \"xml_booktitle\" in self.attrib:\n return self.formatter(self.get(\"xml_booktitle\"), form)\n elif self.parent_volume is not None:\n return self.parent_volume.get(\"title\")\n else:\n return default\n\n def as_bibtex(self, concise=False):\n \"\"\"Return the BibTeX entry for this paper.\"\"\"\n # Build BibTeX entry\n bibkey = self.bibkey\n bibtype = self.bibtype\n entries = [(\"title\", self.get_title(form=\"latex\"))]\n for people in (\"author\", \"editor\"):\n if people in self.attrib:\n entries.append(\n (people, \" and \".join(p.as_bibtex() for p, _ in self.get(people)))\n )\n if is_journal(self.full_id):\n entries.append(\n (\"journal\", bibtex_encode(self.parent_volume.get(\"meta_journal_title\")))\n )\n journal_volume = self.parent_volume.get(\n \"meta_volume\", self.parent_volume.get(\"volume\")\n )\n if journal_volume:\n entries.append((\"volume\", journal_volume))\n journal_issue = self.parent_volume.get(\n \"meta_issue\", self.parent_volume.get(\"issue\")\n )\n if journal_issue:\n entries.append((\"number\", journal_issue))\n else:\n # not is_journal(self.full_id)\n if \"xml_booktitle\" in self.attrib:\n entries.append((\"booktitle\", self.get_booktitle(form=\"latex\")))\n elif bibtype != \"proceedings\":\n entries.append((\"booktitle\", self.parent_volume.get_title(form=\"latex\")))\n for entry in (\"month\", \"year\", \"address\", \"publisher\", \"note\"):\n if self.get(entry) is not None:\n entries.append((entry, bibtex_encode(self.get(entry))))\n for entry in (\"url\", \"doi\"):\n if entry in 
self.attrib:\n # don't want latex escapes such as\n # doi = \"10.1162/coli{\\_}a{\\_}00008\",\n entries.append((entry, self.get(entry)))\n if \"pages\" in self.attrib:\n entries.append((\"pages\", self.get(\"pages\").replace(\"\u2013\", \"--\")))\n if \"xml_abstract\" in self.attrib and not concise:\n entries.append((\"abstract\", self.get_abstract(form=\"latex\")))\n\n # Serialize it\n return bibtex_make_entry(bibkey, bibtype, entries)\n\n def as_dict(self):\n value = self.attrib\n value[\"paper_id\"] = self.paper_id\n value[\"parent_volume_id\"] = self.parent_volume_id\n value[\"bibkey\"] = self.bibkey\n value[\"bibtype\"] = self.bibtype\n return value\n\n def items(self):\n return self.attrib.items()\n", "path": "bin/anthology/papers.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright 2019 Marcel Bollmann <[email protected]>\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nimport logging as log\nfrom .utils import (\n build_anthology_id,\n parse_element,\n infer_attachment_url,\n remove_extra_whitespace,\n is_journal,\n is_volume_id,\n)\nfrom . import data\n\n# For BibTeX export\nfrom .formatter import bibtex_encode, bibtex_make_entry\n\n\nclass Paper:\n def __init__(self, paper_id, ingest_date, volume, formatter):\n self.parent_volume = volume\n self.formatter = formatter\n self._id = paper_id\n self._ingest_date = ingest_date\n self._bibkey = False\n self.is_volume = paper_id == \"0\"\n\n # initialize metadata with keys inherited from volume\n self.attrib = {}\n for key, value in volume.attrib.items():\n # Only inherit 'editor' for frontmatter\n if (key == \"editor\" and not self.is_volume) or key in (\n \"collection_id\",\n \"booktitle\",\n \"id\",\n \"meta_data\",\n \"meta_journal_title\",\n \"meta_volume\",\n \"meta_issue\",\n \"sigs\",\n \"venues\",\n \"meta_date\",\n \"url\",\n \"pdf\",\n ):\n continue\n\n self.attrib[key] = value\n\n def from_xml(xml_element, *args):\n ingest_date = xml_element.get(\"ingest-date\", data.UNKNOWN_INGEST_DATE)\n\n # Default to paper ID \"0\" (for front matter)\n paper = Paper(xml_element.get(\"id\", \"0\"), ingest_date, *args)\n\n # Set values from parsing the XML element (overwriting\n # and changing some initialized from the volume metadata)\n for key, value in parse_element(xml_element).items():\n if key == \"author\" and \"editor\" in paper.attrib:\n del paper.attrib[\"editor\"]\n paper.attrib[key] = value\n\n # Frontmatter title is the volume 'booktitle'\n if paper.is_volume:\n paper.attrib[\"xml_title\"] = paper.attrib[\"xml_booktitle\"]\n paper.attrib[\"xml_title\"].tag = \"title\"\n\n # Remove booktitle for frontmatter and journals\n if paper.is_volume or is_journal(paper.full_id):\n del paper.attrib[\"xml_booktitle\"]\n\n # Expand URLs with paper ID\n for tag in (\"revision\", \"erratum\"):\n if tag in paper.attrib:\n for item in paper.attrib[tag]:\n if not item[\"url\"].startswith(paper.full_id):\n log.error(\n \"{} must begin with paper ID '{}', but is '{}'\".format(\n tag, paper.full_id, item[\"url\"]\n )\n )\n 
item[\"url\"] = data.ANTHOLOGY_PDF.format(item[\"url\"])\n\n if \"attachment\" in paper.attrib:\n for item in paper.attrib[\"attachment\"]:\n item[\"url\"] = infer_attachment_url(item[\"url\"], paper.full_id)\n\n # Explicitly construct URL of original version of the paper\n # -- this is a bit hacky, but it's not given in the XML\n # explicitly\n if \"revision\" in paper.attrib:\n paper.attrib[\"revision\"].insert(\n 0,\n {\n \"value\": \"{}v1\".format(paper.full_id),\n \"id\": \"1\",\n \"url\": data.ANTHOLOGY_PDF.format(\"{}v1\".format(paper.full_id)),\n },\n )\n\n paper.attrib[\"title\"] = paper.get_title(\"plain\")\n paper.attrib[\"booktitle\"] = paper.get_booktitle(\"plain\")\n\n if \"editor\" in paper.attrib:\n if paper.is_volume:\n if \"author\" in paper.attrib:\n log.warn(\n \"Paper {} has both <editor> and <author>; ignoring <author>\".format(\n paper.full_id\n )\n )\n # Proceedings editors are considered authors for their front matter\n paper.attrib[\"author\"] = paper.attrib[\"editor\"]\n del paper.attrib[\"editor\"]\n else:\n log.warn(\n \"Paper {} has <editor> but is not a proceedings volume; ignoring <editor>\".format(\n paper.full_id\n )\n )\n if \"pages\" in paper.attrib:\n if paper.attrib[\"pages\"] is not None:\n paper._interpret_pages()\n else:\n del paper.attrib[\"pages\"]\n\n if \"author\" in paper.attrib:\n paper.attrib[\"author_string\"] = \", \".join(\n [x[0].full for x in paper.attrib[\"author\"]]\n )\n\n paper.attrib[\"thumbnail\"] = data.ANTHOLOGY_THUMBNAIL.format(paper.full_id)\n\n return paper\n\n def _interpret_pages(self):\n \"\"\"Splits up 'pages' field into first and last page, if possible.\n\n This is used for metadata in the generated HTML.\"\"\"\n for s in (\"--\", \"-\", \"\u2013\"):\n if self.attrib[\"pages\"].count(s) == 1:\n self.attrib[\"page_first\"], self.attrib[\"page_last\"] = self.attrib[\n \"pages\"\n ].split(s)\n self.attrib[\"pages\"] = self.attrib[\"pages\"].replace(s, \"\u2013\")\n return\n\n @property\n def ingest_date(self):\n \"\"\"Inherit publication date from parent, but self overrides. 
May be undefined.\"\"\"\n if self._ingest_date:\n return self._ingest_date\n if self.parent_volume:\n return self.parent_volume.ingest_date\n return data.UNKNOWN_INGEST_DATE\n\n @property\n def collection_id(self):\n return self.parent_volume.collection_id\n\n @property\n def volume_id(self):\n return self.parent_volume.volume_id\n\n @property\n def paper_id(self):\n return self._id\n\n @property\n def full_id(self):\n return self.anthology_id\n\n @property\n def anthology_id(self):\n return build_anthology_id(self.collection_id, self.volume_id, self.paper_id)\n\n @property\n def bibkey(self):\n if not self._bibkey:\n self._bibkey = self.full_id # fallback\n return self._bibkey\n\n @bibkey.setter\n def bibkey(self, value):\n self._bibkey = value\n\n @property\n def bibtype(self):\n if is_journal(self.full_id):\n return \"article\"\n elif self.is_volume:\n return \"proceedings\"\n else:\n return \"inproceedings\"\n\n @property\n def parent_volume_id(self):\n if self.parent_volume is not None:\n return self.parent_volume.full_id\n return None\n\n @property\n def has_abstract(self):\n return \"xml_abstract\" in self.attrib\n\n def get(self, name, default=None):\n try:\n return self.attrib[name]\n except KeyError:\n return default\n\n def get_title(self, form=\"xml\"):\n \"\"\"Returns the paper title, optionally formatting it.\n\n Accepted formats:\n - xml: Include any contained XML tags unchanged\n - plain: Strip all XML tags, returning only plain text\n - html: Convert XML tags into valid HTML tags\n - latex: Convert XML tags into LaTeX commands\n \"\"\"\n return self.formatter(self.get(\"xml_title\"), form)\n\n def get_abstract(self, form=\"xml\"):\n \"\"\"Returns the abstract, optionally formatting it.\n\n See `get_title()` for details.\n \"\"\"\n return self.formatter(self.get(\"xml_abstract\"), form, allow_url=True)\n\n def get_booktitle(self, form=\"xml\", default=\"\"):\n \"\"\"Returns the booktitle, optionally formatting it.\n\n See `get_title()` for details.\n \"\"\"\n if \"xml_booktitle\" in self.attrib:\n return self.formatter(self.get(\"xml_booktitle\"), form)\n elif self.parent_volume is not None:\n return self.parent_volume.get(\"title\")\n else:\n return default\n\n def as_bibtex(self, concise=False):\n \"\"\"Return the BibTeX entry for this paper.\"\"\"\n # Build BibTeX entry\n bibkey = self.bibkey\n bibtype = self.bibtype\n entries = [(\"title\", self.get_title(form=\"latex\"))]\n for people in (\"author\", \"editor\"):\n if people in self.attrib:\n entries.append(\n (people, \" and \".join(p.as_bibtex() for p, _ in self.get(people)))\n )\n if is_journal(self.full_id):\n entries.append(\n (\"journal\", bibtex_encode(self.parent_volume.get(\"meta_journal_title\")))\n )\n journal_volume = self.parent_volume.get(\n \"meta_volume\", self.parent_volume.get(\"volume\")\n )\n if journal_volume:\n entries.append((\"volume\", journal_volume))\n journal_issue = self.parent_volume.get(\n \"meta_issue\", self.parent_volume.get(\"issue\")\n )\n if journal_issue:\n entries.append((\"number\", journal_issue))\n else:\n # not is_journal(self.full_id)\n if \"xml_booktitle\" in self.attrib:\n entries.append((\"booktitle\", self.get_booktitle(form=\"latex\")))\n elif bibtype != \"proceedings\":\n entries.append((\"booktitle\", self.parent_volume.get_title(form=\"latex\")))\n for entry in (\"month\", \"year\", \"address\", \"publisher\", \"note\"):\n if self.get(entry) is not None:\n entries.append((entry, bibtex_encode(self.get(entry))))\n for entry in (\"url\", \"doi\"):\n if entry in 
self.attrib:\n # don't want latex escapes such as\n # doi = \"10.1162/coli{\\_}a{\\_}00008\",\n entries.append((entry, self.get(entry)))\n if \"pages\" in self.attrib:\n entries.append((\"pages\", self.get(\"pages\").replace(\"\u2013\", \"--\")))\n if \"xml_abstract\" in self.attrib and not concise:\n entries.append((\"abstract\", self.get_abstract(form=\"latex\")))\n\n # Serialize it\n return bibtex_make_entry(bibkey, bibtype, entries)\n\n def as_dict(self):\n value = self.attrib\n value[\"paper_id\"] = self.paper_id\n value[\"parent_volume_id\"] = self.parent_volume_id\n value[\"bibkey\"] = self.bibkey\n value[\"bibtype\"] = self.bibtype\n return value\n\n def items(self):\n return self.attrib.items()\n", "path": "bin/anthology/papers.py"}]} |
gh_patches_debug_109 | rasdani/github-patches | git_diff | graphql-python__graphene-django-639 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
GraphiQL to version v0.11.11?
Proposal to move to GraphiQL v0.11.11 to solve a markdown issue in the documentation:
cfr. https://github.com/graphql/graphiql/issues/575
Scanning over the code, the main difference between v0.11.10 and v0.11.11 is a switch from 'marked' to 'markdown-it' for rendering the documentation.
Affected lines:
https://github.com/graphql-python/graphene-django/blob/f76f38ef30b88d921df243b09c4970528b1a4007/graphene_django/views.py#L54
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `graphene_django/views.py`
Content:
```
1 import inspect
2 import json
3 import re
4
5 import six
6 from django.http import HttpResponse, HttpResponseNotAllowed
7 from django.http.response import HttpResponseBadRequest
8 from django.shortcuts import render
9 from django.utils.decorators import method_decorator
10 from django.views.generic import View
11 from django.views.decorators.csrf import ensure_csrf_cookie
12
13 from graphql import get_default_backend
14 from graphql.error import format_error as format_graphql_error
15 from graphql.error import GraphQLError
16 from graphql.execution import ExecutionResult
17 from graphql.type.schema import GraphQLSchema
18
19 from .settings import graphene_settings
20
21
22 class HttpError(Exception):
23 def __init__(self, response, message=None, *args, **kwargs):
24 self.response = response
25 self.message = message = message or response.content.decode()
26 super(HttpError, self).__init__(message, *args, **kwargs)
27
28
29 def get_accepted_content_types(request):
30 def qualify(x):
31 parts = x.split(";", 1)
32 if len(parts) == 2:
33 match = re.match(r"(^|;)q=(0(\.\d{,3})?|1(\.0{,3})?)(;|$)", parts[1])
34 if match:
35 return parts[0].strip(), float(match.group(2))
36 return parts[0].strip(), 1
37
38 raw_content_types = request.META.get("HTTP_ACCEPT", "*/*").split(",")
39 qualified_content_types = map(qualify, raw_content_types)
40 return list(
41 x[0] for x in sorted(qualified_content_types, key=lambda x: x[1], reverse=True)
42 )
43
44
45 def instantiate_middleware(middlewares):
46 for middleware in middlewares:
47 if inspect.isclass(middleware):
48 yield middleware()
49 continue
50 yield middleware
51
52
53 class GraphQLView(View):
54 graphiql_version = "0.11.10"
55 graphiql_template = "graphene/graphiql.html"
56
57 schema = None
58 graphiql = False
59 executor = None
60 backend = None
61 middleware = None
62 root_value = None
63 pretty = False
64 batch = False
65
66 def __init__(
67 self,
68 schema=None,
69 executor=None,
70 middleware=None,
71 root_value=None,
72 graphiql=False,
73 pretty=False,
74 batch=False,
75 backend=None,
76 ):
77 if not schema:
78 schema = graphene_settings.SCHEMA
79
80 if backend is None:
81 backend = get_default_backend()
82
83 if middleware is None:
84 middleware = graphene_settings.MIDDLEWARE
85
86 self.schema = self.schema or schema
87 if middleware is not None:
88 self.middleware = list(instantiate_middleware(middleware))
89 self.executor = executor
90 self.root_value = root_value
91 self.pretty = self.pretty or pretty
92 self.graphiql = self.graphiql or graphiql
93 self.batch = self.batch or batch
94 self.backend = backend
95
96 assert isinstance(
97 self.schema, GraphQLSchema
98 ), "A Schema is required to be provided to GraphQLView."
99 assert not all((graphiql, batch)), "Use either graphiql or batch processing"
100
101 # noinspection PyUnusedLocal
102 def get_root_value(self, request):
103 return self.root_value
104
105 def get_middleware(self, request):
106 return self.middleware
107
108 def get_context(self, request):
109 return request
110
111 def get_backend(self, request):
112 return self.backend
113
114 @method_decorator(ensure_csrf_cookie)
115 def dispatch(self, request, *args, **kwargs):
116 try:
117 if request.method.lower() not in ("get", "post"):
118 raise HttpError(
119 HttpResponseNotAllowed(
120 ["GET", "POST"], "GraphQL only supports GET and POST requests."
121 )
122 )
123
124 data = self.parse_body(request)
125 show_graphiql = self.graphiql and self.can_display_graphiql(request, data)
126
127 if show_graphiql:
128 return self.render_graphiql(
129 request, graphiql_version=self.graphiql_version
130 )
131
132 if self.batch:
133 responses = [self.get_response(request, entry) for entry in data]
134 result = "[{}]".format(
135 ",".join([response[0] for response in responses])
136 )
137 status_code = (
138 responses
139 and max(responses, key=lambda response: response[1])[1]
140 or 200
141 )
142 else:
143 result, status_code = self.get_response(request, data, show_graphiql)
144
145 return HttpResponse(
146 status=status_code, content=result, content_type="application/json"
147 )
148
149 except HttpError as e:
150 response = e.response
151 response["Content-Type"] = "application/json"
152 response.content = self.json_encode(
153 request, {"errors": [self.format_error(e)]}
154 )
155 return response
156
157 def get_response(self, request, data, show_graphiql=False):
158 query, variables, operation_name, id = self.get_graphql_params(request, data)
159
160 execution_result = self.execute_graphql_request(
161 request, data, query, variables, operation_name, show_graphiql
162 )
163
164 status_code = 200
165 if execution_result:
166 response = {}
167
168 if execution_result.errors:
169 response["errors"] = [
170 self.format_error(e) for e in execution_result.errors
171 ]
172
173 if execution_result.invalid:
174 status_code = 400
175 else:
176 response["data"] = execution_result.data
177
178 if self.batch:
179 response["id"] = id
180 response["status"] = status_code
181
182 result = self.json_encode(request, response, pretty=show_graphiql)
183 else:
184 result = None
185
186 return result, status_code
187
188 def render_graphiql(self, request, **data):
189 return render(request, self.graphiql_template, data)
190
191 def json_encode(self, request, d, pretty=False):
192 if not (self.pretty or pretty) and not request.GET.get("pretty"):
193 return json.dumps(d, separators=(",", ":"))
194
195 return json.dumps(d, sort_keys=True, indent=2, separators=(",", ": "))
196
197 def parse_body(self, request):
198 content_type = self.get_content_type(request)
199
200 if content_type == "application/graphql":
201 return {"query": request.body.decode()}
202
203 elif content_type == "application/json":
204 # noinspection PyBroadException
205 try:
206 body = request.body.decode("utf-8")
207 except Exception as e:
208 raise HttpError(HttpResponseBadRequest(str(e)))
209
210 try:
211 request_json = json.loads(body)
212 if self.batch:
213 assert isinstance(request_json, list), (
214 "Batch requests should receive a list, but received {}."
215 ).format(repr(request_json))
216 assert (
217 len(request_json) > 0
218 ), "Received an empty list in the batch request."
219 else:
220 assert isinstance(
221 request_json, dict
222 ), "The received data is not a valid JSON query."
223 return request_json
224 except AssertionError as e:
225 raise HttpError(HttpResponseBadRequest(str(e)))
226 except (TypeError, ValueError):
227 raise HttpError(HttpResponseBadRequest("POST body sent invalid JSON."))
228
229 elif content_type in [
230 "application/x-www-form-urlencoded",
231 "multipart/form-data",
232 ]:
233 return request.POST
234
235 return {}
236
237 def execute_graphql_request(
238 self, request, data, query, variables, operation_name, show_graphiql=False
239 ):
240 if not query:
241 if show_graphiql:
242 return None
243 raise HttpError(HttpResponseBadRequest("Must provide query string."))
244
245 try:
246 backend = self.get_backend(request)
247 document = backend.document_from_string(self.schema, query)
248 except Exception as e:
249 return ExecutionResult(errors=[e], invalid=True)
250
251 if request.method.lower() == "get":
252 operation_type = document.get_operation_type(operation_name)
253 if operation_type and operation_type != "query":
254 if show_graphiql:
255 return None
256
257 raise HttpError(
258 HttpResponseNotAllowed(
259 ["POST"],
260 "Can only perform a {} operation from a POST request.".format(
261 operation_type
262 ),
263 )
264 )
265
266 try:
267 extra_options = {}
268 if self.executor:
269 # We only include it optionally since
270 # executor is not a valid argument in all backends
271 extra_options["executor"] = self.executor
272
273 return document.execute(
274 root=self.get_root_value(request),
275 variables=variables,
276 operation_name=operation_name,
277 context=self.get_context(request),
278 middleware=self.get_middleware(request),
279 **extra_options
280 )
281 except Exception as e:
282 return ExecutionResult(errors=[e], invalid=True)
283
284 @classmethod
285 def can_display_graphiql(cls, request, data):
286 raw = "raw" in request.GET or "raw" in data
287 return not raw and cls.request_wants_html(request)
288
289 @classmethod
290 def request_wants_html(cls, request):
291 accepted = get_accepted_content_types(request)
292 accepted_length = len(accepted)
293 # the list will be ordered in preferred first - so we have to make
294 # sure the most preferred gets the highest number
295 html_priority = (
296 accepted_length - accepted.index("text/html")
297 if "text/html" in accepted
298 else 0
299 )
300 json_priority = (
301 accepted_length - accepted.index("application/json")
302 if "application/json" in accepted
303 else 0
304 )
305
306 return html_priority > json_priority
307
308 @staticmethod
309 def get_graphql_params(request, data):
310 query = request.GET.get("query") or data.get("query")
311 variables = request.GET.get("variables") or data.get("variables")
312 id = request.GET.get("id") or data.get("id")
313
314 if variables and isinstance(variables, six.text_type):
315 try:
316 variables = json.loads(variables)
317 except Exception:
318 raise HttpError(HttpResponseBadRequest("Variables are invalid JSON."))
319
320 operation_name = request.GET.get("operationName") or data.get("operationName")
321 if operation_name == "null":
322 operation_name = None
323
324 return query, variables, operation_name, id
325
326 @staticmethod
327 def format_error(error):
328 if isinstance(error, GraphQLError):
329 return format_graphql_error(error)
330
331 return {"message": six.text_type(error)}
332
333 @staticmethod
334 def get_content_type(request):
335 meta = request.META
336 content_type = meta.get("CONTENT_TYPE", meta.get("HTTP_CONTENT_TYPE", ""))
337 return content_type.split(";", 1)[0].lower()
338
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/graphene_django/views.py b/graphene_django/views.py
--- a/graphene_django/views.py
+++ b/graphene_django/views.py
@@ -51,7 +51,7 @@
class GraphQLView(View):
- graphiql_version = "0.11.10"
+ graphiql_version = "0.11.11"
graphiql_template = "graphene/graphiql.html"
schema = None
| {"golden_diff": "diff --git a/graphene_django/views.py b/graphene_django/views.py\n--- a/graphene_django/views.py\n+++ b/graphene_django/views.py\n@@ -51,7 +51,7 @@\n \n \n class GraphQLView(View):\n- graphiql_version = \"0.11.10\"\n+ graphiql_version = \"0.11.11\"\n graphiql_template = \"graphene/graphiql.html\"\n \n schema = None\n", "issue": "GraphiQL to version v0.11.11?\nProposal to move to GraphiQL v0.11.11 to solve a markdown issue in the documentation:\r\ncfr. https://github.com/graphql/graphiql/issues/575\r\n\r\nScanning over the code, the main difference between v0.11.10 and v0.11.11 is a switch from 'marked' to 'markdown-it' for rendering the documentation.\r\n\r\nAffected lines:\r\nhttps://github.com/graphql-python/graphene-django/blob/f76f38ef30b88d921df243b09c4970528b1a4007/graphene_django/views.py#L54\n", "before_files": [{"content": "import inspect\nimport json\nimport re\n\nimport six\nfrom django.http import HttpResponse, HttpResponseNotAllowed\nfrom django.http.response import HttpResponseBadRequest\nfrom django.shortcuts import render\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import View\nfrom django.views.decorators.csrf import ensure_csrf_cookie\n\nfrom graphql import get_default_backend\nfrom graphql.error import format_error as format_graphql_error\nfrom graphql.error import GraphQLError\nfrom graphql.execution import ExecutionResult\nfrom graphql.type.schema import GraphQLSchema\n\nfrom .settings import graphene_settings\n\n\nclass HttpError(Exception):\n def __init__(self, response, message=None, *args, **kwargs):\n self.response = response\n self.message = message = message or response.content.decode()\n super(HttpError, self).__init__(message, *args, **kwargs)\n\n\ndef get_accepted_content_types(request):\n def qualify(x):\n parts = x.split(\";\", 1)\n if len(parts) == 2:\n match = re.match(r\"(^|;)q=(0(\\.\\d{,3})?|1(\\.0{,3})?)(;|$)\", parts[1])\n if match:\n return parts[0].strip(), float(match.group(2))\n return parts[0].strip(), 1\n\n raw_content_types = request.META.get(\"HTTP_ACCEPT\", \"*/*\").split(\",\")\n qualified_content_types = map(qualify, raw_content_types)\n return list(\n x[0] for x in sorted(qualified_content_types, key=lambda x: x[1], reverse=True)\n )\n\n\ndef instantiate_middleware(middlewares):\n for middleware in middlewares:\n if inspect.isclass(middleware):\n yield middleware()\n continue\n yield middleware\n\n\nclass GraphQLView(View):\n graphiql_version = \"0.11.10\"\n graphiql_template = \"graphene/graphiql.html\"\n\n schema = None\n graphiql = False\n executor = None\n backend = None\n middleware = None\n root_value = None\n pretty = False\n batch = False\n\n def __init__(\n self,\n schema=None,\n executor=None,\n middleware=None,\n root_value=None,\n graphiql=False,\n pretty=False,\n batch=False,\n backend=None,\n ):\n if not schema:\n schema = graphene_settings.SCHEMA\n\n if backend is None:\n backend = get_default_backend()\n\n if middleware is None:\n middleware = graphene_settings.MIDDLEWARE\n\n self.schema = self.schema or schema\n if middleware is not None:\n self.middleware = list(instantiate_middleware(middleware))\n self.executor = executor\n self.root_value = root_value\n self.pretty = self.pretty or pretty\n self.graphiql = self.graphiql or graphiql\n self.batch = self.batch or batch\n self.backend = backend\n\n assert isinstance(\n self.schema, GraphQLSchema\n ), \"A Schema is required to be provided to GraphQLView.\"\n assert not all((graphiql, batch)), \"Use either graphiql or 
batch processing\"\n\n # noinspection PyUnusedLocal\n def get_root_value(self, request):\n return self.root_value\n\n def get_middleware(self, request):\n return self.middleware\n\n def get_context(self, request):\n return request\n\n def get_backend(self, request):\n return self.backend\n\n @method_decorator(ensure_csrf_cookie)\n def dispatch(self, request, *args, **kwargs):\n try:\n if request.method.lower() not in (\"get\", \"post\"):\n raise HttpError(\n HttpResponseNotAllowed(\n [\"GET\", \"POST\"], \"GraphQL only supports GET and POST requests.\"\n )\n )\n\n data = self.parse_body(request)\n show_graphiql = self.graphiql and self.can_display_graphiql(request, data)\n\n if show_graphiql:\n return self.render_graphiql(\n request, graphiql_version=self.graphiql_version\n )\n\n if self.batch:\n responses = [self.get_response(request, entry) for entry in data]\n result = \"[{}]\".format(\n \",\".join([response[0] for response in responses])\n )\n status_code = (\n responses\n and max(responses, key=lambda response: response[1])[1]\n or 200\n )\n else:\n result, status_code = self.get_response(request, data, show_graphiql)\n\n return HttpResponse(\n status=status_code, content=result, content_type=\"application/json\"\n )\n\n except HttpError as e:\n response = e.response\n response[\"Content-Type\"] = \"application/json\"\n response.content = self.json_encode(\n request, {\"errors\": [self.format_error(e)]}\n )\n return response\n\n def get_response(self, request, data, show_graphiql=False):\n query, variables, operation_name, id = self.get_graphql_params(request, data)\n\n execution_result = self.execute_graphql_request(\n request, data, query, variables, operation_name, show_graphiql\n )\n\n status_code = 200\n if execution_result:\n response = {}\n\n if execution_result.errors:\n response[\"errors\"] = [\n self.format_error(e) for e in execution_result.errors\n ]\n\n if execution_result.invalid:\n status_code = 400\n else:\n response[\"data\"] = execution_result.data\n\n if self.batch:\n response[\"id\"] = id\n response[\"status\"] = status_code\n\n result = self.json_encode(request, response, pretty=show_graphiql)\n else:\n result = None\n\n return result, status_code\n\n def render_graphiql(self, request, **data):\n return render(request, self.graphiql_template, data)\n\n def json_encode(self, request, d, pretty=False):\n if not (self.pretty or pretty) and not request.GET.get(\"pretty\"):\n return json.dumps(d, separators=(\",\", \":\"))\n\n return json.dumps(d, sort_keys=True, indent=2, separators=(\",\", \": \"))\n\n def parse_body(self, request):\n content_type = self.get_content_type(request)\n\n if content_type == \"application/graphql\":\n return {\"query\": request.body.decode()}\n\n elif content_type == \"application/json\":\n # noinspection PyBroadException\n try:\n body = request.body.decode(\"utf-8\")\n except Exception as e:\n raise HttpError(HttpResponseBadRequest(str(e)))\n\n try:\n request_json = json.loads(body)\n if self.batch:\n assert isinstance(request_json, list), (\n \"Batch requests should receive a list, but received {}.\"\n ).format(repr(request_json))\n assert (\n len(request_json) > 0\n ), \"Received an empty list in the batch request.\"\n else:\n assert isinstance(\n request_json, dict\n ), \"The received data is not a valid JSON query.\"\n return request_json\n except AssertionError as e:\n raise HttpError(HttpResponseBadRequest(str(e)))\n except (TypeError, ValueError):\n raise HttpError(HttpResponseBadRequest(\"POST body sent invalid JSON.\"))\n\n elif 
content_type in [\n \"application/x-www-form-urlencoded\",\n \"multipart/form-data\",\n ]:\n return request.POST\n\n return {}\n\n def execute_graphql_request(\n self, request, data, query, variables, operation_name, show_graphiql=False\n ):\n if not query:\n if show_graphiql:\n return None\n raise HttpError(HttpResponseBadRequest(\"Must provide query string.\"))\n\n try:\n backend = self.get_backend(request)\n document = backend.document_from_string(self.schema, query)\n except Exception as e:\n return ExecutionResult(errors=[e], invalid=True)\n\n if request.method.lower() == \"get\":\n operation_type = document.get_operation_type(operation_name)\n if operation_type and operation_type != \"query\":\n if show_graphiql:\n return None\n\n raise HttpError(\n HttpResponseNotAllowed(\n [\"POST\"],\n \"Can only perform a {} operation from a POST request.\".format(\n operation_type\n ),\n )\n )\n\n try:\n extra_options = {}\n if self.executor:\n # We only include it optionally since\n # executor is not a valid argument in all backends\n extra_options[\"executor\"] = self.executor\n\n return document.execute(\n root=self.get_root_value(request),\n variables=variables,\n operation_name=operation_name,\n context=self.get_context(request),\n middleware=self.get_middleware(request),\n **extra_options\n )\n except Exception as e:\n return ExecutionResult(errors=[e], invalid=True)\n\n @classmethod\n def can_display_graphiql(cls, request, data):\n raw = \"raw\" in request.GET or \"raw\" in data\n return not raw and cls.request_wants_html(request)\n\n @classmethod\n def request_wants_html(cls, request):\n accepted = get_accepted_content_types(request)\n accepted_length = len(accepted)\n # the list will be ordered in preferred first - so we have to make\n # sure the most preferred gets the highest number\n html_priority = (\n accepted_length - accepted.index(\"text/html\")\n if \"text/html\" in accepted\n else 0\n )\n json_priority = (\n accepted_length - accepted.index(\"application/json\")\n if \"application/json\" in accepted\n else 0\n )\n\n return html_priority > json_priority\n\n @staticmethod\n def get_graphql_params(request, data):\n query = request.GET.get(\"query\") or data.get(\"query\")\n variables = request.GET.get(\"variables\") or data.get(\"variables\")\n id = request.GET.get(\"id\") or data.get(\"id\")\n\n if variables and isinstance(variables, six.text_type):\n try:\n variables = json.loads(variables)\n except Exception:\n raise HttpError(HttpResponseBadRequest(\"Variables are invalid JSON.\"))\n\n operation_name = request.GET.get(\"operationName\") or data.get(\"operationName\")\n if operation_name == \"null\":\n operation_name = None\n\n return query, variables, operation_name, id\n\n @staticmethod\n def format_error(error):\n if isinstance(error, GraphQLError):\n return format_graphql_error(error)\n\n return {\"message\": six.text_type(error)}\n\n @staticmethod\n def get_content_type(request):\n meta = request.META\n content_type = meta.get(\"CONTENT_TYPE\", meta.get(\"HTTP_CONTENT_TYPE\", \"\"))\n return content_type.split(\";\", 1)[0].lower()\n", "path": "graphene_django/views.py"}], "after_files": [{"content": "import inspect\nimport json\nimport re\n\nimport six\nfrom django.http import HttpResponse, HttpResponseNotAllowed\nfrom django.http.response import HttpResponseBadRequest\nfrom django.shortcuts import render\nfrom django.utils.decorators import method_decorator\nfrom django.views.generic import View\nfrom django.views.decorators.csrf import ensure_csrf_cookie\n\nfrom graphql 
import get_default_backend\nfrom graphql.error import format_error as format_graphql_error\nfrom graphql.error import GraphQLError\nfrom graphql.execution import ExecutionResult\nfrom graphql.type.schema import GraphQLSchema\n\nfrom .settings import graphene_settings\n\n\nclass HttpError(Exception):\n def __init__(self, response, message=None, *args, **kwargs):\n self.response = response\n self.message = message = message or response.content.decode()\n super(HttpError, self).__init__(message, *args, **kwargs)\n\n\ndef get_accepted_content_types(request):\n def qualify(x):\n parts = x.split(\";\", 1)\n if len(parts) == 2:\n match = re.match(r\"(^|;)q=(0(\\.\\d{,3})?|1(\\.0{,3})?)(;|$)\", parts[1])\n if match:\n return parts[0].strip(), float(match.group(2))\n return parts[0].strip(), 1\n\n raw_content_types = request.META.get(\"HTTP_ACCEPT\", \"*/*\").split(\",\")\n qualified_content_types = map(qualify, raw_content_types)\n return list(\n x[0] for x in sorted(qualified_content_types, key=lambda x: x[1], reverse=True)\n )\n\n\ndef instantiate_middleware(middlewares):\n for middleware in middlewares:\n if inspect.isclass(middleware):\n yield middleware()\n continue\n yield middleware\n\n\nclass GraphQLView(View):\n graphiql_version = \"0.11.11\"\n graphiql_template = \"graphene/graphiql.html\"\n\n schema = None\n graphiql = False\n executor = None\n backend = None\n middleware = None\n root_value = None\n pretty = False\n batch = False\n\n def __init__(\n self,\n schema=None,\n executor=None,\n middleware=None,\n root_value=None,\n graphiql=False,\n pretty=False,\n batch=False,\n backend=None,\n ):\n if not schema:\n schema = graphene_settings.SCHEMA\n\n if backend is None:\n backend = get_default_backend()\n\n if middleware is None:\n middleware = graphene_settings.MIDDLEWARE\n\n self.schema = self.schema or schema\n if middleware is not None:\n self.middleware = list(instantiate_middleware(middleware))\n self.executor = executor\n self.root_value = root_value\n self.pretty = self.pretty or pretty\n self.graphiql = self.graphiql or graphiql\n self.batch = self.batch or batch\n self.backend = backend\n\n assert isinstance(\n self.schema, GraphQLSchema\n ), \"A Schema is required to be provided to GraphQLView.\"\n assert not all((graphiql, batch)), \"Use either graphiql or batch processing\"\n\n # noinspection PyUnusedLocal\n def get_root_value(self, request):\n return self.root_value\n\n def get_middleware(self, request):\n return self.middleware\n\n def get_context(self, request):\n return request\n\n def get_backend(self, request):\n return self.backend\n\n @method_decorator(ensure_csrf_cookie)\n def dispatch(self, request, *args, **kwargs):\n try:\n if request.method.lower() not in (\"get\", \"post\"):\n raise HttpError(\n HttpResponseNotAllowed(\n [\"GET\", \"POST\"], \"GraphQL only supports GET and POST requests.\"\n )\n )\n\n data = self.parse_body(request)\n show_graphiql = self.graphiql and self.can_display_graphiql(request, data)\n\n if show_graphiql:\n return self.render_graphiql(\n request, graphiql_version=self.graphiql_version\n )\n\n if self.batch:\n responses = [self.get_response(request, entry) for entry in data]\n result = \"[{}]\".format(\n \",\".join([response[0] for response in responses])\n )\n status_code = (\n responses\n and max(responses, key=lambda response: response[1])[1]\n or 200\n )\n else:\n result, status_code = self.get_response(request, data, show_graphiql)\n\n return HttpResponse(\n status=status_code, content=result, content_type=\"application/json\"\n 
)\n\n except HttpError as e:\n response = e.response\n response[\"Content-Type\"] = \"application/json\"\n response.content = self.json_encode(\n request, {\"errors\": [self.format_error(e)]}\n )\n return response\n\n def get_response(self, request, data, show_graphiql=False):\n query, variables, operation_name, id = self.get_graphql_params(request, data)\n\n execution_result = self.execute_graphql_request(\n request, data, query, variables, operation_name, show_graphiql\n )\n\n status_code = 200\n if execution_result:\n response = {}\n\n if execution_result.errors:\n response[\"errors\"] = [\n self.format_error(e) for e in execution_result.errors\n ]\n\n if execution_result.invalid:\n status_code = 400\n else:\n response[\"data\"] = execution_result.data\n\n if self.batch:\n response[\"id\"] = id\n response[\"status\"] = status_code\n\n result = self.json_encode(request, response, pretty=show_graphiql)\n else:\n result = None\n\n return result, status_code\n\n def render_graphiql(self, request, **data):\n return render(request, self.graphiql_template, data)\n\n def json_encode(self, request, d, pretty=False):\n if not (self.pretty or pretty) and not request.GET.get(\"pretty\"):\n return json.dumps(d, separators=(\",\", \":\"))\n\n return json.dumps(d, sort_keys=True, indent=2, separators=(\",\", \": \"))\n\n def parse_body(self, request):\n content_type = self.get_content_type(request)\n\n if content_type == \"application/graphql\":\n return {\"query\": request.body.decode()}\n\n elif content_type == \"application/json\":\n # noinspection PyBroadException\n try:\n body = request.body.decode(\"utf-8\")\n except Exception as e:\n raise HttpError(HttpResponseBadRequest(str(e)))\n\n try:\n request_json = json.loads(body)\n if self.batch:\n assert isinstance(request_json, list), (\n \"Batch requests should receive a list, but received {}.\"\n ).format(repr(request_json))\n assert (\n len(request_json) > 0\n ), \"Received an empty list in the batch request.\"\n else:\n assert isinstance(\n request_json, dict\n ), \"The received data is not a valid JSON query.\"\n return request_json\n except AssertionError as e:\n raise HttpError(HttpResponseBadRequest(str(e)))\n except (TypeError, ValueError):\n raise HttpError(HttpResponseBadRequest(\"POST body sent invalid JSON.\"))\n\n elif content_type in [\n \"application/x-www-form-urlencoded\",\n \"multipart/form-data\",\n ]:\n return request.POST\n\n return {}\n\n def execute_graphql_request(\n self, request, data, query, variables, operation_name, show_graphiql=False\n ):\n if not query:\n if show_graphiql:\n return None\n raise HttpError(HttpResponseBadRequest(\"Must provide query string.\"))\n\n try:\n backend = self.get_backend(request)\n document = backend.document_from_string(self.schema, query)\n except Exception as e:\n return ExecutionResult(errors=[e], invalid=True)\n\n if request.method.lower() == \"get\":\n operation_type = document.get_operation_type(operation_name)\n if operation_type and operation_type != \"query\":\n if show_graphiql:\n return None\n\n raise HttpError(\n HttpResponseNotAllowed(\n [\"POST\"],\n \"Can only perform a {} operation from a POST request.\".format(\n operation_type\n ),\n )\n )\n\n try:\n extra_options = {}\n if self.executor:\n # We only include it optionally since\n # executor is not a valid argument in all backends\n extra_options[\"executor\"] = self.executor\n\n return document.execute(\n root=self.get_root_value(request),\n variables=variables,\n operation_name=operation_name,\n 
context=self.get_context(request),\n middleware=self.get_middleware(request),\n **extra_options\n )\n except Exception as e:\n return ExecutionResult(errors=[e], invalid=True)\n\n @classmethod\n def can_display_graphiql(cls, request, data):\n raw = \"raw\" in request.GET or \"raw\" in data\n return not raw and cls.request_wants_html(request)\n\n @classmethod\n def request_wants_html(cls, request):\n accepted = get_accepted_content_types(request)\n accepted_length = len(accepted)\n # the list will be ordered in preferred first - so we have to make\n # sure the most preferred gets the highest number\n html_priority = (\n accepted_length - accepted.index(\"text/html\")\n if \"text/html\" in accepted\n else 0\n )\n json_priority = (\n accepted_length - accepted.index(\"application/json\")\n if \"application/json\" in accepted\n else 0\n )\n\n return html_priority > json_priority\n\n @staticmethod\n def get_graphql_params(request, data):\n query = request.GET.get(\"query\") or data.get(\"query\")\n variables = request.GET.get(\"variables\") or data.get(\"variables\")\n id = request.GET.get(\"id\") or data.get(\"id\")\n\n if variables and isinstance(variables, six.text_type):\n try:\n variables = json.loads(variables)\n except Exception:\n raise HttpError(HttpResponseBadRequest(\"Variables are invalid JSON.\"))\n\n operation_name = request.GET.get(\"operationName\") or data.get(\"operationName\")\n if operation_name == \"null\":\n operation_name = None\n\n return query, variables, operation_name, id\n\n @staticmethod\n def format_error(error):\n if isinstance(error, GraphQLError):\n return format_graphql_error(error)\n\n return {\"message\": six.text_type(error)}\n\n @staticmethod\n def get_content_type(request):\n meta = request.META\n content_type = meta.get(\"CONTENT_TYPE\", meta.get(\"HTTP_CONTENT_TYPE\", \"\"))\n return content_type.split(\";\", 1)[0].lower()\n", "path": "graphene_django/views.py"}]} |
gh_patches_debug_110 | rasdani/github-patches | git_diff | localstack__localstack-1842 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Elasticsearch domain managed by Terraform cannot be updated; request for /tags/? returns 404
We use Terraform to create and update resources in Localstack, which has worked for services like S3 and Dynamo so far.
We hit an issue with Elasticsearch domains, where the domain is created successfully but Terraform fails to apply in subsequent runs, when it makes a request to:
```
logs: ---[ REQUEST POST-SIGN ]-----------------------------
logs: GET /2015-01-01/tags/?arn=arn%3Aaws%3Aes%3Aus-east-1%3A000000000000%3Adomain%2Fepdam-local-amd HTTP/1.1
logs: Host: localhost:4578
logs: User-Agent: aws-sdk-go/1.14.31 (go1.9.2; darwin; amd64) APN/1.0 HashiCorp/1.0 Terraform/0.11.8-dev
logs: Authorization: AWS4-HMAC-SHA256 Credential=mock_access_key/20190221/us-west-2/es/aws4_request, SignedHeaders=host;x-amz-date, Signature=26f42429e2af2240466635ab9202c8888617afe9be7b8ef91a8831d6b4160bd1
logs: X-Amz-Date: 20190221T191447Z
logs: Accept-Encoding: gzip
```
and the response is:
```
logs: ---[ RESPONSE ]--------------------------------------
logs: HTTP/1.0 404 NOT FOUND
logs: Connection: close
logs: Content-Length: 233
logs: Access-Control-Allow-Origin: *
logs: Content-Type: text/html
logs: Date: Thu, 21 Feb 2019 19:14:47 GMT
logs: Server: Werkzeug/0.14.1 Python/2.7.15
```
While a request to `localhost:4578/2015-01-01/tags/?arn=...` gets a 404, a request to `localhost:4578/2015-01-01/tags?arn=...` (without the `/` before the query params) is successful.
The reason we are reporting this against Localstack and not [terraform](https://github.com/hashicorp/terraform) or [terraform-provider-aws](https://github.com/terraform-providers/terraform-provider-aws) is that the AWS REST API apparently supports requests with slashes before query parameters, or else Terraform could not be used to manage Elasticsearch domains in AWS.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `localstack/services/es/es_api.py`
Content:
```
1 import json
2 import time
3 from random import randint
4 from flask import Flask, jsonify, request, make_response
5 from localstack.services import generic_proxy
6 from localstack.utils.aws import aws_stack
7 from localstack.constants import TEST_AWS_ACCOUNT_ID
8 from localstack.utils.common import to_str
9 from localstack.utils.analytics import event_publisher
10
11 APP_NAME = 'es_api'
12 API_PREFIX = '/2015-01-01'
13
14 ES_DOMAINS = {}
15
16 app = Flask(APP_NAME)
17
18
19 def error_response(error_type, code=400, message='Unknown error.'):
20 if not message:
21 if error_type == 'ResourceNotFoundException':
22 message = 'Resource not found.'
23 elif error_type == 'ResourceAlreadyExistsException':
24 message = 'Resource already exists.'
25 response = make_response(jsonify({'error': message}))
26 response.headers['x-amzn-errortype'] = error_type
27 return response, code
28
29
30 def get_domain_config_status():
31 return {
32 'CreationDate': '%.2f' % time.time(),
33 'PendingDeletion': False,
34 'State': 'Active',
35 'UpdateDate': '%.2f' % time.time(),
36 'UpdateVersion': randint(1, 100)
37 }
38
39
40 def get_domain_config(domain_name):
41 config_status = get_domain_config_status()
42 return {
43 'DomainConfig': {
44 'AccessPolicies': {
45 'Options': '{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":"arn:aws:iam::%s:root"},"Action":"es:*","Resource":"arn:aws:es:%s:%s:domain/%s/*"}]}' % (TEST_AWS_ACCOUNT_ID, aws_stack.get_region(), TEST_AWS_ACCOUNT_ID, domain_name), # noqa: E501
46 'Status': config_status
47 },
48 'AdvancedOptions': {
49 'Options': {
50 'indices.fielddata.cache.size': '',
51 'rest.action.multi.allow_explicit_index': 'true'
52 },
53 'Status': config_status
54 },
55 'EBSOptions': {
56 'Options': {
57 'EBSEnabled': True,
58 'EncryptionEnabled': False,
59 'Iops': 0,
60 'VolumeSize': 10,
61 'VolumeType': 'gp2'
62 },
63 'Status': config_status
64 },
65 'ElasticsearchClusterConfig': {
66 'Options': {
67 'DedicatedMasterCount': 1,
68 'DedicatedMasterEnabled': True,
69 'DedicatedMasterType': 'm3.medium.elasticsearch',
70 'InstanceCount': 1,
71 'InstanceType': 'm3.medium.elasticsearch',
72 'ZoneAwarenessEnabled': False
73 },
74 'Status': config_status
75 },
76 'ElasticsearchVersion': {
77 'Options': '5.3',
78 'Status': config_status
79 },
80 'EncryptionAtRestOptions': {
81 'Options': {
82 'Enabled': False,
83 'KmsKeyId': ''
84 },
85 'Status': config_status
86 },
87 'LogPublishingOptions': {
88 'Options': {
89 'INDEX_SLOW_LOGS': {
90 'CloudWatchLogsLogGroupArn': 'arn:aws:logs:%s:%s:log-group:sample-domain' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID), # noqa: E501
91 'Enabled': False
92 },
93 'SEARCH_SLOW_LOGS': {
94 'CloudWatchLogsLogGroupArn': 'arn:aws:logs:%s:%s:log-group:sample-domain' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID), # noqa: E501
95 'Enabled': False,
96 }
97 },
98 'Status': config_status
99 },
100 'SnapshotOptions': {
101 'Options': {
102 'AutomatedSnapshotStartHour': randint(0, 23)
103 },
104 'Status': config_status
105 },
106 'VPCOptions': {
107 'Options': {
108 'AvailabilityZones': [
109 'us-east-1b'
110 ],
111 'SecurityGroupIds': [
112 'sg-12345678'
113 ],
114 'SubnetIds': [
115 'subnet-12345678'
116 ],
117 'VPCId': 'vpc-12345678'
118 },
119 'Status': config_status
120 }
121 }
122 }
123
124
125 def get_domain_status(domain_name, deleted=False):
126 return {
127 'DomainStatus': {
128 'ARN': 'arn:aws:es:%s:%s:domain/%s' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID, domain_name),
129 'Created': True,
130 'Deleted': deleted,
131 'DomainId': '%s/%s' % (TEST_AWS_ACCOUNT_ID, domain_name),
132 'DomainName': domain_name,
133 'ElasticsearchClusterConfig': {
134 'DedicatedMasterCount': 1,
135 'DedicatedMasterEnabled': True,
136 'DedicatedMasterType': 'm3.medium.elasticsearch',
137 'InstanceCount': 1,
138 'InstanceType': 'm3.medium.elasticsearch',
139 'ZoneAwarenessEnabled': False
140 },
141 'ElasticsearchVersion': '6.7',
142 'Endpoint': aws_stack.get_elasticsearch_endpoint(domain_name),
143 'Processing': False,
144 'EBSOptions': {
145 'EBSEnabled': True,
146 'VolumeType': 'gp2',
147 'VolumeSize': 10,
148 'Iops': 0
149 },
150 }
151 }
152
153
154 @app.route('%s/domain' % API_PREFIX, methods=['GET'])
155 def list_domain_names():
156 result = {
157 'DomainNames': [{'DomainName': name} for name in ES_DOMAINS.keys()]
158 }
159 return jsonify(result)
160
161
162 @app.route('%s/es/domain' % API_PREFIX, methods=['POST'])
163 def create_domain():
164 data = json.loads(to_str(request.data))
165 domain_name = data['DomainName']
166 if domain_name in ES_DOMAINS:
167 return error_response(error_type='ResourceAlreadyExistsException')
168 ES_DOMAINS[domain_name] = data
169 result = get_domain_status(domain_name)
170 # record event
171 event_publisher.fire_event(event_publisher.EVENT_ES_CREATE_DOMAIN,
172 payload={'n': event_publisher.get_hash(domain_name)})
173 return jsonify(result)
174
175
176 @app.route('%s/es/domain/<domain_name>' % API_PREFIX, methods=['GET'])
177 def describe_domain(domain_name):
178 if domain_name not in ES_DOMAINS:
179 return error_response(error_type='ResourceNotFoundException')
180 result = get_domain_status(domain_name)
181 return jsonify(result)
182
183
184 @app.route('%s/es/domain/<domain_name>/config' % API_PREFIX, methods=['GET', 'POST'])
185 def domain_config(domain_name):
186 config = get_domain_config(domain_name)
187 return jsonify(config)
188
189
190 @app.route('%s/es/domain/<domain_name>' % API_PREFIX, methods=['DELETE'])
191 def delete_domain(domain_name):
192 if domain_name not in ES_DOMAINS:
193 return error_response(error_type='ResourceNotFoundException')
194 result = get_domain_status(domain_name, deleted=True)
195 ES_DOMAINS.pop(domain_name)
196 # record event
197 event_publisher.fire_event(event_publisher.EVENT_ES_DELETE_DOMAIN,
198 payload={'n': event_publisher.get_hash(domain_name)})
199 return jsonify(result)
200
201
202 @app.route('%s/tags' % API_PREFIX, methods=['GET', 'POST'])
203 def add_list_tags():
204 if request.method == 'GET' and request.args.get('arn'):
205 response = {
206 'TagList': [
207 {
208 'Key': 'Example1',
209 'Value': 'Value'
210 },
211 {
212 'Key': 'Example2',
213 'Value': 'Value'
214 }
215 ]
216 }
217 return jsonify(response)
218
219 return jsonify({})
220
221
222 def serve(port, quiet=True):
223 generic_proxy.serve_flask_app(app=app, port=port, quiet=quiet)
224
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/localstack/services/es/es_api.py b/localstack/services/es/es_api.py
--- a/localstack/services/es/es_api.py
+++ b/localstack/services/es/es_api.py
@@ -14,6 +14,7 @@
ES_DOMAINS = {}
app = Flask(APP_NAME)
+app.url_map.strict_slashes = False
def error_response(error_type, code=400, message='Unknown error.'):
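The one-line fix works because Werkzeug rules default to `strict_slashes=True`: a rule registered without a trailing slash (such as `'%s/tags' % API_PREFIX`) refuses a request that adds one, which is exactly the 404 Terraform hit. Setting `strict_slashes = False` on the app's URL map lets both URL forms dispatch to the same view. A standalone sketch of that behaviour — the handler body and test URLs are illustrative, not LocalStack code:

```python
from flask import Flask, jsonify

app = Flask(__name__)
# Same one-line change as the patch: relax trailing-slash matching for all rules.
app.url_map.strict_slashes = False


@app.route('/2015-01-01/tags', methods=['GET'])
def add_list_tags():
    return jsonify({'TagList': []})


if __name__ == '__main__':
    with app.test_client() as client:
        # Both requests now reach add_list_tags. Without strict_slashes=False the
        # second one would return 404, because Flask only auto-redirects when the
        # *rule* itself ends with a slash, not the other way around.
        print(client.get('/2015-01-01/tags?arn=arn:aws:es:us-east-1:000000000000:domain/example').status_code)
        print(client.get('/2015-01-01/tags/?arn=arn:aws:es:us-east-1:000000000000:domain/example').status_code)
```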
| {"golden_diff": "diff --git a/localstack/services/es/es_api.py b/localstack/services/es/es_api.py\n--- a/localstack/services/es/es_api.py\n+++ b/localstack/services/es/es_api.py\n@@ -14,6 +14,7 @@\n ES_DOMAINS = {}\n \n app = Flask(APP_NAME)\n+app.url_map.strict_slashes = False\n \n \n def error_response(error_type, code=400, message='Unknown error.'):\n", "issue": "Elasticsearch domain managed by Terraform cannot be updated; request for /tags/? returns 404\n<!-- Love localstack? Please consider supporting our collective:\r\n\ud83d\udc49 https://opencollective.com/localstack/donate -->\r\n\r\nWe use Terraform to create and update resources in Localstack, which has worked for services like S3 and Dynamo so far.\r\n\r\nWe hit an issue with Elasticsearch domains, where the domain is created successfully but Terraform fails to apply in subsequent runs, when it makes a request to:\r\n\r\n```\r\nlogs: ---[ REQUEST POST-SIGN ]-----------------------------\r\nlogs: GET /2015-01-01/tags/?arn=arn%3Aaws%3Aes%3Aus-east-1%3A000000000000%3Adomain%2Fepdam-local-amd HTTP/1.1\r\nlogs: Host: localhost:4578\r\nlogs: User-Agent: aws-sdk-go/1.14.31 (go1.9.2; darwin; amd64) APN/1.0 HashiCorp/1.0 Terraform/0.11.8-dev\r\nlogs: Authorization: AWS4-HMAC-SHA256 Credential=mock_access_key/20190221/us-west-2/es/aws4_request, SignedHeaders=host;x-amz-date, Signature=26f42429e2af2240466635ab9202c8888617afe9be7b8ef91a8831d6b4160bd1\r\nlogs: X-Amz-Date: 20190221T191447Z\r\nlogs: Accept-Encoding: gzip\r\n```\r\n\r\nand the response is:\r\n\r\n```\r\nlogs: ---[ RESPONSE ]--------------------------------------\r\nlogs: HTTP/1.0 404 NOT FOUND\r\nlogs: Connection: close\r\nlogs: Content-Length: 233\r\nlogs: Access-Control-Allow-Origin: *\r\nlogs: Content-Type: text/html\r\nlogs: Date: Thu, 21 Feb 2019 19:14:47 GMT\r\nlogs: Server: Werkzeug/0.14.1 Python/2.7.15\r\n```\r\n\r\nWhile a request to `localhost:4578/2015-01-01/tags/?arn=...` gets 404, a request to `localhost:4578/2015-01-01/tags?arn=...`, (without the `/` before the query params), is successful.\r\n\r\nThe reason we are reporting this against Localstack and not [terraform](https://github.com/hashicorp/terraform) or [terraform-provider-aws](https://github.com/terraform-providers/terraform-provider-aws) is that the AWS REST API apparently supports requests with slashes before query parameters, or else Terraform could not be used to manage Elasticsearch domains in AWS.\n", "before_files": [{"content": "import json\nimport time\nfrom random import randint\nfrom flask import Flask, jsonify, request, make_response\nfrom localstack.services import generic_proxy\nfrom localstack.utils.aws import aws_stack\nfrom localstack.constants import TEST_AWS_ACCOUNT_ID\nfrom localstack.utils.common import to_str\nfrom localstack.utils.analytics import event_publisher\n\nAPP_NAME = 'es_api'\nAPI_PREFIX = '/2015-01-01'\n\nES_DOMAINS = {}\n\napp = Flask(APP_NAME)\n\n\ndef error_response(error_type, code=400, message='Unknown error.'):\n if not message:\n if error_type == 'ResourceNotFoundException':\n message = 'Resource not found.'\n elif error_type == 'ResourceAlreadyExistsException':\n message = 'Resource already exists.'\n response = make_response(jsonify({'error': message}))\n response.headers['x-amzn-errortype'] = error_type\n return response, code\n\n\ndef get_domain_config_status():\n return {\n 'CreationDate': '%.2f' % time.time(),\n 'PendingDeletion': False,\n 'State': 'Active',\n 'UpdateDate': '%.2f' % time.time(),\n 'UpdateVersion': randint(1, 100)\n }\n\n\ndef 
get_domain_config(domain_name):\n config_status = get_domain_config_status()\n return {\n 'DomainConfig': {\n 'AccessPolicies': {\n 'Options': '{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::%s:root\"},\"Action\":\"es:*\",\"Resource\":\"arn:aws:es:%s:%s:domain/%s/*\"}]}' % (TEST_AWS_ACCOUNT_ID, aws_stack.get_region(), TEST_AWS_ACCOUNT_ID, domain_name), # noqa: E501\n 'Status': config_status\n },\n 'AdvancedOptions': {\n 'Options': {\n 'indices.fielddata.cache.size': '',\n 'rest.action.multi.allow_explicit_index': 'true'\n },\n 'Status': config_status\n },\n 'EBSOptions': {\n 'Options': {\n 'EBSEnabled': True,\n 'EncryptionEnabled': False,\n 'Iops': 0,\n 'VolumeSize': 10,\n 'VolumeType': 'gp2'\n },\n 'Status': config_status\n },\n 'ElasticsearchClusterConfig': {\n 'Options': {\n 'DedicatedMasterCount': 1,\n 'DedicatedMasterEnabled': True,\n 'DedicatedMasterType': 'm3.medium.elasticsearch',\n 'InstanceCount': 1,\n 'InstanceType': 'm3.medium.elasticsearch',\n 'ZoneAwarenessEnabled': False\n },\n 'Status': config_status\n },\n 'ElasticsearchVersion': {\n 'Options': '5.3',\n 'Status': config_status\n },\n 'EncryptionAtRestOptions': {\n 'Options': {\n 'Enabled': False,\n 'KmsKeyId': ''\n },\n 'Status': config_status\n },\n 'LogPublishingOptions': {\n 'Options': {\n 'INDEX_SLOW_LOGS': {\n 'CloudWatchLogsLogGroupArn': 'arn:aws:logs:%s:%s:log-group:sample-domain' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID), # noqa: E501\n 'Enabled': False\n },\n 'SEARCH_SLOW_LOGS': {\n 'CloudWatchLogsLogGroupArn': 'arn:aws:logs:%s:%s:log-group:sample-domain' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID), # noqa: E501\n 'Enabled': False,\n }\n },\n 'Status': config_status\n },\n 'SnapshotOptions': {\n 'Options': {\n 'AutomatedSnapshotStartHour': randint(0, 23)\n },\n 'Status': config_status\n },\n 'VPCOptions': {\n 'Options': {\n 'AvailabilityZones': [\n 'us-east-1b'\n ],\n 'SecurityGroupIds': [\n 'sg-12345678'\n ],\n 'SubnetIds': [\n 'subnet-12345678'\n ],\n 'VPCId': 'vpc-12345678'\n },\n 'Status': config_status\n }\n }\n }\n\n\ndef get_domain_status(domain_name, deleted=False):\n return {\n 'DomainStatus': {\n 'ARN': 'arn:aws:es:%s:%s:domain/%s' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID, domain_name),\n 'Created': True,\n 'Deleted': deleted,\n 'DomainId': '%s/%s' % (TEST_AWS_ACCOUNT_ID, domain_name),\n 'DomainName': domain_name,\n 'ElasticsearchClusterConfig': {\n 'DedicatedMasterCount': 1,\n 'DedicatedMasterEnabled': True,\n 'DedicatedMasterType': 'm3.medium.elasticsearch',\n 'InstanceCount': 1,\n 'InstanceType': 'm3.medium.elasticsearch',\n 'ZoneAwarenessEnabled': False\n },\n 'ElasticsearchVersion': '6.7',\n 'Endpoint': aws_stack.get_elasticsearch_endpoint(domain_name),\n 'Processing': False,\n 'EBSOptions': {\n 'EBSEnabled': True,\n 'VolumeType': 'gp2',\n 'VolumeSize': 10,\n 'Iops': 0\n },\n }\n }\n\n\[email protected]('%s/domain' % API_PREFIX, methods=['GET'])\ndef list_domain_names():\n result = {\n 'DomainNames': [{'DomainName': name} for name in ES_DOMAINS.keys()]\n }\n return jsonify(result)\n\n\[email protected]('%s/es/domain' % API_PREFIX, methods=['POST'])\ndef create_domain():\n data = json.loads(to_str(request.data))\n domain_name = data['DomainName']\n if domain_name in ES_DOMAINS:\n return error_response(error_type='ResourceAlreadyExistsException')\n ES_DOMAINS[domain_name] = data\n result = get_domain_status(domain_name)\n # record event\n event_publisher.fire_event(event_publisher.EVENT_ES_CREATE_DOMAIN,\n payload={'n': 
event_publisher.get_hash(domain_name)})\n return jsonify(result)\n\n\[email protected]('%s/es/domain/<domain_name>' % API_PREFIX, methods=['GET'])\ndef describe_domain(domain_name):\n if domain_name not in ES_DOMAINS:\n return error_response(error_type='ResourceNotFoundException')\n result = get_domain_status(domain_name)\n return jsonify(result)\n\n\[email protected]('%s/es/domain/<domain_name>/config' % API_PREFIX, methods=['GET', 'POST'])\ndef domain_config(domain_name):\n config = get_domain_config(domain_name)\n return jsonify(config)\n\n\[email protected]('%s/es/domain/<domain_name>' % API_PREFIX, methods=['DELETE'])\ndef delete_domain(domain_name):\n if domain_name not in ES_DOMAINS:\n return error_response(error_type='ResourceNotFoundException')\n result = get_domain_status(domain_name, deleted=True)\n ES_DOMAINS.pop(domain_name)\n # record event\n event_publisher.fire_event(event_publisher.EVENT_ES_DELETE_DOMAIN,\n payload={'n': event_publisher.get_hash(domain_name)})\n return jsonify(result)\n\n\[email protected]('%s/tags' % API_PREFIX, methods=['GET', 'POST'])\ndef add_list_tags():\n if request.method == 'GET' and request.args.get('arn'):\n response = {\n 'TagList': [\n {\n 'Key': 'Example1',\n 'Value': 'Value'\n },\n {\n 'Key': 'Example2',\n 'Value': 'Value'\n }\n ]\n }\n return jsonify(response)\n\n return jsonify({})\n\n\ndef serve(port, quiet=True):\n generic_proxy.serve_flask_app(app=app, port=port, quiet=quiet)\n", "path": "localstack/services/es/es_api.py"}], "after_files": [{"content": "import json\nimport time\nfrom random import randint\nfrom flask import Flask, jsonify, request, make_response\nfrom localstack.services import generic_proxy\nfrom localstack.utils.aws import aws_stack\nfrom localstack.constants import TEST_AWS_ACCOUNT_ID\nfrom localstack.utils.common import to_str\nfrom localstack.utils.analytics import event_publisher\n\nAPP_NAME = 'es_api'\nAPI_PREFIX = '/2015-01-01'\n\nES_DOMAINS = {}\n\napp = Flask(APP_NAME)\napp.url_map.strict_slashes = False\n\n\ndef error_response(error_type, code=400, message='Unknown error.'):\n if not message:\n if error_type == 'ResourceNotFoundException':\n message = 'Resource not found.'\n elif error_type == 'ResourceAlreadyExistsException':\n message = 'Resource already exists.'\n response = make_response(jsonify({'error': message}))\n response.headers['x-amzn-errortype'] = error_type\n return response, code\n\n\ndef get_domain_config_status():\n return {\n 'CreationDate': '%.2f' % time.time(),\n 'PendingDeletion': False,\n 'State': 'Active',\n 'UpdateDate': '%.2f' % time.time(),\n 'UpdateVersion': randint(1, 100)\n }\n\n\ndef get_domain_config(domain_name):\n config_status = get_domain_config_status()\n return {\n 'DomainConfig': {\n 'AccessPolicies': {\n 'Options': '{\"Version\":\"2012-10-17\",\"Statement\":[{\"Effect\":\"Allow\",\"Principal\":{\"AWS\":\"arn:aws:iam::%s:root\"},\"Action\":\"es:*\",\"Resource\":\"arn:aws:es:%s:%s:domain/%s/*\"}]}' % (TEST_AWS_ACCOUNT_ID, aws_stack.get_region(), TEST_AWS_ACCOUNT_ID, domain_name), # noqa: E501\n 'Status': config_status\n },\n 'AdvancedOptions': {\n 'Options': {\n 'indices.fielddata.cache.size': '',\n 'rest.action.multi.allow_explicit_index': 'true'\n },\n 'Status': config_status\n },\n 'EBSOptions': {\n 'Options': {\n 'EBSEnabled': True,\n 'EncryptionEnabled': False,\n 'Iops': 0,\n 'VolumeSize': 10,\n 'VolumeType': 'gp2'\n },\n 'Status': config_status\n },\n 'ElasticsearchClusterConfig': {\n 'Options': {\n 'DedicatedMasterCount': 1,\n 'DedicatedMasterEnabled': True,\n 
'DedicatedMasterType': 'm3.medium.elasticsearch',\n 'InstanceCount': 1,\n 'InstanceType': 'm3.medium.elasticsearch',\n 'ZoneAwarenessEnabled': False\n },\n 'Status': config_status\n },\n 'ElasticsearchVersion': {\n 'Options': '5.3',\n 'Status': config_status\n },\n 'EncryptionAtRestOptions': {\n 'Options': {\n 'Enabled': False,\n 'KmsKeyId': ''\n },\n 'Status': config_status\n },\n 'LogPublishingOptions': {\n 'Options': {\n 'INDEX_SLOW_LOGS': {\n 'CloudWatchLogsLogGroupArn': 'arn:aws:logs:%s:%s:log-group:sample-domain' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID), # noqa: E501\n 'Enabled': False\n },\n 'SEARCH_SLOW_LOGS': {\n 'CloudWatchLogsLogGroupArn': 'arn:aws:logs:%s:%s:log-group:sample-domain' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID), # noqa: E501\n 'Enabled': False,\n }\n },\n 'Status': config_status\n },\n 'SnapshotOptions': {\n 'Options': {\n 'AutomatedSnapshotStartHour': randint(0, 23)\n },\n 'Status': config_status\n },\n 'VPCOptions': {\n 'Options': {\n 'AvailabilityZones': [\n 'us-east-1b'\n ],\n 'SecurityGroupIds': [\n 'sg-12345678'\n ],\n 'SubnetIds': [\n 'subnet-12345678'\n ],\n 'VPCId': 'vpc-12345678'\n },\n 'Status': config_status\n }\n }\n }\n\n\ndef get_domain_status(domain_name, deleted=False):\n return {\n 'DomainStatus': {\n 'ARN': 'arn:aws:es:%s:%s:domain/%s' % (aws_stack.get_region(), TEST_AWS_ACCOUNT_ID, domain_name),\n 'Created': True,\n 'Deleted': deleted,\n 'DomainId': '%s/%s' % (TEST_AWS_ACCOUNT_ID, domain_name),\n 'DomainName': domain_name,\n 'ElasticsearchClusterConfig': {\n 'DedicatedMasterCount': 1,\n 'DedicatedMasterEnabled': True,\n 'DedicatedMasterType': 'm3.medium.elasticsearch',\n 'InstanceCount': 1,\n 'InstanceType': 'm3.medium.elasticsearch',\n 'ZoneAwarenessEnabled': False\n },\n 'ElasticsearchVersion': '6.7',\n 'Endpoint': aws_stack.get_elasticsearch_endpoint(domain_name),\n 'Processing': False,\n 'EBSOptions': {\n 'EBSEnabled': True,\n 'VolumeType': 'gp2',\n 'VolumeSize': 10,\n 'Iops': 0\n },\n }\n }\n\n\[email protected]('%s/domain' % API_PREFIX, methods=['GET'])\ndef list_domain_names():\n result = {\n 'DomainNames': [{'DomainName': name} for name in ES_DOMAINS.keys()]\n }\n return jsonify(result)\n\n\[email protected]('%s/es/domain' % API_PREFIX, methods=['POST'])\ndef create_domain():\n data = json.loads(to_str(request.data))\n domain_name = data['DomainName']\n if domain_name in ES_DOMAINS:\n return error_response(error_type='ResourceAlreadyExistsException')\n ES_DOMAINS[domain_name] = data\n result = get_domain_status(domain_name)\n # record event\n event_publisher.fire_event(event_publisher.EVENT_ES_CREATE_DOMAIN,\n payload={'n': event_publisher.get_hash(domain_name)})\n return jsonify(result)\n\n\[email protected]('%s/es/domain/<domain_name>' % API_PREFIX, methods=['GET'])\ndef describe_domain(domain_name):\n if domain_name not in ES_DOMAINS:\n return error_response(error_type='ResourceNotFoundException')\n result = get_domain_status(domain_name)\n return jsonify(result)\n\n\[email protected]('%s/es/domain/<domain_name>/config' % API_PREFIX, methods=['GET', 'POST'])\ndef domain_config(domain_name):\n config = get_domain_config(domain_name)\n return jsonify(config)\n\n\[email protected]('%s/es/domain/<domain_name>' % API_PREFIX, methods=['DELETE'])\ndef delete_domain(domain_name):\n if domain_name not in ES_DOMAINS:\n return error_response(error_type='ResourceNotFoundException')\n result = get_domain_status(domain_name, deleted=True)\n ES_DOMAINS.pop(domain_name)\n # record event\n 
event_publisher.fire_event(event_publisher.EVENT_ES_DELETE_DOMAIN,\n payload={'n': event_publisher.get_hash(domain_name)})\n return jsonify(result)\n\n\[email protected]('%s/tags' % API_PREFIX, methods=['GET', 'POST'])\ndef add_list_tags():\n if request.method == 'GET' and request.args.get('arn'):\n response = {\n 'TagList': [\n {\n 'Key': 'Example1',\n 'Value': 'Value'\n },\n {\n 'Key': 'Example2',\n 'Value': 'Value'\n }\n ]\n }\n return jsonify(response)\n\n return jsonify({})\n\n\ndef serve(port, quiet=True):\n generic_proxy.serve_flask_app(app=app, port=port, quiet=quiet)\n", "path": "localstack/services/es/es_api.py"}]} |
gh_patches_debug_111 | rasdani/github-patches | git_diff | pymedusa__Medusa-4086 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[APP SUBMITTED]: AttributeError: 'module' object has no attribute 'core'
### INFO
**Python Version**: `2.7.13 (v2.7.13:a06454b1afa1, Dec 17 2016, 20:53:40) [MSC v.1500 64 bit (AMD64)]`
**Operating System**: `Windows-7-6.1.7601-SP1`
**Locale**: `cp1252`
**Branch**: [master](../tree/master)
**Database**: `44.9`
**Commit**: pymedusa/Medusa@77b20916ab577b82112ebc624f816054788c63f2
**Link to Log**: https://gist.github.com/7f6099e1c66cecd44e069bffee91a3ac
### ERROR
<pre>
2018-04-23 16:08:49 ERROR Thread_18 :: [77b2091] Exception generated: 'module' object has no attribute 'core'
Traceback (most recent call last):
File "C:\Medusa\Medusa\medusa\server\web\core\base.py", line 285, in async_call
result = function(**kwargs)
File "C:\Medusa\Medusa\medusa\server\web\home\handler.py", line 357, in testGrowl
result = notifiers.growl_notifier.test_notify(host, password)
File "C:\Medusa\Medusa\medusa\notifiers\growl.py", line 22, in test_notify
self._sendRegistration(host, password)
File "C:\Medusa\Medusa\medusa\notifiers\growl.py", line 174, in _sendRegistration
register = gntp.core.GNTPRegister()
AttributeError: 'module' object has no attribute 'core'
</pre>
---
_STAFF NOTIFIED_: @pymedusa/support @pymedusa/moderators
--- END ISSUE ---
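As an illustration added for clarity (not part of the submitted report): the traceback is consistent with plain `import gntp` binding only the package object — a submodule such as `gntp.core` only becomes an attribute of the package if the package's `__init__` imports it or the submodule is imported explicitly. A self-contained sketch of that behaviour with a throwaway package (the layout below is hypothetical, chosen only to mirror the `gntp` / `gntp.core` shape):

```python
import os
import sys
import tempfile
import textwrap

# Build a throwaway package on disk:  demopkg/__init__.py (empty)  and  demopkg/core.py
root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "demopkg"))
open(os.path.join(root, "demopkg", "__init__.py"), "w").close()
with open(os.path.join(root, "demopkg", "core.py"), "w") as handle:
    handle.write(textwrap.dedent("""
        class Register(object):
            pass
    """))
sys.path.insert(0, root)

import demopkg                 # binds the package, but does not import demopkg.core
try:
    demopkg.core.Register()    # mirrors gntp.core.GNTPRegister() in the traceback
except AttributeError as exc:
    print(exc)                 # ... object has no attribute 'core' -- same failure as above

import demopkg.core            # an explicit submodule import makes the attribute resolve
print(demopkg.core.Register())
```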
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `medusa/notifiers/growl.py`
Content:
```
1 # coding=utf-8
2
3 from __future__ import print_function
4 from __future__ import unicode_literals
5
6 import logging
7 import socket
8 from builtins import object
9
10 import gntp
11
12 from medusa import app, common
13 from medusa.helper.exceptions import ex
14 from medusa.logger.adapters.style import BraceAdapter
15
16 log = BraceAdapter(logging.getLogger(__name__))
17 log.logger.addHandler(logging.NullHandler())
18
19
20 class Notifier(object):
21 def test_notify(self, host, password):
22 self._sendRegistration(host, password)
23 return self._sendGrowl('Test Growl', 'Testing Growl settings from Medusa', 'Test', host, password,
24 force=True)
25
26 def notify_snatch(self, ep_name, is_proper):
27 if app.GROWL_NOTIFY_ONSNATCH:
28 self._sendGrowl(
29 common.notifyStrings[
30 (common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]
31 ], ep_name)
32
33 def notify_download(self, ep_name):
34 if app.GROWL_NOTIFY_ONDOWNLOAD:
35 self._sendGrowl(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_name)
36
37 def notify_subtitle_download(self, ep_name, lang):
38 if app.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD:
39 self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ': ' + lang)
40
41 def notify_git_update(self, new_version='??'):
42 update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT]
43 title = common.notifyStrings[common.NOTIFY_GIT_UPDATE]
44 self._sendGrowl(title, update_text + new_version)
45
46 def notify_login(self, ipaddress=''):
47 update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT]
48 title = common.notifyStrings[common.NOTIFY_LOGIN]
49 self._sendGrowl(title, update_text.format(ipaddress))
50
51 def _send_growl(self, options, message=None):
52
53 # Initialize Notification
54 notice = gntp.core.GNTPNotice(
55 app=options['app'],
56 name=options['name'],
57 title=options['title'],
58 password=options['password'],
59 )
60
61 # Optional
62 if options['sticky']:
63 notice.add_header('Notification-Sticky', options['sticky'])
64 if options['priority']:
65 notice.add_header('Notification-Priority', options['priority'])
66 if options['icon']:
67 notice.add_header('Notification-Icon', app.LOGO_URL)
68
69 if message:
70 notice.add_header('Notification-Text', message)
71
72 response = self._send(options['host'], options['port'], notice.encode(), options['debug'])
73 return True if isinstance(response, gntp.core.GNTPOK) else False
74
75 @staticmethod
76 def _send(host, port, data, debug=False):
77 if debug:
78 print('<Sending>\n', data, '\n</Sending>')
79
80 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
81 s.connect((host, port))
82 s.send(data)
83 response = gntp.core.parse_gntp(s.recv(1024))
84 s.close()
85
86 if debug:
87 print('<Received>\n', response, '\n</Received>')
88
89 return response
90
91 def _sendGrowl(self, title='Medusa Notification', message=None, name=None, host=None, password=None,
92 force=False):
93 if not app.USE_GROWL and not force:
94 return False
95
96 if name is None:
97 name = title
98
99 if host is None:
100 hostParts = app.GROWL_HOST.split(':')
101 else:
102 hostParts = host.split(':')
103
104 if len(hostParts) != 2 or hostParts[1] == '':
105 port = 23053
106 else:
107 port = int(hostParts[1])
108
109 growlHosts = [(hostParts[0], port)]
110
111 opts = {
112 'name': name,
113 'title': title,
114 'app': 'Medusa',
115 'sticky': None,
116 'priority': None,
117 'debug': False
118 }
119
120 if password is None:
121 opts['password'] = app.GROWL_PASSWORD
122 else:
123 opts['password'] = password
124
125 opts['icon'] = True
126
127 for pc in growlHosts:
128 opts['host'] = pc[0]
129 opts['port'] = pc[1]
130 log.debug(
131 u'GROWL: Sending growl to {host}:{port} - {msg!r}',
132 {'msg': message, 'host': opts['host'], 'port': opts['port']}
133 )
134 try:
135 if self._send_growl(opts, message):
136 return True
137 else:
138 if self._sendRegistration(host, password):
139 return self._send_growl(opts, message)
140 else:
141 return False
142 except Exception as error:
143 log.warning(
144 u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',
145 {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}
146 )
147 return False
148
149 def _sendRegistration(self, host=None, password=None):
150 opts = {}
151
152 if host is None:
153 hostParts = app.GROWL_HOST.split(':')
154 else:
155 hostParts = host.split(':')
156
157 if len(hostParts) != 2 or hostParts[1] == '':
158 port = 23053
159 else:
160 port = int(hostParts[1])
161
162 opts['host'] = hostParts[0]
163 opts['port'] = port
164
165 if password is None:
166 opts['password'] = app.GROWL_PASSWORD
167 else:
168 opts['password'] = password
169
170 opts['app'] = 'Medusa'
171 opts['debug'] = False
172
173 # Send Registration
174 register = gntp.core.GNTPRegister()
175 register.add_header('Application-Name', opts['app'])
176 register.add_header('Application-Icon', app.LOGO_URL)
177
178 register.add_notification('Test', True)
179 register.add_notification(common.notifyStrings[common.NOTIFY_SNATCH], True)
180 register.add_notification(common.notifyStrings[common.NOTIFY_DOWNLOAD], True)
181 register.add_notification(common.notifyStrings[common.NOTIFY_GIT_UPDATE], True)
182
183 if opts['password']:
184 register.set_password(opts['password'])
185
186 try:
187 return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])
188 except Exception as error:
189 log.warning(
190 u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',
191 {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}
192 )
193 return False
194
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/medusa/notifiers/growl.py b/medusa/notifiers/growl.py
--- a/medusa/notifiers/growl.py
+++ b/medusa/notifiers/growl.py
@@ -7,7 +7,7 @@
import socket
from builtins import object
-import gntp
+import gntp.core
from medusa import app, common
from medusa.helper.exceptions import ex
| {"golden_diff": "diff --git a/medusa/notifiers/growl.py b/medusa/notifiers/growl.py\n--- a/medusa/notifiers/growl.py\n+++ b/medusa/notifiers/growl.py\n@@ -7,7 +7,7 @@\n import socket\n from builtins import object\n \n-import gntp\n+import gntp.core\n \n from medusa import app, common\n from medusa.helper.exceptions import ex\n", "issue": "[APP SUBMITTED]: AttributeError: 'module' object has no attribute 'core'\n\n### INFO\n**Python Version**: `2.7.13 (v2.7.13:a06454b1afa1, Dec 17 2016, 20:53:40) [MSC v.1500 64 bit (AMD64)]`\n**Operating System**: `Windows-7-6.1.7601-SP1`\n**Locale**: `cp1252`\n**Branch**: [master](../tree/master)\n**Database**: `44.9`\n**Commit**: pymedusa/Medusa@77b20916ab577b82112ebc624f816054788c63f2\n**Link to Log**: https://gist.github.com/7f6099e1c66cecd44e069bffee91a3ac\n### ERROR\n<pre>\n2018-04-23 16:08:49 ERROR Thread_18 :: [77b2091] Exception generated: 'module' object has no attribute 'core'\nTraceback (most recent call last):\n File \"C:\\Medusa\\Medusa\\medusa\\server\\web\\core\\base.py\", line 285, in async_call\n result = function(**kwargs)\n File \"C:\\Medusa\\Medusa\\medusa\\server\\web\\home\\handler.py\", line 357, in testGrowl\n result = notifiers.growl_notifier.test_notify(host, password)\n File \"C:\\Medusa\\Medusa\\medusa\\notifiers\\growl.py\", line 22, in test_notify\n self._sendRegistration(host, password)\n File \"C:\\Medusa\\Medusa\\medusa\\notifiers\\growl.py\", line 174, in _sendRegistration\n register = gntp.core.GNTPRegister()\nAttributeError: 'module' object has no attribute 'core'\n</pre>\n---\n_STAFF NOTIFIED_: @pymedusa/support @pymedusa/moderators\n\n", "before_files": [{"content": "# coding=utf-8\n\nfrom __future__ import print_function\nfrom __future__ import unicode_literals\n\nimport logging\nimport socket\nfrom builtins import object\n\nimport gntp\n\nfrom medusa import app, common\nfrom medusa.helper.exceptions import ex\nfrom medusa.logger.adapters.style import BraceAdapter\n\nlog = BraceAdapter(logging.getLogger(__name__))\nlog.logger.addHandler(logging.NullHandler())\n\n\nclass Notifier(object):\n def test_notify(self, host, password):\n self._sendRegistration(host, password)\n return self._sendGrowl('Test Growl', 'Testing Growl settings from Medusa', 'Test', host, password,\n force=True)\n\n def notify_snatch(self, ep_name, is_proper):\n if app.GROWL_NOTIFY_ONSNATCH:\n self._sendGrowl(\n common.notifyStrings[\n (common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]\n ], ep_name)\n\n def notify_download(self, ep_name):\n if app.GROWL_NOTIFY_ONDOWNLOAD:\n self._sendGrowl(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_name)\n\n def notify_subtitle_download(self, ep_name, lang):\n if app.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD:\n self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ': ' + lang)\n\n def notify_git_update(self, new_version='??'):\n update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT]\n title = common.notifyStrings[common.NOTIFY_GIT_UPDATE]\n self._sendGrowl(title, update_text + new_version)\n\n def notify_login(self, ipaddress=''):\n update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT]\n title = common.notifyStrings[common.NOTIFY_LOGIN]\n self._sendGrowl(title, update_text.format(ipaddress))\n\n def _send_growl(self, options, message=None):\n\n # Initialize Notification\n notice = gntp.core.GNTPNotice(\n app=options['app'],\n name=options['name'],\n title=options['title'],\n password=options['password'],\n )\n\n # Optional\n if options['sticky']:\n 
notice.add_header('Notification-Sticky', options['sticky'])\n if options['priority']:\n notice.add_header('Notification-Priority', options['priority'])\n if options['icon']:\n notice.add_header('Notification-Icon', app.LOGO_URL)\n\n if message:\n notice.add_header('Notification-Text', message)\n\n response = self._send(options['host'], options['port'], notice.encode(), options['debug'])\n return True if isinstance(response, gntp.core.GNTPOK) else False\n\n @staticmethod\n def _send(host, port, data, debug=False):\n if debug:\n print('<Sending>\\n', data, '\\n</Sending>')\n\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((host, port))\n s.send(data)\n response = gntp.core.parse_gntp(s.recv(1024))\n s.close()\n\n if debug:\n print('<Received>\\n', response, '\\n</Received>')\n\n return response\n\n def _sendGrowl(self, title='Medusa Notification', message=None, name=None, host=None, password=None,\n force=False):\n if not app.USE_GROWL and not force:\n return False\n\n if name is None:\n name = title\n\n if host is None:\n hostParts = app.GROWL_HOST.split(':')\n else:\n hostParts = host.split(':')\n\n if len(hostParts) != 2 or hostParts[1] == '':\n port = 23053\n else:\n port = int(hostParts[1])\n\n growlHosts = [(hostParts[0], port)]\n\n opts = {\n 'name': name,\n 'title': title,\n 'app': 'Medusa',\n 'sticky': None,\n 'priority': None,\n 'debug': False\n }\n\n if password is None:\n opts['password'] = app.GROWL_PASSWORD\n else:\n opts['password'] = password\n\n opts['icon'] = True\n\n for pc in growlHosts:\n opts['host'] = pc[0]\n opts['port'] = pc[1]\n log.debug(\n u'GROWL: Sending growl to {host}:{port} - {msg!r}',\n {'msg': message, 'host': opts['host'], 'port': opts['port']}\n )\n try:\n if self._send_growl(opts, message):\n return True\n else:\n if self._sendRegistration(host, password):\n return self._send_growl(opts, message)\n else:\n return False\n except Exception as error:\n log.warning(\n u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',\n {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}\n )\n return False\n\n def _sendRegistration(self, host=None, password=None):\n opts = {}\n\n if host is None:\n hostParts = app.GROWL_HOST.split(':')\n else:\n hostParts = host.split(':')\n\n if len(hostParts) != 2 or hostParts[1] == '':\n port = 23053\n else:\n port = int(hostParts[1])\n\n opts['host'] = hostParts[0]\n opts['port'] = port\n\n if password is None:\n opts['password'] = app.GROWL_PASSWORD\n else:\n opts['password'] = password\n\n opts['app'] = 'Medusa'\n opts['debug'] = False\n\n # Send Registration\n register = gntp.core.GNTPRegister()\n register.add_header('Application-Name', opts['app'])\n register.add_header('Application-Icon', app.LOGO_URL)\n\n register.add_notification('Test', True)\n register.add_notification(common.notifyStrings[common.NOTIFY_SNATCH], True)\n register.add_notification(common.notifyStrings[common.NOTIFY_DOWNLOAD], True)\n register.add_notification(common.notifyStrings[common.NOTIFY_GIT_UPDATE], True)\n\n if opts['password']:\n register.set_password(opts['password'])\n\n try:\n return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])\n except Exception as error:\n log.warning(\n u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',\n {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}\n )\n return False\n", "path": "medusa/notifiers/growl.py"}], "after_files": [{"content": "# coding=utf-8\n\nfrom __future__ import print_function\nfrom __future__ import 
unicode_literals\n\nimport logging\nimport socket\nfrom builtins import object\n\nimport gntp.core\n\nfrom medusa import app, common\nfrom medusa.helper.exceptions import ex\nfrom medusa.logger.adapters.style import BraceAdapter\n\nlog = BraceAdapter(logging.getLogger(__name__))\nlog.logger.addHandler(logging.NullHandler())\n\n\nclass Notifier(object):\n def test_notify(self, host, password):\n self._sendRegistration(host, password)\n return self._sendGrowl('Test Growl', 'Testing Growl settings from Medusa', 'Test', host, password,\n force=True)\n\n def notify_snatch(self, ep_name, is_proper):\n if app.GROWL_NOTIFY_ONSNATCH:\n self._sendGrowl(\n common.notifyStrings[\n (common.NOTIFY_SNATCH, common.NOTIFY_SNATCH_PROPER)[is_proper]\n ], ep_name)\n\n def notify_download(self, ep_name):\n if app.GROWL_NOTIFY_ONDOWNLOAD:\n self._sendGrowl(common.notifyStrings[common.NOTIFY_DOWNLOAD], ep_name)\n\n def notify_subtitle_download(self, ep_name, lang):\n if app.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD:\n self._sendGrowl(common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], ep_name + ': ' + lang)\n\n def notify_git_update(self, new_version='??'):\n update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT]\n title = common.notifyStrings[common.NOTIFY_GIT_UPDATE]\n self._sendGrowl(title, update_text + new_version)\n\n def notify_login(self, ipaddress=''):\n update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT]\n title = common.notifyStrings[common.NOTIFY_LOGIN]\n self._sendGrowl(title, update_text.format(ipaddress))\n\n def _send_growl(self, options, message=None):\n\n # Initialize Notification\n notice = gntp.core.GNTPNotice(\n app=options['app'],\n name=options['name'],\n title=options['title'],\n password=options['password'],\n )\n\n # Optional\n if options['sticky']:\n notice.add_header('Notification-Sticky', options['sticky'])\n if options['priority']:\n notice.add_header('Notification-Priority', options['priority'])\n if options['icon']:\n notice.add_header('Notification-Icon', app.LOGO_URL)\n\n if message:\n notice.add_header('Notification-Text', message)\n\n response = self._send(options['host'], options['port'], notice.encode(), options['debug'])\n return True if isinstance(response, gntp.core.GNTPOK) else False\n\n @staticmethod\n def _send(host, port, data, debug=False):\n if debug:\n print('<Sending>\\n', data, '\\n</Sending>')\n\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.connect((host, port))\n s.send(data)\n response = gntp.core.parse_gntp(s.recv(1024))\n s.close()\n\n if debug:\n print('<Received>\\n', response, '\\n</Received>')\n\n return response\n\n def _sendGrowl(self, title='Medusa Notification', message=None, name=None, host=None, password=None,\n force=False):\n if not app.USE_GROWL and not force:\n return False\n\n if name is None:\n name = title\n\n if host is None:\n hostParts = app.GROWL_HOST.split(':')\n else:\n hostParts = host.split(':')\n\n if len(hostParts) != 2 or hostParts[1] == '':\n port = 23053\n else:\n port = int(hostParts[1])\n\n growlHosts = [(hostParts[0], port)]\n\n opts = {\n 'name': name,\n 'title': title,\n 'app': 'Medusa',\n 'sticky': None,\n 'priority': None,\n 'debug': False\n }\n\n if password is None:\n opts['password'] = app.GROWL_PASSWORD\n else:\n opts['password'] = password\n\n opts['icon'] = True\n\n for pc in growlHosts:\n opts['host'] = pc[0]\n opts['port'] = pc[1]\n log.debug(\n u'GROWL: Sending growl to {host}:{port} - {msg!r}',\n {'msg': message, 'host': opts['host'], 'port': opts['port']}\n )\n try:\n if 
self._send_growl(opts, message):\n return True\n else:\n if self._sendRegistration(host, password):\n return self._send_growl(opts, message)\n else:\n return False\n except Exception as error:\n log.warning(\n u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',\n {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}\n )\n return False\n\n def _sendRegistration(self, host=None, password=None):\n opts = {}\n\n if host is None:\n hostParts = app.GROWL_HOST.split(':')\n else:\n hostParts = host.split(':')\n\n if len(hostParts) != 2 or hostParts[1] == '':\n port = 23053\n else:\n port = int(hostParts[1])\n\n opts['host'] = hostParts[0]\n opts['port'] = port\n\n if password is None:\n opts['password'] = app.GROWL_PASSWORD\n else:\n opts['password'] = password\n\n opts['app'] = 'Medusa'\n opts['debug'] = False\n\n # Send Registration\n register = gntp.core.GNTPRegister()\n register.add_header('Application-Name', opts['app'])\n register.add_header('Application-Icon', app.LOGO_URL)\n\n register.add_notification('Test', True)\n register.add_notification(common.notifyStrings[common.NOTIFY_SNATCH], True)\n register.add_notification(common.notifyStrings[common.NOTIFY_DOWNLOAD], True)\n register.add_notification(common.notifyStrings[common.NOTIFY_GIT_UPDATE], True)\n\n if opts['password']:\n register.set_password(opts['password'])\n\n try:\n return self._send(opts['host'], opts['port'], register.encode(), opts['debug'])\n except Exception as error:\n log.warning(\n u'GROWL: Unable to send growl to {host}:{port} - {msg!r}',\n {'msg': ex(error), 'host': opts['host'], 'port': opts['port']}\n )\n return False\n", "path": "medusa/notifiers/growl.py"}]} |
gh_patches_debug_112 | rasdani/github-patches | git_diff | encode__httpx-589 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AttributeError: module 'rfc3986.api' has no attribute 'iri_reference'
Just did a pip install of httpx, and it appeared to meet all the pre-reqs. However, I encountered the error in the title.
(python37) $ pip install httpx
Collecting httpx
Using cached https://files.pythonhosted.org/packages/15/fb/81861a0dbe87a0de57491b1b587419845f9a99e5b3d48e56cd440b1fcb68/httpx-0.7.6-py2.py3-none-any.whl
Requirement already satisfied: certifi in .../envs/python37/lib/python3.7/site-packages (from httpx) (2019.6.16)
Requirement already satisfied: h2==3.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (3.1.0)
Requirement already satisfied: chardet==3.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (3.0.4)
Requirement already satisfied: h11==0.8.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (0.8.1)
Requirement already satisfied: rfc3986==1.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (1.2.0)
Requirement already satisfied: idna==2.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (2.8)
Collecting hstspreload>=2019.8.27 (from httpx)
Downloading https://files.pythonhosted.org/packages/55/77/e0031fbb2930891db135af5a53afdf5567ca6f1b1ff55bf231ed5330f66d/hstspreload-2019.11.4.tar.gz (701kB)
|████████████████████████████████| 706kB 740kB/s
Requirement already satisfied: hpack<4,>=2.3 in .../envs/python37/lib/python3.7/site-packages (from h2==3.*->httpx) (3.0.0)
Requirement already satisfied: hyperframe<6,>=5.2.0 in .../envs/python37/lib/python3.7/site-packages (from h2==3.*->httpx) (5.2.0)
Building wheels for collected packages: hstspreload
Building wheel for hstspreload (setup.py) ... done
Created wheel for hstspreload: filename=hstspreload-2019.11.4-cp37-none-any.whl size=704190 sha256=ac82e6b8abe795fa1a179318d64d73660fa3024715b04b7db0746971c3e5a87b
Stored in directory: .../Library/Caches/pip/wheels/49/f1/9d/dc01d2c108593d8d2c32d00cdd9daf595e6ad4a727d57b7033
Successfully built hstspreload
Installing collected packages: hstspreload, httpx
Successfully installed hstspreload-2019.11.4 httpx-0.7.6
Manually upgrading rfc3986 to 1.3.2 resolved the issue. Seems the installer should be checking for rfc3986==1.3.2 or greater?
--- END ISSUE ---
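An aside added for clarity (not part of the reported issue): the pip output above shows `rfc3986==1.*` being satisfied by an already-installed 1.2.0 — apparently too old to expose `iri_reference` — while the reporter's manual upgrade to 1.3.2 works. A minimal sketch of how the two styles of specifier treat those versions; it assumes the `packaging` library is installed, and treating 1.3 as the first adequate release is an inference from the report, not something verified here:

```python
# pip install packaging   (pip itself vendors this same version/specifier logic)
from packaging.specifiers import SpecifierSet
from packaging.version import Version

already_installed = Version("1.2.0")   # what pip reported as "already satisfied"
known_good = Version("1.3.2")          # what the reporter upgraded to by hand

declared = SpecifierSet("==1.*")       # the requirement httpx 0.7.6 declared
bounded = SpecifierSet(">=1.3,<2")     # a lower-bounded alternative

print(already_installed in declared)   # True  -> pip keeps 1.2.0 and the import fails
print(already_installed in bounded)    # False -> pip would be forced to upgrade
print(known_good in bounded)           # True
```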
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3
4 import re
5 from pathlib import Path
6
7 from setuptools import setup
8
9
10 def get_version(package):
11 """
12 Return package version as listed in `__version__` in `init.py`.
13 """
14 version = Path(package, "__version__.py").read_text()
15 return re.search("__version__ = ['\"]([^'\"]+)['\"]", version).group(1)
16
17
18 def get_long_description():
19 """
20 Return the README.
21 """
22 long_description = ""
23 with open("README.md", encoding="utf8") as f:
24 long_description += f.read()
25 long_description += "\n\n"
26 with open("CHANGELOG.md", encoding="utf8") as f:
27 long_description += f.read()
28 return long_description
29
30
31 def get_packages(package):
32 """
33 Return root package and all sub-packages.
34 """
35 return [str(path.parent) for path in Path(package).glob("**/__init__.py")]
36
37
38 setup(
39 name="httpx",
40 python_requires=">=3.6",
41 version=get_version("httpx"),
42 url="https://github.com/encode/httpx",
43 license="BSD",
44 description="The next generation HTTP client.",
45 long_description=get_long_description(),
46 long_description_content_type="text/markdown",
47 author="Tom Christie",
48 author_email="[email protected]",
49 package_data={"httpx": ["py.typed"]},
50 packages=get_packages("httpx"),
51 include_package_data=True,
52 zip_safe=False,
53 install_requires=[
54 "certifi",
55 "hstspreload",
56 "chardet==3.*",
57 "h11==0.8.*",
58 "h2==3.*",
59 "idna==2.*",
60 "rfc3986==1.*",
61 "sniffio==1.*",
62 ],
63 classifiers=[
64 "Development Status :: 3 - Alpha",
65 "Environment :: Web Environment",
66 "Intended Audience :: Developers",
67 "License :: OSI Approved :: BSD License",
68 "Operating System :: OS Independent",
69 "Topic :: Internet :: WWW/HTTP",
70 "Framework :: AsyncIO",
71 "Framework :: Trio",
72 "Programming Language :: Python :: 3",
73 "Programming Language :: Python :: 3.6",
74 "Programming Language :: Python :: 3.7",
75 "Programming Language :: Python :: 3.8",
76 ],
77 )
78
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -57,7 +57,7 @@
"h11==0.8.*",
"h2==3.*",
"idna==2.*",
- "rfc3986==1.*",
+ "rfc3986>=1.3,<2",
"sniffio==1.*",
],
classifiers=[
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -57,7 +57,7 @@\n \"h11==0.8.*\",\n \"h2==3.*\",\n \"idna==2.*\",\n- \"rfc3986==1.*\",\n+ \"rfc3986>=1.3,<2\",\n \"sniffio==1.*\",\n ],\n classifiers=[\n", "issue": "AttributeError: module 'rfc3986.api' has no attribute 'iri_reference'\nJust did a pip install of httpx, and it appeared to meet all the pre-reqs. However, I'd encounter the error in the title.\r\n\r\n\r\n(python37) $ pip install httpx\r\nCollecting httpx\r\n Using cached https://files.pythonhosted.org/packages/15/fb/81861a0dbe87a0de57491b1b587419845f9a99e5b3d48e56cd440b1fcb68/httpx-0.7.6-py2.py3-none-any.whl\r\nRequirement already satisfied: certifi in .../envs/python37/lib/python3.7/site-packages (from httpx) (2019.6.16)\r\nRequirement already satisfied: h2==3.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (3.1.0)\r\nRequirement already satisfied: chardet==3.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (3.0.4)\r\nRequirement already satisfied: h11==0.8.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (0.8.1)\r\nRequirement already satisfied: rfc3986==1.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (1.2.0)\r\nRequirement already satisfied: idna==2.* in .../envs/python37/lib/python3.7/site-packages (from httpx) (2.8)\r\nCollecting hstspreload>=2019.8.27 (from httpx)\r\n Downloading https://files.pythonhosted.org/packages/55/77/e0031fbb2930891db135af5a53afdf5567ca6f1b1ff55bf231ed5330f66d/hstspreload-2019.11.4.tar.gz (701kB)\r\n |\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 706kB 740kB/s \r\nRequirement already satisfied: hpack<4,>=2.3 in .../envs/python37/lib/python3.7/site-packages (from h2==3.*->httpx) (3.0.0)\r\nRequirement already satisfied: hyperframe<6,>=5.2.0 in .../envs/python37/lib/python3.7/site-packages (from h2==3.*->httpx) (5.2.0)\r\nBuilding wheels for collected packages: hstspreload\r\n Building wheel for hstspreload (setup.py) ... done\r\n Created wheel for hstspreload: filename=hstspreload-2019.11.4-cp37-none-any.whl size=704190 sha256=ac82e6b8abe795fa1a179318d64d73660fa3024715b04b7db0746971c3e5a87b\r\n Stored in directory: .../Library/Caches/pip/wheels/49/f1/9d/dc01d2c108593d8d2c32d00cdd9daf595e6ad4a727d57b7033\r\nSuccessfully built hstspreload\r\nInstalling collected packages: hstspreload, httpx\r\nSuccessfully installed hstspreload-2019.11.4 httpx-0.7.6\r\n\r\nManually upgrading rfc3986 to 1.3.2 resolved the issue. Seems the installer should be checking for rfc3986==1.3.2 or greater? 
\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport re\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n version = Path(package, \"__version__.py\").read_text()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", version).group(1)\n\n\ndef get_long_description():\n \"\"\"\n Return the README.\n \"\"\"\n long_description = \"\"\n with open(\"README.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n long_description += \"\\n\\n\"\n with open(\"CHANGELOG.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n return long_description\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [str(path.parent) for path in Path(package).glob(\"**/__init__.py\")]\n\n\nsetup(\n name=\"httpx\",\n python_requires=\">=3.6\",\n version=get_version(\"httpx\"),\n url=\"https://github.com/encode/httpx\",\n license=\"BSD\",\n description=\"The next generation HTTP client.\",\n long_description=get_long_description(),\n long_description_content_type=\"text/markdown\",\n author=\"Tom Christie\",\n author_email=\"[email protected]\",\n package_data={\"httpx\": [\"py.typed\"]},\n packages=get_packages(\"httpx\"),\n include_package_data=True,\n zip_safe=False,\n install_requires=[\n \"certifi\",\n \"hstspreload\",\n \"chardet==3.*\",\n \"h11==0.8.*\",\n \"h2==3.*\",\n \"idna==2.*\",\n \"rfc3986==1.*\",\n \"sniffio==1.*\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Environment :: Web Environment\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Framework :: AsyncIO\",\n \"Framework :: Trio\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport re\nfrom pathlib import Path\n\nfrom setuptools import setup\n\n\ndef get_version(package):\n \"\"\"\n Return package version as listed in `__version__` in `init.py`.\n \"\"\"\n version = Path(package, \"__version__.py\").read_text()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", version).group(1)\n\n\ndef get_long_description():\n \"\"\"\n Return the README.\n \"\"\"\n long_description = \"\"\n with open(\"README.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n long_description += \"\\n\\n\"\n with open(\"CHANGELOG.md\", encoding=\"utf8\") as f:\n long_description += f.read()\n return long_description\n\n\ndef get_packages(package):\n \"\"\"\n Return root package and all sub-packages.\n \"\"\"\n return [str(path.parent) for path in Path(package).glob(\"**/__init__.py\")]\n\n\nsetup(\n name=\"httpx\",\n python_requires=\">=3.6\",\n version=get_version(\"httpx\"),\n url=\"https://github.com/encode/httpx\",\n license=\"BSD\",\n description=\"The next generation HTTP client.\",\n long_description=get_long_description(),\n long_description_content_type=\"text/markdown\",\n author=\"Tom Christie\",\n author_email=\"[email protected]\",\n package_data={\"httpx\": [\"py.typed\"]},\n packages=get_packages(\"httpx\"),\n include_package_data=True,\n zip_safe=False,\n install_requires=[\n \"certifi\",\n \"hstspreload\",\n 
\"chardet==3.*\",\n \"h11==0.8.*\",\n \"h2==3.*\",\n \"idna==2.*\",\n \"rfc3986>=1.3,<2\",\n \"sniffio==1.*\",\n ],\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Environment :: Web Environment\",\n \"Intended Audience :: Developers\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n \"Topic :: Internet :: WWW/HTTP\",\n \"Framework :: AsyncIO\",\n \"Framework :: Trio\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n ],\n)\n", "path": "setup.py"}]} |
gh_patches_debug_113 | rasdani/github-patches | git_diff | getredash__redash-740 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Alert send Error (AttributeError: 'Organization' object has no attribute 'domain')
Hello.
Sending an alert fails with the error below.
```
01:51:24 worker.1 | [2016-01-07 01:51:24,764: ERROR/MainProcess] Task redash.tasks.check_alerts_for_query[6fd2a1aa-bb2b-4054-a6a1-2487c0bae30c] raised unexpected: AttributeError("'Organization' object has no attribute 'domain'",)
01:51:24 worker.1 | Traceback (most recent call last):
01:51:24 worker.1 | File "/usr/local/lib/python2.7/dist-packages/celery/app/trace.py", line 240, in trace_task
01:51:24 worker.1 | R = retval = fun(*args, **kwargs)
01:51:24 worker.1 | File "/opt/redash/current/redash/tasks.py", line 31, in __call__
01:51:24 worker.1 | return super(BaseTask, self).__call__(*args, **kwargs)
01:51:24 worker.1 | File "/usr/local/lib/python2.7/dist-packages/celery/app/trace.py", line 437, in __protected_call__
01:51:24 worker.1 | return self.run(*args, **kwargs)
01:51:24 worker.1 | File "/opt/redash/current/redash/tasks.py", line 338, in check_alerts_for_query
01:51:24 worker.1 | """.format(host=base_url(alert.query.org), alert_id=alert.id, query_id=query.id)
01:51:24 worker.1 | File "/opt/redash/current/redash/tasks.py", line 309, in base_url
01:51:24 worker.1 | if org.domain:
01:51:24 worker.1 | AttributeError: 'Organization' object has no attribute 'domain'
```
--- END ISSUE ---
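A minimal reproduction added for clarity (the class below is a stand-in, not the real peewee model from `redash.models`): the helper dereferences `org.domain`, but the Organization model evidently defines no such attribute, so the lookup raises instead of returning an empty value:

```python
class Organization(object):
    """Stand-in for redash.models.Organization; note it has no 'domain' attribute."""
    def __init__(self, slug):
        self.slug = slug


def base_url(org):
    # Same shape as the failing helper in redash/tasks.py
    if org.domain:                        # raises AttributeError
        return 'https://{}'.format(org.domain)
    return 'localhost'                    # stands in for settings.HOST


try:
    print(base_url(Organization(slug='default')))
except AttributeError as exc:
    print(exc)                            # 'Organization' object has no attribute 'domain'
```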
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `redash/tasks.py`
Content:
```
1 import datetime
2 import time
3 import logging
4 import signal
5 from flask.ext.mail import Message
6 import redis
7 import hipchat
8 import requests
9 from redash.utils import json_dumps
10 from requests.auth import HTTPBasicAuth
11 from celery import Task
12 from celery.result import AsyncResult
13 from celery.utils.log import get_task_logger
14 from redash import redis_connection, models, statsd_client, settings, utils, mail
15 from redash.utils import gen_query_hash
16 from redash.worker import celery
17 from redash.query_runner import get_query_runner, InterruptException
18 from version_check import run_version_check
19
20 logger = get_task_logger(__name__)
21
22
23 class BaseTask(Task):
24 abstract = True
25
26 def after_return(self, *args, **kwargs):
27 models.db.close_db(None)
28
29 def __call__(self, *args, **kwargs):
30 models.db.connect_db()
31 return super(BaseTask, self).__call__(*args, **kwargs)
32
33
34 class QueryTask(object):
35 MAX_RETRIES = 5
36
37 # TODO: this is mapping to the old Job class statuses. Need to update the client side and remove this
38 STATUSES = {
39 'PENDING': 1,
40 'STARTED': 2,
41 'SUCCESS': 3,
42 'FAILURE': 4,
43 'REVOKED': 4
44 }
45
46 def __init__(self, job_id=None, async_result=None):
47 if async_result:
48 self._async_result = async_result
49 else:
50 self._async_result = AsyncResult(job_id, app=celery)
51
52 @property
53 def id(self):
54 return self._async_result.id
55
56 @classmethod
57 def add_task(cls, query, data_source, scheduled=False, metadata={}):
58 query_hash = gen_query_hash(query)
59 logging.info("[Manager][%s] Inserting job", query_hash)
60 logging.info("[Manager] Metadata: [%s]", metadata)
61 try_count = 0
62 job = None
63
64 while try_count < cls.MAX_RETRIES:
65 try_count += 1
66
67 pipe = redis_connection.pipeline()
68 try:
69 pipe.watch(cls._job_lock_id(query_hash, data_source.id))
70 job_id = pipe.get(cls._job_lock_id(query_hash, data_source.id))
71 if job_id:
72 logging.info("[Manager][%s] Found existing job: %s", query_hash, job_id)
73
74 job = cls(job_id=job_id)
75 if job.ready():
76 logging.info("[%s] job found is ready (%s), removing lock", query_hash, job.celery_status)
77 redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
78 job = None
79
80 if not job:
81 pipe.multi()
82
83 if scheduled:
84 queue_name = data_source.scheduled_queue_name
85 else:
86 queue_name = data_source.queue_name
87
88 result = execute_query.apply_async(args=(query, data_source.id, metadata), queue=queue_name)
89 job = cls(async_result=result)
90
91 logging.info("[Manager][%s] Created new job: %s", query_hash, job.id)
92 pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)
93 pipe.execute()
94 break
95
96 except redis.WatchError:
97 continue
98
99 if not job:
100 logging.error("[Manager][%s] Failed adding job for query.", query_hash)
101
102 return job
103
104 def to_dict(self):
105 if self._async_result.status == 'STARTED':
106 updated_at = self._async_result.result.get('start_time', 0)
107 else:
108 updated_at = 0
109
110 if self._async_result.failed() and isinstance(self._async_result.result, Exception):
111 error = self._async_result.result.message
112 elif self._async_result.status == 'REVOKED':
113 error = 'Query execution cancelled.'
114 else:
115 error = ''
116
117 if self._async_result.successful():
118 query_result_id = self._async_result.result
119 else:
120 query_result_id = None
121
122 return {
123 'id': self._async_result.id,
124 'updated_at': updated_at,
125 'status': self.STATUSES[self._async_result.status],
126 'error': error,
127 'query_result_id': query_result_id,
128 }
129
130 @property
131 def is_cancelled(self):
132 return self._async_result.status == 'REVOKED'
133
134 @property
135 def celery_status(self):
136 return self._async_result.status
137
138 def ready(self):
139 return self._async_result.ready()
140
141 def cancel(self):
142 return self._async_result.revoke(terminate=True, signal='SIGINT')
143
144 @staticmethod
145 def _job_lock_id(query_hash, data_source_id):
146 return "query_hash_job:%s:%s" % (data_source_id, query_hash)
147
148
149 @celery.task(base=BaseTask)
150 def refresh_queries():
151 # self.status['last_refresh_at'] = time.time()
152 # self._save_status()
153
154 logger.info("Refreshing queries...")
155
156 outdated_queries_count = 0
157 for query in models.Query.outdated_queries():
158 QueryTask.add_task(query.query, query.data_source, scheduled=True,
159 metadata={'Query ID': query.id, 'Username': 'Scheduled'})
160 outdated_queries_count += 1
161
162 statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
163
164 logger.info("Done refreshing queries. Found %d outdated queries." % outdated_queries_count)
165
166 status = redis_connection.hgetall('redash:status')
167 now = time.time()
168
169 redis_connection.hmset('redash:status', {
170 'outdated_queries_count': outdated_queries_count,
171 'last_refresh_at': now
172 })
173
174 statsd_client.gauge('manager.seconds_since_refresh', now - float(status.get('last_refresh_at', now)))
175
176
177 @celery.task(base=BaseTask)
178 def cleanup_tasks():
179 # in case of cold restart of the workers, there might be jobs that still have their "lock" object, but aren't really
180 # going to run. this job removes them.
181 lock_keys = redis_connection.keys("query_hash_job:*") # TODO: use set instead of keys command
182 if not lock_keys:
183 return
184
185 query_tasks = [QueryTask(job_id=j) for j in redis_connection.mget(lock_keys)]
186
187 logger.info("Found %d locks", len(query_tasks))
188
189 inspect = celery.control.inspect()
190 active_tasks = inspect.active()
191 if active_tasks is None:
192 active_tasks = []
193 else:
194 active_tasks = active_tasks.values()
195
196 all_tasks = set()
197 for task_list in active_tasks:
198 for task in task_list:
199 all_tasks.add(task['id'])
200
201 logger.info("Active jobs count: %d", len(all_tasks))
202
203 for i, t in enumerate(query_tasks):
204 if t.ready():
205 # if locked task is ready already (failed, finished, revoked), we don't need the lock anymore
206 logger.warning("%s is ready (%s), removing lock.", lock_keys[i], t.celery_status)
207 redis_connection.delete(lock_keys[i])
208
209 # if t.celery_status == 'STARTED' and t.id not in all_tasks:
210 # logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
211 # redis_connection.delete(lock_keys[i])
212
213
214 @celery.task(base=BaseTask)
215 def cleanup_query_results():
216 """
217 Job to cleanup unused query results -- such that no query links to them anymore, and older than a week (so it's less
218 likely to be open in someone's browser and be used).
219
220 Each time the job deletes only 100 query results so it won't choke the database in case of many such results.
221 """
222
223 logging.info("Running query results clean up (removing maximum of %d unused results, that are %d days old or more)",
224 settings.QUERY_RESULTS_CLEANUP_COUNT, settings.QUERY_RESULTS_CLEANUP_MAX_AGE)
225
226 unused_query_results = models.QueryResult.unused(settings.QUERY_RESULTS_CLEANUP_MAX_AGE).limit(settings.QUERY_RESULTS_CLEANUP_COUNT)
227 total_unused_query_results = models.QueryResult.unused().count()
228 deleted_count = models.QueryResult.delete().where(models.QueryResult.id << unused_query_results).execute()
229
230 logger.info("Deleted %d unused query results out of total of %d." % (deleted_count, total_unused_query_results))
231
232
233 @celery.task(base=BaseTask)
234 def refresh_schemas():
235 """
236 Refreshs the datasources schema.
237 """
238
239 for ds in models.DataSource.select():
240 logger.info("Refreshing schema for: {}".format(ds.name))
241 ds.get_schema(refresh=True)
242
243
244 def signal_handler(*args):
245 raise InterruptException
246
247
248 @celery.task(bind=True, base=BaseTask, track_started=True)
249 def execute_query(self, query, data_source_id, metadata):
250 signal.signal(signal.SIGINT, signal_handler)
251 start_time = time.time()
252
253 logger.info("Loading data source (%d)...", data_source_id)
254
255 # TODO: we should probably cache data sources in Redis
256 data_source = models.DataSource.get_by_id(data_source_id)
257
258 self.update_state(state='STARTED', meta={'start_time': start_time, 'custom_message': ''})
259
260 logger.info("Executing query:\n%s", query)
261
262 query_hash = gen_query_hash(query)
263 query_runner = get_query_runner(data_source.type, data_source.options)
264
265 if query_runner.annotate_query():
266 metadata['Task ID'] = self.request.id
267 metadata['Query Hash'] = query_hash
268 metadata['Queue'] = self.request.delivery_info['routing_key']
269
270 annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])
271
272 logging.debug(u"Annotation: %s", annotation)
273
274 annotated_query = u"/* {} */ {}".format(annotation, query)
275 else:
276 annotated_query = query
277
278 with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):
279 data, error = query_runner.run_query(annotated_query)
280
281 run_time = time.time() - start_time
282 logger.info("Query finished... data length=%s, error=%s", data and len(data), error)
283
284 self.update_state(state='STARTED', meta={'start_time': start_time, 'error': error, 'custom_message': ''})
285
286 # Delete query_hash
287 redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
288
289 if not error:
290 query_result, updated_query_ids = models.QueryResult.store_result(data_source.org_id, data_source.id, query_hash, query, data, run_time, utils.utcnow())
291 for query_id in updated_query_ids:
292 check_alerts_for_query.delay(query_id)
293 else:
294 raise Exception(error)
295
296 return query_result.id
297
298
299 @celery.task(base=BaseTask)
300 def record_event(event):
301 models.Event.record(event)
302
303 @celery.task(base=BaseTask)
304 def version_check():
305 run_version_check()
306
307
308 def base_url(org):
309 if org.domain:
310 return 'https://{}'.format(org.domain)
311 return settings.HOST
312
313
314 @celery.task(bind=True, base=BaseTask)
315 def check_alerts_for_query(self, query_id):
316 from redash.wsgi import app
317
318 logger.debug("Checking query %d for alerts", query_id)
319 query = models.Query.get_by_id(query_id)
320 for alert in query.alerts:
321 alert.query = query
322 new_state = alert.evaluate()
323 passed_rearm_threshold = False
324 if alert.rearm and alert.last_triggered_at:
325 passed_rearm_threshold = alert.last_triggered_at + datetime.timedelta(seconds=alert.rearm) < utils.utcnow()
326 if new_state != alert.state or (alert.state == models.Alert.TRIGGERED_STATE and passed_rearm_threshold ):
327 logger.info("Alert %d new state: %s", alert.id, new_state)
328 old_state = alert.state
329 alert.update_instance(state=new_state, last_triggered_at=utils.utcnow())
330
331 if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:
332 logger.debug("Skipping notification (previous state was unknown and now it's ok).")
333 continue
334
335 # message = Message
336 html = """
337 Check <a href="{host}/alerts/{alert_id}">alert</a> / check <a href="{host}/queries/{query_id}">query</a>.
338 """.format(host=base_url(alert.query.org), alert_id=alert.id, query_id=query.id)
339
340 notify_mail(alert, html, new_state, app)
341
342 if settings.HIPCHAT_API_TOKEN:
343 notify_hipchat(alert, html, new_state)
344
345 if settings.WEBHOOK_ENDPOINT:
346 notify_webhook(alert, query, html, new_state)
347
348
349 def notify_hipchat(alert, html, new_state):
350 try:
351 hipchat_client = hipchat.HipChat(token=settings.HIPCHAT_API_TOKEN)
352 message = '[' + new_state.upper() + '] ' + alert.name + '<br />' + html
353 hipchat_client.message_room(settings.HIPCHAT_ROOM_ID, settings.NAME, message.encode('utf-8', 'ignore'), message_format='html')
354 except Exception:
355 logger.exception("hipchat send ERROR.")
356
357
358 def notify_mail(alert, html, new_state, app):
359 recipients = [s.email for s in alert.subscribers()]
360 logger.debug("Notifying: %s", recipients)
361 try:
362 with app.app_context():
363 message = Message(recipients=recipients,
364 subject="[{1}] {0}".format(alert.name.encode('utf-8', 'ignore'), new_state.upper()),
365 html=html)
366 mail.send(message)
367 except Exception:
368 logger.exception("mail send ERROR.")
369
370
371 def notify_webhook(alert, query, html, new_state):
372 try:
373 data = {
374 'event': 'alert_state_change',
375 'alert': alert.to_dict(full=False),
376 'url_base': base_url(query.org)
377 }
378 headers = {'Content-Type': 'application/json'}
379 auth = HTTPBasicAuth(settings.WEBHOOK_USERNAME, settings.WEBHOOK_PASSWORD) if settings.WEBHOOK_USERNAME else None
380 resp = requests.post(settings.WEBHOOK_ENDPOINT, data=json_dumps(data), auth=auth, headers=headers)
381 if resp.status_code != 200:
382 logger.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
383 except Exception:
384 logger.exception("webhook send ERROR.")
385
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/redash/tasks.py b/redash/tasks.py
--- a/redash/tasks.py
+++ b/redash/tasks.py
@@ -306,8 +306,9 @@
def base_url(org):
- if org.domain:
- return 'https://{}'.format(org.domain)
+ if settings.MULTI_ORG:
+ return "https://{}/{}".format(settings.HOST, org.slug)
+
return settings.HOST
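
For readers skimming the patch, a small self-contained sketch of the behaviour it introduces is shown below. The `FakeSettings` and `FakeOrg` classes are hypothetical stand-ins for redash's real `settings` module and `Organization` model (they are not taken from the codebase above); only the body of `base_url` mirrors the diff.

```python
# Hypothetical stand-ins for redash's settings module and Organization model.
class FakeSettings(object):
    MULTI_ORG = True
    HOST = "redash.example.com"  # assumed value, not from the source


class FakeOrg(object):
    slug = "acme"  # assumed value, not from the source


settings = FakeSettings()


def base_url(org):
    # The organization no longer exposes a `domain` attribute, so the URL is
    # built from the shared HOST plus the organization's slug instead.
    if settings.MULTI_ORG:
        return "https://{}/{}".format(settings.HOST, org.slug)
    return settings.HOST


assert base_url(FakeOrg()) == "https://redash.example.com/acme"
```
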
| {"golden_diff": "diff --git a/redash/tasks.py b/redash/tasks.py\n--- a/redash/tasks.py\n+++ b/redash/tasks.py\n@@ -306,8 +306,9 @@\n \n \n def base_url(org):\n- if org.domain:\n- return 'https://{}'.format(org.domain)\n+ if settings.MULTI_ORG:\n+ return \"https://{}/{}\".format(settings.HOST, org.slug)\n+\n return settings.HOST\n", "issue": "Alert send Error(AttributeError: 'Organization' object has no attribute 'domain)\nHello.\n\nAlert send Error.\n\n```\n01:51:24 worker.1 | [2016-01-07 01:51:24,764: ERROR/MainProcess] Task redash.tasks.check_alerts_for_query[6fd2a1aa-bb2b-4054-a6a1-2487c0bae30c] raised unexpected: AttributeError(\"'Organization' object has no attribute 'domain'\",)\n01:51:24 worker.1 | Traceback (most recent call last):\n01:51:24 worker.1 | File \"/usr/local/lib/python2.7/dist-packages/celery/app/trace.py\", line 240, in trace_task\n01:51:24 worker.1 | R = retval = fun(*args, **kwargs)\n01:51:24 worker.1 | File \"/opt/redash/current/redash/tasks.py\", line 31, in __call__\n01:51:24 worker.1 | return super(BaseTask, self).__call__(*args, **kwargs)\n01:51:24 worker.1 | File \"/usr/local/lib/python2.7/dist-packages/celery/app/trace.py\", line 437, in __protected_call__\n01:51:24 worker.1 | return self.run(*args, **kwargs)\n01:51:24 worker.1 | File \"/opt/redash/current/redash/tasks.py\", line 338, in check_alerts_for_query\n01:51:24 worker.1 | \"\"\".format(host=base_url(alert.query.org), alert_id=alert.id, query_id=query.id)\n01:51:24 worker.1 | File \"/opt/redash/current/redash/tasks.py\", line 309, in base_url\n01:51:24 worker.1 | if org.domain:\n01:51:24 worker.1 | AttributeError: 'Organization' object has no attribute 'domain'\n```\n\n", "before_files": [{"content": "import datetime\nimport time\nimport logging\nimport signal\nfrom flask.ext.mail import Message\nimport redis\nimport hipchat\nimport requests\nfrom redash.utils import json_dumps\nfrom requests.auth import HTTPBasicAuth\nfrom celery import Task\nfrom celery.result import AsyncResult\nfrom celery.utils.log import get_task_logger\nfrom redash import redis_connection, models, statsd_client, settings, utils, mail\nfrom redash.utils import gen_query_hash\nfrom redash.worker import celery\nfrom redash.query_runner import get_query_runner, InterruptException\nfrom version_check import run_version_check\n\nlogger = get_task_logger(__name__)\n\n\nclass BaseTask(Task):\n abstract = True\n\n def after_return(self, *args, **kwargs):\n models.db.close_db(None)\n\n def __call__(self, *args, **kwargs):\n models.db.connect_db()\n return super(BaseTask, self).__call__(*args, **kwargs)\n\n\nclass QueryTask(object):\n MAX_RETRIES = 5\n\n # TODO: this is mapping to the old Job class statuses. 
Need to update the client side and remove this\n STATUSES = {\n 'PENDING': 1,\n 'STARTED': 2,\n 'SUCCESS': 3,\n 'FAILURE': 4,\n 'REVOKED': 4\n }\n\n def __init__(self, job_id=None, async_result=None):\n if async_result:\n self._async_result = async_result\n else:\n self._async_result = AsyncResult(job_id, app=celery)\n\n @property\n def id(self):\n return self._async_result.id\n\n @classmethod\n def add_task(cls, query, data_source, scheduled=False, metadata={}):\n query_hash = gen_query_hash(query)\n logging.info(\"[Manager][%s] Inserting job\", query_hash)\n logging.info(\"[Manager] Metadata: [%s]\", metadata)\n try_count = 0\n job = None\n \n while try_count < cls.MAX_RETRIES:\n try_count += 1\n\n pipe = redis_connection.pipeline()\n try:\n pipe.watch(cls._job_lock_id(query_hash, data_source.id))\n job_id = pipe.get(cls._job_lock_id(query_hash, data_source.id))\n if job_id:\n logging.info(\"[Manager][%s] Found existing job: %s\", query_hash, job_id)\n\n job = cls(job_id=job_id)\n if job.ready():\n logging.info(\"[%s] job found is ready (%s), removing lock\", query_hash, job.celery_status)\n redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))\n job = None\n\n if not job:\n pipe.multi()\n\n if scheduled:\n queue_name = data_source.scheduled_queue_name\n else:\n queue_name = data_source.queue_name\n\n result = execute_query.apply_async(args=(query, data_source.id, metadata), queue=queue_name)\n job = cls(async_result=result)\n \n logging.info(\"[Manager][%s] Created new job: %s\", query_hash, job.id)\n pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)\n pipe.execute()\n break\n\n except redis.WatchError:\n continue\n\n if not job:\n logging.error(\"[Manager][%s] Failed adding job for query.\", query_hash)\n\n return job\n\n def to_dict(self):\n if self._async_result.status == 'STARTED':\n updated_at = self._async_result.result.get('start_time', 0)\n else:\n updated_at = 0\n\n if self._async_result.failed() and isinstance(self._async_result.result, Exception):\n error = self._async_result.result.message\n elif self._async_result.status == 'REVOKED':\n error = 'Query execution cancelled.'\n else:\n error = ''\n\n if self._async_result.successful():\n query_result_id = self._async_result.result\n else:\n query_result_id = None\n\n return {\n 'id': self._async_result.id,\n 'updated_at': updated_at,\n 'status': self.STATUSES[self._async_result.status],\n 'error': error,\n 'query_result_id': query_result_id,\n }\n\n @property\n def is_cancelled(self):\n return self._async_result.status == 'REVOKED'\n\n @property\n def celery_status(self):\n return self._async_result.status\n\n def ready(self):\n return self._async_result.ready()\n\n def cancel(self):\n return self._async_result.revoke(terminate=True, signal='SIGINT')\n\n @staticmethod\n def _job_lock_id(query_hash, data_source_id):\n return \"query_hash_job:%s:%s\" % (data_source_id, query_hash)\n\n\[email protected](base=BaseTask)\ndef refresh_queries():\n # self.status['last_refresh_at'] = time.time()\n # self._save_status()\n\n logger.info(\"Refreshing queries...\")\n\n outdated_queries_count = 0\n for query in models.Query.outdated_queries():\n QueryTask.add_task(query.query, query.data_source, scheduled=True,\n metadata={'Query ID': query.id, 'Username': 'Scheduled'})\n outdated_queries_count += 1\n\n statsd_client.gauge('manager.outdated_queries', outdated_queries_count)\n\n logger.info(\"Done refreshing queries. 
Found %d outdated queries.\" % outdated_queries_count)\n\n status = redis_connection.hgetall('redash:status')\n now = time.time()\n\n redis_connection.hmset('redash:status', {\n 'outdated_queries_count': outdated_queries_count,\n 'last_refresh_at': now\n })\n\n statsd_client.gauge('manager.seconds_since_refresh', now - float(status.get('last_refresh_at', now)))\n\n\[email protected](base=BaseTask)\ndef cleanup_tasks():\n # in case of cold restart of the workers, there might be jobs that still have their \"lock\" object, but aren't really\n # going to run. this job removes them.\n lock_keys = redis_connection.keys(\"query_hash_job:*\") # TODO: use set instead of keys command\n if not lock_keys:\n return\n \n query_tasks = [QueryTask(job_id=j) for j in redis_connection.mget(lock_keys)]\n\n logger.info(\"Found %d locks\", len(query_tasks))\n\n inspect = celery.control.inspect()\n active_tasks = inspect.active()\n if active_tasks is None:\n active_tasks = []\n else:\n active_tasks = active_tasks.values()\n\n all_tasks = set()\n for task_list in active_tasks:\n for task in task_list:\n all_tasks.add(task['id'])\n\n logger.info(\"Active jobs count: %d\", len(all_tasks))\n\n for i, t in enumerate(query_tasks):\n if t.ready():\n # if locked task is ready already (failed, finished, revoked), we don't need the lock anymore\n logger.warning(\"%s is ready (%s), removing lock.\", lock_keys[i], t.celery_status)\n redis_connection.delete(lock_keys[i])\n\n # if t.celery_status == 'STARTED' and t.id not in all_tasks:\n # logger.warning(\"Couldn't find active job for: %s, removing lock.\", lock_keys[i])\n # redis_connection.delete(lock_keys[i])\n\n\[email protected](base=BaseTask)\ndef cleanup_query_results():\n \"\"\"\n Job to cleanup unused query results -- such that no query links to them anymore, and older than a week (so it's less\n likely to be open in someone's browser and be used).\n\n Each time the job deletes only 100 query results so it won't choke the database in case of many such results.\n \"\"\"\n\n logging.info(\"Running query results clean up (removing maximum of %d unused results, that are %d days old or more)\",\n settings.QUERY_RESULTS_CLEANUP_COUNT, settings.QUERY_RESULTS_CLEANUP_MAX_AGE)\n\n unused_query_results = models.QueryResult.unused(settings.QUERY_RESULTS_CLEANUP_MAX_AGE).limit(settings.QUERY_RESULTS_CLEANUP_COUNT)\n total_unused_query_results = models.QueryResult.unused().count()\n deleted_count = models.QueryResult.delete().where(models.QueryResult.id << unused_query_results).execute()\n\n logger.info(\"Deleted %d unused query results out of total of %d.\" % (deleted_count, total_unused_query_results))\n\n\[email protected](base=BaseTask)\ndef refresh_schemas():\n \"\"\"\n Refreshs the datasources schema.\n \"\"\"\n\n for ds in models.DataSource.select():\n logger.info(\"Refreshing schema for: {}\".format(ds.name))\n ds.get_schema(refresh=True)\n\n\ndef signal_handler(*args):\n raise InterruptException\n\n\[email protected](bind=True, base=BaseTask, track_started=True)\ndef execute_query(self, query, data_source_id, metadata):\n signal.signal(signal.SIGINT, signal_handler)\n start_time = time.time()\n\n logger.info(\"Loading data source (%d)...\", data_source_id)\n\n # TODO: we should probably cache data sources in Redis\n data_source = models.DataSource.get_by_id(data_source_id)\n\n self.update_state(state='STARTED', meta={'start_time': start_time, 'custom_message': ''})\n\n logger.info(\"Executing query:\\n%s\", query)\n\n query_hash = gen_query_hash(query)\n query_runner = 
get_query_runner(data_source.type, data_source.options)\n\n if query_runner.annotate_query():\n metadata['Task ID'] = self.request.id\n metadata['Query Hash'] = query_hash\n metadata['Queue'] = self.request.delivery_info['routing_key']\n\n annotation = u\", \".join([u\"{}: {}\".format(k, v) for k, v in metadata.iteritems()])\n\n logging.debug(u\"Annotation: %s\", annotation)\n\n annotated_query = u\"/* {} */ {}\".format(annotation, query)\n else:\n annotated_query = query\n\n with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):\n data, error = query_runner.run_query(annotated_query)\n\n run_time = time.time() - start_time\n logger.info(\"Query finished... data length=%s, error=%s\", data and len(data), error)\n\n self.update_state(state='STARTED', meta={'start_time': start_time, 'error': error, 'custom_message': ''})\n\n # Delete query_hash\n redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))\n\n if not error:\n query_result, updated_query_ids = models.QueryResult.store_result(data_source.org_id, data_source.id, query_hash, query, data, run_time, utils.utcnow())\n for query_id in updated_query_ids:\n check_alerts_for_query.delay(query_id)\n else:\n raise Exception(error)\n\n return query_result.id\n\n\[email protected](base=BaseTask)\ndef record_event(event):\n models.Event.record(event)\n\[email protected](base=BaseTask)\ndef version_check():\n run_version_check()\n\n\ndef base_url(org):\n if org.domain:\n return 'https://{}'.format(org.domain)\n return settings.HOST\n\n\[email protected](bind=True, base=BaseTask)\ndef check_alerts_for_query(self, query_id):\n from redash.wsgi import app\n\n logger.debug(\"Checking query %d for alerts\", query_id)\n query = models.Query.get_by_id(query_id)\n for alert in query.alerts:\n alert.query = query\n new_state = alert.evaluate()\n passed_rearm_threshold = False\n if alert.rearm and alert.last_triggered_at:\n passed_rearm_threshold = alert.last_triggered_at + datetime.timedelta(seconds=alert.rearm) < utils.utcnow()\n if new_state != alert.state or (alert.state == models.Alert.TRIGGERED_STATE and passed_rearm_threshold ):\n logger.info(\"Alert %d new state: %s\", alert.id, new_state)\n old_state = alert.state\n alert.update_instance(state=new_state, last_triggered_at=utils.utcnow())\n\n if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:\n logger.debug(\"Skipping notification (previous state was unknown and now it's ok).\")\n continue\n\n # message = Message\n html = \"\"\"\n Check <a href=\"{host}/alerts/{alert_id}\">alert</a> / check <a href=\"{host}/queries/{query_id}\">query</a>.\n \"\"\".format(host=base_url(alert.query.org), alert_id=alert.id, query_id=query.id)\n\n notify_mail(alert, html, new_state, app)\n\n if settings.HIPCHAT_API_TOKEN:\n notify_hipchat(alert, html, new_state)\n\n if settings.WEBHOOK_ENDPOINT:\n notify_webhook(alert, query, html, new_state)\n\n\ndef notify_hipchat(alert, html, new_state):\n try:\n hipchat_client = hipchat.HipChat(token=settings.HIPCHAT_API_TOKEN)\n message = '[' + new_state.upper() + '] ' + alert.name + '<br />' + html\n hipchat_client.message_room(settings.HIPCHAT_ROOM_ID, settings.NAME, message.encode('utf-8', 'ignore'), message_format='html')\n except Exception:\n logger.exception(\"hipchat send ERROR.\")\n\n\ndef notify_mail(alert, html, new_state, app):\n recipients = [s.email for s in alert.subscribers()]\n logger.debug(\"Notifying: %s\", recipients)\n try:\n with app.app_context():\n message = 
Message(recipients=recipients,\n subject=\"[{1}] {0}\".format(alert.name.encode('utf-8', 'ignore'), new_state.upper()),\n html=html)\n mail.send(message)\n except Exception:\n logger.exception(\"mail send ERROR.\")\n\n\ndef notify_webhook(alert, query, html, new_state):\n try:\n data = {\n 'event': 'alert_state_change',\n 'alert': alert.to_dict(full=False),\n 'url_base': base_url(query.org)\n }\n headers = {'Content-Type': 'application/json'}\n auth = HTTPBasicAuth(settings.WEBHOOK_USERNAME, settings.WEBHOOK_PASSWORD) if settings.WEBHOOK_USERNAME else None\n resp = requests.post(settings.WEBHOOK_ENDPOINT, data=json_dumps(data), auth=auth, headers=headers)\n if resp.status_code != 200:\n logger.error(\"webhook send ERROR. status_code => {status}\".format(status=resp.status_code))\n except Exception:\n logger.exception(\"webhook send ERROR.\")\n", "path": "redash/tasks.py"}], "after_files": [{"content": "import datetime\nimport time\nimport logging\nimport signal\nfrom flask.ext.mail import Message\nimport redis\nimport hipchat\nimport requests\nfrom redash.utils import json_dumps\nfrom requests.auth import HTTPBasicAuth\nfrom celery import Task\nfrom celery.result import AsyncResult\nfrom celery.utils.log import get_task_logger\nfrom redash import redis_connection, models, statsd_client, settings, utils, mail\nfrom redash.utils import gen_query_hash\nfrom redash.worker import celery\nfrom redash.query_runner import get_query_runner, InterruptException\nfrom version_check import run_version_check\n\nlogger = get_task_logger(__name__)\n\n\nclass BaseTask(Task):\n abstract = True\n\n def after_return(self, *args, **kwargs):\n models.db.close_db(None)\n\n def __call__(self, *args, **kwargs):\n models.db.connect_db()\n return super(BaseTask, self).__call__(*args, **kwargs)\n\n\nclass QueryTask(object):\n MAX_RETRIES = 5\n\n # TODO: this is mapping to the old Job class statuses. 
Need to update the client side and remove this\n STATUSES = {\n 'PENDING': 1,\n 'STARTED': 2,\n 'SUCCESS': 3,\n 'FAILURE': 4,\n 'REVOKED': 4\n }\n\n def __init__(self, job_id=None, async_result=None):\n if async_result:\n self._async_result = async_result\n else:\n self._async_result = AsyncResult(job_id, app=celery)\n\n @property\n def id(self):\n return self._async_result.id\n\n @classmethod\n def add_task(cls, query, data_source, scheduled=False, metadata={}):\n query_hash = gen_query_hash(query)\n logging.info(\"[Manager][%s] Inserting job\", query_hash)\n logging.info(\"[Manager] Metadata: [%s]\", metadata)\n try_count = 0\n job = None\n \n while try_count < cls.MAX_RETRIES:\n try_count += 1\n\n pipe = redis_connection.pipeline()\n try:\n pipe.watch(cls._job_lock_id(query_hash, data_source.id))\n job_id = pipe.get(cls._job_lock_id(query_hash, data_source.id))\n if job_id:\n logging.info(\"[Manager][%s] Found existing job: %s\", query_hash, job_id)\n\n job = cls(job_id=job_id)\n if job.ready():\n logging.info(\"[%s] job found is ready (%s), removing lock\", query_hash, job.celery_status)\n redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))\n job = None\n\n if not job:\n pipe.multi()\n\n if scheduled:\n queue_name = data_source.scheduled_queue_name\n else:\n queue_name = data_source.queue_name\n\n result = execute_query.apply_async(args=(query, data_source.id, metadata), queue=queue_name)\n job = cls(async_result=result)\n \n logging.info(\"[Manager][%s] Created new job: %s\", query_hash, job.id)\n pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)\n pipe.execute()\n break\n\n except redis.WatchError:\n continue\n\n if not job:\n logging.error(\"[Manager][%s] Failed adding job for query.\", query_hash)\n\n return job\n\n def to_dict(self):\n if self._async_result.status == 'STARTED':\n updated_at = self._async_result.result.get('start_time', 0)\n else:\n updated_at = 0\n\n if self._async_result.failed() and isinstance(self._async_result.result, Exception):\n error = self._async_result.result.message\n elif self._async_result.status == 'REVOKED':\n error = 'Query execution cancelled.'\n else:\n error = ''\n\n if self._async_result.successful():\n query_result_id = self._async_result.result\n else:\n query_result_id = None\n\n return {\n 'id': self._async_result.id,\n 'updated_at': updated_at,\n 'status': self.STATUSES[self._async_result.status],\n 'error': error,\n 'query_result_id': query_result_id,\n }\n\n @property\n def is_cancelled(self):\n return self._async_result.status == 'REVOKED'\n\n @property\n def celery_status(self):\n return self._async_result.status\n\n def ready(self):\n return self._async_result.ready()\n\n def cancel(self):\n return self._async_result.revoke(terminate=True, signal='SIGINT')\n\n @staticmethod\n def _job_lock_id(query_hash, data_source_id):\n return \"query_hash_job:%s:%s\" % (data_source_id, query_hash)\n\n\[email protected](base=BaseTask)\ndef refresh_queries():\n # self.status['last_refresh_at'] = time.time()\n # self._save_status()\n\n logger.info(\"Refreshing queries...\")\n\n outdated_queries_count = 0\n for query in models.Query.outdated_queries():\n QueryTask.add_task(query.query, query.data_source, scheduled=True,\n metadata={'Query ID': query.id, 'Username': 'Scheduled'})\n outdated_queries_count += 1\n\n statsd_client.gauge('manager.outdated_queries', outdated_queries_count)\n\n logger.info(\"Done refreshing queries. 
Found %d outdated queries.\" % outdated_queries_count)\n\n status = redis_connection.hgetall('redash:status')\n now = time.time()\n\n redis_connection.hmset('redash:status', {\n 'outdated_queries_count': outdated_queries_count,\n 'last_refresh_at': now\n })\n\n statsd_client.gauge('manager.seconds_since_refresh', now - float(status.get('last_refresh_at', now)))\n\n\[email protected](base=BaseTask)\ndef cleanup_tasks():\n # in case of cold restart of the workers, there might be jobs that still have their \"lock\" object, but aren't really\n # going to run. this job removes them.\n lock_keys = redis_connection.keys(\"query_hash_job:*\") # TODO: use set instead of keys command\n if not lock_keys:\n return\n \n query_tasks = [QueryTask(job_id=j) for j in redis_connection.mget(lock_keys)]\n\n logger.info(\"Found %d locks\", len(query_tasks))\n\n inspect = celery.control.inspect()\n active_tasks = inspect.active()\n if active_tasks is None:\n active_tasks = []\n else:\n active_tasks = active_tasks.values()\n\n all_tasks = set()\n for task_list in active_tasks:\n for task in task_list:\n all_tasks.add(task['id'])\n\n logger.info(\"Active jobs count: %d\", len(all_tasks))\n\n for i, t in enumerate(query_tasks):\n if t.ready():\n # if locked task is ready already (failed, finished, revoked), we don't need the lock anymore\n logger.warning(\"%s is ready (%s), removing lock.\", lock_keys[i], t.celery_status)\n redis_connection.delete(lock_keys[i])\n\n # if t.celery_status == 'STARTED' and t.id not in all_tasks:\n # logger.warning(\"Couldn't find active job for: %s, removing lock.\", lock_keys[i])\n # redis_connection.delete(lock_keys[i])\n\n\[email protected](base=BaseTask)\ndef cleanup_query_results():\n \"\"\"\n Job to cleanup unused query results -- such that no query links to them anymore, and older than a week (so it's less\n likely to be open in someone's browser and be used).\n\n Each time the job deletes only 100 query results so it won't choke the database in case of many such results.\n \"\"\"\n\n logging.info(\"Running query results clean up (removing maximum of %d unused results, that are %d days old or more)\",\n settings.QUERY_RESULTS_CLEANUP_COUNT, settings.QUERY_RESULTS_CLEANUP_MAX_AGE)\n\n unused_query_results = models.QueryResult.unused(settings.QUERY_RESULTS_CLEANUP_MAX_AGE).limit(settings.QUERY_RESULTS_CLEANUP_COUNT)\n total_unused_query_results = models.QueryResult.unused().count()\n deleted_count = models.QueryResult.delete().where(models.QueryResult.id << unused_query_results).execute()\n\n logger.info(\"Deleted %d unused query results out of total of %d.\" % (deleted_count, total_unused_query_results))\n\n\[email protected](base=BaseTask)\ndef refresh_schemas():\n \"\"\"\n Refreshs the datasources schema.\n \"\"\"\n\n for ds in models.DataSource.select():\n logger.info(\"Refreshing schema for: {}\".format(ds.name))\n ds.get_schema(refresh=True)\n\n\ndef signal_handler(*args):\n raise InterruptException\n\n\[email protected](bind=True, base=BaseTask, track_started=True)\ndef execute_query(self, query, data_source_id, metadata):\n signal.signal(signal.SIGINT, signal_handler)\n start_time = time.time()\n\n logger.info(\"Loading data source (%d)...\", data_source_id)\n\n # TODO: we should probably cache data sources in Redis\n data_source = models.DataSource.get_by_id(data_source_id)\n\n self.update_state(state='STARTED', meta={'start_time': start_time, 'custom_message': ''})\n\n logger.info(\"Executing query:\\n%s\", query)\n\n query_hash = gen_query_hash(query)\n query_runner = 
get_query_runner(data_source.type, data_source.options)\n\n if query_runner.annotate_query():\n metadata['Task ID'] = self.request.id\n metadata['Query Hash'] = query_hash\n metadata['Queue'] = self.request.delivery_info['routing_key']\n\n annotation = u\", \".join([u\"{}: {}\".format(k, v) for k, v in metadata.iteritems()])\n\n logging.debug(u\"Annotation: %s\", annotation)\n\n annotated_query = u\"/* {} */ {}\".format(annotation, query)\n else:\n annotated_query = query\n\n with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):\n data, error = query_runner.run_query(annotated_query)\n\n run_time = time.time() - start_time\n logger.info(\"Query finished... data length=%s, error=%s\", data and len(data), error)\n\n self.update_state(state='STARTED', meta={'start_time': start_time, 'error': error, 'custom_message': ''})\n\n # Delete query_hash\n redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))\n\n if not error:\n query_result, updated_query_ids = models.QueryResult.store_result(data_source.org_id, data_source.id, query_hash, query, data, run_time, utils.utcnow())\n for query_id in updated_query_ids:\n check_alerts_for_query.delay(query_id)\n else:\n raise Exception(error)\n\n return query_result.id\n\n\[email protected](base=BaseTask)\ndef record_event(event):\n models.Event.record(event)\n\[email protected](base=BaseTask)\ndef version_check():\n run_version_check()\n\n\ndef base_url(org):\n if settings.MULTI_ORG:\n return \"https://{}/{}\".format(settings.HOST, org.slug)\n\n return settings.HOST\n\n\[email protected](bind=True, base=BaseTask)\ndef check_alerts_for_query(self, query_id):\n from redash.wsgi import app\n\n logger.debug(\"Checking query %d for alerts\", query_id)\n query = models.Query.get_by_id(query_id)\n for alert in query.alerts:\n alert.query = query\n new_state = alert.evaluate()\n passed_rearm_threshold = False\n if alert.rearm and alert.last_triggered_at:\n passed_rearm_threshold = alert.last_triggered_at + datetime.timedelta(seconds=alert.rearm) < utils.utcnow()\n if new_state != alert.state or (alert.state == models.Alert.TRIGGERED_STATE and passed_rearm_threshold ):\n logger.info(\"Alert %d new state: %s\", alert.id, new_state)\n old_state = alert.state\n alert.update_instance(state=new_state, last_triggered_at=utils.utcnow())\n\n if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:\n logger.debug(\"Skipping notification (previous state was unknown and now it's ok).\")\n continue\n\n # message = Message\n html = \"\"\"\n Check <a href=\"{host}/alerts/{alert_id}\">alert</a> / check <a href=\"{host}/queries/{query_id}\">query</a>.\n \"\"\".format(host=base_url(alert.query.org), alert_id=alert.id, query_id=query.id)\n\n notify_mail(alert, html, new_state, app)\n\n if settings.HIPCHAT_API_TOKEN:\n notify_hipchat(alert, html, new_state)\n\n if settings.WEBHOOK_ENDPOINT:\n notify_webhook(alert, query, html, new_state)\n\n\ndef notify_hipchat(alert, html, new_state):\n try:\n hipchat_client = hipchat.HipChat(token=settings.HIPCHAT_API_TOKEN)\n message = '[' + new_state.upper() + '] ' + alert.name + '<br />' + html\n hipchat_client.message_room(settings.HIPCHAT_ROOM_ID, settings.NAME, message, message_format='html')\n except Exception:\n logger.exception(\"hipchat send ERROR.\")\n\n\ndef notify_mail(alert, html, new_state, app):\n recipients = [s.email for s in alert.subscribers()]\n logger.debug(\"Notifying: %s\", recipients)\n try:\n with app.app_context():\n message = 
Message(recipients=recipients,\n subject=\"[{1}] {0}\".format(alert.name, new_state.upper()),\n html=html)\n mail.send(message)\n except Exception:\n logger.exception(\"mail send ERROR.\")\n\n\ndef notify_webhook(alert, query, html, new_state):\n try:\n data = {\n 'event': 'alert_state_change',\n 'alert': alert.to_dict(full=False),\n 'url_base': base_url(query.org)\n }\n headers = {'Content-Type': 'application/json'}\n auth = HTTPBasicAuth(settings.WEBHOOK_USERNAME, settings.WEBHOOK_PASSWORD) if settings.WEBHOOK_USERNAME else None\n resp = requests.post(settings.WEBHOOK_ENDPOINT, data=json_dumps(data), auth=auth, headers=headers)\n if resp.status_code != 200:\n logger.error(\"webhook send ERROR. status_code => {status}\".format(status=resp.status_code))\n except Exception:\n logger.exception(\"webhook send ERROR.\")\n", "path": "redash/tasks.py"}]} |
gh_patches_debug_114 | rasdani/github-patches | git_diff | microsoft__ptvsd-1986 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ptvsd broken on Python 2 if python-future is installed
Issue Type: <b>Bug</b>
After I installed a Python 2.7.16 32-bit interpreter, an exception is thrown with the error message below when I try to switch to the old Python 2.7.16 64-bit interpreter:
```
E+00000.045: /handling microsoft/vscode-python#1 request "launch" from Adapter-1/
Handler 'launch_request' (file u'c:\\Users\\linshimeng01\\.vscode\\extensions\\ms-python.python-2019.11.50794\\pythonFiles\\lib\\python\\new_ptvsd\\no_wheels\\ptvsd\\launcher/../../ptvsd\\launcher\\adapter.py', line 35)
couldn't handle microsoft/vscode-python#1 request "launch" from Adapter-1:
Traceback (most recent call last):
File "c:\Users\linshimeng01\.vscode\extensions\ms-python.python-2019.11.50794\pythonFiles\lib\python\new_ptvsd\no_wheels\ptvsd\launcher/../../ptvsd\common\messaging.py", line 763, in _handle
result = handler(self)
File "c:\Users\linshimeng01\.vscode\extensions\ms-python.python-2019.11.50794\pythonFiles\lib\python\new_ptvsd\no_wheels\ptvsd\launcher/../../ptvsd\launcher\adapter.py", line 147, in launch_request
debuggee.spawn(process_name, cmdline, cwd, env, redirect_output)
File "c:\Users\linshimeng01\.vscode\extensions\ms-python.python-2019.11.50794\pythonFiles\lib\python\new_ptvsd\no_wheels\ptvsd\launcher/../../ptvsd\launcher\debuggee.py", line 64, in spawn
"Couldn't spawn debuggee: {0}\n\nCommand line:{1!r}", exc, cmdline
TypeError: unbound method cant_handle() must be called with Message instance as first argument (got unicode instance instead)
Stack where logged:
File "C:\Python27-32\lib\threading.py", line 774, in __bootstrap
self.__bootstrap_inner()
File "C:\Python27-32\lib\threading.py", line 801, in __bootstrap_inner
self.run()
File "C:\Python27-32\lib\threading.py", line 754, in run
self.__target(*self.__args, **self.__kwargs)
File "c:\Users\linshimeng01\.vscode\extensions\ms-python.python-2019.11.50794\pythonFiles\lib\python\new_ptvsd\no_wheels\ptvsd\launcher/../../ptvsd\common\messaging.py", line 1520, in _run_handlers
handler()
File "c:\Users\linshimeng01\.vscode\extensions\ms-python.python-2019.11.50794\pythonFiles\lib\python\new_ptvsd\no_wheels\ptvsd\launcher/../../ptvsd\common\messaging.py", line 809, in _handle
self.describe(),
```
Now I have installed a Python 3 interpreter, and I found that the two Python 2 interpreters are both unavailable and throw the same error message above when I attempt to run a Python file. How can I fix it?
Extension version: 2019.11.50794
VS Code version: Code 1.40.1 (8795a9889db74563ddd43eb0a897a2384129a619, 2019-11-13T16:49:35.976Z)
OS version: Windows_NT x64 10.0.17763
<details>
<summary>System Info</summary>
|Item|Value|
|---|---|
|CPUs|Intel(R) Core(TM) i5-9400F CPU @ 2.90GHz (6 x 2904)|
|GPU Status|2d_canvas: enabled<br>flash_3d: enabled<br>flash_stage3d: enabled<br>flash_stage3d_baseline: enabled<br>gpu_compositing: enabled<br>metal: disabled_off<br>multiple_raster_threads: enabled_on<br>oop_rasterization: disabled_off<br>protected_video_decode: unavailable_off<br>rasterization: enabled<br>skia_renderer: disabled_off<br>surface_control: disabled_off<br>surface_synchronization: enabled_on<br>video_decode: enabled<br>viz_display_compositor: enabled_on<br>viz_hit_test_surface_layer: disabled_off<br>webgl: enabled<br>webgl2: enabled|
|Load (avg)|undefined|
|Memory (System)|15.93GB (7.79GB free)|
|Process Argv||
|Screen Reader|no|
|VM|67%|
</details>
<!-- generated by issue reporter -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/ptvsd/common/compat.py`
Content:
```
1 # Copyright (c) Microsoft Corporation. All rights reserved.
2 # Licensed under the MIT License. See LICENSE in the project root
3 # for license information.
4
5 from __future__ import absolute_import, division, print_function, unicode_literals
6
7 """Python 2/3 compatibility helpers.
8 """
9
10 import inspect
11 import itertools
12 import sys
13
14 from ptvsd.common import fmt
15
16
17 try:
18 import builtins
19 except ImportError:
20 import __builtin__ as builtins # noqa
21
22 try:
23 unicode = builtins.unicode
24 bytes = builtins.str
25 except AttributeError:
26 unicode = builtins.str
27 bytes = builtins.bytes
28
29 try:
30 xrange = builtins.xrange
31 except AttributeError:
32 xrange = builtins.range
33
34 try:
35 izip = itertools.izip
36 except AttributeError:
37 izip = builtins.zip
38
39 try:
40 reload = builtins.reload
41 except AttributeError:
42 from importlib import reload # noqa
43
44 try:
45 import queue
46 except ImportError:
47 import Queue as queue # noqa
48
49
50 def force_unicode(s, encoding, errors="strict"):
51 """Converts s to Unicode, using the provided encoding. If s is already Unicode,
52 it is returned as is.
53 """
54 return s.decode(encoding, errors) if isinstance(s, bytes) else unicode(s)
55
56
57 def force_bytes(s, encoding, errors="strict"):
58 """Converts s to bytes, using the provided encoding. If s is already bytes,
59 it is returned as is.
60
61 If errors="strict" and s is bytes, its encoding is verified by decoding it;
62 UnicodeError is raised if it cannot be decoded.
63 """
64 if isinstance(s, unicode):
65 return s.encode(encoding, errors)
66 else:
67 s = bytes(s)
68 if errors == "strict":
69 # Return value ignored - invoked solely for verification.
70 s.decode(encoding, errors)
71 return s
72
73
74 def force_str(s, encoding="ascii", errors="strict"):
75 """Converts s to str (which is bytes on Python 2, and unicode on Python 3), using
76 the provided encoding if necessary. If s is already str, it is returned as is.
77
78 If errors="strict", str is bytes, and s is str, its encoding is verified by decoding
79 it; UnicodeError is raised if it cannot be decoded.
80 """
81 return (force_bytes if str is bytes else force_unicode)(s, encoding, errors)
82
83
84 def force_ascii(s, errors="strict"):
85 """Same as force_bytes(s, "ascii", errors)
86 """
87 return force_bytes(s, "ascii", errors)
88
89
90 def force_utf8(s, errors="strict"):
91 """Same as force_bytes(s, "utf8", errors)
92 """
93 return force_bytes(s, "utf8", errors)
94
95
96 def filename(s, errors="strict"):
97 """Same as force_unicode(s, sys.getfilesystemencoding(), errors)
98 """
99 return force_unicode(s, sys.getfilesystemencoding(), errors)
100
101
102 def filename_bytes(s, errors="strict"):
103 """Same as force_bytes(s, sys.getfilesystemencoding(), errors)
104 """
105 return force_bytes(s, sys.getfilesystemencoding(), errors)
106
107
108 def filename_str(s, errors="strict"):
109 """Same as force_str(s, sys.getfilesystemencoding(), errors)
110 """
111 return force_str(s, sys.getfilesystemencoding(), errors)
112
113
114 def nameof(obj, quote=False):
115 """Returns the most descriptive name of a Python module, class, or function,
116 as a Unicode string
117
118 If quote=True, name is quoted with repr().
119
120 Best-effort, but guaranteed to not fail - always returns something.
121 """
122
123 try:
124 name = obj.__qualname__
125 except Exception:
126 try:
127 name = obj.__name__
128 except Exception:
129 # Fall back to raw repr(), and skip quoting.
130 try:
131 name = repr(obj)
132 except Exception:
133 return "<unknown>"
134 else:
135 quote = False
136
137 if quote:
138 try:
139 name = repr(name)
140 except Exception:
141 pass
142
143 return force_unicode(name, "utf-8", "replace")
144
145
146 def unicode_repr(obj):
147 """Like repr(), but guarantees that the result is Unicode even on Python 2.
148 """
149 return force_unicode(repr(obj), "ascii")
150
151
152 def srcnameof(obj):
153 """Returns the most descriptive name of a Python module, class, or function,
154 including source information (filename and linenumber), if available.
155
156 Best-effort, but guaranteed to not fail - always returns something.
157 """
158
159 name = nameof(obj, quote=True)
160
161 # Get the source information if possible.
162 try:
163 src_file = filename(inspect.getsourcefile(obj), "replace")
164 except Exception:
165 pass
166 else:
167 name += fmt(" (file {0!r}", src_file)
168 try:
169 _, src_lineno = inspect.getsourcelines(obj)
170 except Exception:
171 pass
172 else:
173 name += fmt(", line {0}", src_lineno)
174 name += ")"
175
176 return name
177
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/ptvsd/common/compat.py b/src/ptvsd/common/compat.py
--- a/src/ptvsd/common/compat.py
+++ b/src/ptvsd/common/compat.py
@@ -15,9 +15,9 @@
try:
- import builtins
+ import __builtin__ as builtins
except ImportError:
- import __builtin__ as builtins # noqa
+ import builtins
try:
unicode = builtins.unicode
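
The whole fix is the order of the two imports. Below is a hedged, self-contained sketch of the same pattern rather than ptvsd's actual module; the remark about python-future restates the issue report and is not independently verified here.

```python
import sys

# Prefer the interpreter's own Python 2 module name first. The name "builtins"
# can also be importable on Python 2 when the python-future backport is
# installed, which is what the original try/except order tripped over.
try:
    import __builtin__ as builtins  # Python 2: the real built-in module
except ImportError:
    import builtins  # Python 3

# With the native module bound, the aliases resolve to the interpreter's own
# types on both major versions.
unicode_type = getattr(builtins, "unicode", str)
bytes_type = getattr(builtins, "bytes", bytes)

print(sys.version_info[0], unicode_type, bytes_type)
```
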
| {"golden_diff": "diff --git a/src/ptvsd/common/compat.py b/src/ptvsd/common/compat.py\n--- a/src/ptvsd/common/compat.py\n+++ b/src/ptvsd/common/compat.py\n@@ -15,9 +15,9 @@\n \n \n try:\n- import builtins\n+ import __builtin__ as builtins\n except ImportError:\n- import __builtin__ as builtins # noqa\n+ import builtins\n \n try:\n unicode = builtins.unicode\n", "issue": "ptvsd broken on Python 2 if python-future is installed\nIssue Type: <b>Bug</b>\r\n\r\nAfter I installed an interpreter of Python 2.7.16 32bit, an exception is thrown with error message below when I want to switch to the old Python 2.7.16 64bit:\r\n```\r\nE+00000.045: /handling microsoft/vscode-python#1 request \"launch\" from Adapter-1/\r\n Handler 'launch_request' (file u'c:\\\\Users\\\\linshimeng01\\\\.vscode\\\\extensions\\\\ms-python.python-2019.11.50794\\\\pythonFiles\\\\lib\\\\python\\\\new_ptvsd\\\\no_wheels\\\\ptvsd\\\\launcher/../../ptvsd\\\\launcher\\\\adapter.py', line 35)\r\n couldn't handle microsoft/vscode-python#1 request \"launch\" from Adapter-1:\r\n\r\n Traceback (most recent call last):\r\n File \"c:\\Users\\linshimeng01\\.vscode\\extensions\\ms-python.python-2019.11.50794\\pythonFiles\\lib\\python\\new_ptvsd\\no_wheels\\ptvsd\\launcher/../../ptvsd\\common\\messaging.py\", line 763, in _handle\r\n result = handler(self)\r\n File \"c:\\Users\\linshimeng01\\.vscode\\extensions\\ms-python.python-2019.11.50794\\pythonFiles\\lib\\python\\new_ptvsd\\no_wheels\\ptvsd\\launcher/../../ptvsd\\launcher\\adapter.py\", line 147, in launch_request\r\n debuggee.spawn(process_name, cmdline, cwd, env, redirect_output)\r\n File \"c:\\Users\\linshimeng01\\.vscode\\extensions\\ms-python.python-2019.11.50794\\pythonFiles\\lib\\python\\new_ptvsd\\no_wheels\\ptvsd\\launcher/../../ptvsd\\launcher\\debuggee.py\", line 64, in spawn\r\n \"Couldn't spawn debuggee: {0}\\n\\nCommand line:{1!r}\", exc, cmdline\r\n TypeError: unbound method cant_handle() must be called with Message instance as first argument (got unicode instance instead)\r\n\r\n Stack where logged:\r\n File \"C:\\Python27-32\\lib\\threading.py\", line 774, in __bootstrap\r\n self.__bootstrap_inner()\r\n File \"C:\\Python27-32\\lib\\threading.py\", line 801, in __bootstrap_inner\r\n self.run()\r\n File \"C:\\Python27-32\\lib\\threading.py\", line 754, in run\r\n self.__target(*self.__args, **self.__kwargs)\r\n File \"c:\\Users\\linshimeng01\\.vscode\\extensions\\ms-python.python-2019.11.50794\\pythonFiles\\lib\\python\\new_ptvsd\\no_wheels\\ptvsd\\launcher/../../ptvsd\\common\\messaging.py\", line 1520, in _run_handlers\r\n handler()\r\n File \"c:\\Users\\linshimeng01\\.vscode\\extensions\\ms-python.python-2019.11.50794\\pythonFiles\\lib\\python\\new_ptvsd\\no_wheels\\ptvsd\\launcher/../../ptvsd\\common\\messaging.py\", line 809, in _handle\r\n self.describe(),\r\n```\r\nNow I installed an Python 3 interpreter, and I found the two Python 2 interpreter are all unavailable and throw the same error message above as well when I attempt to run a python file. 
How can I fix it?\r\n\r\nExtension version: 2019.11.50794\r\nVS Code version: Code 1.40.1 (8795a9889db74563ddd43eb0a897a2384129a619, 2019-11-13T16:49:35.976Z)\r\nOS version: Windows_NT x64 10.0.17763\r\n\r\n<details>\r\n<summary>System Info</summary>\r\n\r\n|Item|Value|\r\n|---|---|\r\n|CPUs|Intel(R) Core(TM) i5-9400F CPU @ 2.90GHz (6 x 2904)|\r\n|GPU Status|2d_canvas: enabled<br>flash_3d: enabled<br>flash_stage3d: enabled<br>flash_stage3d_baseline: enabled<br>gpu_compositing: enabled<br>metal: disabled_off<br>multiple_raster_threads: enabled_on<br>oop_rasterization: disabled_off<br>protected_video_decode: unavailable_off<br>rasterization: enabled<br>skia_renderer: disabled_off<br>surface_control: disabled_off<br>surface_synchronization: enabled_on<br>video_decode: enabled<br>viz_display_compositor: enabled_on<br>viz_hit_test_surface_layer: disabled_off<br>webgl: enabled<br>webgl2: enabled|\r\n|Load (avg)|undefined|\r\n|Memory (System)|15.93GB (7.79GB free)|\r\n|Process Argv||\r\n|Screen Reader|no|\r\n|VM|67%|\r\n</details>\r\n<!-- generated by issue reporter -->\n", "before_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. See LICENSE in the project root\n# for license information.\n\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\n\"\"\"Python 2/3 compatibility helpers.\n\"\"\"\n\nimport inspect\nimport itertools\nimport sys\n\nfrom ptvsd.common import fmt\n\n\ntry:\n import builtins\nexcept ImportError:\n import __builtin__ as builtins # noqa\n\ntry:\n unicode = builtins.unicode\n bytes = builtins.str\nexcept AttributeError:\n unicode = builtins.str\n bytes = builtins.bytes\n\ntry:\n xrange = builtins.xrange\nexcept AttributeError:\n xrange = builtins.range\n\ntry:\n izip = itertools.izip\nexcept AttributeError:\n izip = builtins.zip\n\ntry:\n reload = builtins.reload\nexcept AttributeError:\n from importlib import reload # noqa\n\ntry:\n import queue\nexcept ImportError:\n import Queue as queue # noqa\n\n\ndef force_unicode(s, encoding, errors=\"strict\"):\n \"\"\"Converts s to Unicode, using the provided encoding. If s is already Unicode,\n it is returned as is.\n \"\"\"\n return s.decode(encoding, errors) if isinstance(s, bytes) else unicode(s)\n\n\ndef force_bytes(s, encoding, errors=\"strict\"):\n \"\"\"Converts s to bytes, using the provided encoding. If s is already bytes,\n it is returned as is.\n\n If errors=\"strict\" and s is bytes, its encoding is verified by decoding it;\n UnicodeError is raised if it cannot be decoded.\n \"\"\"\n if isinstance(s, unicode):\n return s.encode(encoding, errors)\n else:\n s = bytes(s)\n if errors == \"strict\":\n # Return value ignored - invoked solely for verification.\n s.decode(encoding, errors)\n return s\n\n\ndef force_str(s, encoding=\"ascii\", errors=\"strict\"):\n \"\"\"Converts s to str (which is bytes on Python 2, and unicode on Python 3), using\n the provided encoding if necessary. 
If s is already str, it is returned as is.\n\n If errors=\"strict\", str is bytes, and s is str, its encoding is verified by decoding\n it; UnicodeError is raised if it cannot be decoded.\n \"\"\"\n return (force_bytes if str is bytes else force_unicode)(s, encoding, errors)\n\n\ndef force_ascii(s, errors=\"strict\"):\n \"\"\"Same as force_bytes(s, \"ascii\", errors)\n \"\"\"\n return force_bytes(s, \"ascii\", errors)\n\n\ndef force_utf8(s, errors=\"strict\"):\n \"\"\"Same as force_bytes(s, \"utf8\", errors)\n \"\"\"\n return force_bytes(s, \"utf8\", errors)\n\n\ndef filename(s, errors=\"strict\"):\n \"\"\"Same as force_unicode(s, sys.getfilesystemencoding(), errors)\n \"\"\"\n return force_unicode(s, sys.getfilesystemencoding(), errors)\n\n\ndef filename_bytes(s, errors=\"strict\"):\n \"\"\"Same as force_bytes(s, sys.getfilesystemencoding(), errors)\n \"\"\"\n return force_bytes(s, sys.getfilesystemencoding(), errors)\n\n\ndef filename_str(s, errors=\"strict\"):\n \"\"\"Same as force_str(s, sys.getfilesystemencoding(), errors)\n \"\"\"\n return force_str(s, sys.getfilesystemencoding(), errors)\n\n\ndef nameof(obj, quote=False):\n \"\"\"Returns the most descriptive name of a Python module, class, or function,\n as a Unicode string\n\n If quote=True, name is quoted with repr().\n\n Best-effort, but guaranteed to not fail - always returns something.\n \"\"\"\n\n try:\n name = obj.__qualname__\n except Exception:\n try:\n name = obj.__name__\n except Exception:\n # Fall back to raw repr(), and skip quoting.\n try:\n name = repr(obj)\n except Exception:\n return \"<unknown>\"\n else:\n quote = False\n\n if quote:\n try:\n name = repr(name)\n except Exception:\n pass\n\n return force_unicode(name, \"utf-8\", \"replace\")\n\n\ndef unicode_repr(obj):\n \"\"\"Like repr(), but guarantees that the result is Unicode even on Python 2.\n \"\"\"\n return force_unicode(repr(obj), \"ascii\")\n\n\ndef srcnameof(obj):\n \"\"\"Returns the most descriptive name of a Python module, class, or function,\n including source information (filename and linenumber), if available.\n\n Best-effort, but guaranteed to not fail - always returns something.\n \"\"\"\n\n name = nameof(obj, quote=True)\n\n # Get the source information if possible.\n try:\n src_file = filename(inspect.getsourcefile(obj), \"replace\")\n except Exception:\n pass\n else:\n name += fmt(\" (file {0!r}\", src_file)\n try:\n _, src_lineno = inspect.getsourcelines(obj)\n except Exception:\n pass\n else:\n name += fmt(\", line {0}\", src_lineno)\n name += \")\"\n\n return name\n", "path": "src/ptvsd/common/compat.py"}], "after_files": [{"content": "# Copyright (c) Microsoft Corporation. All rights reserved.\n# Licensed under the MIT License. 
See LICENSE in the project root\n# for license information.\n\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\n\"\"\"Python 2/3 compatibility helpers.\n\"\"\"\n\nimport inspect\nimport itertools\nimport sys\n\nfrom ptvsd.common import fmt\n\n\ntry:\n import __builtin__ as builtins\nexcept ImportError:\n import builtins\n\ntry:\n unicode = builtins.unicode\n bytes = builtins.str\nexcept AttributeError:\n unicode = builtins.str\n bytes = builtins.bytes\n\ntry:\n xrange = builtins.xrange\nexcept AttributeError:\n xrange = builtins.range\n\ntry:\n izip = itertools.izip\nexcept AttributeError:\n izip = builtins.zip\n\ntry:\n reload = builtins.reload\nexcept AttributeError:\n from importlib import reload # noqa\n\ntry:\n import queue\nexcept ImportError:\n import Queue as queue # noqa\n\n\ndef force_unicode(s, encoding, errors=\"strict\"):\n \"\"\"Converts s to Unicode, using the provided encoding. If s is already Unicode,\n it is returned as is.\n \"\"\"\n return s.decode(encoding, errors) if isinstance(s, bytes) else unicode(s)\n\n\ndef force_bytes(s, encoding, errors=\"strict\"):\n \"\"\"Converts s to bytes, using the provided encoding. If s is already bytes,\n it is returned as is.\n\n If errors=\"strict\" and s is bytes, its encoding is verified by decoding it;\n UnicodeError is raised if it cannot be decoded.\n \"\"\"\n if isinstance(s, unicode):\n return s.encode(encoding, errors)\n else:\n s = bytes(s)\n if errors == \"strict\":\n # Return value ignored - invoked solely for verification.\n s.decode(encoding, errors)\n return s\n\n\ndef force_str(s, encoding=\"ascii\", errors=\"strict\"):\n \"\"\"Converts s to str (which is bytes on Python 2, and unicode on Python 3), using\n the provided encoding if necessary. 
If s is already str, it is returned as is.\n\n If errors=\"strict\", str is bytes, and s is str, its encoding is verified by decoding\n it; UnicodeError is raised if it cannot be decoded.\n \"\"\"\n return (force_bytes if str is bytes else force_unicode)(s, encoding, errors)\n\n\ndef force_ascii(s, errors=\"strict\"):\n \"\"\"Same as force_bytes(s, \"ascii\", errors)\n \"\"\"\n return force_bytes(s, \"ascii\", errors)\n\n\ndef force_utf8(s, errors=\"strict\"):\n \"\"\"Same as force_bytes(s, \"utf8\", errors)\n \"\"\"\n return force_bytes(s, \"utf8\", errors)\n\n\ndef filename(s, errors=\"strict\"):\n \"\"\"Same as force_unicode(s, sys.getfilesystemencoding(), errors)\n \"\"\"\n return force_unicode(s, sys.getfilesystemencoding(), errors)\n\n\ndef filename_bytes(s, errors=\"strict\"):\n \"\"\"Same as force_bytes(s, sys.getfilesystemencoding(), errors)\n \"\"\"\n return force_bytes(s, sys.getfilesystemencoding(), errors)\n\n\ndef filename_str(s, errors=\"strict\"):\n \"\"\"Same as force_str(s, sys.getfilesystemencoding(), errors)\n \"\"\"\n return force_str(s, sys.getfilesystemencoding(), errors)\n\n\ndef nameof(obj, quote=False):\n \"\"\"Returns the most descriptive name of a Python module, class, or function,\n as a Unicode string\n\n If quote=True, name is quoted with repr().\n\n Best-effort, but guaranteed to not fail - always returns something.\n \"\"\"\n\n try:\n name = obj.__qualname__\n except Exception:\n try:\n name = obj.__name__\n except Exception:\n # Fall back to raw repr(), and skip quoting.\n try:\n name = repr(obj)\n except Exception:\n return \"<unknown>\"\n else:\n quote = False\n\n if quote:\n try:\n name = repr(name)\n except Exception:\n pass\n\n return force_unicode(name, \"utf-8\", \"replace\")\n\n\ndef unicode_repr(obj):\n \"\"\"Like repr(), but guarantees that the result is Unicode even on Python 2.\n \"\"\"\n return force_unicode(repr(obj), \"ascii\")\n\n\ndef srcnameof(obj):\n \"\"\"Returns the most descriptive name of a Python module, class, or function,\n including source information (filename and linenumber), if available.\n\n Best-effort, but guaranteed to not fail - always returns something.\n \"\"\"\n\n name = nameof(obj, quote=True)\n\n # Get the source information if possible.\n try:\n src_file = filename(inspect.getsourcefile(obj), \"replace\")\n except Exception:\n pass\n else:\n name += fmt(\" (file {0!r}\", src_file)\n try:\n _, src_lineno = inspect.getsourcelines(obj)\n except Exception:\n pass\n else:\n name += fmt(\", line {0}\", src_lineno)\n name += \")\"\n\n return name\n", "path": "src/ptvsd/common/compat.py"}]} |
gh_patches_debug_115 | rasdani/github-patches | git_diff | chainer__chainer-751 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`cupy.array_split` doesn't accept empty indices
```
>>> x=cupy.array([1])
>>> cupy.array_split(x, [])
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/unno/git/chainer/cupy/manipulation/split.py", line 32, in array_split
ret.append(ary[skip + (slice(index, size),)])
UnboundLocalError: local variable 'index' referenced before assignment
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cupy/manipulation/split.py`
Content:
```
1 import numpy
2 import six
3
4
5 def array_split(ary, indices_or_sections, axis=0):
6 """Splits an array into multiple sub arrays along a given axis.
7
8 This function is almost equivalent to :func:`cupy.split`. The only
9 difference is that this function allows an integer sections that does not
10 evenly divide the axis.
11
12 .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.array_split`
13
14 """
15 if ary.ndim <= axis:
16 raise IndexError('Axis exceeds ndim')
17 size = ary.shape[axis]
18
19 if numpy.isscalar(indices_or_sections):
20 each_size = (size - 1) // indices_or_sections + 1
21 indices = [i * each_size
22 for i in six.moves.range(1, indices_or_sections)]
23 else:
24 indices = indices_or_sections
25
26 skip = (slice(None),) * axis
27 ret = []
28 i = 0
29 for index in indices:
30 ret.append(ary[skip + (slice(i, index),)])
31 i = index
32 ret.append(ary[skip + (slice(index, size),)])
33
34 return ret
35
36
37 def dsplit(ary, indices_or_sections):
38 """Splits an array into multiple sub arrays along the third axis.
39
40 This is equivalent to ``split`` with ``axis=2``.
41
42 .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit`
43
44 """
45 if ary.ndim <= 2:
46 raise ValueError('Cannot dsplit an array with less than 3 dimensions')
47 return split(ary, indices_or_sections, 2)
48
49
50 def hsplit(ary, indices_or_sections):
51 """Splits an array into multiple sub arrays horizontally.
52
53 This is equivalent to ``split`` with ``axis=0`` if ``ary`` has one
54 dimension, and otherwise that with ``axis=1``.
55
56 .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.hsplit`
57
58 """
59 if ary.ndim == 0:
60 raise ValueError('Cannot hsplit a zero-dimensional array')
61 if ary.ndim == 1:
62 return split(ary, indices_or_sections, 0)
63 else:
64 return split(ary, indices_or_sections, 1)
65
66
67 def split(ary, indices_or_sections, axis=0):
68 """Splits an array into multiple sub arrays along a given axis.
69
70 Args:
71 ary (cupy.ndarray): Array to split.
72 indices_or_sections (int or sequence of ints): A value indicating how
73 to divide the axis. If it is an integer, then is treated as the
74 number of sections, and the axis is evenly divided. Otherwise,
75 the integers indicate indices to split at. Note that the sequence
76 on the device memory is not allowed.
77 axis (int): Axis along which the array is split.
78
79 Returns:
80 A list of sub arrays. Eacy array is a view of the corresponding input
81 array.
82
83 .. seealso:: :func:`numpy.split`
84
85 """
86 if ary.ndim <= axis:
87 raise IndexError('Axis exceeds ndim')
88 size = ary.shape[axis]
89
90 if numpy.isscalar(indices_or_sections):
91 if size % indices_or_sections != 0:
92 raise ValueError(
93 'indices_or_sections must divide the size along the axes.\n'
94 'If you want to split the array into non-equally-sized '
95 'arrays, use array_split instead.')
96 return array_split(ary, indices_or_sections, axis)
97
98
99 def vsplit(ary, indices_or_sections):
100 """Splits an array into multiple sub arrays along the first axis.
101
102 This is equivalent to ``split`` with ``axis=0``.
103
104 .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit`
105
106 """
107 if ary.ndim <= 1:
108 raise ValueError('Cannot vsplit an array with less than 2 dimensions')
109 return split(ary, indices_or_sections, 0)
110
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/cupy/manipulation/split.py b/cupy/manipulation/split.py
--- a/cupy/manipulation/split.py
+++ b/cupy/manipulation/split.py
@@ -23,6 +23,9 @@
else:
indices = indices_or_sections
+ if len(indices) == 0:
+ return [ary]
+
skip = (slice(None),) * axis
ret = []
i = 0
| {"golden_diff": "diff --git a/cupy/manipulation/split.py b/cupy/manipulation/split.py\n--- a/cupy/manipulation/split.py\n+++ b/cupy/manipulation/split.py\n@@ -23,6 +23,9 @@\n else:\n indices = indices_or_sections\n \n+ if len(indices) == 0:\n+ return [ary]\n+\n skip = (slice(None),) * axis\n ret = []\n i = 0\n", "issue": "`cupy.array_split` doesn't accept empty indecies\n```\n>>> x=cupy.array([1])\n>>> cupy.array_split(x, [])\nTraceback (most recent call last):\n File \"<stdin>\", line 1, in <module>\n File \"/home/unno/git/chainer/cupy/manipulation/split.py\", line 32, in array_split\n ret.append(ary[skip + (slice(index, size),)])\nUnboundLocalError: local variable 'index' referenced before assignment\n```\n\n", "before_files": [{"content": "import numpy\nimport six\n\n\ndef array_split(ary, indices_or_sections, axis=0):\n \"\"\"Splits an array into multiple sub arrays along a given axis.\n\n This function is almost equivalent to :func:`cupy.split`. The only\n difference is that this function allows an integer sections that does not\n evenly divide the axis.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.array_split`\n\n \"\"\"\n if ary.ndim <= axis:\n raise IndexError('Axis exceeds ndim')\n size = ary.shape[axis]\n\n if numpy.isscalar(indices_or_sections):\n each_size = (size - 1) // indices_or_sections + 1\n indices = [i * each_size\n for i in six.moves.range(1, indices_or_sections)]\n else:\n indices = indices_or_sections\n\n skip = (slice(None),) * axis\n ret = []\n i = 0\n for index in indices:\n ret.append(ary[skip + (slice(i, index),)])\n i = index\n ret.append(ary[skip + (slice(index, size),)])\n\n return ret\n\n\ndef dsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays along the third axis.\n\n This is equivalent to ``split`` with ``axis=2``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit`\n\n \"\"\"\n if ary.ndim <= 2:\n raise ValueError('Cannot dsplit an array with less than 3 dimensions')\n return split(ary, indices_or_sections, 2)\n\n\ndef hsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays horizontally.\n\n This is equivalent to ``split`` with ``axis=0`` if ``ary`` has one\n dimension, and otherwise that with ``axis=1``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.hsplit`\n\n \"\"\"\n if ary.ndim == 0:\n raise ValueError('Cannot hsplit a zero-dimensional array')\n if ary.ndim == 1:\n return split(ary, indices_or_sections, 0)\n else:\n return split(ary, indices_or_sections, 1)\n\n\ndef split(ary, indices_or_sections, axis=0):\n \"\"\"Splits an array into multiple sub arrays along a given axis.\n\n Args:\n ary (cupy.ndarray): Array to split.\n indices_or_sections (int or sequence of ints): A value indicating how\n to divide the axis. If it is an integer, then is treated as the\n number of sections, and the axis is evenly divided. Otherwise,\n the integers indicate indices to split at. Note that the sequence\n on the device memory is not allowed.\n axis (int): Axis along which the array is split.\n\n Returns:\n A list of sub arrays. Eacy array is a view of the corresponding input\n array.\n\n .. 
seealso:: :func:`numpy.split`\n\n \"\"\"\n if ary.ndim <= axis:\n raise IndexError('Axis exceeds ndim')\n size = ary.shape[axis]\n\n if numpy.isscalar(indices_or_sections):\n if size % indices_or_sections != 0:\n raise ValueError(\n 'indices_or_sections must divide the size along the axes.\\n'\n 'If you want to split the array into non-equally-sized '\n 'arrays, use array_split instead.')\n return array_split(ary, indices_or_sections, axis)\n\n\ndef vsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays along the first axis.\n\n This is equivalent to ``split`` with ``axis=0``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit`\n\n \"\"\"\n if ary.ndim <= 1:\n raise ValueError('Cannot vsplit an array with less than 2 dimensions')\n return split(ary, indices_or_sections, 0)\n", "path": "cupy/manipulation/split.py"}], "after_files": [{"content": "import numpy\nimport six\n\n\ndef array_split(ary, indices_or_sections, axis=0):\n \"\"\"Splits an array into multiple sub arrays along a given axis.\n\n This function is almost equivalent to :func:`cupy.split`. The only\n difference is that this function allows an integer sections that does not\n evenly divide the axis.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.array_split`\n\n \"\"\"\n if ary.ndim <= axis:\n raise IndexError('Axis exceeds ndim')\n size = ary.shape[axis]\n\n if numpy.isscalar(indices_or_sections):\n each_size = (size - 1) // indices_or_sections + 1\n indices = [i * each_size\n for i in six.moves.range(1, indices_or_sections)]\n else:\n indices = indices_or_sections\n\n if len(indices) == 0:\n return [ary]\n\n skip = (slice(None),) * axis\n ret = []\n i = 0\n for index in indices:\n ret.append(ary[skip + (slice(i, index),)])\n i = index\n ret.append(ary[skip + (slice(index, size),)])\n\n return ret\n\n\ndef dsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays along the third axis.\n\n This is equivalent to ``split`` with ``axis=2``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit`\n\n \"\"\"\n if ary.ndim <= 2:\n raise ValueError('Cannot dsplit an array with less than 3 dimensions')\n return split(ary, indices_or_sections, 2)\n\n\ndef hsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays horizontally.\n\n This is equivalent to ``split`` with ``axis=0`` if ``ary`` has one\n dimension, and otherwise that with ``axis=1``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.hsplit`\n\n \"\"\"\n if ary.ndim == 0:\n raise ValueError('Cannot hsplit a zero-dimensional array')\n if ary.ndim == 1:\n return split(ary, indices_or_sections, 0)\n else:\n return split(ary, indices_or_sections, 1)\n\n\ndef split(ary, indices_or_sections, axis=0):\n \"\"\"Splits an array into multiple sub arrays along a given axis.\n\n Args:\n ary (cupy.ndarray): Array to split.\n indices_or_sections (int or sequence of ints): A value indicating how\n to divide the axis. If it is an integer, then is treated as the\n number of sections, and the axis is evenly divided. Otherwise,\n the integers indicate indices to split at. Note that the sequence\n on the device memory is not allowed.\n axis (int): Axis along which the array is split.\n\n Returns:\n A list of sub arrays. Eacy array is a view of the corresponding input\n array.\n\n .. 
seealso:: :func:`numpy.split`\n\n \"\"\"\n if ary.ndim <= axis:\n raise IndexError('Axis exceeds ndim')\n size = ary.shape[axis]\n\n if numpy.isscalar(indices_or_sections):\n if size % indices_or_sections != 0:\n raise ValueError(\n 'indices_or_sections must divide the size along the axes.\\n'\n 'If you want to split the array into non-equally-sized '\n 'arrays, use array_split instead.')\n return array_split(ary, indices_or_sections, axis)\n\n\ndef vsplit(ary, indices_or_sections):\n \"\"\"Splits an array into multiple sub arrays along the first axis.\n\n This is equivalent to ``split`` with ``axis=0``.\n\n .. seealso:: :func:`cupy.split` for more detail, :func:`numpy.dsplit`\n\n \"\"\"\n if ary.ndim <= 1:\n raise ValueError('Cannot vsplit an array with less than 2 dimensions')\n return split(ary, indices_or_sections, 0)\n", "path": "cupy/manipulation/split.py"}]} |
gh_patches_debug_116 | rasdani/github-patches | git_diff | ivy-llc__ivy-13563 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
imag
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ivy/functional/frontends/torch/pointwise_ops.py`
Content:
```
1 # global
2 import ivy
3 from ivy.func_wrapper import with_unsupported_dtypes, integer_arrays_to_float
4 import ivy.functional.frontends.torch as torch_frontend
5 from ivy.functional.frontends.torch.func_wrapper import to_ivy_arrays_and_back
6
7
8 @to_ivy_arrays_and_back
9 def add(input, other, *, alpha=1, out=None):
10 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
11 return ivy.add(input, other, alpha=alpha, out=out)
12
13
14 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
15 @to_ivy_arrays_and_back
16 def tan(input, *, out=None):
17 return ivy.tan(input, out=out)
18
19
20 @to_ivy_arrays_and_back
21 def remainder(input, other, *, out=None):
22 if ivy.is_array(input) and ivy.isscalar(other):
23 other = ivy.full(input.shape, other)
24 return ivy.remainder(input, other, out=out)
25
26
27 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
28 @to_ivy_arrays_and_back
29 def atan(input, *, out=None):
30 return ivy.atan(input, out=out)
31
32
33 arctan = atan
34
35
36 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
37 @to_ivy_arrays_and_back
38 def tanh(input, *, out=None):
39 return ivy.tanh(input, out=out)
40
41
42 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
43 @to_ivy_arrays_and_back
44 def cos(input, *, out=None):
45 return ivy.cos(input, out=out)
46
47
48 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
49 @to_ivy_arrays_and_back
50 def sin(input, *, out=None):
51 return ivy.sin(input, out=out)
52
53
54 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
55 @to_ivy_arrays_and_back
56 def acos(input, *, out=None):
57 return ivy.acos(input, out=out)
58
59
60 arccos = acos
61
62
63 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
64 @to_ivy_arrays_and_back
65 def sinh(input, *, out=None):
66 return ivy.sinh(input, out=out)
67
68
69 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
70 @to_ivy_arrays_and_back
71 def acosh(input, *, out=None):
72 return ivy.acosh(input, out=out)
73
74
75 arccosh = acosh
76
77
78 @to_ivy_arrays_and_back
79 def abs(input, *, out=None):
80 return ivy.abs(input, out=out)
81
82
83 absolute = abs
84
85
86 @to_ivy_arrays_and_back
87 def cosh(input, *, out=None):
88 return ivy.cosh(input, out=out)
89
90
91 @to_ivy_arrays_and_back
92 def subtract(input, other, *, alpha=1, out=None):
93 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
94 return ivy.subtract(input, other * alpha, out=out)
95
96
97 sub = subtract
98
99
100 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
101 @to_ivy_arrays_and_back
102 def exp(input, *, out=None):
103 return ivy.exp(input, out=out)
104
105
106 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
107 @to_ivy_arrays_and_back
108 def asin(input, *, out=None):
109 return ivy.asin(input, out=out)
110
111
112 arcsin = asin
113
114
115 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
116 @to_ivy_arrays_and_back
117 def asinh(input, *, out=None):
118 return ivy.asinh(input, out=out)
119
120
121 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
122 @to_ivy_arrays_and_back
123 def atanh(input, *, out=None):
124 return ivy.atanh(input, out=out)
125
126
127 arctanh = atanh
128
129
130 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
131 @to_ivy_arrays_and_back
132 def log2(input, *, out=None):
133 return ivy.log2(input, out=out)
134
135
136 @to_ivy_arrays_and_back
137 def square(input, *, out=None):
138 return ivy.square(input, out=out)
139
140
141 @with_unsupported_dtypes({"1.11.0 and below": ("float16", "bfloat16")}, "torch")
142 @to_ivy_arrays_and_back
143 def atan2(input, other, *, out=None):
144 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
145 return ivy.atan2(input, other, out=out)
146
147
148 arctan2 = atan2
149
150
151 @to_ivy_arrays_and_back
152 def negative(input, *, out=None):
153 return ivy.negative(input, out=out)
154
155
156 @to_ivy_arrays_and_back
157 def bitwise_and(input, other, *, out=None):
158 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
159 return ivy.bitwise_and(input, other, out=out)
160
161
162 @to_ivy_arrays_and_back
163 def bitwise_not(input, *, out=None):
164 return ivy.bitwise_invert(input, out=out)
165
166
167 @to_ivy_arrays_and_back
168 def bitwise_xor(input, other, *, out=None):
169 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
170 return ivy.bitwise_xor(input, other, out=out)
171
172
173 @to_ivy_arrays_and_back
174 def bitwise_or(input, other, *, out=None):
175 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
176 return ivy.bitwise_or(input, other, out=out)
177
178
179 @to_ivy_arrays_and_back
180 def bitwise_left_shift(input, other, *, out=None):
181 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
182 return ivy.bitwise_left_shift(input, other, out=out)
183
184
185 @to_ivy_arrays_and_back
186 def bitwise_right_shift(input, other, *, out=None):
187 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
188 return ivy.bitwise_right_shift(input, other, out=out)
189
190
191 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
192 @to_ivy_arrays_and_back
193 def log10(input, *, out=None):
194 return ivy.log10(input, out=out)
195
196
197 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
198 @to_ivy_arrays_and_back
199 def trunc(input, *, out=None):
200 return ivy.trunc(input, out=out)
201
202
203 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
204 @to_ivy_arrays_and_back
205 def sqrt(input, *, out=None):
206 return ivy.sqrt(input, out=out)
207
208
209 @to_ivy_arrays_and_back
210 def sign(input, *, out=None):
211 return ivy.sign(input, out=out)
212
213
214 @to_ivy_arrays_and_back
215 def logical_not(input, *, out=None):
216 return ivy.logical_not(input, out=out)
217
218
219 @to_ivy_arrays_and_back
220 def logical_and(input, other, *, out=None):
221 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
222 return ivy.logical_and(input, other, out=out)
223
224
225 @to_ivy_arrays_and_back
226 def logical_or(input, other, *, out=None):
227 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
228 return ivy.logical_or(input, other, out=out)
229
230
231 @to_ivy_arrays_and_back
232 def logical_xor(input, other, *, out=None):
233 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
234 return ivy.logical_xor(input, other, out=out)
235
236
237 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
238 @to_ivy_arrays_and_back
239 def round(input, *, decimals=0, out=None):
240 m = ivy.full(input.shape, 10**decimals)
241 upscale = ivy.multiply(input, m, out=out)
242 rounded = ivy.round(upscale, out=out)
243 return ivy.divide(rounded, m, out=out)
244
245
246 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
247 @to_ivy_arrays_and_back
248 def ceil(input, *, out=None):
249 return ivy.ceil(input, out=out)
250
251
252 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
253 @to_ivy_arrays_and_back
254 def clamp(input, min=None, max=None, *, out=None):
255 ivy.utils.assertions.check_all_or_any_fn(
256 min,
257 max,
258 fn=ivy.exists,
259 type="any",
260 limit=[1, 2],
261 message="at most one of min or max can be None",
262 )
263 input = ivy.array(input)
264 if min is None:
265 return ivy.minimum(input, max, out=out)
266 if max is None:
267 return ivy.maximum(input, min, out=out)
268 return ivy.clip(input, min, max, out=out)
269
270
271 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
272 @to_ivy_arrays_and_back
273 def clip(input, min=None, max=None, *, out=None):
274 ivy.utils.assertions.check_all_or_any_fn(
275 min,
276 max,
277 fn=ivy.exists,
278 type="any",
279 limit=[1, 2],
280 message="at most one of min or max can be None",
281 )
282 input = ivy.array(input)
283 if min is None:
284 return ivy.minimum(input, max, out=out)
285 if max is None:
286 return ivy.maximum(input, min, out=out)
287 return ivy.clip(input, min, max, out=out)
288
289
290 @to_ivy_arrays_and_back
291 def mul(input, other, *, out=None):
292 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
293 return ivy.multiply(input, other, out=out)
294
295
296 multiply = mul
297
298
299 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
300 @to_ivy_arrays_and_back
301 def div(input, other, *, rounding_mode=None, out=None):
302 if rounding_mode is not None:
303 input, other = torch_frontend.promote_types_of_torch_inputs(input, other)
304 promoted = input.dtype
305 if rounding_mode == "trunc":
306 return ivy.trunc_divide(input, other, out=out).astype(promoted)
307 else:
308 return ivy.floor_divide(input, other, out=out).astype(promoted)
309 else:
310 return ivy.divide(input, other, out=out)
311
312
313 @to_ivy_arrays_and_back
314 def reciprocal(input, *, out=None):
315 return ivy.reciprocal(input)
316
317
318 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
319 @to_ivy_arrays_and_back
320 def floor(input, *, out=None):
321 return ivy.floor(input, out=out)
322
323
324 @to_ivy_arrays_and_back
325 def flipud(input):
326 return ivy.flipud(input)
327
328
329 @integer_arrays_to_float
330 @to_ivy_arrays_and_back
331 def deg2rad(input, *, out=None):
332 return ivy.array(input * 3.1416 / 180, out=out)
333
334
335 arcsinh = asinh
336
337
338 divide = div
339
340
341 @to_ivy_arrays_and_back
342 def true_divide(input, other, *, out=None):
343 return ivy.divide(input, other, out=out)
344
345
346 @to_ivy_arrays_and_back
347 def floor_divide(input, other, *, out=None):
348 return ivy.floor_divide(input, other, out=out)
349
350
351 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
352 @to_ivy_arrays_and_back
353 def log1p(input, *, out=None):
354 return ivy.log1p(input, out=out)
355
356
357 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
358 def addcdiv(input, tensor1, tensor2, *, value=1, out=None):
359 return ivy.add(input, ivy.multiply(value, ivy.divide(tensor1, tensor2)), out=out)
360
361
362 @to_ivy_arrays_and_back
363 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
364 def addcmul(input, tensor1, tensor2, *, value=1, out=None):
365 return ivy.add(input, ivy.multiply(value, ivy.multiply(tensor1, tensor2)), out=out)
366
367
368 @to_ivy_arrays_and_back
369 def pow(input, exponent, *, out=None):
370 return ivy.pow(input, exponent, out=out)
371
372
373 @to_ivy_arrays_and_back
374 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
375 def log(input, *, out=None):
376 return ivy.log(input, out=out)
377
378
379 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
380 def logaddexp(x1, x2, out=None):
381 return ivy.logaddexp(x1, x2, out=out)
382
383
384 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
385 @to_ivy_arrays_and_back
386 def exp2(input, out=None):
387 return ivy.exp2(input, out=out)
388
389
390 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
391 @to_ivy_arrays_and_back
392 def rsqrt(input, *, out=None):
393 return ivy.reciprocal(ivy.sqrt(input), out=out)
394
395
396 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
397 @to_ivy_arrays_and_back
398 def expm1(input, out=None):
399 return ivy.expm1(input, out=out)
400
401
402 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
403 def logaddexp2(x1, x2, out=None):
404 return ivy.logaddexp2(x1, x2, out=out)
405
406
407 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
408 def i0(x, out=None):
409 return ivy.i0(x, out=out)
410
411
412 def rad2deg(input, *, out=None):
413 return ivy.rad2deg(input, out=out)
414
415
416 @to_ivy_arrays_and_back
417 def positive(input, *, out=None):
418 return ivy.positive(input, out=out)
419
420
421 @to_ivy_arrays_and_back
422 def frac(input, *, out=None):
423 return input - ivy.sign(input) * ivy.floor(ivy.abs(input))
424
425
426 @with_unsupported_dtypes({"2.9.0 and below": ("bfloat16",)}, "tensorflow")
427 @to_ivy_arrays_and_back
428 def xlogy(input, other, *, out=None):
429 return ivy.xlogy(input, other, out=out)
430
431
432 @to_ivy_arrays_and_back
433 def copysign(input, other, *, out=None):
434 return ivy.copysign(input, other, out=out)
435
436
437 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
438 @to_ivy_arrays_and_back
439 def sinc(input, *, out=None):
440 return ivy.sinc(input, out=out)
441
442
443 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
444 @to_ivy_arrays_and_back
445 def hypot(input, other, *, out=None):
446 return ivy.hypot(input, other, out=out)
447
448
449 @with_unsupported_dtypes({"1.11.0 and below": ("float16",)}, "torch")
450 @to_ivy_arrays_and_back
451 def sigmoid(input, *, out=None):
452 return ivy.sigmoid(input, out=out)
453
454
455 @with_unsupported_dtypes({"1.11.0 and below": ("float16", "bfloat16")}, "torch")
456 @to_ivy_arrays_and_back
457 def lerp(input, end, weight, *, out=None):
458 return ivy.add(input, ivy.multiply(weight, ivy.subtract(end, input)), out=out)
459
460
461 @to_ivy_arrays_and_back
462 def signbit(input, *, out=None):
463 return ivy.signbit(input, out=out)
464
465
466 @to_ivy_arrays_and_back
467 def angle(input, *, out=None):
468 return ivy.angle(input, out=out)
469
470
471 @to_ivy_arrays_and_back
472 def arctan(input, *, out=None):
473 return ivy.arctan(input, out=out)
474
475
476 @to_ivy_arrays_and_back
477 def conj_physical(input, *, out=None):
478 return ivy.conj_physical(input, out=out)
479
480
481 @to_ivy_arrays_and_back
482 def nextafter(input, *, out=None):
483 return ivy.nextafter(input, out=out)
484
485
486 @with_unsupported_dtypes({"1.11.0 and below": ("float16", "bfloat16")}, "torch")
487 @to_ivy_arrays_and_back
488 def fmod(x1, x2, out=None):
489 return ivy.fmod(x1, x2, out=out)
490
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ivy/functional/frontends/torch/pointwise_ops.py b/ivy/functional/frontends/torch/pointwise_ops.py
--- a/ivy/functional/frontends/torch/pointwise_ops.py
+++ b/ivy/functional/frontends/torch/pointwise_ops.py
@@ -487,3 +487,8 @@
@to_ivy_arrays_and_back
def fmod(x1, x2, out=None):
return ivy.fmod(x1, x2, out=out)
+
+
+@to_ivy_arrays_and_back
+def imag(input):
+ return ivy.imag(input)
| {"golden_diff": "diff --git a/ivy/functional/frontends/torch/pointwise_ops.py b/ivy/functional/frontends/torch/pointwise_ops.py\n--- a/ivy/functional/frontends/torch/pointwise_ops.py\n+++ b/ivy/functional/frontends/torch/pointwise_ops.py\n@@ -487,3 +487,8 @@\n @to_ivy_arrays_and_back\n def fmod(x1, x2, out=None):\n return ivy.fmod(x1, x2, out=out)\n+\n+\n+@to_ivy_arrays_and_back\n+def imag(input):\n+ return ivy.imag(input)\n", "issue": "imag\n\n", "before_files": [{"content": "# global\nimport ivy\nfrom ivy.func_wrapper import with_unsupported_dtypes, integer_arrays_to_float\nimport ivy.functional.frontends.torch as torch_frontend\nfrom ivy.functional.frontends.torch.func_wrapper import to_ivy_arrays_and_back\n\n\n@to_ivy_arrays_and_back\ndef add(input, other, *, alpha=1, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.add(input, other, alpha=alpha, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef tan(input, *, out=None):\n return ivy.tan(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef remainder(input, other, *, out=None):\n if ivy.is_array(input) and ivy.isscalar(other):\n other = ivy.full(input.shape, other)\n return ivy.remainder(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef atan(input, *, out=None):\n return ivy.atan(input, out=out)\n\n\narctan = atan\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef tanh(input, *, out=None):\n return ivy.tanh(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef cos(input, *, out=None):\n return ivy.cos(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sin(input, *, out=None):\n return ivy.sin(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef acos(input, *, out=None):\n return ivy.acos(input, out=out)\n\n\narccos = acos\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sinh(input, *, out=None):\n return ivy.sinh(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef acosh(input, *, out=None):\n return ivy.acosh(input, out=out)\n\n\narccosh = acosh\n\n\n@to_ivy_arrays_and_back\ndef abs(input, *, out=None):\n return ivy.abs(input, out=out)\n\n\nabsolute = abs\n\n\n@to_ivy_arrays_and_back\ndef cosh(input, *, out=None):\n return ivy.cosh(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef subtract(input, other, *, alpha=1, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.subtract(input, other * alpha, out=out)\n\n\nsub = subtract\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef exp(input, *, out=None):\n return ivy.exp(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef asin(input, *, out=None):\n return ivy.asin(input, out=out)\n\n\narcsin = asin\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef asinh(input, *, out=None):\n return ivy.asinh(input, 
out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef atanh(input, *, out=None):\n return ivy.atanh(input, out=out)\n\n\narctanh = atanh\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef log2(input, *, out=None):\n return ivy.log2(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef square(input, *, out=None):\n return ivy.square(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\", \"bfloat16\")}, \"torch\")\n@to_ivy_arrays_and_back\ndef atan2(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.atan2(input, other, out=out)\n\n\narctan2 = atan2\n\n\n@to_ivy_arrays_and_back\ndef negative(input, *, out=None):\n return ivy.negative(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_and(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_and(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_not(input, *, out=None):\n return ivy.bitwise_invert(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_xor(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_xor(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_or(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_or(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_left_shift(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_left_shift(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_right_shift(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_right_shift(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef log10(input, *, out=None):\n return ivy.log10(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef trunc(input, *, out=None):\n return ivy.trunc(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sqrt(input, *, out=None):\n return ivy.sqrt(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef sign(input, *, out=None):\n return ivy.sign(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef logical_not(input, *, out=None):\n return ivy.logical_not(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef logical_and(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.logical_and(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef logical_or(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.logical_or(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef logical_xor(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.logical_xor(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef round(input, *, decimals=0, out=None):\n m = ivy.full(input.shape, 10**decimals)\n upscale = ivy.multiply(input, m, out=out)\n 
rounded = ivy.round(upscale, out=out)\n return ivy.divide(rounded, m, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef ceil(input, *, out=None):\n return ivy.ceil(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef clamp(input, min=None, max=None, *, out=None):\n ivy.utils.assertions.check_all_or_any_fn(\n min,\n max,\n fn=ivy.exists,\n type=\"any\",\n limit=[1, 2],\n message=\"at most one of min or max can be None\",\n )\n input = ivy.array(input)\n if min is None:\n return ivy.minimum(input, max, out=out)\n if max is None:\n return ivy.maximum(input, min, out=out)\n return ivy.clip(input, min, max, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef clip(input, min=None, max=None, *, out=None):\n ivy.utils.assertions.check_all_or_any_fn(\n min,\n max,\n fn=ivy.exists,\n type=\"any\",\n limit=[1, 2],\n message=\"at most one of min or max can be None\",\n )\n input = ivy.array(input)\n if min is None:\n return ivy.minimum(input, max, out=out)\n if max is None:\n return ivy.maximum(input, min, out=out)\n return ivy.clip(input, min, max, out=out)\n\n\n@to_ivy_arrays_and_back\ndef mul(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.multiply(input, other, out=out)\n\n\nmultiply = mul\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef div(input, other, *, rounding_mode=None, out=None):\n if rounding_mode is not None:\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n promoted = input.dtype\n if rounding_mode == \"trunc\":\n return ivy.trunc_divide(input, other, out=out).astype(promoted)\n else:\n return ivy.floor_divide(input, other, out=out).astype(promoted)\n else:\n return ivy.divide(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef reciprocal(input, *, out=None):\n return ivy.reciprocal(input)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef floor(input, *, out=None):\n return ivy.floor(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef flipud(input):\n return ivy.flipud(input)\n\n\n@integer_arrays_to_float\n@to_ivy_arrays_and_back\ndef deg2rad(input, *, out=None):\n return ivy.array(input * 3.1416 / 180, out=out)\n\n\narcsinh = asinh\n\n\ndivide = div\n\n\n@to_ivy_arrays_and_back\ndef true_divide(input, other, *, out=None):\n return ivy.divide(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef floor_divide(input, other, *, out=None):\n return ivy.floor_divide(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef log1p(input, *, out=None):\n return ivy.log1p(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef addcdiv(input, tensor1, tensor2, *, value=1, out=None):\n return ivy.add(input, ivy.multiply(value, ivy.divide(tensor1, tensor2)), out=out)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef addcmul(input, tensor1, tensor2, *, value=1, out=None):\n return ivy.add(input, ivy.multiply(value, ivy.multiply(tensor1, tensor2)), out=out)\n\n\n@to_ivy_arrays_and_back\ndef pow(input, exponent, *, out=None):\n return ivy.pow(input, exponent, 
out=out)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef log(input, *, out=None):\n return ivy.log(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef logaddexp(x1, x2, out=None):\n return ivy.logaddexp(x1, x2, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef exp2(input, out=None):\n return ivy.exp2(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef rsqrt(input, *, out=None):\n return ivy.reciprocal(ivy.sqrt(input), out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef expm1(input, out=None):\n return ivy.expm1(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef logaddexp2(x1, x2, out=None):\n return ivy.logaddexp2(x1, x2, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef i0(x, out=None):\n return ivy.i0(x, out=out)\n\n\ndef rad2deg(input, *, out=None):\n return ivy.rad2deg(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef positive(input, *, out=None):\n return ivy.positive(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef frac(input, *, out=None):\n return input - ivy.sign(input) * ivy.floor(ivy.abs(input))\n\n\n@with_unsupported_dtypes({\"2.9.0 and below\": (\"bfloat16\",)}, \"tensorflow\")\n@to_ivy_arrays_and_back\ndef xlogy(input, other, *, out=None):\n return ivy.xlogy(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef copysign(input, other, *, out=None):\n return ivy.copysign(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sinc(input, *, out=None):\n return ivy.sinc(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef hypot(input, other, *, out=None):\n return ivy.hypot(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sigmoid(input, *, out=None):\n return ivy.sigmoid(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\", \"bfloat16\")}, \"torch\")\n@to_ivy_arrays_and_back\ndef lerp(input, end, weight, *, out=None):\n return ivy.add(input, ivy.multiply(weight, ivy.subtract(end, input)), out=out)\n\n\n@to_ivy_arrays_and_back\ndef signbit(input, *, out=None):\n return ivy.signbit(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef angle(input, *, out=None):\n return ivy.angle(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef arctan(input, *, out=None):\n return ivy.arctan(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef conj_physical(input, *, out=None):\n return ivy.conj_physical(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef nextafter(input, *, out=None):\n return ivy.nextafter(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\", \"bfloat16\")}, \"torch\")\n@to_ivy_arrays_and_back\ndef fmod(x1, x2, out=None):\n return ivy.fmod(x1, x2, out=out)\n", "path": "ivy/functional/frontends/torch/pointwise_ops.py"}], "after_files": [{"content": "# global\nimport ivy\nfrom ivy.func_wrapper import with_unsupported_dtypes, integer_arrays_to_float\nimport ivy.functional.frontends.torch as torch_frontend\nfrom ivy.functional.frontends.torch.func_wrapper import 
to_ivy_arrays_and_back\n\n\n@to_ivy_arrays_and_back\ndef add(input, other, *, alpha=1, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.add(input, other, alpha=alpha, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef tan(input, *, out=None):\n return ivy.tan(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef remainder(input, other, *, out=None):\n if ivy.is_array(input) and ivy.isscalar(other):\n other = ivy.full(input.shape, other)\n return ivy.remainder(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef atan(input, *, out=None):\n return ivy.atan(input, out=out)\n\n\narctan = atan\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef tanh(input, *, out=None):\n return ivy.tanh(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef cos(input, *, out=None):\n return ivy.cos(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sin(input, *, out=None):\n return ivy.sin(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef acos(input, *, out=None):\n return ivy.acos(input, out=out)\n\n\narccos = acos\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sinh(input, *, out=None):\n return ivy.sinh(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef acosh(input, *, out=None):\n return ivy.acosh(input, out=out)\n\n\narccosh = acosh\n\n\n@to_ivy_arrays_and_back\ndef abs(input, *, out=None):\n return ivy.abs(input, out=out)\n\n\nabsolute = abs\n\n\n@to_ivy_arrays_and_back\ndef cosh(input, *, out=None):\n return ivy.cosh(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef subtract(input, other, *, alpha=1, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.subtract(input, other * alpha, out=out)\n\n\nsub = subtract\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef exp(input, *, out=None):\n return ivy.exp(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef asin(input, *, out=None):\n return ivy.asin(input, out=out)\n\n\narcsin = asin\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef asinh(input, *, out=None):\n return ivy.asinh(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef atanh(input, *, out=None):\n return ivy.atanh(input, out=out)\n\n\narctanh = atanh\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef log2(input, *, out=None):\n return ivy.log2(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef square(input, *, out=None):\n return ivy.square(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\", \"bfloat16\")}, \"torch\")\n@to_ivy_arrays_and_back\ndef atan2(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.atan2(input, other, 
out=out)\n\n\narctan2 = atan2\n\n\n@to_ivy_arrays_and_back\ndef negative(input, *, out=None):\n return ivy.negative(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_and(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_and(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_not(input, *, out=None):\n return ivy.bitwise_invert(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_xor(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_xor(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_or(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_or(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_left_shift(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_left_shift(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef bitwise_right_shift(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.bitwise_right_shift(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef log10(input, *, out=None):\n return ivy.log10(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef trunc(input, *, out=None):\n return ivy.trunc(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sqrt(input, *, out=None):\n return ivy.sqrt(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef sign(input, *, out=None):\n return ivy.sign(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef logical_not(input, *, out=None):\n return ivy.logical_not(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef logical_and(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.logical_and(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef logical_or(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.logical_or(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef logical_xor(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.logical_xor(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef round(input, *, decimals=0, out=None):\n m = ivy.full(input.shape, 10**decimals)\n upscale = ivy.multiply(input, m, out=out)\n rounded = ivy.round(upscale, out=out)\n return ivy.divide(rounded, m, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef ceil(input, *, out=None):\n return ivy.ceil(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef clamp(input, min=None, max=None, *, out=None):\n ivy.utils.assertions.check_all_or_any_fn(\n min,\n max,\n fn=ivy.exists,\n type=\"any\",\n limit=[1, 2],\n message=\"at most one of min or max can be None\",\n )\n input = ivy.array(input)\n if min is None:\n return ivy.minimum(input, max, out=out)\n if max is None:\n return ivy.maximum(input, min, out=out)\n return ivy.clip(input, 
min, max, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef clip(input, min=None, max=None, *, out=None):\n ivy.utils.assertions.check_all_or_any_fn(\n min,\n max,\n fn=ivy.exists,\n type=\"any\",\n limit=[1, 2],\n message=\"at most one of min or max can be None\",\n )\n input = ivy.array(input)\n if min is None:\n return ivy.minimum(input, max, out=out)\n if max is None:\n return ivy.maximum(input, min, out=out)\n return ivy.clip(input, min, max, out=out)\n\n\n@to_ivy_arrays_and_back\ndef mul(input, other, *, out=None):\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n return ivy.multiply(input, other, out=out)\n\n\nmultiply = mul\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef div(input, other, *, rounding_mode=None, out=None):\n if rounding_mode is not None:\n input, other = torch_frontend.promote_types_of_torch_inputs(input, other)\n promoted = input.dtype\n if rounding_mode == \"trunc\":\n return ivy.trunc_divide(input, other, out=out).astype(promoted)\n else:\n return ivy.floor_divide(input, other, out=out).astype(promoted)\n else:\n return ivy.divide(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef reciprocal(input, *, out=None):\n return ivy.reciprocal(input)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef floor(input, *, out=None):\n return ivy.floor(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef flipud(input):\n return ivy.flipud(input)\n\n\n@integer_arrays_to_float\n@to_ivy_arrays_and_back\ndef deg2rad(input, *, out=None):\n return ivy.array(input * 3.1416 / 180, out=out)\n\n\narcsinh = asinh\n\n\ndivide = div\n\n\n@to_ivy_arrays_and_back\ndef true_divide(input, other, *, out=None):\n return ivy.divide(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef floor_divide(input, other, *, out=None):\n return ivy.floor_divide(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef log1p(input, *, out=None):\n return ivy.log1p(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef addcdiv(input, tensor1, tensor2, *, value=1, out=None):\n return ivy.add(input, ivy.multiply(value, ivy.divide(tensor1, tensor2)), out=out)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef addcmul(input, tensor1, tensor2, *, value=1, out=None):\n return ivy.add(input, ivy.multiply(value, ivy.multiply(tensor1, tensor2)), out=out)\n\n\n@to_ivy_arrays_and_back\ndef pow(input, exponent, *, out=None):\n return ivy.pow(input, exponent, out=out)\n\n\n@to_ivy_arrays_and_back\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef log(input, *, out=None):\n return ivy.log(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef logaddexp(x1, x2, out=None):\n return ivy.logaddexp(x1, x2, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef exp2(input, out=None):\n return ivy.exp2(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef rsqrt(input, *, out=None):\n return ivy.reciprocal(ivy.sqrt(input), out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, 
\"torch\")\n@to_ivy_arrays_and_back\ndef expm1(input, out=None):\n return ivy.expm1(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef logaddexp2(x1, x2, out=None):\n return ivy.logaddexp2(x1, x2, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\ndef i0(x, out=None):\n return ivy.i0(x, out=out)\n\n\ndef rad2deg(input, *, out=None):\n return ivy.rad2deg(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef positive(input, *, out=None):\n return ivy.positive(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef frac(input, *, out=None):\n return input - ivy.sign(input) * ivy.floor(ivy.abs(input))\n\n\n@with_unsupported_dtypes({\"2.9.0 and below\": (\"bfloat16\",)}, \"tensorflow\")\n@to_ivy_arrays_and_back\ndef xlogy(input, other, *, out=None):\n return ivy.xlogy(input, other, out=out)\n\n\n@to_ivy_arrays_and_back\ndef copysign(input, other, *, out=None):\n return ivy.copysign(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sinc(input, *, out=None):\n return ivy.sinc(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef hypot(input, other, *, out=None):\n return ivy.hypot(input, other, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\",)}, \"torch\")\n@to_ivy_arrays_and_back\ndef sigmoid(input, *, out=None):\n return ivy.sigmoid(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\", \"bfloat16\")}, \"torch\")\n@to_ivy_arrays_and_back\ndef lerp(input, end, weight, *, out=None):\n return ivy.add(input, ivy.multiply(weight, ivy.subtract(end, input)), out=out)\n\n\n@to_ivy_arrays_and_back\ndef signbit(input, *, out=None):\n return ivy.signbit(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef angle(input, *, out=None):\n return ivy.angle(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef arctan(input, *, out=None):\n return ivy.arctan(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef conj_physical(input, *, out=None):\n return ivy.conj_physical(input, out=out)\n\n\n@to_ivy_arrays_and_back\ndef nextafter(input, *, out=None):\n return ivy.nextafter(input, out=out)\n\n\n@with_unsupported_dtypes({\"1.11.0 and below\": (\"float16\", \"bfloat16\")}, \"torch\")\n@to_ivy_arrays_and_back\ndef fmod(x1, x2, out=None):\n return ivy.fmod(x1, x2, out=out)\n\n\n@to_ivy_arrays_and_back\ndef imag(input):\n return ivy.imag(input)\n", "path": "ivy/functional/frontends/torch/pointwise_ops.py"}]} |
gh_patches_debug_117 | rasdani/github-patches | git_diff | larq__larq-80 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add docs on how to define your own quantizer
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `larq/quantizers.py`
Content:
```
1 """A Quantizer defines the way of transforming a full precision input to a
2 quantized output and the pseudo-gradient method used for the backwards pass."""
3
4 import tensorflow as tf
5 from larq import utils
6
7
8 def sign(x):
9 """A sign function that will never be zero"""
10 return tf.sign(tf.sign(x) + 0.1)
11
12
13 @tf.custom_gradient
14 def _binarize_with_identity_grad(x):
15 def grad(dy):
16 return dy
17
18 return sign(x), grad
19
20
21 @tf.custom_gradient
22 def _binarize_with_weighted_grad(x):
23 def grad(dy):
24 return (1 - tf.abs(x)) * 2 * dy
25
26 return sign(x), grad
27
28
29 @utils.register_keras_custom_object
30 def ste_sign(x):
31 r"""
32 Sign binarization function.
33 \\[
34 q(x) = \begin{cases}
35 -1 & x < 0 \\\
36 1 & x \geq 0
37 \end{cases}
38 \\]
39
40 The gradient is estimated using the Straight-Through Estimator
41 (essentially the binarization is replaced by a clipped identity on the
42 backward pass).
43 \\[\frac{\partial q(x)}{\partial x} = \begin{cases}
44 1 & \left|x\right| \leq 1 \\\
45 0 & \left|x\right| > 1
46 \end{cases}\\]
47
48 # Arguments
49 x: Input tensor.
50
51 # Returns
52 Binarized tensor.
53
54 # References
55 - [Binarized Neural Networks: Training Deep Neural Networks with Weights and
56 Activations Constrained to +1 or -1](http://arxiv.org/abs/1602.02830)
57 """
58
59 x = tf.clip_by_value(x, -1, 1)
60
61 return _binarize_with_identity_grad(x)
62
63
64 @utils.register_keras_custom_object
65 def magnitude_aware_sign(x):
66 r"""
67 Magnitude-aware sign for birealnet.
68
69
70 # Arguments
71 x: Input tensor
72
73 # Returns
74 Scaled binarized tensor (with values in $\{-a, a\}$, where $a$ is a float).
75
76 # References
77 - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved
78 Representational Capability and Advanced Training
79 Algorithm](https://arxiv.org/abs/1808.00278)
80
81 """
82 scale_factor = tf.stop_gradient(
83 tf.reduce_mean(tf.abs(x), axis=list(range(len(x.shape) - 1)))
84 )
85 return scale_factor * ste_sign(x)
86
87
88 @utils.register_keras_custom_object
89 def approx_sign(x):
90 r"""
91 Sign binarization function.
92 \\[
93 q(x) = \begin{cases}
94 -1 & x < 0 \\\
95 1 & x \geq 0
96 \end{cases}
97 \\]
98
99 The gradient is estimated using the ApproxSign method.
100 \\[\frac{\partial q(x)}{\partial x} = \begin{cases}
101 (2 - 2 \left|x\right|) & \left|x\right| \leq 1 \\\
102 0 & \left|x\right| > 1
103 \end{cases}
104 \\]
105
106 # Arguments
107 x: Input tensor.
108
109 # Returns
110 Binarized tensor.
111
112 # References
113 - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved
114 Representational Capability and Advanced
115 Training Algorithm](http://arxiv.org/abs/1808.00278)
116 """
117
118 x = tf.clip_by_value(x, -1, 1)
119
120 return _binarize_with_weighted_grad(x)
121
122
123 def serialize(initializer):
124 return tf.keras.utils.serialize_keras_object(initializer)
125
126
127 def deserialize(name, custom_objects=None):
128 return tf.keras.utils.deserialize_keras_object(
129 name,
130 module_objects=globals(),
131 custom_objects=custom_objects,
132 printable_module_name="quantization function",
133 )
134
135
136 def get(identifier):
137 if identifier is None:
138 return None
139 if isinstance(identifier, str):
140 return deserialize(str(identifier))
141 if callable(identifier):
142 return identifier
143 raise ValueError(
144 f"Could not interpret quantization function identifier: {identifier}"
145 )
146
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/larq/quantizers.py b/larq/quantizers.py
--- a/larq/quantizers.py
+++ b/larq/quantizers.py
@@ -64,7 +64,7 @@
@utils.register_keras_custom_object
def magnitude_aware_sign(x):
r"""
- Magnitude-aware sign for birealnet.
+ Magnitude-aware sign for Bi-Real Net.
# Arguments
| {"golden_diff": "diff --git a/larq/quantizers.py b/larq/quantizers.py\n--- a/larq/quantizers.py\n+++ b/larq/quantizers.py\n@@ -64,7 +64,7 @@\n @utils.register_keras_custom_object\n def magnitude_aware_sign(x):\n r\"\"\"\n- Magnitude-aware sign for birealnet.\n+ Magnitude-aware sign for Bi-Real Net.\n \n \n # Arguments\n", "issue": "Add docs on how to define your own quantizer\n\n", "before_files": [{"content": "\"\"\"A Quantizer defines the way of transforming a full precision input to a\nquantized output and the pseudo-gradient method used for the backwards pass.\"\"\"\n\nimport tensorflow as tf\nfrom larq import utils\n\n\ndef sign(x):\n \"\"\"A sign function that will never be zero\"\"\"\n return tf.sign(tf.sign(x) + 0.1)\n\n\[email protected]_gradient\ndef _binarize_with_identity_grad(x):\n def grad(dy):\n return dy\n\n return sign(x), grad\n\n\[email protected]_gradient\ndef _binarize_with_weighted_grad(x):\n def grad(dy):\n return (1 - tf.abs(x)) * 2 * dy\n\n return sign(x), grad\n\n\[email protected]_keras_custom_object\ndef ste_sign(x):\n r\"\"\"\n Sign binarization function.\n \\\\[\n q(x) = \\begin{cases}\n -1 & x < 0 \\\\\\\n 1 & x \\geq 0\n \\end{cases}\n \\\\]\n\n The gradient is estimated using the Straight-Through Estimator\n (essentially the binarization is replaced by a clipped identity on the\n backward pass).\n \\\\[\\frac{\\partial q(x)}{\\partial x} = \\begin{cases}\n 1 & \\left|x\\right| \\leq 1 \\\\\\\n 0 & \\left|x\\right| > 1\n \\end{cases}\\\\]\n\n # Arguments\n x: Input tensor.\n\n # Returns\n Binarized tensor.\n\n # References\n - [Binarized Neural Networks: Training Deep Neural Networks with Weights and\n Activations Constrained to +1 or -1](http://arxiv.org/abs/1602.02830)\n \"\"\"\n\n x = tf.clip_by_value(x, -1, 1)\n\n return _binarize_with_identity_grad(x)\n\n\[email protected]_keras_custom_object\ndef magnitude_aware_sign(x):\n r\"\"\"\n Magnitude-aware sign for birealnet.\n\n\n # Arguments\n x: Input tensor\n\n # Returns\n Scaled binarized tensor (with values in $\\{-a, a\\}$, where $a$ is a float).\n\n # References\n - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved\n Representational Capability and Advanced Training\n Algorithm](https://arxiv.org/abs/1808.00278)\n\n \"\"\"\n scale_factor = tf.stop_gradient(\n tf.reduce_mean(tf.abs(x), axis=list(range(len(x.shape) - 1)))\n )\n return scale_factor * ste_sign(x)\n\n\[email protected]_keras_custom_object\ndef approx_sign(x):\n r\"\"\"\n Sign binarization function.\n \\\\[\n q(x) = \\begin{cases}\n -1 & x < 0 \\\\\\\n 1 & x \\geq 0\n \\end{cases}\n \\\\]\n\n The gradient is estimated using the ApproxSign method.\n \\\\[\\frac{\\partial q(x)}{\\partial x} = \\begin{cases}\n (2 - 2 \\left|x\\right|) & \\left|x\\right| \\leq 1 \\\\\\\n 0 & \\left|x\\right| > 1\n \\end{cases}\n \\\\]\n\n # Arguments\n x: Input tensor.\n\n # Returns\n Binarized tensor.\n\n # References\n - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved\n Representational Capability and Advanced\n Training Algorithm](http://arxiv.org/abs/1808.00278)\n \"\"\"\n\n x = tf.clip_by_value(x, -1, 1)\n\n return _binarize_with_weighted_grad(x)\n\n\ndef serialize(initializer):\n return tf.keras.utils.serialize_keras_object(initializer)\n\n\ndef deserialize(name, custom_objects=None):\n return tf.keras.utils.deserialize_keras_object(\n name,\n module_objects=globals(),\n custom_objects=custom_objects,\n printable_module_name=\"quantization function\",\n )\n\n\ndef get(identifier):\n if identifier is None:\n return 
None\n if isinstance(identifier, str):\n return deserialize(str(identifier))\n if callable(identifier):\n return identifier\n raise ValueError(\n f\"Could not interpret quantization function identifier: {identifier}\"\n )\n", "path": "larq/quantizers.py"}], "after_files": [{"content": "\"\"\"A Quantizer defines the way of transforming a full precision input to a\nquantized output and the pseudo-gradient method used for the backwards pass.\"\"\"\n\nimport tensorflow as tf\nfrom larq import utils\n\n\ndef sign(x):\n \"\"\"A sign function that will never be zero\"\"\"\n return tf.sign(tf.sign(x) + 0.1)\n\n\[email protected]_gradient\ndef _binarize_with_identity_grad(x):\n def grad(dy):\n return dy\n\n return sign(x), grad\n\n\[email protected]_gradient\ndef _binarize_with_weighted_grad(x):\n def grad(dy):\n return (1 - tf.abs(x)) * 2 * dy\n\n return sign(x), grad\n\n\[email protected]_keras_custom_object\ndef ste_sign(x):\n r\"\"\"\n Sign binarization function.\n \\\\[\n q(x) = \\begin{cases}\n -1 & x < 0 \\\\\\\n 1 & x \\geq 0\n \\end{cases}\n \\\\]\n\n The gradient is estimated using the Straight-Through Estimator\n (essentially the binarization is replaced by a clipped identity on the\n backward pass).\n \\\\[\\frac{\\partial q(x)}{\\partial x} = \\begin{cases}\n 1 & \\left|x\\right| \\leq 1 \\\\\\\n 0 & \\left|x\\right| > 1\n \\end{cases}\\\\]\n\n # Arguments\n x: Input tensor.\n\n # Returns\n Binarized tensor.\n\n # References\n - [Binarized Neural Networks: Training Deep Neural Networks with Weights and\n Activations Constrained to +1 or -1](http://arxiv.org/abs/1602.02830)\n \"\"\"\n\n x = tf.clip_by_value(x, -1, 1)\n\n return _binarize_with_identity_grad(x)\n\n\[email protected]_keras_custom_object\ndef magnitude_aware_sign(x):\n r\"\"\"\n Magnitude-aware sign for Bi-Real Net.\n\n\n # Arguments\n x: Input tensor\n\n # Returns\n Scaled binarized tensor (with values in $\\{-a, a\\}$, where $a$ is a float).\n\n # References\n - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved\n Representational Capability and Advanced Training\n Algorithm](https://arxiv.org/abs/1808.00278)\n\n \"\"\"\n scale_factor = tf.stop_gradient(\n tf.reduce_mean(tf.abs(x), axis=list(range(len(x.shape) - 1)))\n )\n return scale_factor * ste_sign(x)\n\n\[email protected]_keras_custom_object\ndef approx_sign(x):\n r\"\"\"\n Sign binarization function.\n \\\\[\n q(x) = \\begin{cases}\n -1 & x < 0 \\\\\\\n 1 & x \\geq 0\n \\end{cases}\n \\\\]\n\n The gradient is estimated using the ApproxSign method.\n \\\\[\\frac{\\partial q(x)}{\\partial x} = \\begin{cases}\n (2 - 2 \\left|x\\right|) & \\left|x\\right| \\leq 1 \\\\\\\n 0 & \\left|x\\right| > 1\n \\end{cases}\n \\\\]\n\n # Arguments\n x: Input tensor.\n\n # Returns\n Binarized tensor.\n\n # References\n - [Bi-Real Net: Enhancing the Performance of 1-bit CNNs With Improved\n Representational Capability and Advanced\n Training Algorithm](http://arxiv.org/abs/1808.00278)\n \"\"\"\n\n x = tf.clip_by_value(x, -1, 1)\n\n return _binarize_with_weighted_grad(x)\n\n\ndef serialize(initializer):\n return tf.keras.utils.serialize_keras_object(initializer)\n\n\ndef deserialize(name, custom_objects=None):\n return tf.keras.utils.deserialize_keras_object(\n name,\n module_objects=globals(),\n custom_objects=custom_objects,\n printable_module_name=\"quantization function\",\n )\n\n\ndef get(identifier):\n if identifier is None:\n return None\n if isinstance(identifier, str):\n return deserialize(str(identifier))\n if callable(identifier):\n return identifier\n 
raise ValueError(\n f\"Could not interpret quantization function identifier: {identifier}\"\n )\n", "path": "larq/quantizers.py"}]} |
gh_patches_debug_118 | rasdani/github-patches | git_diff | scverse__scanpy-721 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Give `external` higher billing in the docs?
At the moment external modules are kind of hidden in the docs. I think it'd be worth making them more visible (at least on the same page as everything else). I've been giving this a shot, but have hit the limit of my sphinx/rst abilities.
Two ideas for how they could be more discoverable:
* They get their own heading under `api`
* They're mixed in with everything else (so everything stays organized by topic), but their names are prepended with `sce` while scanpy functions are prepended with `sc` (see the sketch just below this list).
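
For illustration only (this mock-up is the editor's addition, not part of the original issue text): the second idea could read roughly like the snippet below, written in the same style scanpy already uses, a module-level `__doc__` string carrying the `autosummary` listing. The section name and function entries are placeholders, and this is a schematic sketch rather than working Sphinx configuration.

```python
# Schematic mock-up only: one topic section listing core (sc.*) and external
# (sce.*) functions side by side, distinguished by their import prefix.
# The entries below are placeholders, not a concrete proposal from the issue.
__doc__ = """\
Batch effect correction
~~~~~~~~~~~~~~~~~~~~~~~

.. autosummary::
   :toctree: .

   sc.pp.regress_out
   sce.pp.bbknn
   sce.pp.mnn_correct
"""
```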
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scanpy/external/__init__.py`
Content:
```
1 from . import tl
2 from . import pl
3 from . import pp
4
5 from .. import _exporting as exporting
6
7 import sys
8 from .. import utils
9 utils.annotate_doc_types(sys.modules[__name__], 'scanpy')
10 del sys, utils
11
12
13 __doc__ = """\
14 External API
15 ============
16
17
18 Import Scanpy's wrappers to external tools as::
19
20 import scanpy.external as sce
21
22 Preprocessing: PP
23 ------------------
24
25 Batch effect correction
26 ~~~~~~~~~~~~~~~~~~~~~~~
27
28 .. autosummary::
29 :toctree: .
30
31 pp.bbknn
32 pp.mnn_correct
33
34 Imputation
35 ~~~~~~~~~~
36
37 Note that the fundamental limitations of imputation are still under `debate
38 <https://github.com/theislab/scanpy/issues/189>`__.
39
40 .. autosummary::
41 :toctree: .
42
43 pp.dca
44 pp.magic
45
46
47 Tools: TL
48 ----------
49
50 Embeddings
51 ~~~~~~~~~~
52
53 .. autosummary::
54 :toctree: .
55
56 tl.phate
57 tl.palantir
58
59 Clustering and trajectory inference
60 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
61
62 .. autosummary::
63 :toctree: .
64
65 tl.phenograph
66
67 Gene scores, Cell cycle
68 ~~~~~~~~~~~~~~~~~~~~~~~
69
70 .. autosummary::
71 :toctree: .
72
73 tl.sandbag
74 tl.cyclone
75
76
77 Plotting: PL
78 ------------
79
80 .. autosummary::
81 :toctree: .
82
83 pl.phate
84 tl.palantir
85
86
87 Exporting
88 ---------
89
90 .. autosummary::
91 :toctree: .
92
93 exporting.spring_project
94 exporting.cellbrowser
95 """
96
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scanpy/external/__init__.py b/scanpy/external/__init__.py
--- a/scanpy/external/__init__.py
+++ b/scanpy/external/__init__.py
@@ -19,6 +19,8 @@
import scanpy.external as sce
+If you'd like to see your tool included here, please open a `pull request <https://github.com/theislab/scanpy>`_!
+
Preprocessing: PP
------------------
| {"golden_diff": "diff --git a/scanpy/external/__init__.py b/scanpy/external/__init__.py\n--- a/scanpy/external/__init__.py\n+++ b/scanpy/external/__init__.py\n@@ -19,6 +19,8 @@\n \n import scanpy.external as sce\n \n+If you'd like to see your tool included here, please open a `pull request <https://github.com/theislab/scanpy>`_!\n+\n Preprocessing: PP\n ------------------\n", "issue": "Give `external` higher billing in the docs?\nAt the moment external modules are kind of hidden in the docs. I think it'd be worth making them more visible (at least on the same page as everything else). I've been giving this a shot, but have hit the limit of my sphinx/ rst abilities.\r\n\r\nTwo ideas for how they could be more discoverable:\r\n\r\n* They get their own heading under `api`\r\n* They're mixed in with everything else (so everything stays organized by topic), but their names are prepended with `sce` while scanpy functions are prepended with `sc`.\n", "before_files": [{"content": "from . import tl\nfrom . import pl\nfrom . import pp\n\nfrom .. import _exporting as exporting\n\nimport sys\nfrom .. import utils\nutils.annotate_doc_types(sys.modules[__name__], 'scanpy')\ndel sys, utils\n\n\n__doc__ = \"\"\"\\\nExternal API\n============\n\n\nImport Scanpy's wrappers to external tools as::\n\n import scanpy.external as sce\n\nPreprocessing: PP\n------------------\n\nBatch effect correction\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n pp.bbknn\n pp.mnn_correct\n\nImputation\n~~~~~~~~~~\n\nNote that the fundamental limitations of imputation are still under `debate\n<https://github.com/theislab/scanpy/issues/189>`__.\n\n.. autosummary::\n :toctree: .\n\n pp.dca\n pp.magic\n\n\nTools: TL\n----------\n\nEmbeddings\n~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n tl.phate\n tl.palantir\n\nClustering and trajectory inference\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n tl.phenograph\n\nGene scores, Cell cycle\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n tl.sandbag\n tl.cyclone\n\n\nPlotting: PL\n------------\n\n.. autosummary::\n :toctree: .\n\n pl.phate\n tl.palantir\n\n\nExporting\n---------\n\n.. autosummary::\n :toctree: .\n\n exporting.spring_project\n exporting.cellbrowser\n\"\"\"\n", "path": "scanpy/external/__init__.py"}], "after_files": [{"content": "from . import tl\nfrom . import pl\nfrom . import pp\n\nfrom .. import _exporting as exporting\n\nimport sys\nfrom .. import utils\nutils.annotate_doc_types(sys.modules[__name__], 'scanpy')\ndel sys, utils\n\n\n__doc__ = \"\"\"\\\nExternal API\n============\n\n\nImport Scanpy's wrappers to external tools as::\n\n import scanpy.external as sce\n\nIf you'd like to see your tool included here, please open a `pull request <https://github.com/theislab/scanpy>`_!\n\nPreprocessing: PP\n------------------\n\nBatch effect correction\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n pp.bbknn\n pp.mnn_correct\n\nImputation\n~~~~~~~~~~\n\nNote that the fundamental limitations of imputation are still under `debate\n<https://github.com/theislab/scanpy/issues/189>`__.\n\n.. autosummary::\n :toctree: .\n\n pp.dca\n pp.magic\n\n\nTools: TL\n----------\n\nEmbeddings\n~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n tl.phate\n tl.palantir\n\nClustering and trajectory inference\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n.. autosummary::\n :toctree: .\n\n tl.phenograph\n\nGene scores, Cell cycle\n~~~~~~~~~~~~~~~~~~~~~~~\n\n.. 
autosummary::\n :toctree: .\n\n tl.sandbag\n tl.cyclone\n\n\nPlotting: PL\n------------\n\n.. autosummary::\n :toctree: .\n\n pl.phate\n tl.palantir\n\n\nExporting\n---------\n\n.. autosummary::\n :toctree: .\n\n exporting.spring_project\n exporting.cellbrowser\n\"\"\"\n", "path": "scanpy/external/__init__.py"}]} |
gh_patches_debug_119 | rasdani/github-patches | git_diff | Flexget__Flexget-2778 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
new bug in 3.1.85 NameError: name 'partial' is not defined
<!---
Before opening an issue, verify:
- Is this a feature request? Post it on https://feathub.com/Flexget/Flexget
- Is this an issue with webui? Make an issue over on https://github.com/Flexget/webui
- Did you recently upgrade? Look at the Change Log and Upgrade Actions to make sure that you don't need to make any changes to your config https://flexget.com/ChangeLog https://flexget.com/UpgradeActions
- Are you running FlexGet as a daemon? Stop it completely and then start it again https://flexget.com/CLI/daemon
- Did you search to see if the issue already exists? https://github.com/Flexget/Flexget/issues
- Did you fill out the issue template as completely as possible?
The issue template is here because it helps to ensure you submitted all the necessary information the first time, and allows us to more quickly review issues. Please fill it out correctly and do not ignore it, no matter how irrelevant you think it may be. Thanks in advance for your help with this!
--->
### Expected behaviour:
<!---
Please don't just say "it doesn't crash" or "it works". Explain what the expected result is.
--->
version 3.1.85 works with the transmission plugin
### Actual behaviour:
version 3.1.85 is broken; rolling back to 3.1.84 fixes the issue
### Steps to reproduce:
- Step 1: run version 3.1.85 with the transmission plugin
- Step 2: the transmission output plugin breaks immediately with the NameError shown in the log below
- Step 3: the regression was introduced by https://github.com/Flexget/Flexget/pull/2773/ , please revert it
#### Config:
```yaml
Paste FULL config and remove any personal info if config is too long, attach the file to the ticket.
If issue is with a single task, you can get get resulting configuration by running:
flexget execute --task <NAME> --dump-config
Make sure to redact any personal information (passwords, api keys, etc) !
```
#### Log:
<details>
<summary>(click to expand)</summary>
```
NameError: name 'partial' is not defined
Traceback (most recent call last):
File "/home/ubuntu/flexget/lib/python3.8/site-packages/flexget/task.py", line 547, in __run_plugin
result = method(*args, **kwargs)
File "/home/ubuntu/flexget/lib/python3.8/site-packages/flexget/event.py", line 20, in call
return self.func(*args, **kwargs)
File "/home/ubuntu/flexget/lib/python3.8/site-packages/flexget/plugins/clients/transmission.py", line 616, in on_task_output
start_torrent = partial(self.client.start_torrent, [torrent_info.id])
NameError: name 'partial' is not defined
```
</details>
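
Editor's note (not part of the original report): the traceback above is the standard symptom of calling `functools.partial` without importing it. Below is a minimal, self-contained sketch that reproduces the error and shows the one-line fix; `FakeClient` is a stand-in used purely for illustration, not the real `transmissionrpc` client.

```python
from functools import partial  # the import the plugin code was missing


class FakeClient:
    """Stand-in for transmissionrpc.Client, exposing only the call used here."""

    def start_torrent(self, ids, bypass_queue=False):
        print(f"starting torrents {ids} (bypass_queue={bypass_queue})")


client = FakeClient()

# Without the `from functools import partial` import at the top,
# the next line raises: NameError: name 'partial' is not defined
start_torrent = partial(client.start_torrent, [42])

start_torrent()                   # 'resume' action
start_torrent(bypass_queue=True)  # 'bypass_queue' action
```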
### Additional information:
- FlexGet version: 3.1.85
- Python version: 3.8.6
- Installation method: python virtualenv
- Using daemon (yes/no): yes
- OS and version: ubuntu
- Link to crash log: ^
<!---
In config and debug/crash logs, remember to redact any personal or sensitive information such as passwords, API keys, private URLs and so on.
Please verify that the following data is present before submitting your issue:
- Link to a paste service or paste above the relevant config (preferably full config, including templates if present). Please make sure the paste does not expire, if possible.
- Link to a paste service or paste above debug-level logs of the relevant task/s (use `flexget -L debug execute --tasks <Task_name>`).
- FlexGet version (use `flexget -V` to get it).
- Full Python version, for example `2.7.11` (use `python -V` to get it).
- Installation method (pip, git install, etc).
- Whether or not you're running FlexGet as a daemon.
- OS and version.
- Attach crash log if one was generated, in addition to the debug-level log. It can be found in the directory with your config file.
--->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `flexget/plugins/clients/transmission.py`
Content:
```
1 import base64
2 import os
3 import re
4 from datetime import datetime, timedelta
5 from fnmatch import fnmatch
6 from netrc import NetrcParseError, netrc
7 from time import sleep
8 from urllib.parse import urlparse
9
10 from loguru import logger
11
12 from flexget import plugin
13 from flexget.config_schema import one_or_more
14 from flexget.entry import Entry
15 from flexget.event import event
16 from flexget.utils.pathscrub import pathscrub
17 from flexget.utils.template import RenderError
18 from flexget.utils.tools import parse_timedelta
19
20 try:
21 import transmissionrpc
22 from transmissionrpc import HTTPHandlerError, TransmissionError
23 except ImportError:
24 # If transmissionrpc is not found, errors will be shown later
25 pass
26
27 logger = logger.bind(name='transmission')
28
29
30 class TransmissionBase:
31 def __init__(self):
32 self.client = None
33 self.opener = None
34
35 def prepare_config(self, config):
36 if isinstance(config, bool):
37 config = {'enabled': config}
38 config.setdefault('enabled', True)
39 config.setdefault('host', 'localhost')
40 config.setdefault('port', 9091)
41 config.setdefault('main_file_ratio', 0.9)
42 if 'netrc' in config:
43 netrc_path = os.path.expanduser(config['netrc'])
44 try:
45 config['username'], _, config['password'] = netrc(netrc_path).authenticators(
46 config['host']
47 )
48 except OSError as e:
49 logger.error('netrc: unable to open: {}', e.filename)
50 except NetrcParseError as e:
51 logger.error('netrc: {}, file: {}, line: {}', e.msg, e.filename, e.lineno)
52 return config
53
54 def create_rpc_client(self, config):
55 user, password = config.get('username'), config.get('password')
56
57 try:
58 cli = transmissionrpc.Client(config['host'], config['port'], user, password)
59 except TransmissionError as e:
60 if isinstance(e.original, HTTPHandlerError):
61 if e.original.code == 111:
62 raise plugin.PluginError("Cannot connect to transmission. Is it running?")
63 elif e.original.code == 401:
64 raise plugin.PluginError(
65 "Username/password for transmission is incorrect. Cannot connect."
66 )
67 elif e.original.code == 110:
68 raise plugin.PluginError(
69 "Cannot connect to transmission: Connection timed out."
70 )
71 else:
72 raise plugin.PluginError(
73 "Error connecting to transmission: %s" % e.original.message
74 )
75 else:
76 raise plugin.PluginError("Error connecting to transmission: %s" % e.message)
77 return cli
78
79 def torrent_info(self, torrent, config):
80 done = torrent.totalSize > 0
81 vloc = None
82 best = None
83 for t in torrent.files().items():
84 tf = t[1]
85 if tf['selected']:
86 if tf['size'] <= 0 or tf['completed'] < tf['size']:
87 done = False
88 break
89 if not best or tf['size'] > best[1]:
90 best = (tf['name'], tf['size'])
91 if (
92 done
93 and best
94 and (100 * float(best[1]) / float(torrent.totalSize))
95 >= (config['main_file_ratio'] * 100)
96 ):
97 vloc = ('%s/%s' % (torrent.downloadDir, best[0])).replace('/', os.sep)
98 return done, vloc
99
100 def check_seed_limits(self, torrent, session):
101 seed_limit_ok = True # will remain if no seed ratio defined
102 idle_limit_ok = True # will remain if no idle limit defined
103
104 if torrent.seedRatioMode == 1: # use torrent's own seed ratio limit
105 seed_limit_ok = torrent.uploadRatio >= torrent.seedRatioLimit
106 elif torrent.seedRatioMode == 0: # use global rules
107 if session.seedRatioLimited:
108 seed_limit_ok = torrent.uploadRatio >= session.seedRatioLimit
109
110 if torrent.seedIdleMode == 1: # use torrent's own idle limit
111 idle_limit_ok = (
112 torrent.date_active + timedelta(minutes=torrent.seedIdleLimit) < datetime.now()
113 )
114 elif torrent.seedIdleMode == 0: # use global rules
115 if session.idle_seeding_limit_enabled:
116 idle_limit_ok = (
117 torrent.date_active + timedelta(minutes=session.idle_seeding_limit)
118 < datetime.now()
119 )
120
121 return seed_limit_ok, idle_limit_ok
122
123 def on_task_start(self, task, config):
124 try:
125 import transmissionrpc
126 from transmissionrpc import HTTPHandlerError # noqa
127 from transmissionrpc import TransmissionError # noqa
128 except:
129 raise plugin.PluginError(
130 'Transmissionrpc module version 0.11 or higher required.', logger
131 )
132 if [int(part) for part in transmissionrpc.__version__.split('.')] < [0, 11]:
133 raise plugin.PluginError(
134 'Transmissionrpc module version 0.11 or higher required, please upgrade', logger
135 )
136
137 # Mark rpc client for garbage collector so every task can start
138 # a fresh new according its own config - fix to bug #2804
139 self.client = None
140 config = self.prepare_config(config)
141 if config['enabled']:
142 if task.options.test:
143 logger.info('Trying to connect to transmission...')
144 self.client = self.create_rpc_client(config)
145 if self.client:
146 logger.info('Successfully connected to transmission.')
147 else:
148 logger.error('It looks like there was a problem connecting to transmission.')
149
150
151 class PluginTransmissionInput(TransmissionBase):
152 schema = {
153 'anyOf': [
154 {'type': 'boolean'},
155 {
156 'type': 'object',
157 'properties': {
158 'host': {'type': 'string'},
159 'port': {'type': 'integer'},
160 'netrc': {'type': 'string', 'format': 'file'},
161 'username': {'type': 'string'},
162 'password': {'type': 'string'},
163 'enabled': {'type': 'boolean'},
164 'only_complete': {'type': 'boolean'},
165 },
166 'additionalProperties': False,
167 },
168 ]
169 }
170
171 def prepare_config(self, config):
172 config = TransmissionBase.prepare_config(self, config)
173 config.setdefault('only_complete', False)
174 return config
175
176 def on_task_input(self, task, config):
177 config = self.prepare_config(config)
178 if not config['enabled']:
179 return
180
181 if not self.client:
182 self.client = self.create_rpc_client(config)
183 entries = []
184
185 # Hack/Workaround for http://flexget.com/ticket/2002
186 # TODO: Proper fix
187 if 'username' in config and 'password' in config:
188 self.client.http_handler.set_authentication(
189 self.client.url, config['username'], config['password']
190 )
191
192 session = self.client.get_session()
193
194 for torrent in self.client.get_torrents():
195 seed_ratio_ok, idle_limit_ok = self.check_seed_limits(torrent, session)
196 if config['only_complete'] and not (
197 seed_ratio_ok and idle_limit_ok and torrent.progress == 100
198 ):
199 continue
200 entry = Entry(
201 title=torrent.name,
202 url='',
203 torrent_info_hash=torrent.hashString,
204 content_size=torrent.totalSize / (1024 * 1024),
205 )
206 # Location of torrent is only valid if transmission is on same machine as flexget
207 if config['host'] in ('localhost', '127.0.0.1'):
208 entry['location'] = torrent.torrentFile
209 entry['url'] = 'file://' + torrent.torrentFile
210 for attr in [
211 'id',
212 'comment',
213 'desiredAvailable',
214 'downloadDir',
215 'isFinished',
216 'isPrivate',
217 'leftUntilDone',
218 'ratio',
219 'status',
220 'date_active',
221 'date_added',
222 'date_done',
223 'date_started',
224 'errorString',
225 'priority',
226 'progress',
227 'secondsDownloading',
228 'secondsSeeding',
229 'torrentFile',
230 ]:
231 try:
232 entry['transmission_' + attr] = getattr(torrent, attr)
233 except Exception:
234 logger.opt(exception=True).debug(
235 'error when requesting transmissionrpc attribute {}', attr
236 )
237 # Availability in percent
238 entry['transmission_availability'] = (torrent.desiredAvailable / torrent.leftUntilDone) if torrent.leftUntilDone else 0
239
240 entry['transmission_trackers'] = [t['announce'] for t in torrent.trackers]
241 entry['transmission_seed_ratio_ok'] = seed_ratio_ok
242 entry['transmission_idle_limit_ok'] = idle_limit_ok
243 st_error_to_desc = {
244 0: 'OK',
245 1: 'tracker_warning',
246 2: 'tracker_error',
247 3: 'local_error',
248 }
249 entry['transmission_error_state'] = st_error_to_desc[torrent.error]
250 # Built in done_date doesn't work when user adds an already completed file to transmission
251 if torrent.progress == 100:
252 entry['transmission_date_done'] = datetime.fromtimestamp(
253 max(torrent.addedDate, torrent.doneDate)
254 )
255 entries.append(entry)
256 return entries
257
258
259 class PluginTransmission(TransmissionBase):
260 """
261 Add url from entry url to transmission
262
263 Example::
264
265 transmission:
266 host: localhost
267 port: 9091
268 netrc: /home/flexget/.tmnetrc
269 username: myusername
270 password: mypassword
271 path: the download location
272
273 Default values for the config elements::
274
275 transmission:
276 host: localhost
277 port: 9091
278 enabled: yes
279 """
280
281 schema = {
282 'anyOf': [
283 {'type': 'boolean'},
284 {
285 'type': 'object',
286 'properties': {
287 'host': {'type': 'string'},
288 'port': {'type': 'integer'},
289 'netrc': {'type': 'string'},
290 'username': {'type': 'string'},
291 'password': {'type': 'string'},
292 'action': {
293 'type': 'string',
294 'enum': ['add', 'remove', 'purge', 'pause', 'resume', 'bypass_queue'],
295 },
296 'path': {'type': 'string'},
297 'max_up_speed': {'type': 'number'},
298 'max_down_speed': {'type': 'number'},
299 'max_connections': {'type': 'integer'},
300 'ratio': {'type': 'number'},
301 'add_paused': {'type': 'boolean'},
302 'content_filename': {'type': 'string'},
303 'main_file_only': {'type': 'boolean'},
304 'main_file_ratio': {'type': 'number'},
305 'magnetization_timeout': {'type': 'integer'},
306 'enabled': {'type': 'boolean'},
307 'include_subs': {'type': 'boolean'},
308 'bandwidth_priority': {'type': 'number'},
309 'honor_limits': {'type': 'boolean'},
310 'include_files': one_or_more({'type': 'string'}),
311 'skip_files': one_or_more({'type': 'string'}),
312 'rename_like_files': {'type': 'boolean'},
313 'queue_position': {'type': 'integer'},
314 },
315 'additionalProperties': False,
316 },
317 ]
318 }
319
320 def prepare_config(self, config):
321 config = TransmissionBase.prepare_config(self, config)
322 config.setdefault('action', 'add')
323 config.setdefault('path', '')
324 config.setdefault('main_file_only', False)
325 config.setdefault('magnetization_timeout', 0)
326 config.setdefault('include_subs', False)
327 config.setdefault('rename_like_files', False)
328 config.setdefault('include_files', [])
329 return config
330
331 @plugin.priority(120)
332 def on_task_download(self, task, config):
333 """
334 Call download plugin to generate the temp files we will load
335 into deluge then verify they are valid torrents
336 """
337 config = self.prepare_config(config)
338 if not config['enabled']:
339 return
340 # If the download plugin is not enabled, we need to call it to get our temp .torrent files
341 if 'download' not in task.config:
342 download = plugin.get('download', self)
343 for entry in task.accepted:
344 if entry.get('transmission_id'):
345 # The torrent is already loaded in deluge, we don't need to get anything
346 continue
347 if config['action'] != 'add' and entry.get('torrent_info_hash'):
348 # If we aren't adding the torrent new, all we need is info hash
349 continue
350 download.get_temp_file(task, entry, handle_magnets=True, fail_html=True)
351
352 @plugin.priority(135)
353 def on_task_output(self, task, config):
354 config = self.prepare_config(config)
355 # don't add when learning
356 if task.options.learn:
357 return
358 if not config['enabled']:
359 return
360 # Do not run if there is nothing to do
361 if not task.accepted:
362 return
363 if self.client is None:
364 self.client = self.create_rpc_client(config)
365 if self.client:
366 logger.debug('Successfully connected to transmission.')
367 else:
368 raise plugin.PluginError("Couldn't connect to transmission.")
369 session_torrents = self.client.get_torrents()
370 for entry in task.accepted:
371 if task.options.test:
372 logger.info('Would {} {} in transmission.', config['action'], entry['title'])
373 continue
374 # Compile user options into appropriate dict
375 options = self._make_torrent_options_dict(config, entry)
376 torrent_info = None
377 for t in session_torrents:
378 if t.hashString.lower() == entry.get(
379 'torrent_info_hash', ''
380 ).lower() or t.id == entry.get('transmission_id'):
381 torrent_info = t
382 logger.debug(
383 'Found {} already loaded in transmission as {}',
384 entry['title'],
385 torrent_info.name,
386 )
387 break
388
389 if not torrent_info:
390 if config['action'] != 'add':
391 logger.warning(
392 'Cannot {} {} because it is not loaded in transmission.',
393 config['action'],
394 entry['title'],
395 )
396 continue
397 downloaded = not entry['url'].startswith('magnet:')
398
399 # Check that file is downloaded
400 if downloaded and 'file' not in entry:
401 entry.fail('`file` field missing?')
402 continue
403
404 # Verify the temp file exists
405 if downloaded and not os.path.exists(entry['file']):
406 tmp_path = os.path.join(task.manager.config_base, 'temp')
407 logger.debug('entry: {}', entry)
408 logger.debug('temp: {}', ', '.join(os.listdir(tmp_path)))
409 entry.fail("Downloaded temp file '%s' doesn't exist!?" % entry['file'])
410 continue
411
412 try:
413 if downloaded:
414 with open(entry['file'], 'rb') as f:
415 filedump = base64.b64encode(f.read()).decode('utf-8')
416 torrent_info = self.client.add_torrent(filedump, 30, **options['add'])
417 else:
418 if options['post'].get('magnetization_timeout', 0) > 0:
419 options['add']['paused'] = False
420 torrent_info = self.client.add_torrent(
421 entry['url'], timeout=30, **options['add']
422 )
423 except TransmissionError as e:
424 logger.opt(exception=True).debug('TransmissionError')
425 logger.debug('Failed options dict: {}', options['add'])
426 msg = 'Error adding {} to transmission. TransmissionError: {}'.format(
427 entry['title'], e.message or 'N/A'
428 )
429 logger.error(msg)
430 entry.fail(msg)
431 continue
432 logger.info('"{}" torrent added to transmission', entry['title'])
433 # The info returned by the add call is incomplete, refresh it
434 torrent_info = self.client.get_torrent(torrent_info.id)
435 else:
436 # Torrent already loaded in transmission
437 if options['add'].get('download_dir'):
438 logger.verbose(
439 'Moving {} to "{}"', torrent_info.name, options['add']['download_dir']
440 )
441 # Move data even if current reported torrent location matches new location
442 # as transmission may fail to automatically move completed file to final
443 # location but continue reporting final location instead of real location.
444 # In such case this will kick transmission to really move data.
445 # If data is already located at new location then transmission just ignore
446 # this command.
447 self.client.move_torrent_data(
448 torrent_info.id, options['add']['download_dir'], 120
449 )
450
451 try:
452 total_size = torrent_info.totalSize
453 main_id = None
454 find_main_file = (
455 options['post'].get('main_file_only') or 'content_filename' in options['post']
456 )
457 skip_files = options['post'].get('skip_files')
458 # We need to index the files if any of the following are defined
459 if find_main_file or skip_files:
460 file_list = self.client.get_files(torrent_info.id)[torrent_info.id]
461
462 if options['post'].get('magnetization_timeout', 0) > 0 and not file_list:
463 logger.debug(
464 'Waiting {} seconds for "{}" to magnetize',
465 options['post']['magnetization_timeout'],
466 entry['title'],
467 )
468 for _ in range(options['post']['magnetization_timeout']):
469 sleep(1)
470 file_list = self.client.get_files(torrent_info.id)[torrent_info.id]
471 if file_list:
472 total_size = self.client.get_torrent(
473 torrent_info.id, ['id', 'totalSize']
474 ).totalSize
475 break
476 else:
477 logger.warning(
478 '"{}" did not magnetize before the timeout elapsed, file list unavailable for processing.',
479 entry['title'],
480 )
481
482 # Find files based on config
483 dl_list = []
484 skip_list = []
485 main_list = []
486 ext_list = ['*.srt', '*.sub', '*.idx', '*.ssa', '*.ass']
487
488 main_ratio = config['main_file_ratio']
489 if 'main_file_ratio' in options['post']:
490 main_ratio = options['post']['main_file_ratio']
491
492 for f in file_list:
493 # No need to set main_id if we're not going to need it
494 if find_main_file and file_list[f]['size'] > total_size * main_ratio:
495 main_id = f
496
497 if 'include_files' in options['post']:
498 if any(
499 fnmatch(file_list[f]['name'], mask)
500 for mask in options['post']['include_files']
501 ):
502 dl_list.append(f)
503 elif options['post'].get('include_subs') and any(
504 fnmatch(file_list[f]['name'], mask) for mask in ext_list
505 ):
506 dl_list.append(f)
507
508 if skip_files:
509 if any(fnmatch(file_list[f]['name'], mask) for mask in skip_files):
510 skip_list.append(f)
511
512 if main_id is not None:
513 # Look for files matching main ID title but with a different extension
514 if options['post'].get('rename_like_files'):
515 for f in file_list:
516 # if this filename matches main filename we want to rename it as well
517 fs = os.path.splitext(file_list[f]['name'])
518 if fs[0] == os.path.splitext(file_list[main_id]['name'])[0]:
519 main_list.append(f)
520 else:
521 main_list = [main_id]
522
523 if main_id not in dl_list:
524 dl_list.append(main_id)
525 elif find_main_file:
526 logger.warning(
527 'No files in "{}" are > {:.0f}% of content size, no files renamed.',
528 entry['title'],
529 main_ratio * 100,
530 )
531
532 # If we have a main file and want to rename it and associated files
533 if 'content_filename' in options['post'] and main_id is not None:
534 if 'download_dir' not in options['add']:
535 download_dir = self.client.get_session().download_dir
536 else:
537 download_dir = options['add']['download_dir']
538
539 # Get new filename without ext
540 file_ext = os.path.splitext(file_list[main_id]['name'])[1]
541 file_path = os.path.dirname(
542 os.path.join(download_dir, file_list[main_id]['name'])
543 )
544 filename = options['post']['content_filename']
545 if config['host'] == 'localhost' or config['host'] == '127.0.0.1':
546 counter = 1
547 while os.path.exists(os.path.join(file_path, filename + file_ext)):
548 # Try appending a (#) suffix till a unique filename is found
549 filename = '%s(%s)' % (
550 options['post']['content_filename'],
551 counter,
552 )
553 counter += 1
554 else:
555 logger.debug(
556 'Cannot ensure content_filename is unique '
557 'when adding to a remote transmission daemon.'
558 )
559
560 for index in main_list:
561 file_ext = os.path.splitext(file_list[index]['name'])[1]
562 logger.debug(
563 'File {} renamed to {}',
564 file_list[index]['name'],
565 filename + file_ext,
566 )
567 # change to below when set_files will allow setting name, more efficient to have one call
568 # fl[index]['name'] = os.path.basename(pathscrub(filename + file_ext).encode('utf-8'))
569 try:
570 self.client.rename_torrent_path(
571 torrent_info.id,
572 file_list[index]['name'],
573 os.path.basename(str(pathscrub(filename + file_ext))),
574 )
575 except TransmissionError:
576 logger.error(
577 'content_filename only supported with transmission 2.8+'
578 )
579
580 if options['post'].get('main_file_only') and main_id is not None:
581 # Set Unwanted Files
582 options['change']['files_unwanted'] = [
583 x for x in file_list if x not in dl_list
584 ]
585 options['change']['files_wanted'] = dl_list
586 logger.debug(
587 'Downloading {} of {} files in torrent.',
588 len(options['change']['files_wanted']),
589 len(file_list),
590 )
591 elif (
592 not options['post'].get('main_file_only') or main_id is None
593 ) and skip_files:
594 # If no main file and we want to skip files
595
596 if len(skip_list) >= len(file_list):
597 logger.debug(
598 'skip_files filter would cause no files to be downloaded; '
599 'including all files in torrent.'
600 )
601 else:
602 options['change']['files_unwanted'] = skip_list
603 options['change']['files_wanted'] = [
604 x for x in file_list if x not in skip_list
605 ]
606 logger.debug(
607 'Downloading {} of {} files in torrent.',
608 len(options['change']['files_wanted']),
609 len(file_list),
610 )
611
612 # Set any changed file properties
613 if list(options['change'].keys()):
614 self.client.change_torrent(torrent_info.id, 30, **options['change'])
615
616 start_torrent = partial(self.client.start_torrent, [torrent_info.id])
617
618 if config['action'] == 'add':
619 # if add_paused was defined and set to False start the torrent;
620 # prevents downloading data before we set what files we want
621 start_paused = (
622 options['post']['paused']
623 if 'paused' in options['post']
624 else not self.client.get_session().start_added_torrents
625 )
626 if start_paused:
627 self.client.stop_torrent(torrent_info.id)
628 else:
629 self.client.start_torrent(torrent_info.id)
630 elif config['action'] in ('remove', 'purge'):
631 self.client.remove_torrent(
632 [torrent_info.id], delete_data=config['action'] == 'purge'
633 )
634 logger.info('{}d {} from transmission', config['action'], torrent_info.name)
635 elif config['action'] == 'pause':
636 self.client.stop_torrent([torrent_info.id])
637 logger.info('paused {} in transmission', torrent_info.name)
638 elif config['action'] == 'resume':
639 start_torrent()
640 logger.info('resumed {} in transmission', torrent_info.name)
641 elif config['action'] == 'bypass_queue':
642 start_torrent(bypass_queue=True)
643 logger.info('resumed (bypass queue) {} in transmission', torrent_info.name)
644
645 except TransmissionError as e:
646 logger.opt(exception=True).debug('TransmissionError')
647 logger.debug('Failed options dict: {}', options)
648 msg = 'Error trying to {} {}, TransmissionError: {}'.format(
649 config['action'], entry['title'], e.message or 'N/A'
650 )
651 logger.error(msg)
652 continue
653
654 def _make_torrent_options_dict(self, config, entry):
655
656 opt_dic = {}
657
658 for opt_key in (
659 'path',
660 'add_paused',
661 'honor_limits',
662 'bandwidth_priority',
663 'max_connections',
664 'max_up_speed',
665 'max_down_speed',
666 'ratio',
667 'main_file_only',
668 'main_file_ratio',
669 'magnetization_timeout',
670 'include_subs',
671 'content_filename',
672 'include_files',
673 'skip_files',
674 'rename_like_files',
675 'queue_position',
676 ):
677 # Values do not merge config with task
678 # Task takes priority then config is used
679 if opt_key in entry:
680 opt_dic[opt_key] = entry[opt_key]
681 elif opt_key in config:
682 opt_dic[opt_key] = config[opt_key]
683
684 options = {'add': {}, 'change': {}, 'post': {}}
685
686 add = options['add']
687 if opt_dic.get('path'):
688 try:
689 path = os.path.expanduser(entry.render(opt_dic['path']))
690 except RenderError as e:
691 logger.error('Error setting path for {}: {}', entry['title'], e)
692 else:
693 # Transmission doesn't like it when paths end in a separator
694 path = path.rstrip('\\/')
695 add['download_dir'] = pathscrub(path)
696 # make sure we add it paused, will modify status after adding
697 add['paused'] = True
698
699 change = options['change']
700 if 'bandwidth_priority' in opt_dic:
701 change['bandwidthPriority'] = opt_dic['bandwidth_priority']
702 if 'honor_limits' in opt_dic and not opt_dic['honor_limits']:
703 change['honorsSessionLimits'] = False
704 if 'max_up_speed' in opt_dic:
705 change['uploadLimit'] = opt_dic['max_up_speed']
706 change['uploadLimited'] = True
707 if 'max_down_speed' in opt_dic:
708 change['downloadLimit'] = opt_dic['max_down_speed']
709 change['downloadLimited'] = True
710 if 'max_connections' in opt_dic:
711 change['peer_limit'] = opt_dic['max_connections']
712
713 if 'ratio' in opt_dic:
714 change['seedRatioLimit'] = opt_dic['ratio']
715 if opt_dic['ratio'] == -1:
716 # seedRatioMode:
717 # 0 follow the global settings
718 # 1 override the global settings, seeding until a certain ratio
719 # 2 override the global settings, seeding regardless of ratio
720 change['seedRatioMode'] = 2
721 else:
722 change['seedRatioMode'] = 1
723
724 if 'queue_position' in opt_dic:
725 change['queuePosition'] = opt_dic['queue_position']
726
727 post = options['post']
728 # set to modify paused status after
729 if 'add_paused' in opt_dic:
730 post['paused'] = opt_dic['add_paused']
731 if 'main_file_only' in opt_dic:
732 post['main_file_only'] = opt_dic['main_file_only']
733 if 'main_file_ratio' in opt_dic:
734 post['main_file_ratio'] = opt_dic['main_file_ratio']
735 if 'magnetization_timeout' in opt_dic:
736 post['magnetization_timeout'] = opt_dic['magnetization_timeout']
737 if 'include_subs' in opt_dic:
738 post['include_subs'] = opt_dic['include_subs']
739 if 'content_filename' in opt_dic:
740 try:
741 post['content_filename'] = entry.render(opt_dic['content_filename'])
742 except RenderError as e:
743 logger.error('Unable to render content_filename {}: {}', entry['title'], e)
744 if 'skip_files' in opt_dic:
745 post['skip_files'] = opt_dic['skip_files']
746 if not isinstance(post['skip_files'], list):
747 post['skip_files'] = [post['skip_files']]
748 if 'include_files' in opt_dic:
749 post['include_files'] = opt_dic['include_files']
750 if not isinstance(post['include_files'], list):
751 post['include_files'] = [post['include_files']]
752 if 'rename_like_files' in opt_dic:
753 post['rename_like_files'] = opt_dic['rename_like_files']
754 return options
755
756 def on_task_learn(self, task, config):
757 """ Make sure all temp files are cleaned up when entries are learned """
758 # If download plugin is enabled, it will handle cleanup.
759 if 'download' not in task.config:
760 download = plugin.get('download', self)
761 download.cleanup_temp_files(task)
762
763 on_task_abort = on_task_learn
764
765
766 class PluginTransmissionClean(TransmissionBase):
767 """
768 DEPRECATED: A separate task using from_transmission and transmission with remove action should be used instead.
769
770 Remove completed torrents from Transmission.
771
772 Examples::
773
774 clean_transmission: yes # ignore both time and ratio
775
776 clean_transmission: # uses transmission's internal limits for idle time and seed ratio ( if defined )
777 transmission_seed_limits: yes
778
779 clean_transmission: # matches time only
780 finished_for: 2 hours
781
782 clean_transmission: # matches ratio only
783 min_ratio: 0.5
784
785 clean_transmission: # matches time OR ratio
786 finished_for: 2 hours
787 min_ratio: 0.5
788
789 Default values for the config elements::
790
791 clean_transmission:
792 host: localhost
793 port: 9091
794 enabled: yes
795 """
796
797 schema = {
798 "deprecated": "The clean_transmission plugin is deprecated. Configure a new task using the from_transmission "
799 "plugin as well as the transmission plugin using the remove or purge action.",
800 "anyOf": [
801 {"type": "boolean"},
802 {
803 "type": "object",
804 "properties": {
805 "host": {"type": "string"},
806 "port": {"type": "integer"},
807 "netrc": {"type": "string", "format": "file"},
808 "username": {"type": "string"},
809 "password": {"type": "string"},
810 "enabled": {"type": "boolean"},
811 "min_ratio": {"type": "number"},
812 "finished_for": {"type": "string", "format": "interval"},
813 "transmission_seed_limits": {"type": "boolean"},
814 "delete_files": {"type": "boolean"},
815 "tracker": {"type": "string", "format": "regex"},
816 "preserve_tracker": {"type": "string", "format": "regex"},
817 "directories": {
818 "type": "array",
819 "items": {"type": "string", "format": "regex"},
820 },
821 },
822 "additionalProperties": False,
823 },
824 ],
825 }
826
827 def on_task_exit(self, task, config):
828 config = self.prepare_config(config)
829 if not config['enabled'] or task.options.learn:
830 return
831 if not self.client:
832 self.client = self.create_rpc_client(config)
833 tracker_re = re.compile(config['tracker'], re.IGNORECASE) if 'tracker' in config else None
834 preserve_tracker_re = (
835 re.compile(config['preserve_tracker'], re.IGNORECASE)
836 if 'preserve_tracker' in config
837 else None
838 )
839
840 session = self.client.get_session()
841
842 remove_ids = []
843 for torrent in self.client.get_torrents():
844 logger.verbose(
845 'Torrent "{}": status: "{}" - ratio: {} - date added: {}',
846 torrent.name,
847 torrent.status,
848 torrent.ratio,
849 torrent.date_added,
850 )
851 downloaded, dummy = self.torrent_info(torrent, config)
852 if not downloaded:
853 continue
854 if config.get('transmission_seed_limits'):
855 seed_ratio_ok, idle_limit_ok = self.check_seed_limits(torrent, session)
856 if not seed_ratio_ok or not idle_limit_ok:
857 continue
858 if 'min_ratio' in config:
859 if torrent.ratio < config['min_ratio']:
860 continue
861 if 'finished_for' in config:
862 # done date might be invalid if this torrent was added to transmission when already completed
863 started_seeding = datetime.fromtimestamp(max(torrent.addedDate, torrent.doneDate))
864 if started_seeding + parse_timedelta(config['finished_for']) > datetime.now():
865 continue
866 tracker_hosts = (
867 urlparse(tracker['announce']).hostname for tracker in torrent.trackers
868 )
869 if 'tracker' in config:
870 if not any(tracker_re.search(tracker) for tracker in tracker_hosts):
871 continue
872 if 'preserve_tracker' in config:
873 if any(preserve_tracker_re.search(tracker) for tracker in tracker_hosts):
874 continue
875 if config.get('directories'):
876 if not any(
877 re.search(d, torrent.downloadDir, re.IGNORECASE) for d in config['directories']
878 ):
879 continue
880 if task.options.test:
881 logger.info('Would remove finished torrent `{}` from transmission', torrent.name)
882 continue
883 logger.info('Removing finished torrent `{}` from transmission', torrent.name)
884 remove_ids.append(torrent.id)
885 if remove_ids:
886 self.client.remove_torrent(remove_ids, config.get('delete_files'))
887
888
889 @event('plugin.register')
890 def register_plugin():
891 plugin.register(PluginTransmission, 'transmission', api_ver=2)
892 plugin.register(PluginTransmissionInput, 'from_transmission', api_ver=2)
893 plugin.register(PluginTransmissionClean, 'clean_transmission', api_ver=2)
894
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/flexget/plugins/clients/transmission.py b/flexget/plugins/clients/transmission.py
--- a/flexget/plugins/clients/transmission.py
+++ b/flexget/plugins/clients/transmission.py
@@ -7,6 +7,7 @@
from time import sleep
from urllib.parse import urlparse
+from functools import partial
from loguru import logger
from flexget import plugin
| {"golden_diff": "diff --git a/flexget/plugins/clients/transmission.py b/flexget/plugins/clients/transmission.py\n--- a/flexget/plugins/clients/transmission.py\n+++ b/flexget/plugins/clients/transmission.py\n@@ -7,6 +7,7 @@\n from time import sleep\n from urllib.parse import urlparse\n \n+from functools import partial\n from loguru import logger\n \n from flexget import plugin\n", "issue": "new bug in 3.1.85 NameError: name 'partial' is not defined\n<!---\r\nBefore opening an issue, verify:\r\n\r\n- Is this a feature request? Post it on https://feathub.com/Flexget/Flexget\r\n- Is this an issue with webui? Make an issue over on https://github.com/Flexget/webui\r\n- Did you recently upgrade? Look at the Change Log and Upgrade Actions to make sure that you don't need to make any changes to your config https://flexget.com/ChangeLog https://flexget.com/UpgradeActions\r\n- Are you running FlexGet as a daemon? Stop it completely and then start it again https://flexget.com/CLI/daemon\r\n- Did you search to see if the issue already exists? https://github.com/Flexget/Flexget/issues\r\n- Did you fill out the issue template as completely as possible?\r\n\r\nThe issue template is here because it helps to ensure you submitted all the necessary information the first time, and allows us to more quickly review issues. Please fill it out correctly and do not ignore it, no matter how irrelevant you think it may be. Thanks in advance for your help with this!\r\n--->\r\n\r\n### Expected behaviour:\r\n\r\n<!---\r\nPlease don't just say \"it doesn't crash\" or \"it works\". Explain what the expected result is.\r\n--->\r\n version 3.1.85 works with transmission plugin\r\n\r\n### Actual behaviour:\r\n version 3.1.85 is broken\r\nrollback to 3.1.84 fixes the issue\r\n### Steps to reproduce:\r\n- Step 1: ...\r\nrun version 3.8.5 with transmission plugin\r\n- Step 2:..\r\nbreak immediately\r\n- Step 3:..\r\nthis issue is introduced from https://github.com/Flexget/Flexget/pull/2773/ , please revert \r\n\r\n#### Config:\r\n```yaml\r\nPaste FULL config and remove any personal info if config is too long, attach the file to the ticket.\r\nIf issue is with a single task, you can get get resulting configuration by running:\r\n flexget execute --task <NAME> --dump-config\r\nMake sure to redact any personal information (passwords, api keys, etc) !\r\n```\r\n \r\n#### Log:\r\n<details>\r\n <summary>(click to expand)</summary>\r\n\r\n```\r\nNameError: name 'partial' is not defined\r\nTraceback (most recent call last):\r\n File \"/home/ubuntu/flexget/lib/python3.8/site-packages/flexget/task.py\", line 547, in __run_plugin\r\n result = method(*args, **kwargs)\r\n File \"/home/ubuntu/flexget/lib/python3.8/site-packages/flexget/event.py\", line 20, in call\r\n return self.func(*args, **kwargs)\r\n File \"/home/ubuntu/flexget/lib/python3.8/site-packages/flexget/plugins/clients/transmission.py\", line 616, in on_task_output\r\n start_torrent = partial(self.client.start_torrent, [torrent_info.id])\r\nNameError: name 'partial' is not defined\r\n```\r\n</details>\r\n\r\n### Additional information:\r\n\r\n- FlexGet version: 3.1.85\r\n- Python version: 3.8.6\r\n- Installation method: python virtualenv\r\n- Using daemon (yes/no): yes\r\n- OS and version: ubuntu\r\n- Link to crash log: ^\r\n\r\n<!---\r\nIn config and debug/crash logs, remember to redact any personal or sensitive information such as passwords, API keys, private URLs and so on.\r\n\r\nPlease verify that the following data is present before submitting your issue:\r\n\r\n- 
Link to a paste service or paste above the relevant config (preferably full config, including templates if present). Please make sure the paste does not expire, if possible.\r\n- Link to a paste service or paste above debug-level logs of the relevant task/s (use `flexget -L debug execute --tasks <Task_name>`).\r\n- FlexGet version (use `flexget -V` to get it).\r\n- Full Python version, for example `2.7.11` (use `python -V` to get it). \r\n- Installation method (pip, git install, etc).\r\n- Whether or not you're running FlexGet as a daemon.\r\n- OS and version.\r\n- Attach crash log if one was generated, in addition to the debug-level log. It can be found in the directory with your config file.\r\n--->\r\n\n", "before_files": [{"content": "import base64\nimport os\nimport re\nfrom datetime import datetime, timedelta\nfrom fnmatch import fnmatch\nfrom netrc import NetrcParseError, netrc\nfrom time import sleep\nfrom urllib.parse import urlparse\n\nfrom loguru import logger\n\nfrom flexget import plugin\nfrom flexget.config_schema import one_or_more\nfrom flexget.entry import Entry\nfrom flexget.event import event\nfrom flexget.utils.pathscrub import pathscrub\nfrom flexget.utils.template import RenderError\nfrom flexget.utils.tools import parse_timedelta\n\ntry:\n import transmissionrpc\n from transmissionrpc import HTTPHandlerError, TransmissionError\nexcept ImportError:\n # If transmissionrpc is not found, errors will be shown later\n pass\n\nlogger = logger.bind(name='transmission')\n\n\nclass TransmissionBase:\n def __init__(self):\n self.client = None\n self.opener = None\n\n def prepare_config(self, config):\n if isinstance(config, bool):\n config = {'enabled': config}\n config.setdefault('enabled', True)\n config.setdefault('host', 'localhost')\n config.setdefault('port', 9091)\n config.setdefault('main_file_ratio', 0.9)\n if 'netrc' in config:\n netrc_path = os.path.expanduser(config['netrc'])\n try:\n config['username'], _, config['password'] = netrc(netrc_path).authenticators(\n config['host']\n )\n except OSError as e:\n logger.error('netrc: unable to open: {}', e.filename)\n except NetrcParseError as e:\n logger.error('netrc: {}, file: {}, line: {}', e.msg, e.filename, e.lineno)\n return config\n\n def create_rpc_client(self, config):\n user, password = config.get('username'), config.get('password')\n\n try:\n cli = transmissionrpc.Client(config['host'], config['port'], user, password)\n except TransmissionError as e:\n if isinstance(e.original, HTTPHandlerError):\n if e.original.code == 111:\n raise plugin.PluginError(\"Cannot connect to transmission. Is it running?\")\n elif e.original.code == 401:\n raise plugin.PluginError(\n \"Username/password for transmission is incorrect. 
Cannot connect.\"\n )\n elif e.original.code == 110:\n raise plugin.PluginError(\n \"Cannot connect to transmission: Connection timed out.\"\n )\n else:\n raise plugin.PluginError(\n \"Error connecting to transmission: %s\" % e.original.message\n )\n else:\n raise plugin.PluginError(\"Error connecting to transmission: %s\" % e.message)\n return cli\n\n def torrent_info(self, torrent, config):\n done = torrent.totalSize > 0\n vloc = None\n best = None\n for t in torrent.files().items():\n tf = t[1]\n if tf['selected']:\n if tf['size'] <= 0 or tf['completed'] < tf['size']:\n done = False\n break\n if not best or tf['size'] > best[1]:\n best = (tf['name'], tf['size'])\n if (\n done\n and best\n and (100 * float(best[1]) / float(torrent.totalSize))\n >= (config['main_file_ratio'] * 100)\n ):\n vloc = ('%s/%s' % (torrent.downloadDir, best[0])).replace('/', os.sep)\n return done, vloc\n\n def check_seed_limits(self, torrent, session):\n seed_limit_ok = True # will remain if no seed ratio defined\n idle_limit_ok = True # will remain if no idle limit defined\n\n if torrent.seedRatioMode == 1: # use torrent's own seed ratio limit\n seed_limit_ok = torrent.uploadRatio >= torrent.seedRatioLimit\n elif torrent.seedRatioMode == 0: # use global rules\n if session.seedRatioLimited:\n seed_limit_ok = torrent.uploadRatio >= session.seedRatioLimit\n\n if torrent.seedIdleMode == 1: # use torrent's own idle limit\n idle_limit_ok = (\n torrent.date_active + timedelta(minutes=torrent.seedIdleLimit) < datetime.now()\n )\n elif torrent.seedIdleMode == 0: # use global rules\n if session.idle_seeding_limit_enabled:\n idle_limit_ok = (\n torrent.date_active + timedelta(minutes=session.idle_seeding_limit)\n < datetime.now()\n )\n\n return seed_limit_ok, idle_limit_ok\n\n def on_task_start(self, task, config):\n try:\n import transmissionrpc\n from transmissionrpc import HTTPHandlerError # noqa\n from transmissionrpc import TransmissionError # noqa\n except:\n raise plugin.PluginError(\n 'Transmissionrpc module version 0.11 or higher required.', logger\n )\n if [int(part) for part in transmissionrpc.__version__.split('.')] < [0, 11]:\n raise plugin.PluginError(\n 'Transmissionrpc module version 0.11 or higher required, please upgrade', logger\n )\n\n # Mark rpc client for garbage collector so every task can start\n # a fresh new according its own config - fix to bug #2804\n self.client = None\n config = self.prepare_config(config)\n if config['enabled']:\n if task.options.test:\n logger.info('Trying to connect to transmission...')\n self.client = self.create_rpc_client(config)\n if self.client:\n logger.info('Successfully connected to transmission.')\n else:\n logger.error('It looks like there was a problem connecting to transmission.')\n\n\nclass PluginTransmissionInput(TransmissionBase):\n schema = {\n 'anyOf': [\n {'type': 'boolean'},\n {\n 'type': 'object',\n 'properties': {\n 'host': {'type': 'string'},\n 'port': {'type': 'integer'},\n 'netrc': {'type': 'string', 'format': 'file'},\n 'username': {'type': 'string'},\n 'password': {'type': 'string'},\n 'enabled': {'type': 'boolean'},\n 'only_complete': {'type': 'boolean'},\n },\n 'additionalProperties': False,\n },\n ]\n }\n\n def prepare_config(self, config):\n config = TransmissionBase.prepare_config(self, config)\n config.setdefault('only_complete', False)\n return config\n\n def on_task_input(self, task, config):\n config = self.prepare_config(config)\n if not config['enabled']:\n return\n\n if not self.client:\n self.client = self.create_rpc_client(config)\n 
entries = []\n\n # Hack/Workaround for http://flexget.com/ticket/2002\n # TODO: Proper fix\n if 'username' in config and 'password' in config:\n self.client.http_handler.set_authentication(\n self.client.url, config['username'], config['password']\n )\n\n session = self.client.get_session()\n\n for torrent in self.client.get_torrents():\n seed_ratio_ok, idle_limit_ok = self.check_seed_limits(torrent, session)\n if config['only_complete'] and not (\n seed_ratio_ok and idle_limit_ok and torrent.progress == 100\n ):\n continue\n entry = Entry(\n title=torrent.name,\n url='',\n torrent_info_hash=torrent.hashString,\n content_size=torrent.totalSize / (1024 * 1024),\n )\n # Location of torrent is only valid if transmission is on same machine as flexget\n if config['host'] in ('localhost', '127.0.0.1'):\n entry['location'] = torrent.torrentFile\n entry['url'] = 'file://' + torrent.torrentFile\n for attr in [\n 'id',\n 'comment',\n 'desiredAvailable',\n 'downloadDir',\n 'isFinished',\n 'isPrivate',\n 'leftUntilDone',\n 'ratio',\n 'status',\n 'date_active',\n 'date_added',\n 'date_done',\n 'date_started',\n 'errorString',\n 'priority',\n 'progress',\n 'secondsDownloading',\n 'secondsSeeding',\n 'torrentFile',\n ]:\n try:\n entry['transmission_' + attr] = getattr(torrent, attr)\n except Exception:\n logger.opt(exception=True).debug(\n 'error when requesting transmissionrpc attribute {}', attr\n )\n # Availability in percent\n entry['transmission_availability'] = (torrent.desiredAvailable / torrent.leftUntilDone) if torrent.leftUntilDone else 0\n \n entry['transmission_trackers'] = [t['announce'] for t in torrent.trackers]\n entry['transmission_seed_ratio_ok'] = seed_ratio_ok\n entry['transmission_idle_limit_ok'] = idle_limit_ok\n st_error_to_desc = {\n 0: 'OK',\n 1: 'tracker_warning',\n 2: 'tracker_error',\n 3: 'local_error',\n }\n entry['transmission_error_state'] = st_error_to_desc[torrent.error]\n # Built in done_date doesn't work when user adds an already completed file to transmission\n if torrent.progress == 100:\n entry['transmission_date_done'] = datetime.fromtimestamp(\n max(torrent.addedDate, torrent.doneDate)\n )\n entries.append(entry)\n return entries\n\n\nclass PluginTransmission(TransmissionBase):\n \"\"\"\n Add url from entry url to transmission\n\n Example::\n\n transmission:\n host: localhost\n port: 9091\n netrc: /home/flexget/.tmnetrc\n username: myusername\n password: mypassword\n path: the download location\n\n Default values for the config elements::\n\n transmission:\n host: localhost\n port: 9091\n enabled: yes\n \"\"\"\n\n schema = {\n 'anyOf': [\n {'type': 'boolean'},\n {\n 'type': 'object',\n 'properties': {\n 'host': {'type': 'string'},\n 'port': {'type': 'integer'},\n 'netrc': {'type': 'string'},\n 'username': {'type': 'string'},\n 'password': {'type': 'string'},\n 'action': {\n 'type': 'string',\n 'enum': ['add', 'remove', 'purge', 'pause', 'resume', 'bypass_queue'],\n },\n 'path': {'type': 'string'},\n 'max_up_speed': {'type': 'number'},\n 'max_down_speed': {'type': 'number'},\n 'max_connections': {'type': 'integer'},\n 'ratio': {'type': 'number'},\n 'add_paused': {'type': 'boolean'},\n 'content_filename': {'type': 'string'},\n 'main_file_only': {'type': 'boolean'},\n 'main_file_ratio': {'type': 'number'},\n 'magnetization_timeout': {'type': 'integer'},\n 'enabled': {'type': 'boolean'},\n 'include_subs': {'type': 'boolean'},\n 'bandwidth_priority': {'type': 'number'},\n 'honor_limits': {'type': 'boolean'},\n 'include_files': one_or_more({'type': 'string'}),\n 
'skip_files': one_or_more({'type': 'string'}),\n 'rename_like_files': {'type': 'boolean'},\n 'queue_position': {'type': 'integer'},\n },\n 'additionalProperties': False,\n },\n ]\n }\n\n def prepare_config(self, config):\n config = TransmissionBase.prepare_config(self, config)\n config.setdefault('action', 'add')\n config.setdefault('path', '')\n config.setdefault('main_file_only', False)\n config.setdefault('magnetization_timeout', 0)\n config.setdefault('include_subs', False)\n config.setdefault('rename_like_files', False)\n config.setdefault('include_files', [])\n return config\n\n @plugin.priority(120)\n def on_task_download(self, task, config):\n \"\"\"\n Call download plugin to generate the temp files we will load\n into deluge then verify they are valid torrents\n \"\"\"\n config = self.prepare_config(config)\n if not config['enabled']:\n return\n # If the download plugin is not enabled, we need to call it to get our temp .torrent files\n if 'download' not in task.config:\n download = plugin.get('download', self)\n for entry in task.accepted:\n if entry.get('transmission_id'):\n # The torrent is already loaded in deluge, we don't need to get anything\n continue\n if config['action'] != 'add' and entry.get('torrent_info_hash'):\n # If we aren't adding the torrent new, all we need is info hash\n continue\n download.get_temp_file(task, entry, handle_magnets=True, fail_html=True)\n\n @plugin.priority(135)\n def on_task_output(self, task, config):\n config = self.prepare_config(config)\n # don't add when learning\n if task.options.learn:\n return\n if not config['enabled']:\n return\n # Do not run if there is nothing to do\n if not task.accepted:\n return\n if self.client is None:\n self.client = self.create_rpc_client(config)\n if self.client:\n logger.debug('Successfully connected to transmission.')\n else:\n raise plugin.PluginError(\"Couldn't connect to transmission.\")\n session_torrents = self.client.get_torrents()\n for entry in task.accepted:\n if task.options.test:\n logger.info('Would {} {} in transmission.', config['action'], entry['title'])\n continue\n # Compile user options into appropriate dict\n options = self._make_torrent_options_dict(config, entry)\n torrent_info = None\n for t in session_torrents:\n if t.hashString.lower() == entry.get(\n 'torrent_info_hash', ''\n ).lower() or t.id == entry.get('transmission_id'):\n torrent_info = t\n logger.debug(\n 'Found {} already loaded in transmission as {}',\n entry['title'],\n torrent_info.name,\n )\n break\n\n if not torrent_info:\n if config['action'] != 'add':\n logger.warning(\n 'Cannot {} {} because it is not loaded in transmission.',\n config['action'],\n entry['title'],\n )\n continue\n downloaded = not entry['url'].startswith('magnet:')\n\n # Check that file is downloaded\n if downloaded and 'file' not in entry:\n entry.fail('`file` field missing?')\n continue\n\n # Verify the temp file exists\n if downloaded and not os.path.exists(entry['file']):\n tmp_path = os.path.join(task.manager.config_base, 'temp')\n logger.debug('entry: {}', entry)\n logger.debug('temp: {}', ', '.join(os.listdir(tmp_path)))\n entry.fail(\"Downloaded temp file '%s' doesn't exist!?\" % entry['file'])\n continue\n\n try:\n if downloaded:\n with open(entry['file'], 'rb') as f:\n filedump = base64.b64encode(f.read()).decode('utf-8')\n torrent_info = self.client.add_torrent(filedump, 30, **options['add'])\n else:\n if options['post'].get('magnetization_timeout', 0) > 0:\n options['add']['paused'] = False\n torrent_info = self.client.add_torrent(\n 
entry['url'], timeout=30, **options['add']\n )\n except TransmissionError as e:\n logger.opt(exception=True).debug('TransmissionError')\n logger.debug('Failed options dict: {}', options['add'])\n msg = 'Error adding {} to transmission. TransmissionError: {}'.format(\n entry['title'], e.message or 'N/A'\n )\n logger.error(msg)\n entry.fail(msg)\n continue\n logger.info('\"{}\" torrent added to transmission', entry['title'])\n # The info returned by the add call is incomplete, refresh it\n torrent_info = self.client.get_torrent(torrent_info.id)\n else:\n # Torrent already loaded in transmission\n if options['add'].get('download_dir'):\n logger.verbose(\n 'Moving {} to \"{}\"', torrent_info.name, options['add']['download_dir']\n )\n # Move data even if current reported torrent location matches new location\n # as transmission may fail to automatically move completed file to final\n # location but continue reporting final location instead of real location.\n # In such case this will kick transmission to really move data.\n # If data is already located at new location then transmission just ignore\n # this command.\n self.client.move_torrent_data(\n torrent_info.id, options['add']['download_dir'], 120\n )\n\n try:\n total_size = torrent_info.totalSize\n main_id = None\n find_main_file = (\n options['post'].get('main_file_only') or 'content_filename' in options['post']\n )\n skip_files = options['post'].get('skip_files')\n # We need to index the files if any of the following are defined\n if find_main_file or skip_files:\n file_list = self.client.get_files(torrent_info.id)[torrent_info.id]\n\n if options['post'].get('magnetization_timeout', 0) > 0 and not file_list:\n logger.debug(\n 'Waiting {} seconds for \"{}\" to magnetize',\n options['post']['magnetization_timeout'],\n entry['title'],\n )\n for _ in range(options['post']['magnetization_timeout']):\n sleep(1)\n file_list = self.client.get_files(torrent_info.id)[torrent_info.id]\n if file_list:\n total_size = self.client.get_torrent(\n torrent_info.id, ['id', 'totalSize']\n ).totalSize\n break\n else:\n logger.warning(\n '\"{}\" did not magnetize before the timeout elapsed, file list unavailable for processing.',\n entry['title'],\n )\n\n # Find files based on config\n dl_list = []\n skip_list = []\n main_list = []\n ext_list = ['*.srt', '*.sub', '*.idx', '*.ssa', '*.ass']\n\n main_ratio = config['main_file_ratio']\n if 'main_file_ratio' in options['post']:\n main_ratio = options['post']['main_file_ratio']\n\n for f in file_list:\n # No need to set main_id if we're not going to need it\n if find_main_file and file_list[f]['size'] > total_size * main_ratio:\n main_id = f\n\n if 'include_files' in options['post']:\n if any(\n fnmatch(file_list[f]['name'], mask)\n for mask in options['post']['include_files']\n ):\n dl_list.append(f)\n elif options['post'].get('include_subs') and any(\n fnmatch(file_list[f]['name'], mask) for mask in ext_list\n ):\n dl_list.append(f)\n\n if skip_files:\n if any(fnmatch(file_list[f]['name'], mask) for mask in skip_files):\n skip_list.append(f)\n\n if main_id is not None:\n # Look for files matching main ID title but with a different extension\n if options['post'].get('rename_like_files'):\n for f in file_list:\n # if this filename matches main filename we want to rename it as well\n fs = os.path.splitext(file_list[f]['name'])\n if fs[0] == os.path.splitext(file_list[main_id]['name'])[0]:\n main_list.append(f)\n else:\n main_list = [main_id]\n\n if main_id not in dl_list:\n dl_list.append(main_id)\n elif 
find_main_file:\n logger.warning(\n 'No files in \"{}\" are > {:.0f}% of content size, no files renamed.',\n entry['title'],\n main_ratio * 100,\n )\n\n # If we have a main file and want to rename it and associated files\n if 'content_filename' in options['post'] and main_id is not None:\n if 'download_dir' not in options['add']:\n download_dir = self.client.get_session().download_dir\n else:\n download_dir = options['add']['download_dir']\n\n # Get new filename without ext\n file_ext = os.path.splitext(file_list[main_id]['name'])[1]\n file_path = os.path.dirname(\n os.path.join(download_dir, file_list[main_id]['name'])\n )\n filename = options['post']['content_filename']\n if config['host'] == 'localhost' or config['host'] == '127.0.0.1':\n counter = 1\n while os.path.exists(os.path.join(file_path, filename + file_ext)):\n # Try appending a (#) suffix till a unique filename is found\n filename = '%s(%s)' % (\n options['post']['content_filename'],\n counter,\n )\n counter += 1\n else:\n logger.debug(\n 'Cannot ensure content_filename is unique '\n 'when adding to a remote transmission daemon.'\n )\n\n for index in main_list:\n file_ext = os.path.splitext(file_list[index]['name'])[1]\n logger.debug(\n 'File {} renamed to {}',\n file_list[index]['name'],\n filename + file_ext,\n )\n # change to below when set_files will allow setting name, more efficient to have one call\n # fl[index]['name'] = os.path.basename(pathscrub(filename + file_ext).encode('utf-8'))\n try:\n self.client.rename_torrent_path(\n torrent_info.id,\n file_list[index]['name'],\n os.path.basename(str(pathscrub(filename + file_ext))),\n )\n except TransmissionError:\n logger.error(\n 'content_filename only supported with transmission 2.8+'\n )\n\n if options['post'].get('main_file_only') and main_id is not None:\n # Set Unwanted Files\n options['change']['files_unwanted'] = [\n x for x in file_list if x not in dl_list\n ]\n options['change']['files_wanted'] = dl_list\n logger.debug(\n 'Downloading {} of {} files in torrent.',\n len(options['change']['files_wanted']),\n len(file_list),\n )\n elif (\n not options['post'].get('main_file_only') or main_id is None\n ) and skip_files:\n # If no main file and we want to skip files\n\n if len(skip_list) >= len(file_list):\n logger.debug(\n 'skip_files filter would cause no files to be downloaded; '\n 'including all files in torrent.'\n )\n else:\n options['change']['files_unwanted'] = skip_list\n options['change']['files_wanted'] = [\n x for x in file_list if x not in skip_list\n ]\n logger.debug(\n 'Downloading {} of {} files in torrent.',\n len(options['change']['files_wanted']),\n len(file_list),\n )\n\n # Set any changed file properties\n if list(options['change'].keys()):\n self.client.change_torrent(torrent_info.id, 30, **options['change'])\n\n start_torrent = partial(self.client.start_torrent, [torrent_info.id])\n\n if config['action'] == 'add':\n # if add_paused was defined and set to False start the torrent;\n # prevents downloading data before we set what files we want\n start_paused = (\n options['post']['paused']\n if 'paused' in options['post']\n else not self.client.get_session().start_added_torrents\n )\n if start_paused:\n self.client.stop_torrent(torrent_info.id)\n else:\n self.client.start_torrent(torrent_info.id)\n elif config['action'] in ('remove', 'purge'):\n self.client.remove_torrent(\n [torrent_info.id], delete_data=config['action'] == 'purge'\n )\n logger.info('{}d {} from transmission', config['action'], torrent_info.name)\n elif config['action'] == 
'pause':\n self.client.stop_torrent([torrent_info.id])\n logger.info('paused {} in transmission', torrent_info.name)\n elif config['action'] == 'resume':\n start_torrent()\n logger.info('resumed {} in transmission', torrent_info.name)\n elif config['action'] == 'bypass_queue':\n start_torrent(bypass_queue=True)\n logger.info('resumed (bypass queue) {} in transmission', torrent_info.name)\n \n except TransmissionError as e:\n logger.opt(exception=True).debug('TransmissionError')\n logger.debug('Failed options dict: {}', options)\n msg = 'Error trying to {} {}, TransmissionError: {}'.format(\n config['action'], entry['title'], e.message or 'N/A'\n )\n logger.error(msg)\n continue\n\n def _make_torrent_options_dict(self, config, entry):\n\n opt_dic = {}\n\n for opt_key in (\n 'path',\n 'add_paused',\n 'honor_limits',\n 'bandwidth_priority',\n 'max_connections',\n 'max_up_speed',\n 'max_down_speed',\n 'ratio',\n 'main_file_only',\n 'main_file_ratio',\n 'magnetization_timeout',\n 'include_subs',\n 'content_filename',\n 'include_files',\n 'skip_files',\n 'rename_like_files',\n 'queue_position',\n ):\n # Values do not merge config with task\n # Task takes priority then config is used\n if opt_key in entry:\n opt_dic[opt_key] = entry[opt_key]\n elif opt_key in config:\n opt_dic[opt_key] = config[opt_key]\n\n options = {'add': {}, 'change': {}, 'post': {}}\n\n add = options['add']\n if opt_dic.get('path'):\n try:\n path = os.path.expanduser(entry.render(opt_dic['path']))\n except RenderError as e:\n logger.error('Error setting path for {}: {}', entry['title'], e)\n else:\n # Transmission doesn't like it when paths end in a separator\n path = path.rstrip('\\\\/')\n add['download_dir'] = pathscrub(path)\n # make sure we add it paused, will modify status after adding\n add['paused'] = True\n\n change = options['change']\n if 'bandwidth_priority' in opt_dic:\n change['bandwidthPriority'] = opt_dic['bandwidth_priority']\n if 'honor_limits' in opt_dic and not opt_dic['honor_limits']:\n change['honorsSessionLimits'] = False\n if 'max_up_speed' in opt_dic:\n change['uploadLimit'] = opt_dic['max_up_speed']\n change['uploadLimited'] = True\n if 'max_down_speed' in opt_dic:\n change['downloadLimit'] = opt_dic['max_down_speed']\n change['downloadLimited'] = True\n if 'max_connections' in opt_dic:\n change['peer_limit'] = opt_dic['max_connections']\n\n if 'ratio' in opt_dic:\n change['seedRatioLimit'] = opt_dic['ratio']\n if opt_dic['ratio'] == -1:\n # seedRatioMode:\n # 0 follow the global settings\n # 1 override the global settings, seeding until a certain ratio\n # 2 override the global settings, seeding regardless of ratio\n change['seedRatioMode'] = 2\n else:\n change['seedRatioMode'] = 1\n\n if 'queue_position' in opt_dic:\n change['queuePosition'] = opt_dic['queue_position']\n\n post = options['post']\n # set to modify paused status after\n if 'add_paused' in opt_dic:\n post['paused'] = opt_dic['add_paused']\n if 'main_file_only' in opt_dic:\n post['main_file_only'] = opt_dic['main_file_only']\n if 'main_file_ratio' in opt_dic:\n post['main_file_ratio'] = opt_dic['main_file_ratio']\n if 'magnetization_timeout' in opt_dic:\n post['magnetization_timeout'] = opt_dic['magnetization_timeout']\n if 'include_subs' in opt_dic:\n post['include_subs'] = opt_dic['include_subs']\n if 'content_filename' in opt_dic:\n try:\n post['content_filename'] = entry.render(opt_dic['content_filename'])\n except RenderError as e:\n logger.error('Unable to render content_filename {}: {}', entry['title'], e)\n if 'skip_files' in 
opt_dic:\n post['skip_files'] = opt_dic['skip_files']\n if not isinstance(post['skip_files'], list):\n post['skip_files'] = [post['skip_files']]\n if 'include_files' in opt_dic:\n post['include_files'] = opt_dic['include_files']\n if not isinstance(post['include_files'], list):\n post['include_files'] = [post['include_files']]\n if 'rename_like_files' in opt_dic:\n post['rename_like_files'] = opt_dic['rename_like_files']\n return options\n\n def on_task_learn(self, task, config):\n \"\"\" Make sure all temp files are cleaned up when entries are learned \"\"\"\n # If download plugin is enabled, it will handle cleanup.\n if 'download' not in task.config:\n download = plugin.get('download', self)\n download.cleanup_temp_files(task)\n\n on_task_abort = on_task_learn\n\n\nclass PluginTransmissionClean(TransmissionBase):\n \"\"\"\n DEPRECATED: A separate task using from_transmission and transmission with remove action should be used instead.\n\n Remove completed torrents from Transmission.\n\n Examples::\n\n clean_transmission: yes # ignore both time and ratio\n\n clean_transmission: # uses transmission's internal limits for idle time and seed ratio ( if defined )\n transmission_seed_limits: yes\n\n clean_transmission: # matches time only\n finished_for: 2 hours\n\n clean_transmission: # matches ratio only\n min_ratio: 0.5\n\n clean_transmission: # matches time OR ratio\n finished_for: 2 hours\n min_ratio: 0.5\n\n Default values for the config elements::\n\n clean_transmission:\n host: localhost\n port: 9091\n enabled: yes\n \"\"\"\n\n schema = {\n \"deprecated\": \"The clean_transmission plugin is deprecated. Configure a new task using the from_transmission \"\n \"plugin as well as the transmission plugin using the remove or purge action.\",\n \"anyOf\": [\n {\"type\": \"boolean\"},\n {\n \"type\": \"object\",\n \"properties\": {\n \"host\": {\"type\": \"string\"},\n \"port\": {\"type\": \"integer\"},\n \"netrc\": {\"type\": \"string\", \"format\": \"file\"},\n \"username\": {\"type\": \"string\"},\n \"password\": {\"type\": \"string\"},\n \"enabled\": {\"type\": \"boolean\"},\n \"min_ratio\": {\"type\": \"number\"},\n \"finished_for\": {\"type\": \"string\", \"format\": \"interval\"},\n \"transmission_seed_limits\": {\"type\": \"boolean\"},\n \"delete_files\": {\"type\": \"boolean\"},\n \"tracker\": {\"type\": \"string\", \"format\": \"regex\"},\n \"preserve_tracker\": {\"type\": \"string\", \"format\": \"regex\"},\n \"directories\": {\n \"type\": \"array\",\n \"items\": {\"type\": \"string\", \"format\": \"regex\"},\n },\n },\n \"additionalProperties\": False,\n },\n ],\n }\n\n def on_task_exit(self, task, config):\n config = self.prepare_config(config)\n if not config['enabled'] or task.options.learn:\n return\n if not self.client:\n self.client = self.create_rpc_client(config)\n tracker_re = re.compile(config['tracker'], re.IGNORECASE) if 'tracker' in config else None\n preserve_tracker_re = (\n re.compile(config['preserve_tracker'], re.IGNORECASE)\n if 'preserve_tracker' in config\n else None\n )\n\n session = self.client.get_session()\n\n remove_ids = []\n for torrent in self.client.get_torrents():\n logger.verbose(\n 'Torrent \"{}\": status: \"{}\" - ratio: {} - date added: {}',\n torrent.name,\n torrent.status,\n torrent.ratio,\n torrent.date_added,\n )\n downloaded, dummy = self.torrent_info(torrent, config)\n if not downloaded:\n continue\n if config.get('transmission_seed_limits'):\n seed_ratio_ok, idle_limit_ok = self.check_seed_limits(torrent, session)\n if not seed_ratio_ok or not 
idle_limit_ok:\n continue\n if 'min_ratio' in config:\n if torrent.ratio < config['min_ratio']:\n continue\n if 'finished_for' in config:\n # done date might be invalid if this torrent was added to transmission when already completed\n started_seeding = datetime.fromtimestamp(max(torrent.addedDate, torrent.doneDate))\n if started_seeding + parse_timedelta(config['finished_for']) > datetime.now():\n continue\n tracker_hosts = (\n urlparse(tracker['announce']).hostname for tracker in torrent.trackers\n )\n if 'tracker' in config:\n if not any(tracker_re.search(tracker) for tracker in tracker_hosts):\n continue\n if 'preserve_tracker' in config:\n if any(preserve_tracker_re.search(tracker) for tracker in tracker_hosts):\n continue\n if config.get('directories'):\n if not any(\n re.search(d, torrent.downloadDir, re.IGNORECASE) for d in config['directories']\n ):\n continue\n if task.options.test:\n logger.info('Would remove finished torrent `{}` from transmission', torrent.name)\n continue\n logger.info('Removing finished torrent `{}` from transmission', torrent.name)\n remove_ids.append(torrent.id)\n if remove_ids:\n self.client.remove_torrent(remove_ids, config.get('delete_files'))\n\n\n@event('plugin.register')\ndef register_plugin():\n plugin.register(PluginTransmission, 'transmission', api_ver=2)\n plugin.register(PluginTransmissionInput, 'from_transmission', api_ver=2)\n plugin.register(PluginTransmissionClean, 'clean_transmission', api_ver=2)\n", "path": "flexget/plugins/clients/transmission.py"}], "after_files": [{"content": "import base64\nimport os\nimport re\nfrom datetime import datetime, timedelta\nfrom fnmatch import fnmatch\nfrom netrc import NetrcParseError, netrc\nfrom time import sleep\nfrom urllib.parse import urlparse\n\nfrom functools import partial\nfrom loguru import logger\n\nfrom flexget import plugin\nfrom flexget.config_schema import one_or_more\nfrom flexget.entry import Entry\nfrom flexget.event import event\nfrom flexget.utils.pathscrub import pathscrub\nfrom flexget.utils.template import RenderError\nfrom flexget.utils.tools import parse_timedelta\n\ntry:\n import transmissionrpc\n from transmissionrpc import HTTPHandlerError, TransmissionError\nexcept ImportError:\n # If transmissionrpc is not found, errors will be shown later\n pass\n\nlogger = logger.bind(name='transmission')\n\n\nclass TransmissionBase:\n def __init__(self):\n self.client = None\n self.opener = None\n\n def prepare_config(self, config):\n if isinstance(config, bool):\n config = {'enabled': config}\n config.setdefault('enabled', True)\n config.setdefault('host', 'localhost')\n config.setdefault('port', 9091)\n config.setdefault('main_file_ratio', 0.9)\n if 'netrc' in config:\n netrc_path = os.path.expanduser(config['netrc'])\n try:\n config['username'], _, config['password'] = netrc(netrc_path).authenticators(\n config['host']\n )\n except OSError as e:\n logger.error('netrc: unable to open: {}', e.filename)\n except NetrcParseError as e:\n logger.error('netrc: {}, file: {}, line: {}', e.msg, e.filename, e.lineno)\n return config\n\n def create_rpc_client(self, config):\n user, password = config.get('username'), config.get('password')\n\n try:\n cli = transmissionrpc.Client(config['host'], config['port'], user, password)\n except TransmissionError as e:\n if isinstance(e.original, HTTPHandlerError):\n if e.original.code == 111:\n raise plugin.PluginError(\"Cannot connect to transmission. 
Is it running?\")\n elif e.original.code == 401:\n raise plugin.PluginError(\n \"Username/password for transmission is incorrect. Cannot connect.\"\n )\n elif e.original.code == 110:\n raise plugin.PluginError(\n \"Cannot connect to transmission: Connection timed out.\"\n )\n else:\n raise plugin.PluginError(\n \"Error connecting to transmission: %s\" % e.original.message\n )\n else:\n raise plugin.PluginError(\"Error connecting to transmission: %s\" % e.message)\n return cli\n\n def torrent_info(self, torrent, config):\n done = torrent.totalSize > 0\n vloc = None\n best = None\n for t in torrent.files().items():\n tf = t[1]\n if tf['selected']:\n if tf['size'] <= 0 or tf['completed'] < tf['size']:\n done = False\n break\n if not best or tf['size'] > best[1]:\n best = (tf['name'], tf['size'])\n if (\n done\n and best\n and (100 * float(best[1]) / float(torrent.totalSize))\n >= (config['main_file_ratio'] * 100)\n ):\n vloc = ('%s/%s' % (torrent.downloadDir, best[0])).replace('/', os.sep)\n return done, vloc\n\n def check_seed_limits(self, torrent, session):\n seed_limit_ok = True # will remain if no seed ratio defined\n idle_limit_ok = True # will remain if no idle limit defined\n\n if torrent.seedRatioMode == 1: # use torrent's own seed ratio limit\n seed_limit_ok = torrent.uploadRatio >= torrent.seedRatioLimit\n elif torrent.seedRatioMode == 0: # use global rules\n if session.seedRatioLimited:\n seed_limit_ok = torrent.uploadRatio >= session.seedRatioLimit\n\n if torrent.seedIdleMode == 1: # use torrent's own idle limit\n idle_limit_ok = (\n torrent.date_active + timedelta(minutes=torrent.seedIdleLimit) < datetime.now()\n )\n elif torrent.seedIdleMode == 0: # use global rules\n if session.idle_seeding_limit_enabled:\n idle_limit_ok = (\n torrent.date_active + timedelta(minutes=session.idle_seeding_limit)\n < datetime.now()\n )\n\n return seed_limit_ok, idle_limit_ok\n\n def on_task_start(self, task, config):\n try:\n import transmissionrpc\n from transmissionrpc import HTTPHandlerError # noqa\n from transmissionrpc import TransmissionError # noqa\n except:\n raise plugin.PluginError(\n 'Transmissionrpc module version 0.11 or higher required.', logger\n )\n if [int(part) for part in transmissionrpc.__version__.split('.')] < [0, 11]:\n raise plugin.PluginError(\n 'Transmissionrpc module version 0.11 or higher required, please upgrade', logger\n )\n\n # Mark rpc client for garbage collector so every task can start\n # a fresh new according its own config - fix to bug #2804\n self.client = None\n config = self.prepare_config(config)\n if config['enabled']:\n if task.options.test:\n logger.info('Trying to connect to transmission...')\n self.client = self.create_rpc_client(config)\n if self.client:\n logger.info('Successfully connected to transmission.')\n else:\n logger.error('It looks like there was a problem connecting to transmission.')\n\n\nclass PluginTransmissionInput(TransmissionBase):\n schema = {\n 'anyOf': [\n {'type': 'boolean'},\n {\n 'type': 'object',\n 'properties': {\n 'host': {'type': 'string'},\n 'port': {'type': 'integer'},\n 'netrc': {'type': 'string', 'format': 'file'},\n 'username': {'type': 'string'},\n 'password': {'type': 'string'},\n 'enabled': {'type': 'boolean'},\n 'only_complete': {'type': 'boolean'},\n },\n 'additionalProperties': False,\n },\n ]\n }\n\n def prepare_config(self, config):\n config = TransmissionBase.prepare_config(self, config)\n config.setdefault('only_complete', False)\n return config\n\n def on_task_input(self, task, config):\n config = 
self.prepare_config(config)\n if not config['enabled']:\n return\n\n if not self.client:\n self.client = self.create_rpc_client(config)\n entries = []\n\n # Hack/Workaround for http://flexget.com/ticket/2002\n # TODO: Proper fix\n if 'username' in config and 'password' in config:\n self.client.http_handler.set_authentication(\n self.client.url, config['username'], config['password']\n )\n\n session = self.client.get_session()\n\n for torrent in self.client.get_torrents():\n seed_ratio_ok, idle_limit_ok = self.check_seed_limits(torrent, session)\n if config['only_complete'] and not (\n seed_ratio_ok and idle_limit_ok and torrent.progress == 100\n ):\n continue\n entry = Entry(\n title=torrent.name,\n url='',\n torrent_info_hash=torrent.hashString,\n content_size=torrent.totalSize / (1024 * 1024),\n )\n # Location of torrent is only valid if transmission is on same machine as flexget\n if config['host'] in ('localhost', '127.0.0.1'):\n entry['location'] = torrent.torrentFile\n entry['url'] = 'file://' + torrent.torrentFile\n for attr in [\n 'id',\n 'comment',\n 'desiredAvailable',\n 'downloadDir',\n 'isFinished',\n 'isPrivate',\n 'leftUntilDone',\n 'ratio',\n 'status',\n 'date_active',\n 'date_added',\n 'date_done',\n 'date_started',\n 'errorString',\n 'priority',\n 'progress',\n 'secondsDownloading',\n 'secondsSeeding',\n 'torrentFile',\n ]:\n try:\n entry['transmission_' + attr] = getattr(torrent, attr)\n except Exception:\n logger.opt(exception=True).debug(\n 'error when requesting transmissionrpc attribute {}', attr\n )\n # Availability in percent\n entry['transmission_availability'] = (torrent.desiredAvailable / torrent.leftUntilDone) if torrent.leftUntilDone else 0\n \n entry['transmission_trackers'] = [t['announce'] for t in torrent.trackers]\n entry['transmission_seed_ratio_ok'] = seed_ratio_ok\n entry['transmission_idle_limit_ok'] = idle_limit_ok\n st_error_to_desc = {\n 0: 'OK',\n 1: 'tracker_warning',\n 2: 'tracker_error',\n 3: 'local_error',\n }\n entry['transmission_error_state'] = st_error_to_desc[torrent.error]\n # Built in done_date doesn't work when user adds an already completed file to transmission\n if torrent.progress == 100:\n entry['transmission_date_done'] = datetime.fromtimestamp(\n max(torrent.addedDate, torrent.doneDate)\n )\n entries.append(entry)\n return entries\n\n\nclass PluginTransmission(TransmissionBase):\n \"\"\"\n Add url from entry url to transmission\n\n Example::\n\n transmission:\n host: localhost\n port: 9091\n netrc: /home/flexget/.tmnetrc\n username: myusername\n password: mypassword\n path: the download location\n\n Default values for the config elements::\n\n transmission:\n host: localhost\n port: 9091\n enabled: yes\n \"\"\"\n\n schema = {\n 'anyOf': [\n {'type': 'boolean'},\n {\n 'type': 'object',\n 'properties': {\n 'host': {'type': 'string'},\n 'port': {'type': 'integer'},\n 'netrc': {'type': 'string'},\n 'username': {'type': 'string'},\n 'password': {'type': 'string'},\n 'action': {\n 'type': 'string',\n 'enum': ['add', 'remove', 'purge', 'pause', 'resume', 'bypass_queue'],\n },\n 'path': {'type': 'string'},\n 'max_up_speed': {'type': 'number'},\n 'max_down_speed': {'type': 'number'},\n 'max_connections': {'type': 'integer'},\n 'ratio': {'type': 'number'},\n 'add_paused': {'type': 'boolean'},\n 'content_filename': {'type': 'string'},\n 'main_file_only': {'type': 'boolean'},\n 'main_file_ratio': {'type': 'number'},\n 'magnetization_timeout': {'type': 'integer'},\n 'enabled': {'type': 'boolean'},\n 'include_subs': {'type': 'boolean'},\n 
'bandwidth_priority': {'type': 'number'},\n 'honor_limits': {'type': 'boolean'},\n 'include_files': one_or_more({'type': 'string'}),\n 'skip_files': one_or_more({'type': 'string'}),\n 'rename_like_files': {'type': 'boolean'},\n 'queue_position': {'type': 'integer'},\n },\n 'additionalProperties': False,\n },\n ]\n }\n\n def prepare_config(self, config):\n config = TransmissionBase.prepare_config(self, config)\n config.setdefault('action', 'add')\n config.setdefault('path', '')\n config.setdefault('main_file_only', False)\n config.setdefault('magnetization_timeout', 0)\n config.setdefault('include_subs', False)\n config.setdefault('rename_like_files', False)\n config.setdefault('include_files', [])\n return config\n\n @plugin.priority(120)\n def on_task_download(self, task, config):\n \"\"\"\n Call download plugin to generate the temp files we will load\n into deluge then verify they are valid torrents\n \"\"\"\n config = self.prepare_config(config)\n if not config['enabled']:\n return\n # If the download plugin is not enabled, we need to call it to get our temp .torrent files\n if 'download' not in task.config:\n download = plugin.get('download', self)\n for entry in task.accepted:\n if entry.get('transmission_id'):\n # The torrent is already loaded in deluge, we don't need to get anything\n continue\n if config['action'] != 'add' and entry.get('torrent_info_hash'):\n # If we aren't adding the torrent new, all we need is info hash\n continue\n download.get_temp_file(task, entry, handle_magnets=True, fail_html=True)\n\n @plugin.priority(135)\n def on_task_output(self, task, config):\n config = self.prepare_config(config)\n # don't add when learning\n if task.options.learn:\n return\n if not config['enabled']:\n return\n # Do not run if there is nothing to do\n if not task.accepted:\n return\n if self.client is None:\n self.client = self.create_rpc_client(config)\n if self.client:\n logger.debug('Successfully connected to transmission.')\n else:\n raise plugin.PluginError(\"Couldn't connect to transmission.\")\n session_torrents = self.client.get_torrents()\n for entry in task.accepted:\n if task.options.test:\n logger.info('Would {} {} in transmission.', config['action'], entry['title'])\n continue\n # Compile user options into appropriate dict\n options = self._make_torrent_options_dict(config, entry)\n torrent_info = None\n for t in session_torrents:\n if t.hashString.lower() == entry.get(\n 'torrent_info_hash', ''\n ).lower() or t.id == entry.get('transmission_id'):\n torrent_info = t\n logger.debug(\n 'Found {} already loaded in transmission as {}',\n entry['title'],\n torrent_info.name,\n )\n break\n\n if not torrent_info:\n if config['action'] != 'add':\n logger.warning(\n 'Cannot {} {} because it is not loaded in transmission.',\n config['action'],\n entry['title'],\n )\n continue\n downloaded = not entry['url'].startswith('magnet:')\n\n # Check that file is downloaded\n if downloaded and 'file' not in entry:\n entry.fail('`file` field missing?')\n continue\n\n # Verify the temp file exists\n if downloaded and not os.path.exists(entry['file']):\n tmp_path = os.path.join(task.manager.config_base, 'temp')\n logger.debug('entry: {}', entry)\n logger.debug('temp: {}', ', '.join(os.listdir(tmp_path)))\n entry.fail(\"Downloaded temp file '%s' doesn't exist!?\" % entry['file'])\n continue\n\n try:\n if downloaded:\n with open(entry['file'], 'rb') as f:\n filedump = base64.b64encode(f.read()).decode('utf-8')\n torrent_info = self.client.add_torrent(filedump, 30, **options['add'])\n else:\n 
if options['post'].get('magnetization_timeout', 0) > 0:\n options['add']['paused'] = False\n torrent_info = self.client.add_torrent(\n entry['url'], timeout=30, **options['add']\n )\n except TransmissionError as e:\n logger.opt(exception=True).debug('TransmissionError')\n logger.debug('Failed options dict: {}', options['add'])\n msg = 'Error adding {} to transmission. TransmissionError: {}'.format(\n entry['title'], e.message or 'N/A'\n )\n logger.error(msg)\n entry.fail(msg)\n continue\n logger.info('\"{}\" torrent added to transmission', entry['title'])\n # The info returned by the add call is incomplete, refresh it\n torrent_info = self.client.get_torrent(torrent_info.id)\n else:\n # Torrent already loaded in transmission\n if options['add'].get('download_dir'):\n logger.verbose(\n 'Moving {} to \"{}\"', torrent_info.name, options['add']['download_dir']\n )\n # Move data even if current reported torrent location matches new location\n # as transmission may fail to automatically move completed file to final\n # location but continue reporting final location instead of real location.\n # In such case this will kick transmission to really move data.\n # If data is already located at new location then transmission just ignore\n # this command.\n self.client.move_torrent_data(\n torrent_info.id, options['add']['download_dir'], 120\n )\n\n try:\n total_size = torrent_info.totalSize\n main_id = None\n find_main_file = (\n options['post'].get('main_file_only') or 'content_filename' in options['post']\n )\n skip_files = options['post'].get('skip_files')\n # We need to index the files if any of the following are defined\n if find_main_file or skip_files:\n file_list = self.client.get_files(torrent_info.id)[torrent_info.id]\n\n if options['post'].get('magnetization_timeout', 0) > 0 and not file_list:\n logger.debug(\n 'Waiting {} seconds for \"{}\" to magnetize',\n options['post']['magnetization_timeout'],\n entry['title'],\n )\n for _ in range(options['post']['magnetization_timeout']):\n sleep(1)\n file_list = self.client.get_files(torrent_info.id)[torrent_info.id]\n if file_list:\n total_size = self.client.get_torrent(\n torrent_info.id, ['id', 'totalSize']\n ).totalSize\n break\n else:\n logger.warning(\n '\"{}\" did not magnetize before the timeout elapsed, file list unavailable for processing.',\n entry['title'],\n )\n\n # Find files based on config\n dl_list = []\n skip_list = []\n main_list = []\n ext_list = ['*.srt', '*.sub', '*.idx', '*.ssa', '*.ass']\n\n main_ratio = config['main_file_ratio']\n if 'main_file_ratio' in options['post']:\n main_ratio = options['post']['main_file_ratio']\n\n for f in file_list:\n # No need to set main_id if we're not going to need it\n if find_main_file and file_list[f]['size'] > total_size * main_ratio:\n main_id = f\n\n if 'include_files' in options['post']:\n if any(\n fnmatch(file_list[f]['name'], mask)\n for mask in options['post']['include_files']\n ):\n dl_list.append(f)\n elif options['post'].get('include_subs') and any(\n fnmatch(file_list[f]['name'], mask) for mask in ext_list\n ):\n dl_list.append(f)\n\n if skip_files:\n if any(fnmatch(file_list[f]['name'], mask) for mask in skip_files):\n skip_list.append(f)\n\n if main_id is not None:\n # Look for files matching main ID title but with a different extension\n if options['post'].get('rename_like_files'):\n for f in file_list:\n # if this filename matches main filename we want to rename it as well\n fs = os.path.splitext(file_list[f]['name'])\n if fs[0] == 
os.path.splitext(file_list[main_id]['name'])[0]:\n main_list.append(f)\n else:\n main_list = [main_id]\n\n if main_id not in dl_list:\n dl_list.append(main_id)\n elif find_main_file:\n logger.warning(\n 'No files in \"{}\" are > {:.0f}% of content size, no files renamed.',\n entry['title'],\n main_ratio * 100,\n )\n\n # If we have a main file and want to rename it and associated files\n if 'content_filename' in options['post'] and main_id is not None:\n if 'download_dir' not in options['add']:\n download_dir = self.client.get_session().download_dir\n else:\n download_dir = options['add']['download_dir']\n\n # Get new filename without ext\n file_ext = os.path.splitext(file_list[main_id]['name'])[1]\n file_path = os.path.dirname(\n os.path.join(download_dir, file_list[main_id]['name'])\n )\n filename = options['post']['content_filename']\n if config['host'] == 'localhost' or config['host'] == '127.0.0.1':\n counter = 1\n while os.path.exists(os.path.join(file_path, filename + file_ext)):\n # Try appending a (#) suffix till a unique filename is found\n filename = '%s(%s)' % (\n options['post']['content_filename'],\n counter,\n )\n counter += 1\n else:\n logger.debug(\n 'Cannot ensure content_filename is unique '\n 'when adding to a remote transmission daemon.'\n )\n\n for index in main_list:\n file_ext = os.path.splitext(file_list[index]['name'])[1]\n logger.debug(\n 'File {} renamed to {}',\n file_list[index]['name'],\n filename + file_ext,\n )\n # change to below when set_files will allow setting name, more efficient to have one call\n # fl[index]['name'] = os.path.basename(pathscrub(filename + file_ext).encode('utf-8'))\n try:\n self.client.rename_torrent_path(\n torrent_info.id,\n file_list[index]['name'],\n os.path.basename(str(pathscrub(filename + file_ext))),\n )\n except TransmissionError:\n logger.error(\n 'content_filename only supported with transmission 2.8+'\n )\n\n if options['post'].get('main_file_only') and main_id is not None:\n # Set Unwanted Files\n options['change']['files_unwanted'] = [\n x for x in file_list if x not in dl_list\n ]\n options['change']['files_wanted'] = dl_list\n logger.debug(\n 'Downloading {} of {} files in torrent.',\n len(options['change']['files_wanted']),\n len(file_list),\n )\n elif (\n not options['post'].get('main_file_only') or main_id is None\n ) and skip_files:\n # If no main file and we want to skip files\n\n if len(skip_list) >= len(file_list):\n logger.debug(\n 'skip_files filter would cause no files to be downloaded; '\n 'including all files in torrent.'\n )\n else:\n options['change']['files_unwanted'] = skip_list\n options['change']['files_wanted'] = [\n x for x in file_list if x not in skip_list\n ]\n logger.debug(\n 'Downloading {} of {} files in torrent.',\n len(options['change']['files_wanted']),\n len(file_list),\n )\n\n # Set any changed file properties\n if list(options['change'].keys()):\n self.client.change_torrent(torrent_info.id, 30, **options['change'])\n\n start_torrent = partial(self.client.start_torrent, [torrent_info.id])\n\n if config['action'] == 'add':\n # if add_paused was defined and set to False start the torrent;\n # prevents downloading data before we set what files we want\n start_paused = (\n options['post']['paused']\n if 'paused' in options['post']\n else not self.client.get_session().start_added_torrents\n )\n if start_paused:\n self.client.stop_torrent(torrent_info.id)\n else:\n self.client.start_torrent(torrent_info.id)\n elif config['action'] in ('remove', 'purge'):\n self.client.remove_torrent(\n 
[torrent_info.id], delete_data=config['action'] == 'purge'\n )\n logger.info('{}d {} from transmission', config['action'], torrent_info.name)\n elif config['action'] == 'pause':\n self.client.stop_torrent([torrent_info.id])\n logger.info('paused {} in transmission', torrent_info.name)\n elif config['action'] == 'resume':\n start_torrent()\n logger.info('resumed {} in transmission', torrent_info.name)\n elif config['action'] == 'bypass_queue':\n start_torrent(bypass_queue=True)\n logger.info('resumed (bypass queue) {} in transmission', torrent_info.name)\n \n except TransmissionError as e:\n logger.opt(exception=True).debug('TransmissionError')\n logger.debug('Failed options dict: {}', options)\n msg = 'Error trying to {} {}, TransmissionError: {}'.format(\n config['action'], entry['title'], e.message or 'N/A'\n )\n logger.error(msg)\n continue\n\n def _make_torrent_options_dict(self, config, entry):\n\n opt_dic = {}\n\n for opt_key in (\n 'path',\n 'add_paused',\n 'honor_limits',\n 'bandwidth_priority',\n 'max_connections',\n 'max_up_speed',\n 'max_down_speed',\n 'ratio',\n 'main_file_only',\n 'main_file_ratio',\n 'magnetization_timeout',\n 'include_subs',\n 'content_filename',\n 'include_files',\n 'skip_files',\n 'rename_like_files',\n 'queue_position',\n ):\n # Values do not merge config with task\n # Task takes priority then config is used\n if opt_key in entry:\n opt_dic[opt_key] = entry[opt_key]\n elif opt_key in config:\n opt_dic[opt_key] = config[opt_key]\n\n options = {'add': {}, 'change': {}, 'post': {}}\n\n add = options['add']\n if opt_dic.get('path'):\n try:\n path = os.path.expanduser(entry.render(opt_dic['path']))\n except RenderError as e:\n logger.error('Error setting path for {}: {}', entry['title'], e)\n else:\n # Transmission doesn't like it when paths end in a separator\n path = path.rstrip('\\\\/')\n add['download_dir'] = pathscrub(path)\n # make sure we add it paused, will modify status after adding\n add['paused'] = True\n\n change = options['change']\n if 'bandwidth_priority' in opt_dic:\n change['bandwidthPriority'] = opt_dic['bandwidth_priority']\n if 'honor_limits' in opt_dic and not opt_dic['honor_limits']:\n change['honorsSessionLimits'] = False\n if 'max_up_speed' in opt_dic:\n change['uploadLimit'] = opt_dic['max_up_speed']\n change['uploadLimited'] = True\n if 'max_down_speed' in opt_dic:\n change['downloadLimit'] = opt_dic['max_down_speed']\n change['downloadLimited'] = True\n if 'max_connections' in opt_dic:\n change['peer_limit'] = opt_dic['max_connections']\n\n if 'ratio' in opt_dic:\n change['seedRatioLimit'] = opt_dic['ratio']\n if opt_dic['ratio'] == -1:\n # seedRatioMode:\n # 0 follow the global settings\n # 1 override the global settings, seeding until a certain ratio\n # 2 override the global settings, seeding regardless of ratio\n change['seedRatioMode'] = 2\n else:\n change['seedRatioMode'] = 1\n\n if 'queue_position' in opt_dic:\n change['queuePosition'] = opt_dic['queue_position']\n\n post = options['post']\n # set to modify paused status after\n if 'add_paused' in opt_dic:\n post['paused'] = opt_dic['add_paused']\n if 'main_file_only' in opt_dic:\n post['main_file_only'] = opt_dic['main_file_only']\n if 'main_file_ratio' in opt_dic:\n post['main_file_ratio'] = opt_dic['main_file_ratio']\n if 'magnetization_timeout' in opt_dic:\n post['magnetization_timeout'] = opt_dic['magnetization_timeout']\n if 'include_subs' in opt_dic:\n post['include_subs'] = opt_dic['include_subs']\n if 'content_filename' in opt_dic:\n try:\n post['content_filename'] = 
entry.render(opt_dic['content_filename'])\n except RenderError as e:\n logger.error('Unable to render content_filename {}: {}', entry['title'], e)\n if 'skip_files' in opt_dic:\n post['skip_files'] = opt_dic['skip_files']\n if not isinstance(post['skip_files'], list):\n post['skip_files'] = [post['skip_files']]\n if 'include_files' in opt_dic:\n post['include_files'] = opt_dic['include_files']\n if not isinstance(post['include_files'], list):\n post['include_files'] = [post['include_files']]\n if 'rename_like_files' in opt_dic:\n post['rename_like_files'] = opt_dic['rename_like_files']\n return options\n\n def on_task_learn(self, task, config):\n \"\"\" Make sure all temp files are cleaned up when entries are learned \"\"\"\n # If download plugin is enabled, it will handle cleanup.\n if 'download' not in task.config:\n download = plugin.get('download', self)\n download.cleanup_temp_files(task)\n\n on_task_abort = on_task_learn\n\n\nclass PluginTransmissionClean(TransmissionBase):\n \"\"\"\n DEPRECATED: A separate task using from_transmission and transmission with remove action should be used instead.\n\n Remove completed torrents from Transmission.\n\n Examples::\n\n clean_transmission: yes # ignore both time and ratio\n\n clean_transmission: # uses transmission's internal limits for idle time and seed ratio ( if defined )\n transmission_seed_limits: yes\n\n clean_transmission: # matches time only\n finished_for: 2 hours\n\n clean_transmission: # matches ratio only\n min_ratio: 0.5\n\n clean_transmission: # matches time OR ratio\n finished_for: 2 hours\n min_ratio: 0.5\n\n Default values for the config elements::\n\n clean_transmission:\n host: localhost\n port: 9091\n enabled: yes\n \"\"\"\n\n schema = {\n \"deprecated\": \"The clean_transmission plugin is deprecated. 
Configure a new task using the from_transmission \"\n \"plugin as well as the transmission plugin using the remove or purge action.\",\n \"anyOf\": [\n {\"type\": \"boolean\"},\n {\n \"type\": \"object\",\n \"properties\": {\n \"host\": {\"type\": \"string\"},\n \"port\": {\"type\": \"integer\"},\n \"netrc\": {\"type\": \"string\", \"format\": \"file\"},\n \"username\": {\"type\": \"string\"},\n \"password\": {\"type\": \"string\"},\n \"enabled\": {\"type\": \"boolean\"},\n \"min_ratio\": {\"type\": \"number\"},\n \"finished_for\": {\"type\": \"string\", \"format\": \"interval\"},\n \"transmission_seed_limits\": {\"type\": \"boolean\"},\n \"delete_files\": {\"type\": \"boolean\"},\n \"tracker\": {\"type\": \"string\", \"format\": \"regex\"},\n \"preserve_tracker\": {\"type\": \"string\", \"format\": \"regex\"},\n \"directories\": {\n \"type\": \"array\",\n \"items\": {\"type\": \"string\", \"format\": \"regex\"},\n },\n },\n \"additionalProperties\": False,\n },\n ],\n }\n\n def on_task_exit(self, task, config):\n config = self.prepare_config(config)\n if not config['enabled'] or task.options.learn:\n return\n if not self.client:\n self.client = self.create_rpc_client(config)\n tracker_re = re.compile(config['tracker'], re.IGNORECASE) if 'tracker' in config else None\n preserve_tracker_re = (\n re.compile(config['preserve_tracker'], re.IGNORECASE)\n if 'preserve_tracker' in config\n else None\n )\n\n session = self.client.get_session()\n\n remove_ids = []\n for torrent in self.client.get_torrents():\n logger.verbose(\n 'Torrent \"{}\": status: \"{}\" - ratio: {} - date added: {}',\n torrent.name,\n torrent.status,\n torrent.ratio,\n torrent.date_added,\n )\n downloaded, dummy = self.torrent_info(torrent, config)\n if not downloaded:\n continue\n if config.get('transmission_seed_limits'):\n seed_ratio_ok, idle_limit_ok = self.check_seed_limits(torrent, session)\n if not seed_ratio_ok or not idle_limit_ok:\n continue\n if 'min_ratio' in config:\n if torrent.ratio < config['min_ratio']:\n continue\n if 'finished_for' in config:\n # done date might be invalid if this torrent was added to transmission when already completed\n started_seeding = datetime.fromtimestamp(max(torrent.addedDate, torrent.doneDate))\n if started_seeding + parse_timedelta(config['finished_for']) > datetime.now():\n continue\n tracker_hosts = (\n urlparse(tracker['announce']).hostname for tracker in torrent.trackers\n )\n if 'tracker' in config:\n if not any(tracker_re.search(tracker) for tracker in tracker_hosts):\n continue\n if 'preserve_tracker' in config:\n if any(preserve_tracker_re.search(tracker) for tracker in tracker_hosts):\n continue\n if config.get('directories'):\n if not any(\n re.search(d, torrent.downloadDir, re.IGNORECASE) for d in config['directories']\n ):\n continue\n if task.options.test:\n logger.info('Would remove finished torrent `{}` from transmission', torrent.name)\n continue\n logger.info('Removing finished torrent `{}` from transmission', torrent.name)\n remove_ids.append(torrent.id)\n if remove_ids:\n self.client.remove_torrent(remove_ids, config.get('delete_files'))\n\n\n@event('plugin.register')\ndef register_plugin():\n plugin.register(PluginTransmission, 'transmission', api_ver=2)\n plugin.register(PluginTransmissionInput, 'from_transmission', api_ver=2)\n plugin.register(PluginTransmissionClean, 'clean_transmission', api_ver=2)\n", "path": "flexget/plugins/clients/transmission.py"}]} |
gh_patches_debug_120 | rasdani/github-patches | git_diff | CTPUG__wafer-111 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Wafer page editing fails on Django 1.8 with an ImproperlyConfigured error
As seen on Travis, and confirmed locally, attempting to edit a page bombs out, ending with
"Specifying both 'fields' and 'form_class' is not permitted."
ImproperlyConfigured: Specifying both 'fields' and 'form_class' is not permitted.
--- END ISSUE ---
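For orientation (this note is not part of the original report): since Django 1.8, views built on `ModelFormMixin` such as `UpdateView` accept either `form_class` or `fields`, but never both at once. Below is a minimal sketch of the two valid configurations, reusing the `Page` and `PageForm` imports that appear in the file further down; the class names `EditWithForm` and `EditWithFields` are made up purely for illustration:

```python
# Sketch of the Django 1.8 constraint (ModelFormMixin.get_form_class):
# a ModelForm-based view may declare *either* form_class *or* fields, not both.
from django.views.generic import UpdateView

from wafer.pages.models import Page
from wafer.pages.forms import PageForm


class EditWithForm(UpdateView):
    # Option 1: delegate field selection to the form class.
    model = Page
    form_class = PageForm


class EditWithFields(UpdateView):
    # Option 2: let Django build a plain ModelForm from this list.
    model = Page
    fields = ['name', 'content']

# Declaring both form_class and fields on one view raises:
#   ImproperlyConfigured: Specifying both 'fields' and 'form_class' is not permitted.
```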
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wafer/pages/views.py`
Content:
```
1 from django.http import Http404
2 from django.core.exceptions import PermissionDenied
3 from django.views.generic import DetailView, TemplateView, UpdateView
4
5 from wafer.pages.models import Page
6 from wafer.pages.forms import PageForm
7
8
9 class ShowPage(DetailView):
10 template_name = 'wafer.pages/page.html'
11 model = Page
12
13
14 class EditPage(UpdateView):
15 template_name = 'wafer.pages/page_form.html'
16 model = Page
17 form_class = PageForm
18 fields = ['name', 'content']
19
20
21 def slug(request, url):
22 """Look up a page by url (which is a tree of slugs)"""
23 page = None
24 for slug in url.split('/'):
25 if not slug:
26 continue
27 try:
28 page = Page.objects.get(slug=slug, parent=page)
29 except Page.DoesNotExist:
30 raise Http404
31
32 if page is None:
33 try:
34 page = Page.objects.get(slug='index')
35 except Page.DoesNotExist:
36 return TemplateView.as_view(
37 template_name='wafer/index.html')(request)
38
39 if 'edit' in request.GET.keys():
40 if not request.user.has_perm('pages.change_page'):
41 raise PermissionDenied
42 return EditPage.as_view()(request, pk=page.id)
43
44 return ShowPage.as_view()(request, pk=page.id)
45
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/wafer/pages/views.py b/wafer/pages/views.py
--- a/wafer/pages/views.py
+++ b/wafer/pages/views.py
@@ -15,7 +15,6 @@
template_name = 'wafer.pages/page_form.html'
model = Page
form_class = PageForm
- fields = ['name', 'content']
def slug(request, url):
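The patch above simply drops the view-level `fields` list, leaving field selection to `PageForm`. The real form module is not included in this record, so the following is a hypothetical sketch of what `wafer/pages/forms.py` would need to expose for the edit view to keep offering the same two fields:

```python
# Hypothetical sketch only -- the actual PageForm is not shown in this record.
from django import forms

from wafer.pages.models import Page


class PageForm(forms.ModelForm):
    class Meta:
        model = Page
        # Field selection now lives on the form instead of the view.
        fields = ['name', 'content']
```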
| {"golden_diff": "diff --git a/wafer/pages/views.py b/wafer/pages/views.py\n--- a/wafer/pages/views.py\n+++ b/wafer/pages/views.py\n@@ -15,7 +15,6 @@\n template_name = 'wafer.pages/page_form.html'\n model = Page\n form_class = PageForm\n- fields = ['name', 'content']\n \n \n def slug(request, url):\n", "issue": "Wafer page editing fails on Django 1.8 with ImproperlyConfigured: error\nAs seen on Travis, and confirmed locally, attempting to edit a page bombs out, ending with\n\n\"Specifying both 'fields' and 'form_class' is not permitted.\"\nImproperlyConfigured: Specifying both 'fields' and 'form_class' is not permitted.\n\n", "before_files": [{"content": "from django.http import Http404\nfrom django.core.exceptions import PermissionDenied\nfrom django.views.generic import DetailView, TemplateView, UpdateView\n\nfrom wafer.pages.models import Page\nfrom wafer.pages.forms import PageForm\n\n\nclass ShowPage(DetailView):\n template_name = 'wafer.pages/page.html'\n model = Page\n\n\nclass EditPage(UpdateView):\n template_name = 'wafer.pages/page_form.html'\n model = Page\n form_class = PageForm\n fields = ['name', 'content']\n\n\ndef slug(request, url):\n \"\"\"Look up a page by url (which is a tree of slugs)\"\"\"\n page = None\n for slug in url.split('/'):\n if not slug:\n continue\n try:\n page = Page.objects.get(slug=slug, parent=page)\n except Page.DoesNotExist:\n raise Http404\n\n if page is None:\n try:\n page = Page.objects.get(slug='index')\n except Page.DoesNotExist:\n return TemplateView.as_view(\n template_name='wafer/index.html')(request)\n\n if 'edit' in request.GET.keys():\n if not request.user.has_perm('pages.change_page'):\n raise PermissionDenied\n return EditPage.as_view()(request, pk=page.id)\n\n return ShowPage.as_view()(request, pk=page.id)\n", "path": "wafer/pages/views.py"}], "after_files": [{"content": "from django.http import Http404\nfrom django.core.exceptions import PermissionDenied\nfrom django.views.generic import DetailView, TemplateView, UpdateView\n\nfrom wafer.pages.models import Page\nfrom wafer.pages.forms import PageForm\n\n\nclass ShowPage(DetailView):\n template_name = 'wafer.pages/page.html'\n model = Page\n\n\nclass EditPage(UpdateView):\n template_name = 'wafer.pages/page_form.html'\n model = Page\n form_class = PageForm\n\n\ndef slug(request, url):\n \"\"\"Look up a page by url (which is a tree of slugs)\"\"\"\n page = None\n for slug in url.split('/'):\n if not slug:\n continue\n try:\n page = Page.objects.get(slug=slug, parent=page)\n except Page.DoesNotExist:\n raise Http404\n\n if page is None:\n try:\n page = Page.objects.get(slug='index')\n except Page.DoesNotExist:\n return TemplateView.as_view(\n template_name='wafer/index.html')(request)\n\n if 'edit' in request.GET.keys():\n if not request.user.has_perm('pages.change_page'):\n raise PermissionDenied\n return EditPage.as_view()(request, pk=page.id)\n\n return ShowPage.as_view()(request, pk=page.id)\n", "path": "wafer/pages/views.py"}]} |
gh_patches_debug_121 | rasdani/github-patches | git_diff | dbt-labs__dbt-core-1148 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Support jinja expression statements
## Feature
### Feature description
http://jinja.pocoo.org/docs/2.10/extensions/#expression-statement
Presently, we hack expressions with:
```
{% set _ = my_dict.update({"a": 1, "b": 2}) %}
```
Instead, the jinja expression statement will make it possible to write:
```
{% do my_dict.update({"a": 1, "b": 2}) %}
```
This is a minor difference, but it will make jinja sql more readable and idiomatic.
### Who will this benefit?
jinja writers
--- END ISSUE ---
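For reference, Jinja already ships this behaviour as its optional expression-statement ("do") extension; it only needs to be switched on when the environment is created. The sketch below uses the stock `jinja2` API and is not the actual dbt change:

```python
# Minimal sketch of Jinja's expression-statement extension (jinja2.ext.do).
import jinja2

env = jinja2.Environment(extensions=['jinja2.ext.do'])

template = env.from_string(
    '{%- set my_dict = {"a": 1} -%}'
    '{%- do my_dict.update({"b": 2}) -%}'
    '{{ my_dict }}'
)

print(template.render())  # {'a': 1, 'b': 2}
```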
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dbt/clients/jinja.py`
Content:
```
1 import codecs
2 import linecache
3 import os
4
5 import jinja2
6 import jinja2._compat
7 import jinja2.ext
8 import jinja2.nodes
9 import jinja2.parser
10 import jinja2.sandbox
11
12 import dbt.compat
13 import dbt.exceptions
14
15 from dbt.node_types import NodeType
16 from dbt.utils import AttrDict
17
18 from dbt.logger import GLOBAL_LOGGER as logger # noqa
19
20
21 class MacroFuzzParser(jinja2.parser.Parser):
22 def parse_macro(self):
23 node = jinja2.nodes.Macro(lineno=next(self.stream).lineno)
24
25 # modified to fuzz macros defined in the same file. this way
26 # dbt can understand the stack of macros being called.
27 # - @cmcarthur
28 node.name = dbt.utils.get_dbt_macro_name(
29 self.parse_assign_target(name_only=True).name)
30
31 self.parse_signature(node)
32 node.body = self.parse_statements(('name:endmacro',),
33 drop_needle=True)
34 return node
35
36
37 class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment):
38 def _parse(self, source, name, filename):
39 return MacroFuzzParser(
40 self, source, name,
41 jinja2._compat.encode_filename(filename)
42 ).parse()
43
44 def _compile(self, source, filename):
45 """Override jinja's compilation to stash the rendered source inside
46 the python linecache for debugging.
47 """
48 if filename == '<template>':
49 # make a better filename
50 filename = 'dbt-{}'.format(
51 codecs.encode(os.urandom(12), 'hex').decode('ascii')
52 )
53 # encode, though I don't think this matters
54 filename = jinja2._compat.encode_filename(filename)
55 # put ourselves in the cache using the 'lazycache' method
56 linecache.cache[filename] = (lambda: source,)
57
58 return super(MacroFuzzEnvironment, self)._compile(source, filename)
59
60
61 class TemplateCache(object):
62
63 def __init__(self):
64 self.file_cache = {}
65
66 def get_node_template(self, node):
67 key = (node['package_name'], node['original_file_path'])
68
69 if key in self.file_cache:
70 return self.file_cache[key]
71
72 template = get_template(
73 string=node.get('raw_sql'),
74 ctx={},
75 node=node
76 )
77 self.file_cache[key] = template
78
79 return template
80
81 def clear(self):
82 self.file_cache.clear()
83
84
85 template_cache = TemplateCache()
86
87
88 def macro_generator(node):
89 def apply_context(context):
90 def call(*args, **kwargs):
91 name = node.get('name')
92 template = template_cache.get_node_template(node)
93 module = template.make_module(context, False, context)
94
95 if node['resource_type'] == NodeType.Operation:
96 macro = module.__dict__[dbt.utils.get_dbt_operation_name(name)]
97 else:
98 macro = module.__dict__[dbt.utils.get_dbt_macro_name(name)]
99 module.__dict__.update(context)
100
101 try:
102 return macro(*args, **kwargs)
103 except dbt.exceptions.MacroReturn as e:
104 return e.value
105 except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:
106 dbt.exceptions.raise_compiler_error(str(e), node)
107 except dbt.exceptions.CompilationException as e:
108 e.stack.append(node)
109 raise e
110
111 return call
112 return apply_context
113
114
115 class MaterializationExtension(jinja2.ext.Extension):
116 tags = ['materialization']
117
118 def parse(self, parser):
119 node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
120 materialization_name = \
121 parser.parse_assign_target(name_only=True).name
122
123 adapter_name = 'default'
124 node.args = []
125 node.defaults = []
126
127 while parser.stream.skip_if('comma'):
128 target = parser.parse_assign_target(name_only=True)
129
130 if target.name == 'default':
131 pass
132
133 elif target.name == 'adapter':
134 parser.stream.expect('assign')
135 value = parser.parse_expression()
136 adapter_name = value.value
137
138 else:
139 dbt.exceptions.invalid_materialization_argument(
140 materialization_name, target.name)
141
142 node.name = dbt.utils.get_materialization_macro_name(
143 materialization_name, adapter_name)
144
145 node.body = parser.parse_statements(('name:endmaterialization',),
146 drop_needle=True)
147
148 return node
149
150
151 class OperationExtension(jinja2.ext.Extension):
152 tags = ['operation']
153
154 def parse(self, parser):
155 node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
156 operation_name = \
157 parser.parse_assign_target(name_only=True).name
158
159 node.args = []
160 node.defaults = []
161
162 while parser.stream.skip_if('comma'):
163 target = parser.parse_assign_target(name_only=True)
164
165 node.name = dbt.utils.get_operation_macro_name(operation_name)
166
167 node.body = parser.parse_statements(('name:endoperation',),
168 drop_needle=True)
169
170 return node
171
172
173 class DocumentationExtension(jinja2.ext.Extension):
174 tags = ['docs']
175
176 def parse(self, parser):
177 node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
178 docs_name = parser.parse_assign_target(name_only=True).name
179
180 node.args = []
181 node.defaults = []
182 node.name = dbt.utils.get_docs_macro_name(docs_name)
183 node.body = parser.parse_statements(('name:enddocs',),
184 drop_needle=True)
185 return node
186
187
188 def _is_dunder_name(name):
189 return name.startswith('__') and name.endswith('__')
190
191
192 def create_macro_capture_env(node):
193
194 class ParserMacroCapture(jinja2.Undefined):
195 """
196 This class sets up the parser to capture macros.
197 """
198 def __init__(self, hint=None, obj=None, name=None, exc=None):
199 super(ParserMacroCapture, self).__init__(hint=hint, name=name)
200 self.node = node
201 self.name = name
202 self.package_name = node.get('package_name')
203 # jinja uses these for safety, so we have to override them.
204 # see https://github.com/pallets/jinja/blob/master/jinja2/sandbox.py#L332-L339 # noqa
205 self.unsafe_callable = False
206 self.alters_data = False
207
208 def __deepcopy__(self, memo):
209 path = os.path.join(self.node.get('root_path'),
210 self.node.get('original_file_path'))
211
212 logger.debug(
213 'dbt encountered an undefined variable, "{}" in node {}.{} '
214 '(source path: {})'
215 .format(self.name, self.node.get('package_name'),
216 self.node.get('name'), path))
217
218 # match jinja's message
219 dbt.exceptions.raise_compiler_error(
220 "{!r} is undefined".format(self.name),
221 node=self.node
222 )
223
224 def __getitem__(self, name):
225 # Propagate the undefined value if a caller accesses this as if it
226 # were a dictionary
227 return self
228
229 def __getattr__(self, name):
230 if name == 'name' or _is_dunder_name(name):
231 raise AttributeError(
232 "'{}' object has no attribute '{}'"
233 .format(type(self).__name__, name)
234 )
235
236 self.package_name = self.name
237 self.name = name
238
239 return self
240
241 def __call__(self, *args, **kwargs):
242 return True
243
244 return ParserMacroCapture
245
246
247 def get_environment(node=None, capture_macros=False):
248 args = {
249 'extensions': []
250 }
251
252 if capture_macros:
253 args['undefined'] = create_macro_capture_env(node)
254
255 args['extensions'].append(MaterializationExtension)
256 args['extensions'].append(OperationExtension)
257 args['extensions'].append(DocumentationExtension)
258
259 return MacroFuzzEnvironment(**args)
260
261
262 def parse(string):
263 try:
264 return get_environment().parse(dbt.compat.to_string(string))
265
266 except (jinja2.exceptions.TemplateSyntaxError,
267 jinja2.exceptions.UndefinedError) as e:
268 e.translated = False
269 dbt.exceptions.raise_compiler_error(str(e))
270
271
272 def get_template(string, ctx, node=None, capture_macros=False):
273 try:
274 env = get_environment(node, capture_macros)
275
276 template_source = dbt.compat.to_string(string)
277 return env.from_string(template_source, globals=ctx)
278
279 except (jinja2.exceptions.TemplateSyntaxError,
280 jinja2.exceptions.UndefinedError) as e:
281 e.translated = False
282 dbt.exceptions.raise_compiler_error(str(e), node)
283
284
285 def render_template(template, ctx, node=None):
286 try:
287 return template.render(ctx)
288
289 except (jinja2.exceptions.TemplateSyntaxError,
290 jinja2.exceptions.UndefinedError) as e:
291 e.translated = False
292 dbt.exceptions.raise_compiler_error(str(e), node)
293
294
295 def get_rendered(string, ctx, node=None,
296 capture_macros=False):
297 template = get_template(string, ctx, node,
298 capture_macros=capture_macros)
299
300 return render_template(template, ctx, node)
301
302
303 def undefined_error(msg):
304 raise jinja2.exceptions.UndefinedError(msg)
305
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dbt/clients/jinja.py b/dbt/clients/jinja.py
--- a/dbt/clients/jinja.py
+++ b/dbt/clients/jinja.py
@@ -246,7 +246,7 @@
def get_environment(node=None, capture_macros=False):
args = {
- 'extensions': []
+ 'extensions': ['jinja2.ext.do']
}
if capture_macros:
| {"golden_diff": "diff --git a/dbt/clients/jinja.py b/dbt/clients/jinja.py\n--- a/dbt/clients/jinja.py\n+++ b/dbt/clients/jinja.py\n@@ -246,7 +246,7 @@\n \n def get_environment(node=None, capture_macros=False):\n args = {\n- 'extensions': []\n+ 'extensions': ['jinja2.ext.do']\n }\n \n if capture_macros:\n", "issue": "Support jinja expression statements\n## Feature\r\n\r\n### Feature description\r\nhttp://jinja.pocoo.org/docs/2.10/extensions/#expression-statement\r\n\r\nPresently, we hack expressions with:\r\n```\r\n{% set _ = my_dict.update({\"a\": 1, \"b\": 2}) %}\r\n```\r\n\r\nInstead, the jinja expression statement will make it possible to write:\r\n\r\n```\r\n{% do my_dict.update({\"a\": 1, \"b\": 2}) %}\r\n```\r\n\r\nThis is a minor difference, but it will make jinja sql more readable and idiomatic.\r\n\r\n### Who will this benefit?\r\njinja writers\n", "before_files": [{"content": "import codecs\nimport linecache\nimport os\n\nimport jinja2\nimport jinja2._compat\nimport jinja2.ext\nimport jinja2.nodes\nimport jinja2.parser\nimport jinja2.sandbox\n\nimport dbt.compat\nimport dbt.exceptions\n\nfrom dbt.node_types import NodeType\nfrom dbt.utils import AttrDict\n\nfrom dbt.logger import GLOBAL_LOGGER as logger # noqa\n\n\nclass MacroFuzzParser(jinja2.parser.Parser):\n def parse_macro(self):\n node = jinja2.nodes.Macro(lineno=next(self.stream).lineno)\n\n # modified to fuzz macros defined in the same file. this way\n # dbt can understand the stack of macros being called.\n # - @cmcarthur\n node.name = dbt.utils.get_dbt_macro_name(\n self.parse_assign_target(name_only=True).name)\n\n self.parse_signature(node)\n node.body = self.parse_statements(('name:endmacro',),\n drop_needle=True)\n return node\n\n\nclass MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment):\n def _parse(self, source, name, filename):\n return MacroFuzzParser(\n self, source, name,\n jinja2._compat.encode_filename(filename)\n ).parse()\n\n def _compile(self, source, filename):\n \"\"\"Override jinja's compilation to stash the rendered source inside\n the python linecache for debugging.\n \"\"\"\n if filename == '<template>':\n # make a better filename\n filename = 'dbt-{}'.format(\n codecs.encode(os.urandom(12), 'hex').decode('ascii')\n )\n # encode, though I don't think this matters\n filename = jinja2._compat.encode_filename(filename)\n # put ourselves in the cache using the 'lazycache' method\n linecache.cache[filename] = (lambda: source,)\n\n return super(MacroFuzzEnvironment, self)._compile(source, filename)\n\n\nclass TemplateCache(object):\n\n def __init__(self):\n self.file_cache = {}\n\n def get_node_template(self, node):\n key = (node['package_name'], node['original_file_path'])\n\n if key in self.file_cache:\n return self.file_cache[key]\n\n template = get_template(\n string=node.get('raw_sql'),\n ctx={},\n node=node\n )\n self.file_cache[key] = template\n\n return template\n\n def clear(self):\n self.file_cache.clear()\n\n\ntemplate_cache = TemplateCache()\n\n\ndef macro_generator(node):\n def apply_context(context):\n def call(*args, **kwargs):\n name = node.get('name')\n template = template_cache.get_node_template(node)\n module = template.make_module(context, False, context)\n\n if node['resource_type'] == NodeType.Operation:\n macro = module.__dict__[dbt.utils.get_dbt_operation_name(name)]\n else:\n macro = module.__dict__[dbt.utils.get_dbt_macro_name(name)]\n module.__dict__.update(context)\n\n try:\n return macro(*args, **kwargs)\n except dbt.exceptions.MacroReturn as e:\n return e.value\n 
except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:\n dbt.exceptions.raise_compiler_error(str(e), node)\n except dbt.exceptions.CompilationException as e:\n e.stack.append(node)\n raise e\n\n return call\n return apply_context\n\n\nclass MaterializationExtension(jinja2.ext.Extension):\n tags = ['materialization']\n\n def parse(self, parser):\n node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)\n materialization_name = \\\n parser.parse_assign_target(name_only=True).name\n\n adapter_name = 'default'\n node.args = []\n node.defaults = []\n\n while parser.stream.skip_if('comma'):\n target = parser.parse_assign_target(name_only=True)\n\n if target.name == 'default':\n pass\n\n elif target.name == 'adapter':\n parser.stream.expect('assign')\n value = parser.parse_expression()\n adapter_name = value.value\n\n else:\n dbt.exceptions.invalid_materialization_argument(\n materialization_name, target.name)\n\n node.name = dbt.utils.get_materialization_macro_name(\n materialization_name, adapter_name)\n\n node.body = parser.parse_statements(('name:endmaterialization',),\n drop_needle=True)\n\n return node\n\n\nclass OperationExtension(jinja2.ext.Extension):\n tags = ['operation']\n\n def parse(self, parser):\n node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)\n operation_name = \\\n parser.parse_assign_target(name_only=True).name\n\n node.args = []\n node.defaults = []\n\n while parser.stream.skip_if('comma'):\n target = parser.parse_assign_target(name_only=True)\n\n node.name = dbt.utils.get_operation_macro_name(operation_name)\n\n node.body = parser.parse_statements(('name:endoperation',),\n drop_needle=True)\n\n return node\n\n\nclass DocumentationExtension(jinja2.ext.Extension):\n tags = ['docs']\n\n def parse(self, parser):\n node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)\n docs_name = parser.parse_assign_target(name_only=True).name\n\n node.args = []\n node.defaults = []\n node.name = dbt.utils.get_docs_macro_name(docs_name)\n node.body = parser.parse_statements(('name:enddocs',),\n drop_needle=True)\n return node\n\n\ndef _is_dunder_name(name):\n return name.startswith('__') and name.endswith('__')\n\n\ndef create_macro_capture_env(node):\n\n class ParserMacroCapture(jinja2.Undefined):\n \"\"\"\n This class sets up the parser to capture macros.\n \"\"\"\n def __init__(self, hint=None, obj=None, name=None, exc=None):\n super(ParserMacroCapture, self).__init__(hint=hint, name=name)\n self.node = node\n self.name = name\n self.package_name = node.get('package_name')\n # jinja uses these for safety, so we have to override them.\n # see https://github.com/pallets/jinja/blob/master/jinja2/sandbox.py#L332-L339 # noqa\n self.unsafe_callable = False\n self.alters_data = False\n\n def __deepcopy__(self, memo):\n path = os.path.join(self.node.get('root_path'),\n self.node.get('original_file_path'))\n\n logger.debug(\n 'dbt encountered an undefined variable, \"{}\" in node {}.{} '\n '(source path: {})'\n .format(self.name, self.node.get('package_name'),\n self.node.get('name'), path))\n\n # match jinja's message\n dbt.exceptions.raise_compiler_error(\n \"{!r} is undefined\".format(self.name),\n node=self.node\n )\n\n def __getitem__(self, name):\n # Propagate the undefined value if a caller accesses this as if it\n # were a dictionary\n return self\n\n def __getattr__(self, name):\n if name == 'name' or _is_dunder_name(name):\n raise AttributeError(\n \"'{}' object has no attribute '{}'\"\n .format(type(self).__name__, name)\n )\n\n self.package_name = 
self.name\n self.name = name\n\n return self\n\n def __call__(self, *args, **kwargs):\n return True\n\n return ParserMacroCapture\n\n\ndef get_environment(node=None, capture_macros=False):\n args = {\n 'extensions': []\n }\n\n if capture_macros:\n args['undefined'] = create_macro_capture_env(node)\n\n args['extensions'].append(MaterializationExtension)\n args['extensions'].append(OperationExtension)\n args['extensions'].append(DocumentationExtension)\n\n return MacroFuzzEnvironment(**args)\n\n\ndef parse(string):\n try:\n return get_environment().parse(dbt.compat.to_string(string))\n\n except (jinja2.exceptions.TemplateSyntaxError,\n jinja2.exceptions.UndefinedError) as e:\n e.translated = False\n dbt.exceptions.raise_compiler_error(str(e))\n\n\ndef get_template(string, ctx, node=None, capture_macros=False):\n try:\n env = get_environment(node, capture_macros)\n\n template_source = dbt.compat.to_string(string)\n return env.from_string(template_source, globals=ctx)\n\n except (jinja2.exceptions.TemplateSyntaxError,\n jinja2.exceptions.UndefinedError) as e:\n e.translated = False\n dbt.exceptions.raise_compiler_error(str(e), node)\n\n\ndef render_template(template, ctx, node=None):\n try:\n return template.render(ctx)\n\n except (jinja2.exceptions.TemplateSyntaxError,\n jinja2.exceptions.UndefinedError) as e:\n e.translated = False\n dbt.exceptions.raise_compiler_error(str(e), node)\n\n\ndef get_rendered(string, ctx, node=None,\n capture_macros=False):\n template = get_template(string, ctx, node,\n capture_macros=capture_macros)\n\n return render_template(template, ctx, node)\n\n\ndef undefined_error(msg):\n raise jinja2.exceptions.UndefinedError(msg)\n", "path": "dbt/clients/jinja.py"}], "after_files": [{"content": "import codecs\nimport linecache\nimport os\n\nimport jinja2\nimport jinja2._compat\nimport jinja2.ext\nimport jinja2.nodes\nimport jinja2.parser\nimport jinja2.sandbox\n\nimport dbt.compat\nimport dbt.exceptions\n\nfrom dbt.node_types import NodeType\nfrom dbt.utils import AttrDict\n\nfrom dbt.logger import GLOBAL_LOGGER as logger # noqa\n\n\nclass MacroFuzzParser(jinja2.parser.Parser):\n def parse_macro(self):\n node = jinja2.nodes.Macro(lineno=next(self.stream).lineno)\n\n # modified to fuzz macros defined in the same file. 
this way\n # dbt can understand the stack of macros being called.\n # - @cmcarthur\n node.name = dbt.utils.get_dbt_macro_name(\n self.parse_assign_target(name_only=True).name)\n\n self.parse_signature(node)\n node.body = self.parse_statements(('name:endmacro',),\n drop_needle=True)\n return node\n\n\nclass MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment):\n def _parse(self, source, name, filename):\n return MacroFuzzParser(\n self, source, name,\n jinja2._compat.encode_filename(filename)\n ).parse()\n\n def _compile(self, source, filename):\n \"\"\"Override jinja's compilation to stash the rendered source inside\n the python linecache for debugging.\n \"\"\"\n if filename == '<template>':\n # make a better filename\n filename = 'dbt-{}'.format(\n codecs.encode(os.urandom(12), 'hex').decode('ascii')\n )\n # encode, though I don't think this matters\n filename = jinja2._compat.encode_filename(filename)\n # put ourselves in the cache using the 'lazycache' method\n linecache.cache[filename] = (lambda: source,)\n\n return super(MacroFuzzEnvironment, self)._compile(source, filename)\n\n\nclass TemplateCache(object):\n\n def __init__(self):\n self.file_cache = {}\n\n def get_node_template(self, node):\n key = (node['package_name'], node['original_file_path'])\n\n if key in self.file_cache:\n return self.file_cache[key]\n\n template = get_template(\n string=node.get('raw_sql'),\n ctx={},\n node=node\n )\n self.file_cache[key] = template\n\n return template\n\n def clear(self):\n self.file_cache.clear()\n\n\ntemplate_cache = TemplateCache()\n\n\ndef macro_generator(node):\n def apply_context(context):\n def call(*args, **kwargs):\n name = node.get('name')\n template = template_cache.get_node_template(node)\n module = template.make_module(context, False, context)\n\n if node['resource_type'] == NodeType.Operation:\n macro = module.__dict__[dbt.utils.get_dbt_operation_name(name)]\n else:\n macro = module.__dict__[dbt.utils.get_dbt_macro_name(name)]\n module.__dict__.update(context)\n\n try:\n return macro(*args, **kwargs)\n except dbt.exceptions.MacroReturn as e:\n return e.value\n except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e:\n dbt.exceptions.raise_compiler_error(str(e), node)\n except dbt.exceptions.CompilationException as e:\n e.stack.append(node)\n raise e\n\n return call\n return apply_context\n\n\nclass MaterializationExtension(jinja2.ext.Extension):\n tags = ['materialization']\n\n def parse(self, parser):\n node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)\n materialization_name = \\\n parser.parse_assign_target(name_only=True).name\n\n adapter_name = 'default'\n node.args = []\n node.defaults = []\n\n while parser.stream.skip_if('comma'):\n target = parser.parse_assign_target(name_only=True)\n\n if target.name == 'default':\n pass\n\n elif target.name == 'adapter':\n parser.stream.expect('assign')\n value = parser.parse_expression()\n adapter_name = value.value\n\n else:\n dbt.exceptions.invalid_materialization_argument(\n materialization_name, target.name)\n\n node.name = dbt.utils.get_materialization_macro_name(\n materialization_name, adapter_name)\n\n node.body = parser.parse_statements(('name:endmaterialization',),\n drop_needle=True)\n\n return node\n\n\nclass OperationExtension(jinja2.ext.Extension):\n tags = ['operation']\n\n def parse(self, parser):\n node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)\n operation_name = \\\n parser.parse_assign_target(name_only=True).name\n\n node.args = []\n node.defaults = []\n\n while 
parser.stream.skip_if('comma'):\n target = parser.parse_assign_target(name_only=True)\n\n node.name = dbt.utils.get_operation_macro_name(operation_name)\n\n node.body = parser.parse_statements(('name:endoperation',),\n drop_needle=True)\n\n return node\n\n\nclass DocumentationExtension(jinja2.ext.Extension):\n tags = ['docs']\n\n def parse(self, parser):\n node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)\n docs_name = parser.parse_assign_target(name_only=True).name\n\n node.args = []\n node.defaults = []\n node.name = dbt.utils.get_docs_macro_name(docs_name)\n node.body = parser.parse_statements(('name:enddocs',),\n drop_needle=True)\n return node\n\n\ndef _is_dunder_name(name):\n return name.startswith('__') and name.endswith('__')\n\n\ndef create_macro_capture_env(node):\n\n class ParserMacroCapture(jinja2.Undefined):\n \"\"\"\n This class sets up the parser to capture macros.\n \"\"\"\n def __init__(self, hint=None, obj=None, name=None, exc=None):\n super(ParserMacroCapture, self).__init__(hint=hint, name=name)\n self.node = node\n self.name = name\n self.package_name = node.get('package_name')\n # jinja uses these for safety, so we have to override them.\n # see https://github.com/pallets/jinja/blob/master/jinja2/sandbox.py#L332-L339 # noqa\n self.unsafe_callable = False\n self.alters_data = False\n\n def __deepcopy__(self, memo):\n path = os.path.join(self.node.get('root_path'),\n self.node.get('original_file_path'))\n\n logger.debug(\n 'dbt encountered an undefined variable, \"{}\" in node {}.{} '\n '(source path: {})'\n .format(self.name, self.node.get('package_name'),\n self.node.get('name'), path))\n\n # match jinja's message\n dbt.exceptions.raise_compiler_error(\n \"{!r} is undefined\".format(self.name),\n node=self.node\n )\n\n def __getitem__(self, name):\n # Propagate the undefined value if a caller accesses this as if it\n # were a dictionary\n return self\n\n def __getattr__(self, name):\n if name == 'name' or _is_dunder_name(name):\n raise AttributeError(\n \"'{}' object has no attribute '{}'\"\n .format(type(self).__name__, name)\n )\n\n self.package_name = self.name\n self.name = name\n\n return self\n\n def __call__(self, *args, **kwargs):\n return True\n\n return ParserMacroCapture\n\n\ndef get_environment(node=None, capture_macros=False):\n args = {\n 'extensions': ['jinja2.ext.do']\n }\n\n if capture_macros:\n args['undefined'] = create_macro_capture_env(node)\n\n args['extensions'].append(MaterializationExtension)\n args['extensions'].append(OperationExtension)\n args['extensions'].append(DocumentationExtension)\n\n return MacroFuzzEnvironment(**args)\n\n\ndef parse(string):\n try:\n return get_environment().parse(dbt.compat.to_string(string))\n\n except (jinja2.exceptions.TemplateSyntaxError,\n jinja2.exceptions.UndefinedError) as e:\n e.translated = False\n dbt.exceptions.raise_compiler_error(str(e))\n\n\ndef get_template(string, ctx, node=None, capture_macros=False):\n try:\n env = get_environment(node, capture_macros)\n\n template_source = dbt.compat.to_string(string)\n return env.from_string(template_source, globals=ctx)\n\n except (jinja2.exceptions.TemplateSyntaxError,\n jinja2.exceptions.UndefinedError) as e:\n e.translated = False\n dbt.exceptions.raise_compiler_error(str(e), node)\n\n\ndef render_template(template, ctx, node=None):\n try:\n return template.render(ctx)\n\n except (jinja2.exceptions.TemplateSyntaxError,\n jinja2.exceptions.UndefinedError) as e:\n e.translated = False\n dbt.exceptions.raise_compiler_error(str(e), node)\n\n\ndef 
get_rendered(string, ctx, node=None,\n capture_macros=False):\n template = get_template(string, ctx, node,\n capture_macros=capture_macros)\n\n return render_template(template, ctx, node)\n\n\ndef undefined_error(msg):\n raise jinja2.exceptions.UndefinedError(msg)\n", "path": "dbt/clients/jinja.py"}]} |
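For context on the record above: the one-line fix registers jinja2's built-in `do` extension, which is what enables `{% do %}` expression statements. A minimal, self-contained sketch of what that unlocks (illustrative only, not dbt code):

```python
# Hypothetical illustration: with the "do" extension enabled, a template can
# call a method for its side effect without the `{% set _ = ... %}` workaround.
import jinja2

env = jinja2.Environment(extensions=['jinja2.ext.do'])

template = env.from_string(
    '{% set my_dict = {"a": 1} %}'
    '{% do my_dict.update({"b": 2}) %}'
    '{{ my_dict["b"] }}'
)

print(template.render())  # prints: 2
```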
gh_patches_debug_122 | rasdani/github-patches | git_diff | ranaroussi__yfinance-32 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Usage with Anaconda/iPython?
Hi - thanks for the package, works great.
Question - is it possible to use this on iPython/Anaconda?
Thanks!
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `fix_yahoo_finance/__init__.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Yahoo! Finance Fix for Pandas Datareader
5 # https://github.com/ranaroussi/fix-yahoo-finance
6 #
7 # Copyright 2017 Ran Aroussi
8 #
9 # Licensed under the GNU Lesser General Public License, v3.0 (the "License");
10 # you may not use this file except in compliance with the License.
11 # You may obtain a copy of the License at
12 #
13 # https://www.gnu.org/licenses/lgpl-3.0.en.html
14 #
15 # Unless required by applicable law or agreed to in writing, software
16 # distributed under the License is distributed on an "AS IS" BASIS,
17 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
18 # See the License for the specific language governing permissions and
19 # limitations under the License.
20
21 from __future__ import print_function
22
23 __version__ = "0.0.18"
24 __author__ = "Ran Aroussi"
25 __all__ = ['download', 'get_yahoo_crumb', 'parse_ticker_csv']
26
27
28 import datetime
29 import numpy as np
30 import pandas as pd
31 import time
32 import io
33 import requests
34 import re
35 import warnings
36 import sys
37 import multitasking
38
39 warnings.simplefilter("once")
40 warnings.warn("""
41 Auto-overriding of pandas_datareader's get_data_yahoo() is deprecated and will be removed in future versions.
42 Use pdr_override() to explicitly override it.""",
43 DeprecationWarning)
44
45 _YAHOO_COOKIE_ = ''
46 _YAHOO_CRUMB_ = ''
47 _YAHOO_CHECKED_ = None
48 _YAHOO_TTL_ = 180
49
50
51 def get_yahoo_crumb(force=False):
52 global _YAHOO_COOKIE_, _YAHOO_CRUMB_, _YAHOO_CHECKED_, _YAHOO_TTL_
53
54 # use same cookie for 5 min
55 if _YAHOO_CHECKED_ and not force:
56 now = datetime.datetime.now()
57 delta = (now - _YAHOO_CHECKED_).total_seconds()
58 if delta < _YAHOO_TTL_:
59 return (_YAHOO_CRUMB_, _YAHOO_COOKIE_)
60
61 res = requests.get('https://finance.yahoo.com/quote/SPY/history')
62 _YAHOO_COOKIE_ = res.cookies['B']
63
64 pattern = re.compile('.*"CrumbStore":\{"crumb":"(?P<crumb>[^"]+)"\}')
65 for line in res.text.splitlines():
66 m = pattern.match(line)
67 if m is not None:
68 _YAHOO_CRUMB_ = m.groupdict()['crumb']
69
70 # set global params
71 _YAHOO_CHECKED_ = datetime.datetime.now()
72
73 return (_YAHOO_CRUMB_, _YAHOO_COOKIE_)
74
75
76 def parse_ticker_csv(csv_str, auto_adjust):
77 df = pd.read_csv(csv_str, index_col=0, error_bad_lines=False
78 ).replace('null', np.nan).dropna()
79
80 df.index = pd.to_datetime(df.index)
81 df = df.apply(pd.to_numeric)
82 df['Volume'] = df['Volume'].fillna(0).astype(int)
83
84 if auto_adjust:
85 ratio = df["Close"] / df["Adj Close"]
86 df["Adj Open"] = df["Open"] / ratio
87 df["Adj High"] = df["High"] / ratio
88 df["Adj Low"] = df["Low"] / ratio
89
90 df.drop(
91 ["Open", "High", "Low", "Close"],
92 axis=1, inplace=True)
93
94 df.rename(columns={
95 "Adj Open": "Open", "Adj High": "High",
96 "Adj Low": "Low", "Adj Close": "Close"
97 }, inplace=True)
98
99 df = df[['Open', 'High', 'Low', 'Close', 'Volume']]
100
101 return df.groupby(df.index).first()
102
103
104 _DFS_ = {}
105 _COMPLETED_ = 0
106 _PROGRESS_BAR_ = False
107 _FAILED_ = []
108
109
110 def make_chunks(l, n):
111 """Yield successive n-sized chunks from l."""
112 for i in range(0, len(l), n):
113 yield l[i:i + n]
114
115
116 def download(tickers, start=None, end=None, as_panel=True,
117 group_by='column', auto_adjust=False, progress=True,
118 actions=None, threads=1, *args, **kwargs):
119 """Download yahoo tickers
120 :Parameters:
121
122 tickers : str, list
123 List of tickers to download
124 start: str
125 Download start date string (YYYY-MM-DD) or datetime. Default is 1950-01-01
126 end: str
127 Download end date string (YYYY-MM-DD) or datetime. Default is today
128 as_panel : bool
129 Return a multi-index DataFrame or Panel. Default is True (Panel), which is deprecated
130 group_by : str
131 Group by ticker or 'column' (default)
132 auto_adjust: bool
133 Adjust all OHLC automatically? Default is False
134 actions: str
135 Download dividend + stock splits data. Default is None (no actions)
136 Options are 'inline' (returns history + actions) and 'only' (actions only)
137 threads: int
138 How may threads to use? Default is 1 thread
139 """
140
141 global _DFS_, _COMPLETED_, _PROGRESS_BAR_, _FAILED_
142 _COMPLETED_ = 0
143 _FAILED_ = []
144
145 # format start
146 if start is None:
147 start = int(time.mktime(time.strptime('1950-01-01', '%Y-%m-%d')))
148 elif isinstance(start, datetime.datetime):
149 start = int(time.mktime(start.timetuple()))
150 else:
151 start = int(time.mktime(time.strptime(str(start), '%Y-%m-%d')))
152
153 # format end
154 if end is None:
155 end = int(time.mktime(datetime.datetime.now().timetuple()))
156 elif isinstance(end, datetime.datetime):
157 end = int(time.mktime(end.timetuple()))
158 else:
159 end = int(time.mktime(time.strptime(str(end), '%Y-%m-%d')))
160
161 # create ticker list
162 tickers = tickers if isinstance(tickers, list) else [tickers]
163 tickers = [x.upper() for x in tickers]
164
165 # initiate progress bar
166 if progress:
167 _PROGRESS_BAR_ = ProgressBar(len(tickers), 'downloaded')
168
169 # download using single thread
170 if threads is None or threads < 2:
171 download_chunk(tickers, start=start, end=end,
172 auto_adjust=auto_adjust, progress=progress,
173 actions=actions, *args, **kwargs)
174 # threaded download
175 else:
176 threads = min([threads, len(tickers)])
177
178 # download in chunks
179 chunks = 0
180 for chunk in make_chunks(tickers, max([1, len(tickers) // threads])):
181 chunks += len(chunk)
182 download_thread(chunk, start=start, end=end,
183 auto_adjust=auto_adjust, progress=progress,
184 actions=actions, *args, **kwargs)
185 if len(tickers[-chunks:]) > 0:
186 download_thread(tickers[-chunks:], start=start, end=end,
187 auto_adjust=auto_adjust, progress=progress,
188 actions=actions, *args, **kwargs)
189
190 # wait for completion
191 while _COMPLETED_ < len(tickers):
192 time.sleep(0.1)
193
194 _PROGRESS_BAR_.completed()
195
196 # create panel (derecated)
197 if as_panel:
198 with warnings.catch_warnings():
199 warnings.filterwarnings("ignore", category=DeprecationWarning)
200 data = pd.Panel(_DFS_)
201 if group_by == 'column':
202 data = data.swapaxes(0, 2)
203
204 # create multiIndex df
205 else:
206 data = pd.concat(_DFS_.values(), axis=1, keys=_DFS_.keys())
207 if group_by == 'column':
208 data.columns = data.columns.swaplevel(0, 1)
209 data.sort_index(level=0, axis=1, inplace=True)
210 if auto_adjust:
211 data = data[['Open', 'High', 'Low', 'Close', 'Volume']]
212 else:
213 data = data[['Open', 'High', 'Low',
214 'Close', 'Adj Close', 'Volume']]
215
216 # return single df if only one ticker
217 if len(tickers) == 1:
218 data = _DFS_[tickers[0]]
219
220 if len(_FAILED_) > 0:
221 print("\nThe following tickers failed to download:\n",
222 ', '.join(_FAILED_))
223
224 return data
225
226
227 def download_one(ticker, start, end, interval, auto_adjust=None, actions=None):
228
229 tried_once = False
230 crumb, cookie = get_yahoo_crumb()
231
232 url_str = "https://query1.finance.yahoo.com/v7/finance/download/%s"
233 url_str += "?period1=%s&period2=%s&interval=%s&events=%s&crumb=%s"
234
235 actions = None if '^' in ticker else actions
236
237 if actions:
238 url = url_str % (ticker, start, end, interval, 'div', crumb)
239 res = requests.get(url, cookies={'B': cookie}).text
240 # print(res)
241 div = pd.DataFrame(columns=['action', 'value'])
242 if "error" not in res:
243 div = pd.read_csv(io.StringIO(res),
244 index_col=0, error_bad_lines=False
245 ).replace('null', np.nan).dropna()
246
247 if isinstance(div, pd.DataFrame):
248 div.index = pd.to_datetime(div.index)
249 div["action"] = "DIVIDEND"
250 div = div.rename(columns={'Dividends': 'value'})
251 div['value'] = div['value'].astype(float)
252
253 # download Stock Splits data
254 url = url_str % (ticker, start, end, interval, 'split', crumb)
255 res = requests.get(url, cookies={'B': cookie}).text
256 split = pd.DataFrame(columns=['action', 'value'])
257 if "error" not in res:
258 split = pd.read_csv(io.StringIO(res),
259 index_col=0, error_bad_lines=False
260 ).replace('null', np.nan).dropna()
261
262 if isinstance(split, pd.DataFrame):
263 split.index = pd.to_datetime(split.index)
264 split["action"] = "SPLIT"
265 split = split.rename(columns={'Stock Splits': 'value'})
266 if len(split.index) > 0:
267 split['value'] = split.apply(
268 lambda x: 1 / eval(x['value']), axis=1).astype(float)
269
270 if actions == 'only':
271 return pd.concat([div, split]).sort_index()
272
273 # download history
274 url = url_str % (ticker, start, end, interval, 'history', crumb)
275 res = requests.get(url, cookies={'B': cookie}).text
276 hist = pd.DataFrame(
277 columns=['Open', 'High', 'Low', 'Close', 'Adj Close', 'Volume'])
278
279 if "error" in res:
280 return pd.DataFrame()
281
282 hist = parse_ticker_csv(io.StringIO(res), auto_adjust)
283
284 if len(hist.index) > 0:
285 if actions is None:
286 return hist
287
288 hist['Dividends'] = div['value'] if len(div.index) > 0 else np.nan
289 hist['Dividends'].fillna(0, inplace=True)
290 hist['Stock Splits'] = split['value'] if len(
291 split.index) > 0 else np.nan
292 hist['Stock Splits'].fillna(1, inplace=True)
293
294 return hist
295
296 # empty len(hist.index) == 0
297 if not tried_once:
298 tried_once = True
299 get_yahoo_crumb(force=True)
300 return download_one(ticker, start, end, interval, auto_adjust, actions)
301
302
303 @multitasking.task
304 def download_thread(tickers, start=None, end=None,
305 auto_adjust=False, progress=True,
306 actions=False, *args, **kwargs):
307 download_chunk(tickers, start=None, end=None,
308 auto_adjust=False, progress=progress,
309 actions=False, *args, **kwargs)
310
311
312 def download_chunk(tickers, start=None, end=None,
313 auto_adjust=False, progress=True,
314 actions=False, *args, **kwargs):
315
316 global _DFS_, _COMPLETED_, _PROGRESS_BAR_, _FAILED_
317
318 interval = kwargs["interval"] if "interval" in kwargs else "1d"
319
320 # url template
321 url_str = "https://query1.finance.yahoo.com/v7/finance/download/%s"
322 url_str += "?period1=%s&period2=%s&interval=%s&events=%s&crumb=%s"
323
324 # failed tickers collectors
325 round1_failed_tickers = []
326
327 # start downloading
328 for ticker in tickers:
329
330 # yahoo crumb/cookie
331 crumb, cookie = get_yahoo_crumb()
332
333 tried_once = False
334 try:
335 hist = download_one(ticker, start, end,
336 interval, auto_adjust, actions)
337 if isinstance(hist, pd.DataFrame):
338 _DFS_[ticker] = hist
339 if progress:
340 _PROGRESS_BAR_.animate()
341 else:
342 round1_failed_tickers.append(ticker)
343 except:
344 # something went wrong...
345 # try one more time using a new cookie/crumb
346 if not tried_once:
347 tried_once = True
348 try:
349 get_yahoo_crumb(force=True)
350 hist = download_one(ticker, start, end,
351 interval, auto_adjust, actions)
352 if isinstance(hist, pd.DataFrame):
353 _DFS_[ticker] = hist
354 if progress:
355 _PROGRESS_BAR_.animate()
356 else:
357 round1_failed_tickers.append(ticker)
358 except:
359 round1_failed_tickers.append(ticker)
360 time.sleep(0.001)
361
362 # try failed items again before giving up
363 _COMPLETED_ += len(tickers) - len(round1_failed_tickers)
364
365 if len(round1_failed_tickers) > 0:
366 get_yahoo_crumb(force=True)
367 for ticker in round1_failed_tickers:
368 try:
369 hist = download_one(ticker, start, end,
370 interval, auto_adjust, actions)
371 if isinstance(hist, pd.DataFrame):
372 _DFS_[ticker] = hist
373 if progress:
374 _PROGRESS_BAR_.animate()
375 else:
376 _FAILED_.append(ticker)
377 except:
378 _FAILED_.append(ticker)
379 pass
380 time.sleep(0.000001)
381 _COMPLETED_ += 1
382
383
384 class ProgressBar:
385 def __init__(self, iterations, text='completed'):
386 self.text = text
387 self.iterations = iterations
388 self.prog_bar = '[]'
389 self.fill_char = '*'
390 self.width = 50
391 self.__update_amount(0)
392 self.elapsed = 1
393
394 def completed(self):
395 if self.elapsed > self.iterations:
396 self.elapsed = self.iterations
397 self.update_iteration(1)
398 print('\r' + str(self), end='')
399 sys.stdout.flush()
400
401 def animate(self, iteration=None):
402 if iteration is None:
403 self.elapsed += 1
404 iteration = self.elapsed
405 else:
406 self.elapsed += iteration
407
408 print('\r' + str(self), end='')
409 sys.stdout.flush()
410 self.update_iteration()
411
412 def update_iteration(self, val=None):
413 val = val if val is not None else self.elapsed / float(self.iterations)
414 self.__update_amount(val * 100.0)
415 self.prog_bar += ' %s of %s %s' % (
416 self.elapsed, self.iterations, self.text)
417
418 def __update_amount(self, new_amount):
419 percent_done = int(round((new_amount / 100.0) * 100.0))
420 all_full = self.width - 2
421 num_hashes = int(round((percent_done / 100.0) * all_full))
422 self.prog_bar = '[' + self.fill_char * \
423 num_hashes + ' ' * (all_full - num_hashes) + ']'
424 pct_place = (len(self.prog_bar) // 2) - len(str(percent_done))
425 pct_string = '%d%%' % percent_done
426 self.prog_bar = self.prog_bar[0:pct_place] + \
427 (pct_string + self.prog_bar[pct_place + len(pct_string):])
428
429 def __str__(self):
430 return str(self.prog_bar)
431
432
433 # make pandas datareader optional
434 # otherwise can be called via fix_yahoo_finance.download(...)
435 def pdr_override():
436 try:
437 import pandas_datareader
438 pandas_datareader.data.get_data_yahoo = download
439 except:
440 pass
441
442 pdr_override()
443
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/fix_yahoo_finance/__init__.py b/fix_yahoo_finance/__init__.py
--- a/fix_yahoo_finance/__init__.py
+++ b/fix_yahoo_finance/__init__.py
@@ -221,6 +221,7 @@
print("\nThe following tickers failed to download:\n",
', '.join(_FAILED_))
+ _DFS_ = {}
return data
| {"golden_diff": "diff --git a/fix_yahoo_finance/__init__.py b/fix_yahoo_finance/__init__.py\n--- a/fix_yahoo_finance/__init__.py\n+++ b/fix_yahoo_finance/__init__.py\n@@ -221,6 +221,7 @@\n print(\"\\nThe following tickers failed to download:\\n\",\n ', '.join(_FAILED_))\n \n+ _DFS_ = {}\n return data\n", "issue": "Usage with Anaconda/iPython?\nHi - thanks for the package, works great.\r\n\r\nQuestion - is it possible to use this on iPython/Anaconda?\r\n\r\nThanks!\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Yahoo! Finance Fix for Pandas Datareader\n# https://github.com/ranaroussi/fix-yahoo-finance\n#\n# Copyright 2017 Ran Aroussi\n#\n# Licensed under the GNU Lesser General Public License, v3.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.gnu.org/licenses/lgpl-3.0.en.html\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import print_function\n\n__version__ = \"0.0.18\"\n__author__ = \"Ran Aroussi\"\n__all__ = ['download', 'get_yahoo_crumb', 'parse_ticker_csv']\n\n\nimport datetime\nimport numpy as np\nimport pandas as pd\nimport time\nimport io\nimport requests\nimport re\nimport warnings\nimport sys\nimport multitasking\n\nwarnings.simplefilter(\"once\")\nwarnings.warn(\"\"\"\n Auto-overriding of pandas_datareader's get_data_yahoo() is deprecated and will be removed in future versions.\n Use pdr_override() to explicitly override it.\"\"\",\n DeprecationWarning)\n\n_YAHOO_COOKIE_ = ''\n_YAHOO_CRUMB_ = ''\n_YAHOO_CHECKED_ = None\n_YAHOO_TTL_ = 180\n\n\ndef get_yahoo_crumb(force=False):\n global _YAHOO_COOKIE_, _YAHOO_CRUMB_, _YAHOO_CHECKED_, _YAHOO_TTL_\n\n # use same cookie for 5 min\n if _YAHOO_CHECKED_ and not force:\n now = datetime.datetime.now()\n delta = (now - _YAHOO_CHECKED_).total_seconds()\n if delta < _YAHOO_TTL_:\n return (_YAHOO_CRUMB_, _YAHOO_COOKIE_)\n\n res = requests.get('https://finance.yahoo.com/quote/SPY/history')\n _YAHOO_COOKIE_ = res.cookies['B']\n\n pattern = re.compile('.*\"CrumbStore\":\\{\"crumb\":\"(?P<crumb>[^\"]+)\"\\}')\n for line in res.text.splitlines():\n m = pattern.match(line)\n if m is not None:\n _YAHOO_CRUMB_ = m.groupdict()['crumb']\n\n # set global params\n _YAHOO_CHECKED_ = datetime.datetime.now()\n\n return (_YAHOO_CRUMB_, _YAHOO_COOKIE_)\n\n\ndef parse_ticker_csv(csv_str, auto_adjust):\n df = pd.read_csv(csv_str, index_col=0, error_bad_lines=False\n ).replace('null', np.nan).dropna()\n\n df.index = pd.to_datetime(df.index)\n df = df.apply(pd.to_numeric)\n df['Volume'] = df['Volume'].fillna(0).astype(int)\n\n if auto_adjust:\n ratio = df[\"Close\"] / df[\"Adj Close\"]\n df[\"Adj Open\"] = df[\"Open\"] / ratio\n df[\"Adj High\"] = df[\"High\"] / ratio\n df[\"Adj Low\"] = df[\"Low\"] / ratio\n\n df.drop(\n [\"Open\", \"High\", \"Low\", \"Close\"],\n axis=1, inplace=True)\n\n df.rename(columns={\n \"Adj Open\": \"Open\", \"Adj High\": \"High\",\n \"Adj Low\": \"Low\", \"Adj Close\": \"Close\"\n }, inplace=True)\n\n df = df[['Open', 'High', 'Low', 'Close', 'Volume']]\n\n return df.groupby(df.index).first()\n\n\n_DFS_ = {}\n_COMPLETED_ = 0\n_PROGRESS_BAR_ = False\n_FAILED_ = []\n\n\ndef make_chunks(l, n):\n 
\"\"\"Yield successive n-sized chunks from l.\"\"\"\n for i in range(0, len(l), n):\n yield l[i:i + n]\n\n\ndef download(tickers, start=None, end=None, as_panel=True,\n group_by='column', auto_adjust=False, progress=True,\n actions=None, threads=1, *args, **kwargs):\n \"\"\"Download yahoo tickers\n :Parameters:\n\n tickers : str, list\n List of tickers to download\n start: str\n Download start date string (YYYY-MM-DD) or datetime. Default is 1950-01-01\n end: str\n Download end date string (YYYY-MM-DD) or datetime. Default is today\n as_panel : bool\n Return a multi-index DataFrame or Panel. Default is True (Panel), which is deprecated\n group_by : str\n Group by ticker or 'column' (default)\n auto_adjust: bool\n Adjust all OHLC automatically? Default is False\n actions: str\n Download dividend + stock splits data. Default is None (no actions)\n Options are 'inline' (returns history + actions) and 'only' (actions only)\n threads: int\n How may threads to use? Default is 1 thread\n \"\"\"\n\n global _DFS_, _COMPLETED_, _PROGRESS_BAR_, _FAILED_\n _COMPLETED_ = 0\n _FAILED_ = []\n\n # format start\n if start is None:\n start = int(time.mktime(time.strptime('1950-01-01', '%Y-%m-%d')))\n elif isinstance(start, datetime.datetime):\n start = int(time.mktime(start.timetuple()))\n else:\n start = int(time.mktime(time.strptime(str(start), '%Y-%m-%d')))\n\n # format end\n if end is None:\n end = int(time.mktime(datetime.datetime.now().timetuple()))\n elif isinstance(end, datetime.datetime):\n end = int(time.mktime(end.timetuple()))\n else:\n end = int(time.mktime(time.strptime(str(end), '%Y-%m-%d')))\n\n # create ticker list\n tickers = tickers if isinstance(tickers, list) else [tickers]\n tickers = [x.upper() for x in tickers]\n\n # initiate progress bar\n if progress:\n _PROGRESS_BAR_ = ProgressBar(len(tickers), 'downloaded')\n\n # download using single thread\n if threads is None or threads < 2:\n download_chunk(tickers, start=start, end=end,\n auto_adjust=auto_adjust, progress=progress,\n actions=actions, *args, **kwargs)\n # threaded download\n else:\n threads = min([threads, len(tickers)])\n\n # download in chunks\n chunks = 0\n for chunk in make_chunks(tickers, max([1, len(tickers) // threads])):\n chunks += len(chunk)\n download_thread(chunk, start=start, end=end,\n auto_adjust=auto_adjust, progress=progress,\n actions=actions, *args, **kwargs)\n if len(tickers[-chunks:]) > 0:\n download_thread(tickers[-chunks:], start=start, end=end,\n auto_adjust=auto_adjust, progress=progress,\n actions=actions, *args, **kwargs)\n\n # wait for completion\n while _COMPLETED_ < len(tickers):\n time.sleep(0.1)\n\n _PROGRESS_BAR_.completed()\n\n # create panel (derecated)\n if as_panel:\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n data = pd.Panel(_DFS_)\n if group_by == 'column':\n data = data.swapaxes(0, 2)\n\n # create multiIndex df\n else:\n data = pd.concat(_DFS_.values(), axis=1, keys=_DFS_.keys())\n if group_by == 'column':\n data.columns = data.columns.swaplevel(0, 1)\n data.sort_index(level=0, axis=1, inplace=True)\n if auto_adjust:\n data = data[['Open', 'High', 'Low', 'Close', 'Volume']]\n else:\n data = data[['Open', 'High', 'Low',\n 'Close', 'Adj Close', 'Volume']]\n\n # return single df if only one ticker\n if len(tickers) == 1:\n data = _DFS_[tickers[0]]\n\n if len(_FAILED_) > 0:\n print(\"\\nThe following tickers failed to download:\\n\",\n ', '.join(_FAILED_))\n\n return data\n\n\ndef download_one(ticker, start, end, interval, 
auto_adjust=None, actions=None):\n\n tried_once = False\n crumb, cookie = get_yahoo_crumb()\n\n url_str = \"https://query1.finance.yahoo.com/v7/finance/download/%s\"\n url_str += \"?period1=%s&period2=%s&interval=%s&events=%s&crumb=%s\"\n\n actions = None if '^' in ticker else actions\n\n if actions:\n url = url_str % (ticker, start, end, interval, 'div', crumb)\n res = requests.get(url, cookies={'B': cookie}).text\n # print(res)\n div = pd.DataFrame(columns=['action', 'value'])\n if \"error\" not in res:\n div = pd.read_csv(io.StringIO(res),\n index_col=0, error_bad_lines=False\n ).replace('null', np.nan).dropna()\n\n if isinstance(div, pd.DataFrame):\n div.index = pd.to_datetime(div.index)\n div[\"action\"] = \"DIVIDEND\"\n div = div.rename(columns={'Dividends': 'value'})\n div['value'] = div['value'].astype(float)\n\n # download Stock Splits data\n url = url_str % (ticker, start, end, interval, 'split', crumb)\n res = requests.get(url, cookies={'B': cookie}).text\n split = pd.DataFrame(columns=['action', 'value'])\n if \"error\" not in res:\n split = pd.read_csv(io.StringIO(res),\n index_col=0, error_bad_lines=False\n ).replace('null', np.nan).dropna()\n\n if isinstance(split, pd.DataFrame):\n split.index = pd.to_datetime(split.index)\n split[\"action\"] = \"SPLIT\"\n split = split.rename(columns={'Stock Splits': 'value'})\n if len(split.index) > 0:\n split['value'] = split.apply(\n lambda x: 1 / eval(x['value']), axis=1).astype(float)\n\n if actions == 'only':\n return pd.concat([div, split]).sort_index()\n\n # download history\n url = url_str % (ticker, start, end, interval, 'history', crumb)\n res = requests.get(url, cookies={'B': cookie}).text\n hist = pd.DataFrame(\n columns=['Open', 'High', 'Low', 'Close', 'Adj Close', 'Volume'])\n\n if \"error\" in res:\n return pd.DataFrame()\n\n hist = parse_ticker_csv(io.StringIO(res), auto_adjust)\n\n if len(hist.index) > 0:\n if actions is None:\n return hist\n\n hist['Dividends'] = div['value'] if len(div.index) > 0 else np.nan\n hist['Dividends'].fillna(0, inplace=True)\n hist['Stock Splits'] = split['value'] if len(\n split.index) > 0 else np.nan\n hist['Stock Splits'].fillna(1, inplace=True)\n\n return hist\n\n # empty len(hist.index) == 0\n if not tried_once:\n tried_once = True\n get_yahoo_crumb(force=True)\n return download_one(ticker, start, end, interval, auto_adjust, actions)\n\n\[email protected]\ndef download_thread(tickers, start=None, end=None,\n auto_adjust=False, progress=True,\n actions=False, *args, **kwargs):\n download_chunk(tickers, start=None, end=None,\n auto_adjust=False, progress=progress,\n actions=False, *args, **kwargs)\n\n\ndef download_chunk(tickers, start=None, end=None,\n auto_adjust=False, progress=True,\n actions=False, *args, **kwargs):\n\n global _DFS_, _COMPLETED_, _PROGRESS_BAR_, _FAILED_\n\n interval = kwargs[\"interval\"] if \"interval\" in kwargs else \"1d\"\n\n # url template\n url_str = \"https://query1.finance.yahoo.com/v7/finance/download/%s\"\n url_str += \"?period1=%s&period2=%s&interval=%s&events=%s&crumb=%s\"\n\n # failed tickers collectors\n round1_failed_tickers = []\n\n # start downloading\n for ticker in tickers:\n\n # yahoo crumb/cookie\n crumb, cookie = get_yahoo_crumb()\n\n tried_once = False\n try:\n hist = download_one(ticker, start, end,\n interval, auto_adjust, actions)\n if isinstance(hist, pd.DataFrame):\n _DFS_[ticker] = hist\n if progress:\n _PROGRESS_BAR_.animate()\n else:\n round1_failed_tickers.append(ticker)\n except:\n # something went wrong...\n # try one more time using a 
new cookie/crumb\n if not tried_once:\n tried_once = True\n try:\n get_yahoo_crumb(force=True)\n hist = download_one(ticker, start, end,\n interval, auto_adjust, actions)\n if isinstance(hist, pd.DataFrame):\n _DFS_[ticker] = hist\n if progress:\n _PROGRESS_BAR_.animate()\n else:\n round1_failed_tickers.append(ticker)\n except:\n round1_failed_tickers.append(ticker)\n time.sleep(0.001)\n\n # try failed items again before giving up\n _COMPLETED_ += len(tickers) - len(round1_failed_tickers)\n\n if len(round1_failed_tickers) > 0:\n get_yahoo_crumb(force=True)\n for ticker in round1_failed_tickers:\n try:\n hist = download_one(ticker, start, end,\n interval, auto_adjust, actions)\n if isinstance(hist, pd.DataFrame):\n _DFS_[ticker] = hist\n if progress:\n _PROGRESS_BAR_.animate()\n else:\n _FAILED_.append(ticker)\n except:\n _FAILED_.append(ticker)\n pass\n time.sleep(0.000001)\n _COMPLETED_ += 1\n\n\nclass ProgressBar:\n def __init__(self, iterations, text='completed'):\n self.text = text\n self.iterations = iterations\n self.prog_bar = '[]'\n self.fill_char = '*'\n self.width = 50\n self.__update_amount(0)\n self.elapsed = 1\n\n def completed(self):\n if self.elapsed > self.iterations:\n self.elapsed = self.iterations\n self.update_iteration(1)\n print('\\r' + str(self), end='')\n sys.stdout.flush()\n\n def animate(self, iteration=None):\n if iteration is None:\n self.elapsed += 1\n iteration = self.elapsed\n else:\n self.elapsed += iteration\n\n print('\\r' + str(self), end='')\n sys.stdout.flush()\n self.update_iteration()\n\n def update_iteration(self, val=None):\n val = val if val is not None else self.elapsed / float(self.iterations)\n self.__update_amount(val * 100.0)\n self.prog_bar += ' %s of %s %s' % (\n self.elapsed, self.iterations, self.text)\n\n def __update_amount(self, new_amount):\n percent_done = int(round((new_amount / 100.0) * 100.0))\n all_full = self.width - 2\n num_hashes = int(round((percent_done / 100.0) * all_full))\n self.prog_bar = '[' + self.fill_char * \\\n num_hashes + ' ' * (all_full - num_hashes) + ']'\n pct_place = (len(self.prog_bar) // 2) - len(str(percent_done))\n pct_string = '%d%%' % percent_done\n self.prog_bar = self.prog_bar[0:pct_place] + \\\n (pct_string + self.prog_bar[pct_place + len(pct_string):])\n\n def __str__(self):\n return str(self.prog_bar)\n\n\n# make pandas datareader optional\n# otherwise can be called via fix_yahoo_finance.download(...)\ndef pdr_override():\n try:\n import pandas_datareader\n pandas_datareader.data.get_data_yahoo = download\n except:\n pass\n\npdr_override()\n", "path": "fix_yahoo_finance/__init__.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Yahoo! 
Finance Fix for Pandas Datareader\n# https://github.com/ranaroussi/fix-yahoo-finance\n#\n# Copyright 2017 Ran Aroussi\n#\n# Licensed under the GNU Lesser General Public License, v3.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.gnu.org/licenses/lgpl-3.0.en.html\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\nfrom __future__ import print_function\n\n__version__ = \"0.0.18\"\n__author__ = \"Ran Aroussi\"\n__all__ = ['download', 'get_yahoo_crumb', 'parse_ticker_csv']\n\n\nimport datetime\nimport numpy as np\nimport pandas as pd\nimport time\nimport io\nimport requests\nimport re\nimport warnings\nimport sys\nimport multitasking\n\nwarnings.simplefilter(\"once\")\nwarnings.warn(\"\"\"\n Auto-overriding of pandas_datareader's get_data_yahoo() is deprecated and will be removed in future versions.\n Use pdr_override() to explicitly override it.\"\"\",\n DeprecationWarning)\n\n_YAHOO_COOKIE_ = ''\n_YAHOO_CRUMB_ = ''\n_YAHOO_CHECKED_ = None\n_YAHOO_TTL_ = 180\n\n\ndef get_yahoo_crumb(force=False):\n global _YAHOO_COOKIE_, _YAHOO_CRUMB_, _YAHOO_CHECKED_, _YAHOO_TTL_\n\n # use same cookie for 5 min\n if _YAHOO_CHECKED_ and not force:\n now = datetime.datetime.now()\n delta = (now - _YAHOO_CHECKED_).total_seconds()\n if delta < _YAHOO_TTL_:\n return (_YAHOO_CRUMB_, _YAHOO_COOKIE_)\n\n res = requests.get('https://finance.yahoo.com/quote/SPY/history')\n _YAHOO_COOKIE_ = res.cookies['B']\n\n pattern = re.compile('.*\"CrumbStore\":\\{\"crumb\":\"(?P<crumb>[^\"]+)\"\\}')\n for line in res.text.splitlines():\n m = pattern.match(line)\n if m is not None:\n _YAHOO_CRUMB_ = m.groupdict()['crumb']\n\n # set global params\n _YAHOO_CHECKED_ = datetime.datetime.now()\n\n return (_YAHOO_CRUMB_, _YAHOO_COOKIE_)\n\n\ndef parse_ticker_csv(csv_str, auto_adjust):\n df = pd.read_csv(csv_str, index_col=0, error_bad_lines=False\n ).replace('null', np.nan).dropna()\n\n df.index = pd.to_datetime(df.index)\n df = df.apply(pd.to_numeric)\n df['Volume'] = df['Volume'].fillna(0).astype(int)\n\n if auto_adjust:\n ratio = df[\"Close\"] / df[\"Adj Close\"]\n df[\"Adj Open\"] = df[\"Open\"] / ratio\n df[\"Adj High\"] = df[\"High\"] / ratio\n df[\"Adj Low\"] = df[\"Low\"] / ratio\n\n df.drop(\n [\"Open\", \"High\", \"Low\", \"Close\"],\n axis=1, inplace=True)\n\n df.rename(columns={\n \"Adj Open\": \"Open\", \"Adj High\": \"High\",\n \"Adj Low\": \"Low\", \"Adj Close\": \"Close\"\n }, inplace=True)\n\n df = df[['Open', 'High', 'Low', 'Close', 'Volume']]\n\n return df.groupby(df.index).first()\n\n\n_DFS_ = {}\n_COMPLETED_ = 0\n_PROGRESS_BAR_ = False\n_FAILED_ = []\n\n\ndef make_chunks(l, n):\n \"\"\"Yield successive n-sized chunks from l.\"\"\"\n for i in range(0, len(l), n):\n yield l[i:i + n]\n\n\ndef download(tickers, start=None, end=None, as_panel=True,\n group_by='column', auto_adjust=False, progress=True,\n actions=None, threads=1, *args, **kwargs):\n \"\"\"Download yahoo tickers\n :Parameters:\n\n tickers : str, list\n List of tickers to download\n start: str\n Download start date string (YYYY-MM-DD) or datetime. Default is 1950-01-01\n end: str\n Download end date string (YYYY-MM-DD) or datetime. 
Default is today\n as_panel : bool\n Return a multi-index DataFrame or Panel. Default is True (Panel), which is deprecated\n group_by : str\n Group by ticker or 'column' (default)\n auto_adjust: bool\n Adjust all OHLC automatically? Default is False\n actions: str\n Download dividend + stock splits data. Default is None (no actions)\n Options are 'inline' (returns history + actions) and 'only' (actions only)\n threads: int\n How may threads to use? Default is 1 thread\n \"\"\"\n\n global _DFS_, _COMPLETED_, _PROGRESS_BAR_, _FAILED_\n _COMPLETED_ = 0\n _FAILED_ = []\n\n # format start\n if start is None:\n start = int(time.mktime(time.strptime('1950-01-01', '%Y-%m-%d')))\n elif isinstance(start, datetime.datetime):\n start = int(time.mktime(start.timetuple()))\n else:\n start = int(time.mktime(time.strptime(str(start), '%Y-%m-%d')))\n\n # format end\n if end is None:\n end = int(time.mktime(datetime.datetime.now().timetuple()))\n elif isinstance(end, datetime.datetime):\n end = int(time.mktime(end.timetuple()))\n else:\n end = int(time.mktime(time.strptime(str(end), '%Y-%m-%d')))\n\n # create ticker list\n tickers = tickers if isinstance(tickers, list) else [tickers]\n tickers = [x.upper() for x in tickers]\n\n # initiate progress bar\n if progress:\n _PROGRESS_BAR_ = ProgressBar(len(tickers), 'downloaded')\n\n # download using single thread\n if threads is None or threads < 2:\n download_chunk(tickers, start=start, end=end,\n auto_adjust=auto_adjust, progress=progress,\n actions=actions, *args, **kwargs)\n # threaded download\n else:\n threads = min([threads, len(tickers)])\n\n # download in chunks\n chunks = 0\n for chunk in make_chunks(tickers, max([1, len(tickers) // threads])):\n chunks += len(chunk)\n download_thread(chunk, start=start, end=end,\n auto_adjust=auto_adjust, progress=progress,\n actions=actions, *args, **kwargs)\n if len(tickers[-chunks:]) > 0:\n download_thread(tickers[-chunks:], start=start, end=end,\n auto_adjust=auto_adjust, progress=progress,\n actions=actions, *args, **kwargs)\n\n # wait for completion\n while _COMPLETED_ < len(tickers):\n time.sleep(0.1)\n\n _PROGRESS_BAR_.completed()\n\n # create panel (derecated)\n if as_panel:\n with warnings.catch_warnings():\n warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n data = pd.Panel(_DFS_)\n if group_by == 'column':\n data = data.swapaxes(0, 2)\n\n # create multiIndex df\n else:\n data = pd.concat(_DFS_.values(), axis=1, keys=_DFS_.keys())\n if group_by == 'column':\n data.columns = data.columns.swaplevel(0, 1)\n data.sort_index(level=0, axis=1, inplace=True)\n if auto_adjust:\n data = data[['Open', 'High', 'Low', 'Close', 'Volume']]\n else:\n data = data[['Open', 'High', 'Low',\n 'Close', 'Adj Close', 'Volume']]\n\n # return single df if only one ticker\n if len(tickers) == 1:\n data = _DFS_[tickers[0]]\n\n if len(_FAILED_) > 0:\n print(\"\\nThe following tickers failed to download:\\n\",\n ', '.join(_FAILED_))\n\n _DFS_ = {}\n return data\n\n\ndef download_one(ticker, start, end, interval, auto_adjust=None, actions=None):\n\n tried_once = False\n crumb, cookie = get_yahoo_crumb()\n\n url_str = \"https://query1.finance.yahoo.com/v7/finance/download/%s\"\n url_str += \"?period1=%s&period2=%s&interval=%s&events=%s&crumb=%s\"\n\n actions = None if '^' in ticker else actions\n\n if actions:\n url = url_str % (ticker, start, end, interval, 'div', crumb)\n res = requests.get(url, cookies={'B': cookie}).text\n # print(res)\n div = pd.DataFrame(columns=['action', 'value'])\n if \"error\" not in res:\n div = 
pd.read_csv(io.StringIO(res),\n index_col=0, error_bad_lines=False\n ).replace('null', np.nan).dropna()\n\n if isinstance(div, pd.DataFrame):\n div.index = pd.to_datetime(div.index)\n div[\"action\"] = \"DIVIDEND\"\n div = div.rename(columns={'Dividends': 'value'})\n div['value'] = div['value'].astype(float)\n\n # download Stock Splits data\n url = url_str % (ticker, start, end, interval, 'split', crumb)\n res = requests.get(url, cookies={'B': cookie}).text\n split = pd.DataFrame(columns=['action', 'value'])\n if \"error\" not in res:\n split = pd.read_csv(io.StringIO(res),\n index_col=0, error_bad_lines=False\n ).replace('null', np.nan).dropna()\n\n if isinstance(split, pd.DataFrame):\n split.index = pd.to_datetime(split.index)\n split[\"action\"] = \"SPLIT\"\n split = split.rename(columns={'Stock Splits': 'value'})\n if len(split.index) > 0:\n split['value'] = split.apply(\n lambda x: 1 / eval(x['value']), axis=1).astype(float)\n\n if actions == 'only':\n return pd.concat([div, split]).sort_index()\n\n # download history\n url = url_str % (ticker, start, end, interval, 'history', crumb)\n res = requests.get(url, cookies={'B': cookie}).text\n hist = pd.DataFrame(\n columns=['Open', 'High', 'Low', 'Close', 'Adj Close', 'Volume'])\n\n if \"error\" in res:\n return pd.DataFrame()\n\n hist = parse_ticker_csv(io.StringIO(res), auto_adjust)\n\n if len(hist.index) > 0:\n if actions is None:\n return hist\n\n hist['Dividends'] = div['value'] if len(div.index) > 0 else np.nan\n hist['Dividends'].fillna(0, inplace=True)\n hist['Stock Splits'] = split['value'] if len(\n split.index) > 0 else np.nan\n hist['Stock Splits'].fillna(1, inplace=True)\n\n return hist\n\n # empty len(hist.index) == 0\n if not tried_once:\n tried_once = True\n get_yahoo_crumb(force=True)\n return download_one(ticker, start, end, interval, auto_adjust, actions)\n\n\[email protected]\ndef download_thread(tickers, start=None, end=None,\n auto_adjust=False, progress=True,\n actions=False, *args, **kwargs):\n download_chunk(tickers, start=None, end=None,\n auto_adjust=False, progress=progress,\n actions=False, *args, **kwargs)\n\n\ndef download_chunk(tickers, start=None, end=None,\n auto_adjust=False, progress=True,\n actions=False, *args, **kwargs):\n\n global _DFS_, _COMPLETED_, _PROGRESS_BAR_, _FAILED_\n\n interval = kwargs[\"interval\"] if \"interval\" in kwargs else \"1d\"\n\n # url template\n url_str = \"https://query1.finance.yahoo.com/v7/finance/download/%s\"\n url_str += \"?period1=%s&period2=%s&interval=%s&events=%s&crumb=%s\"\n\n # failed tickers collectors\n round1_failed_tickers = []\n\n # start downloading\n for ticker in tickers:\n\n # yahoo crumb/cookie\n crumb, cookie = get_yahoo_crumb()\n\n tried_once = False\n try:\n hist = download_one(ticker, start, end,\n interval, auto_adjust, actions)\n if isinstance(hist, pd.DataFrame):\n _DFS_[ticker] = hist\n if progress:\n _PROGRESS_BAR_.animate()\n else:\n round1_failed_tickers.append(ticker)\n except:\n # something went wrong...\n # try one more time using a new cookie/crumb\n if not tried_once:\n tried_once = True\n try:\n get_yahoo_crumb(force=True)\n hist = download_one(ticker, start, end,\n interval, auto_adjust, actions)\n if isinstance(hist, pd.DataFrame):\n _DFS_[ticker] = hist\n if progress:\n _PROGRESS_BAR_.animate()\n else:\n round1_failed_tickers.append(ticker)\n except:\n round1_failed_tickers.append(ticker)\n time.sleep(0.001)\n\n # try failed items again before giving up\n _COMPLETED_ += len(tickers) - len(round1_failed_tickers)\n\n if 
len(round1_failed_tickers) > 0:\n get_yahoo_crumb(force=True)\n for ticker in round1_failed_tickers:\n try:\n hist = download_one(ticker, start, end,\n interval, auto_adjust, actions)\n if isinstance(hist, pd.DataFrame):\n _DFS_[ticker] = hist\n if progress:\n _PROGRESS_BAR_.animate()\n else:\n _FAILED_.append(ticker)\n except:\n _FAILED_.append(ticker)\n pass\n time.sleep(0.000001)\n _COMPLETED_ += 1\n\n\nclass ProgressBar:\n def __init__(self, iterations, text='completed'):\n self.text = text\n self.iterations = iterations\n self.prog_bar = '[]'\n self.fill_char = '*'\n self.width = 50\n self.__update_amount(0)\n self.elapsed = 1\n\n def completed(self):\n if self.elapsed > self.iterations:\n self.elapsed = self.iterations\n self.update_iteration(1)\n print('\\r' + str(self), end='')\n sys.stdout.flush()\n\n def animate(self, iteration=None):\n if iteration is None:\n self.elapsed += 1\n iteration = self.elapsed\n else:\n self.elapsed += iteration\n\n print('\\r' + str(self), end='')\n sys.stdout.flush()\n self.update_iteration()\n\n def update_iteration(self, val=None):\n val = val if val is not None else self.elapsed / float(self.iterations)\n self.__update_amount(val * 100.0)\n self.prog_bar += ' %s of %s %s' % (\n self.elapsed, self.iterations, self.text)\n\n def __update_amount(self, new_amount):\n percent_done = int(round((new_amount / 100.0) * 100.0))\n all_full = self.width - 2\n num_hashes = int(round((percent_done / 100.0) * all_full))\n self.prog_bar = '[' + self.fill_char * \\\n num_hashes + ' ' * (all_full - num_hashes) + ']'\n pct_place = (len(self.prog_bar) // 2) - len(str(percent_done))\n pct_string = '%d%%' % percent_done\n self.prog_bar = self.prog_bar[0:pct_place] + \\\n (pct_string + self.prog_bar[pct_place + len(pct_string):])\n\n def __str__(self):\n return str(self.prog_bar)\n\n\n# make pandas datareader optional\n# otherwise can be called via fix_yahoo_finance.download(...)\ndef pdr_override():\n try:\n import pandas_datareader\n pandas_datareader.data.get_data_yahoo = download\n except:\n pass\n\npdr_override()\n", "path": "fix_yahoo_finance/__init__.py"}]} |
gh_patches_debug_123 | rasdani/github-patches | git_diff | ibis-project__ibis-1760 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
MapD str.to_timestamp(): No translation rule for <class 'ibis.expr.operations.StringToTimestamp'>
Running

    expr = ibis.literal('20170206').to_timestamp('%Y%m%d')
    client.execute(expr)

fails with

    ibis.common.OperationNotDefinedError: No translation rule for <class 'ibis.expr.operations.StringToTimestamp'>

Environment: MapD 4.1.1, ibis 0.14, Python 3.6, Ubuntu 18.04, Anaconda 5.1
--- END ISSUE ---
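For orientation before the file listing: the MapD compiler dispatches each expression by looking its operation class up in an operation-registry dict, and an op class with no registry entry surfaces as exactly the OperationNotDefinedError quoted above. The snippet below is a minimal, self-contained sketch of that dispatch pattern — the class names are hypothetical stand-ins, not the real ibis objects.

```python
# Illustrative sketch only: simplified stand-ins mirroring the registry-based
# dispatch in ibis/mapd/operations.py, not the actual ibis internals.

class OperationNotDefinedError(Exception):
    """Stand-in for ibis.common.OperationNotDefinedError."""


class StringToTimestamp:
    """Stand-in for ibis.expr.operations.StringToTimestamp."""


_operation_registry = {}  # maps operation class -> formatter callable


def translate(op):
    # Look the operation class up in the registry; an unknown class fails loudly.
    try:
        formatter = _operation_registry[type(op)]
    except KeyError:
        raise OperationNotDefinedError(
            'No translation rule for {}'.format(type(op))
        )
    return formatter(op)


try:
    translate(StringToTimestamp())
except OperationNotDefinedError as exc:
    print(exc)  # No translation rule for <class '__main__.StringToTimestamp'>
```

In the real module, op classes listed in its unsupported-ops table are instead mapped to a formatter that raises UnsupportedOperationError, so they at least fail with an explicit backend-specific message rather than this generic lookup error.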
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ibis/mapd/operations.py`
Content:
```
1 import warnings
2 from copy import copy
3 from datetime import date, datetime
4 from io import StringIO
5
6 import ibis
7 import ibis.common as com
8 import ibis.expr.datatypes as dt
9 import ibis.expr.operations as ops
10 import ibis.expr.rules as rlz
11 import ibis.expr.types as ir
12 import ibis.util as util
13 from ibis.impala import compiler as impala_compiler
14 from ibis.mapd.identifiers import quote_identifier
15
16 _sql_type_names = {
17 'boolean': 'boolean',
18 'date': 'date',
19 'decimal': 'decimal',
20 'double': 'double',
21 'float32': 'float',
22 'float64': 'double',
23 'int8': 'smallint',
24 'int16': 'smallint',
25 'int32': 'int',
26 'int64': 'bigint',
27 'linestring': 'linestring',
28 'multipolygon': 'multipolygon',
29 'point': 'point',
30 'polygon': 'polygon',
31 'string': 'text',
32 'time': 'time',
33 'timestamp': 'timestamp',
34 }
35
36
37 def _is_floating(*args):
38 for arg in args:
39 if isinstance(arg, ir.FloatingColumn):
40 return True
41 return False
42
43
44 def _type_to_sql_string(tval):
45 if isinstance(tval, dt.Decimal):
46 return 'decimal({}, {})'.format(tval.precision, tval.scale)
47 else:
48 return _sql_type_names[tval.name.lower()]
49
50
51 def _cast(translator, expr):
52 from ibis.mapd.client import MapDDataType
53
54 op = expr.op()
55 arg, target = op.args
56 arg_ = translator.translate(arg)
57 type_ = str(MapDDataType.from_ibis(target, nullable=False))
58
59 return 'CAST({0!s} AS {1!s})'.format(arg_, type_)
60
61
62 def _all(expr):
63 op = expr.op()
64 arg = op.args[0]
65
66 if isinstance(arg, ir.BooleanValue):
67 arg = arg.ifelse(1, 0)
68
69 return (1 - arg).sum() == 0
70
71
72 def _any(expr):
73 op = expr.op()
74 arg = op.args[0]
75
76 if isinstance(arg, ir.BooleanValue):
77 arg = arg.ifelse(1, 0)
78
79 return arg.sum() >= 0
80
81
82 def _not_any(expr):
83 op = expr.op()
84 arg = op.args[0]
85
86 if isinstance(arg, ir.BooleanValue):
87 arg = arg.ifelse(1, 0)
88
89 return arg.sum() == 0
90
91
92 def _not_all(expr):
93 op = expr.op()
94 arg = op.args[0]
95
96 if isinstance(arg, ir.BooleanValue):
97 arg = arg.ifelse(1, 0)
98
99 return (1 - arg).sum() != 0
100
101
102 def _parenthesize(translator, expr):
103 op = expr.op()
104 op_klass = type(op)
105
106 # function calls don't need parens
107 what_ = translator.translate(expr)
108 if (op_klass in _binary_infix_ops) or (op_klass in _unary_ops):
109 return '({0!s})'.format(what_)
110 else:
111 return what_
112
113
114 def fixed_arity(func_name, arity):
115 def formatter(translator, expr):
116 op = expr.op()
117 arg_count = len(op.args)
118 if arity != arg_count:
119 msg = 'Incorrect number of args {0} instead of {1}'
120 raise com.UnsupportedOperationError(msg.format(arg_count, arity))
121 return _call(translator, func_name, *op.args)
122
123 formatter.__name__ = func_name
124 return formatter
125
126
127 def unary(func_name):
128 return fixed_arity(func_name, 1)
129
130
131 def _reduction_format(
132 translator,
133 func_name,
134 sql_func_name=None,
135 sql_signature='{}({})',
136 arg=None,
137 args=None,
138 where=None,
139 ):
140 if not sql_func_name:
141 sql_func_name = func_name
142
143 if where is not None:
144 arg = where.ifelse(arg, ibis.NA)
145
146 return sql_signature.format(
147 sql_func_name, ', '.join(map(translator.translate, [arg] + list(args)))
148 )
149
150
151 def _reduction(func_name, sql_func_name=None, sql_signature='{}({})'):
152 def formatter(translator, expr):
153 op = expr.op()
154
155 # HACK: support trailing arguments
156 where = op.where
157 args = [arg for arg in op.args if arg is not where]
158
159 return _reduction_format(
160 translator,
161 func_name,
162 sql_func_name,
163 sql_signature,
164 args[0],
165 args[1:],
166 where,
167 )
168
169 formatter.__name__ = func_name
170 return formatter
171
172
173 def _variance_like(func):
174 variants = {'sample': '{}_SAMP'.format(func), 'pop': '{}_POP'.format(func)}
175
176 def formatter(translator, expr):
177 arg, how, where = expr.op().args
178
179 return _reduction_format(
180 translator, variants[how].upper(), None, '{}({})', arg, [], where
181 )
182
183 formatter.__name__ = func
184 return formatter
185
186
187 def unary_prefix_op(prefix_op):
188 def formatter(translator, expr):
189 op = expr.op()
190 arg = _parenthesize(translator, op.args[0])
191
192 return '{0!s} {1!s}'.format(prefix_op.upper(), arg)
193
194 formatter.__name__ = prefix_op
195 return formatter
196
197
198 def binary_infix_op(infix_sym):
199 def formatter(translator, expr):
200 op = expr.op()
201
202 left, right = op.args[0], op.args[1]
203 left_ = _parenthesize(translator, left)
204 right_ = _parenthesize(translator, right)
205
206 return '{0!s} {1!s} {2!s}'.format(left_, infix_sym, right_)
207
208 return formatter
209
210
211 def _call(translator, func, *args):
212 args_ = ', '.join(map(translator.translate, args))
213 return '{0!s}({1!s})'.format(func, args_)
214
215
216 def _extract_field(sql_attr):
217 def extract_field_formatter(translator, expr):
218 op = expr.op()
219 arg = translator.translate(op.args[0])
220 return 'EXTRACT({} FROM {})'.format(sql_attr, arg)
221
222 return extract_field_formatter
223
224
225 # STATS
226
227
228 def _corr(translator, expr):
229 # pull out the arguments to the expression
230 args = expr.op().args
231
232 x, y, how, where = args
233
234 # compile the argument
235 compiled_x = translator.translate(x)
236 compiled_y = translator.translate(y)
237
238 return 'CORR({}, {})'.format(compiled_x, compiled_y)
239
240
241 def _cov(translator, expr):
242 # pull out the arguments to the expression
243 args = expr.op().args
244
245 x, y, how, where = args
246
247 # compile the argument
248 compiled_x = translator.translate(x)
249 compiled_y = translator.translate(y)
250
251 return 'COVAR_{}({}, {})'.format(how[:4].upper(), compiled_x, compiled_y)
252
253
254 # STRING
255
256
257 def _length(func_name='length', sql_func_name='CHAR_LENGTH'):
258 def __lenght(translator, expr):
259 # pull out the arguments to the expression
260 arg = expr.op().args[0]
261 # compile the argument
262 compiled_arg = translator.translate(arg)
263 return '{}({})'.format(sql_func_name, compiled_arg)
264
265 __lenght.__name__ = func_name
266 return __lenght
267
268
269 def _contains(translator, expr):
270 arg, pattern = expr.op().args[:2]
271
272 pattern_ = '%{}%'.format(translator.translate(pattern)[1:-1])
273
274 return _parenthesize(translator, arg.like(pattern_).ifelse(1, -1))
275
276
277 # GENERIC
278
279
280 def _value_list(translator, expr):
281 op = expr.op()
282 values_ = map(translator.translate, op.values)
283 return '({0})'.format(', '.join(values_))
284
285
286 def _interval_format(translator, expr):
287 dtype = expr.type()
288 if dtype.unit in {'ms', 'us', 'ns'}:
289 raise com.UnsupportedOperationError(
290 "MapD doesn't support subsecond interval resolutions"
291 )
292
293 return '{1}, (sign){0}'.format(expr.op().value, dtype.resolution.upper())
294
295
296 def _interval_from_integer(translator, expr):
297 op = expr.op()
298 arg, unit = op.args
299
300 dtype = expr.type()
301 if dtype.unit in {'ms', 'us', 'ns'}:
302 raise com.UnsupportedOperationError(
303 "MapD doesn't support subsecond interval resolutions"
304 )
305
306 arg_ = translator.translate(arg)
307 return '{}, (sign){}'.format(dtype.resolution.upper(), arg_)
308
309
310 def _timestamp_op(func, op_sign='+'):
311 def _formatter(translator, expr):
312 op = expr.op()
313 left, right = op.args
314
315 formatted_left = translator.translate(left)
316 formatted_right = translator.translate(right)
317
318 if isinstance(left, ir.DateValue):
319 formatted_left = 'CAST({} as timestamp)'.format(formatted_left)
320
321 return '{}({}, {})'.format(
322 func, formatted_right.replace('(sign)', op_sign), formatted_left
323 )
324
325 return _formatter
326
327
328 def _set_literal_format(translator, expr):
329 value_type = expr.type().value_type
330
331 formatted = [
332 translator.translate(ir.literal(x, type=value_type))
333 for x in expr.op().value
334 ]
335
336 return '({})'.format(', '.join(formatted))
337
338
339 def _cross_join(translator, expr):
340 args = expr.op().args
341 left, right = args[:2]
342 return translator.translate(left.join(right, ibis.literal(True)))
343
344
345 def _format_point_value(value):
346 return ' '.join(str(v) for v in value)
347
348
349 def _format_linestring_value(value):
350 return ', '.join(
351 '{}'.format(_format_point_value(point)) for point in value
352 )
353
354
355 def _format_polygon_value(value):
356 return ', '.join(
357 '({})'.format(_format_linestring_value(line)) for line in value
358 )
359
360
361 def _format_multipolygon_value(value):
362 return ', '.join(
363 '({})'.format(_format_polygon_value(polygon)) for polygon in value
364 )
365
366
367 def _format_geo_metadata(op, value):
368 value = copy(value)
369 srid = op.args[1].srid
370 geotype = op.args[1].geotype
371
372 if geotype is None or geotype not in ('geometry', 'geography'):
373 return "'{}'".format(value)
374
375 if geotype == 'geography':
376 geofunc = 'ST_GeogFromText'
377 else:
378 geofunc = 'ST_GeomFromText'
379
380 return "{}('{}'{})".format(
381 geofunc, value, ', {}'.format(srid) if srid else ''
382 )
383
384
385 def literal(translator, expr):
386 op = expr.op()
387 value = op.value
388
389 # geo spatial data type
390 if isinstance(expr, ir.PointScalar):
391 result = "POINT({0})".format(_format_point_value(value))
392 return _format_geo_metadata(op, result)
393 elif isinstance(expr, ir.LineStringScalar):
394 result = "LINESTRING({0})".format(_format_linestring_value(value))
395 return _format_geo_metadata(op, result)
396 elif isinstance(expr, ir.PolygonScalar):
397 result = "POLYGON({0!s})".format(_format_polygon_value(value))
398 return _format_geo_metadata(op, result)
399 elif isinstance(expr, ir.MultiPolygonScalar):
400 result = "MULTIPOLYGON({0})".format(_format_multipolygon_value(value))
401 return _format_geo_metadata(op, result)
402 # primitive data type
403 elif isinstance(expr, ir.BooleanValue):
404 return '1' if value else '0'
405 elif isinstance(expr, ir.StringValue):
406 return "'{0!s}'".format(value.replace("'", "\\'"))
407 elif isinstance(expr, ir.NumericValue):
408 return repr(value)
409 elif isinstance(expr, ir.SetScalar):
410 return _set_literal_format(translator, expr)
411 elif isinstance(expr, ir.IntervalValue):
412 return _interval_format(translator, expr)
413 elif isinstance(expr, ir.TimestampValue):
414 if isinstance(value, datetime):
415 if value.microsecond != 0:
416 msg = 'Unsupported subsecond accuracy {}'
417 warnings.warn(msg.format(value))
418 value = value.strftime('%Y-%m-%d %H:%M:%S')
419 elif isinstance(value, str):
420 # check if the datetime format is a valid format (
421 # '%Y-%m-%d %H:%M:%S' or '%Y-%m-%d'). if format is '%Y-%m-%d' it
422 # is converted to '%Y-%m-%d 00:00:00'
423 msg = (
424 "Literal datetime string should use '%Y-%m-%d %H:%M:%S' "
425 "format. When '%Y-%m-%d' format is used, datetime will be "
426 "converted automatically to '%Y-%m-%d 00:00:00'"
427 )
428
429 try:
430 dt_value = datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
431 except ValueError:
432 try:
433 dt_value = datetime.strptime(value, '%Y-%m-%d')
434 warnings.warn(msg)
435 except ValueError:
436 raise Exception(msg)
437
438 value = dt_value.strftime('%Y-%m-%d %H:%M:%S')
439
440 return "'{0!s}'".format(value)
441 elif isinstance(expr, ir.DateValue):
442 if isinstance(value, date):
443 value = value.strftime('%Y-%m-%d')
444 return "toDate('{0!s}')".format(value)
445 # array data type
446 elif isinstance(expr, ir.ArrayValue):
447 return str(list(value))
448 else:
449 raise NotImplementedError(type(expr))
450
451
452 def _where(translator, expr):
453 # pull out the arguments to the expression
454 args = expr.op().args
455 condition, expr1, expr2 = args
456 expr = condition.ifelse(expr1, expr2)
457 return translator.translate(expr)
458
459
460 def raise_unsupported_expr_error(expr):
461 msg = "MapD backend doesn't support {} operation!"
462 op = expr.op()
463 raise com.UnsupportedOperationError(msg.format(type(op)))
464
465
466 def raise_unsupported_op_error(translator, expr, *args):
467 msg = "MapD backend doesn't support {} operation!"
468 op = expr.op()
469 raise com.UnsupportedOperationError(msg.format(type(op)))
470
471
472 # translator
473 def _name_expr(formatted_expr, quoted_name):
474 return '{} AS {}'.format(formatted_expr, quote_identifier(quoted_name))
475
476
477 class CaseFormatter:
478 def __init__(self, translator, base, cases, results, default):
479 self.translator = translator
480 self.base = base
481 self.cases = cases
482 self.results = results
483 self.default = default
484
485 # HACK
486 self.indent = 2
487 self.multiline = len(cases) > 1
488 self.buf = StringIO()
489
490 def _trans(self, expr):
491 return self.translator.translate(expr)
492
493 def get_result(self):
494 """
495
496 :return:
497 """
498 self.buf.seek(0)
499
500 self.buf.write('CASE')
501 if self.base is not None:
502 base_str = self._trans(self.base)
503 self.buf.write(' {0}'.format(base_str))
504
505 for case, result in zip(self.cases, self.results):
506 self._next_case()
507 case_str = self._trans(case)
508 result_str = self._trans(result)
509 self.buf.write('WHEN {0} THEN {1}'.format(case_str, result_str))
510
511 if self.default is not None:
512 self._next_case()
513 default_str = self._trans(self.default)
514 self.buf.write('ELSE {0}'.format(default_str))
515
516 if self.multiline:
517 self.buf.write('\nEND')
518 else:
519 self.buf.write(' END')
520
521 return self.buf.getvalue()
522
523 def _next_case(self):
524 if self.multiline:
525 self.buf.write('\n{0}'.format(' ' * self.indent))
526 else:
527 self.buf.write(' ')
528
529
530 def _table_array_view(translator, expr):
531 ctx = translator.context
532 table = expr.op().table
533 query = ctx.get_compiled_expr(table)
534 return '(\n{0}\n)'.format(util.indent(query, ctx.indent))
535
536
537 def _timestamp_truncate(translator, expr):
538 op = expr.op()
539 arg, unit = op.args
540
541 unit_ = dt.Interval(unit=unit).resolution.upper()
542
543 # return _call_date_trunc(translator, converter, arg)
544 arg_ = translator.translate(arg)
545 return 'DATE_TRUNC({0!s}, {1!s})'.format(unit_, arg_)
546
547
548 def _table_column(translator, expr):
549 op = expr.op()
550 field_name = op.name
551
552 quoted_name = quote_identifier(field_name, force=True)
553
554 table = op.table
555 ctx = translator.context
556
557 # If the column does not originate from the table set in the current SELECT
558 # context, we should format as a subquery
559 if translator.permit_subquery and ctx.is_foreign_expr(table):
560 proj_expr = table.projection([field_name]).to_array()
561 return _table_array_view(translator, proj_expr)
562
563 if ctx.need_aliases():
564 alias = ctx.get_ref(table)
565 if alias is not None:
566 quoted_name = '{}.{}'.format(alias, quoted_name)
567
568 return quoted_name
569
570
571 # AGGREGATION
572
573 approx_count_distinct = _reduction(
574 'approx_nunique',
575 sql_func_name='approx_count_distinct',
576 sql_signature='{}({}, 100)',
577 )
578
579 count_distinct = _reduction('count')
580 count = _reduction('count')
581
582
583 def _arbitrary(translator, expr):
584 arg, how, where = expr.op().args
585
586 if how not in (None, 'last'):
587 raise com.UnsupportedOperationError(
588 '{!r} value not supported for arbitrary in MapD'.format(how)
589 )
590
591 if where is not None:
592 arg = where.ifelse(arg, ibis.NA)
593
594 return 'SAMPLE({})'.format(translator.translate(arg))
595
596
597 # MATH
598
599
600 class NumericTruncate(ops.NumericBinaryOp):
601 """Truncates x to y decimal places"""
602
603 output_type = rlz.shape_like('left', ops.dt.float)
604
605
606 # GEOMETRIC
607
608
609 class Conv_4326_900913_X(ops.UnaryOp):
610 """
611 Converts WGS-84 latitude to WGS-84 Web Mercator x coordinate.
612 """
613
614 output_type = rlz.shape_like('arg', ops.dt.float)
615
616
617 class Conv_4326_900913_Y(ops.UnaryOp):
618 """
619 Converts WGS-84 longitude to WGS-84 Web Mercator y coordinate.
620
621 """
622
623 output_type = rlz.shape_like('arg', ops.dt.float)
624
625
626 # String
627
628
629 class ByteLength(ops.StringLength):
630 """Returns the length of a string in bytes length"""
631
632
633 # https://www.mapd.com/docs/latest/mapd-core-guide/dml/
634 _binary_infix_ops = {
635 # math
636 ops.Power: fixed_arity('power', 2),
637 ops.NotEquals: impala_compiler._binary_infix_op('<>'),
638 }
639
640 _unary_ops = {}
641
642 # COMPARISON
643 _comparison_ops = {}
644
645
646 # MATH
647 _math_ops = {
648 ops.Degrees: unary('degrees'), # MapD function
649 ops.Modulus: fixed_arity('mod', 2),
650 ops.Pi: fixed_arity('pi', 0),
651 ops.Radians: unary('radians'),
652 NumericTruncate: fixed_arity('truncate', 2),
653 }
654
655 # STATS
656 _stats_ops = {
657 ops.Correlation: _corr,
658 ops.StandardDev: _variance_like('stddev'),
659 ops.Variance: _variance_like('var'),
660 ops.Covariance: _cov,
661 }
662
663 # TRIGONOMETRIC
664 _trigonometric_ops = {
665 ops.Acos: unary('acos'),
666 ops.Asin: unary('asin'),
667 ops.Atan: unary('atan'),
668 ops.Atan2: fixed_arity('atan2', 2),
669 ops.Cos: unary('cos'),
670 ops.Cot: unary('cot'),
671 ops.Sin: unary('sin'),
672 ops.Tan: unary('tan'),
673 }
674
675 # GEOMETRIC
676 _geometric_ops = {
677 Conv_4326_900913_X: unary('conv_4326_900913_x'),
678 Conv_4326_900913_Y: unary('conv_4326_900913_y'),
679 }
680
681 # GEO SPATIAL
682 _geospatial_ops = {
683 ops.GeoArea: unary('ST_AREA'),
684 ops.GeoContains: fixed_arity('ST_CONTAINS', 2),
685 ops.GeoDistance: fixed_arity('ST_DISTANCE', 2),
686 ops.GeoLength: unary('ST_LENGTH'),
687 ops.GeoPerimeter: unary('ST_PERIMETER'),
688 ops.GeoMaxDistance: fixed_arity('ST_MAXDISTANCE', 2),
689 ops.GeoX: unary('ST_X'),
690 ops.GeoY: unary('ST_Y'),
691 ops.GeoXMin: unary('ST_XMIN'),
692 ops.GeoXMax: unary('ST_XMAX'),
693 ops.GeoYMin: unary('ST_YMIN'),
694 ops.GeoYMax: unary('ST_YMAX'),
695 ops.GeoStartPoint: unary('ST_STARTPOINT'),
696 ops.GeoEndPoint: unary('ST_ENDPOINT'),
697 ops.GeoPointN: fixed_arity('ST_POINTN', 2),
698 ops.GeoNPoints: unary('ST_NPOINTS'),
699 ops.GeoNRings: unary('ST_NRINGS'),
700 ops.GeoSRID: unary('ST_SRID'),
701 }
702
703 # STRING
704 _string_ops = {
705 ops.StringLength: _length(),
706 ByteLength: _length('byte_length', 'LENGTH'),
707 ops.StringSQLILike: binary_infix_op('ilike'),
708 ops.StringFind: _contains,
709 }
710
711 # DATE
712 _date_ops = {
713 ops.DateTruncate: _timestamp_truncate,
714 ops.TimestampTruncate: _timestamp_truncate,
715 # DIRECT EXTRACT OPERATIONS
716 ops.ExtractYear: _extract_field('YEAR'),
717 ops.ExtractMonth: _extract_field('MONTH'),
718 ops.ExtractDay: _extract_field('DAY'),
719 ops.ExtractHour: _extract_field('HOUR'),
720 ops.ExtractMinute: _extract_field('MINUTE'),
721 ops.ExtractSecond: _extract_field('SECOND'),
722 ops.IntervalAdd: _interval_from_integer,
723 ops.IntervalFromInteger: _interval_from_integer,
724 ops.DateAdd: _timestamp_op('TIMESTAMPADD'),
725 ops.DateSub: _timestamp_op('TIMESTAMPADD', '-'),
726 ops.TimestampAdd: _timestamp_op('TIMESTAMPADD'),
727 ops.TimestampSub: _timestamp_op('TIMESTAMPADD', '-'),
728 }
729
730 # AGGREGATION/REDUCTION
731 _agg_ops = {
732 ops.HLLCardinality: approx_count_distinct,
733 ops.DistinctColumn: unary_prefix_op('distinct'),
734 ops.Arbitrary: _arbitrary,
735 }
736
737 # GENERAL
738 _general_ops = {
739 ops.Literal: literal,
740 ops.ValueList: _value_list,
741 ops.Cast: _cast,
742 ops.Where: _where,
743 ops.TableColumn: _table_column,
744 ops.CrossJoin: _cross_join,
745 }
746
747 # UNSUPPORTED OPERATIONS
748 _unsupported_ops = [
749 # generic/aggregation
750 ops.CMSMedian,
751 ops.WindowOp,
752 ops.DecimalPrecision,
753 ops.DecimalScale,
754 ops.BaseConvert,
755 ops.CumulativeSum,
756 ops.CumulativeMin,
757 ops.CumulativeMax,
758 ops.CumulativeMean,
759 ops.CumulativeAny,
760 ops.CumulativeAll,
761 ops.IdenticalTo,
762 ops.RowNumber,
763 ops.DenseRank,
764 ops.MinRank,
765 ops.PercentRank,
766 ops.FirstValue,
767 ops.LastValue,
768 ops.NthValue,
769 ops.Lag,
770 ops.Lead,
771 ops.NTile,
772 ops.GroupConcat,
773 ops.NullIf,
774 ops.NullIfZero,
775 ops.NullLiteral,
776 ops.IsInf,
777 ops.IsNan,
778 ops.IfNull,
779 # string
780 ops.Lowercase,
781 ops.Uppercase,
782 ops.FindInSet,
783 ops.StringReplace,
784 ops.StringJoin,
785 ops.StringSplit,
786 ops.Translate,
787 ops.StringAscii,
788 ops.LPad,
789 ops.RPad,
790 ops.Strip,
791 ops.RStrip,
792 ops.LStrip,
793 ops.Capitalize,
794 ops.Substring,
795 ops.StrRight,
796 ops.Repeat,
797 ops.Reverse,
798 ops.RegexExtract,
799 ops.RegexReplace,
800 ops.ParseURL,
801 # Numeric
802 ops.Least,
803 ops.Greatest,
804 ops.Log2,
805 ops.Log,
806 ops.Round,
807 # date/time/timestamp
808 ops.TimestampFromUNIX,
809 ops.Date,
810 ops.TimeTruncate,
811 ops.TimestampDiff,
812 ops.DayOfWeekIndex,
813 ops.DayOfWeekName,
814 # table
815 ops.Union,
816 ]
817
818 _unsupported_ops = {k: raise_unsupported_op_error for k in _unsupported_ops}
819
820 # registry
821 _operation_registry = impala_compiler._operation_registry.copy()
822
823 _operation_registry.update(_general_ops)
824 _operation_registry.update(_binary_infix_ops)
825 _operation_registry.update(_unary_ops)
826 _operation_registry.update(_comparison_ops)
827 _operation_registry.update(_math_ops)
828 _operation_registry.update(_stats_ops)
829 _operation_registry.update(_trigonometric_ops)
830 _operation_registry.update(_geometric_ops)
831 _operation_registry.update(_string_ops)
832 _operation_registry.update(_date_ops)
833 _operation_registry.update(_agg_ops)
834 _operation_registry.update(_geospatial_ops)
835 # the last update should be with unsupported ops
836 _operation_registry.update(_unsupported_ops)
837
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ibis/mapd/operations.py b/ibis/mapd/operations.py
--- a/ibis/mapd/operations.py
+++ b/ibis/mapd/operations.py
@@ -783,6 +783,7 @@
ops.StringReplace,
ops.StringJoin,
ops.StringSplit,
+ ops.StringToTimestamp,
ops.Translate,
ops.StringAscii,
ops.LPad,
| {"golden_diff": "diff --git a/ibis/mapd/operations.py b/ibis/mapd/operations.py\n--- a/ibis/mapd/operations.py\n+++ b/ibis/mapd/operations.py\n@@ -783,6 +783,7 @@\n ops.StringReplace,\n ops.StringJoin,\n ops.StringSplit,\n+ ops.StringToTimestamp,\n ops.Translate,\n ops.StringAscii,\n ops.LPad,\n", "issue": "MapD str.to_timestamp() : No translation rule for <class 'ibis.expr.operations.StringToTimestamp'>\nexpr = ibis.literal('20170206').to_timestamp('%Y%m%d')\r\nclient.execute(expr)\r\n\r\nibis.common.OperationNotDefinedError: No translation rule for <class 'ibis.expr.operations.StringToTimestamp'>\r\n\r\nBackend=MapD_4.1.1 ibis=0.14 Python=3.6 Ubuntu=18.04 Anaconda=5.1\n", "before_files": [{"content": "import warnings\nfrom copy import copy\nfrom datetime import date, datetime\nfrom io import StringIO\n\nimport ibis\nimport ibis.common as com\nimport ibis.expr.datatypes as dt\nimport ibis.expr.operations as ops\nimport ibis.expr.rules as rlz\nimport ibis.expr.types as ir\nimport ibis.util as util\nfrom ibis.impala import compiler as impala_compiler\nfrom ibis.mapd.identifiers import quote_identifier\n\n_sql_type_names = {\n 'boolean': 'boolean',\n 'date': 'date',\n 'decimal': 'decimal',\n 'double': 'double',\n 'float32': 'float',\n 'float64': 'double',\n 'int8': 'smallint',\n 'int16': 'smallint',\n 'int32': 'int',\n 'int64': 'bigint',\n 'linestring': 'linestring',\n 'multipolygon': 'multipolygon',\n 'point': 'point',\n 'polygon': 'polygon',\n 'string': 'text',\n 'time': 'time',\n 'timestamp': 'timestamp',\n}\n\n\ndef _is_floating(*args):\n for arg in args:\n if isinstance(arg, ir.FloatingColumn):\n return True\n return False\n\n\ndef _type_to_sql_string(tval):\n if isinstance(tval, dt.Decimal):\n return 'decimal({}, {})'.format(tval.precision, tval.scale)\n else:\n return _sql_type_names[tval.name.lower()]\n\n\ndef _cast(translator, expr):\n from ibis.mapd.client import MapDDataType\n\n op = expr.op()\n arg, target = op.args\n arg_ = translator.translate(arg)\n type_ = str(MapDDataType.from_ibis(target, nullable=False))\n\n return 'CAST({0!s} AS {1!s})'.format(arg_, type_)\n\n\ndef _all(expr):\n op = expr.op()\n arg = op.args[0]\n\n if isinstance(arg, ir.BooleanValue):\n arg = arg.ifelse(1, 0)\n\n return (1 - arg).sum() == 0\n\n\ndef _any(expr):\n op = expr.op()\n arg = op.args[0]\n\n if isinstance(arg, ir.BooleanValue):\n arg = arg.ifelse(1, 0)\n\n return arg.sum() >= 0\n\n\ndef _not_any(expr):\n op = expr.op()\n arg = op.args[0]\n\n if isinstance(arg, ir.BooleanValue):\n arg = arg.ifelse(1, 0)\n\n return arg.sum() == 0\n\n\ndef _not_all(expr):\n op = expr.op()\n arg = op.args[0]\n\n if isinstance(arg, ir.BooleanValue):\n arg = arg.ifelse(1, 0)\n\n return (1 - arg).sum() != 0\n\n\ndef _parenthesize(translator, expr):\n op = expr.op()\n op_klass = type(op)\n\n # function calls don't need parens\n what_ = translator.translate(expr)\n if (op_klass in _binary_infix_ops) or (op_klass in _unary_ops):\n return '({0!s})'.format(what_)\n else:\n return what_\n\n\ndef fixed_arity(func_name, arity):\n def formatter(translator, expr):\n op = expr.op()\n arg_count = len(op.args)\n if arity != arg_count:\n msg = 'Incorrect number of args {0} instead of {1}'\n raise com.UnsupportedOperationError(msg.format(arg_count, arity))\n return _call(translator, func_name, *op.args)\n\n formatter.__name__ = func_name\n return formatter\n\n\ndef unary(func_name):\n return fixed_arity(func_name, 1)\n\n\ndef _reduction_format(\n translator,\n func_name,\n sql_func_name=None,\n sql_signature='{}({})',\n arg=None,\n 
args=None,\n where=None,\n):\n if not sql_func_name:\n sql_func_name = func_name\n\n if where is not None:\n arg = where.ifelse(arg, ibis.NA)\n\n return sql_signature.format(\n sql_func_name, ', '.join(map(translator.translate, [arg] + list(args)))\n )\n\n\ndef _reduction(func_name, sql_func_name=None, sql_signature='{}({})'):\n def formatter(translator, expr):\n op = expr.op()\n\n # HACK: support trailing arguments\n where = op.where\n args = [arg for arg in op.args if arg is not where]\n\n return _reduction_format(\n translator,\n func_name,\n sql_func_name,\n sql_signature,\n args[0],\n args[1:],\n where,\n )\n\n formatter.__name__ = func_name\n return formatter\n\n\ndef _variance_like(func):\n variants = {'sample': '{}_SAMP'.format(func), 'pop': '{}_POP'.format(func)}\n\n def formatter(translator, expr):\n arg, how, where = expr.op().args\n\n return _reduction_format(\n translator, variants[how].upper(), None, '{}({})', arg, [], where\n )\n\n formatter.__name__ = func\n return formatter\n\n\ndef unary_prefix_op(prefix_op):\n def formatter(translator, expr):\n op = expr.op()\n arg = _parenthesize(translator, op.args[0])\n\n return '{0!s} {1!s}'.format(prefix_op.upper(), arg)\n\n formatter.__name__ = prefix_op\n return formatter\n\n\ndef binary_infix_op(infix_sym):\n def formatter(translator, expr):\n op = expr.op()\n\n left, right = op.args[0], op.args[1]\n left_ = _parenthesize(translator, left)\n right_ = _parenthesize(translator, right)\n\n return '{0!s} {1!s} {2!s}'.format(left_, infix_sym, right_)\n\n return formatter\n\n\ndef _call(translator, func, *args):\n args_ = ', '.join(map(translator.translate, args))\n return '{0!s}({1!s})'.format(func, args_)\n\n\ndef _extract_field(sql_attr):\n def extract_field_formatter(translator, expr):\n op = expr.op()\n arg = translator.translate(op.args[0])\n return 'EXTRACT({} FROM {})'.format(sql_attr, arg)\n\n return extract_field_formatter\n\n\n# STATS\n\n\ndef _corr(translator, expr):\n # pull out the arguments to the expression\n args = expr.op().args\n\n x, y, how, where = args\n\n # compile the argument\n compiled_x = translator.translate(x)\n compiled_y = translator.translate(y)\n\n return 'CORR({}, {})'.format(compiled_x, compiled_y)\n\n\ndef _cov(translator, expr):\n # pull out the arguments to the expression\n args = expr.op().args\n\n x, y, how, where = args\n\n # compile the argument\n compiled_x = translator.translate(x)\n compiled_y = translator.translate(y)\n\n return 'COVAR_{}({}, {})'.format(how[:4].upper(), compiled_x, compiled_y)\n\n\n# STRING\n\n\ndef _length(func_name='length', sql_func_name='CHAR_LENGTH'):\n def __lenght(translator, expr):\n # pull out the arguments to the expression\n arg = expr.op().args[0]\n # compile the argument\n compiled_arg = translator.translate(arg)\n return '{}({})'.format(sql_func_name, compiled_arg)\n\n __lenght.__name__ = func_name\n return __lenght\n\n\ndef _contains(translator, expr):\n arg, pattern = expr.op().args[:2]\n\n pattern_ = '%{}%'.format(translator.translate(pattern)[1:-1])\n\n return _parenthesize(translator, arg.like(pattern_).ifelse(1, -1))\n\n\n# GENERIC\n\n\ndef _value_list(translator, expr):\n op = expr.op()\n values_ = map(translator.translate, op.values)\n return '({0})'.format(', '.join(values_))\n\n\ndef _interval_format(translator, expr):\n dtype = expr.type()\n if dtype.unit in {'ms', 'us', 'ns'}:\n raise com.UnsupportedOperationError(\n \"MapD doesn't support subsecond interval resolutions\"\n )\n\n return '{1}, (sign){0}'.format(expr.op().value, 
dtype.resolution.upper())\n\n\ndef _interval_from_integer(translator, expr):\n op = expr.op()\n arg, unit = op.args\n\n dtype = expr.type()\n if dtype.unit in {'ms', 'us', 'ns'}:\n raise com.UnsupportedOperationError(\n \"MapD doesn't support subsecond interval resolutions\"\n )\n\n arg_ = translator.translate(arg)\n return '{}, (sign){}'.format(dtype.resolution.upper(), arg_)\n\n\ndef _timestamp_op(func, op_sign='+'):\n def _formatter(translator, expr):\n op = expr.op()\n left, right = op.args\n\n formatted_left = translator.translate(left)\n formatted_right = translator.translate(right)\n\n if isinstance(left, ir.DateValue):\n formatted_left = 'CAST({} as timestamp)'.format(formatted_left)\n\n return '{}({}, {})'.format(\n func, formatted_right.replace('(sign)', op_sign), formatted_left\n )\n\n return _formatter\n\n\ndef _set_literal_format(translator, expr):\n value_type = expr.type().value_type\n\n formatted = [\n translator.translate(ir.literal(x, type=value_type))\n for x in expr.op().value\n ]\n\n return '({})'.format(', '.join(formatted))\n\n\ndef _cross_join(translator, expr):\n args = expr.op().args\n left, right = args[:2]\n return translator.translate(left.join(right, ibis.literal(True)))\n\n\ndef _format_point_value(value):\n return ' '.join(str(v) for v in value)\n\n\ndef _format_linestring_value(value):\n return ', '.join(\n '{}'.format(_format_point_value(point)) for point in value\n )\n\n\ndef _format_polygon_value(value):\n return ', '.join(\n '({})'.format(_format_linestring_value(line)) for line in value\n )\n\n\ndef _format_multipolygon_value(value):\n return ', '.join(\n '({})'.format(_format_polygon_value(polygon)) for polygon in value\n )\n\n\ndef _format_geo_metadata(op, value):\n value = copy(value)\n srid = op.args[1].srid\n geotype = op.args[1].geotype\n\n if geotype is None or geotype not in ('geometry', 'geography'):\n return \"'{}'\".format(value)\n\n if geotype == 'geography':\n geofunc = 'ST_GeogFromText'\n else:\n geofunc = 'ST_GeomFromText'\n\n return \"{}('{}'{})\".format(\n geofunc, value, ', {}'.format(srid) if srid else ''\n )\n\n\ndef literal(translator, expr):\n op = expr.op()\n value = op.value\n\n # geo spatial data type\n if isinstance(expr, ir.PointScalar):\n result = \"POINT({0})\".format(_format_point_value(value))\n return _format_geo_metadata(op, result)\n elif isinstance(expr, ir.LineStringScalar):\n result = \"LINESTRING({0})\".format(_format_linestring_value(value))\n return _format_geo_metadata(op, result)\n elif isinstance(expr, ir.PolygonScalar):\n result = \"POLYGON({0!s})\".format(_format_polygon_value(value))\n return _format_geo_metadata(op, result)\n elif isinstance(expr, ir.MultiPolygonScalar):\n result = \"MULTIPOLYGON({0})\".format(_format_multipolygon_value(value))\n return _format_geo_metadata(op, result)\n # primitive data type\n elif isinstance(expr, ir.BooleanValue):\n return '1' if value else '0'\n elif isinstance(expr, ir.StringValue):\n return \"'{0!s}'\".format(value.replace(\"'\", \"\\\\'\"))\n elif isinstance(expr, ir.NumericValue):\n return repr(value)\n elif isinstance(expr, ir.SetScalar):\n return _set_literal_format(translator, expr)\n elif isinstance(expr, ir.IntervalValue):\n return _interval_format(translator, expr)\n elif isinstance(expr, ir.TimestampValue):\n if isinstance(value, datetime):\n if value.microsecond != 0:\n msg = 'Unsupported subsecond accuracy {}'\n warnings.warn(msg.format(value))\n value = value.strftime('%Y-%m-%d %H:%M:%S')\n elif isinstance(value, str):\n # check if the datetime format is 
a valid format (\n # '%Y-%m-%d %H:%M:%S' or '%Y-%m-%d'). if format is '%Y-%m-%d' it\n # is converted to '%Y-%m-%d 00:00:00'\n msg = (\n \"Literal datetime string should use '%Y-%m-%d %H:%M:%S' \"\n \"format. When '%Y-%m-%d' format is used, datetime will be \"\n \"converted automatically to '%Y-%m-%d 00:00:00'\"\n )\n\n try:\n dt_value = datetime.strptime(value, '%Y-%m-%d %H:%M:%S')\n except ValueError:\n try:\n dt_value = datetime.strptime(value, '%Y-%m-%d')\n warnings.warn(msg)\n except ValueError:\n raise Exception(msg)\n\n value = dt_value.strftime('%Y-%m-%d %H:%M:%S')\n\n return \"'{0!s}'\".format(value)\n elif isinstance(expr, ir.DateValue):\n if isinstance(value, date):\n value = value.strftime('%Y-%m-%d')\n return \"toDate('{0!s}')\".format(value)\n # array data type\n elif isinstance(expr, ir.ArrayValue):\n return str(list(value))\n else:\n raise NotImplementedError(type(expr))\n\n\ndef _where(translator, expr):\n # pull out the arguments to the expression\n args = expr.op().args\n condition, expr1, expr2 = args\n expr = condition.ifelse(expr1, expr2)\n return translator.translate(expr)\n\n\ndef raise_unsupported_expr_error(expr):\n msg = \"MapD backend doesn't support {} operation!\"\n op = expr.op()\n raise com.UnsupportedOperationError(msg.format(type(op)))\n\n\ndef raise_unsupported_op_error(translator, expr, *args):\n msg = \"MapD backend doesn't support {} operation!\"\n op = expr.op()\n raise com.UnsupportedOperationError(msg.format(type(op)))\n\n\n# translator\ndef _name_expr(formatted_expr, quoted_name):\n return '{} AS {}'.format(formatted_expr, quote_identifier(quoted_name))\n\n\nclass CaseFormatter:\n def __init__(self, translator, base, cases, results, default):\n self.translator = translator\n self.base = base\n self.cases = cases\n self.results = results\n self.default = default\n\n # HACK\n self.indent = 2\n self.multiline = len(cases) > 1\n self.buf = StringIO()\n\n def _trans(self, expr):\n return self.translator.translate(expr)\n\n def get_result(self):\n \"\"\"\n\n :return:\n \"\"\"\n self.buf.seek(0)\n\n self.buf.write('CASE')\n if self.base is not None:\n base_str = self._trans(self.base)\n self.buf.write(' {0}'.format(base_str))\n\n for case, result in zip(self.cases, self.results):\n self._next_case()\n case_str = self._trans(case)\n result_str = self._trans(result)\n self.buf.write('WHEN {0} THEN {1}'.format(case_str, result_str))\n\n if self.default is not None:\n self._next_case()\n default_str = self._trans(self.default)\n self.buf.write('ELSE {0}'.format(default_str))\n\n if self.multiline:\n self.buf.write('\\nEND')\n else:\n self.buf.write(' END')\n\n return self.buf.getvalue()\n\n def _next_case(self):\n if self.multiline:\n self.buf.write('\\n{0}'.format(' ' * self.indent))\n else:\n self.buf.write(' ')\n\n\ndef _table_array_view(translator, expr):\n ctx = translator.context\n table = expr.op().table\n query = ctx.get_compiled_expr(table)\n return '(\\n{0}\\n)'.format(util.indent(query, ctx.indent))\n\n\ndef _timestamp_truncate(translator, expr):\n op = expr.op()\n arg, unit = op.args\n\n unit_ = dt.Interval(unit=unit).resolution.upper()\n\n # return _call_date_trunc(translator, converter, arg)\n arg_ = translator.translate(arg)\n return 'DATE_TRUNC({0!s}, {1!s})'.format(unit_, arg_)\n\n\ndef _table_column(translator, expr):\n op = expr.op()\n field_name = op.name\n\n quoted_name = quote_identifier(field_name, force=True)\n\n table = op.table\n ctx = translator.context\n\n # If the column does not originate from the table set in the current SELECT\n 
# context, we should format as a subquery\n if translator.permit_subquery and ctx.is_foreign_expr(table):\n proj_expr = table.projection([field_name]).to_array()\n return _table_array_view(translator, proj_expr)\n\n if ctx.need_aliases():\n alias = ctx.get_ref(table)\n if alias is not None:\n quoted_name = '{}.{}'.format(alias, quoted_name)\n\n return quoted_name\n\n\n# AGGREGATION\n\napprox_count_distinct = _reduction(\n 'approx_nunique',\n sql_func_name='approx_count_distinct',\n sql_signature='{}({}, 100)',\n)\n\ncount_distinct = _reduction('count')\ncount = _reduction('count')\n\n\ndef _arbitrary(translator, expr):\n arg, how, where = expr.op().args\n\n if how not in (None, 'last'):\n raise com.UnsupportedOperationError(\n '{!r} value not supported for arbitrary in MapD'.format(how)\n )\n\n if where is not None:\n arg = where.ifelse(arg, ibis.NA)\n\n return 'SAMPLE({})'.format(translator.translate(arg))\n\n\n# MATH\n\n\nclass NumericTruncate(ops.NumericBinaryOp):\n \"\"\"Truncates x to y decimal places\"\"\"\n\n output_type = rlz.shape_like('left', ops.dt.float)\n\n\n# GEOMETRIC\n\n\nclass Conv_4326_900913_X(ops.UnaryOp):\n \"\"\"\n Converts WGS-84 latitude to WGS-84 Web Mercator x coordinate.\n \"\"\"\n\n output_type = rlz.shape_like('arg', ops.dt.float)\n\n\nclass Conv_4326_900913_Y(ops.UnaryOp):\n \"\"\"\n Converts WGS-84 longitude to WGS-84 Web Mercator y coordinate.\n\n \"\"\"\n\n output_type = rlz.shape_like('arg', ops.dt.float)\n\n\n# String\n\n\nclass ByteLength(ops.StringLength):\n \"\"\"Returns the length of a string in bytes length\"\"\"\n\n\n# https://www.mapd.com/docs/latest/mapd-core-guide/dml/\n_binary_infix_ops = {\n # math\n ops.Power: fixed_arity('power', 2),\n ops.NotEquals: impala_compiler._binary_infix_op('<>'),\n}\n\n_unary_ops = {}\n\n# COMPARISON\n_comparison_ops = {}\n\n\n# MATH\n_math_ops = {\n ops.Degrees: unary('degrees'), # MapD function\n ops.Modulus: fixed_arity('mod', 2),\n ops.Pi: fixed_arity('pi', 0),\n ops.Radians: unary('radians'),\n NumericTruncate: fixed_arity('truncate', 2),\n}\n\n# STATS\n_stats_ops = {\n ops.Correlation: _corr,\n ops.StandardDev: _variance_like('stddev'),\n ops.Variance: _variance_like('var'),\n ops.Covariance: _cov,\n}\n\n# TRIGONOMETRIC\n_trigonometric_ops = {\n ops.Acos: unary('acos'),\n ops.Asin: unary('asin'),\n ops.Atan: unary('atan'),\n ops.Atan2: fixed_arity('atan2', 2),\n ops.Cos: unary('cos'),\n ops.Cot: unary('cot'),\n ops.Sin: unary('sin'),\n ops.Tan: unary('tan'),\n}\n\n# GEOMETRIC\n_geometric_ops = {\n Conv_4326_900913_X: unary('conv_4326_900913_x'),\n Conv_4326_900913_Y: unary('conv_4326_900913_y'),\n}\n\n# GEO SPATIAL\n_geospatial_ops = {\n ops.GeoArea: unary('ST_AREA'),\n ops.GeoContains: fixed_arity('ST_CONTAINS', 2),\n ops.GeoDistance: fixed_arity('ST_DISTANCE', 2),\n ops.GeoLength: unary('ST_LENGTH'),\n ops.GeoPerimeter: unary('ST_PERIMETER'),\n ops.GeoMaxDistance: fixed_arity('ST_MAXDISTANCE', 2),\n ops.GeoX: unary('ST_X'),\n ops.GeoY: unary('ST_Y'),\n ops.GeoXMin: unary('ST_XMIN'),\n ops.GeoXMax: unary('ST_XMAX'),\n ops.GeoYMin: unary('ST_YMIN'),\n ops.GeoYMax: unary('ST_YMAX'),\n ops.GeoStartPoint: unary('ST_STARTPOINT'),\n ops.GeoEndPoint: unary('ST_ENDPOINT'),\n ops.GeoPointN: fixed_arity('ST_POINTN', 2),\n ops.GeoNPoints: unary('ST_NPOINTS'),\n ops.GeoNRings: unary('ST_NRINGS'),\n ops.GeoSRID: unary('ST_SRID'),\n}\n\n# STRING\n_string_ops = {\n ops.StringLength: _length(),\n ByteLength: _length('byte_length', 'LENGTH'),\n ops.StringSQLILike: binary_infix_op('ilike'),\n ops.StringFind: _contains,\n}\n\n# 
DATE\n_date_ops = {\n ops.DateTruncate: _timestamp_truncate,\n ops.TimestampTruncate: _timestamp_truncate,\n # DIRECT EXTRACT OPERATIONS\n ops.ExtractYear: _extract_field('YEAR'),\n ops.ExtractMonth: _extract_field('MONTH'),\n ops.ExtractDay: _extract_field('DAY'),\n ops.ExtractHour: _extract_field('HOUR'),\n ops.ExtractMinute: _extract_field('MINUTE'),\n ops.ExtractSecond: _extract_field('SECOND'),\n ops.IntervalAdd: _interval_from_integer,\n ops.IntervalFromInteger: _interval_from_integer,\n ops.DateAdd: _timestamp_op('TIMESTAMPADD'),\n ops.DateSub: _timestamp_op('TIMESTAMPADD', '-'),\n ops.TimestampAdd: _timestamp_op('TIMESTAMPADD'),\n ops.TimestampSub: _timestamp_op('TIMESTAMPADD', '-'),\n}\n\n# AGGREGATION/REDUCTION\n_agg_ops = {\n ops.HLLCardinality: approx_count_distinct,\n ops.DistinctColumn: unary_prefix_op('distinct'),\n ops.Arbitrary: _arbitrary,\n}\n\n# GENERAL\n_general_ops = {\n ops.Literal: literal,\n ops.ValueList: _value_list,\n ops.Cast: _cast,\n ops.Where: _where,\n ops.TableColumn: _table_column,\n ops.CrossJoin: _cross_join,\n}\n\n# UNSUPPORTED OPERATIONS\n_unsupported_ops = [\n # generic/aggregation\n ops.CMSMedian,\n ops.WindowOp,\n ops.DecimalPrecision,\n ops.DecimalScale,\n ops.BaseConvert,\n ops.CumulativeSum,\n ops.CumulativeMin,\n ops.CumulativeMax,\n ops.CumulativeMean,\n ops.CumulativeAny,\n ops.CumulativeAll,\n ops.IdenticalTo,\n ops.RowNumber,\n ops.DenseRank,\n ops.MinRank,\n ops.PercentRank,\n ops.FirstValue,\n ops.LastValue,\n ops.NthValue,\n ops.Lag,\n ops.Lead,\n ops.NTile,\n ops.GroupConcat,\n ops.NullIf,\n ops.NullIfZero,\n ops.NullLiteral,\n ops.IsInf,\n ops.IsNan,\n ops.IfNull,\n # string\n ops.Lowercase,\n ops.Uppercase,\n ops.FindInSet,\n ops.StringReplace,\n ops.StringJoin,\n ops.StringSplit,\n ops.Translate,\n ops.StringAscii,\n ops.LPad,\n ops.RPad,\n ops.Strip,\n ops.RStrip,\n ops.LStrip,\n ops.Capitalize,\n ops.Substring,\n ops.StrRight,\n ops.Repeat,\n ops.Reverse,\n ops.RegexExtract,\n ops.RegexReplace,\n ops.ParseURL,\n # Numeric\n ops.Least,\n ops.Greatest,\n ops.Log2,\n ops.Log,\n ops.Round,\n # date/time/timestamp\n ops.TimestampFromUNIX,\n ops.Date,\n ops.TimeTruncate,\n ops.TimestampDiff,\n ops.DayOfWeekIndex,\n ops.DayOfWeekName,\n # table\n ops.Union,\n]\n\n_unsupported_ops = {k: raise_unsupported_op_error for k in _unsupported_ops}\n\n# registry\n_operation_registry = impala_compiler._operation_registry.copy()\n\n_operation_registry.update(_general_ops)\n_operation_registry.update(_binary_infix_ops)\n_operation_registry.update(_unary_ops)\n_operation_registry.update(_comparison_ops)\n_operation_registry.update(_math_ops)\n_operation_registry.update(_stats_ops)\n_operation_registry.update(_trigonometric_ops)\n_operation_registry.update(_geometric_ops)\n_operation_registry.update(_string_ops)\n_operation_registry.update(_date_ops)\n_operation_registry.update(_agg_ops)\n_operation_registry.update(_geospatial_ops)\n# the last update should be with unsupported ops\n_operation_registry.update(_unsupported_ops)\n", "path": "ibis/mapd/operations.py"}], "after_files": [{"content": "import warnings\nfrom copy import copy\nfrom datetime import date, datetime\nfrom io import StringIO\n\nimport ibis\nimport ibis.common as com\nimport ibis.expr.datatypes as dt\nimport ibis.expr.operations as ops\nimport ibis.expr.rules as rlz\nimport ibis.expr.types as ir\nimport ibis.util as util\nfrom ibis.impala import compiler as impala_compiler\nfrom ibis.mapd.identifiers import quote_identifier\n\n_sql_type_names = {\n 'boolean': 'boolean',\n 'date': 
'date',\n 'decimal': 'decimal',\n 'double': 'double',\n 'float32': 'float',\n 'float64': 'double',\n 'int8': 'smallint',\n 'int16': 'smallint',\n 'int32': 'int',\n 'int64': 'bigint',\n 'linestring': 'linestring',\n 'multipolygon': 'multipolygon',\n 'point': 'point',\n 'polygon': 'polygon',\n 'string': 'text',\n 'time': 'time',\n 'timestamp': 'timestamp',\n}\n\n\ndef _is_floating(*args):\n for arg in args:\n if isinstance(arg, ir.FloatingColumn):\n return True\n return False\n\n\ndef _type_to_sql_string(tval):\n if isinstance(tval, dt.Decimal):\n return 'decimal({}, {})'.format(tval.precision, tval.scale)\n else:\n return _sql_type_names[tval.name.lower()]\n\n\ndef _cast(translator, expr):\n from ibis.mapd.client import MapDDataType\n\n op = expr.op()\n arg, target = op.args\n arg_ = translator.translate(arg)\n type_ = str(MapDDataType.from_ibis(target, nullable=False))\n\n return 'CAST({0!s} AS {1!s})'.format(arg_, type_)\n\n\ndef _all(expr):\n op = expr.op()\n arg = op.args[0]\n\n if isinstance(arg, ir.BooleanValue):\n arg = arg.ifelse(1, 0)\n\n return (1 - arg).sum() == 0\n\n\ndef _any(expr):\n op = expr.op()\n arg = op.args[0]\n\n if isinstance(arg, ir.BooleanValue):\n arg = arg.ifelse(1, 0)\n\n return arg.sum() >= 0\n\n\ndef _not_any(expr):\n op = expr.op()\n arg = op.args[0]\n\n if isinstance(arg, ir.BooleanValue):\n arg = arg.ifelse(1, 0)\n\n return arg.sum() == 0\n\n\ndef _not_all(expr):\n op = expr.op()\n arg = op.args[0]\n\n if isinstance(arg, ir.BooleanValue):\n arg = arg.ifelse(1, 0)\n\n return (1 - arg).sum() != 0\n\n\ndef _parenthesize(translator, expr):\n op = expr.op()\n op_klass = type(op)\n\n # function calls don't need parens\n what_ = translator.translate(expr)\n if (op_klass in _binary_infix_ops) or (op_klass in _unary_ops):\n return '({0!s})'.format(what_)\n else:\n return what_\n\n\ndef fixed_arity(func_name, arity):\n def formatter(translator, expr):\n op = expr.op()\n arg_count = len(op.args)\n if arity != arg_count:\n msg = 'Incorrect number of args {0} instead of {1}'\n raise com.UnsupportedOperationError(msg.format(arg_count, arity))\n return _call(translator, func_name, *op.args)\n\n formatter.__name__ = func_name\n return formatter\n\n\ndef unary(func_name):\n return fixed_arity(func_name, 1)\n\n\ndef _reduction_format(\n translator,\n func_name,\n sql_func_name=None,\n sql_signature='{}({})',\n arg=None,\n args=None,\n where=None,\n):\n if not sql_func_name:\n sql_func_name = func_name\n\n if where is not None:\n arg = where.ifelse(arg, ibis.NA)\n\n return sql_signature.format(\n sql_func_name, ', '.join(map(translator.translate, [arg] + list(args)))\n )\n\n\ndef _reduction(func_name, sql_func_name=None, sql_signature='{}({})'):\n def formatter(translator, expr):\n op = expr.op()\n\n # HACK: support trailing arguments\n where = op.where\n args = [arg for arg in op.args if arg is not where]\n\n return _reduction_format(\n translator,\n func_name,\n sql_func_name,\n sql_signature,\n args[0],\n args[1:],\n where,\n )\n\n formatter.__name__ = func_name\n return formatter\n\n\ndef _variance_like(func):\n variants = {'sample': '{}_SAMP'.format(func), 'pop': '{}_POP'.format(func)}\n\n def formatter(translator, expr):\n arg, how, where = expr.op().args\n\n return _reduction_format(\n translator, variants[how].upper(), None, '{}({})', arg, [], where\n )\n\n formatter.__name__ = func\n return formatter\n\n\ndef unary_prefix_op(prefix_op):\n def formatter(translator, expr):\n op = expr.op()\n arg = _parenthesize(translator, op.args[0])\n\n return '{0!s} 
{1!s}'.format(prefix_op.upper(), arg)\n\n formatter.__name__ = prefix_op\n return formatter\n\n\ndef binary_infix_op(infix_sym):\n def formatter(translator, expr):\n op = expr.op()\n\n left, right = op.args[0], op.args[1]\n left_ = _parenthesize(translator, left)\n right_ = _parenthesize(translator, right)\n\n return '{0!s} {1!s} {2!s}'.format(left_, infix_sym, right_)\n\n return formatter\n\n\ndef _call(translator, func, *args):\n args_ = ', '.join(map(translator.translate, args))\n return '{0!s}({1!s})'.format(func, args_)\n\n\ndef _extract_field(sql_attr):\n def extract_field_formatter(translator, expr):\n op = expr.op()\n arg = translator.translate(op.args[0])\n return 'EXTRACT({} FROM {})'.format(sql_attr, arg)\n\n return extract_field_formatter\n\n\n# STATS\n\n\ndef _corr(translator, expr):\n # pull out the arguments to the expression\n args = expr.op().args\n\n x, y, how, where = args\n\n # compile the argument\n compiled_x = translator.translate(x)\n compiled_y = translator.translate(y)\n\n return 'CORR({}, {})'.format(compiled_x, compiled_y)\n\n\ndef _cov(translator, expr):\n # pull out the arguments to the expression\n args = expr.op().args\n\n x, y, how, where = args\n\n # compile the argument\n compiled_x = translator.translate(x)\n compiled_y = translator.translate(y)\n\n return 'COVAR_{}({}, {})'.format(how[:4].upper(), compiled_x, compiled_y)\n\n\n# STRING\n\n\ndef _length(func_name='length', sql_func_name='CHAR_LENGTH'):\n def __lenght(translator, expr):\n # pull out the arguments to the expression\n arg = expr.op().args[0]\n # compile the argument\n compiled_arg = translator.translate(arg)\n return '{}({})'.format(sql_func_name, compiled_arg)\n\n __lenght.__name__ = func_name\n return __lenght\n\n\ndef _contains(translator, expr):\n arg, pattern = expr.op().args[:2]\n\n pattern_ = '%{}%'.format(translator.translate(pattern)[1:-1])\n\n return _parenthesize(translator, arg.like(pattern_).ifelse(1, -1))\n\n\n# GENERIC\n\n\ndef _value_list(translator, expr):\n op = expr.op()\n values_ = map(translator.translate, op.values)\n return '({0})'.format(', '.join(values_))\n\n\ndef _interval_format(translator, expr):\n dtype = expr.type()\n if dtype.unit in {'ms', 'us', 'ns'}:\n raise com.UnsupportedOperationError(\n \"MapD doesn't support subsecond interval resolutions\"\n )\n\n return '{1}, (sign){0}'.format(expr.op().value, dtype.resolution.upper())\n\n\ndef _interval_from_integer(translator, expr):\n op = expr.op()\n arg, unit = op.args\n\n dtype = expr.type()\n if dtype.unit in {'ms', 'us', 'ns'}:\n raise com.UnsupportedOperationError(\n \"MapD doesn't support subsecond interval resolutions\"\n )\n\n arg_ = translator.translate(arg)\n return '{}, (sign){}'.format(dtype.resolution.upper(), arg_)\n\n\ndef _timestamp_op(func, op_sign='+'):\n def _formatter(translator, expr):\n op = expr.op()\n left, right = op.args\n\n formatted_left = translator.translate(left)\n formatted_right = translator.translate(right)\n\n if isinstance(left, ir.DateValue):\n formatted_left = 'CAST({} as timestamp)'.format(formatted_left)\n\n return '{}({}, {})'.format(\n func, formatted_right.replace('(sign)', op_sign), formatted_left\n )\n\n return _formatter\n\n\ndef _set_literal_format(translator, expr):\n value_type = expr.type().value_type\n\n formatted = [\n translator.translate(ir.literal(x, type=value_type))\n for x in expr.op().value\n ]\n\n return '({})'.format(', '.join(formatted))\n\n\ndef _cross_join(translator, expr):\n args = expr.op().args\n left, right = args[:2]\n return 
translator.translate(left.join(right, ibis.literal(True)))\n\n\ndef _format_point_value(value):\n return ' '.join(str(v) for v in value)\n\n\ndef _format_linestring_value(value):\n return ', '.join(\n '{}'.format(_format_point_value(point)) for point in value\n )\n\n\ndef _format_polygon_value(value):\n return ', '.join(\n '({})'.format(_format_linestring_value(line)) for line in value\n )\n\n\ndef _format_multipolygon_value(value):\n return ', '.join(\n '({})'.format(_format_polygon_value(polygon)) for polygon in value\n )\n\n\ndef _format_geo_metadata(op, value):\n value = copy(value)\n srid = op.args[1].srid\n geotype = op.args[1].geotype\n\n if geotype is None or geotype not in ('geometry', 'geography'):\n return \"'{}'\".format(value)\n\n if geotype == 'geography':\n geofunc = 'ST_GeogFromText'\n else:\n geofunc = 'ST_GeomFromText'\n\n return \"{}('{}'{})\".format(\n geofunc, value, ', {}'.format(srid) if srid else ''\n )\n\n\ndef literal(translator, expr):\n op = expr.op()\n value = op.value\n\n # geo spatial data type\n if isinstance(expr, ir.PointScalar):\n result = \"POINT({0})\".format(_format_point_value(value))\n return _format_geo_metadata(op, result)\n elif isinstance(expr, ir.LineStringScalar):\n result = \"LINESTRING({0})\".format(_format_linestring_value(value))\n return _format_geo_metadata(op, result)\n elif isinstance(expr, ir.PolygonScalar):\n result = \"POLYGON({0!s})\".format(_format_polygon_value(value))\n return _format_geo_metadata(op, result)\n elif isinstance(expr, ir.MultiPolygonScalar):\n result = \"MULTIPOLYGON({0})\".format(_format_multipolygon_value(value))\n return _format_geo_metadata(op, result)\n # primitive data type\n elif isinstance(expr, ir.BooleanValue):\n return '1' if value else '0'\n elif isinstance(expr, ir.StringValue):\n return \"'{0!s}'\".format(value.replace(\"'\", \"\\\\'\"))\n elif isinstance(expr, ir.NumericValue):\n return repr(value)\n elif isinstance(expr, ir.SetScalar):\n return _set_literal_format(translator, expr)\n elif isinstance(expr, ir.IntervalValue):\n return _interval_format(translator, expr)\n elif isinstance(expr, ir.TimestampValue):\n if isinstance(value, datetime):\n if value.microsecond != 0:\n msg = 'Unsupported subsecond accuracy {}'\n warnings.warn(msg.format(value))\n value = value.strftime('%Y-%m-%d %H:%M:%S')\n elif isinstance(value, str):\n # check if the datetime format is a valid format (\n # '%Y-%m-%d %H:%M:%S' or '%Y-%m-%d'). if format is '%Y-%m-%d' it\n # is converted to '%Y-%m-%d 00:00:00'\n msg = (\n \"Literal datetime string should use '%Y-%m-%d %H:%M:%S' \"\n \"format. 
When '%Y-%m-%d' format is used, datetime will be \"\n \"converted automatically to '%Y-%m-%d 00:00:00'\"\n )\n\n try:\n dt_value = datetime.strptime(value, '%Y-%m-%d %H:%M:%S')\n except ValueError:\n try:\n dt_value = datetime.strptime(value, '%Y-%m-%d')\n warnings.warn(msg)\n except ValueError:\n raise Exception(msg)\n\n value = dt_value.strftime('%Y-%m-%d %H:%M:%S')\n\n return \"'{0!s}'\".format(value)\n elif isinstance(expr, ir.DateValue):\n if isinstance(value, date):\n value = value.strftime('%Y-%m-%d')\n return \"toDate('{0!s}')\".format(value)\n # array data type\n elif isinstance(expr, ir.ArrayValue):\n return str(list(value))\n else:\n raise NotImplementedError(type(expr))\n\n\ndef _where(translator, expr):\n # pull out the arguments to the expression\n args = expr.op().args\n condition, expr1, expr2 = args\n expr = condition.ifelse(expr1, expr2)\n return translator.translate(expr)\n\n\ndef raise_unsupported_expr_error(expr):\n msg = \"MapD backend doesn't support {} operation!\"\n op = expr.op()\n raise com.UnsupportedOperationError(msg.format(type(op)))\n\n\ndef raise_unsupported_op_error(translator, expr, *args):\n msg = \"MapD backend doesn't support {} operation!\"\n op = expr.op()\n raise com.UnsupportedOperationError(msg.format(type(op)))\n\n\n# translator\ndef _name_expr(formatted_expr, quoted_name):\n return '{} AS {}'.format(formatted_expr, quote_identifier(quoted_name))\n\n\nclass CaseFormatter:\n def __init__(self, translator, base, cases, results, default):\n self.translator = translator\n self.base = base\n self.cases = cases\n self.results = results\n self.default = default\n\n # HACK\n self.indent = 2\n self.multiline = len(cases) > 1\n self.buf = StringIO()\n\n def _trans(self, expr):\n return self.translator.translate(expr)\n\n def get_result(self):\n \"\"\"\n\n :return:\n \"\"\"\n self.buf.seek(0)\n\n self.buf.write('CASE')\n if self.base is not None:\n base_str = self._trans(self.base)\n self.buf.write(' {0}'.format(base_str))\n\n for case, result in zip(self.cases, self.results):\n self._next_case()\n case_str = self._trans(case)\n result_str = self._trans(result)\n self.buf.write('WHEN {0} THEN {1}'.format(case_str, result_str))\n\n if self.default is not None:\n self._next_case()\n default_str = self._trans(self.default)\n self.buf.write('ELSE {0}'.format(default_str))\n\n if self.multiline:\n self.buf.write('\\nEND')\n else:\n self.buf.write(' END')\n\n return self.buf.getvalue()\n\n def _next_case(self):\n if self.multiline:\n self.buf.write('\\n{0}'.format(' ' * self.indent))\n else:\n self.buf.write(' ')\n\n\ndef _table_array_view(translator, expr):\n ctx = translator.context\n table = expr.op().table\n query = ctx.get_compiled_expr(table)\n return '(\\n{0}\\n)'.format(util.indent(query, ctx.indent))\n\n\ndef _timestamp_truncate(translator, expr):\n op = expr.op()\n arg, unit = op.args\n\n unit_ = dt.Interval(unit=unit).resolution.upper()\n\n # return _call_date_trunc(translator, converter, arg)\n arg_ = translator.translate(arg)\n return 'DATE_TRUNC({0!s}, {1!s})'.format(unit_, arg_)\n\n\ndef _table_column(translator, expr):\n op = expr.op()\n field_name = op.name\n\n quoted_name = quote_identifier(field_name, force=True)\n\n table = op.table\n ctx = translator.context\n\n # If the column does not originate from the table set in the current SELECT\n # context, we should format as a subquery\n if translator.permit_subquery and ctx.is_foreign_expr(table):\n proj_expr = table.projection([field_name]).to_array()\n return _table_array_view(translator, 
proj_expr)\n\n if ctx.need_aliases():\n alias = ctx.get_ref(table)\n if alias is not None:\n quoted_name = '{}.{}'.format(alias, quoted_name)\n\n return quoted_name\n\n\n# AGGREGATION\n\napprox_count_distinct = _reduction(\n 'approx_nunique',\n sql_func_name='approx_count_distinct',\n sql_signature='{}({}, 100)',\n)\n\ncount_distinct = _reduction('count')\ncount = _reduction('count')\n\n\ndef _arbitrary(translator, expr):\n arg, how, where = expr.op().args\n\n if how not in (None, 'last'):\n raise com.UnsupportedOperationError(\n '{!r} value not supported for arbitrary in MapD'.format(how)\n )\n\n if where is not None:\n arg = where.ifelse(arg, ibis.NA)\n\n return 'SAMPLE({})'.format(translator.translate(arg))\n\n\n# MATH\n\n\nclass NumericTruncate(ops.NumericBinaryOp):\n \"\"\"Truncates x to y decimal places\"\"\"\n\n output_type = rlz.shape_like('left', ops.dt.float)\n\n\n# GEOMETRIC\n\n\nclass Conv_4326_900913_X(ops.UnaryOp):\n \"\"\"\n Converts WGS-84 latitude to WGS-84 Web Mercator x coordinate.\n \"\"\"\n\n output_type = rlz.shape_like('arg', ops.dt.float)\n\n\nclass Conv_4326_900913_Y(ops.UnaryOp):\n \"\"\"\n Converts WGS-84 longitude to WGS-84 Web Mercator y coordinate.\n\n \"\"\"\n\n output_type = rlz.shape_like('arg', ops.dt.float)\n\n\n# String\n\n\nclass ByteLength(ops.StringLength):\n \"\"\"Returns the length of a string in bytes length\"\"\"\n\n\n# https://www.mapd.com/docs/latest/mapd-core-guide/dml/\n_binary_infix_ops = {\n # math\n ops.Power: fixed_arity('power', 2),\n ops.NotEquals: impala_compiler._binary_infix_op('<>'),\n}\n\n_unary_ops = {}\n\n# COMPARISON\n_comparison_ops = {}\n\n\n# MATH\n_math_ops = {\n ops.Degrees: unary('degrees'), # MapD function\n ops.Modulus: fixed_arity('mod', 2),\n ops.Pi: fixed_arity('pi', 0),\n ops.Radians: unary('radians'),\n NumericTruncate: fixed_arity('truncate', 2),\n}\n\n# STATS\n_stats_ops = {\n ops.Correlation: _corr,\n ops.StandardDev: _variance_like('stddev'),\n ops.Variance: _variance_like('var'),\n ops.Covariance: _cov,\n}\n\n# TRIGONOMETRIC\n_trigonometric_ops = {\n ops.Acos: unary('acos'),\n ops.Asin: unary('asin'),\n ops.Atan: unary('atan'),\n ops.Atan2: fixed_arity('atan2', 2),\n ops.Cos: unary('cos'),\n ops.Cot: unary('cot'),\n ops.Sin: unary('sin'),\n ops.Tan: unary('tan'),\n}\n\n# GEOMETRIC\n_geometric_ops = {\n Conv_4326_900913_X: unary('conv_4326_900913_x'),\n Conv_4326_900913_Y: unary('conv_4326_900913_y'),\n}\n\n# GEO SPATIAL\n_geospatial_ops = {\n ops.GeoArea: unary('ST_AREA'),\n ops.GeoContains: fixed_arity('ST_CONTAINS', 2),\n ops.GeoDistance: fixed_arity('ST_DISTANCE', 2),\n ops.GeoLength: unary('ST_LENGTH'),\n ops.GeoPerimeter: unary('ST_PERIMETER'),\n ops.GeoMaxDistance: fixed_arity('ST_MAXDISTANCE', 2),\n ops.GeoX: unary('ST_X'),\n ops.GeoY: unary('ST_Y'),\n ops.GeoXMin: unary('ST_XMIN'),\n ops.GeoXMax: unary('ST_XMAX'),\n ops.GeoYMin: unary('ST_YMIN'),\n ops.GeoYMax: unary('ST_YMAX'),\n ops.GeoStartPoint: unary('ST_STARTPOINT'),\n ops.GeoEndPoint: unary('ST_ENDPOINT'),\n ops.GeoPointN: fixed_arity('ST_POINTN', 2),\n ops.GeoNPoints: unary('ST_NPOINTS'),\n ops.GeoNRings: unary('ST_NRINGS'),\n ops.GeoSRID: unary('ST_SRID'),\n}\n\n# STRING\n_string_ops = {\n ops.StringLength: _length(),\n ByteLength: _length('byte_length', 'LENGTH'),\n ops.StringSQLILike: binary_infix_op('ilike'),\n ops.StringFind: _contains,\n}\n\n# DATE\n_date_ops = {\n ops.DateTruncate: _timestamp_truncate,\n ops.TimestampTruncate: _timestamp_truncate,\n # DIRECT EXTRACT OPERATIONS\n ops.ExtractYear: _extract_field('YEAR'),\n ops.ExtractMonth: 
_extract_field('MONTH'),\n ops.ExtractDay: _extract_field('DAY'),\n ops.ExtractHour: _extract_field('HOUR'),\n ops.ExtractMinute: _extract_field('MINUTE'),\n ops.ExtractSecond: _extract_field('SECOND'),\n ops.IntervalAdd: _interval_from_integer,\n ops.IntervalFromInteger: _interval_from_integer,\n ops.DateAdd: _timestamp_op('TIMESTAMPADD'),\n ops.DateSub: _timestamp_op('TIMESTAMPADD', '-'),\n ops.TimestampAdd: _timestamp_op('TIMESTAMPADD'),\n ops.TimestampSub: _timestamp_op('TIMESTAMPADD', '-'),\n}\n\n# AGGREGATION/REDUCTION\n_agg_ops = {\n ops.HLLCardinality: approx_count_distinct,\n ops.DistinctColumn: unary_prefix_op('distinct'),\n ops.Arbitrary: _arbitrary,\n}\n\n# GENERAL\n_general_ops = {\n ops.Literal: literal,\n ops.ValueList: _value_list,\n ops.Cast: _cast,\n ops.Where: _where,\n ops.TableColumn: _table_column,\n ops.CrossJoin: _cross_join,\n}\n\n# UNSUPPORTED OPERATIONS\n_unsupported_ops = [\n # generic/aggregation\n ops.CMSMedian,\n ops.WindowOp,\n ops.DecimalPrecision,\n ops.DecimalScale,\n ops.BaseConvert,\n ops.CumulativeSum,\n ops.CumulativeMin,\n ops.CumulativeMax,\n ops.CumulativeMean,\n ops.CumulativeAny,\n ops.CumulativeAll,\n ops.IdenticalTo,\n ops.RowNumber,\n ops.DenseRank,\n ops.MinRank,\n ops.PercentRank,\n ops.FirstValue,\n ops.LastValue,\n ops.NthValue,\n ops.Lag,\n ops.Lead,\n ops.NTile,\n ops.GroupConcat,\n ops.NullIf,\n ops.NullIfZero,\n ops.NullLiteral,\n ops.IsInf,\n ops.IsNan,\n ops.IfNull,\n # string\n ops.Lowercase,\n ops.Uppercase,\n ops.FindInSet,\n ops.StringReplace,\n ops.StringJoin,\n ops.StringSplit,\n ops.StringToTimestamp,\n ops.Translate,\n ops.StringAscii,\n ops.LPad,\n ops.RPad,\n ops.Strip,\n ops.RStrip,\n ops.LStrip,\n ops.Capitalize,\n ops.Substring,\n ops.StrRight,\n ops.Repeat,\n ops.Reverse,\n ops.RegexExtract,\n ops.RegexReplace,\n ops.ParseURL,\n # Numeric\n ops.Least,\n ops.Greatest,\n ops.Log2,\n ops.Log,\n ops.Round,\n # date/time/timestamp\n ops.TimestampFromUNIX,\n ops.Date,\n ops.TimeTruncate,\n ops.TimestampDiff,\n ops.DayOfWeekIndex,\n ops.DayOfWeekName,\n # table\n ops.Union,\n]\n\n_unsupported_ops = {k: raise_unsupported_op_error for k in _unsupported_ops}\n\n# registry\n_operation_registry = impala_compiler._operation_registry.copy()\n\n_operation_registry.update(_general_ops)\n_operation_registry.update(_binary_infix_ops)\n_operation_registry.update(_unary_ops)\n_operation_registry.update(_comparison_ops)\n_operation_registry.update(_math_ops)\n_operation_registry.update(_stats_ops)\n_operation_registry.update(_trigonometric_ops)\n_operation_registry.update(_geometric_ops)\n_operation_registry.update(_string_ops)\n_operation_registry.update(_date_ops)\n_operation_registry.update(_agg_ops)\n_operation_registry.update(_geospatial_ops)\n# the last update should be with unsupported ops\n_operation_registry.update(_unsupported_ops)\n", "path": "ibis/mapd/operations.py"}]} |
gh_patches_debug_124 | rasdani/github-patches | git_diff | bookwyrm-social__bookwyrm-550 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
About page requires login
**Describe the bug**
Accessing the "About this server" link (https://bookwyrm.social/about) redirects to login
**To Reproduce**
Steps to reproduce the behavior:
1. Go to https://bookwyrm.social/about
2. redirected to login instead of seeing an about page (the URL is login/?next=/about)
**Expected behavior**
Access to information about this site / server
**Desktop (please complete the following information):**
- OS: linux
- Browser firefox
- Version 85 (developer edition)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bookwyrm/views/landing.py`
Content:
```
1 ''' non-interactive pages '''
2 from django.contrib.auth.decorators import login_required
3 from django.core.paginator import Paginator
4 from django.db.models import Avg, Max
5 from django.template.response import TemplateResponse
6 from django.utils import timezone
7 from django.utils.decorators import method_decorator
8 from django.views import View
9
10 from bookwyrm import forms, models
11 from bookwyrm.settings import PAGE_LENGTH
12 from .helpers import get_activity_feed
13
14
15 # pylint: disable= no-self-use
16 @method_decorator(login_required, name='dispatch')
17 class About(View):
18 ''' create invites '''
19 def get(self, request):
20 ''' more information about the instance '''
21 data = {
22 'title': 'About',
23 }
24 return TemplateResponse(request, 'about.html', data)
25
26 class Home(View):
27 ''' discover page or home feed depending on auth '''
28 def get(self, request):
29 ''' this is the same as the feed on the home tab '''
30 if request.user.is_authenticated:
31 feed_view = Feed.as_view()
32 return feed_view(request, 'home')
33 discover_view = Discover.as_view()
34 return discover_view(request)
35
36 class Discover(View):
37 ''' preview of recently reviewed books '''
38 def get(self, request):
39 ''' tiled book activity page '''
40 books = models.Edition.objects.filter(
41 review__published_date__isnull=False,
42 review__user__local=True,
43 review__privacy__in=['public', 'unlisted'],
44 ).exclude(
45 cover__exact=''
46 ).annotate(
47 Max('review__published_date')
48 ).order_by('-review__published_date__max')[:6]
49
50 ratings = {}
51 for book in books:
52 reviews = models.Review.objects.filter(
53 book__in=book.parent_work.editions.all()
54 )
55 reviews = get_activity_feed(
56 request.user, ['public', 'unlisted'], queryset=reviews)
57 ratings[book.id] = reviews.aggregate(Avg('rating'))['rating__avg']
58 data = {
59 'title': 'Discover',
60 'register_form': forms.RegisterForm(),
61 'books': list(set(books)),
62 'ratings': ratings
63 }
64 return TemplateResponse(request, 'discover.html', data)
65
66
67 @method_decorator(login_required, name='dispatch')
68 class Feed(View):
69 ''' activity stream '''
70 def get(self, request, tab):
71 ''' user's homepage with activity feed '''
72 try:
73 page = int(request.GET.get('page', 1))
74 except ValueError:
75 page = 1
76
77 suggested_books = get_suggested_books(request.user)
78
79 if tab == 'home':
80 activities = get_activity_feed(
81 request.user, ['public', 'unlisted', 'followers'],
82 following_only=True)
83 elif tab == 'local':
84 activities = get_activity_feed(
85 request.user, ['public', 'followers'], local_only=True)
86 else:
87 activities = get_activity_feed(
88 request.user, ['public', 'followers'])
89 paginated = Paginator(activities, PAGE_LENGTH)
90
91 goal = models.AnnualGoal.objects.filter(
92 user=request.user, year=timezone.now().year
93 ).first()
94 data = {
95 'title': 'Updates Feed',
96 'user': request.user,
97 'suggested_books': suggested_books,
98 'activities': paginated.page(page),
99 'tab': tab,
100 'goal': goal,
101 'goal_form': forms.GoalForm(),
102 }
103 return TemplateResponse(request, 'feed.html', data)
104
105
106 def get_suggested_books(user, max_books=5):
107 ''' helper to get a user's recent books '''
108 book_count = 0
109 preset_shelves = [
110 ('reading', max_books), ('read', 2), ('to-read', max_books)
111 ]
112 suggested_books = []
113 for (preset, shelf_max) in preset_shelves:
114 limit = shelf_max if shelf_max < (max_books - book_count) \
115 else max_books - book_count
116 shelf = user.shelf_set.get(identifier=preset)
117
118 shelf_books = shelf.shelfbook_set.order_by(
119 '-updated_date'
120 ).all()[:limit]
121 if not shelf_books:
122 continue
123 shelf_preview = {
124 'name': shelf.name,
125 'books': [s.book for s in shelf_books]
126 }
127 suggested_books.append(shelf_preview)
128 book_count += len(shelf_preview['books'])
129 return suggested_books
130
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bookwyrm/views/landing.py b/bookwyrm/views/landing.py
--- a/bookwyrm/views/landing.py
+++ b/bookwyrm/views/landing.py
@@ -13,7 +13,6 @@
# pylint: disable= no-self-use
-@method_decorator(login_required, name='dispatch')
class About(View):
''' create invites '''
def get(self, request):
| {"golden_diff": "diff --git a/bookwyrm/views/landing.py b/bookwyrm/views/landing.py\n--- a/bookwyrm/views/landing.py\n+++ b/bookwyrm/views/landing.py\n@@ -13,7 +13,6 @@\n \n \n # pylint: disable= no-self-use\n-@method_decorator(login_required, name='dispatch')\n class About(View):\n ''' create invites '''\n def get(self, request):\n", "issue": "About page requires login\n**Describe the bug**\r\nAccessing the \"About this server\" link (https://bookwyrm.social/about) redirects to login\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Go to https://bookwyrm.social/about\r\n2. redirected to login instead of seeing an about page (the URL is login/?next=/about)\r\n\r\n**Expected behavior**\r\nAccess to information about this site / server\r\n\r\n**Desktop (please complete the following information):**\r\n - OS: linux\r\n - Browser firefox\r\n - Version 85 (developer edition)\r\n\n", "before_files": [{"content": "''' non-interactive pages '''\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.paginator import Paginator\nfrom django.db.models import Avg, Max\nfrom django.template.response import TemplateResponse\nfrom django.utils import timezone\nfrom django.utils.decorators import method_decorator\nfrom django.views import View\n\nfrom bookwyrm import forms, models\nfrom bookwyrm.settings import PAGE_LENGTH\nfrom .helpers import get_activity_feed\n\n\n# pylint: disable= no-self-use\n@method_decorator(login_required, name='dispatch')\nclass About(View):\n ''' create invites '''\n def get(self, request):\n ''' more information about the instance '''\n data = {\n 'title': 'About',\n }\n return TemplateResponse(request, 'about.html', data)\n\nclass Home(View):\n ''' discover page or home feed depending on auth '''\n def get(self, request):\n ''' this is the same as the feed on the home tab '''\n if request.user.is_authenticated:\n feed_view = Feed.as_view()\n return feed_view(request, 'home')\n discover_view = Discover.as_view()\n return discover_view(request)\n\nclass Discover(View):\n ''' preview of recently reviewed books '''\n def get(self, request):\n ''' tiled book activity page '''\n books = models.Edition.objects.filter(\n review__published_date__isnull=False,\n review__user__local=True,\n review__privacy__in=['public', 'unlisted'],\n ).exclude(\n cover__exact=''\n ).annotate(\n Max('review__published_date')\n ).order_by('-review__published_date__max')[:6]\n\n ratings = {}\n for book in books:\n reviews = models.Review.objects.filter(\n book__in=book.parent_work.editions.all()\n )\n reviews = get_activity_feed(\n request.user, ['public', 'unlisted'], queryset=reviews)\n ratings[book.id] = reviews.aggregate(Avg('rating'))['rating__avg']\n data = {\n 'title': 'Discover',\n 'register_form': forms.RegisterForm(),\n 'books': list(set(books)),\n 'ratings': ratings\n }\n return TemplateResponse(request, 'discover.html', data)\n\n\n@method_decorator(login_required, name='dispatch')\nclass Feed(View):\n ''' activity stream '''\n def get(self, request, tab):\n ''' user's homepage with activity feed '''\n try:\n page = int(request.GET.get('page', 1))\n except ValueError:\n page = 1\n\n suggested_books = get_suggested_books(request.user)\n\n if tab == 'home':\n activities = get_activity_feed(\n request.user, ['public', 'unlisted', 'followers'],\n following_only=True)\n elif tab == 'local':\n activities = get_activity_feed(\n request.user, ['public', 'followers'], local_only=True)\n else:\n activities = get_activity_feed(\n request.user, ['public', 
'followers'])\n paginated = Paginator(activities, PAGE_LENGTH)\n\n goal = models.AnnualGoal.objects.filter(\n user=request.user, year=timezone.now().year\n ).first()\n data = {\n 'title': 'Updates Feed',\n 'user': request.user,\n 'suggested_books': suggested_books,\n 'activities': paginated.page(page),\n 'tab': tab,\n 'goal': goal,\n 'goal_form': forms.GoalForm(),\n }\n return TemplateResponse(request, 'feed.html', data)\n\n\ndef get_suggested_books(user, max_books=5):\n ''' helper to get a user's recent books '''\n book_count = 0\n preset_shelves = [\n ('reading', max_books), ('read', 2), ('to-read', max_books)\n ]\n suggested_books = []\n for (preset, shelf_max) in preset_shelves:\n limit = shelf_max if shelf_max < (max_books - book_count) \\\n else max_books - book_count\n shelf = user.shelf_set.get(identifier=preset)\n\n shelf_books = shelf.shelfbook_set.order_by(\n '-updated_date'\n ).all()[:limit]\n if not shelf_books:\n continue\n shelf_preview = {\n 'name': shelf.name,\n 'books': [s.book for s in shelf_books]\n }\n suggested_books.append(shelf_preview)\n book_count += len(shelf_preview['books'])\n return suggested_books\n", "path": "bookwyrm/views/landing.py"}], "after_files": [{"content": "''' non-interactive pages '''\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.paginator import Paginator\nfrom django.db.models import Avg, Max\nfrom django.template.response import TemplateResponse\nfrom django.utils import timezone\nfrom django.utils.decorators import method_decorator\nfrom django.views import View\n\nfrom bookwyrm import forms, models\nfrom bookwyrm.settings import PAGE_LENGTH\nfrom .helpers import get_activity_feed\n\n\n# pylint: disable= no-self-use\nclass About(View):\n ''' create invites '''\n def get(self, request):\n ''' more information about the instance '''\n data = {\n 'title': 'About',\n }\n return TemplateResponse(request, 'about.html', data)\n\nclass Home(View):\n ''' discover page or home feed depending on auth '''\n def get(self, request):\n ''' this is the same as the feed on the home tab '''\n if request.user.is_authenticated:\n feed_view = Feed.as_view()\n return feed_view(request, 'home')\n discover_view = Discover.as_view()\n return discover_view(request)\n\nclass Discover(View):\n ''' preview of recently reviewed books '''\n def get(self, request):\n ''' tiled book activity page '''\n books = models.Edition.objects.filter(\n review__published_date__isnull=False,\n review__user__local=True,\n review__privacy__in=['public', 'unlisted'],\n ).exclude(\n cover__exact=''\n ).annotate(\n Max('review__published_date')\n ).order_by('-review__published_date__max')[:6]\n\n ratings = {}\n for book in books:\n reviews = models.Review.objects.filter(\n book__in=book.parent_work.editions.all()\n )\n reviews = get_activity_feed(\n request.user, ['public', 'unlisted'], queryset=reviews)\n ratings[book.id] = reviews.aggregate(Avg('rating'))['rating__avg']\n data = {\n 'title': 'Discover',\n 'register_form': forms.RegisterForm(),\n 'books': list(set(books)),\n 'ratings': ratings\n }\n return TemplateResponse(request, 'discover.html', data)\n\n\n@method_decorator(login_required, name='dispatch')\nclass Feed(View):\n ''' activity stream '''\n def get(self, request, tab):\n ''' user's homepage with activity feed '''\n try:\n page = int(request.GET.get('page', 1))\n except ValueError:\n page = 1\n\n suggested_books = get_suggested_books(request.user)\n\n if tab == 'home':\n activities = get_activity_feed(\n request.user, ['public', 'unlisted', 
'followers'],\n following_only=True)\n elif tab == 'local':\n activities = get_activity_feed(\n request.user, ['public', 'followers'], local_only=True)\n else:\n activities = get_activity_feed(\n request.user, ['public', 'followers'])\n paginated = Paginator(activities, PAGE_LENGTH)\n\n goal = models.AnnualGoal.objects.filter(\n user=request.user, year=timezone.now().year\n ).first()\n data = {\n 'title': 'Updates Feed',\n 'user': request.user,\n 'suggested_books': suggested_books,\n 'activities': paginated.page(page),\n 'tab': tab,\n 'goal': goal,\n 'goal_form': forms.GoalForm(),\n }\n return TemplateResponse(request, 'feed.html', data)\n\n\ndef get_suggested_books(user, max_books=5):\n ''' helper to get a user's recent books '''\n book_count = 0\n preset_shelves = [\n ('reading', max_books), ('read', 2), ('to-read', max_books)\n ]\n suggested_books = []\n for (preset, shelf_max) in preset_shelves:\n limit = shelf_max if shelf_max < (max_books - book_count) \\\n else max_books - book_count\n shelf = user.shelf_set.get(identifier=preset)\n\n shelf_books = shelf.shelfbook_set.order_by(\n '-updated_date'\n ).all()[:limit]\n if not shelf_books:\n continue\n shelf_preview = {\n 'name': shelf.name,\n 'books': [s.book for s in shelf_books]\n }\n suggested_books.append(shelf_preview)\n book_count += len(shelf_preview['books'])\n return suggested_books\n", "path": "bookwyrm/views/landing.py"}]} |
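For the entry above (bookwyrm-social__bookwyrm-550), the fix is simply removing the `login_required` decorator from the `About` view. A minimal regression check, sketched with Django's standard `RequestFactory`; the snippet assumes BookWyrm's usual Django settings are configured and is illustrative rather than part of the project's test suite:

```python
from django.contrib.auth.models import AnonymousUser
from django.test import RequestFactory

from bookwyrm.views.landing import About

# With the decorator removed, an anonymous GET returns the about page
# (HTTP 200) instead of a 302 redirect to /login/?next=/about.
request = RequestFactory().get("/about")
request.user = AnonymousUser()

response = About.as_view()(request)
assert response.status_code == 200
```

Before the patch, the same request produces a 302 redirect to the login page, which is exactly the behaviour reported in the issue.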
gh_patches_debug_125 | rasdani/github-patches | git_diff | TheAlgorithms__Python-10664 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Improve our test coverage
### Feature description
Many of our existing algorithm files have little to no unit testing. This is problematic because this can easily let bugs slip through. We want some assurance that the code we currently have is correct and functional. We welcome all contributors to open PRs to help us add tests to our codebase.
### How to find low-coverage files
Go to the Actions tab in this repository and find the most recent **build** workflow run. Open the logs under "Run Tests" and scroll down until you find the section on code coverage:
```
---------- coverage: platform linux, python 3.12.0-final-0 -----------
Name Stmts Miss Cover Missing
-----------------------------------------------------------------------------------------------------------
quantum/q_fourier_transform.py 30 30 0% 14-93
scripts/validate_solutions.py 54 54 0% 2-94
strings/min_cost_string_conversion.py 78 75 4% 20-57, 61-75, 79-129
...
```
The "Cover" column tells you what percentage of the lines in that file are covered by tests. We want to increase this percentage for existing files. Find a file with low coverage percentage that you wish to write tests for, add doctests for each function, and open a PR with your changes. You do not need to have a perfect coverage percentage, but all functions should have doctests.
Some files will naturally be hard to write tests for. For example, the file may be poorly written because it lacks any functions. Other files might be how-tos, meaning they simply demonstrate how to use an existing library's functions rather than implementing the algorithm themselves. Ignore these kinds of files, as they will need to be rewritten eventually. Furthermore, ignore files in the `web_programming` and `project_euler` directories. Web programming files are inherently hard to test and Project Euler files have their own validation workflow, so don't worry about their test coverage.
_**When you open your PR, put "Contributes to #9943" in the PR description.**_ Do not use the word "fixes", "resolves", or "closes". This issue is an ongoing one, and your PR will not single-handedly resolve this issue.
### How to add doctests
A doctest is a unit test that is contained within the documentation comment (docstring) for a function. Here is an example of what doctests look like within a docstring:
```py
def add(a: int, b: int) -> int:
"""
Adds two non-negative numbers.
>>> add(1, 1)
2
>>> add(2, 5)
7
>>> add(1, 0)
1
>>> add(-1, -1)
    Traceback (most recent call last):
...
ValueError: Numbers must be non-negative
"""
```
For every function in the file you choose, you should write doctests like the ones shown above in its docstring. If a function doesn't have a docstring, add one. Your doctests should be comprehensive but not excessive: you should write just enough tests to cover all basic cases as well as all edge cases (e.g., negative numbers, empty lists, etc).
Do not simply run a function on some example inputs and put its output as the expected output for a doctest. This assumes that the function is implemented correctly when it might not be. Verify independently that your doctests and their expected outputs are correct. **Your PR will not be merged if it has failing tests.** If you happen to discover a bug while writing doctests, please fix it.
_**Please read our [contributing guidelines](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md) before you contribute.**_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `maths/power_using_recursion.py`
Content:
```
1 """
2 == Raise base to the power of exponent using recursion ==
3 Input -->
4 Enter the base: 3
5 Enter the exponent: 4
6 Output -->
7 3 to the power of 4 is 81
8 Input -->
9 Enter the base: 2
10 Enter the exponent: 0
11 Output -->
12 2 to the power of 0 is 1
13 """
14
15
16 def power(base: int, exponent: int) -> float:
17 """
18 >>> power(3, 4)
19 81
20 >>> power(2, 0)
21 1
22 >>> all(power(base, exponent) == pow(base, exponent)
23 ... for base in range(-10, 10) for exponent in range(10))
24 True
25 >>> power('a', 1)
26 'a'
27 >>> power('a', 2)
28 Traceback (most recent call last):
29 ...
30 TypeError: can't multiply sequence by non-int of type 'str'
31 >>> power('a', 'b')
32 Traceback (most recent call last):
33 ...
34 TypeError: unsupported operand type(s) for -: 'str' and 'int'
35 >>> power(2, -1)
36 Traceback (most recent call last):
37 ...
38 RecursionError: maximum recursion depth exceeded
39 """
40 return base * power(base, (exponent - 1)) if exponent else 1
41
42
43 if __name__ == "__main__":
44 from doctests import testmod
45
46 testmod()
47 print("Raise base to the power of exponent using recursion...")
48 base = int(input("Enter the base: ").strip())
49 exponent = int(input("Enter the exponent: ").strip())
50 result = power(base, abs(exponent))
51 if exponent < 0: # power() does not properly deal w/ negative exponents
52 result = 1 / result
53 print(f"{base} to the power of {exponent} is {result}")
54
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/maths/power_using_recursion.py b/maths/power_using_recursion.py
--- a/maths/power_using_recursion.py
+++ b/maths/power_using_recursion.py
@@ -15,6 +15,8 @@
def power(base: int, exponent: int) -> float:
"""
+ Calculate the power of a base raised to an exponent.
+
>>> power(3, 4)
81
>>> power(2, 0)
| {"golden_diff": "diff --git a/maths/power_using_recursion.py b/maths/power_using_recursion.py\n--- a/maths/power_using_recursion.py\n+++ b/maths/power_using_recursion.py\n@@ -15,6 +15,8 @@\n \n def power(base: int, exponent: int) -> float:\n \"\"\"\n+ Calculate the power of a base raised to an exponent.\n+\n >>> power(3, 4)\n 81\n >>> power(2, 0)\n", "issue": "Improve our test coverage\n### Feature description\r\n\r\nMany of our existing algorithm files have little to no unit testing. This is problematic because this can easily let bugs slip through. We want some assurance that the code we currently have is correct and functional. We welcome all contributors to open PRs to help us add tests to our codebase.\r\n\r\n### How to find low-coverage files\r\n\r\nGo to the Actions tab in this repository and find the most recent **build** workflow run. Open the logs under \"Run Tests\" and scroll down until you find the section on code coverage:\r\n```\r\n---------- coverage: platform linux, python 3.12.0-final-0 -----------\r\nName Stmts Miss Cover Missing\r\n-----------------------------------------------------------------------------------------------------------\r\nquantum/q_fourier_transform.py 30 30 0% 14-93\r\nscripts/validate_solutions.py 54 54 0% 2-94\r\nstrings/min_cost_string_conversion.py 78 75 4% 20-57, 61-75, 79-129\r\n...\r\n```\r\nThe \"Cover\" column tells you what percentage of the lines in that file are covered by tests. We want to increase this percentage for existing files. Find a file with low coverage percentage that you wish to write tests for, add doctests for each function, and open a PR with your changes. You do not need to have a perfect coverage percentage, but all functions should have doctests.\r\n\r\nSome files will naturally be hard to write tests for. For example, the file may be poorly written because they lack any functions. Other files might be how-tos, meaning they simply demonstrate how to use an existing library's functions rather than implementing the algorithm themselves. Ignore these kinds of files, as they will need to be rewritten eventually. Furthermore, ignore files in the `web_programming` and `project_euler` directories. Web programming files are inherently hard to test and Project Euler files have their own validation workflow, so don't worry about their test coverage.\r\n\r\n_**When you open your PR, put \"Contributes to #9943\" in the PR description.**_ Do not use the word \"fixes\", \"resolves\", or \"closes\". This issue is an ongoing one, and your PR will not single-handedly resolve this issue.\r\n\r\n### How to add doctests\r\n\r\nA doctest is a unit test that is contained within the documentation comment (docstring) for a function. Here is an example of what doctests look like within a docstring:\r\n```py\r\ndef add(a: int, b: int) -> int:\r\n \"\"\"\r\n Adds two non-negative numbers.\r\n >>> add(1, 1)\r\n 2\r\n >>> add(2, 5)\r\n 7\r\n >>> add(1, 0)\r\n 1\r\n >>> add(-1, -1)\r\n Traceback (most recent last):\r\n ...\r\n ValueError: Numbers must be non-negative\r\n \"\"\"\r\n```\r\nFor every function in the file you choose, you should write doctests like the ones shown above in its docstring. If a function doesn't have a docstring, add one. Your doctests should be comprehensive but not excessive: you should write just enough tests to cover all basic cases as well as all edge cases (e.g., negative numbers, empty lists, etc).\r\n\r\nDo not simply run a function on some example inputs and put its output as the expected output for a doctest. 
This assumes that the function is implemented correctly when it might not be. Verify independently that your doctests and their expected outputs are correct. **Your PR will not be merged if it has failing tests.** If you happen to discover a bug while writing doctests, please fix it.\r\n\r\n_**Please read our [contributing guidelines](https://github.com/TheAlgorithms/Python/blob/master/CONTRIBUTING.md) before you contribute.**_\n", "before_files": [{"content": "\"\"\"\n== Raise base to the power of exponent using recursion ==\n Input -->\n Enter the base: 3\n Enter the exponent: 4\n Output -->\n 3 to the power of 4 is 81\n Input -->\n Enter the base: 2\n Enter the exponent: 0\n Output -->\n 2 to the power of 0 is 1\n\"\"\"\n\n\ndef power(base: int, exponent: int) -> float:\n \"\"\"\n >>> power(3, 4)\n 81\n >>> power(2, 0)\n 1\n >>> all(power(base, exponent) == pow(base, exponent)\n ... for base in range(-10, 10) for exponent in range(10))\n True\n >>> power('a', 1)\n 'a'\n >>> power('a', 2)\n Traceback (most recent call last):\n ...\n TypeError: can't multiply sequence by non-int of type 'str'\n >>> power('a', 'b')\n Traceback (most recent call last):\n ...\n TypeError: unsupported operand type(s) for -: 'str' and 'int'\n >>> power(2, -1)\n Traceback (most recent call last):\n ...\n RecursionError: maximum recursion depth exceeded\n \"\"\"\n return base * power(base, (exponent - 1)) if exponent else 1\n\n\nif __name__ == \"__main__\":\n from doctests import testmod\n\n testmod()\n print(\"Raise base to the power of exponent using recursion...\")\n base = int(input(\"Enter the base: \").strip())\n exponent = int(input(\"Enter the exponent: \").strip())\n result = power(base, abs(exponent))\n if exponent < 0: # power() does not properly deal w/ negative exponents\n result = 1 / result\n print(f\"{base} to the power of {exponent} is {result}\")\n", "path": "maths/power_using_recursion.py"}], "after_files": [{"content": "\"\"\"\n== Raise base to the power of exponent using recursion ==\n Input -->\n Enter the base: 3\n Enter the exponent: 4\n Output -->\n 3 to the power of 4 is 81\n Input -->\n Enter the base: 2\n Enter the exponent: 0\n Output -->\n 2 to the power of 0 is 1\n\"\"\"\n\n\ndef power(base: int, exponent: int) -> float:\n \"\"\"\n Calculate the power of a base raised to an exponent.\n\n >>> power(3, 4)\n 81\n >>> power(2, 0)\n 1\n >>> all(power(base, exponent) == pow(base, exponent)\n ... for base in range(-10, 10) for exponent in range(10))\n True\n >>> power('a', 1)\n 'a'\n >>> power('a', 2)\n Traceback (most recent call last):\n ...\n TypeError: can't multiply sequence by non-int of type 'str'\n >>> power('a', 'b')\n Traceback (most recent call last):\n ...\n TypeError: unsupported operand type(s) for -: 'str' and 'int'\n >>> power(2, -1)\n Traceback (most recent call last):\n ...\n RecursionError: maximum recursion depth exceeded\n \"\"\"\n return base * power(base, (exponent - 1)) if exponent else 1\n\n\nif __name__ == \"__main__\":\n from doctests import testmod\n\n testmod()\n print(\"Raise base to the power of exponent using recursion...\")\n base = int(input(\"Enter the base: \").strip())\n exponent = int(input(\"Enter the exponent: \").strip())\n result = power(base, abs(exponent))\n if exponent < 0: # power() does not properly deal w/ negative exponents\n result = 1 / result\n print(f\"{base} to the power of {exponent} is {result}\")\n", "path": "maths/power_using_recursion.py"}]} |
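As a practical companion to the doctest guidance in the entry above, the `>>>` examples in a file such as `maths/power_using_recursion.py` can be run with the standard-library `doctest` module. A small sketch, assuming it is executed from the repository root (the file path is taken from the entry; everything else is plain standard library):

```python
import doctest
import importlib.util

# Load the file directly so no package __init__ is required.
spec = importlib.util.spec_from_file_location(
    "power_using_recursion", "maths/power_using_recursion.py"
)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)

# Run every ">>>" example found in the module's docstrings.
failed, attempted = doctest.testmod(module)
print(f"{attempted} examples run, {failed} failures")
```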
gh_patches_debug_126 | rasdani/github-patches | git_diff | scikit-hep__pyhf-2135 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Matplotlib broken in Pyodide demo in docs
In the docs https://pyhf.readthedocs.io/en/v0.7.0/, the Pyodide example is broken for me:
```pytb
---------------------------------------------------------------------------
ModuleNotFoundError Traceback (most recent call last)
Cell In[1], line 3
1 import piplite
2 await piplite.install(["pyhf==0.7.0"])
----> 3 get_ipython().run_line_magic('matplotlib', 'inline')
4 import pyhf
File /lib/python3.10/site-packages/IPython/core/interactiveshell.py:2369, in InteractiveShell.run_line_magic(self, magic_name, line, _stack_depth)
2367 kwargs['local_ns'] = self.get_local_scope(stack_depth)
2368 with self.builtin_trap:
-> 2369 result = fn(*args, **kwargs)
2371 # The code below prevents the output from being displayed
2372 # when using magics with decodator @output_can_be_silenced
2373 # when the last Python token in the expression is a ';'.
2374 if getattr(fn, magic.MAGIC_OUTPUT_CAN_BE_SILENCED, False):
File /lib/python3.10/site-packages/IPython/core/magics/pylab.py:99, in PylabMagics.matplotlib(self, line)
97 print("Available matplotlib backends: %s" % backends_list)
98 else:
---> 99 gui, backend = self.shell.enable_matplotlib(args.gui.lower() if isinstance(args.gui, str) else args.gui)
100 self._show_matplotlib_backend(args.gui, backend)
File /lib/python3.10/site-packages/IPython/core/interactiveshell.py:3540, in InteractiveShell.enable_matplotlib(self, gui)
3519 def enable_matplotlib(self, gui=None):
3520 """Enable interactive matplotlib and inline figure support.
3521
3522 This takes the following steps:
(...)
3538 display figures inline.
3539 """
-> 3540 from matplotlib_inline.backend_inline import configure_inline_support
3542 from IPython.core import pylabtools as pt
3543 gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select)
File /lib/python3.10/site-packages/matplotlib_inline/__init__.py:1
----> 1 from . import backend_inline, config # noqa
2 __version__ = "0.1.6" # noqa
File /lib/python3.10/site-packages/matplotlib_inline/backend_inline.py:6
1 """A matplotlib backend for publishing figures via display_data"""
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the BSD 3-Clause License.
----> 6 import matplotlib
7 from matplotlib import colors
8 from matplotlib.backends import backend_agg
ModuleNotFoundError: The module 'matplotlib' is included in the Pyodide distribution, but it is not installed.
You can install it by calling:
await micropip.install("matplotlib") in Python, or
await pyodide.loadPackage("matplotlib") in JavaScript
See https://pyodide.org/en/stable/usage/loading-packages.html for more details.
```
It used to work previously, though I can not say for sure when it last worked for me. Running on MacOS (ARM), tried Firefox and Chrome (resulting in the above), while Safari seems stuck executing the import commands provided.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/generate_jupyterlite_iframe.py`
Content:
```
1 import urllib.parse
2
3
4 def main():
5 code = """\
6 import piplite
7 await piplite.install(["pyhf==0.7.0"])
8 %matplotlib inline
9 import pyhf\
10 """
11
12 parsed_url = urllib.parse.quote(code)
13 url_base = "https://jupyterlite.github.io/demo/repl/index.html"
14 jupyterlite_options = "?kernel=python&toolbar=1&code="
15 jupyterlite_url = url_base + jupyterlite_options + parsed_url
16
17 print(f"# jupyterlite URL:\n{jupyterlite_url}")
18
19 jupyterlite_iframe_rst = f"""\
20 <iframe
21 src="{jupyterlite_url}"
22 width="100%"
23 height="500px"
24 ></iframe>\
25 """
26 print(f"\n# RST for iframe for jupyterlite.rst:\n{jupyterlite_iframe_rst}")
27
28
29 if __name__ == "__main__":
30 raise SystemExit(main())
31
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/generate_jupyterlite_iframe.py b/docs/generate_jupyterlite_iframe.py
--- a/docs/generate_jupyterlite_iframe.py
+++ b/docs/generate_jupyterlite_iframe.py
@@ -4,7 +4,7 @@
def main():
code = """\
import piplite
-await piplite.install(["pyhf==0.7.0"])
+await piplite.install(["pyhf==0.7.0", "matplotlib>=3.0.0"])
%matplotlib inline
import pyhf\
"""
| {"golden_diff": "diff --git a/docs/generate_jupyterlite_iframe.py b/docs/generate_jupyterlite_iframe.py\n--- a/docs/generate_jupyterlite_iframe.py\n+++ b/docs/generate_jupyterlite_iframe.py\n@@ -4,7 +4,7 @@\n def main():\n code = \"\"\"\\\n import piplite\n-await piplite.install([\"pyhf==0.7.0\"])\n+await piplite.install([\"pyhf==0.7.0\", \"matplotlib>=3.0.0\"])\n %matplotlib inline\n import pyhf\\\n \"\"\"\n", "issue": "Matplotlib broken in Pyodide demo in docs\nIn the docs https://pyhf.readthedocs.io/en/v0.7.0/, the Pyodide example is broken for me:\r\n```pytb\r\n---------------------------------------------------------------------------\r\nModuleNotFoundError Traceback (most recent call last)\r\nCell In[1], line 3\r\n 1 import piplite\r\n 2 await piplite.install([\"pyhf==0.7.0\"])\r\n----> 3 get_ipython().run_line_magic('matplotlib', 'inline')\r\n 4 import pyhf\r\n\r\nFile /lib/python3.10/site-packages/IPython/core/interactiveshell.py:2369, in InteractiveShell.run_line_magic(self, magic_name, line, _stack_depth)\r\n 2367 kwargs['local_ns'] = self.get_local_scope(stack_depth)\r\n 2368 with self.builtin_trap:\r\n-> 2369 result = fn(*args, **kwargs)\r\n 2371 # The code below prevents the output from being displayed\r\n 2372 # when using magics with decodator @output_can_be_silenced\r\n 2373 # when the last Python token in the expression is a ';'.\r\n 2374 if getattr(fn, magic.MAGIC_OUTPUT_CAN_BE_SILENCED, False):\r\n\r\nFile /lib/python3.10/site-packages/IPython/core/magics/pylab.py:99, in PylabMagics.matplotlib(self, line)\r\n 97 print(\"Available matplotlib backends: %s\" % backends_list)\r\n 98 else:\r\n---> 99 gui, backend = self.shell.enable_matplotlib(args.gui.lower() if isinstance(args.gui, str) else args.gui)\r\n 100 self._show_matplotlib_backend(args.gui, backend)\r\n\r\nFile /lib/python3.10/site-packages/IPython/core/interactiveshell.py:3540, in InteractiveShell.enable_matplotlib(self, gui)\r\n 3519 def enable_matplotlib(self, gui=None):\r\n 3520 \"\"\"Enable interactive matplotlib and inline figure support.\r\n 3521 \r\n 3522 This takes the following steps:\r\n (...)\r\n 3538 display figures inline.\r\n 3539 \"\"\"\r\n-> 3540 from matplotlib_inline.backend_inline import configure_inline_support\r\n 3542 from IPython.core import pylabtools as pt\r\n 3543 gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select)\r\n\r\nFile /lib/python3.10/site-packages/matplotlib_inline/__init__.py:1\r\n----> 1 from . import backend_inline, config # noqa\r\n 2 __version__ = \"0.1.6\" # noqa\r\n\r\nFile /lib/python3.10/site-packages/matplotlib_inline/backend_inline.py:6\r\n 1 \"\"\"A matplotlib backend for publishing figures via display_data\"\"\"\r\n 3 # Copyright (c) IPython Development Team.\r\n 4 # Distributed under the terms of the BSD 3-Clause License.\r\n----> 6 import matplotlib\r\n 7 from matplotlib import colors\r\n 8 from matplotlib.backends import backend_agg\r\n\r\nModuleNotFoundError: The module 'matplotlib' is included in the Pyodide distribution, but it is not installed.\r\nYou can install it by calling:\r\n await micropip.install(\"matplotlib\") in Python, or\r\n await pyodide.loadPackage(\"matplotlib\") in JavaScript\r\nSee https://pyodide.org/en/stable/usage/loading-packages.html for more details.\r\n```\r\nIt used to work previously, though I can not say for sure when it last worked for me. 
Running on MacOS (ARM), tried Firefox and Chrome (resulting in the above), while Safari seems stuck executing the import commands provided.\n", "before_files": [{"content": "import urllib.parse\n\n\ndef main():\n code = \"\"\"\\\nimport piplite\nawait piplite.install([\"pyhf==0.7.0\"])\n%matplotlib inline\nimport pyhf\\\n\"\"\"\n\n parsed_url = urllib.parse.quote(code)\n url_base = \"https://jupyterlite.github.io/demo/repl/index.html\"\n jupyterlite_options = \"?kernel=python&toolbar=1&code=\"\n jupyterlite_url = url_base + jupyterlite_options + parsed_url\n\n print(f\"# jupyterlite URL:\\n{jupyterlite_url}\")\n\n jupyterlite_iframe_rst = f\"\"\"\\\n <iframe\n src=\"{jupyterlite_url}\"\n width=\"100%\"\n height=\"500px\"\n ></iframe>\\\n\"\"\"\n print(f\"\\n# RST for iframe for jupyterlite.rst:\\n{jupyterlite_iframe_rst}\")\n\n\nif __name__ == \"__main__\":\n raise SystemExit(main())\n", "path": "docs/generate_jupyterlite_iframe.py"}], "after_files": [{"content": "import urllib.parse\n\n\ndef main():\n code = \"\"\"\\\nimport piplite\nawait piplite.install([\"pyhf==0.7.0\", \"matplotlib>=3.0.0\"])\n%matplotlib inline\nimport pyhf\\\n\"\"\"\n\n parsed_url = urllib.parse.quote(code)\n url_base = \"https://jupyterlite.github.io/demo/repl/index.html\"\n jupyterlite_options = \"?kernel=python&toolbar=1&code=\"\n jupyterlite_url = url_base + jupyterlite_options + parsed_url\n\n print(f\"# jupyterlite URL:\\n{jupyterlite_url}\")\n\n jupyterlite_iframe_rst = f\"\"\"\\\n <iframe\n src=\"{jupyterlite_url}\"\n width=\"100%\"\n height=\"500px\"\n ></iframe>\\\n\"\"\"\n print(f\"\\n# RST for iframe for jupyterlite.rst:\\n{jupyterlite_iframe_rst}\")\n\n\nif __name__ == \"__main__\":\n raise SystemExit(main())\n", "path": "docs/generate_jupyterlite_iframe.py"}]} |
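To make the effect of the one-line fix above concrete, the snippet below is a condensed sketch of `docs/generate_jupyterlite_iframe.py` after the patch: it rebuilds the bootstrap cell and the JupyterLite REPL URL it is encoded into. The pinned versions come straight from the diff, and the URL parameters mirror the original script.

```python
import urllib.parse

# Patched bootstrap cell: matplotlib is installed explicitly next to pyhf,
# because current Pyodide distributions ship matplotlib but no longer load it
# automatically when "%matplotlib inline" runs.
code = (
    "import piplite\n"
    'await piplite.install(["pyhf==0.7.0", "matplotlib>=3.0.0"])\n'
    "%matplotlib inline\n"
    "import pyhf"
)

url_base = "https://jupyterlite.github.io/demo/repl/index.html"
print(url_base + "?kernel=python&toolbar=1&code=" + urllib.parse.quote(code))
```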
gh_patches_debug_127 | rasdani/github-patches | git_diff | beetbox__beets-3159 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
BadFiles plugin crashes beets with latest git master
### Problem
If the `badfiles` plugin is activated, beets crashes when starting an import task.
Running this command in verbose (`-vv`) mode:
```sh
$ beet -vv import --write /data/music
user configuration: /home/jan/.config/beets/config.yaml
data directory: /home/jan/.config/beets
plugin paths:
Sending event: pluginload
artresizer: method is (2, (7, 0, 8))
lyrics: Disabling google source: no API key configured.
library database: /home/jan/beets.db
library directory: /data/music
Sending event: library_opened
Traceback (most recent call last):
File "/home/jan/.local/bin/beet", line 11, in <module>
load_entry_point('beets', 'console_scripts', 'beet')()
File "/data/jan/Projects/beets/beets/ui/__init__.py", line 1266, in main
_raw_main(args)
File "/data/jan/Projects/beets/beets/ui/__init__.py", line 1253, in _raw_main
subcommand.func(lib, suboptions, subargs)
File "/data/jan/Projects/beets/beets/ui/commands.py", line 955, in import_func
import_files(lib, paths, query)
File "/data/jan/Projects/beets/beets/ui/commands.py", line 925, in import_files
session.run()
File "/data/jan/Projects/beets/beets/importer.py", line 316, in run
for stage_func in plugins.early_import_stages():
File "/data/jan/Projects/beets/beets/plugins.py", line 426, in early_import_stages
stages += plugin.get_early_import_stages()
File "/data/jan/Projects/beets/beets/plugins.py", line 112, in get_early_import_stages
return self._set_stage_log_level(self.early_import_stages)
AttributeError: 'BadFiles' object has no attribute 'early_import_stages'
```
### Setup
* OS: Arch Linux
* Python version: 3.7.2
* beets version: be118b92
* Turning off plugins made problem go away (yes/no): Yes (Disabling the `badfiles` plugin suffices)
My configuration (output of `beet config`) is: https://gist.github.com/Holzhaus/500b790c06fe2250ac9182bd8a6760da
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `beetsplug/badfiles.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # This file is part of beets.
3 # Copyright 2016, François-Xavier Thomas.
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
15
16 """Use command-line tools to check for audio file corruption.
17 """
18
19 from __future__ import division, absolute_import, print_function
20
21 from subprocess import check_output, CalledProcessError, list2cmdline, STDOUT
22
23 import shlex
24 import os
25 import errno
26 import sys
27 import six
28 from beets.plugins import BeetsPlugin
29 from beets.ui import Subcommand
30 from beets.util import displayable_path, confit, par_map
31 from beets import ui
32
33
34 class CheckerCommandException(Exception):
35 """Raised when running a checker failed.
36
37 Attributes:
38 checker: Checker command name.
39 path: Path to the file being validated.
40 errno: Error number from the checker execution error.
41 msg: Message from the checker execution error.
42 """
43
44 def __init__(self, cmd, oserror):
45 self.checker = cmd[0]
46 self.path = cmd[-1]
47 self.errno = oserror.errno
48 self.msg = str(oserror)
49
50
51 class BadFiles(BeetsPlugin):
52 def __init__(self):
53 self.verbose = False
54
55 def run_command(self, cmd):
56 self._log.debug(u"running command: {}",
57 displayable_path(list2cmdline(cmd)))
58 try:
59 output = check_output(cmd, stderr=STDOUT)
60 errors = 0
61 status = 0
62 except CalledProcessError as e:
63 output = e.output
64 errors = 1
65 status = e.returncode
66 except OSError as e:
67 raise CheckerCommandException(cmd, e)
68 output = output.decode(sys.getfilesystemencoding())
69 return status, errors, [line for line in output.split("\n") if line]
70
71 def check_mp3val(self, path):
72 status, errors, output = self.run_command(["mp3val", path])
73 if status == 0:
74 output = [line for line in output if line.startswith("WARNING:")]
75 errors = len(output)
76 return status, errors, output
77
78 def check_flac(self, path):
79 return self.run_command(["flac", "-wst", path])
80
81 def check_custom(self, command):
82 def checker(path):
83 cmd = shlex.split(command)
84 cmd.append(path)
85 return self.run_command(cmd)
86 return checker
87
88 def get_checker(self, ext):
89 ext = ext.lower()
90 try:
91 command = self.config['commands'].get(dict).get(ext)
92 except confit.NotFoundError:
93 command = None
94 if command:
95 return self.check_custom(command)
96 if ext == "mp3":
97 return self.check_mp3val
98 if ext == "flac":
99 return self.check_flac
100
101 def check_item(self, item):
102 # First, check whether the path exists. If not, the user
103 # should probably run `beet update` to cleanup your library.
104 dpath = displayable_path(item.path)
105 self._log.debug(u"checking path: {}", dpath)
106 if not os.path.exists(item.path):
107 ui.print_(u"{}: file does not exist".format(
108 ui.colorize('text_error', dpath)))
109
110 # Run the checker against the file if one is found
111 ext = os.path.splitext(item.path)[1][1:].decode('utf8', 'ignore')
112 checker = self.get_checker(ext)
113 if not checker:
114 self._log.error(u"no checker specified in the config for {}",
115 ext)
116 return
117 path = item.path
118 if not isinstance(path, six.text_type):
119 path = item.path.decode(sys.getfilesystemencoding())
120 try:
121 status, errors, output = checker(path)
122 except CheckerCommandException as e:
123 if e.errno == errno.ENOENT:
124 self._log.error(
125 u"command not found: {} when validating file: {}",
126 e.checker,
127 e.path
128 )
129 else:
130 self._log.error(u"error invoking {}: {}", e.checker, e.msg)
131 return
132 if status > 0:
133 ui.print_(u"{}: checker exited with status {}"
134 .format(ui.colorize('text_error', dpath), status))
135 for line in output:
136 ui.print_(u" {}".format(displayable_path(line)))
137 elif errors > 0:
138 ui.print_(u"{}: checker found {} errors or warnings"
139 .format(ui.colorize('text_warning', dpath), errors))
140 for line in output:
141 ui.print_(u" {}".format(displayable_path(line)))
142 elif self.verbose:
143 ui.print_(u"{}: ok".format(ui.colorize('text_success', dpath)))
144
145 def command(self, lib, opts, args):
146 # Get items from arguments
147 items = lib.items(ui.decargs(args))
148 self.verbose = opts.verbose
149 par_map(self.check_item, items)
150
151 def commands(self):
152 bad_command = Subcommand('bad',
153 help=u'check for corrupt or missing files')
154 bad_command.parser.add_option(
155 u'-v', u'--verbose',
156 action='store_true', default=False, dest='verbose',
157 help=u'view results for both the bad and uncorrupted files'
158 )
159 bad_command.func = self.command
160 return [bad_command]
161
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/beetsplug/badfiles.py b/beetsplug/badfiles.py
--- a/beetsplug/badfiles.py
+++ b/beetsplug/badfiles.py
@@ -50,6 +50,7 @@
class BadFiles(BeetsPlugin):
def __init__(self):
+ super(BadFiles, self).__init__()
self.verbose = False
def run_command(self, cmd):
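Note on the fix above: beets creates per-plugin state (the `_log` logger, the plugin's config view, and the `import_stages` / `early_import_stages` lists) inside `BeetsPlugin.__init__`. Because `BadFiles.__init__` overrode the constructor without chaining up, those attributes were never created, which is what later surfaces as `AttributeError: 'BadFiles' object has no attribute 'early_import_stages'` in the issue traceback. A minimal sketch of the corrected pattern, assuming the rest of the plugin body is unchanged:

```python
# Minimal sketch of the corrected constructor (assumes BeetsPlugin.__init__ is
# where beets sets up self._log, self.config and the import-stage lists):
from beets.plugins import BeetsPlugin

class BadFiles(BeetsPlugin):
    def __init__(self):
        super(BadFiles, self).__init__()  # creates _log, config, early_import_stages, ...
        self.verbose = False
```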
| {"golden_diff": "diff --git a/beetsplug/badfiles.py b/beetsplug/badfiles.py\n--- a/beetsplug/badfiles.py\n+++ b/beetsplug/badfiles.py\n@@ -50,6 +50,7 @@\n \n class BadFiles(BeetsPlugin):\n def __init__(self):\n+ super(BadFiles, self).__init__()\n self.verbose = False\n \n def run_command(self, cmd):\n", "issue": "BadFiles plugin crashes beets with latest git master\n### Problem\r\n\r\nIf the `badfiles` plugin is activated, beets crashes when starting an import task.\r\n\r\nRunning this command in verbose (`-vv`) mode:\r\n\r\n```sh\r\n$ beet -vv import --write /data/music\r\nuser configuration: /home/jan/.config/beets/config.yaml\r\ndata directory: /home/jan/.config/beets\r\nplugin paths:\r\nSending event: pluginload\r\nartresizer: method is (2, (7, 0, 8))\r\nlyrics: Disabling google source: no API key configured.\r\nlibrary database: /home/jan/beets.db\r\nlibrary directory: /data/music\r\nSending event: library_opened\r\nTraceback (most recent call last):\r\n File \"/home/jan/.local/bin/beet\", line 11, in <module>\r\n load_entry_point('beets', 'console_scripts', 'beet')()\r\n File \"/data/jan/Projects/beets/beets/ui/__init__.py\", line 1266, in main\r\n _raw_main(args)\r\n File \"/data/jan/Projects/beets/beets/ui/__init__.py\", line 1253, in _raw_main\r\n subcommand.func(lib, suboptions, subargs)\r\n File \"/data/jan/Projects/beets/beets/ui/commands.py\", line 955, in import_func\r\n import_files(lib, paths, query)\r\n File \"/data/jan/Projects/beets/beets/ui/commands.py\", line 925, in import_files\r\n session.run()\r\n File \"/data/jan/Projects/beets/beets/importer.py\", line 316, in run\r\n for stage_func in plugins.early_import_stages():\r\n File \"/data/jan/Projects/beets/beets/plugins.py\", line 426, in early_import_stages\r\n stages += plugin.get_early_import_stages()\r\n File \"/data/jan/Projects/beets/beets/plugins.py\", line 112, in get_early_import_stages\r\n return self._set_stage_log_level(self.early_import_stages)\r\nAttributeError: 'BadFiles' object has no attribute 'early_import_stages'\r\n```\r\n\r\n### Setup\r\n\r\n* OS: Arch Linux\r\n* Python version: 3.7.2\r\n* beets version: be118b92\r\n* Turning off plugins made problem go away (yes/no): Yes (Disabling the `badfiles` plugin suffices)\r\n\r\nMy configuration (output of `beet config`) is: https://gist.github.com/Holzhaus/500b790c06fe2250ac9182bd8a6760da\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# This file is part of beets.\n# Copyright 2016, Fran\u00e7ois-Xavier Thomas.\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be\n# included in all copies or substantial portions of the Software.\n\n\"\"\"Use command-line tools to check for audio file corruption.\n\"\"\"\n\nfrom __future__ import division, absolute_import, print_function\n\nfrom subprocess import check_output, CalledProcessError, list2cmdline, STDOUT\n\nimport shlex\nimport os\nimport errno\nimport sys\nimport six\nfrom beets.plugins import BeetsPlugin\nfrom beets.ui import Subcommand\nfrom beets.util import displayable_path, confit, par_map\nfrom beets import ui\n\n\nclass 
CheckerCommandException(Exception):\n \"\"\"Raised when running a checker failed.\n\n Attributes:\n checker: Checker command name.\n path: Path to the file being validated.\n errno: Error number from the checker execution error.\n msg: Message from the checker execution error.\n \"\"\"\n\n def __init__(self, cmd, oserror):\n self.checker = cmd[0]\n self.path = cmd[-1]\n self.errno = oserror.errno\n self.msg = str(oserror)\n\n\nclass BadFiles(BeetsPlugin):\n def __init__(self):\n self.verbose = False\n\n def run_command(self, cmd):\n self._log.debug(u\"running command: {}\",\n displayable_path(list2cmdline(cmd)))\n try:\n output = check_output(cmd, stderr=STDOUT)\n errors = 0\n status = 0\n except CalledProcessError as e:\n output = e.output\n errors = 1\n status = e.returncode\n except OSError as e:\n raise CheckerCommandException(cmd, e)\n output = output.decode(sys.getfilesystemencoding())\n return status, errors, [line for line in output.split(\"\\n\") if line]\n\n def check_mp3val(self, path):\n status, errors, output = self.run_command([\"mp3val\", path])\n if status == 0:\n output = [line for line in output if line.startswith(\"WARNING:\")]\n errors = len(output)\n return status, errors, output\n\n def check_flac(self, path):\n return self.run_command([\"flac\", \"-wst\", path])\n\n def check_custom(self, command):\n def checker(path):\n cmd = shlex.split(command)\n cmd.append(path)\n return self.run_command(cmd)\n return checker\n\n def get_checker(self, ext):\n ext = ext.lower()\n try:\n command = self.config['commands'].get(dict).get(ext)\n except confit.NotFoundError:\n command = None\n if command:\n return self.check_custom(command)\n if ext == \"mp3\":\n return self.check_mp3val\n if ext == \"flac\":\n return self.check_flac\n\n def check_item(self, item):\n # First, check whether the path exists. 
If not, the user\n # should probably run `beet update` to cleanup your library.\n dpath = displayable_path(item.path)\n self._log.debug(u\"checking path: {}\", dpath)\n if not os.path.exists(item.path):\n ui.print_(u\"{}: file does not exist\".format(\n ui.colorize('text_error', dpath)))\n\n # Run the checker against the file if one is found\n ext = os.path.splitext(item.path)[1][1:].decode('utf8', 'ignore')\n checker = self.get_checker(ext)\n if not checker:\n self._log.error(u\"no checker specified in the config for {}\",\n ext)\n return\n path = item.path\n if not isinstance(path, six.text_type):\n path = item.path.decode(sys.getfilesystemencoding())\n try:\n status, errors, output = checker(path)\n except CheckerCommandException as e:\n if e.errno == errno.ENOENT:\n self._log.error(\n u\"command not found: {} when validating file: {}\",\n e.checker,\n e.path\n )\n else:\n self._log.error(u\"error invoking {}: {}\", e.checker, e.msg)\n return\n if status > 0:\n ui.print_(u\"{}: checker exited with status {}\"\n .format(ui.colorize('text_error', dpath), status))\n for line in output:\n ui.print_(u\" {}\".format(displayable_path(line)))\n elif errors > 0:\n ui.print_(u\"{}: checker found {} errors or warnings\"\n .format(ui.colorize('text_warning', dpath), errors))\n for line in output:\n ui.print_(u\" {}\".format(displayable_path(line)))\n elif self.verbose:\n ui.print_(u\"{}: ok\".format(ui.colorize('text_success', dpath)))\n\n def command(self, lib, opts, args):\n # Get items from arguments\n items = lib.items(ui.decargs(args))\n self.verbose = opts.verbose\n par_map(self.check_item, items)\n\n def commands(self):\n bad_command = Subcommand('bad',\n help=u'check for corrupt or missing files')\n bad_command.parser.add_option(\n u'-v', u'--verbose',\n action='store_true', default=False, dest='verbose',\n help=u'view results for both the bad and uncorrupted files'\n )\n bad_command.func = self.command\n return [bad_command]\n", "path": "beetsplug/badfiles.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# This file is part of beets.\n# Copyright 2016, Fran\u00e7ois-Xavier Thomas.\n#\n# Permission is hereby granted, free of charge, to any person obtaining\n# a copy of this software and associated documentation files (the\n# \"Software\"), to deal in the Software without restriction, including\n# without limitation the rights to use, copy, modify, merge, publish,\n# distribute, sublicense, and/or sell copies of the Software, and to\n# permit persons to whom the Software is furnished to do so, subject to\n# the following conditions:\n#\n# The above copyright notice and this permission notice shall be\n# included in all copies or substantial portions of the Software.\n\n\"\"\"Use command-line tools to check for audio file corruption.\n\"\"\"\n\nfrom __future__ import division, absolute_import, print_function\n\nfrom subprocess import check_output, CalledProcessError, list2cmdline, STDOUT\n\nimport shlex\nimport os\nimport errno\nimport sys\nimport six\nfrom beets.plugins import BeetsPlugin\nfrom beets.ui import Subcommand\nfrom beets.util import displayable_path, confit, par_map\nfrom beets import ui\n\n\nclass CheckerCommandException(Exception):\n \"\"\"Raised when running a checker failed.\n\n Attributes:\n checker: Checker command name.\n path: Path to the file being validated.\n errno: Error number from the checker execution error.\n msg: Message from the checker execution error.\n \"\"\"\n\n def __init__(self, cmd, oserror):\n self.checker = cmd[0]\n self.path = cmd[-1]\n 
self.errno = oserror.errno\n self.msg = str(oserror)\n\n\nclass BadFiles(BeetsPlugin):\n def __init__(self):\n super(BadFiles, self).__init__()\n self.verbose = False\n\n def run_command(self, cmd):\n self._log.debug(u\"running command: {}\",\n displayable_path(list2cmdline(cmd)))\n try:\n output = check_output(cmd, stderr=STDOUT)\n errors = 0\n status = 0\n except CalledProcessError as e:\n output = e.output\n errors = 1\n status = e.returncode\n except OSError as e:\n raise CheckerCommandException(cmd, e)\n output = output.decode(sys.getfilesystemencoding())\n return status, errors, [line for line in output.split(\"\\n\") if line]\n\n def check_mp3val(self, path):\n status, errors, output = self.run_command([\"mp3val\", path])\n if status == 0:\n output = [line for line in output if line.startswith(\"WARNING:\")]\n errors = len(output)\n return status, errors, output\n\n def check_flac(self, path):\n return self.run_command([\"flac\", \"-wst\", path])\n\n def check_custom(self, command):\n def checker(path):\n cmd = shlex.split(command)\n cmd.append(path)\n return self.run_command(cmd)\n return checker\n\n def get_checker(self, ext):\n ext = ext.lower()\n try:\n command = self.config['commands'].get(dict).get(ext)\n except confit.NotFoundError:\n command = None\n if command:\n return self.check_custom(command)\n if ext == \"mp3\":\n return self.check_mp3val\n if ext == \"flac\":\n return self.check_flac\n\n def check_item(self, item):\n # First, check whether the path exists. If not, the user\n # should probably run `beet update` to cleanup your library.\n dpath = displayable_path(item.path)\n self._log.debug(u\"checking path: {}\", dpath)\n if not os.path.exists(item.path):\n ui.print_(u\"{}: file does not exist\".format(\n ui.colorize('text_error', dpath)))\n\n # Run the checker against the file if one is found\n ext = os.path.splitext(item.path)[1][1:].decode('utf8', 'ignore')\n checker = self.get_checker(ext)\n if not checker:\n self._log.error(u\"no checker specified in the config for {}\",\n ext)\n return\n path = item.path\n if not isinstance(path, six.text_type):\n path = item.path.decode(sys.getfilesystemencoding())\n try:\n status, errors, output = checker(path)\n except CheckerCommandException as e:\n if e.errno == errno.ENOENT:\n self._log.error(\n u\"command not found: {} when validating file: {}\",\n e.checker,\n e.path\n )\n else:\n self._log.error(u\"error invoking {}: {}\", e.checker, e.msg)\n return\n if status > 0:\n ui.print_(u\"{}: checker exited with status {}\"\n .format(ui.colorize('text_error', dpath), status))\n for line in output:\n ui.print_(u\" {}\".format(displayable_path(line)))\n elif errors > 0:\n ui.print_(u\"{}: checker found {} errors or warnings\"\n .format(ui.colorize('text_warning', dpath), errors))\n for line in output:\n ui.print_(u\" {}\".format(displayable_path(line)))\n elif self.verbose:\n ui.print_(u\"{}: ok\".format(ui.colorize('text_success', dpath)))\n\n def command(self, lib, opts, args):\n # Get items from arguments\n items = lib.items(ui.decargs(args))\n self.verbose = opts.verbose\n par_map(self.check_item, items)\n\n def commands(self):\n bad_command = Subcommand('bad',\n help=u'check for corrupt or missing files')\n bad_command.parser.add_option(\n u'-v', u'--verbose',\n action='store_true', default=False, dest='verbose',\n help=u'view results for both the bad and uncorrupted files'\n )\n bad_command.func = self.command\n return [bad_command]\n", "path": "beetsplug/badfiles.py"}]} |
gh_patches_debug_128 | rasdani/github-patches | git_diff | napari__napari-3501 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
In Napari Windows bundle console does not show
## 🐛 Bug
from https://napari.zulipchat.com/#narrow/stream/215289-release/topic/0.2E4.2E12.20bugfix.20release/near/258449904
```
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
~\AppData\Local\Programs\napari\\app\napari\utils\action_manager.py in ()
51 def callable(self, context):
52 if not hasattr(self, '_command_with_context'):
---> 53 self._command_with_context = lambda: call_with_context(
global self._command_with_context = undefined
global call_with_context =
global self.command = undefined
global context = undefined
54 self.command, context
55 )
~\AppData\Local\Programs\napari\\app\napari\utils\action_manager.py in call_with_context(function=>, context=)
40 ]
41 ctx = {k: v for k, v in context.items() if k in context_keys}
---> 42 return function(**ctx)
function = >
ctx = {}
43
44
~\AppData\Local\Programs\napari\\app\napari\_qt\qt_viewer.py in toggle_console_visibility(self=, event=None)
735 """
736 # force instantiation of console if not already instantiated
--> 737 _ = self.console
_ = undefined
self.console = undefined
738
739 viz = not self.dockConsole.isVisible()
~\AppData\Local\Programs\napari\\app\napari\_qt\qt_viewer.py in console(self=)
344 with warnings.catch_warnings():
345 warnings.filterwarnings("ignore")
--> 346 self.console = QtConsole(self.viewer)
self.console = undefined
QtConsole =
self.viewer = Viewer(axes=Axes(visible=False, labels=True, colored=True, dashed=False, arrows=True), camera=Camera(center=(0.0, 0.0, 0.0), zoom=1.0, angles=(0.0, 0.0, 90.0), perspective=0.0, interactive=True), cursor=Cursor(position=(1.0, 1.0), scaled=True, size=1, style=), dims=Dims(ndim=2, ndisplay=2, last_used=0, range=((0, 2, 1), (0, 2, 1)), current_step=(0, 0), order=(0, 1), axis_labels=('0', '1')), grid=GridCanvas(stride=1, shape=(-1, -1), enabled=False), layers=[], scale_bar=ScaleBar(visible=False, colored=False, ticks=True, position=, font_size=10.0, unit=None), text_overlay=TextOverlay(visible=False, color= (4,) float64, font_size=10.0, position=, text=''), help='', status='Ready', tooltip=Tooltip(visible=False, text=''), theme='light', title='napari', mouse_move_callbacks=[], mouse_drag_callbacks=[], mouse_double_click_callbacks=[], mouse_wheel_callbacks=[], _persisted_mouse_event={}, _mouse_drag_gen={}, _mouse_wheel_gen={}, keymap={'Control-Shift-C': >})
347 self.console.push(
348 {'napari': napari, 'action_manager': action_manager}
~\AppData\Local\Programs\napari\\app_packages\napari_console\qt_console.py in __init__(self=, viewer=Viewer(axes=Axes(visible=False, labels=True, col...QtViewer(0x2050140a150) at 0x000002050214BE40>>}))
90 # kernel.
91 kernel_manager = QtInProcessKernelManager()
---> 92 kernel_manager.start_kernel(show_banner=False)
kernel_manager.start_kernel = >
global show_banner = undefined
93 kernel_manager.kernel.gui = 'qt'
94
~\AppData\Local\Programs\napari\\app_packages\ipykernel\inprocess\manager.py in start_kernel(self=, **kwds={'show_banner': False})
44 def start_kernel(self, **kwds):
45 from ipykernel.inprocess.ipkernel import InProcessKernel
---> 46 self.kernel = InProcessKernel(parent=self, session=self.session)
self.kernel = None
InProcessKernel =
global parent = undefined
self =
global session = undefined
self.session =
47
48 def shutdown_kernel(self):
~\AppData\Local\Programs\napari\\app_packages\ipykernel\inprocess\ipkernel.py in __init__(self=, **traits={'parent': , 'session': })
70
71 def __init__(self, **traits):
---> 72 super(InProcessKernel, self).__init__(**traits)
global super = undefined
global InProcessKernel =
self.__init__ = >
traits = {'parent': , 'session': }
73
74 self._underlying_iopub_socket.observe(self._io_dispatch, names=['message_sent'])
~\AppData\Local\Programs\napari\\app_packages\ipykernel\ipkernel.py in __init__(self=, **kwargs={'parent': , 'session': })
87
88 # Initialize the InteractiveShell subclass
---> 89 self.shell = self.shell_class.instance(parent=self,
self.shell = None
self.shell_class.instance = >
global parent = undefined
self =
global profile_dir = undefined
self.profile_dir = None
global user_module = undefined
self.user_module = None
global user_ns = undefined
self.user_ns = None
global kernel = undefined
global compiler_class = undefined
global XCachingCompiler =
90 profile_dir = self.profile_dir,
91 user_module = self.user_module,
~\AppData\Local\Programs\napari\\app_packages\traitlets\config\configurable.py in instance(cls=, *args=(), **kwargs={'compiler_class': , 'kernel': , 'parent': , 'profile_dir': None, 'user_module': None, 'user_ns': None})
538 # Create and save the instance
539 if cls._instance is None:
--> 540 inst = cls(*args, **kwargs)
inst = undefined
cls =
args = ()
kwargs = {'parent': , 'profile_dir': None, 'user_module': None, 'user_ns': None, 'kernel': , 'compiler_class': }
541 # Now make sure that the instance will also be returned by
542 # parent classes' _instance attribute.
~\AppData\Local\Programs\napari\\app_packages\IPython\core\interactiveshell.py in __init__(self=, ipython_dir=None, profile_dir=None, user_module=None, user_ns=None, custom_exceptions=((), None), **kwargs={'compiler_class': , 'kernel': , 'parent': })
680 # because the traceback handlers hardcode the stdout/stderr streams.
681 # This logic in in debugger.Pdb and should eventually be changed.
--> 682 self.init_io()
self.init_io = >
683 self.init_traceback_handlers(custom_exceptions)
684 self.init_prompts()
~\AppData\Local\Programs\napari\\app_packages\IPython\core\interactiveshell.py in init_io(self=)
856 with warnings.catch_warnings():
857 warnings.simplefilter('ignore', DeprecationWarning)
--> 858 io.stdout = io.IOStream(sys.stdout)
global io.stdout = IPython.utils.io.IOStream()
global io.IOStream =
global sys.stdout = None
859 io.stderr = io.IOStream(sys.stderr)
860
~\AppData\Local\Programs\napari\\app_packages\IPython\utils\io.py in __init__(self= instance, stream=None, fallback=None)
29 stream = fallback
30 else:
---> 31 raise ValueError("fallback required, but not specified")
global ValueError = undefined
32 self.stream = stream
33 self._swrite = stream.write
ValueError: fallback required, but not specified
```
## To Reproduce
Steps to reproduce the behavior:
1. install last bundle on windows
2. open napari
3. open console
## Expected behavior
working console
## Environment
```
napari: 0.4.12rc1
Platform: Windows-10-10.0.19041-SP0
Python: 3.8.3 (tags/v3.8.3:6f8c832, May 13 2020, 22:37:02) [MSC v.1924 64 bit (AMD64)]
Qt: 5.15.2
PySide2: 5.15.2
NumPy: 1.19.3
SciPy: 1.7.1
Dask: 2021.09.1
VisPy: 0.9.1
OpenGL:
- GL version: 4.6.0 NVIDIA 456.71
- MAX_TEXTURE_SIZE: 32768
Screens:
- screen 1: resolution 2560x1440, scale 1.0
- screen 2: resolution 1920x1080, scale 1.0
- screen 3: resolution 1920x1080, scale 1.0
Plugins:
- console: 0.0.4
- scikit-image
- svg: 0.1.5
```
## Additional context
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bundle.py`
Content:
```
1 import configparser
2 import os
3 import platform
4 import re
5 import shutil
6 import subprocess
7 import sys
8 import time
9 from contextlib import contextmanager
10
11 import tomlkit
12
13 APP = 'napari'
14
15 # EXTRA_REQS will be added to the bundle, in addition to those specified in
16 # setup.cfg. To add additional packages to the bundle, or to override any of
17 # the packages listed here or in `setup.cfg, use the `--add` command line
18 # argument with a series of "pip install" style strings when running this file.
19 # For example, the following will ADD ome-zarr, and CHANGE the version of
20 # PySide2:
21 # python bundle.py --add 'PySide2==5.15.0' 'ome-zarr'
22
23 # This is now defined in setup.cfg "options.extras_require.bundle_run"
24 # EXTRA_REQS = []
25
26 WINDOWS = os.name == 'nt'
27 MACOS = sys.platform == 'darwin'
28 LINUX = sys.platform.startswith("linux")
29 HERE = os.path.abspath(os.path.dirname(__file__))
30 PYPROJECT_TOML = os.path.join(HERE, 'pyproject.toml')
31 SETUP_CFG = os.path.join(HERE, 'setup.cfg')
32
33
34 if WINDOWS:
35 BUILD_DIR = os.path.join(HERE, 'windows')
36 elif LINUX:
37 BUILD_DIR = os.path.join(HERE, 'linux')
38 elif MACOS:
39 BUILD_DIR = os.path.join(HERE, 'macOS')
40 APP_DIR = os.path.join(BUILD_DIR, APP, f'{APP}.app')
41
42
43 with open(os.path.join(HERE, "napari", "_version.py")) as f:
44 match = re.search(r'version\s?=\s?\'([^\']+)', f.read())
45 if match:
46 VERSION = match.groups()[0].split('+')[0]
47
48
49 @contextmanager
50 def patched_toml():
51 parser = configparser.ConfigParser()
52 parser.read(SETUP_CFG)
53 requirements = parser.get("options", "install_requires").splitlines()
54 requirements = [r.split('#')[0].strip() for r in requirements if r]
55
56 with open(PYPROJECT_TOML) as f:
57 original_toml = f.read()
58
59 toml = tomlkit.parse(original_toml)
60
61 # Initialize EXTRA_REQS from setup.cfg 'options.extras_require.bundle_run'
62 bundle_run = parser.get("options.extras_require", "bundle_run")
63 EXTRA_REQS = [
64 requirement.split('#')[0].strip()
65 for requirement in bundle_run.splitlines()
66 if requirement
67 ]
68
69 # parse command line arguments
70 if '--add' in sys.argv:
71 for item in sys.argv[sys.argv.index('--add') + 1 :]:
72 if item.startswith('-'):
73 break
74 EXTRA_REQS.append(item)
75
76 for item in EXTRA_REQS:
77 _base = re.split('<|>|=', item, maxsplit=1)[0]
78 for r in requirements:
79 if r.startswith(_base):
80 requirements.remove(r)
81 break
82 if _base.lower().startswith('pyqt5'):
83 try:
84 i = next(x for x in requirements if x.startswith('PySide'))
85 requirements.remove(i)
86 except StopIteration:
87 pass
88
89 requirements += EXTRA_REQS
90
91 toml['tool']['briefcase']['app'][APP]['requires'] = requirements
92 toml['tool']['briefcase']['version'] = VERSION
93
94 print("patching pyproject.toml to version: ", VERSION)
95 print(
96 "patching pyproject.toml requirements to:",
97 *toml['tool']['briefcase']['app'][APP]['requires'],
98 sep="\n ",
99 )
100
101 if MACOS:
102 # Workaround https://github.com/napari/napari/issues/2965
103 # Pin revisions to releases _before_ they switched to static libs
104 revision = {
105 (3, 6): 'b11',
106 (3, 7): 'b5',
107 (3, 8): 'b4',
108 (3, 9): 'b1',
109 }[sys.version_info[:2]]
110 app_table = toml['tool']['briefcase']['app'][APP]
111 app_table.add('macOS', tomlkit.table())
112 app_table['macOS']['support_revision'] = revision
113 print(
114 "patching pyproject.toml to pin support package to revision:",
115 revision,
116 )
117
118 with open(PYPROJECT_TOML, 'w') as f:
119 f.write(tomlkit.dumps(toml))
120
121 try:
122 yield
123 finally:
124 with open(PYPROJECT_TOML, 'w') as f:
125 f.write(original_toml)
126
127
128 def patch_dmgbuild():
129 if not MACOS:
130 return
131 from dmgbuild import core
132
133 with open(core.__file__) as f:
134 src = f.read()
135 with open(core.__file__, 'w') as f:
136 f.write(
137 src.replace(
138 "shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)",
139 "shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)"
140 ";time.sleep(30)",
141 )
142 )
143 print("patched dmgbuild.core")
144
145
146 def add_site_packages_to_path():
147 # on mac, make sure the site-packages folder exists even before the user
148 # has pip installed, so it is in sys.path on the first run
149 # (otherwise, newly installed plugins will not be detected until restart)
150 if MACOS:
151 pkgs_dir = os.path.join(
152 APP_DIR,
153 'Contents',
154 'Resources',
155 'Support',
156 'lib',
157 f'python{sys.version_info.major}.{sys.version_info.minor}',
158 'site-packages',
159 )
160 os.makedirs(pkgs_dir)
161 print("created site-packages at", pkgs_dir)
162
163 # on windows, briefcase uses a _pth file to determine the sys.path at
164 # runtime. https://docs.python.org/3/using/windows.html#finding-modules
165 # We update that file with the eventual location of pip site-packages
166 elif WINDOWS:
167 py = "".join(map(str, sys.version_info[:2]))
168 python_dir = os.path.join(BUILD_DIR, APP, 'src', 'python')
169 pth = os.path.join(python_dir, f'python{py}._pth')
170 with open(pth, "a") as f:
171 # Append 'hello' at the end of file
172 f.write(".\\\\Lib\\\\site-packages\n")
173 print("added bundled site-packages to", pth)
174
175 pkgs_dir = os.path.join(python_dir, 'Lib', 'site-packages')
176 os.makedirs(pkgs_dir)
177 print("created site-packages at", pkgs_dir)
178 with open(os.path.join(pkgs_dir, 'readme.txt'), 'w') as f:
179 f.write("this is where plugin packages will go")
180
181
182 def patch_wxs():
183 # must run after briefcase create
184 fname = os.path.join(BUILD_DIR, APP, f'{APP}.wxs')
185
186 if os.path.exists(fname):
187 with open(fname) as f:
188 source = f.read()
189 with open(fname, 'w') as f:
190 f.write(source.replace('pythonw.exe', 'python.exe'))
191 print("patched pythonw.exe -> python.exe")
192
193
194 def patch_python_lib_location():
195 # must run after briefcase create
196 support = os.path.join(
197 BUILD_DIR, APP, APP + ".app", "Contents", "Resources", "Support"
198 )
199 python_resources = os.path.join(support, "Python", "Resources")
200 os.makedirs(python_resources, exist_ok=True)
201 for subdir in ("bin", "lib"):
202 orig = os.path.join(support, subdir)
203 dest = os.path.join(python_resources, subdir)
204 os.symlink("../../" + subdir, dest)
205 print("symlinking", orig, "to", dest)
206
207
208 def patch_environment_variables():
209 os.environ["ARCH"] = architecture()
210
211
212 def architecture():
213 arch = platform.machine() or "generic"
214 # Try to canonicalize across OS
215 replacements = {
216 "amd64": "x86_64",
217 }
218 return replacements.get(arch.lower(), arch)
219
220
221 def make_zip():
222 import glob
223 import zipfile
224
225 if WINDOWS:
226 ext, OS = '*.msi', 'Windows'
227 elif LINUX:
228 ext, OS = '*.AppImage', 'Linux'
229 elif MACOS:
230 ext, OS = '*.dmg', 'macOS'
231 artifact = glob.glob(os.path.join(BUILD_DIR, ext))[0]
232 dest = f'napari-{VERSION}-{OS}-{architecture()}.zip'
233
234 with zipfile.ZipFile(dest, 'w', zipfile.ZIP_DEFLATED) as zf:
235 zf.write(artifact, arcname=os.path.basename(artifact))
236 print("created zipfile: ", dest)
237 return dest
238
239
240 def clean():
241 shutil.rmtree(BUILD_DIR, ignore_errors=True)
242
243
244 def bundle():
245 clean()
246
247 if MACOS:
248 patch_dmgbuild()
249
250 if LINUX:
251 patch_environment_variables()
252
253 # smoke test, and build resources
254 subprocess.check_call([sys.executable, '-m', APP, '--info'])
255
256 # the briefcase calls need to happen while the pyproject toml is patched
257 with patched_toml():
258 # create
259 cmd = ['briefcase', 'create'] + (['--no-docker'] if LINUX else [])
260 subprocess.check_call(cmd)
261
262 time.sleep(0.5)
263
264 add_site_packages_to_path()
265
266 if MACOS:
267 patch_python_lib_location()
268
269 # build
270 cmd = ['briefcase', 'build'] + (['--no-docker'] if LINUX else [])
271 subprocess.check_call(cmd)
272
273 # package
274 cmd = ['briefcase', 'package']
275 cmd += ['--no-sign'] if MACOS else (['--no-docker'] if LINUX else [])
276 subprocess.check_call(cmd)
277
278 # compress
279 dest = make_zip()
280 clean()
281
282 return dest
283
284
285 if __name__ == "__main__":
286 if '--clean' in sys.argv:
287 clean()
288 sys.exit()
289 if '--version' in sys.argv:
290 print(VERSION)
291 sys.exit()
292 if '--arch' in sys.argv:
293 print(architecture())
294 sys.exit()
295 print('created', bundle())
296
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bundle.py b/bundle.py
--- a/bundle.py
+++ b/bundle.py
@@ -263,7 +263,9 @@
add_site_packages_to_path()
- if MACOS:
+ if WINDOWS:
+ patch_wxs()
+ elif MACOS:
patch_python_lib_location()
# build
| {"golden_diff": "diff --git a/bundle.py b/bundle.py\n--- a/bundle.py\n+++ b/bundle.py\n@@ -263,7 +263,9 @@\n \n add_site_packages_to_path()\n \n- if MACOS:\n+ if WINDOWS:\n+ patch_wxs()\n+ elif MACOS:\n patch_python_lib_location()\n \n # build\n", "issue": "In Napari Windows bundle console does not show\n## \ud83d\udc1b Bug\r\nfrom https://napari.zulipchat.com/#narrow/stream/215289-release/topic/0.2E4.2E12.20bugfix.20release/near/258449904\r\n```\r\n---------------------------------------------------------------------------\r\nValueError Traceback (most recent call last)\r\n~\\AppData\\Local\\Programs\\napari\\\\app\\napari\\utils\\action_manager.py in ()\r\n 51 def callable(self, context):\r\n 52 if not hasattr(self, '_command_with_context'):\r\n---> 53 self._command_with_context = lambda: call_with_context(\r\n global self._command_with_context = undefined\r\n global call_with_context = \r\n global self.command = undefined\r\n global context = undefined\r\n 54 self.command, context\r\n 55 )\r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app\\napari\\utils\\action_manager.py in call_with_context(function=>, context=)\r\n 40 ]\r\n 41 ctx = {k: v for k, v in context.items() if k in context_keys}\r\n---> 42 return function(**ctx)\r\n function = >\r\n ctx = {}\r\n 43 \r\n 44 \r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app\\napari\\_qt\\qt_viewer.py in toggle_console_visibility(self=, event=None)\r\n 735 \"\"\"\r\n 736 # force instantiation of console if not already instantiated\r\n--> 737 _ = self.console\r\n _ = undefined\r\n self.console = undefined\r\n 738 \r\n 739 viz = not self.dockConsole.isVisible()\r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app\\napari\\_qt\\qt_viewer.py in console(self=)\r\n 344 with warnings.catch_warnings():\r\n 345 warnings.filterwarnings(\"ignore\")\r\n--> 346 self.console = QtConsole(self.viewer)\r\n self.console = undefined\r\n QtConsole = \r\n self.viewer = Viewer(axes=Axes(visible=False, labels=True, colored=True, dashed=False, arrows=True), camera=Camera(center=(0.0, 0.0, 0.0), zoom=1.0, angles=(0.0, 0.0, 90.0), perspective=0.0, interactive=True), cursor=Cursor(position=(1.0, 1.0), scaled=True, size=1, style=), dims=Dims(ndim=2, ndisplay=2, last_used=0, range=((0, 2, 1), (0, 2, 1)), current_step=(0, 0), order=(0, 1), axis_labels=('0', '1')), grid=GridCanvas(stride=1, shape=(-1, -1), enabled=False), layers=[], scale_bar=ScaleBar(visible=False, colored=False, ticks=True, position=, font_size=10.0, unit=None), text_overlay=TextOverlay(visible=False, color= (4,) float64, font_size=10.0, position=, text=''), help='', status='Ready', tooltip=Tooltip(visible=False, text=''), theme='light', title='napari', mouse_move_callbacks=[], mouse_drag_callbacks=[], mouse_double_click_callbacks=[], mouse_wheel_callbacks=[], _persisted_mouse_event={}, _mouse_drag_gen={}, _mouse_wheel_gen={}, keymap={'Control-Shift-C': >})\r\n 347 self.console.push(\r\n 348 {'napari': napari, 'action_manager': action_manager}\r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app_packages\\napari_console\\qt_console.py in __init__(self=, viewer=Viewer(axes=Axes(visible=False, labels=True, col...QtViewer(0x2050140a150) at 0x000002050214BE40>>}))\r\n 90 # kernel.\r\n 91 kernel_manager = QtInProcessKernelManager()\r\n---> 92 kernel_manager.start_kernel(show_banner=False)\r\n kernel_manager.start_kernel = >\r\n global show_banner = undefined\r\n 93 kernel_manager.kernel.gui = 'qt'\r\n 94 \r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app_packages\\ipykernel\\inprocess\\manager.py in 
start_kernel(self=, **kwds={'show_banner': False})\r\n 44 def start_kernel(self, **kwds):\r\n 45 from ipykernel.inprocess.ipkernel import InProcessKernel\r\n---> 46 self.kernel = InProcessKernel(parent=self, session=self.session)\r\n self.kernel = None\r\n InProcessKernel = \r\n global parent = undefined\r\n self = \r\n global session = undefined\r\n self.session = \r\n 47 \r\n 48 def shutdown_kernel(self):\r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app_packages\\ipykernel\\inprocess\\ipkernel.py in __init__(self=, **traits={'parent': , 'session': })\r\n 70 \r\n 71 def __init__(self, **traits):\r\n---> 72 super(InProcessKernel, self).__init__(**traits)\r\n global super = undefined\r\n global InProcessKernel = \r\n self.__init__ = >\r\n traits = {'parent': , 'session': }\r\n 73 \r\n 74 self._underlying_iopub_socket.observe(self._io_dispatch, names=['message_sent'])\r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app_packages\\ipykernel\\ipkernel.py in __init__(self=, **kwargs={'parent': , 'session': })\r\n 87 \r\n 88 # Initialize the InteractiveShell subclass\r\n---> 89 self.shell = self.shell_class.instance(parent=self,\r\n self.shell = None\r\n self.shell_class.instance = >\r\n global parent = undefined\r\n self = \r\n global profile_dir = undefined\r\n self.profile_dir = None\r\n global user_module = undefined\r\n self.user_module = None\r\n global user_ns = undefined\r\n self.user_ns = None\r\n global kernel = undefined\r\n global compiler_class = undefined\r\n global XCachingCompiler = \r\n 90 profile_dir = self.profile_dir,\r\n 91 user_module = self.user_module,\r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app_packages\\traitlets\\config\\configurable.py in instance(cls=, *args=(), **kwargs={'compiler_class': , 'kernel': , 'parent': , 'profile_dir': None, 'user_module': None, 'user_ns': None})\r\n 538 # Create and save the instance\r\n 539 if cls._instance is None:\r\n--> 540 inst = cls(*args, **kwargs)\r\n inst = undefined\r\n cls = \r\n args = ()\r\n kwargs = {'parent': , 'profile_dir': None, 'user_module': None, 'user_ns': None, 'kernel': , 'compiler_class': }\r\n 541 # Now make sure that the instance will also be returned by\r\n 542 # parent classes' _instance attribute.\r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app_packages\\IPython\\core\\interactiveshell.py in __init__(self=, ipython_dir=None, profile_dir=None, user_module=None, user_ns=None, custom_exceptions=((), None), **kwargs={'compiler_class': , 'kernel': , 'parent': })\r\n 680 # because the traceback handlers hardcode the stdout/stderr streams.\r\n 681 # This logic in in debugger.Pdb and should eventually be changed.\r\n--> 682 self.init_io()\r\n self.init_io = >\r\n 683 self.init_traceback_handlers(custom_exceptions)\r\n 684 self.init_prompts()\r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app_packages\\IPython\\core\\interactiveshell.py in init_io(self=)\r\n 856 with warnings.catch_warnings():\r\n 857 warnings.simplefilter('ignore', DeprecationWarning)\r\n--> 858 io.stdout = io.IOStream(sys.stdout)\r\n global io.stdout = IPython.utils.io.IOStream()\r\n global io.IOStream = \r\n global sys.stdout = None\r\n 859 io.stderr = io.IOStream(sys.stderr)\r\n 860 \r\n\r\n~\\AppData\\Local\\Programs\\napari\\\\app_packages\\IPython\\utils\\io.py in __init__(self= instance, stream=None, fallback=None)\r\n 29 stream = fallback\r\n 30 else:\r\n---> 31 raise ValueError(\"fallback required, but not specified\")\r\n global ValueError = undefined\r\n 32 self.stream = stream\r\n 33 self._swrite = stream.write\r\n\r\nValueError: 
fallback required, but not specified\r\n```\r\n\r\n## To Reproduce\r\n\r\nSteps to reproduce the behavior:\r\n\r\n1. install last bundle on windows\r\n2. open napari\r\n3. open console\r\n\r\n<!-- If you have a code sample, error messages, stack traces, please provide it here as well -->\r\n\r\n## Expected behavior\r\n\r\n<!-- A clear and concise description of what you expected to happen. -->\r\nworking console\r\n\r\n## Environment\r\n```\r\nnapari: 0.4.12rc1\r\nPlatform: Windows-10-10.0.19041-SP0\r\nPython: 3.8.3 (tags/v3.8.3:6f8c832, May 13 2020, 22:37:02) [MSC v.1924 64 bit (AMD64)]\r\nQt: 5.15.2\r\nPySide2: 5.15.2\r\nNumPy: 1.19.3\r\nSciPy: 1.7.1\r\nDask: 2021.09.1\r\nVisPy: 0.9.1\r\n\r\nOpenGL:\r\n- GL version: 4.6.0 NVIDIA 456.71\r\n- MAX_TEXTURE_SIZE: 32768\r\n\r\nScreens:\r\n- screen 1: resolution 2560x1440, scale 1.0\r\n- screen 2: resolution 1920x1080, scale 1.0\r\n- screen 3: resolution 1920x1080, scale 1.0\r\n\r\nPlugins:\r\n- console: 0.0.4\r\n- scikit-image\r\n- svg: 0.1.5\r\n```\r\n\r\n## Additional context\r\n\r\n<!-- Add any other context about the problem here. -->\r\n\n", "before_files": [{"content": "import configparser\nimport os\nimport platform\nimport re\nimport shutil\nimport subprocess\nimport sys\nimport time\nfrom contextlib import contextmanager\n\nimport tomlkit\n\nAPP = 'napari'\n\n# EXTRA_REQS will be added to the bundle, in addition to those specified in\n# setup.cfg. To add additional packages to the bundle, or to override any of\n# the packages listed here or in `setup.cfg, use the `--add` command line\n# argument with a series of \"pip install\" style strings when running this file.\n# For example, the following will ADD ome-zarr, and CHANGE the version of\n# PySide2:\n# python bundle.py --add 'PySide2==5.15.0' 'ome-zarr'\n\n# This is now defined in setup.cfg \"options.extras_require.bundle_run\"\n# EXTRA_REQS = []\n\nWINDOWS = os.name == 'nt'\nMACOS = sys.platform == 'darwin'\nLINUX = sys.platform.startswith(\"linux\")\nHERE = os.path.abspath(os.path.dirname(__file__))\nPYPROJECT_TOML = os.path.join(HERE, 'pyproject.toml')\nSETUP_CFG = os.path.join(HERE, 'setup.cfg')\n\n\nif WINDOWS:\n BUILD_DIR = os.path.join(HERE, 'windows')\nelif LINUX:\n BUILD_DIR = os.path.join(HERE, 'linux')\nelif MACOS:\n BUILD_DIR = os.path.join(HERE, 'macOS')\n APP_DIR = os.path.join(BUILD_DIR, APP, f'{APP}.app')\n\n\nwith open(os.path.join(HERE, \"napari\", \"_version.py\")) as f:\n match = re.search(r'version\\s?=\\s?\\'([^\\']+)', f.read())\n if match:\n VERSION = match.groups()[0].split('+')[0]\n\n\n@contextmanager\ndef patched_toml():\n parser = configparser.ConfigParser()\n parser.read(SETUP_CFG)\n requirements = parser.get(\"options\", \"install_requires\").splitlines()\n requirements = [r.split('#')[0].strip() for r in requirements if r]\n\n with open(PYPROJECT_TOML) as f:\n original_toml = f.read()\n\n toml = tomlkit.parse(original_toml)\n\n # Initialize EXTRA_REQS from setup.cfg 'options.extras_require.bundle_run'\n bundle_run = parser.get(\"options.extras_require\", \"bundle_run\")\n EXTRA_REQS = [\n requirement.split('#')[0].strip()\n for requirement in bundle_run.splitlines()\n if requirement\n ]\n\n # parse command line arguments\n if '--add' in sys.argv:\n for item in sys.argv[sys.argv.index('--add') + 1 :]:\n if item.startswith('-'):\n break\n EXTRA_REQS.append(item)\n\n for item in EXTRA_REQS:\n _base = re.split('<|>|=', item, maxsplit=1)[0]\n for r in requirements:\n if r.startswith(_base):\n requirements.remove(r)\n break\n if 
_base.lower().startswith('pyqt5'):\n try:\n i = next(x for x in requirements if x.startswith('PySide'))\n requirements.remove(i)\n except StopIteration:\n pass\n\n requirements += EXTRA_REQS\n\n toml['tool']['briefcase']['app'][APP]['requires'] = requirements\n toml['tool']['briefcase']['version'] = VERSION\n\n print(\"patching pyproject.toml to version: \", VERSION)\n print(\n \"patching pyproject.toml requirements to:\",\n *toml['tool']['briefcase']['app'][APP]['requires'],\n sep=\"\\n \",\n )\n\n if MACOS:\n # Workaround https://github.com/napari/napari/issues/2965\n # Pin revisions to releases _before_ they switched to static libs\n revision = {\n (3, 6): 'b11',\n (3, 7): 'b5',\n (3, 8): 'b4',\n (3, 9): 'b1',\n }[sys.version_info[:2]]\n app_table = toml['tool']['briefcase']['app'][APP]\n app_table.add('macOS', tomlkit.table())\n app_table['macOS']['support_revision'] = revision\n print(\n \"patching pyproject.toml to pin support package to revision:\",\n revision,\n )\n\n with open(PYPROJECT_TOML, 'w') as f:\n f.write(tomlkit.dumps(toml))\n\n try:\n yield\n finally:\n with open(PYPROJECT_TOML, 'w') as f:\n f.write(original_toml)\n\n\ndef patch_dmgbuild():\n if not MACOS:\n return\n from dmgbuild import core\n\n with open(core.__file__) as f:\n src = f.read()\n with open(core.__file__, 'w') as f:\n f.write(\n src.replace(\n \"shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)\",\n \"shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)\"\n \";time.sleep(30)\",\n )\n )\n print(\"patched dmgbuild.core\")\n\n\ndef add_site_packages_to_path():\n # on mac, make sure the site-packages folder exists even before the user\n # has pip installed, so it is in sys.path on the first run\n # (otherwise, newly installed plugins will not be detected until restart)\n if MACOS:\n pkgs_dir = os.path.join(\n APP_DIR,\n 'Contents',\n 'Resources',\n 'Support',\n 'lib',\n f'python{sys.version_info.major}.{sys.version_info.minor}',\n 'site-packages',\n )\n os.makedirs(pkgs_dir)\n print(\"created site-packages at\", pkgs_dir)\n\n # on windows, briefcase uses a _pth file to determine the sys.path at\n # runtime. 
https://docs.python.org/3/using/windows.html#finding-modules\n # We update that file with the eventual location of pip site-packages\n elif WINDOWS:\n py = \"\".join(map(str, sys.version_info[:2]))\n python_dir = os.path.join(BUILD_DIR, APP, 'src', 'python')\n pth = os.path.join(python_dir, f'python{py}._pth')\n with open(pth, \"a\") as f:\n # Append 'hello' at the end of file\n f.write(\".\\\\\\\\Lib\\\\\\\\site-packages\\n\")\n print(\"added bundled site-packages to\", pth)\n\n pkgs_dir = os.path.join(python_dir, 'Lib', 'site-packages')\n os.makedirs(pkgs_dir)\n print(\"created site-packages at\", pkgs_dir)\n with open(os.path.join(pkgs_dir, 'readme.txt'), 'w') as f:\n f.write(\"this is where plugin packages will go\")\n\n\ndef patch_wxs():\n # must run after briefcase create\n fname = os.path.join(BUILD_DIR, APP, f'{APP}.wxs')\n\n if os.path.exists(fname):\n with open(fname) as f:\n source = f.read()\n with open(fname, 'w') as f:\n f.write(source.replace('pythonw.exe', 'python.exe'))\n print(\"patched pythonw.exe -> python.exe\")\n\n\ndef patch_python_lib_location():\n # must run after briefcase create\n support = os.path.join(\n BUILD_DIR, APP, APP + \".app\", \"Contents\", \"Resources\", \"Support\"\n )\n python_resources = os.path.join(support, \"Python\", \"Resources\")\n os.makedirs(python_resources, exist_ok=True)\n for subdir in (\"bin\", \"lib\"):\n orig = os.path.join(support, subdir)\n dest = os.path.join(python_resources, subdir)\n os.symlink(\"../../\" + subdir, dest)\n print(\"symlinking\", orig, \"to\", dest)\n\n\ndef patch_environment_variables():\n os.environ[\"ARCH\"] = architecture()\n\n\ndef architecture():\n arch = platform.machine() or \"generic\"\n # Try to canonicalize across OS\n replacements = {\n \"amd64\": \"x86_64\",\n }\n return replacements.get(arch.lower(), arch)\n\n\ndef make_zip():\n import glob\n import zipfile\n\n if WINDOWS:\n ext, OS = '*.msi', 'Windows'\n elif LINUX:\n ext, OS = '*.AppImage', 'Linux'\n elif MACOS:\n ext, OS = '*.dmg', 'macOS'\n artifact = glob.glob(os.path.join(BUILD_DIR, ext))[0]\n dest = f'napari-{VERSION}-{OS}-{architecture()}.zip'\n\n with zipfile.ZipFile(dest, 'w', zipfile.ZIP_DEFLATED) as zf:\n zf.write(artifact, arcname=os.path.basename(artifact))\n print(\"created zipfile: \", dest)\n return dest\n\n\ndef clean():\n shutil.rmtree(BUILD_DIR, ignore_errors=True)\n\n\ndef bundle():\n clean()\n\n if MACOS:\n patch_dmgbuild()\n\n if LINUX:\n patch_environment_variables()\n\n # smoke test, and build resources\n subprocess.check_call([sys.executable, '-m', APP, '--info'])\n\n # the briefcase calls need to happen while the pyproject toml is patched\n with patched_toml():\n # create\n cmd = ['briefcase', 'create'] + (['--no-docker'] if LINUX else [])\n subprocess.check_call(cmd)\n\n time.sleep(0.5)\n\n add_site_packages_to_path()\n\n if MACOS:\n patch_python_lib_location()\n\n # build\n cmd = ['briefcase', 'build'] + (['--no-docker'] if LINUX else [])\n subprocess.check_call(cmd)\n\n # package\n cmd = ['briefcase', 'package']\n cmd += ['--no-sign'] if MACOS else (['--no-docker'] if LINUX else [])\n subprocess.check_call(cmd)\n\n # compress\n dest = make_zip()\n clean()\n\n return dest\n\n\nif __name__ == \"__main__\":\n if '--clean' in sys.argv:\n clean()\n sys.exit()\n if '--version' in sys.argv:\n print(VERSION)\n sys.exit()\n if '--arch' in sys.argv:\n print(architecture())\n sys.exit()\n print('created', bundle())\n", "path": "bundle.py"}], "after_files": [{"content": "import configparser\nimport os\nimport platform\nimport 
re\nimport shutil\nimport subprocess\nimport sys\nimport time\nfrom contextlib import contextmanager\n\nimport tomlkit\n\nAPP = 'napari'\n\n# EXTRA_REQS will be added to the bundle, in addition to those specified in\n# setup.cfg. To add additional packages to the bundle, or to override any of\n# the packages listed here or in `setup.cfg, use the `--add` command line\n# argument with a series of \"pip install\" style strings when running this file.\n# For example, the following will ADD ome-zarr, and CHANGE the version of\n# PySide2:\n# python bundle.py --add 'PySide2==5.15.0' 'ome-zarr'\n\n# This is now defined in setup.cfg \"options.extras_require.bundle_run\"\n# EXTRA_REQS = []\n\nWINDOWS = os.name == 'nt'\nMACOS = sys.platform == 'darwin'\nLINUX = sys.platform.startswith(\"linux\")\nHERE = os.path.abspath(os.path.dirname(__file__))\nPYPROJECT_TOML = os.path.join(HERE, 'pyproject.toml')\nSETUP_CFG = os.path.join(HERE, 'setup.cfg')\n\n\nif WINDOWS:\n BUILD_DIR = os.path.join(HERE, 'windows')\nelif LINUX:\n BUILD_DIR = os.path.join(HERE, 'linux')\nelif MACOS:\n BUILD_DIR = os.path.join(HERE, 'macOS')\n APP_DIR = os.path.join(BUILD_DIR, APP, f'{APP}.app')\n\n\nwith open(os.path.join(HERE, \"napari\", \"_version.py\")) as f:\n match = re.search(r'version\\s?=\\s?\\'([^\\']+)', f.read())\n if match:\n VERSION = match.groups()[0].split('+')[0]\n\n\n@contextmanager\ndef patched_toml():\n parser = configparser.ConfigParser()\n parser.read(SETUP_CFG)\n requirements = parser.get(\"options\", \"install_requires\").splitlines()\n requirements = [r.split('#')[0].strip() for r in requirements if r]\n\n with open(PYPROJECT_TOML) as f:\n original_toml = f.read()\n\n toml = tomlkit.parse(original_toml)\n\n # Initialize EXTRA_REQS from setup.cfg 'options.extras_require.bundle_run'\n bundle_run = parser.get(\"options.extras_require\", \"bundle_run\")\n EXTRA_REQS = [\n requirement.split('#')[0].strip()\n for requirement in bundle_run.splitlines()\n if requirement\n ]\n\n # parse command line arguments\n if '--add' in sys.argv:\n for item in sys.argv[sys.argv.index('--add') + 1 :]:\n if item.startswith('-'):\n break\n EXTRA_REQS.append(item)\n\n for item in EXTRA_REQS:\n _base = re.split('<|>|=', item, maxsplit=1)[0]\n for r in requirements:\n if r.startswith(_base):\n requirements.remove(r)\n break\n if _base.lower().startswith('pyqt5'):\n try:\n i = next(x for x in requirements if x.startswith('PySide'))\n requirements.remove(i)\n except StopIteration:\n pass\n\n requirements += EXTRA_REQS\n\n toml['tool']['briefcase']['app'][APP]['requires'] = requirements\n toml['tool']['briefcase']['version'] = VERSION\n\n print(\"patching pyproject.toml to version: \", VERSION)\n print(\n \"patching pyproject.toml requirements to:\",\n *toml['tool']['briefcase']['app'][APP]['requires'],\n sep=\"\\n \",\n )\n\n if MACOS:\n # Workaround https://github.com/napari/napari/issues/2965\n # Pin revisions to releases _before_ they switched to static libs\n revision = {\n (3, 6): 'b11',\n (3, 7): 'b5',\n (3, 8): 'b4',\n (3, 9): 'b1',\n }[sys.version_info[:2]]\n app_table = toml['tool']['briefcase']['app'][APP]\n app_table.add('macOS', tomlkit.table())\n app_table['macOS']['support_revision'] = revision\n print(\n \"patching pyproject.toml to pin support package to revision:\",\n revision,\n )\n\n with open(PYPROJECT_TOML, 'w') as f:\n f.write(tomlkit.dumps(toml))\n\n try:\n yield\n finally:\n with open(PYPROJECT_TOML, 'w') as f:\n f.write(original_toml)\n\n\ndef patch_dmgbuild():\n if not MACOS:\n return\n from dmgbuild import 
core\n\n with open(core.__file__) as f:\n src = f.read()\n with open(core.__file__, 'w') as f:\n f.write(\n src.replace(\n \"shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)\",\n \"shutil.rmtree(os.path.join(mount_point, '.Trashes'), True)\"\n \";time.sleep(30)\",\n )\n )\n print(\"patched dmgbuild.core\")\n\n\ndef add_site_packages_to_path():\n # on mac, make sure the site-packages folder exists even before the user\n # has pip installed, so it is in sys.path on the first run\n # (otherwise, newly installed plugins will not be detected until restart)\n if MACOS:\n pkgs_dir = os.path.join(\n APP_DIR,\n 'Contents',\n 'Resources',\n 'Support',\n 'lib',\n f'python{sys.version_info.major}.{sys.version_info.minor}',\n 'site-packages',\n )\n os.makedirs(pkgs_dir)\n print(\"created site-packages at\", pkgs_dir)\n\n # on windows, briefcase uses a _pth file to determine the sys.path at\n # runtime. https://docs.python.org/3/using/windows.html#finding-modules\n # We update that file with the eventual location of pip site-packages\n elif WINDOWS:\n py = \"\".join(map(str, sys.version_info[:2]))\n python_dir = os.path.join(BUILD_DIR, APP, 'src', 'python')\n pth = os.path.join(python_dir, f'python{py}._pth')\n with open(pth, \"a\") as f:\n # Append 'hello' at the end of file\n f.write(\".\\\\\\\\Lib\\\\\\\\site-packages\\n\")\n print(\"added bundled site-packages to\", pth)\n\n pkgs_dir = os.path.join(python_dir, 'Lib', 'site-packages')\n os.makedirs(pkgs_dir)\n print(\"created site-packages at\", pkgs_dir)\n with open(os.path.join(pkgs_dir, 'readme.txt'), 'w') as f:\n f.write(\"this is where plugin packages will go\")\n\n\ndef patch_wxs():\n # must run after briefcase create\n fname = os.path.join(BUILD_DIR, APP, f'{APP}.wxs')\n\n if os.path.exists(fname):\n with open(fname) as f:\n source = f.read()\n with open(fname, 'w') as f:\n f.write(source.replace('pythonw.exe', 'python.exe'))\n print(\"patched pythonw.exe -> python.exe\")\n\n\ndef patch_python_lib_location():\n # must run after briefcase create\n support = os.path.join(\n BUILD_DIR, APP, APP + \".app\", \"Contents\", \"Resources\", \"Support\"\n )\n python_resources = os.path.join(support, \"Python\", \"Resources\")\n os.makedirs(python_resources, exist_ok=True)\n for subdir in (\"bin\", \"lib\"):\n orig = os.path.join(support, subdir)\n dest = os.path.join(python_resources, subdir)\n os.symlink(\"../../\" + subdir, dest)\n print(\"symlinking\", orig, \"to\", dest)\n\n\ndef patch_environment_variables():\n os.environ[\"ARCH\"] = architecture()\n\n\ndef architecture():\n arch = platform.machine() or \"generic\"\n # Try to canonicalize across OS\n replacements = {\n \"amd64\": \"x86_64\",\n }\n return replacements.get(arch.lower(), arch)\n\n\ndef make_zip():\n import glob\n import zipfile\n\n if WINDOWS:\n ext, OS = '*.msi', 'Windows'\n elif LINUX:\n ext, OS = '*.AppImage', 'Linux'\n elif MACOS:\n ext, OS = '*.dmg', 'macOS'\n artifact = glob.glob(os.path.join(BUILD_DIR, ext))[0]\n dest = f'napari-{VERSION}-{OS}-{architecture()}.zip'\n\n with zipfile.ZipFile(dest, 'w', zipfile.ZIP_DEFLATED) as zf:\n zf.write(artifact, arcname=os.path.basename(artifact))\n print(\"created zipfile: \", dest)\n return dest\n\n\ndef clean():\n shutil.rmtree(BUILD_DIR, ignore_errors=True)\n\n\ndef bundle():\n clean()\n\n if MACOS:\n patch_dmgbuild()\n\n if LINUX:\n patch_environment_variables()\n\n # smoke test, and build resources\n subprocess.check_call([sys.executable, '-m', APP, '--info'])\n\n # the briefcase calls need to happen while the pyproject toml 
is patched\n with patched_toml():\n # create\n cmd = ['briefcase', 'create'] + (['--no-docker'] if LINUX else [])\n subprocess.check_call(cmd)\n\n time.sleep(0.5)\n\n add_site_packages_to_path()\n\n if WINDOWS:\n patch_wxs()\n elif MACOS:\n patch_python_lib_location()\n\n # build\n cmd = ['briefcase', 'build'] + (['--no-docker'] if LINUX else [])\n subprocess.check_call(cmd)\n\n # package\n cmd = ['briefcase', 'package']\n cmd += ['--no-sign'] if MACOS else (['--no-docker'] if LINUX else [])\n subprocess.check_call(cmd)\n\n # compress\n dest = make_zip()\n clean()\n\n return dest\n\n\nif __name__ == \"__main__\":\n if '--clean' in sys.argv:\n clean()\n sys.exit()\n if '--version' in sys.argv:\n print(VERSION)\n sys.exit()\n if '--arch' in sys.argv:\n print(architecture())\n sys.exit()\n print('created', bundle())\n", "path": "bundle.py"}]} |
gh_patches_debug_129 | rasdani/github-patches | git_diff | jupyterhub__jupyterhub-263 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Single user server launch is broken
I think that #261 broke the launching of the single user server. I am seeing the following errors in the nbgrader tests:
```
Traceback (most recent call last):
File "/Users/jhamrick/.virtualenvs/nbgrader/bin/jupyterhub-singleuser", line 6, in <module>
exec(compile(open(__file__).read(), __file__, 'exec'))
File "/Users/jhamrick/project/tools/jupyterhub/scripts/jupyterhub-singleuser", line 4, in <module>
main()
File "/Users/jhamrick/project/tools/jupyterhub/jupyterhub/singleuser.py", line 221, in main
return SingleUserNotebookApp.launch_instance()
File "/Users/jhamrick/.virtualenvs/nbgrader/lib/python3.4/site-packages/IPython/config/application.py", line 573, in launch_instance
app.initialize(argv)
File "<string>", line 2, in initialize
File "/Users/jhamrick/.virtualenvs/nbgrader/lib/python3.4/site-packages/IPython/config/application.py", line 75, in catch_config_error
return method(app, *args, **kwargs)
File "/Users/jhamrick/.virtualenvs/nbgrader/lib/python3.4/site-packages/IPython/html/notebookapp.py", line 1015, in initialize
self.init_webapp()
File "/Users/jhamrick/project/tools/jupyterhub/jupyterhub/singleuser.py", line 191, in init_webapp
s['user'] = self.user
File "/Users/jhamrick/.virtualenvs/nbgrader/lib/python3.4/site-packages/traitlets/traitlets.py", line 438, in __get__
% (self.name, obj))
traitlets.traitlets.TraitError: No default value found for None trait of <jupyterhub.singleuser.SingleUserNotebookApp object at 0x102953b00>
```
If I revert to the version of jupyterhub prior to that PR, this error does not occur. @epifanio reported on gitter seeing the same thing as well, so I don't think it's isolated to nbgrader.
Given the error message, I suspect this has to do with ipython/traitlets#39 and/or ipython/traitlets#40 though I haven't actually tested it. I tried giving the `user` trait a default value but it did not seem to fix the error. I will try to do a bit more debugging, but I fear I don't really understand the internals of traitlets well enough to know exactly what's going on here.
Ping @takluyver and @minrk ?
--- END ISSUE ---
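Editorial aside (not part of the quoted issue): the import-level fix recorded later in this entry swaps the trait imports back to IPython's bundled implementation, which suggests the root cause is a mix of two trait systems on one class — `SingleUserNotebookApp` declares `user` with the standalone `traitlets` package while `NotebookApp` (IPython 3.x) is built on `IPython.utils.traitlets`, so the foreign descriptor presumably never gets its name/default wired up, hence the "None trait" TraitError. A hedged sketch of the change (only meaningful in that 2015-era environment):

```python
# before -- fails under IPython 3.x when the pre-release standalone traitlets is installed
from traitlets import (
    Integer,
    Unicode,
    CUnicode,
)

# after -- the recorded fix: use the trait types bundled with IPython 3.x,
# matching the base classes NotebookApp itself is built on
from IPython.utils.traitlets import (
    Integer,
    Unicode,
    CUnicode,
)
```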
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jupyterhub/singleuser.py`
Content:
```
1 #!/usr/bin/env python3
2 """Extend regular notebook server to be aware of multiuser things."""
3
4 # Copyright (c) Jupyter Development Team.
5 # Distributed under the terms of the Modified BSD License.
6
7 import os
8 try:
9 from urllib.parse import quote
10 except ImportError:
11 # PY2 Compat
12 from urllib import quote
13
14 import requests
15 from jinja2 import ChoiceLoader, FunctionLoader
16
17 from tornado import ioloop
18 from tornado.web import HTTPError
19
20 from traitlets import (
21 Integer,
22 Unicode,
23 CUnicode,
24 )
25
26 from IPython.html.notebookapp import NotebookApp, aliases as notebook_aliases
27 from IPython.html.auth.login import LoginHandler
28 from IPython.html.auth.logout import LogoutHandler
29
30 from IPython.html.utils import url_path_join
31
32
33 from distutils.version import LooseVersion as V
34
35 import IPython
36 if V(IPython.__version__) < V('3.0'):
37 raise ImportError("JupyterHub Requires IPython >= 3.0, found %s" % IPython.__version__)
38
39 # Define two methods to attach to AuthenticatedHandler,
40 # which authenticate via the central auth server.
41
42 class JupyterHubLoginHandler(LoginHandler):
43 @staticmethod
44 def login_available(settings):
45 return True
46
47 @staticmethod
48 def verify_token(self, cookie_name, encrypted_cookie):
49 """method for token verification"""
50 cookie_cache = self.settings['cookie_cache']
51 if encrypted_cookie in cookie_cache:
52 # we've seen this token before, don't ask upstream again
53 return cookie_cache[encrypted_cookie]
54
55 hub_api_url = self.settings['hub_api_url']
56 hub_api_key = self.settings['hub_api_key']
57 r = requests.get(url_path_join(
58 hub_api_url, "authorizations/cookie", cookie_name, quote(encrypted_cookie, safe=''),
59 ),
60 headers = {'Authorization' : 'token %s' % hub_api_key},
61 )
62 if r.status_code == 404:
63 data = None
64 elif r.status_code == 403:
65 self.log.error("I don't have permission to verify cookies, my auth token may have expired: [%i] %s", r.status_code, r.reason)
66 raise HTTPError(500, "Permission failure checking authorization, I may need to be restarted")
67 elif r.status_code >= 500:
68 self.log.error("Upstream failure verifying auth token: [%i] %s", r.status_code, r.reason)
69 raise HTTPError(502, "Failed to check authorization (upstream problem)")
70 elif r.status_code >= 400:
71 self.log.warn("Failed to check authorization: [%i] %s", r.status_code, r.reason)
72 raise HTTPError(500, "Failed to check authorization")
73 else:
74 data = r.json()
75 cookie_cache[encrypted_cookie] = data
76 return data
77
78 @staticmethod
79 def get_user(self):
80 """alternative get_current_user to query the central server"""
81 # only allow this to be called once per handler
82 # avoids issues if an error is raised,
83 # since this may be called again when trying to render the error page
84 if hasattr(self, '_cached_user'):
85 return self._cached_user
86
87 self._cached_user = None
88 my_user = self.settings['user']
89 encrypted_cookie = self.get_cookie(self.cookie_name)
90 if encrypted_cookie:
91 auth_data = JupyterHubLoginHandler.verify_token(self, self.cookie_name, encrypted_cookie)
92 if not auth_data:
93 # treat invalid token the same as no token
94 return None
95 user = auth_data['name']
96 if user == my_user:
97 self._cached_user = user
98 return user
99 else:
100 return None
101 else:
102 self.log.debug("No token cookie")
103 return None
104
105
106 class JupyterHubLogoutHandler(LogoutHandler):
107 def get(self):
108 self.redirect(url_path_join(self.settings['hub_prefix'], 'logout'))
109
110
111 # register new hub related command-line aliases
112 aliases = dict(notebook_aliases)
113 aliases.update({
114 'user' : 'SingleUserNotebookApp.user',
115 'cookie-name': 'SingleUserNotebookApp.cookie_name',
116 'hub-prefix': 'SingleUserNotebookApp.hub_prefix',
117 'hub-api-url': 'SingleUserNotebookApp.hub_api_url',
118 'base-url': 'SingleUserNotebookApp.base_url',
119 })
120
121 page_template = """
122 {% extends "templates/page.html" %}
123
124 {% block header_buttons %}
125 {{super()}}
126
127 <a href='{{hub_control_panel_url}}'
128 class='btn btn-default btn-sm navbar-btn pull-right'
129 style='margin-right: 4px; margin-left: 2px;'
130 >
131 Control Panel</a>
132 {% endblock %}
133 """
134
135 class SingleUserNotebookApp(NotebookApp):
136 """A Subclass of the regular NotebookApp that is aware of the parent multiuser context."""
137 user = CUnicode(config=True)
138 def _user_changed(self, name, old, new):
139 self.log.name = new
140 cookie_name = Unicode(config=True)
141 hub_prefix = Unicode(config=True)
142 hub_api_url = Unicode(config=True)
143 aliases = aliases
144 open_browser = False
145 trust_xheaders = True
146 login_handler_class = JupyterHubLoginHandler
147 logout_handler_class = JupyterHubLogoutHandler
148
149 cookie_cache_lifetime = Integer(
150 config=True,
151 default_value=300,
152 allow_none=True,
153 help="""
154 Time, in seconds, that we cache a validated cookie before requiring
155 revalidation with the hub.
156 """,
157 )
158
159 def _log_datefmt_default(self):
160 """Exclude date from default date format"""
161 return "%Y-%m-%d %H:%M:%S"
162
163 def _log_format_default(self):
164 """override default log format to include time"""
165 return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s"
166
167 def _confirm_exit(self):
168 # disable the exit confirmation for background notebook processes
169 ioloop.IOLoop.instance().stop()
170
171 def _clear_cookie_cache(self):
172 self.log.debug("Clearing cookie cache")
173 self.tornado_settings['cookie_cache'].clear()
174
175 def start(self):
176 # Start a PeriodicCallback to clear cached cookies. This forces us to
177 # revalidate our user with the Hub at least every
178 # `cookie_cache_lifetime` seconds.
179 if self.cookie_cache_lifetime:
180 ioloop.PeriodicCallback(
181 self._clear_cookie_cache,
182 self.cookie_cache_lifetime * 1e3,
183 ).start()
184 super(SingleUserNotebookApp, self).start()
185
186 def init_webapp(self):
187 # load the hub related settings into the tornado settings dict
188 env = os.environ
189 s = self.tornado_settings
190 s['cookie_cache'] = {}
191 s['user'] = self.user
192 s['hub_api_key'] = env.pop('JPY_API_TOKEN')
193 s['hub_prefix'] = self.hub_prefix
194 s['cookie_name'] = self.cookie_name
195 s['login_url'] = self.hub_prefix
196 s['hub_api_url'] = self.hub_api_url
197
198 super(SingleUserNotebookApp, self).init_webapp()
199 self.patch_templates()
200
201 def patch_templates(self):
202 """Patch page templates to add Hub-related buttons"""
203 env = self.web_app.settings['jinja2_env']
204
205 env.globals['hub_control_panel_url'] = \
206 url_path_join(self.hub_prefix, 'home')
207
208 # patch jinja env loading to modify page template
209 def get_page(name):
210 if name == 'page.html':
211 return page_template
212
213 orig_loader = env.loader
214 env.loader = ChoiceLoader([
215 FunctionLoader(get_page),
216 orig_loader,
217 ])
218
219
220 def main():
221 return SingleUserNotebookApp.launch_instance()
222
223
224 if __name__ == "__main__":
225 main()
226
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/jupyterhub/singleuser.py b/jupyterhub/singleuser.py
--- a/jupyterhub/singleuser.py
+++ b/jupyterhub/singleuser.py
@@ -17,7 +17,7 @@
from tornado import ioloop
from tornado.web import HTTPError
-from traitlets import (
+from IPython.utils.traitlets import (
Integer,
Unicode,
CUnicode,
| {"golden_diff": "diff --git a/jupyterhub/singleuser.py b/jupyterhub/singleuser.py\n--- a/jupyterhub/singleuser.py\n+++ b/jupyterhub/singleuser.py\n@@ -17,7 +17,7 @@\n from tornado import ioloop\n from tornado.web import HTTPError\n \n-from traitlets import (\n+from IPython.utils.traitlets import (\n Integer,\n Unicode,\n CUnicode,\n", "issue": "Single user server launch is broken\nI think that #261 broke the launching of the single user server. I am seeing the following errors in the nbgrader tests:\n\n```\nTraceback (most recent call last):\n File \"/Users/jhamrick/.virtualenvs/nbgrader/bin/jupyterhub-singleuser\", line 6, in <module>\n exec(compile(open(__file__).read(), __file__, 'exec'))\n File \"/Users/jhamrick/project/tools/jupyterhub/scripts/jupyterhub-singleuser\", line 4, in <module>\n main()\n File \"/Users/jhamrick/project/tools/jupyterhub/jupyterhub/singleuser.py\", line 221, in main\n return SingleUserNotebookApp.launch_instance()\n File \"/Users/jhamrick/.virtualenvs/nbgrader/lib/python3.4/site-packages/IPython/config/application.py\", line 573, in launch_instance\n app.initialize(argv)\n File \"<string>\", line 2, in initialize\n File \"/Users/jhamrick/.virtualenvs/nbgrader/lib/python3.4/site-packages/IPython/config/application.py\", line 75, in catch_config_error\n return method(app, *args, **kwargs)\n File \"/Users/jhamrick/.virtualenvs/nbgrader/lib/python3.4/site-packages/IPython/html/notebookapp.py\", line 1015, in initialize\n self.init_webapp()\n File \"/Users/jhamrick/project/tools/jupyterhub/jupyterhub/singleuser.py\", line 191, in init_webapp\n s['user'] = self.user\n File \"/Users/jhamrick/.virtualenvs/nbgrader/lib/python3.4/site-packages/traitlets/traitlets.py\", line 438, in __get__\n % (self.name, obj))\ntraitlets.traitlets.TraitError: No default value found for None trait of <jupyterhub.singleuser.SingleUserNotebookApp object at 0x102953b00>\n```\n\nIf I revert to the version of jupyterhub prior to that PR, this error does not occur. @epifanio reported on gitter seeing the same thing as well, so I don't think it's isolated to nbgrader.\n\nGiven the error message, I suspect this has to do with ipython/traitlets#39 and/or ipython/traitlets#40 though I haven't actually tested it. I tried giving the `user` trait a default value but it did not seem to fix the error. 
I will try to do a bit more debugging, but I fear I don't really understand the internals of traitlets well enough to know exactly what's going on here.\n\nPing @takluyver and @minrk ?\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\"\"\"Extend regular notebook server to be aware of multiuser things.\"\"\"\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nimport os\ntry:\n from urllib.parse import quote\nexcept ImportError:\n # PY2 Compat\n from urllib import quote\n\nimport requests\nfrom jinja2 import ChoiceLoader, FunctionLoader\n\nfrom tornado import ioloop\nfrom tornado.web import HTTPError\n\nfrom traitlets import (\n Integer,\n Unicode,\n CUnicode,\n)\n\nfrom IPython.html.notebookapp import NotebookApp, aliases as notebook_aliases\nfrom IPython.html.auth.login import LoginHandler\nfrom IPython.html.auth.logout import LogoutHandler\n\nfrom IPython.html.utils import url_path_join\n\n\nfrom distutils.version import LooseVersion as V\n\nimport IPython\nif V(IPython.__version__) < V('3.0'):\n raise ImportError(\"JupyterHub Requires IPython >= 3.0, found %s\" % IPython.__version__)\n\n# Define two methods to attach to AuthenticatedHandler,\n# which authenticate via the central auth server.\n\nclass JupyterHubLoginHandler(LoginHandler):\n @staticmethod\n def login_available(settings):\n return True\n \n @staticmethod\n def verify_token(self, cookie_name, encrypted_cookie):\n \"\"\"method for token verification\"\"\"\n cookie_cache = self.settings['cookie_cache']\n if encrypted_cookie in cookie_cache:\n # we've seen this token before, don't ask upstream again\n return cookie_cache[encrypted_cookie]\n \n hub_api_url = self.settings['hub_api_url']\n hub_api_key = self.settings['hub_api_key']\n r = requests.get(url_path_join(\n hub_api_url, \"authorizations/cookie\", cookie_name, quote(encrypted_cookie, safe=''),\n ),\n headers = {'Authorization' : 'token %s' % hub_api_key},\n )\n if r.status_code == 404:\n data = None\n elif r.status_code == 403:\n self.log.error(\"I don't have permission to verify cookies, my auth token may have expired: [%i] %s\", r.status_code, r.reason)\n raise HTTPError(500, \"Permission failure checking authorization, I may need to be restarted\")\n elif r.status_code >= 500:\n self.log.error(\"Upstream failure verifying auth token: [%i] %s\", r.status_code, r.reason)\n raise HTTPError(502, \"Failed to check authorization (upstream problem)\")\n elif r.status_code >= 400:\n self.log.warn(\"Failed to check authorization: [%i] %s\", r.status_code, r.reason)\n raise HTTPError(500, \"Failed to check authorization\")\n else:\n data = r.json()\n cookie_cache[encrypted_cookie] = data\n return data\n \n @staticmethod\n def get_user(self):\n \"\"\"alternative get_current_user to query the central server\"\"\"\n # only allow this to be called once per handler\n # avoids issues if an error is raised,\n # since this may be called again when trying to render the error page\n if hasattr(self, '_cached_user'):\n return self._cached_user\n \n self._cached_user = None\n my_user = self.settings['user']\n encrypted_cookie = self.get_cookie(self.cookie_name)\n if encrypted_cookie:\n auth_data = JupyterHubLoginHandler.verify_token(self, self.cookie_name, encrypted_cookie)\n if not auth_data:\n # treat invalid token the same as no token\n return None\n user = auth_data['name']\n if user == my_user:\n self._cached_user = user\n return user\n else:\n return None\n else:\n self.log.debug(\"No token cookie\")\n return 
None\n\n\nclass JupyterHubLogoutHandler(LogoutHandler):\n def get(self):\n self.redirect(url_path_join(self.settings['hub_prefix'], 'logout'))\n\n\n# register new hub related command-line aliases\naliases = dict(notebook_aliases)\naliases.update({\n 'user' : 'SingleUserNotebookApp.user',\n 'cookie-name': 'SingleUserNotebookApp.cookie_name',\n 'hub-prefix': 'SingleUserNotebookApp.hub_prefix',\n 'hub-api-url': 'SingleUserNotebookApp.hub_api_url',\n 'base-url': 'SingleUserNotebookApp.base_url',\n})\n\npage_template = \"\"\"\n{% extends \"templates/page.html\" %}\n\n{% block header_buttons %}\n{{super()}}\n\n<a href='{{hub_control_panel_url}}'\n class='btn btn-default btn-sm navbar-btn pull-right'\n style='margin-right: 4px; margin-left: 2px;'\n>\nControl Panel</a>\n{% endblock %}\n\"\"\"\n\nclass SingleUserNotebookApp(NotebookApp):\n \"\"\"A Subclass of the regular NotebookApp that is aware of the parent multiuser context.\"\"\"\n user = CUnicode(config=True)\n def _user_changed(self, name, old, new):\n self.log.name = new\n cookie_name = Unicode(config=True)\n hub_prefix = Unicode(config=True)\n hub_api_url = Unicode(config=True)\n aliases = aliases\n open_browser = False\n trust_xheaders = True\n login_handler_class = JupyterHubLoginHandler\n logout_handler_class = JupyterHubLogoutHandler\n\n cookie_cache_lifetime = Integer(\n config=True,\n default_value=300,\n allow_none=True,\n help=\"\"\"\n Time, in seconds, that we cache a validated cookie before requiring\n revalidation with the hub.\n \"\"\",\n )\n\n def _log_datefmt_default(self):\n \"\"\"Exclude date from default date format\"\"\"\n return \"%Y-%m-%d %H:%M:%S\"\n\n def _log_format_default(self):\n \"\"\"override default log format to include time\"\"\"\n return \"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s\"\n\n def _confirm_exit(self):\n # disable the exit confirmation for background notebook processes\n ioloop.IOLoop.instance().stop()\n\n def _clear_cookie_cache(self):\n self.log.debug(\"Clearing cookie cache\")\n self.tornado_settings['cookie_cache'].clear()\n \n def start(self):\n # Start a PeriodicCallback to clear cached cookies. 
This forces us to\n # revalidate our user with the Hub at least every\n # `cookie_cache_lifetime` seconds.\n if self.cookie_cache_lifetime:\n ioloop.PeriodicCallback(\n self._clear_cookie_cache,\n self.cookie_cache_lifetime * 1e3,\n ).start()\n super(SingleUserNotebookApp, self).start()\n \n def init_webapp(self):\n # load the hub related settings into the tornado settings dict\n env = os.environ\n s = self.tornado_settings\n s['cookie_cache'] = {}\n s['user'] = self.user\n s['hub_api_key'] = env.pop('JPY_API_TOKEN')\n s['hub_prefix'] = self.hub_prefix\n s['cookie_name'] = self.cookie_name\n s['login_url'] = self.hub_prefix\n s['hub_api_url'] = self.hub_api_url\n \n super(SingleUserNotebookApp, self).init_webapp()\n self.patch_templates()\n \n def patch_templates(self):\n \"\"\"Patch page templates to add Hub-related buttons\"\"\"\n env = self.web_app.settings['jinja2_env']\n \n env.globals['hub_control_panel_url'] = \\\n url_path_join(self.hub_prefix, 'home')\n \n # patch jinja env loading to modify page template\n def get_page(name):\n if name == 'page.html':\n return page_template\n \n orig_loader = env.loader\n env.loader = ChoiceLoader([\n FunctionLoader(get_page),\n orig_loader,\n ])\n\n\ndef main():\n return SingleUserNotebookApp.launch_instance()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "jupyterhub/singleuser.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\"\"\"Extend regular notebook server to be aware of multiuser things.\"\"\"\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nimport os\ntry:\n from urllib.parse import quote\nexcept ImportError:\n # PY2 Compat\n from urllib import quote\n\nimport requests\nfrom jinja2 import ChoiceLoader, FunctionLoader\n\nfrom tornado import ioloop\nfrom tornado.web import HTTPError\n\nfrom IPython.utils.traitlets import (\n Integer,\n Unicode,\n CUnicode,\n)\n\nfrom IPython.html.notebookapp import NotebookApp, aliases as notebook_aliases\nfrom IPython.html.auth.login import LoginHandler\nfrom IPython.html.auth.logout import LogoutHandler\n\nfrom IPython.html.utils import url_path_join\n\n\nfrom distutils.version import LooseVersion as V\n\nimport IPython\nif V(IPython.__version__) < V('3.0'):\n raise ImportError(\"JupyterHub Requires IPython >= 3.0, found %s\" % IPython.__version__)\n\n# Define two methods to attach to AuthenticatedHandler,\n# which authenticate via the central auth server.\n\nclass JupyterHubLoginHandler(LoginHandler):\n @staticmethod\n def login_available(settings):\n return True\n \n @staticmethod\n def verify_token(self, cookie_name, encrypted_cookie):\n \"\"\"method for token verification\"\"\"\n cookie_cache = self.settings['cookie_cache']\n if encrypted_cookie in cookie_cache:\n # we've seen this token before, don't ask upstream again\n return cookie_cache[encrypted_cookie]\n \n hub_api_url = self.settings['hub_api_url']\n hub_api_key = self.settings['hub_api_key']\n r = requests.get(url_path_join(\n hub_api_url, \"authorizations/cookie\", cookie_name, quote(encrypted_cookie, safe=''),\n ),\n headers = {'Authorization' : 'token %s' % hub_api_key},\n )\n if r.status_code == 404:\n data = None\n elif r.status_code == 403:\n self.log.error(\"I don't have permission to verify cookies, my auth token may have expired: [%i] %s\", r.status_code, r.reason)\n raise HTTPError(500, \"Permission failure checking authorization, I may need to be restarted\")\n elif r.status_code >= 500:\n self.log.error(\"Upstream failure verifying auth token: [%i] 
%s\", r.status_code, r.reason)\n raise HTTPError(502, \"Failed to check authorization (upstream problem)\")\n elif r.status_code >= 400:\n self.log.warn(\"Failed to check authorization: [%i] %s\", r.status_code, r.reason)\n raise HTTPError(500, \"Failed to check authorization\")\n else:\n data = r.json()\n cookie_cache[encrypted_cookie] = data\n return data\n \n @staticmethod\n def get_user(self):\n \"\"\"alternative get_current_user to query the central server\"\"\"\n # only allow this to be called once per handler\n # avoids issues if an error is raised,\n # since this may be called again when trying to render the error page\n if hasattr(self, '_cached_user'):\n return self._cached_user\n \n self._cached_user = None\n my_user = self.settings['user']\n encrypted_cookie = self.get_cookie(self.cookie_name)\n if encrypted_cookie:\n auth_data = JupyterHubLoginHandler.verify_token(self, self.cookie_name, encrypted_cookie)\n if not auth_data:\n # treat invalid token the same as no token\n return None\n user = auth_data['name']\n if user == my_user:\n self._cached_user = user\n return user\n else:\n return None\n else:\n self.log.debug(\"No token cookie\")\n return None\n\n\nclass JupyterHubLogoutHandler(LogoutHandler):\n def get(self):\n self.redirect(url_path_join(self.settings['hub_prefix'], 'logout'))\n\n\n# register new hub related command-line aliases\naliases = dict(notebook_aliases)\naliases.update({\n 'user' : 'SingleUserNotebookApp.user',\n 'cookie-name': 'SingleUserNotebookApp.cookie_name',\n 'hub-prefix': 'SingleUserNotebookApp.hub_prefix',\n 'hub-api-url': 'SingleUserNotebookApp.hub_api_url',\n 'base-url': 'SingleUserNotebookApp.base_url',\n})\n\npage_template = \"\"\"\n{% extends \"templates/page.html\" %}\n\n{% block header_buttons %}\n{{super()}}\n\n<a href='{{hub_control_panel_url}}'\n class='btn btn-default btn-sm navbar-btn pull-right'\n style='margin-right: 4px; margin-left: 2px;'\n>\nControl Panel</a>\n{% endblock %}\n\"\"\"\n\nclass SingleUserNotebookApp(NotebookApp):\n \"\"\"A Subclass of the regular NotebookApp that is aware of the parent multiuser context.\"\"\"\n user = CUnicode(config=True)\n def _user_changed(self, name, old, new):\n self.log.name = new\n cookie_name = Unicode(config=True)\n hub_prefix = Unicode(config=True)\n hub_api_url = Unicode(config=True)\n aliases = aliases\n open_browser = False\n trust_xheaders = True\n login_handler_class = JupyterHubLoginHandler\n logout_handler_class = JupyterHubLogoutHandler\n\n cookie_cache_lifetime = Integer(\n config=True,\n default_value=300,\n allow_none=True,\n help=\"\"\"\n Time, in seconds, that we cache a validated cookie before requiring\n revalidation with the hub.\n \"\"\",\n )\n\n def _log_datefmt_default(self):\n \"\"\"Exclude date from default date format\"\"\"\n return \"%Y-%m-%d %H:%M:%S\"\n\n def _log_format_default(self):\n \"\"\"override default log format to include time\"\"\"\n return \"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s\"\n\n def _confirm_exit(self):\n # disable the exit confirmation for background notebook processes\n ioloop.IOLoop.instance().stop()\n\n def _clear_cookie_cache(self):\n self.log.debug(\"Clearing cookie cache\")\n self.tornado_settings['cookie_cache'].clear()\n \n def start(self):\n # Start a PeriodicCallback to clear cached cookies. 
This forces us to\n # revalidate our user with the Hub at least every\n # `cookie_cache_lifetime` seconds.\n if self.cookie_cache_lifetime:\n ioloop.PeriodicCallback(\n self._clear_cookie_cache,\n self.cookie_cache_lifetime * 1e3,\n ).start()\n super(SingleUserNotebookApp, self).start()\n \n def init_webapp(self):\n # load the hub related settings into the tornado settings dict\n env = os.environ\n s = self.tornado_settings\n s['cookie_cache'] = {}\n s['user'] = self.user\n s['hub_api_key'] = env.pop('JPY_API_TOKEN')\n s['hub_prefix'] = self.hub_prefix\n s['cookie_name'] = self.cookie_name\n s['login_url'] = self.hub_prefix\n s['hub_api_url'] = self.hub_api_url\n \n super(SingleUserNotebookApp, self).init_webapp()\n self.patch_templates()\n \n def patch_templates(self):\n \"\"\"Patch page templates to add Hub-related buttons\"\"\"\n env = self.web_app.settings['jinja2_env']\n \n env.globals['hub_control_panel_url'] = \\\n url_path_join(self.hub_prefix, 'home')\n \n # patch jinja env loading to modify page template\n def get_page(name):\n if name == 'page.html':\n return page_template\n \n orig_loader = env.loader\n env.loader = ChoiceLoader([\n FunctionLoader(get_page),\n orig_loader,\n ])\n\n\ndef main():\n return SingleUserNotebookApp.launch_instance()\n\n\nif __name__ == \"__main__\":\n main()\n", "path": "jupyterhub/singleuser.py"}]} |
gh_patches_debug_130 | rasdani/github-patches | git_diff | d2l-ai__d2l-en-2279 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ModuleNotFoundError when running the official pytorch colab notebook

I can replicate the error at multiple official pytorch colab notebooks, e.g.
https://colab.research.google.com/github/d2l-ai/d2l-pytorch-colab/blob/master/chapter_linear-classification/image-classification-dataset.ipynb#scrollTo=ee445cce
--- END ISSUE ---
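Editorial aside (not part of the quoted issue; the original screenshot of the traceback did not survive extraction): judging from the dependency change recorded later in this entry, the missing module is presumably `matplotlib_inline`, which the d2l plotting helpers rely on and which was only being pulled in transitively via the `ipython` pin. A hedged sketch of the corrected requirements list in `setup.py` — the rationale in the comments is inferred, not taken from the original report:

```python
# setup.py (sketch of the recorded fix) -- drop the ipython pin, which Colab
# presumably already satisfies with its own build, and declare matplotlib-inline
# directly so the notebooks' SVG plotting backend is always installed
requirements = [
    'jupyter',
    'numpy',
    'matplotlib',
    'matplotlib-inline',
    'requests',
    'pandas',
    'gym',
]
```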
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup, find_packages
2 import d2l
3
4 requirements = [
5 'ipython>=7.23',
6 'jupyter',
7 'numpy',
8 'matplotlib',
9 'requests',
10 'pandas',
11 'gym'
12 ]
13
14 setup(
15 name='d2l',
16 version=d2l.__version__,
17 python_requires='>=3.5',
18 author='D2L Developers',
19 author_email='[email protected]',
20 url='https://d2l.ai',
21 description='Dive into Deep Learning',
22 license='MIT-0',
23 packages=find_packages(),
24 zip_safe=True,
25 install_requires=requirements,
26 )
27
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -2,10 +2,10 @@
import d2l
requirements = [
- 'ipython>=7.23',
'jupyter',
'numpy',
'matplotlib',
+ 'matplotlib-inline',
'requests',
'pandas',
'gym'
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -2,10 +2,10 @@\n import d2l\n \n requirements = [\n- 'ipython>=7.23',\n 'jupyter',\n 'numpy',\n 'matplotlib',\n+ 'matplotlib-inline',\n 'requests',\n 'pandas',\n 'gym'\n", "issue": "ModuleNotFoundError when running the official pytorch colab notebook\n\r\n\r\nI can replicate the error at multiple official pytorch colab notebooks, e.g. \r\n\r\nhttps://colab.research.google.com/github/d2l-ai/d2l-pytorch-colab/blob/master/chapter_linear-classification/image-classification-dataset.ipynb#scrollTo=ee445cce\r\n\r\n\r\n\n", "before_files": [{"content": "from setuptools import setup, find_packages\nimport d2l\n\nrequirements = [\n 'ipython>=7.23',\n 'jupyter',\n 'numpy',\n 'matplotlib',\n 'requests',\n 'pandas',\n 'gym'\n]\n\nsetup(\n name='d2l',\n version=d2l.__version__,\n python_requires='>=3.5',\n author='D2L Developers',\n author_email='[email protected]',\n url='https://d2l.ai',\n description='Dive into Deep Learning',\n license='MIT-0',\n packages=find_packages(),\n zip_safe=True,\n install_requires=requirements,\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup, find_packages\nimport d2l\n\nrequirements = [\n 'jupyter',\n 'numpy',\n 'matplotlib',\n 'matplotlib-inline',\n 'requests',\n 'pandas',\n 'gym'\n]\n\nsetup(\n name='d2l',\n version=d2l.__version__,\n python_requires='>=3.5',\n author='D2L Developers',\n author_email='[email protected]',\n url='https://d2l.ai',\n description='Dive into Deep Learning',\n license='MIT-0',\n packages=find_packages(),\n zip_safe=True,\n install_requires=requirements,\n)\n", "path": "setup.py"}]} |
gh_patches_debug_131 | rasdani/github-patches | git_diff | getpelican__pelican-2630 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Pelican binds to 0.0.0.0 instead of 127.0.0.1 when --bind is omitted
### Problem
When starting Pelican like
`$ pelican --listen`
it binds to all network interfaces. However:
```
$ pelican --help
...
-b BIND, --bind BIND IP to bind to when serving files via HTTP (default:
127.0.0.1) (default: None)
```
When omitting --bind, it should bind to 127.0.0.1 only.
### Observation
My macOS machine presents a pop-up asking whether I want to allow '_The application "Python.app" to accept incoming network connections?_'. Typically, macOS only asks this question if a service is being bound to a public network interface and not the loopback interface.
After clicking allow, a netstat shows Pelican is bound to all available interfaces (note: one should first visit the site on the IP number of the public network interface, otherwise netstat won't show this):
```
$ netstat -an|grep '*.8000'
tcp4 0 0 *.8000 *.* LISTEN
```
### Cause
When Pelican is started with --listen but --bind is omitted, the default settings from pelican/settings.py are used. In the DEFAULT_CONFIG dictionary, we find: 'BIND': '',
BIND is then passed to listen(), RootedHTTPServer(), BaseHTTPServer() and finally to socket()
The socket documentation states:
> For IPv4 addresses, two special forms are accepted instead of a host address: '' represents INADDR_ANY, which is used to bind to all interfaces, and the string '<broadcast>' represents INADDR_BROADCAST.
Thus, because the default setting of BIND is '', Pelican is bound to all interfaces and not, as documented and promised, 127.0.0.1 only.
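Editorial aside (not part of the quoted issue): the INADDR_ANY behaviour described above is easy to confirm with the standard library alone; the port below is arbitrary.

```python
import socket

# '' means INADDR_ANY: the socket is reachable on every interface
s_any = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s_any.bind(('', 8000))
print(s_any.getsockname())    # ('0.0.0.0', 8000)
s_any.close()

# '127.0.0.1' keeps the socket on the loopback interface only
s_loop = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s_loop.bind(('127.0.0.1', 8000))
print(s_loop.getsockname())   # ('127.0.0.1', 8000)
s_loop.close()
```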
### Solution
The solution is to set BIND to '127.0.0.1' in pelican/settings.py. As a workaround, it is also possible to set BIND = '127.0.0.1' in pelicanconf.py.
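Spelled out as configuration (editorial aside), the per-site workaround from the paragraph above is a single line:

```python
# pelicanconf.py -- pin the dev server to loopback until the default changes upstream
BIND = '127.0.0.1'
```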
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pelican/settings.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 from __future__ import print_function, unicode_literals
3
4 import copy
5 import inspect
6 import locale
7 import logging
8 import os
9 import re
10 from os.path import isabs
11 from posixpath import join as posix_join
12
13 import six
14
15 from pelican.log import LimitFilter
16
17
18 try:
19 # spec_from_file_location is the recommended way in Python 3.5+
20 import importlib.util
21
22 def load_source(name, path):
23 spec = importlib.util.spec_from_file_location(name, path)
24 mod = importlib.util.module_from_spec(spec)
25 spec.loader.exec_module(mod)
26 return mod
27 except ImportError:
28 # but it does not exist in Python 2.7, so fall back to imp
29 import imp
30 load_source = imp.load_source
31
32
33 logger = logging.getLogger(__name__)
34
35 DEFAULT_THEME = os.path.join(os.path.dirname(os.path.abspath(__file__)),
36 'themes', 'notmyidea')
37 DEFAULT_CONFIG = {
38 'PATH': os.curdir,
39 'ARTICLE_PATHS': [''],
40 'ARTICLE_EXCLUDES': [],
41 'PAGE_PATHS': ['pages'],
42 'PAGE_EXCLUDES': [],
43 'THEME': DEFAULT_THEME,
44 'OUTPUT_PATH': 'output',
45 'READERS': {},
46 'STATIC_PATHS': ['images'],
47 'STATIC_EXCLUDES': [],
48 'STATIC_EXCLUDE_SOURCES': True,
49 'THEME_STATIC_DIR': 'theme',
50 'THEME_STATIC_PATHS': ['static', ],
51 'FEED_ALL_ATOM': posix_join('feeds', 'all.atom.xml'),
52 'CATEGORY_FEED_ATOM': posix_join('feeds', '{slug}.atom.xml'),
53 'AUTHOR_FEED_ATOM': posix_join('feeds', '{slug}.atom.xml'),
54 'AUTHOR_FEED_RSS': posix_join('feeds', '{slug}.rss.xml'),
55 'TRANSLATION_FEED_ATOM': posix_join('feeds', 'all-{lang}.atom.xml'),
56 'FEED_MAX_ITEMS': '',
57 'RSS_FEED_SUMMARY_ONLY': True,
58 'SITEURL': '',
59 'SITENAME': 'A Pelican Blog',
60 'DISPLAY_PAGES_ON_MENU': True,
61 'DISPLAY_CATEGORIES_ON_MENU': True,
62 'DOCUTILS_SETTINGS': {},
63 'OUTPUT_SOURCES': False,
64 'OUTPUT_SOURCES_EXTENSION': '.text',
65 'USE_FOLDER_AS_CATEGORY': True,
66 'DEFAULT_CATEGORY': 'misc',
67 'WITH_FUTURE_DATES': True,
68 'CSS_FILE': 'main.css',
69 'NEWEST_FIRST_ARCHIVES': True,
70 'REVERSE_CATEGORY_ORDER': False,
71 'DELETE_OUTPUT_DIRECTORY': False,
72 'OUTPUT_RETENTION': [],
73 'INDEX_SAVE_AS': 'index.html',
74 'ARTICLE_URL': '{slug}.html',
75 'ARTICLE_SAVE_AS': '{slug}.html',
76 'ARTICLE_ORDER_BY': 'reversed-date',
77 'ARTICLE_LANG_URL': '{slug}-{lang}.html',
78 'ARTICLE_LANG_SAVE_AS': '{slug}-{lang}.html',
79 'DRAFT_URL': 'drafts/{slug}.html',
80 'DRAFT_SAVE_AS': posix_join('drafts', '{slug}.html'),
81 'DRAFT_LANG_URL': 'drafts/{slug}-{lang}.html',
82 'DRAFT_LANG_SAVE_AS': posix_join('drafts', '{slug}-{lang}.html'),
83 'PAGE_URL': 'pages/{slug}.html',
84 'PAGE_SAVE_AS': posix_join('pages', '{slug}.html'),
85 'PAGE_ORDER_BY': 'basename',
86 'PAGE_LANG_URL': 'pages/{slug}-{lang}.html',
87 'PAGE_LANG_SAVE_AS': posix_join('pages', '{slug}-{lang}.html'),
88 'DRAFT_PAGE_URL': 'drafts/pages/{slug}.html',
89 'DRAFT_PAGE_SAVE_AS': posix_join('drafts', 'pages', '{slug}.html'),
90 'DRAFT_PAGE_LANG_URL': 'drafts/pages/{slug}-{lang}.html',
91 'DRAFT_PAGE_LANG_SAVE_AS': posix_join('drafts', 'pages',
92 '{slug}-{lang}.html'),
93 'STATIC_URL': '{path}',
94 'STATIC_SAVE_AS': '{path}',
95 'STATIC_CREATE_LINKS': False,
96 'STATIC_CHECK_IF_MODIFIED': False,
97 'CATEGORY_URL': 'category/{slug}.html',
98 'CATEGORY_SAVE_AS': posix_join('category', '{slug}.html'),
99 'TAG_URL': 'tag/{slug}.html',
100 'TAG_SAVE_AS': posix_join('tag', '{slug}.html'),
101 'AUTHOR_URL': 'author/{slug}.html',
102 'AUTHOR_SAVE_AS': posix_join('author', '{slug}.html'),
103 'PAGINATION_PATTERNS': [
104 (1, '{name}{extension}', '{name}{extension}'),
105 (2, '{name}{number}{extension}', '{name}{number}{extension}'),
106 ],
107 'YEAR_ARCHIVE_URL': '',
108 'YEAR_ARCHIVE_SAVE_AS': '',
109 'MONTH_ARCHIVE_URL': '',
110 'MONTH_ARCHIVE_SAVE_AS': '',
111 'DAY_ARCHIVE_URL': '',
112 'DAY_ARCHIVE_SAVE_AS': '',
113 'RELATIVE_URLS': False,
114 'DEFAULT_LANG': 'en',
115 'ARTICLE_TRANSLATION_ID': 'slug',
116 'PAGE_TRANSLATION_ID': 'slug',
117 'DIRECT_TEMPLATES': ['index', 'tags', 'categories', 'authors', 'archives'],
118 'THEME_TEMPLATES_OVERRIDES': [],
119 'PAGINATED_TEMPLATES': {'index': None, 'tag': None, 'category': None,
120 'author': None},
121 'PELICAN_CLASS': 'pelican.Pelican',
122 'DEFAULT_DATE_FORMAT': '%a %d %B %Y',
123 'DATE_FORMATS': {},
124 'MARKDOWN': {
125 'extension_configs': {
126 'markdown.extensions.codehilite': {'css_class': 'highlight'},
127 'markdown.extensions.extra': {},
128 'markdown.extensions.meta': {},
129 },
130 'output_format': 'html5',
131 },
132 'JINJA_FILTERS': {},
133 'JINJA_ENVIRONMENT': {
134 'trim_blocks': True,
135 'lstrip_blocks': True,
136 'extensions': [],
137 },
138 'LOG_FILTER': [],
139 'LOCALE': [''], # defaults to user locale
140 'DEFAULT_PAGINATION': False,
141 'DEFAULT_ORPHANS': 0,
142 'DEFAULT_METADATA': {},
143 'FILENAME_METADATA': r'(?P<date>\d{4}-\d{2}-\d{2}).*',
144 'PATH_METADATA': '',
145 'EXTRA_PATH_METADATA': {},
146 'ARTICLE_PERMALINK_STRUCTURE': '',
147 'TYPOGRIFY': False,
148 'TYPOGRIFY_IGNORE_TAGS': [],
149 'SUMMARY_MAX_LENGTH': 50,
150 'PLUGIN_PATHS': [],
151 'PLUGINS': [],
152 'PYGMENTS_RST_OPTIONS': {},
153 'TEMPLATE_PAGES': {},
154 'TEMPLATE_EXTENSIONS': ['.html'],
155 'IGNORE_FILES': ['.#*'],
156 'SLUG_REGEX_SUBSTITUTIONS': [
157 (r'[^\w\s-]', ''), # remove non-alphabetical/whitespace/'-' chars
158 (r'(?u)\A\s*', ''), # strip leading whitespace
159 (r'(?u)\s*\Z', ''), # strip trailing whitespace
160 (r'[-\s]+', '-'), # reduce multiple whitespace or '-' to single '-'
161 ],
162 'INTRASITE_LINK_REGEX': '[{|](?P<what>.*?)[|}]',
163 'SLUGIFY_SOURCE': 'title',
164 'CACHE_CONTENT': False,
165 'CONTENT_CACHING_LAYER': 'reader',
166 'CACHE_PATH': 'cache',
167 'GZIP_CACHE': True,
168 'CHECK_MODIFIED_METHOD': 'mtime',
169 'LOAD_CONTENT_CACHE': False,
170 'WRITE_SELECTED': [],
171 'FORMATTED_FIELDS': ['summary'],
172 'PORT': 8000,
173 'BIND': '',
174 }
175
176 PYGMENTS_RST_OPTIONS = None
177
178
179 def read_settings(path=None, override=None):
180 settings = override or {}
181
182 if path:
183 settings = dict(get_settings_from_file(path), **settings)
184
185 if settings:
186 settings = handle_deprecated_settings(settings)
187
188 if path:
189 # Make relative paths absolute
190 def getabs(maybe_relative, base_path=path):
191 if isabs(maybe_relative):
192 return maybe_relative
193 return os.path.abspath(os.path.normpath(os.path.join(
194 os.path.dirname(base_path), maybe_relative)))
195
196 for p in ['PATH', 'OUTPUT_PATH', 'THEME', 'CACHE_PATH']:
197 if settings.get(p) is not None:
198 absp = getabs(settings[p])
199 # THEME may be a name rather than a path
200 if p != 'THEME' or os.path.exists(absp):
201 settings[p] = absp
202
203 if settings.get('PLUGIN_PATHS') is not None:
204 settings['PLUGIN_PATHS'] = [getabs(pluginpath)
205 for pluginpath
206 in settings['PLUGIN_PATHS']]
207
208 settings = dict(copy.deepcopy(DEFAULT_CONFIG), **settings)
209 settings = configure_settings(settings)
210
211 # This is because there doesn't seem to be a way to pass extra
212 # parameters to docutils directive handlers, so we have to have a
213 # variable here that we'll import from within Pygments.run (see
214 # rstdirectives.py) to see what the user defaults were.
215 global PYGMENTS_RST_OPTIONS
216 PYGMENTS_RST_OPTIONS = settings.get('PYGMENTS_RST_OPTIONS', None)
217 return settings
218
219
220 def get_settings_from_module(module=None):
221 """Loads settings from a module, returns a dictionary."""
222
223 context = {}
224 if module is not None:
225 context.update(
226 (k, v) for k, v in inspect.getmembers(module) if k.isupper())
227 return context
228
229
230 def get_settings_from_file(path):
231 """Loads settings from a file path, returning a dict."""
232
233 name, ext = os.path.splitext(os.path.basename(path))
234 module = load_source(name, path)
235 return get_settings_from_module(module)
236
237
238 def get_jinja_environment(settings):
239 """Sets the environment for Jinja"""
240
241 jinja_env = settings.setdefault('JINJA_ENVIRONMENT',
242 DEFAULT_CONFIG['JINJA_ENVIRONMENT'])
243
244 # Make sure we include the defaults if the user has set env variables
245 for key, value in DEFAULT_CONFIG['JINJA_ENVIRONMENT'].items():
246 if key not in jinja_env:
247 jinja_env[key] = value
248
249 return settings
250
251
252 def _printf_s_to_format_field(printf_string, format_field):
253 """Tries to replace %s with {format_field} in the provided printf_string.
254 Raises ValueError in case of failure.
255 """
256 TEST_STRING = 'PELICAN_PRINTF_S_DEPRECATION'
257 expected = printf_string % TEST_STRING
258
259 result = printf_string.replace('{', '{{').replace('}', '}}') \
260 % '{{{}}}'.format(format_field)
261 if result.format(**{format_field: TEST_STRING}) != expected:
262 raise ValueError('Failed to safely replace %s with {{{}}}'.format(
263 format_field))
264
265 return result
266
267
268 def handle_deprecated_settings(settings):
269 """Converts deprecated settings and issues warnings. Issues an exception
270 if both old and new setting is specified.
271 """
272
273 # PLUGIN_PATH -> PLUGIN_PATHS
274 if 'PLUGIN_PATH' in settings:
275 logger.warning('PLUGIN_PATH setting has been replaced by '
276 'PLUGIN_PATHS, moving it to the new setting name.')
277 settings['PLUGIN_PATHS'] = settings['PLUGIN_PATH']
278 del settings['PLUGIN_PATH']
279
280 # PLUGIN_PATHS: str -> [str]
281 if isinstance(settings.get('PLUGIN_PATHS'), six.string_types):
282 logger.warning("Defining PLUGIN_PATHS setting as string "
283 "has been deprecated (should be a list)")
284 settings['PLUGIN_PATHS'] = [settings['PLUGIN_PATHS']]
285
286 # JINJA_EXTENSIONS -> JINJA_ENVIRONMENT > extensions
287 if 'JINJA_EXTENSIONS' in settings:
288 logger.warning('JINJA_EXTENSIONS setting has been deprecated, '
289 'moving it to JINJA_ENVIRONMENT setting.')
290 settings['JINJA_ENVIRONMENT']['extensions'] = \
291 settings['JINJA_EXTENSIONS']
292 del settings['JINJA_EXTENSIONS']
293
294 # {ARTICLE,PAGE}_DIR -> {ARTICLE,PAGE}_PATHS
295 for key in ['ARTICLE', 'PAGE']:
296 old_key = key + '_DIR'
297 new_key = key + '_PATHS'
298 if old_key in settings:
299 logger.warning(
300 'Deprecated setting %s, moving it to %s list',
301 old_key, new_key)
302 settings[new_key] = [settings[old_key]] # also make a list
303 del settings[old_key]
304
305 # EXTRA_TEMPLATES_PATHS -> THEME_TEMPLATES_OVERRIDES
306 if 'EXTRA_TEMPLATES_PATHS' in settings:
307 logger.warning('EXTRA_TEMPLATES_PATHS is deprecated use '
308 'THEME_TEMPLATES_OVERRIDES instead.')
309 if ('THEME_TEMPLATES_OVERRIDES' in settings and
310 settings['THEME_TEMPLATES_OVERRIDES']):
311 raise Exception(
312 'Setting both EXTRA_TEMPLATES_PATHS and '
313 'THEME_TEMPLATES_OVERRIDES is not permitted. Please move to '
314 'only setting THEME_TEMPLATES_OVERRIDES.')
315 settings['THEME_TEMPLATES_OVERRIDES'] = \
316 settings['EXTRA_TEMPLATES_PATHS']
317 del settings['EXTRA_TEMPLATES_PATHS']
318
319 # MD_EXTENSIONS -> MARKDOWN
320 if 'MD_EXTENSIONS' in settings:
321 logger.warning('MD_EXTENSIONS is deprecated use MARKDOWN '
322 'instead. Falling back to the default.')
323 settings['MARKDOWN'] = DEFAULT_CONFIG['MARKDOWN']
324
325 # LESS_GENERATOR -> Webassets plugin
326 # FILES_TO_COPY -> STATIC_PATHS, EXTRA_PATH_METADATA
327 for old, new, doc in [
328 ('LESS_GENERATOR', 'the Webassets plugin', None),
329 ('FILES_TO_COPY', 'STATIC_PATHS and EXTRA_PATH_METADATA',
330 'https://github.com/getpelican/pelican/'
331 'blob/master/docs/settings.rst#path-metadata'),
332 ]:
333 if old in settings:
334 message = 'The {} setting has been removed in favor of {}'.format(
335 old, new)
336 if doc:
337 message += ', see {} for details'.format(doc)
338 logger.warning(message)
339
340 # PAGINATED_DIRECT_TEMPLATES -> PAGINATED_TEMPLATES
341 if 'PAGINATED_DIRECT_TEMPLATES' in settings:
342 message = 'The {} setting has been removed in favor of {}'.format(
343 'PAGINATED_DIRECT_TEMPLATES', 'PAGINATED_TEMPLATES')
344 logger.warning(message)
345
346 # set PAGINATED_TEMPLATES
347 if 'PAGINATED_TEMPLATES' not in settings:
348 settings['PAGINATED_TEMPLATES'] = {
349 'tag': None, 'category': None, 'author': None}
350
351 for t in settings['PAGINATED_DIRECT_TEMPLATES']:
352 if t not in settings['PAGINATED_TEMPLATES']:
353 settings['PAGINATED_TEMPLATES'][t] = None
354 del settings['PAGINATED_DIRECT_TEMPLATES']
355
356 # {SLUG,CATEGORY,TAG,AUTHOR}_SUBSTITUTIONS ->
357 # {SLUG,CATEGORY,TAG,AUTHOR}_REGEX_SUBSTITUTIONS
358 url_settings_url = \
359 'http://docs.getpelican.com/en/latest/settings.html#url-settings'
360 flavours = {'SLUG', 'CATEGORY', 'TAG', 'AUTHOR'}
361 old_values = {f: settings[f + '_SUBSTITUTIONS']
362 for f in flavours if f + '_SUBSTITUTIONS' in settings}
363 new_values = {f: settings[f + '_REGEX_SUBSTITUTIONS']
364 for f in flavours if f + '_REGEX_SUBSTITUTIONS' in settings}
365 if old_values and new_values:
366 raise Exception(
367 'Setting both {new_key} and {old_key} (or variants thereof) is '
368 'not permitted. Please move to only setting {new_key}.'
369 .format(old_key='SLUG_SUBSTITUTIONS',
370 new_key='SLUG_REGEX_SUBSTITUTIONS'))
371 if old_values:
372 message = ('{} and variants thereof are deprecated and will be '
373 'removed in the future. Please use {} and variants thereof '
374 'instead. Check {}.'
375 .format('SLUG_SUBSTITUTIONS', 'SLUG_REGEX_SUBSTITUTIONS',
376 url_settings_url))
377 logger.warning(message)
378 if old_values.get('SLUG'):
379 for f in {'CATEGORY', 'TAG'}:
380 if old_values.get(f):
381 old_values[f] = old_values['SLUG'] + old_values[f]
382 old_values['AUTHOR'] = old_values.get('AUTHOR', [])
383 for f in flavours:
384 if old_values.get(f) is not None:
385 regex_subs = []
386 # by default will replace non-alphanum characters
387 replace = True
388 for tpl in old_values[f]:
389 try:
390 src, dst, skip = tpl
391 if skip:
392 replace = False
393 except ValueError:
394 src, dst = tpl
395 regex_subs.append(
396 (re.escape(src), dst.replace('\\', r'\\')))
397
398 if replace:
399 regex_subs += [
400 (r'[^\w\s-]', ''),
401 (r'(?u)\A\s*', ''),
402 (r'(?u)\s*\Z', ''),
403 (r'[-\s]+', '-'),
404 ]
405 else:
406 regex_subs += [
407 (r'(?u)\A\s*', ''),
408 (r'(?u)\s*\Z', ''),
409 ]
410 settings[f + '_REGEX_SUBSTITUTIONS'] = regex_subs
411 settings.pop(f + '_SUBSTITUTIONS', None)
412
413 # `%s` -> '{slug}` or `{lang}` in FEED settings
414 for key in ['TRANSLATION_FEED_ATOM',
415 'TRANSLATION_FEED_RSS'
416 ]:
417 if settings.get(key) and '%s' in settings[key]:
418 logger.warning('%%s usage in %s is deprecated, use {lang} '
419 'instead.', key)
420 try:
421 settings[key] = _printf_s_to_format_field(
422 settings[key], 'lang')
423 except ValueError:
424 logger.warning('Failed to convert %%s to {lang} for %s. '
425 'Falling back to default.', key)
426 settings[key] = DEFAULT_CONFIG[key]
427 for key in ['AUTHOR_FEED_ATOM',
428 'AUTHOR_FEED_RSS',
429 'CATEGORY_FEED_ATOM',
430 'CATEGORY_FEED_RSS',
431 'TAG_FEED_ATOM',
432 'TAG_FEED_RSS',
433 ]:
434 if settings.get(key) and '%s' in settings[key]:
435 logger.warning('%%s usage in %s is deprecated, use {slug} '
436 'instead.', key)
437 try:
438 settings[key] = _printf_s_to_format_field(
439 settings[key], 'slug')
440 except ValueError:
441 logger.warning('Failed to convert %%s to {slug} for %s. '
442 'Falling back to default.', key)
443 settings[key] = DEFAULT_CONFIG[key]
444
445 return settings
446
447
448 def configure_settings(settings):
449 """Provide optimizations, error checking, and warnings for the given
450 settings.
451 Also, specify the log messages to be ignored.
452 """
453 if 'PATH' not in settings or not os.path.isdir(settings['PATH']):
454 raise Exception('You need to specify a path containing the content'
455 ' (see pelican --help for more information)')
456
457 # specify the log messages to be ignored
458 log_filter = settings.get('LOG_FILTER', DEFAULT_CONFIG['LOG_FILTER'])
459 LimitFilter._ignore.update(set(log_filter))
460
461 # lookup the theme in "pelican/themes" if the given one doesn't exist
462 if not os.path.isdir(settings['THEME']):
463 theme_path = os.path.join(
464 os.path.dirname(os.path.abspath(__file__)),
465 'themes',
466 settings['THEME'])
467 if os.path.exists(theme_path):
468 settings['THEME'] = theme_path
469 else:
470 raise Exception("Could not find the theme %s"
471 % settings['THEME'])
472
473 # make paths selected for writing absolute if necessary
474 settings['WRITE_SELECTED'] = [
475 os.path.abspath(path) for path in
476 settings.get('WRITE_SELECTED', DEFAULT_CONFIG['WRITE_SELECTED'])
477 ]
478
479 # standardize strings to lowercase strings
480 for key in ['DEFAULT_LANG']:
481 if key in settings:
482 settings[key] = settings[key].lower()
483
484 # set defaults for Jinja environment
485 settings = get_jinja_environment(settings)
486
487 # standardize strings to lists
488 for key in ['LOCALE']:
489 if key in settings and isinstance(settings[key], six.string_types):
490 settings[key] = [settings[key]]
491
492 # check settings that must be a particular type
493 for key, types in [
494 ('OUTPUT_SOURCES_EXTENSION', six.string_types),
495 ('FILENAME_METADATA', six.string_types),
496 ]:
497 if key in settings and not isinstance(settings[key], types):
498 value = settings.pop(key)
499 logger.warn(
500 'Detected misconfigured %s (%s), '
501 'falling back to the default (%s)',
502 key, value, DEFAULT_CONFIG[key])
503
504 # try to set the different locales, fallback on the default.
505 locales = settings.get('LOCALE', DEFAULT_CONFIG['LOCALE'])
506
507 for locale_ in locales:
508 try:
509 locale.setlocale(locale.LC_ALL, str(locale_))
510 break # break if it is successful
511 except locale.Error:
512 pass
513 else:
514 logger.warning(
515 "Locale could not be set. Check the LOCALE setting, ensuring it "
516 "is valid and available on your system.")
517
518 if ('SITEURL' in settings):
519 # If SITEURL has a trailing slash, remove it and provide a warning
520 siteurl = settings['SITEURL']
521 if (siteurl.endswith('/')):
522 settings['SITEURL'] = siteurl[:-1]
523 logger.warning("Removed extraneous trailing slash from SITEURL.")
524 # If SITEURL is defined but FEED_DOMAIN isn't,
525 # set FEED_DOMAIN to SITEURL
526 if 'FEED_DOMAIN' not in settings:
527 settings['FEED_DOMAIN'] = settings['SITEURL']
528
529 # check content caching layer and warn of incompatibilities
530 if settings.get('CACHE_CONTENT', False) and \
531 settings.get('CONTENT_CACHING_LAYER', '') == 'generator' and \
532 settings.get('WITH_FUTURE_DATES', False):
533 logger.warning(
534 "WITH_FUTURE_DATES conflicts with CONTENT_CACHING_LAYER "
535 "set to 'generator', use 'reader' layer instead")
536
537 # Warn if feeds are generated with both SITEURL & FEED_DOMAIN undefined
538 feed_keys = [
539 'FEED_ATOM', 'FEED_RSS',
540 'FEED_ALL_ATOM', 'FEED_ALL_RSS',
541 'CATEGORY_FEED_ATOM', 'CATEGORY_FEED_RSS',
542 'AUTHOR_FEED_ATOM', 'AUTHOR_FEED_RSS',
543 'TAG_FEED_ATOM', 'TAG_FEED_RSS',
544 'TRANSLATION_FEED_ATOM', 'TRANSLATION_FEED_RSS',
545 ]
546
547 if any(settings.get(k) for k in feed_keys):
548 if not settings.get('SITEURL'):
549 logger.warning('Feeds generated without SITEURL set properly may'
550 ' not be valid')
551
552 if 'TIMEZONE' not in settings:
553 logger.warning(
554 'No timezone information specified in the settings. Assuming'
555 ' your timezone is UTC for feed generation. Check '
556 'http://docs.getpelican.com/en/latest/settings.html#timezone '
557 'for more information')
558
559 # fix up pagination rules
560 from pelican.paginator import PaginationRule
561 pagination_rules = [
562 PaginationRule(*r) for r in settings.get(
563 'PAGINATION_PATTERNS',
564 DEFAULT_CONFIG['PAGINATION_PATTERNS'],
565 )
566 ]
567 settings['PAGINATION_PATTERNS'] = sorted(
568 pagination_rules,
569 key=lambda r: r[0],
570 )
571
572 # Save people from accidentally setting a string rather than a list
573 path_keys = (
574 'ARTICLE_EXCLUDES',
575 'DEFAULT_METADATA',
576 'DIRECT_TEMPLATES',
577 'THEME_TEMPLATES_OVERRIDES',
578 'FILES_TO_COPY',
579 'IGNORE_FILES',
580 'PAGINATED_DIRECT_TEMPLATES',
581 'PLUGINS',
582 'STATIC_EXCLUDES',
583 'STATIC_PATHS',
584 'THEME_STATIC_PATHS',
585 'ARTICLE_PATHS',
586 'PAGE_PATHS',
587 )
588 for PATH_KEY in filter(lambda k: k in settings, path_keys):
589 if isinstance(settings[PATH_KEY], six.string_types):
590 logger.warning("Detected misconfiguration with %s setting "
591 "(must be a list), falling back to the default",
592 PATH_KEY)
593 settings[PATH_KEY] = DEFAULT_CONFIG[PATH_KEY]
594
595 # Add {PAGE,ARTICLE}_PATHS to {ARTICLE,PAGE}_EXCLUDES
596 mutually_exclusive = ('ARTICLE', 'PAGE')
597 for type_1, type_2 in [mutually_exclusive, mutually_exclusive[::-1]]:
598 try:
599 includes = settings[type_1 + '_PATHS']
600 excludes = settings[type_2 + '_EXCLUDES']
601 for path in includes:
602 if path not in excludes:
603 excludes.append(path)
604 except KeyError:
605 continue # setting not specified, nothing to do
606
607 return settings
608
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pelican/settings.py b/pelican/settings.py
--- a/pelican/settings.py
+++ b/pelican/settings.py
@@ -170,7 +170,7 @@
'WRITE_SELECTED': [],
'FORMATTED_FIELDS': ['summary'],
'PORT': 8000,
- 'BIND': '',
+ 'BIND': '127.0.0.1',
}
PYGMENTS_RST_OPTIONS = None
| {"golden_diff": "diff --git a/pelican/settings.py b/pelican/settings.py\n--- a/pelican/settings.py\n+++ b/pelican/settings.py\n@@ -170,7 +170,7 @@\n 'WRITE_SELECTED': [],\n 'FORMATTED_FIELDS': ['summary'],\n 'PORT': 8000,\n- 'BIND': '',\n+ 'BIND': '127.0.0.1',\n }\n \n PYGMENTS_RST_OPTIONS = None\n", "issue": "Pelican binds to 0.0.0.0 instead of 127.0.0.1 when --bind is omitted\n### Problem\r\n\r\nWhen starting Pelican like\r\n`$ pelican --listen`\r\n\r\nit binds to all network interfaces. However:\r\n\r\n```\r\n$ pelican --help\r\n...\r\n -b BIND, --bind BIND IP to bind to when serving files via HTTP (default:\r\n 127.0.0.1) (default: None)\r\n```\r\n\r\nWhen ommiting --bind, it should bind to 127.0.0.1 only.\r\n\r\n\r\n### Observation\r\n\r\nMy macOS machine presents a pop-up with the question if I want to allow '_The application \"Python.app\" to accept incoming network connections?_'. Typically, macOS only asks this question if a service is being bound to a public network interface and not the loop back interface.\r\n\r\nAfter clicking allow, a netstat shows Pelican is bound to all available interfaces (note: one should first visit the site on the IP number of the public network interface, otherwise netstat won't show this):\r\n\r\n```\r\n$ netstat -an|grep '*.8000'\r\ntcp4 0 0 *.8000 *.* LISTEN\r\n\r\n```\r\n\r\n\r\n### Cause\r\n\r\nWhen pelican is started with --listen, but --bind is ommited, default settings from pelican/settings.py are used. In the DEFAULT_CONFIG dictionary, we find: 'BIND': '',\r\n\r\nBIND is then passed to listen(), RootedHTTPServer(), BaseHTTPServer() and finally to socket()\r\n\r\nThe socket documentation states:\r\n\r\n> For IPv4 addresses, two special forms are accepted instead of a host address: '' represents INADDR_ANY, which is used to bind to all interfaces, and the string '<broadcast>' represents INADDR_BROADCAST.\r\n\r\n\r\nThus, because the default setting of BIND is '', Pelican is bound to all interfaces and not, as documented and promised, 127.0.0.1 only.\r\n\r\n\r\n### Solution\r\n\r\nThe solution is to set BIND to '127.0.0.1' in pelican/settings.py. 
As a workaround, it is also possible to set BIND = '127.0.0.1' in pelicanconf.py\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import print_function, unicode_literals\n\nimport copy\nimport inspect\nimport locale\nimport logging\nimport os\nimport re\nfrom os.path import isabs\nfrom posixpath import join as posix_join\n\nimport six\n\nfrom pelican.log import LimitFilter\n\n\ntry:\n # spec_from_file_location is the recommended way in Python 3.5+\n import importlib.util\n\n def load_source(name, path):\n spec = importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n return mod\nexcept ImportError:\n # but it does not exist in Python 2.7, so fall back to imp\n import imp\n load_source = imp.load_source\n\n\nlogger = logging.getLogger(__name__)\n\nDEFAULT_THEME = os.path.join(os.path.dirname(os.path.abspath(__file__)),\n 'themes', 'notmyidea')\nDEFAULT_CONFIG = {\n 'PATH': os.curdir,\n 'ARTICLE_PATHS': [''],\n 'ARTICLE_EXCLUDES': [],\n 'PAGE_PATHS': ['pages'],\n 'PAGE_EXCLUDES': [],\n 'THEME': DEFAULT_THEME,\n 'OUTPUT_PATH': 'output',\n 'READERS': {},\n 'STATIC_PATHS': ['images'],\n 'STATIC_EXCLUDES': [],\n 'STATIC_EXCLUDE_SOURCES': True,\n 'THEME_STATIC_DIR': 'theme',\n 'THEME_STATIC_PATHS': ['static', ],\n 'FEED_ALL_ATOM': posix_join('feeds', 'all.atom.xml'),\n 'CATEGORY_FEED_ATOM': posix_join('feeds', '{slug}.atom.xml'),\n 'AUTHOR_FEED_ATOM': posix_join('feeds', '{slug}.atom.xml'),\n 'AUTHOR_FEED_RSS': posix_join('feeds', '{slug}.rss.xml'),\n 'TRANSLATION_FEED_ATOM': posix_join('feeds', 'all-{lang}.atom.xml'),\n 'FEED_MAX_ITEMS': '',\n 'RSS_FEED_SUMMARY_ONLY': True,\n 'SITEURL': '',\n 'SITENAME': 'A Pelican Blog',\n 'DISPLAY_PAGES_ON_MENU': True,\n 'DISPLAY_CATEGORIES_ON_MENU': True,\n 'DOCUTILS_SETTINGS': {},\n 'OUTPUT_SOURCES': False,\n 'OUTPUT_SOURCES_EXTENSION': '.text',\n 'USE_FOLDER_AS_CATEGORY': True,\n 'DEFAULT_CATEGORY': 'misc',\n 'WITH_FUTURE_DATES': True,\n 'CSS_FILE': 'main.css',\n 'NEWEST_FIRST_ARCHIVES': True,\n 'REVERSE_CATEGORY_ORDER': False,\n 'DELETE_OUTPUT_DIRECTORY': False,\n 'OUTPUT_RETENTION': [],\n 'INDEX_SAVE_AS': 'index.html',\n 'ARTICLE_URL': '{slug}.html',\n 'ARTICLE_SAVE_AS': '{slug}.html',\n 'ARTICLE_ORDER_BY': 'reversed-date',\n 'ARTICLE_LANG_URL': '{slug}-{lang}.html',\n 'ARTICLE_LANG_SAVE_AS': '{slug}-{lang}.html',\n 'DRAFT_URL': 'drafts/{slug}.html',\n 'DRAFT_SAVE_AS': posix_join('drafts', '{slug}.html'),\n 'DRAFT_LANG_URL': 'drafts/{slug}-{lang}.html',\n 'DRAFT_LANG_SAVE_AS': posix_join('drafts', '{slug}-{lang}.html'),\n 'PAGE_URL': 'pages/{slug}.html',\n 'PAGE_SAVE_AS': posix_join('pages', '{slug}.html'),\n 'PAGE_ORDER_BY': 'basename',\n 'PAGE_LANG_URL': 'pages/{slug}-{lang}.html',\n 'PAGE_LANG_SAVE_AS': posix_join('pages', '{slug}-{lang}.html'),\n 'DRAFT_PAGE_URL': 'drafts/pages/{slug}.html',\n 'DRAFT_PAGE_SAVE_AS': posix_join('drafts', 'pages', '{slug}.html'),\n 'DRAFT_PAGE_LANG_URL': 'drafts/pages/{slug}-{lang}.html',\n 'DRAFT_PAGE_LANG_SAVE_AS': posix_join('drafts', 'pages',\n '{slug}-{lang}.html'),\n 'STATIC_URL': '{path}',\n 'STATIC_SAVE_AS': '{path}',\n 'STATIC_CREATE_LINKS': False,\n 'STATIC_CHECK_IF_MODIFIED': False,\n 'CATEGORY_URL': 'category/{slug}.html',\n 'CATEGORY_SAVE_AS': posix_join('category', '{slug}.html'),\n 'TAG_URL': 'tag/{slug}.html',\n 'TAG_SAVE_AS': posix_join('tag', '{slug}.html'),\n 'AUTHOR_URL': 'author/{slug}.html',\n 'AUTHOR_SAVE_AS': posix_join('author', '{slug}.html'),\n 'PAGINATION_PATTERNS': [\n (1, 
'{name}{extension}', '{name}{extension}'),\n (2, '{name}{number}{extension}', '{name}{number}{extension}'),\n ],\n 'YEAR_ARCHIVE_URL': '',\n 'YEAR_ARCHIVE_SAVE_AS': '',\n 'MONTH_ARCHIVE_URL': '',\n 'MONTH_ARCHIVE_SAVE_AS': '',\n 'DAY_ARCHIVE_URL': '',\n 'DAY_ARCHIVE_SAVE_AS': '',\n 'RELATIVE_URLS': False,\n 'DEFAULT_LANG': 'en',\n 'ARTICLE_TRANSLATION_ID': 'slug',\n 'PAGE_TRANSLATION_ID': 'slug',\n 'DIRECT_TEMPLATES': ['index', 'tags', 'categories', 'authors', 'archives'],\n 'THEME_TEMPLATES_OVERRIDES': [],\n 'PAGINATED_TEMPLATES': {'index': None, 'tag': None, 'category': None,\n 'author': None},\n 'PELICAN_CLASS': 'pelican.Pelican',\n 'DEFAULT_DATE_FORMAT': '%a %d %B %Y',\n 'DATE_FORMATS': {},\n 'MARKDOWN': {\n 'extension_configs': {\n 'markdown.extensions.codehilite': {'css_class': 'highlight'},\n 'markdown.extensions.extra': {},\n 'markdown.extensions.meta': {},\n },\n 'output_format': 'html5',\n },\n 'JINJA_FILTERS': {},\n 'JINJA_ENVIRONMENT': {\n 'trim_blocks': True,\n 'lstrip_blocks': True,\n 'extensions': [],\n },\n 'LOG_FILTER': [],\n 'LOCALE': [''], # defaults to user locale\n 'DEFAULT_PAGINATION': False,\n 'DEFAULT_ORPHANS': 0,\n 'DEFAULT_METADATA': {},\n 'FILENAME_METADATA': r'(?P<date>\\d{4}-\\d{2}-\\d{2}).*',\n 'PATH_METADATA': '',\n 'EXTRA_PATH_METADATA': {},\n 'ARTICLE_PERMALINK_STRUCTURE': '',\n 'TYPOGRIFY': False,\n 'TYPOGRIFY_IGNORE_TAGS': [],\n 'SUMMARY_MAX_LENGTH': 50,\n 'PLUGIN_PATHS': [],\n 'PLUGINS': [],\n 'PYGMENTS_RST_OPTIONS': {},\n 'TEMPLATE_PAGES': {},\n 'TEMPLATE_EXTENSIONS': ['.html'],\n 'IGNORE_FILES': ['.#*'],\n 'SLUG_REGEX_SUBSTITUTIONS': [\n (r'[^\\w\\s-]', ''), # remove non-alphabetical/whitespace/'-' chars\n (r'(?u)\\A\\s*', ''), # strip leading whitespace\n (r'(?u)\\s*\\Z', ''), # strip trailing whitespace\n (r'[-\\s]+', '-'), # reduce multiple whitespace or '-' to single '-'\n ],\n 'INTRASITE_LINK_REGEX': '[{|](?P<what>.*?)[|}]',\n 'SLUGIFY_SOURCE': 'title',\n 'CACHE_CONTENT': False,\n 'CONTENT_CACHING_LAYER': 'reader',\n 'CACHE_PATH': 'cache',\n 'GZIP_CACHE': True,\n 'CHECK_MODIFIED_METHOD': 'mtime',\n 'LOAD_CONTENT_CACHE': False,\n 'WRITE_SELECTED': [],\n 'FORMATTED_FIELDS': ['summary'],\n 'PORT': 8000,\n 'BIND': '',\n}\n\nPYGMENTS_RST_OPTIONS = None\n\n\ndef read_settings(path=None, override=None):\n settings = override or {}\n\n if path:\n settings = dict(get_settings_from_file(path), **settings)\n\n if settings:\n settings = handle_deprecated_settings(settings)\n\n if path:\n # Make relative paths absolute\n def getabs(maybe_relative, base_path=path):\n if isabs(maybe_relative):\n return maybe_relative\n return os.path.abspath(os.path.normpath(os.path.join(\n os.path.dirname(base_path), maybe_relative)))\n\n for p in ['PATH', 'OUTPUT_PATH', 'THEME', 'CACHE_PATH']:\n if settings.get(p) is not None:\n absp = getabs(settings[p])\n # THEME may be a name rather than a path\n if p != 'THEME' or os.path.exists(absp):\n settings[p] = absp\n\n if settings.get('PLUGIN_PATHS') is not None:\n settings['PLUGIN_PATHS'] = [getabs(pluginpath)\n for pluginpath\n in settings['PLUGIN_PATHS']]\n\n settings = dict(copy.deepcopy(DEFAULT_CONFIG), **settings)\n settings = configure_settings(settings)\n\n # This is because there doesn't seem to be a way to pass extra\n # parameters to docutils directive handlers, so we have to have a\n # variable here that we'll import from within Pygments.run (see\n # rstdirectives.py) to see what the user defaults were.\n global PYGMENTS_RST_OPTIONS\n PYGMENTS_RST_OPTIONS = settings.get('PYGMENTS_RST_OPTIONS', None)\n return 
settings\n\n\ndef get_settings_from_module(module=None):\n \"\"\"Loads settings from a module, returns a dictionary.\"\"\"\n\n context = {}\n if module is not None:\n context.update(\n (k, v) for k, v in inspect.getmembers(module) if k.isupper())\n return context\n\n\ndef get_settings_from_file(path):\n \"\"\"Loads settings from a file path, returning a dict.\"\"\"\n\n name, ext = os.path.splitext(os.path.basename(path))\n module = load_source(name, path)\n return get_settings_from_module(module)\n\n\ndef get_jinja_environment(settings):\n \"\"\"Sets the environment for Jinja\"\"\"\n\n jinja_env = settings.setdefault('JINJA_ENVIRONMENT',\n DEFAULT_CONFIG['JINJA_ENVIRONMENT'])\n\n # Make sure we include the defaults if the user has set env variables\n for key, value in DEFAULT_CONFIG['JINJA_ENVIRONMENT'].items():\n if key not in jinja_env:\n jinja_env[key] = value\n\n return settings\n\n\ndef _printf_s_to_format_field(printf_string, format_field):\n \"\"\"Tries to replace %s with {format_field} in the provided printf_string.\n Raises ValueError in case of failure.\n \"\"\"\n TEST_STRING = 'PELICAN_PRINTF_S_DEPRECATION'\n expected = printf_string % TEST_STRING\n\n result = printf_string.replace('{', '{{').replace('}', '}}') \\\n % '{{{}}}'.format(format_field)\n if result.format(**{format_field: TEST_STRING}) != expected:\n raise ValueError('Failed to safely replace %s with {{{}}}'.format(\n format_field))\n\n return result\n\n\ndef handle_deprecated_settings(settings):\n \"\"\"Converts deprecated settings and issues warnings. Issues an exception\n if both old and new setting is specified.\n \"\"\"\n\n # PLUGIN_PATH -> PLUGIN_PATHS\n if 'PLUGIN_PATH' in settings:\n logger.warning('PLUGIN_PATH setting has been replaced by '\n 'PLUGIN_PATHS, moving it to the new setting name.')\n settings['PLUGIN_PATHS'] = settings['PLUGIN_PATH']\n del settings['PLUGIN_PATH']\n\n # PLUGIN_PATHS: str -> [str]\n if isinstance(settings.get('PLUGIN_PATHS'), six.string_types):\n logger.warning(\"Defining PLUGIN_PATHS setting as string \"\n \"has been deprecated (should be a list)\")\n settings['PLUGIN_PATHS'] = [settings['PLUGIN_PATHS']]\n\n # JINJA_EXTENSIONS -> JINJA_ENVIRONMENT > extensions\n if 'JINJA_EXTENSIONS' in settings:\n logger.warning('JINJA_EXTENSIONS setting has been deprecated, '\n 'moving it to JINJA_ENVIRONMENT setting.')\n settings['JINJA_ENVIRONMENT']['extensions'] = \\\n settings['JINJA_EXTENSIONS']\n del settings['JINJA_EXTENSIONS']\n\n # {ARTICLE,PAGE}_DIR -> {ARTICLE,PAGE}_PATHS\n for key in ['ARTICLE', 'PAGE']:\n old_key = key + '_DIR'\n new_key = key + '_PATHS'\n if old_key in settings:\n logger.warning(\n 'Deprecated setting %s, moving it to %s list',\n old_key, new_key)\n settings[new_key] = [settings[old_key]] # also make a list\n del settings[old_key]\n\n # EXTRA_TEMPLATES_PATHS -> THEME_TEMPLATES_OVERRIDES\n if 'EXTRA_TEMPLATES_PATHS' in settings:\n logger.warning('EXTRA_TEMPLATES_PATHS is deprecated use '\n 'THEME_TEMPLATES_OVERRIDES instead.')\n if ('THEME_TEMPLATES_OVERRIDES' in settings and\n settings['THEME_TEMPLATES_OVERRIDES']):\n raise Exception(\n 'Setting both EXTRA_TEMPLATES_PATHS and '\n 'THEME_TEMPLATES_OVERRIDES is not permitted. Please move to '\n 'only setting THEME_TEMPLATES_OVERRIDES.')\n settings['THEME_TEMPLATES_OVERRIDES'] = \\\n settings['EXTRA_TEMPLATES_PATHS']\n del settings['EXTRA_TEMPLATES_PATHS']\n\n # MD_EXTENSIONS -> MARKDOWN\n if 'MD_EXTENSIONS' in settings:\n logger.warning('MD_EXTENSIONS is deprecated use MARKDOWN '\n 'instead. 
Falling back to the default.')\n settings['MARKDOWN'] = DEFAULT_CONFIG['MARKDOWN']\n\n # LESS_GENERATOR -> Webassets plugin\n # FILES_TO_COPY -> STATIC_PATHS, EXTRA_PATH_METADATA\n for old, new, doc in [\n ('LESS_GENERATOR', 'the Webassets plugin', None),\n ('FILES_TO_COPY', 'STATIC_PATHS and EXTRA_PATH_METADATA',\n 'https://github.com/getpelican/pelican/'\n 'blob/master/docs/settings.rst#path-metadata'),\n ]:\n if old in settings:\n message = 'The {} setting has been removed in favor of {}'.format(\n old, new)\n if doc:\n message += ', see {} for details'.format(doc)\n logger.warning(message)\n\n # PAGINATED_DIRECT_TEMPLATES -> PAGINATED_TEMPLATES\n if 'PAGINATED_DIRECT_TEMPLATES' in settings:\n message = 'The {} setting has been removed in favor of {}'.format(\n 'PAGINATED_DIRECT_TEMPLATES', 'PAGINATED_TEMPLATES')\n logger.warning(message)\n\n # set PAGINATED_TEMPLATES\n if 'PAGINATED_TEMPLATES' not in settings:\n settings['PAGINATED_TEMPLATES'] = {\n 'tag': None, 'category': None, 'author': None}\n\n for t in settings['PAGINATED_DIRECT_TEMPLATES']:\n if t not in settings['PAGINATED_TEMPLATES']:\n settings['PAGINATED_TEMPLATES'][t] = None\n del settings['PAGINATED_DIRECT_TEMPLATES']\n\n # {SLUG,CATEGORY,TAG,AUTHOR}_SUBSTITUTIONS ->\n # {SLUG,CATEGORY,TAG,AUTHOR}_REGEX_SUBSTITUTIONS\n url_settings_url = \\\n 'http://docs.getpelican.com/en/latest/settings.html#url-settings'\n flavours = {'SLUG', 'CATEGORY', 'TAG', 'AUTHOR'}\n old_values = {f: settings[f + '_SUBSTITUTIONS']\n for f in flavours if f + '_SUBSTITUTIONS' in settings}\n new_values = {f: settings[f + '_REGEX_SUBSTITUTIONS']\n for f in flavours if f + '_REGEX_SUBSTITUTIONS' in settings}\n if old_values and new_values:\n raise Exception(\n 'Setting both {new_key} and {old_key} (or variants thereof) is '\n 'not permitted. Please move to only setting {new_key}.'\n .format(old_key='SLUG_SUBSTITUTIONS',\n new_key='SLUG_REGEX_SUBSTITUTIONS'))\n if old_values:\n message = ('{} and variants thereof are deprecated and will be '\n 'removed in the future. Please use {} and variants thereof '\n 'instead. Check {}.'\n .format('SLUG_SUBSTITUTIONS', 'SLUG_REGEX_SUBSTITUTIONS',\n url_settings_url))\n logger.warning(message)\n if old_values.get('SLUG'):\n for f in {'CATEGORY', 'TAG'}:\n if old_values.get(f):\n old_values[f] = old_values['SLUG'] + old_values[f]\n old_values['AUTHOR'] = old_values.get('AUTHOR', [])\n for f in flavours:\n if old_values.get(f) is not None:\n regex_subs = []\n # by default will replace non-alphanum characters\n replace = True\n for tpl in old_values[f]:\n try:\n src, dst, skip = tpl\n if skip:\n replace = False\n except ValueError:\n src, dst = tpl\n regex_subs.append(\n (re.escape(src), dst.replace('\\\\', r'\\\\')))\n\n if replace:\n regex_subs += [\n (r'[^\\w\\s-]', ''),\n (r'(?u)\\A\\s*', ''),\n (r'(?u)\\s*\\Z', ''),\n (r'[-\\s]+', '-'),\n ]\n else:\n regex_subs += [\n (r'(?u)\\A\\s*', ''),\n (r'(?u)\\s*\\Z', ''),\n ]\n settings[f + '_REGEX_SUBSTITUTIONS'] = regex_subs\n settings.pop(f + '_SUBSTITUTIONS', None)\n\n # `%s` -> '{slug}` or `{lang}` in FEED settings\n for key in ['TRANSLATION_FEED_ATOM',\n 'TRANSLATION_FEED_RSS'\n ]:\n if settings.get(key) and '%s' in settings[key]:\n logger.warning('%%s usage in %s is deprecated, use {lang} '\n 'instead.', key)\n try:\n settings[key] = _printf_s_to_format_field(\n settings[key], 'lang')\n except ValueError:\n logger.warning('Failed to convert %%s to {lang} for %s. 
'\n 'Falling back to default.', key)\n settings[key] = DEFAULT_CONFIG[key]\n for key in ['AUTHOR_FEED_ATOM',\n 'AUTHOR_FEED_RSS',\n 'CATEGORY_FEED_ATOM',\n 'CATEGORY_FEED_RSS',\n 'TAG_FEED_ATOM',\n 'TAG_FEED_RSS',\n ]:\n if settings.get(key) and '%s' in settings[key]:\n logger.warning('%%s usage in %s is deprecated, use {slug} '\n 'instead.', key)\n try:\n settings[key] = _printf_s_to_format_field(\n settings[key], 'slug')\n except ValueError:\n logger.warning('Failed to convert %%s to {slug} for %s. '\n 'Falling back to default.', key)\n settings[key] = DEFAULT_CONFIG[key]\n\n return settings\n\n\ndef configure_settings(settings):\n \"\"\"Provide optimizations, error checking, and warnings for the given\n settings.\n Also, specify the log messages to be ignored.\n \"\"\"\n if 'PATH' not in settings or not os.path.isdir(settings['PATH']):\n raise Exception('You need to specify a path containing the content'\n ' (see pelican --help for more information)')\n\n # specify the log messages to be ignored\n log_filter = settings.get('LOG_FILTER', DEFAULT_CONFIG['LOG_FILTER'])\n LimitFilter._ignore.update(set(log_filter))\n\n # lookup the theme in \"pelican/themes\" if the given one doesn't exist\n if not os.path.isdir(settings['THEME']):\n theme_path = os.path.join(\n os.path.dirname(os.path.abspath(__file__)),\n 'themes',\n settings['THEME'])\n if os.path.exists(theme_path):\n settings['THEME'] = theme_path\n else:\n raise Exception(\"Could not find the theme %s\"\n % settings['THEME'])\n\n # make paths selected for writing absolute if necessary\n settings['WRITE_SELECTED'] = [\n os.path.abspath(path) for path in\n settings.get('WRITE_SELECTED', DEFAULT_CONFIG['WRITE_SELECTED'])\n ]\n\n # standardize strings to lowercase strings\n for key in ['DEFAULT_LANG']:\n if key in settings:\n settings[key] = settings[key].lower()\n\n # set defaults for Jinja environment\n settings = get_jinja_environment(settings)\n\n # standardize strings to lists\n for key in ['LOCALE']:\n if key in settings and isinstance(settings[key], six.string_types):\n settings[key] = [settings[key]]\n\n # check settings that must be a particular type\n for key, types in [\n ('OUTPUT_SOURCES_EXTENSION', six.string_types),\n ('FILENAME_METADATA', six.string_types),\n ]:\n if key in settings and not isinstance(settings[key], types):\n value = settings.pop(key)\n logger.warn(\n 'Detected misconfigured %s (%s), '\n 'falling back to the default (%s)',\n key, value, DEFAULT_CONFIG[key])\n\n # try to set the different locales, fallback on the default.\n locales = settings.get('LOCALE', DEFAULT_CONFIG['LOCALE'])\n\n for locale_ in locales:\n try:\n locale.setlocale(locale.LC_ALL, str(locale_))\n break # break if it is successful\n except locale.Error:\n pass\n else:\n logger.warning(\n \"Locale could not be set. 
Check the LOCALE setting, ensuring it \"\n \"is valid and available on your system.\")\n\n if ('SITEURL' in settings):\n # If SITEURL has a trailing slash, remove it and provide a warning\n siteurl = settings['SITEURL']\n if (siteurl.endswith('/')):\n settings['SITEURL'] = siteurl[:-1]\n logger.warning(\"Removed extraneous trailing slash from SITEURL.\")\n # If SITEURL is defined but FEED_DOMAIN isn't,\n # set FEED_DOMAIN to SITEURL\n if 'FEED_DOMAIN' not in settings:\n settings['FEED_DOMAIN'] = settings['SITEURL']\n\n # check content caching layer and warn of incompatibilities\n if settings.get('CACHE_CONTENT', False) and \\\n settings.get('CONTENT_CACHING_LAYER', '') == 'generator' and \\\n settings.get('WITH_FUTURE_DATES', False):\n logger.warning(\n \"WITH_FUTURE_DATES conflicts with CONTENT_CACHING_LAYER \"\n \"set to 'generator', use 'reader' layer instead\")\n\n # Warn if feeds are generated with both SITEURL & FEED_DOMAIN undefined\n feed_keys = [\n 'FEED_ATOM', 'FEED_RSS',\n 'FEED_ALL_ATOM', 'FEED_ALL_RSS',\n 'CATEGORY_FEED_ATOM', 'CATEGORY_FEED_RSS',\n 'AUTHOR_FEED_ATOM', 'AUTHOR_FEED_RSS',\n 'TAG_FEED_ATOM', 'TAG_FEED_RSS',\n 'TRANSLATION_FEED_ATOM', 'TRANSLATION_FEED_RSS',\n ]\n\n if any(settings.get(k) for k in feed_keys):\n if not settings.get('SITEURL'):\n logger.warning('Feeds generated without SITEURL set properly may'\n ' not be valid')\n\n if 'TIMEZONE' not in settings:\n logger.warning(\n 'No timezone information specified in the settings. Assuming'\n ' your timezone is UTC for feed generation. Check '\n 'http://docs.getpelican.com/en/latest/settings.html#timezone '\n 'for more information')\n\n # fix up pagination rules\n from pelican.paginator import PaginationRule\n pagination_rules = [\n PaginationRule(*r) for r in settings.get(\n 'PAGINATION_PATTERNS',\n DEFAULT_CONFIG['PAGINATION_PATTERNS'],\n )\n ]\n settings['PAGINATION_PATTERNS'] = sorted(\n pagination_rules,\n key=lambda r: r[0],\n )\n\n # Save people from accidentally setting a string rather than a list\n path_keys = (\n 'ARTICLE_EXCLUDES',\n 'DEFAULT_METADATA',\n 'DIRECT_TEMPLATES',\n 'THEME_TEMPLATES_OVERRIDES',\n 'FILES_TO_COPY',\n 'IGNORE_FILES',\n 'PAGINATED_DIRECT_TEMPLATES',\n 'PLUGINS',\n 'STATIC_EXCLUDES',\n 'STATIC_PATHS',\n 'THEME_STATIC_PATHS',\n 'ARTICLE_PATHS',\n 'PAGE_PATHS',\n )\n for PATH_KEY in filter(lambda k: k in settings, path_keys):\n if isinstance(settings[PATH_KEY], six.string_types):\n logger.warning(\"Detected misconfiguration with %s setting \"\n \"(must be a list), falling back to the default\",\n PATH_KEY)\n settings[PATH_KEY] = DEFAULT_CONFIG[PATH_KEY]\n\n # Add {PAGE,ARTICLE}_PATHS to {ARTICLE,PAGE}_EXCLUDES\n mutually_exclusive = ('ARTICLE', 'PAGE')\n for type_1, type_2 in [mutually_exclusive, mutually_exclusive[::-1]]:\n try:\n includes = settings[type_1 + '_PATHS']\n excludes = settings[type_2 + '_EXCLUDES']\n for path in includes:\n if path not in excludes:\n excludes.append(path)\n except KeyError:\n continue # setting not specified, nothing to do\n\n return settings\n", "path": "pelican/settings.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\nfrom __future__ import print_function, unicode_literals\n\nimport copy\nimport inspect\nimport locale\nimport logging\nimport os\nimport re\nfrom os.path import isabs\nfrom posixpath import join as posix_join\n\nimport six\n\nfrom pelican.log import LimitFilter\n\n\ntry:\n # spec_from_file_location is the recommended way in Python 3.5+\n import importlib.util\n\n def load_source(name, path):\n spec = 
importlib.util.spec_from_file_location(name, path)\n mod = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(mod)\n return mod\nexcept ImportError:\n # but it does not exist in Python 2.7, so fall back to imp\n import imp\n load_source = imp.load_source\n\n\nlogger = logging.getLogger(__name__)\n\nDEFAULT_THEME = os.path.join(os.path.dirname(os.path.abspath(__file__)),\n 'themes', 'notmyidea')\nDEFAULT_CONFIG = {\n 'PATH': os.curdir,\n 'ARTICLE_PATHS': [''],\n 'ARTICLE_EXCLUDES': [],\n 'PAGE_PATHS': ['pages'],\n 'PAGE_EXCLUDES': [],\n 'THEME': DEFAULT_THEME,\n 'OUTPUT_PATH': 'output',\n 'READERS': {},\n 'STATIC_PATHS': ['images'],\n 'STATIC_EXCLUDES': [],\n 'STATIC_EXCLUDE_SOURCES': True,\n 'THEME_STATIC_DIR': 'theme',\n 'THEME_STATIC_PATHS': ['static', ],\n 'FEED_ALL_ATOM': posix_join('feeds', 'all.atom.xml'),\n 'CATEGORY_FEED_ATOM': posix_join('feeds', '{slug}.atom.xml'),\n 'AUTHOR_FEED_ATOM': posix_join('feeds', '{slug}.atom.xml'),\n 'AUTHOR_FEED_RSS': posix_join('feeds', '{slug}.rss.xml'),\n 'TRANSLATION_FEED_ATOM': posix_join('feeds', 'all-{lang}.atom.xml'),\n 'FEED_MAX_ITEMS': '',\n 'RSS_FEED_SUMMARY_ONLY': True,\n 'SITEURL': '',\n 'SITENAME': 'A Pelican Blog',\n 'DISPLAY_PAGES_ON_MENU': True,\n 'DISPLAY_CATEGORIES_ON_MENU': True,\n 'DOCUTILS_SETTINGS': {},\n 'OUTPUT_SOURCES': False,\n 'OUTPUT_SOURCES_EXTENSION': '.text',\n 'USE_FOLDER_AS_CATEGORY': True,\n 'DEFAULT_CATEGORY': 'misc',\n 'WITH_FUTURE_DATES': True,\n 'CSS_FILE': 'main.css',\n 'NEWEST_FIRST_ARCHIVES': True,\n 'REVERSE_CATEGORY_ORDER': False,\n 'DELETE_OUTPUT_DIRECTORY': False,\n 'OUTPUT_RETENTION': [],\n 'INDEX_SAVE_AS': 'index.html',\n 'ARTICLE_URL': '{slug}.html',\n 'ARTICLE_SAVE_AS': '{slug}.html',\n 'ARTICLE_ORDER_BY': 'reversed-date',\n 'ARTICLE_LANG_URL': '{slug}-{lang}.html',\n 'ARTICLE_LANG_SAVE_AS': '{slug}-{lang}.html',\n 'DRAFT_URL': 'drafts/{slug}.html',\n 'DRAFT_SAVE_AS': posix_join('drafts', '{slug}.html'),\n 'DRAFT_LANG_URL': 'drafts/{slug}-{lang}.html',\n 'DRAFT_LANG_SAVE_AS': posix_join('drafts', '{slug}-{lang}.html'),\n 'PAGE_URL': 'pages/{slug}.html',\n 'PAGE_SAVE_AS': posix_join('pages', '{slug}.html'),\n 'PAGE_ORDER_BY': 'basename',\n 'PAGE_LANG_URL': 'pages/{slug}-{lang}.html',\n 'PAGE_LANG_SAVE_AS': posix_join('pages', '{slug}-{lang}.html'),\n 'DRAFT_PAGE_URL': 'drafts/pages/{slug}.html',\n 'DRAFT_PAGE_SAVE_AS': posix_join('drafts', 'pages', '{slug}.html'),\n 'DRAFT_PAGE_LANG_URL': 'drafts/pages/{slug}-{lang}.html',\n 'DRAFT_PAGE_LANG_SAVE_AS': posix_join('drafts', 'pages',\n '{slug}-{lang}.html'),\n 'STATIC_URL': '{path}',\n 'STATIC_SAVE_AS': '{path}',\n 'STATIC_CREATE_LINKS': False,\n 'STATIC_CHECK_IF_MODIFIED': False,\n 'CATEGORY_URL': 'category/{slug}.html',\n 'CATEGORY_SAVE_AS': posix_join('category', '{slug}.html'),\n 'TAG_URL': 'tag/{slug}.html',\n 'TAG_SAVE_AS': posix_join('tag', '{slug}.html'),\n 'AUTHOR_URL': 'author/{slug}.html',\n 'AUTHOR_SAVE_AS': posix_join('author', '{slug}.html'),\n 'PAGINATION_PATTERNS': [\n (1, '{name}{extension}', '{name}{extension}'),\n (2, '{name}{number}{extension}', '{name}{number}{extension}'),\n ],\n 'YEAR_ARCHIVE_URL': '',\n 'YEAR_ARCHIVE_SAVE_AS': '',\n 'MONTH_ARCHIVE_URL': '',\n 'MONTH_ARCHIVE_SAVE_AS': '',\n 'DAY_ARCHIVE_URL': '',\n 'DAY_ARCHIVE_SAVE_AS': '',\n 'RELATIVE_URLS': False,\n 'DEFAULT_LANG': 'en',\n 'ARTICLE_TRANSLATION_ID': 'slug',\n 'PAGE_TRANSLATION_ID': 'slug',\n 'DIRECT_TEMPLATES': ['index', 'tags', 'categories', 'authors', 'archives'],\n 'THEME_TEMPLATES_OVERRIDES': [],\n 'PAGINATED_TEMPLATES': {'index': None, 'tag': 
None, 'category': None,\n 'author': None},\n 'PELICAN_CLASS': 'pelican.Pelican',\n 'DEFAULT_DATE_FORMAT': '%a %d %B %Y',\n 'DATE_FORMATS': {},\n 'MARKDOWN': {\n 'extension_configs': {\n 'markdown.extensions.codehilite': {'css_class': 'highlight'},\n 'markdown.extensions.extra': {},\n 'markdown.extensions.meta': {},\n },\n 'output_format': 'html5',\n },\n 'JINJA_FILTERS': {},\n 'JINJA_ENVIRONMENT': {\n 'trim_blocks': True,\n 'lstrip_blocks': True,\n 'extensions': [],\n },\n 'LOG_FILTER': [],\n 'LOCALE': [''], # defaults to user locale\n 'DEFAULT_PAGINATION': False,\n 'DEFAULT_ORPHANS': 0,\n 'DEFAULT_METADATA': {},\n 'FILENAME_METADATA': r'(?P<date>\\d{4}-\\d{2}-\\d{2}).*',\n 'PATH_METADATA': '',\n 'EXTRA_PATH_METADATA': {},\n 'ARTICLE_PERMALINK_STRUCTURE': '',\n 'TYPOGRIFY': False,\n 'TYPOGRIFY_IGNORE_TAGS': [],\n 'SUMMARY_MAX_LENGTH': 50,\n 'PLUGIN_PATHS': [],\n 'PLUGINS': [],\n 'PYGMENTS_RST_OPTIONS': {},\n 'TEMPLATE_PAGES': {},\n 'TEMPLATE_EXTENSIONS': ['.html'],\n 'IGNORE_FILES': ['.#*'],\n 'SLUG_REGEX_SUBSTITUTIONS': [\n (r'[^\\w\\s-]', ''), # remove non-alphabetical/whitespace/'-' chars\n (r'(?u)\\A\\s*', ''), # strip leading whitespace\n (r'(?u)\\s*\\Z', ''), # strip trailing whitespace\n (r'[-\\s]+', '-'), # reduce multiple whitespace or '-' to single '-'\n ],\n 'INTRASITE_LINK_REGEX': '[{|](?P<what>.*?)[|}]',\n 'SLUGIFY_SOURCE': 'title',\n 'CACHE_CONTENT': False,\n 'CONTENT_CACHING_LAYER': 'reader',\n 'CACHE_PATH': 'cache',\n 'GZIP_CACHE': True,\n 'CHECK_MODIFIED_METHOD': 'mtime',\n 'LOAD_CONTENT_CACHE': False,\n 'WRITE_SELECTED': [],\n 'FORMATTED_FIELDS': ['summary'],\n 'PORT': 8000,\n 'BIND': '127.0.0.1',\n}\n\nPYGMENTS_RST_OPTIONS = None\n\n\ndef read_settings(path=None, override=None):\n settings = override or {}\n\n if path:\n settings = dict(get_settings_from_file(path), **settings)\n\n if settings:\n settings = handle_deprecated_settings(settings)\n\n if path:\n # Make relative paths absolute\n def getabs(maybe_relative, base_path=path):\n if isabs(maybe_relative):\n return maybe_relative\n return os.path.abspath(os.path.normpath(os.path.join(\n os.path.dirname(base_path), maybe_relative)))\n\n for p in ['PATH', 'OUTPUT_PATH', 'THEME', 'CACHE_PATH']:\n if settings.get(p) is not None:\n absp = getabs(settings[p])\n # THEME may be a name rather than a path\n if p != 'THEME' or os.path.exists(absp):\n settings[p] = absp\n\n if settings.get('PLUGIN_PATHS') is not None:\n settings['PLUGIN_PATHS'] = [getabs(pluginpath)\n for pluginpath\n in settings['PLUGIN_PATHS']]\n\n settings = dict(copy.deepcopy(DEFAULT_CONFIG), **settings)\n settings = configure_settings(settings)\n\n # This is because there doesn't seem to be a way to pass extra\n # parameters to docutils directive handlers, so we have to have a\n # variable here that we'll import from within Pygments.run (see\n # rstdirectives.py) to see what the user defaults were.\n global PYGMENTS_RST_OPTIONS\n PYGMENTS_RST_OPTIONS = settings.get('PYGMENTS_RST_OPTIONS', None)\n return settings\n\n\ndef get_settings_from_module(module=None):\n \"\"\"Loads settings from a module, returns a dictionary.\"\"\"\n\n context = {}\n if module is not None:\n context.update(\n (k, v) for k, v in inspect.getmembers(module) if k.isupper())\n return context\n\n\ndef get_settings_from_file(path):\n \"\"\"Loads settings from a file path, returning a dict.\"\"\"\n\n name, ext = os.path.splitext(os.path.basename(path))\n module = load_source(name, path)\n return get_settings_from_module(module)\n\n\ndef get_jinja_environment(settings):\n \"\"\"Sets the 
environment for Jinja\"\"\"\n\n jinja_env = settings.setdefault('JINJA_ENVIRONMENT',\n DEFAULT_CONFIG['JINJA_ENVIRONMENT'])\n\n # Make sure we include the defaults if the user has set env variables\n for key, value in DEFAULT_CONFIG['JINJA_ENVIRONMENT'].items():\n if key not in jinja_env:\n jinja_env[key] = value\n\n return settings\n\n\ndef _printf_s_to_format_field(printf_string, format_field):\n \"\"\"Tries to replace %s with {format_field} in the provided printf_string.\n Raises ValueError in case of failure.\n \"\"\"\n TEST_STRING = 'PELICAN_PRINTF_S_DEPRECATION'\n expected = printf_string % TEST_STRING\n\n result = printf_string.replace('{', '{{').replace('}', '}}') \\\n % '{{{}}}'.format(format_field)\n if result.format(**{format_field: TEST_STRING}) != expected:\n raise ValueError('Failed to safely replace %s with {{{}}}'.format(\n format_field))\n\n return result\n\n\ndef handle_deprecated_settings(settings):\n \"\"\"Converts deprecated settings and issues warnings. Issues an exception\n if both old and new setting is specified.\n \"\"\"\n\n # PLUGIN_PATH -> PLUGIN_PATHS\n if 'PLUGIN_PATH' in settings:\n logger.warning('PLUGIN_PATH setting has been replaced by '\n 'PLUGIN_PATHS, moving it to the new setting name.')\n settings['PLUGIN_PATHS'] = settings['PLUGIN_PATH']\n del settings['PLUGIN_PATH']\n\n # PLUGIN_PATHS: str -> [str]\n if isinstance(settings.get('PLUGIN_PATHS'), six.string_types):\n logger.warning(\"Defining PLUGIN_PATHS setting as string \"\n \"has been deprecated (should be a list)\")\n settings['PLUGIN_PATHS'] = [settings['PLUGIN_PATHS']]\n\n # JINJA_EXTENSIONS -> JINJA_ENVIRONMENT > extensions\n if 'JINJA_EXTENSIONS' in settings:\n logger.warning('JINJA_EXTENSIONS setting has been deprecated, '\n 'moving it to JINJA_ENVIRONMENT setting.')\n settings['JINJA_ENVIRONMENT']['extensions'] = \\\n settings['JINJA_EXTENSIONS']\n del settings['JINJA_EXTENSIONS']\n\n # {ARTICLE,PAGE}_DIR -> {ARTICLE,PAGE}_PATHS\n for key in ['ARTICLE', 'PAGE']:\n old_key = key + '_DIR'\n new_key = key + '_PATHS'\n if old_key in settings:\n logger.warning(\n 'Deprecated setting %s, moving it to %s list',\n old_key, new_key)\n settings[new_key] = [settings[old_key]] # also make a list\n del settings[old_key]\n\n # EXTRA_TEMPLATES_PATHS -> THEME_TEMPLATES_OVERRIDES\n if 'EXTRA_TEMPLATES_PATHS' in settings:\n logger.warning('EXTRA_TEMPLATES_PATHS is deprecated use '\n 'THEME_TEMPLATES_OVERRIDES instead.')\n if ('THEME_TEMPLATES_OVERRIDES' in settings and\n settings['THEME_TEMPLATES_OVERRIDES']):\n raise Exception(\n 'Setting both EXTRA_TEMPLATES_PATHS and '\n 'THEME_TEMPLATES_OVERRIDES is not permitted. Please move to '\n 'only setting THEME_TEMPLATES_OVERRIDES.')\n settings['THEME_TEMPLATES_OVERRIDES'] = \\\n settings['EXTRA_TEMPLATES_PATHS']\n del settings['EXTRA_TEMPLATES_PATHS']\n\n # MD_EXTENSIONS -> MARKDOWN\n if 'MD_EXTENSIONS' in settings:\n logger.warning('MD_EXTENSIONS is deprecated use MARKDOWN '\n 'instead. 
Falling back to the default.')\n settings['MARKDOWN'] = DEFAULT_CONFIG['MARKDOWN']\n\n # LESS_GENERATOR -> Webassets plugin\n # FILES_TO_COPY -> STATIC_PATHS, EXTRA_PATH_METADATA\n for old, new, doc in [\n ('LESS_GENERATOR', 'the Webassets plugin', None),\n ('FILES_TO_COPY', 'STATIC_PATHS and EXTRA_PATH_METADATA',\n 'https://github.com/getpelican/pelican/'\n 'blob/master/docs/settings.rst#path-metadata'),\n ]:\n if old in settings:\n message = 'The {} setting has been removed in favor of {}'.format(\n old, new)\n if doc:\n message += ', see {} for details'.format(doc)\n logger.warning(message)\n\n # PAGINATED_DIRECT_TEMPLATES -> PAGINATED_TEMPLATES\n if 'PAGINATED_DIRECT_TEMPLATES' in settings:\n message = 'The {} setting has been removed in favor of {}'.format(\n 'PAGINATED_DIRECT_TEMPLATES', 'PAGINATED_TEMPLATES')\n logger.warning(message)\n\n # set PAGINATED_TEMPLATES\n if 'PAGINATED_TEMPLATES' not in settings:\n settings['PAGINATED_TEMPLATES'] = {\n 'tag': None, 'category': None, 'author': None}\n\n for t in settings['PAGINATED_DIRECT_TEMPLATES']:\n if t not in settings['PAGINATED_TEMPLATES']:\n settings['PAGINATED_TEMPLATES'][t] = None\n del settings['PAGINATED_DIRECT_TEMPLATES']\n\n # {SLUG,CATEGORY,TAG,AUTHOR}_SUBSTITUTIONS ->\n # {SLUG,CATEGORY,TAG,AUTHOR}_REGEX_SUBSTITUTIONS\n url_settings_url = \\\n 'http://docs.getpelican.com/en/latest/settings.html#url-settings'\n flavours = {'SLUG', 'CATEGORY', 'TAG', 'AUTHOR'}\n old_values = {f: settings[f + '_SUBSTITUTIONS']\n for f in flavours if f + '_SUBSTITUTIONS' in settings}\n new_values = {f: settings[f + '_REGEX_SUBSTITUTIONS']\n for f in flavours if f + '_REGEX_SUBSTITUTIONS' in settings}\n if old_values and new_values:\n raise Exception(\n 'Setting both {new_key} and {old_key} (or variants thereof) is '\n 'not permitted. Please move to only setting {new_key}.'\n .format(old_key='SLUG_SUBSTITUTIONS',\n new_key='SLUG_REGEX_SUBSTITUTIONS'))\n if old_values:\n message = ('{} and variants thereof are deprecated and will be '\n 'removed in the future. Please use {} and variants thereof '\n 'instead. Check {}.'\n .format('SLUG_SUBSTITUTIONS', 'SLUG_REGEX_SUBSTITUTIONS',\n url_settings_url))\n logger.warning(message)\n if old_values.get('SLUG'):\n for f in {'CATEGORY', 'TAG'}:\n if old_values.get(f):\n old_values[f] = old_values['SLUG'] + old_values[f]\n old_values['AUTHOR'] = old_values.get('AUTHOR', [])\n for f in flavours:\n if old_values.get(f) is not None:\n regex_subs = []\n # by default will replace non-alphanum characters\n replace = True\n for tpl in old_values[f]:\n try:\n src, dst, skip = tpl\n if skip:\n replace = False\n except ValueError:\n src, dst = tpl\n regex_subs.append(\n (re.escape(src), dst.replace('\\\\', r'\\\\')))\n\n if replace:\n regex_subs += [\n (r'[^\\w\\s-]', ''),\n (r'(?u)\\A\\s*', ''),\n (r'(?u)\\s*\\Z', ''),\n (r'[-\\s]+', '-'),\n ]\n else:\n regex_subs += [\n (r'(?u)\\A\\s*', ''),\n (r'(?u)\\s*\\Z', ''),\n ]\n settings[f + '_REGEX_SUBSTITUTIONS'] = regex_subs\n settings.pop(f + '_SUBSTITUTIONS', None)\n\n # `%s` -> '{slug}` or `{lang}` in FEED settings\n for key in ['TRANSLATION_FEED_ATOM',\n 'TRANSLATION_FEED_RSS'\n ]:\n if settings.get(key) and '%s' in settings[key]:\n logger.warning('%%s usage in %s is deprecated, use {lang} '\n 'instead.', key)\n try:\n settings[key] = _printf_s_to_format_field(\n settings[key], 'lang')\n except ValueError:\n logger.warning('Failed to convert %%s to {lang} for %s. 
'\n 'Falling back to default.', key)\n settings[key] = DEFAULT_CONFIG[key]\n for key in ['AUTHOR_FEED_ATOM',\n 'AUTHOR_FEED_RSS',\n 'CATEGORY_FEED_ATOM',\n 'CATEGORY_FEED_RSS',\n 'TAG_FEED_ATOM',\n 'TAG_FEED_RSS',\n ]:\n if settings.get(key) and '%s' in settings[key]:\n logger.warning('%%s usage in %s is deprecated, use {slug} '\n 'instead.', key)\n try:\n settings[key] = _printf_s_to_format_field(\n settings[key], 'slug')\n except ValueError:\n logger.warning('Failed to convert %%s to {slug} for %s. '\n 'Falling back to default.', key)\n settings[key] = DEFAULT_CONFIG[key]\n\n return settings\n\n\ndef configure_settings(settings):\n \"\"\"Provide optimizations, error checking, and warnings for the given\n settings.\n Also, specify the log messages to be ignored.\n \"\"\"\n if 'PATH' not in settings or not os.path.isdir(settings['PATH']):\n raise Exception('You need to specify a path containing the content'\n ' (see pelican --help for more information)')\n\n # specify the log messages to be ignored\n log_filter = settings.get('LOG_FILTER', DEFAULT_CONFIG['LOG_FILTER'])\n LimitFilter._ignore.update(set(log_filter))\n\n # lookup the theme in \"pelican/themes\" if the given one doesn't exist\n if not os.path.isdir(settings['THEME']):\n theme_path = os.path.join(\n os.path.dirname(os.path.abspath(__file__)),\n 'themes',\n settings['THEME'])\n if os.path.exists(theme_path):\n settings['THEME'] = theme_path\n else:\n raise Exception(\"Could not find the theme %s\"\n % settings['THEME'])\n\n # make paths selected for writing absolute if necessary\n settings['WRITE_SELECTED'] = [\n os.path.abspath(path) for path in\n settings.get('WRITE_SELECTED', DEFAULT_CONFIG['WRITE_SELECTED'])\n ]\n\n # standardize strings to lowercase strings\n for key in ['DEFAULT_LANG']:\n if key in settings:\n settings[key] = settings[key].lower()\n\n # set defaults for Jinja environment\n settings = get_jinja_environment(settings)\n\n # standardize strings to lists\n for key in ['LOCALE']:\n if key in settings and isinstance(settings[key], six.string_types):\n settings[key] = [settings[key]]\n\n # check settings that must be a particular type\n for key, types in [\n ('OUTPUT_SOURCES_EXTENSION', six.string_types),\n ('FILENAME_METADATA', six.string_types),\n ]:\n if key in settings and not isinstance(settings[key], types):\n value = settings.pop(key)\n logger.warn(\n 'Detected misconfigured %s (%s), '\n 'falling back to the default (%s)',\n key, value, DEFAULT_CONFIG[key])\n\n # try to set the different locales, fallback on the default.\n locales = settings.get('LOCALE', DEFAULT_CONFIG['LOCALE'])\n\n for locale_ in locales:\n try:\n locale.setlocale(locale.LC_ALL, str(locale_))\n break # break if it is successful\n except locale.Error:\n pass\n else:\n logger.warning(\n \"Locale could not be set. 
Check the LOCALE setting, ensuring it \"\n \"is valid and available on your system.\")\n\n if ('SITEURL' in settings):\n # If SITEURL has a trailing slash, remove it and provide a warning\n siteurl = settings['SITEURL']\n if (siteurl.endswith('/')):\n settings['SITEURL'] = siteurl[:-1]\n logger.warning(\"Removed extraneous trailing slash from SITEURL.\")\n # If SITEURL is defined but FEED_DOMAIN isn't,\n # set FEED_DOMAIN to SITEURL\n if 'FEED_DOMAIN' not in settings:\n settings['FEED_DOMAIN'] = settings['SITEURL']\n\n # check content caching layer and warn of incompatibilities\n if settings.get('CACHE_CONTENT', False) and \\\n settings.get('CONTENT_CACHING_LAYER', '') == 'generator' and \\\n settings.get('WITH_FUTURE_DATES', False):\n logger.warning(\n \"WITH_FUTURE_DATES conflicts with CONTENT_CACHING_LAYER \"\n \"set to 'generator', use 'reader' layer instead\")\n\n # Warn if feeds are generated with both SITEURL & FEED_DOMAIN undefined\n feed_keys = [\n 'FEED_ATOM', 'FEED_RSS',\n 'FEED_ALL_ATOM', 'FEED_ALL_RSS',\n 'CATEGORY_FEED_ATOM', 'CATEGORY_FEED_RSS',\n 'AUTHOR_FEED_ATOM', 'AUTHOR_FEED_RSS',\n 'TAG_FEED_ATOM', 'TAG_FEED_RSS',\n 'TRANSLATION_FEED_ATOM', 'TRANSLATION_FEED_RSS',\n ]\n\n if any(settings.get(k) for k in feed_keys):\n if not settings.get('SITEURL'):\n logger.warning('Feeds generated without SITEURL set properly may'\n ' not be valid')\n\n if 'TIMEZONE' not in settings:\n logger.warning(\n 'No timezone information specified in the settings. Assuming'\n ' your timezone is UTC for feed generation. Check '\n 'http://docs.getpelican.com/en/latest/settings.html#timezone '\n 'for more information')\n\n # fix up pagination rules\n from pelican.paginator import PaginationRule\n pagination_rules = [\n PaginationRule(*r) for r in settings.get(\n 'PAGINATION_PATTERNS',\n DEFAULT_CONFIG['PAGINATION_PATTERNS'],\n )\n ]\n settings['PAGINATION_PATTERNS'] = sorted(\n pagination_rules,\n key=lambda r: r[0],\n )\n\n # Save people from accidentally setting a string rather than a list\n path_keys = (\n 'ARTICLE_EXCLUDES',\n 'DEFAULT_METADATA',\n 'DIRECT_TEMPLATES',\n 'THEME_TEMPLATES_OVERRIDES',\n 'FILES_TO_COPY',\n 'IGNORE_FILES',\n 'PAGINATED_DIRECT_TEMPLATES',\n 'PLUGINS',\n 'STATIC_EXCLUDES',\n 'STATIC_PATHS',\n 'THEME_STATIC_PATHS',\n 'ARTICLE_PATHS',\n 'PAGE_PATHS',\n )\n for PATH_KEY in filter(lambda k: k in settings, path_keys):\n if isinstance(settings[PATH_KEY], six.string_types):\n logger.warning(\"Detected misconfiguration with %s setting \"\n \"(must be a list), falling back to the default\",\n PATH_KEY)\n settings[PATH_KEY] = DEFAULT_CONFIG[PATH_KEY]\n\n # Add {PAGE,ARTICLE}_PATHS to {ARTICLE,PAGE}_EXCLUDES\n mutually_exclusive = ('ARTICLE', 'PAGE')\n for type_1, type_2 in [mutually_exclusive, mutually_exclusive[::-1]]:\n try:\n includes = settings[type_1 + '_PATHS']\n excludes = settings[type_2 + '_EXCLUDES']\n for path in includes:\n if path not in excludes:\n excludes.append(path)\n except KeyError:\n continue # setting not specified, nothing to do\n\n return settings\n", "path": "pelican/settings.py"}]} |
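The Pelican record above hinges on a socket-level detail that is easy to miss: an empty host string handed to `bind()` means INADDR_ANY, i.e. every interface, not "no host". A minimal standard-library sketch of that behavior (illustrative only, not code from the Pelican source; the port number is arbitrary):

```python
# Sketch: how an empty BIND value reaches the socket layer.
from http.server import HTTPServer, SimpleHTTPRequestHandler

def serve(bind_address, port=8000):
    # HTTPServer passes (bind_address, port) straight to socket.bind():
    #   ''          -> INADDR_ANY: listen on all interfaces
    #   '127.0.0.1' -> loopback interface only
    httpd = HTTPServer((bind_address, port), SimpleHTTPRequestHandler)
    httpd.serve_forever()

# serve('')           # reachable from other machines on the network
# serve('127.0.0.1')  # private to the machine, matching the documented default
```

With `''` the server is exposed to the local network; with `'127.0.0.1'` it stays on loopback, which is why the single-value change to `DEFAULT_CONFIG['BIND']` in the golden diff above is the entire fix.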
gh_patches_debug_132 | rasdani/github-patches | git_diff | spacetelescope__jwql-474 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
API ReadTheDocs failing
It seems as though our webhook to ReadTheDocs is not currently updating the API docs:
[Documentation Status](https://jwql.readthedocs.io/en/latest/?badge=latest)
Actually, it looks like our doc builds have been failing for a month!
https://readthedocs.org/projects/jwql/builds/
There is an error message here that should be helpful for investigating: https://readthedocs.org/projects/jwql/builds/9177249/
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jwql/utils/utils.py`
Content:
```
1 """Various utility functions for the ``jwql`` project.
2
3 Authors
4 -------
5
6 - Matthew Bourque
7 - Lauren Chambers
8
9 Use
10 ---
11
12 This module can be imported as such:
13
14 >>> import utils
15 settings = get_config()
16
17 References
18 ----------
19
20 Filename parser modified from Joe Hunkeler:
21 https://gist.github.com/jhunkeler/f08783ca2da7bfd1f8e9ee1d207da5ff
22
23 Various documentation related to JWST filename conventions:
24 - https://jwst-docs.stsci.edu/display/JDAT/File+Naming+Conventions+and+Data+Products
25 - https://innerspace.stsci.edu/pages/viewpage.action?pageId=94092600
26 - https://innerspace.stsci.edu/pages/viewpage.action?spaceKey=SCSB&title=JWST+Science+Data+Products
27 - https://jwst-docs.stsci.edu/display/JDAT/Understanding+Associations?q=association%20candidate
28 - https://jwst-pipeline.readthedocs.io/en/stable/jwst/introduction.html#pipeline-step-suffix-definitions
29 - JWST TR JWST-STScI-004800, SM-12
30 """
31
32 import datetime
33 import getpass
34 import json
35 import os
36 import re
37 import shutil
38
39 import jsonschema
40
41 from jwql.utils import permissions
42 from jwql.utils.constants import FILE_SUFFIX_TYPES, JWST_INSTRUMENT_NAMES_SHORTHAND
43
44 __location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
45
46
47 def copy_files(files, out_dir):
48 """Copy a given file to a given directory. Only try to copy the file
49 if it is not already present in the output directory.
50
51 Parameters
52 ----------
53 files : list
54 List of files to be copied
55
56 out_dir : str
57 Destination directory
58
59 Returns
60 -------
61 success : list
62 Files successfully copied (or that already existed in out_dir)
63
64 failed : list
65 Files that were not copied
66 """
67
68 # Copy files if they do not already exist
69 success = []
70 failed = []
71 for input_file in files:
72 input_new_path = os.path.join(out_dir, os.path.basename(input_file))
73 if os.path.isfile(input_new_path):
74 success.append(input_new_path)
75 else:
76 try:
77 shutil.copy2(input_file, out_dir)
78 success.append(input_new_path)
79 permissions.set_permissions(input_new_path)
80 except:
81 failed.append(input_file)
82 return success, failed
83
84
85 def download_mast_data(query_results, output_dir):
86 """Example function for downloading MAST query results. From MAST
87 website (``https://mast.stsci.edu/api/v0/pyex.html``)
88
89 Parameters
90 ----------
91 query_results : list
92 List of dictionaries returned by a MAST query.
93
94 output_dir : str
95         Directory into which the files will be downloaded
96 """
97
98 # Set up the https connection
99 server = 'mast.stsci.edu'
100 conn = httplib.HTTPSConnection(server)
101
102     # Download the products
103 print('Number of query results: {}'.format(len(query_results)))
104
105 for i in range(len(query_results)):
106
107 # Make full output file path
108 output_file = os.path.join(output_dir, query_results[i]['filename'])
109
110 print('Output file is {}'.format(output_file))
111
112 # Download the data
113 uri = query_results[i]['dataURI']
114
115 print('uri is {}'.format(uri))
116
117 conn.request("GET", "/api/v0/download/file?uri=" + uri)
118 resp = conn.getresponse()
119 file_content = resp.read()
120
121 # Save to file
122 with open(output_file, 'wb') as file_obj:
123 file_obj.write(file_content)
124
125 # Check for file
126 if not os.path.isfile(output_file):
127 print("ERROR: {} failed to download.".format(output_file))
128 else:
129 statinfo = os.stat(output_file)
130 if statinfo.st_size > 0:
131 print("DOWNLOAD COMPLETE: ", output_file)
132 else:
133 print("ERROR: {} file is empty.".format(output_file))
134 conn.close()
135
136
137 def ensure_dir_exists(fullpath):
138 """Creates dirs from ``fullpath`` if they do not already exist."""
139 if not os.path.exists(fullpath):
140 os.makedirs(fullpath)
141 permissions.set_permissions(fullpath)
142
143
144 def filename_parser(filename):
145 """Return a dictionary that contains the properties of a given
146 JWST file (e.g. program ID, visit number, detector, etc.).
147
148 Parameters
149 ----------
150 filename : str
151 Path or name of JWST file to parse
152
153 Returns
154 -------
155 filename_dict : dict
156 Collection of file properties
157
158 Raises
159 ------
160 ValueError
161 When the provided file does not follow naming conventions
162 """
163
164 filename = os.path.basename(filename)
165 file_root_name = (len(filename.split('.')) < 2)
166
167 # Stage 1 and 2 filenames
168 # e.g. "jw80500012009_01101_00012_nrcalong_uncal.fits"
169 stage_1_and_2 = \
170 r"jw" \
171 r"(?P<program_id>\d{5})"\
172 r"(?P<observation>\d{3})"\
173 r"(?P<visit>\d{3})"\
174 r"_(?P<visit_group>\d{2})"\
175 r"(?P<parallel_seq_id>\d{1})"\
176 r"(?P<activity>\w{2})"\
177 r"_(?P<exposure_id>\d+)"\
178 r"_(?P<detector>((?!_)[\w])+)"
179
180 # Stage 2c outlier detection filenames
181 # e.g. "jw94015002002_02108_00001_mirimage_o002_crf.fits"
182 stage_2c = \
183 r"jw" \
184 r"(?P<program_id>\d{5})" \
185 r"(?P<observation>\d{3})" \
186 r"(?P<visit>\d{3})" \
187 r"_(?P<visit_group>\d{2})" \
188 r"(?P<parallel_seq_id>\d{1})" \
189 r"(?P<activity>\w{2})" \
190 r"_(?P<exposure_id>\d+)" \
191 r"_(?P<detector>((?!_)[\w])+)"\
192 r"_(?P<ac_id>(o\d{3}|(c|a|r)\d{4}))"
193
194 # Stage 3 filenames with target ID
195 # e.g. "jw80600-o009_t001_miri_f1130w_i2d.fits"
196 stage_3_target_id = \
197 r"jw" \
198 r"(?P<program_id>\d{5})"\
199 r"-(?P<ac_id>(o\d{3}|(c|a|r)\d{4}))"\
200 r"_(?P<target_id>(t)\d{3})"\
201 r"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))"\
202 r"_(?P<optical_elements>((?!_)[\w-])+)"
203
204 # Stage 3 filenames with source ID
205 # e.g. "jw80600-o009_s00001_miri_f1130w_i2d.fits"
206 stage_3_source_id = \
207 r"jw" \
208 r"(?P<program_id>\d{5})"\
209 r"-(?P<ac_id>(o\d{3}|(c|a|r)\d{4}))"\
210 r"_(?P<source_id>(s)\d{5})"\
211 r"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))"\
212 r"_(?P<optical_elements>((?!_)[\w-])+)"
213
214 # Stage 3 filenames with target ID and epoch
215 # e.g. "jw80600-o009_t001-epoch1_miri_f1130w_i2d.fits"
216 stage_3_target_id_epoch = \
217 r"jw" \
218 r"(?P<program_id>\d{5})"\
219 r"-(?P<ac_id>(o\d{3}|(c|a|r)\d{4}))"\
220 r"_(?P<target_id>(t)\d{3})"\
221 r"-epoch(?P<epoch>\d{1})"\
222 r"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))"\
223 r"_(?P<optical_elements>((?!_)[\w-])+)"
224
225 # Stage 3 filenames with source ID and epoch
226 # e.g. "jw80600-o009_s00001-epoch1_miri_f1130w_i2d.fits"
227 stage_3_source_id_epoch = \
228 r"jw" \
229 r"(?P<program_id>\d{5})"\
230 r"-(?P<ac_id>(o\d{3}|(c|a|r)\d{4}))"\
231 r"_(?P<source_id>(s)\d{5})"\
232 r"-epoch(?P<epoch>\d{1})"\
233 r"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))"\
234 r"_(?P<optical_elements>((?!_)[\w-])+)"
235
236 # Time series filenames
237 # e.g. "jw00733003001_02101_00002-seg001_nrs1_rate.fits"
238 time_series = \
239 r"jw" \
240 r"(?P<program_id>\d{5})"\
241 r"(?P<observation>\d{3})"\
242 r"(?P<visit>\d{3})"\
243 r"_(?P<visit_group>\d{2})"\
244 r"(?P<parallel_seq_id>\d{1})"\
245 r"(?P<activity>\w{2})"\
246 r"_(?P<exposure_id>\d+)"\
247 r"-seg(?P<segment>\d{3})"\
248 r"_(?P<detector>\w+)"
249
250 # Guider filenames
251 # e.g. "jw00729011001_gs-id_1_image_cal.fits" or
252 # "jw00799003001_gs-acq1_2019154181705_stream.fits"
253 guider = \
254 r"jw" \
255 r"(?P<program_id>\d{5})" \
256 r"(?P<observation>\d{3})" \
257 r"(?P<visit>\d{3})" \
258 r"_gs-(?P<guider_mode>(id|acq1|acq2|track|fg))" \
259 r"_((?P<date_time>\d{13})|(?P<guide_star_attempt_id>\d{1}))"
260
261 # Build list of filename types
262 filename_types = [
263 stage_1_and_2,
264 stage_2c,
265 stage_3_target_id,
266 stage_3_source_id,
267 stage_3_target_id_epoch,
268 stage_3_source_id_epoch,
269 time_series,
270 guider]
271
272 filename_type_names = [
273 'stage_1_and_2',
274 'stage_2c',
275 'stage_3_target_id',
276 'stage_3_source_id',
277 'stage_3_target_id_epoch',
278 'stage_3_source_id_epoch',
279 'time_series',
280 'guider'
281 ]
282
283 # Try to parse the filename
284 for filename_type, filename_type_name in zip(filename_types, filename_type_names):
285
286 # If full filename, try using suffix
287 if not file_root_name:
288 filename_type += r"_(?P<suffix>{}).*".format('|'.join(FILE_SUFFIX_TYPES))
289 # If not, make sure the provided regex matches the entire filename root
290 else:
291 filename_type += r"$"
292
293 elements = re.compile(filename_type)
294 jwst_file = elements.match(filename)
295
296 # Stop when you find a format that matches
297 if jwst_file is not None:
298 name_match = filename_type_name
299 break
300
301 try:
302 # Convert the regex match to a dictionary
303 filename_dict = jwst_file.groupdict()
304
305 # Add the filename type to that dict
306 filename_dict['filename_type'] = name_match
307
308 # Also, add the instrument if not already there
309 if 'instrument' not in filename_dict.keys():
310 if name_match == 'guider':
311 filename_dict['instrument'] = 'fgs'
312 elif 'detector' in filename_dict.keys():
313 filename_dict['instrument'] = JWST_INSTRUMENT_NAMES_SHORTHAND[
314 filename_dict['detector'][:3]
315 ]
316
317 # Raise error if unable to parse the filename
318 except AttributeError:
319 jdox_url = 'https://jwst-docs.stsci.edu/display/JDAT/' \
320 'File+Naming+Conventions+and+Data+Products'
321 raise ValueError(
322 'Provided file {} does not follow JWST naming conventions. '
323 'See {} for further information.'.format(filename, jdox_url)
324 )
325
326 return filename_dict
327
328
329 def filesystem_path(filename):
330 """Return the full path to a given file in the filesystem
331
332 Parameters
333 ----------
334 filename : str
335 File to locate (e.g. ``jw86600006001_02101_00008_guider1_cal.fits``)
336
337 Returns
338 -------
339 full_path : str
340 Full path to the given file, including filename
341 """
342
343 filesystem_base = get_config()["filesystem"]
344
345 # Subdirectory name is based on the proposal ID
346 subdir = 'jw{}'.format(filename_parser(filename)['program_id'])
347 full_path = os.path.join(filesystem_base, subdir, filename)
348
349 # Check to see if the file exists
350 if os.path.isfile(full_path):
351 return full_path
352 else:
353 raise FileNotFoundError(
354 '{} is not in the predicted location: {}'.format(filename, full_path)
355 )
356
357
358 def get_base_url():
359 """Return the beginning part of the URL to the ``jwql`` web app
360 based on which user is running the software.
361
362 If the admin account is running the code, the ``base_url`` is
363 assumed to be the production URL. If not, the ``base_url`` is
364 assumed to be local.
365
366 Returns
367 -------
368 base_url : str
369 The beginning part of the URL to the ``jwql`` web app
370 """
371
372 username = getpass.getuser()
373 if username == get_config()['admin_account']:
374 base_url = 'https://dljwql.stsci.edu'
375 else:
376 base_url = 'http://127.0.0.1:8000'
377
378 return base_url
379
380
381 def get_config():
382 """Return a dictionary that holds the contents of the ``jwql``
383 config file.
384
385 Returns
386 -------
387 settings : dict
388 A dictionary that holds the contents of the config file.
389 """
390 config_file_location = os.path.join(__location__, 'config.json')
391
392 # Make sure the file exists
393 if not os.path.isfile(config_file_location):
394 raise FileNotFoundError('The JWQL package requires a configuration file (config.json) '
395 'to be placed within the jwql/utils directory. '
396 'This file is missing. Please read the relevant wiki page '
397 '(https://github.com/spacetelescope/jwql/wiki/'
398 'Config-file) for more information.')
399
400 with open(config_file_location, 'r') as config_file_object:
401 try:
402 # Load it with JSON
403 settings = json.load(config_file_object)
404 except json.JSONDecodeError as e:
405 # Raise a more helpful error if there is a formatting problem
406 raise ValueError('Incorrectly formatted config.json file. '
407 'Please fix JSON formatting: {}'.format(e))
408
409 # Ensure the file has all the needed entries with expected data types
410 _validate_config(settings)
411
412 return settings
413
414
415 def check_config_for_key(key):
416 """Check that the config.json file contains the specified key
417 and that the entry is not empty
418
419 Parameters
420 -------
421 key : str
422 The configuration file key to verify
423 """
424 try:
425 get_config()[key]
426 except KeyError:
427 raise KeyError(
428 'The key `{}` is not present in config.json. Please add it.'.format(key)
429 + ' See the relevant wiki page (https://github.com/spacetelescope/'
430 'jwql/wiki/Config-file) for more information.'
431 )
432
433 if get_config()[key] == "":
434 raise ValueError(
435 'Please complete the `{}` field in your config.json. '.format(key)
436 + ' See the relevant wiki page (https://github.com/spacetelescope/'
437 'jwql/wiki/Config-file) for more information.'
438 )
439
440
441 def _validate_config(config_file_dict):
442 """Check that the config.json file contains all the needed entries with
443 expected data types
444
445 Parameters
446 ----------
447 config_file_dict : dict
448 The configuration JSON file loaded as a dictionary
449
450 Notes
451 -----
452 See here for more information on JSON schemas:
453 https://json-schema.org/learn/getting-started-step-by-step.html
454 """
455 # Define the schema for config.json
456 schema = {
457 "type": "object", # Must be a JSON object
458 "properties": { # List all the possible entries and their types
459 "connection_string": {"type": "string"},
460 "database": {
461 "type": "object",
462 "properties": {
463 "engine": {"type": "string"},
464 "name": {"type": "string"},
465 "user": {"type": "string"},
466 "password": {"type": "string"},
467 "host": {"type": "string"},
468 "port": {"type": "string"}
469 },
470 "required": ['engine', 'name', 'user', 'password', 'host', 'port']
471 },
472 "filesystem": {"type": "string"},
473 "preview_image_filesystem": {"type": "string"},
474 "thumbnail_filesystem": {"type": "string"},
475 "outputs": {"type": "string"},
476 "jwql_dir": {"type": "string"},
477 "admin_account": {"type": "string"},
478 "log_dir": {"type": "string"},
479 "test_dir": {"type": "string"},
480 "test_data": {"type": "string"},
481 "setup_file": {"type": "string"},
482 "auth_mast": {"type": "string"},
483 "client_id": {"type": "string"},
484 "client_secret": {"type": "string"},
485 "mast_token": {"type": "string"},
486 },
487 # List which entries are needed (all of them)
488 "required": ["connection_string", "database", "filesystem",
489 "preview_image_filesystem", "thumbnail_filesystem",
490 "outputs", "jwql_dir", "admin_account", "log_dir",
491 "test_dir", "test_data", "setup_file", "auth_mast",
492 "client_id", "client_secret", "mast_token"]
493 }
494
495 # Test that the provided config file dict matches the schema
496 try:
497 jsonschema.validate(instance=config_file_dict, schema=schema)
498 except jsonschema.ValidationError as e:
499 raise jsonschema.ValidationError(
500 'Provided config.json does not match the ' + \
501 'required JSON schema: {}'.format(e.message)
502 )
503
504
505 def initialize_instrument_monitor(module):
506 """Configures a log file for the instrument monitor run and
507 captures the start time of the monitor
508
509 Parameters
510 ----------
511 module : str
512 The module name (e.g. ``dark_monitor``)
513
514 Returns
515 -------
516 start_time : datetime object
517 The start time of the monitor
518 log_file : str
519 The path to where the log file is stored
520 """
521
522 from jwql.utils.logging_functions import configure_logging
523
524 start_time = datetime.datetime.now()
525 log_file = configure_logging(module)
526
527 return start_time, log_file
528
529
530 def update_monitor_table(module, start_time, log_file):
531 """Update the ``monitor`` database table with information about
532 the instrument monitor run
533
534 Parameters
535 ----------
536 module : str
537 The module name (e.g. ``dark_monitor``)
538 start_time : datetime object
539 The start time of the monitor
540 log_file : str
541 The path to where the log file is stored
542 """
543
544 from jwql.database.database_interface import Monitor
545
546 new_entry = {}
547 new_entry['monitor_name'] = module
548 new_entry['start_time'] = start_time
549 new_entry['end_time'] = datetime.datetime.now()
550 new_entry['log_file'] = os.path.basename(log_file)
551
552 Monitor.__table__.insert().execute(new_entry)
553
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/jwql/utils/utils.py b/jwql/utils/utils.py
--- a/jwql/utils/utils.py
+++ b/jwql/utils/utils.py
@@ -417,7 +417,7 @@
and that the entry is not empty
Parameters
- -------
+ ----------
key : str
The configuration file key to verify
"""
| {"golden_diff": "diff --git a/jwql/utils/utils.py b/jwql/utils/utils.py\n--- a/jwql/utils/utils.py\n+++ b/jwql/utils/utils.py\n@@ -417,7 +417,7 @@\n and that the entry is not empty\n \n Parameters\n- -------\n+ ----------\n key : str\n The configuration file key to verify\n \"\"\"\n", "issue": "API ReadTheDocs failing\nIt seems as though our webhook to ReadTheDocs is not currently updating the API docs:\r\n[](https://jwql.readthedocs.io/en/latest/?badge=latest)\r\n\r\nActually, it looks like our doc builds have been failing for a month!\r\nhttps://readthedocs.org/projects/jwql/builds/\r\n\r\nThere is an error message here that should be helpful for investigating: https://readthedocs.org/projects/jwql/builds/9177249/\r\n\r\n\n", "before_files": [{"content": "\"\"\"Various utility functions for the ``jwql`` project.\n\nAuthors\n-------\n\n - Matthew Bourque\n - Lauren Chambers\n\nUse\n---\n\n This module can be imported as such:\n\n >>> import utils\n settings = get_config()\n\nReferences\n----------\n\n Filename parser modified from Joe Hunkeler:\n https://gist.github.com/jhunkeler/f08783ca2da7bfd1f8e9ee1d207da5ff\n\n Various documentation related to JWST filename conventions:\n - https://jwst-docs.stsci.edu/display/JDAT/File+Naming+Conventions+and+Data+Products\n - https://innerspace.stsci.edu/pages/viewpage.action?pageId=94092600\n - https://innerspace.stsci.edu/pages/viewpage.action?spaceKey=SCSB&title=JWST+Science+Data+Products\n - https://jwst-docs.stsci.edu/display/JDAT/Understanding+Associations?q=association%20candidate\n - https://jwst-pipeline.readthedocs.io/en/stable/jwst/introduction.html#pipeline-step-suffix-definitions\n - JWST TR JWST-STScI-004800, SM-12\n \"\"\"\n\nimport datetime\nimport getpass\nimport json\nimport os\nimport re\nimport shutil\n\nimport jsonschema\n\nfrom jwql.utils import permissions\nfrom jwql.utils.constants import FILE_SUFFIX_TYPES, JWST_INSTRUMENT_NAMES_SHORTHAND\n\n__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))\n\n\ndef copy_files(files, out_dir):\n \"\"\"Copy a given file to a given directory. Only try to copy the file\n if it is not already present in the output directory.\n\n Parameters\n ----------\n files : list\n List of files to be copied\n\n out_dir : str\n Destination directory\n\n Returns\n -------\n success : list\n Files successfully copied (or that already existed in out_dir)\n\n failed : list\n Files that were not copied\n \"\"\"\n\n # Copy files if they do not already exist\n success = []\n failed = []\n for input_file in files:\n input_new_path = os.path.join(out_dir, os.path.basename(input_file))\n if os.path.isfile(input_new_path):\n success.append(input_new_path)\n else:\n try:\n shutil.copy2(input_file, out_dir)\n success.append(input_new_path)\n permissions.set_permissions(input_new_path)\n except:\n failed.append(input_file)\n return success, failed\n\n\ndef download_mast_data(query_results, output_dir):\n \"\"\"Example function for downloading MAST query results. 
From MAST\n website (``https://mast.stsci.edu/api/v0/pyex.html``)\n\n Parameters\n ----------\n query_results : list\n List of dictionaries returned by a MAST query.\n\n output_dir : str\n Directory into which the files will be downlaoded\n \"\"\"\n\n # Set up the https connection\n server = 'mast.stsci.edu'\n conn = httplib.HTTPSConnection(server)\n\n # Dowload the products\n print('Number of query results: {}'.format(len(query_results)))\n\n for i in range(len(query_results)):\n\n # Make full output file path\n output_file = os.path.join(output_dir, query_results[i]['filename'])\n\n print('Output file is {}'.format(output_file))\n\n # Download the data\n uri = query_results[i]['dataURI']\n\n print('uri is {}'.format(uri))\n\n conn.request(\"GET\", \"/api/v0/download/file?uri=\" + uri)\n resp = conn.getresponse()\n file_content = resp.read()\n\n # Save to file\n with open(output_file, 'wb') as file_obj:\n file_obj.write(file_content)\n\n # Check for file\n if not os.path.isfile(output_file):\n print(\"ERROR: {} failed to download.\".format(output_file))\n else:\n statinfo = os.stat(output_file)\n if statinfo.st_size > 0:\n print(\"DOWNLOAD COMPLETE: \", output_file)\n else:\n print(\"ERROR: {} file is empty.\".format(output_file))\n conn.close()\n\n\ndef ensure_dir_exists(fullpath):\n \"\"\"Creates dirs from ``fullpath`` if they do not already exist.\"\"\"\n if not os.path.exists(fullpath):\n os.makedirs(fullpath)\n permissions.set_permissions(fullpath)\n\n\ndef filename_parser(filename):\n \"\"\"Return a dictionary that contains the properties of a given\n JWST file (e.g. program ID, visit number, detector, etc.).\n\n Parameters\n ----------\n filename : str\n Path or name of JWST file to parse\n\n Returns\n -------\n filename_dict : dict\n Collection of file properties\n\n Raises\n ------\n ValueError\n When the provided file does not follow naming conventions\n \"\"\"\n\n filename = os.path.basename(filename)\n file_root_name = (len(filename.split('.')) < 2)\n\n # Stage 1 and 2 filenames\n # e.g. \"jw80500012009_01101_00012_nrcalong_uncal.fits\"\n stage_1_and_2 = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"(?P<observation>\\d{3})\"\\\n r\"(?P<visit>\\d{3})\"\\\n r\"_(?P<visit_group>\\d{2})\"\\\n r\"(?P<parallel_seq_id>\\d{1})\"\\\n r\"(?P<activity>\\w{2})\"\\\n r\"_(?P<exposure_id>\\d+)\"\\\n r\"_(?P<detector>((?!_)[\\w])+)\"\n\n # Stage 2c outlier detection filenames\n # e.g. \"jw94015002002_02108_00001_mirimage_o002_crf.fits\"\n stage_2c = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\" \\\n r\"(?P<observation>\\d{3})\" \\\n r\"(?P<visit>\\d{3})\" \\\n r\"_(?P<visit_group>\\d{2})\" \\\n r\"(?P<parallel_seq_id>\\d{1})\" \\\n r\"(?P<activity>\\w{2})\" \\\n r\"_(?P<exposure_id>\\d+)\" \\\n r\"_(?P<detector>((?!_)[\\w])+)\"\\\n r\"_(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\n\n # Stage 3 filenames with target ID\n # e.g. \"jw80600-o009_t001_miri_f1130w_i2d.fits\"\n stage_3_target_id = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"-(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\\\n r\"_(?P<target_id>(t)\\d{3})\"\\\n r\"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))\"\\\n r\"_(?P<optical_elements>((?!_)[\\w-])+)\"\n\n # Stage 3 filenames with source ID\n # e.g. 
\"jw80600-o009_s00001_miri_f1130w_i2d.fits\"\n stage_3_source_id = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"-(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\\\n r\"_(?P<source_id>(s)\\d{5})\"\\\n r\"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))\"\\\n r\"_(?P<optical_elements>((?!_)[\\w-])+)\"\n\n # Stage 3 filenames with target ID and epoch\n # e.g. \"jw80600-o009_t001-epoch1_miri_f1130w_i2d.fits\"\n stage_3_target_id_epoch = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"-(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\\\n r\"_(?P<target_id>(t)\\d{3})\"\\\n r\"-epoch(?P<epoch>\\d{1})\"\\\n r\"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))\"\\\n r\"_(?P<optical_elements>((?!_)[\\w-])+)\"\n\n # Stage 3 filenames with source ID and epoch\n # e.g. \"jw80600-o009_s00001-epoch1_miri_f1130w_i2d.fits\"\n stage_3_source_id_epoch = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"-(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\\\n r\"_(?P<source_id>(s)\\d{5})\"\\\n r\"-epoch(?P<epoch>\\d{1})\"\\\n r\"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))\"\\\n r\"_(?P<optical_elements>((?!_)[\\w-])+)\"\n\n # Time series filenames\n # e.g. \"jw00733003001_02101_00002-seg001_nrs1_rate.fits\"\n time_series = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"(?P<observation>\\d{3})\"\\\n r\"(?P<visit>\\d{3})\"\\\n r\"_(?P<visit_group>\\d{2})\"\\\n r\"(?P<parallel_seq_id>\\d{1})\"\\\n r\"(?P<activity>\\w{2})\"\\\n r\"_(?P<exposure_id>\\d+)\"\\\n r\"-seg(?P<segment>\\d{3})\"\\\n r\"_(?P<detector>\\w+)\"\n\n # Guider filenames\n # e.g. \"jw00729011001_gs-id_1_image_cal.fits\" or\n # \"jw00799003001_gs-acq1_2019154181705_stream.fits\"\n guider = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\" \\\n r\"(?P<observation>\\d{3})\" \\\n r\"(?P<visit>\\d{3})\" \\\n r\"_gs-(?P<guider_mode>(id|acq1|acq2|track|fg))\" \\\n r\"_((?P<date_time>\\d{13})|(?P<guide_star_attempt_id>\\d{1}))\"\n\n # Build list of filename types\n filename_types = [\n stage_1_and_2,\n stage_2c,\n stage_3_target_id,\n stage_3_source_id,\n stage_3_target_id_epoch,\n stage_3_source_id_epoch,\n time_series,\n guider]\n\n filename_type_names = [\n 'stage_1_and_2',\n 'stage_2c',\n 'stage_3_target_id',\n 'stage_3_source_id',\n 'stage_3_target_id_epoch',\n 'stage_3_source_id_epoch',\n 'time_series',\n 'guider'\n ]\n\n # Try to parse the filename\n for filename_type, filename_type_name in zip(filename_types, filename_type_names):\n\n # If full filename, try using suffix\n if not file_root_name:\n filename_type += r\"_(?P<suffix>{}).*\".format('|'.join(FILE_SUFFIX_TYPES))\n # If not, make sure the provided regex matches the entire filename root\n else:\n filename_type += r\"$\"\n\n elements = re.compile(filename_type)\n jwst_file = elements.match(filename)\n\n # Stop when you find a format that matches\n if jwst_file is not None:\n name_match = filename_type_name\n break\n\n try:\n # Convert the regex match to a dictionary\n filename_dict = jwst_file.groupdict()\n\n # Add the filename type to that dict\n filename_dict['filename_type'] = name_match\n\n # Also, add the instrument if not already there\n if 'instrument' not in filename_dict.keys():\n if name_match == 'guider':\n filename_dict['instrument'] = 'fgs'\n elif 'detector' in filename_dict.keys():\n filename_dict['instrument'] = JWST_INSTRUMENT_NAMES_SHORTHAND[\n filename_dict['detector'][:3]\n ]\n\n # Raise error if unable to parse the filename\n except AttributeError:\n jdox_url = 'https://jwst-docs.stsci.edu/display/JDAT/' \\\n 'File+Naming+Conventions+and+Data+Products'\n raise ValueError(\n 
'Provided file {} does not follow JWST naming conventions. '\n 'See {} for further information.'.format(filename, jdox_url)\n )\n\n return filename_dict\n\n\ndef filesystem_path(filename):\n \"\"\"Return the full path to a given file in the filesystem\n\n Parameters\n ----------\n filename : str\n File to locate (e.g. ``jw86600006001_02101_00008_guider1_cal.fits``)\n\n Returns\n -------\n full_path : str\n Full path to the given file, including filename\n \"\"\"\n\n filesystem_base = get_config()[\"filesystem\"]\n\n # Subdirectory name is based on the proposal ID\n subdir = 'jw{}'.format(filename_parser(filename)['program_id'])\n full_path = os.path.join(filesystem_base, subdir, filename)\n\n # Check to see if the file exists\n if os.path.isfile(full_path):\n return full_path\n else:\n raise FileNotFoundError(\n '{} is not in the predicted location: {}'.format(filename, full_path)\n )\n\n\ndef get_base_url():\n \"\"\"Return the beginning part of the URL to the ``jwql`` web app\n based on which user is running the software.\n\n If the admin account is running the code, the ``base_url`` is\n assumed to be the production URL. If not, the ``base_url`` is\n assumed to be local.\n\n Returns\n -------\n base_url : str\n The beginning part of the URL to the ``jwql`` web app\n \"\"\"\n\n username = getpass.getuser()\n if username == get_config()['admin_account']:\n base_url = 'https://dljwql.stsci.edu'\n else:\n base_url = 'http://127.0.0.1:8000'\n\n return base_url\n\n\ndef get_config():\n \"\"\"Return a dictionary that holds the contents of the ``jwql``\n config file.\n\n Returns\n -------\n settings : dict\n A dictionary that holds the contents of the config file.\n \"\"\"\n config_file_location = os.path.join(__location__, 'config.json')\n\n # Make sure the file exists\n if not os.path.isfile(config_file_location):\n raise FileNotFoundError('The JWQL package requires a configuration file (config.json) '\n 'to be placed within the jwql/utils directory. '\n 'This file is missing. Please read the relevant wiki page '\n '(https://github.com/spacetelescope/jwql/wiki/'\n 'Config-file) for more information.')\n\n with open(config_file_location, 'r') as config_file_object:\n try:\n # Load it with JSON\n settings = json.load(config_file_object)\n except json.JSONDecodeError as e:\n # Raise a more helpful error if there is a formatting problem\n raise ValueError('Incorrectly formatted config.json file. '\n 'Please fix JSON formatting: {}'.format(e))\n\n # Ensure the file has all the needed entries with expected data types\n _validate_config(settings)\n\n return settings\n\n\ndef check_config_for_key(key):\n \"\"\"Check that the config.json file contains the specified key\n and that the entry is not empty\n\n Parameters\n -------\n key : str\n The configuration file key to verify\n \"\"\"\n try:\n get_config()[key]\n except KeyError:\n raise KeyError(\n 'The key `{}` is not present in config.json. Please add it.'.format(key)\n + ' See the relevant wiki page (https://github.com/spacetelescope/'\n 'jwql/wiki/Config-file) for more information.'\n )\n\n if get_config()[key] == \"\":\n raise ValueError(\n 'Please complete the `{}` field in your config.json. 
'.format(key)\n + ' See the relevant wiki page (https://github.com/spacetelescope/'\n 'jwql/wiki/Config-file) for more information.'\n )\n\n\ndef _validate_config(config_file_dict):\n \"\"\"Check that the config.json file contains all the needed entries with\n expected data types\n\n Parameters\n ----------\n config_file_dict : dict\n The configuration JSON file loaded as a dictionary\n\n Notes\n -----\n See here for more information on JSON schemas:\n https://json-schema.org/learn/getting-started-step-by-step.html\n \"\"\"\n # Define the schema for config.json\n schema = {\n \"type\": \"object\", # Must be a JSON object\n \"properties\": { # List all the possible entries and their types\n \"connection_string\": {\"type\": \"string\"},\n \"database\": {\n \"type\": \"object\",\n \"properties\": {\n \"engine\": {\"type\": \"string\"},\n \"name\": {\"type\": \"string\"},\n \"user\": {\"type\": \"string\"},\n \"password\": {\"type\": \"string\"},\n \"host\": {\"type\": \"string\"},\n \"port\": {\"type\": \"string\"}\n },\n \"required\": ['engine', 'name', 'user', 'password', 'host', 'port']\n },\n \"filesystem\": {\"type\": \"string\"},\n \"preview_image_filesystem\": {\"type\": \"string\"},\n \"thumbnail_filesystem\": {\"type\": \"string\"},\n \"outputs\": {\"type\": \"string\"},\n \"jwql_dir\": {\"type\": \"string\"},\n \"admin_account\": {\"type\": \"string\"},\n \"log_dir\": {\"type\": \"string\"},\n \"test_dir\": {\"type\": \"string\"},\n \"test_data\": {\"type\": \"string\"},\n \"setup_file\": {\"type\": \"string\"},\n \"auth_mast\": {\"type\": \"string\"},\n \"client_id\": {\"type\": \"string\"},\n \"client_secret\": {\"type\": \"string\"},\n \"mast_token\": {\"type\": \"string\"},\n },\n # List which entries are needed (all of them)\n \"required\": [\"connection_string\", \"database\", \"filesystem\",\n \"preview_image_filesystem\", \"thumbnail_filesystem\",\n \"outputs\", \"jwql_dir\", \"admin_account\", \"log_dir\",\n \"test_dir\", \"test_data\", \"setup_file\", \"auth_mast\",\n \"client_id\", \"client_secret\", \"mast_token\"]\n }\n\n # Test that the provided config file dict matches the schema\n try:\n jsonschema.validate(instance=config_file_dict, schema=schema)\n except jsonschema.ValidationError as e:\n raise jsonschema.ValidationError(\n 'Provided config.json does not match the ' + \\\n 'required JSON schema: {}'.format(e.message)\n )\n\n\ndef initialize_instrument_monitor(module):\n \"\"\"Configures a log file for the instrument monitor run and\n captures the start time of the monitor\n\n Parameters\n ----------\n module : str\n The module name (e.g. ``dark_monitor``)\n\n Returns\n -------\n start_time : datetime object\n The start time of the monitor\n log_file : str\n The path to where the log file is stored\n \"\"\"\n\n from jwql.utils.logging_functions import configure_logging\n\n start_time = datetime.datetime.now()\n log_file = configure_logging(module)\n\n return start_time, log_file\n\n\ndef update_monitor_table(module, start_time, log_file):\n \"\"\"Update the ``monitor`` database table with information about\n the instrument monitor run\n\n Parameters\n ----------\n module : str\n The module name (e.g. 
``dark_monitor``)\n start_time : datetime object\n The start time of the monitor\n log_file : str\n The path to where the log file is stored\n \"\"\"\n\n from jwql.database.database_interface import Monitor\n\n new_entry = {}\n new_entry['monitor_name'] = module\n new_entry['start_time'] = start_time\n new_entry['end_time'] = datetime.datetime.now()\n new_entry['log_file'] = os.path.basename(log_file)\n\n Monitor.__table__.insert().execute(new_entry)\n", "path": "jwql/utils/utils.py"}], "after_files": [{"content": "\"\"\"Various utility functions for the ``jwql`` project.\n\nAuthors\n-------\n\n - Matthew Bourque\n - Lauren Chambers\n\nUse\n---\n\n This module can be imported as such:\n\n >>> import utils\n settings = get_config()\n\nReferences\n----------\n\n Filename parser modified from Joe Hunkeler:\n https://gist.github.com/jhunkeler/f08783ca2da7bfd1f8e9ee1d207da5ff\n\n Various documentation related to JWST filename conventions:\n - https://jwst-docs.stsci.edu/display/JDAT/File+Naming+Conventions+and+Data+Products\n - https://innerspace.stsci.edu/pages/viewpage.action?pageId=94092600\n - https://innerspace.stsci.edu/pages/viewpage.action?spaceKey=SCSB&title=JWST+Science+Data+Products\n - https://jwst-docs.stsci.edu/display/JDAT/Understanding+Associations?q=association%20candidate\n - https://jwst-pipeline.readthedocs.io/en/stable/jwst/introduction.html#pipeline-step-suffix-definitions\n - JWST TR JWST-STScI-004800, SM-12\n \"\"\"\n\nimport datetime\nimport getpass\nimport json\nimport os\nimport re\nimport shutil\n\nimport jsonschema\n\nfrom jwql.utils import permissions\nfrom jwql.utils.constants import FILE_SUFFIX_TYPES, JWST_INSTRUMENT_NAMES_SHORTHAND\n\n__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))\n\n\ndef copy_files(files, out_dir):\n \"\"\"Copy a given file to a given directory. Only try to copy the file\n if it is not already present in the output directory.\n\n Parameters\n ----------\n files : list\n List of files to be copied\n\n out_dir : str\n Destination directory\n\n Returns\n -------\n success : list\n Files successfully copied (or that already existed in out_dir)\n\n failed : list\n Files that were not copied\n \"\"\"\n\n # Copy files if they do not already exist\n success = []\n failed = []\n for input_file in files:\n input_new_path = os.path.join(out_dir, os.path.basename(input_file))\n if os.path.isfile(input_new_path):\n success.append(input_new_path)\n else:\n try:\n shutil.copy2(input_file, out_dir)\n success.append(input_new_path)\n permissions.set_permissions(input_new_path)\n except:\n failed.append(input_file)\n return success, failed\n\n\ndef download_mast_data(query_results, output_dir):\n \"\"\"Example function for downloading MAST query results. 
From MAST\n website (``https://mast.stsci.edu/api/v0/pyex.html``)\n\n Parameters\n ----------\n query_results : list\n List of dictionaries returned by a MAST query.\n\n output_dir : str\n Directory into which the files will be downlaoded\n \"\"\"\n\n # Set up the https connection\n server = 'mast.stsci.edu'\n conn = httplib.HTTPSConnection(server)\n\n # Dowload the products\n print('Number of query results: {}'.format(len(query_results)))\n\n for i in range(len(query_results)):\n\n # Make full output file path\n output_file = os.path.join(output_dir, query_results[i]['filename'])\n\n print('Output file is {}'.format(output_file))\n\n # Download the data\n uri = query_results[i]['dataURI']\n\n print('uri is {}'.format(uri))\n\n conn.request(\"GET\", \"/api/v0/download/file?uri=\" + uri)\n resp = conn.getresponse()\n file_content = resp.read()\n\n # Save to file\n with open(output_file, 'wb') as file_obj:\n file_obj.write(file_content)\n\n # Check for file\n if not os.path.isfile(output_file):\n print(\"ERROR: {} failed to download.\".format(output_file))\n else:\n statinfo = os.stat(output_file)\n if statinfo.st_size > 0:\n print(\"DOWNLOAD COMPLETE: \", output_file)\n else:\n print(\"ERROR: {} file is empty.\".format(output_file))\n conn.close()\n\n\ndef ensure_dir_exists(fullpath):\n \"\"\"Creates dirs from ``fullpath`` if they do not already exist.\"\"\"\n if not os.path.exists(fullpath):\n os.makedirs(fullpath)\n permissions.set_permissions(fullpath)\n\n\ndef filename_parser(filename):\n \"\"\"Return a dictionary that contains the properties of a given\n JWST file (e.g. program ID, visit number, detector, etc.).\n\n Parameters\n ----------\n filename : str\n Path or name of JWST file to parse\n\n Returns\n -------\n filename_dict : dict\n Collection of file properties\n\n Raises\n ------\n ValueError\n When the provided file does not follow naming conventions\n \"\"\"\n\n filename = os.path.basename(filename)\n file_root_name = (len(filename.split('.')) < 2)\n\n # Stage 1 and 2 filenames\n # e.g. \"jw80500012009_01101_00012_nrcalong_uncal.fits\"\n stage_1_and_2 = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"(?P<observation>\\d{3})\"\\\n r\"(?P<visit>\\d{3})\"\\\n r\"_(?P<visit_group>\\d{2})\"\\\n r\"(?P<parallel_seq_id>\\d{1})\"\\\n r\"(?P<activity>\\w{2})\"\\\n r\"_(?P<exposure_id>\\d+)\"\\\n r\"_(?P<detector>((?!_)[\\w])+)\"\n\n # Stage 2c outlier detection filenames\n # e.g. \"jw94015002002_02108_00001_mirimage_o002_crf.fits\"\n stage_2c = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\" \\\n r\"(?P<observation>\\d{3})\" \\\n r\"(?P<visit>\\d{3})\" \\\n r\"_(?P<visit_group>\\d{2})\" \\\n r\"(?P<parallel_seq_id>\\d{1})\" \\\n r\"(?P<activity>\\w{2})\" \\\n r\"_(?P<exposure_id>\\d+)\" \\\n r\"_(?P<detector>((?!_)[\\w])+)\"\\\n r\"_(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\n\n # Stage 3 filenames with target ID\n # e.g. \"jw80600-o009_t001_miri_f1130w_i2d.fits\"\n stage_3_target_id = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"-(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\\\n r\"_(?P<target_id>(t)\\d{3})\"\\\n r\"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))\"\\\n r\"_(?P<optical_elements>((?!_)[\\w-])+)\"\n\n # Stage 3 filenames with source ID\n # e.g. 
\"jw80600-o009_s00001_miri_f1130w_i2d.fits\"\n stage_3_source_id = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"-(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\\\n r\"_(?P<source_id>(s)\\d{5})\"\\\n r\"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))\"\\\n r\"_(?P<optical_elements>((?!_)[\\w-])+)\"\n\n # Stage 3 filenames with target ID and epoch\n # e.g. \"jw80600-o009_t001-epoch1_miri_f1130w_i2d.fits\"\n stage_3_target_id_epoch = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"-(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\\\n r\"_(?P<target_id>(t)\\d{3})\"\\\n r\"-epoch(?P<epoch>\\d{1})\"\\\n r\"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))\"\\\n r\"_(?P<optical_elements>((?!_)[\\w-])+)\"\n\n # Stage 3 filenames with source ID and epoch\n # e.g. \"jw80600-o009_s00001-epoch1_miri_f1130w_i2d.fits\"\n stage_3_source_id_epoch = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"-(?P<ac_id>(o\\d{3}|(c|a|r)\\d{4}))\"\\\n r\"_(?P<source_id>(s)\\d{5})\"\\\n r\"-epoch(?P<epoch>\\d{1})\"\\\n r\"_(?P<instrument>(nircam|niriss|nirspec|miri|fgs))\"\\\n r\"_(?P<optical_elements>((?!_)[\\w-])+)\"\n\n # Time series filenames\n # e.g. \"jw00733003001_02101_00002-seg001_nrs1_rate.fits\"\n time_series = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\"\\\n r\"(?P<observation>\\d{3})\"\\\n r\"(?P<visit>\\d{3})\"\\\n r\"_(?P<visit_group>\\d{2})\"\\\n r\"(?P<parallel_seq_id>\\d{1})\"\\\n r\"(?P<activity>\\w{2})\"\\\n r\"_(?P<exposure_id>\\d+)\"\\\n r\"-seg(?P<segment>\\d{3})\"\\\n r\"_(?P<detector>\\w+)\"\n\n # Guider filenames\n # e.g. \"jw00729011001_gs-id_1_image_cal.fits\" or\n # \"jw00799003001_gs-acq1_2019154181705_stream.fits\"\n guider = \\\n r\"jw\" \\\n r\"(?P<program_id>\\d{5})\" \\\n r\"(?P<observation>\\d{3})\" \\\n r\"(?P<visit>\\d{3})\" \\\n r\"_gs-(?P<guider_mode>(id|acq1|acq2|track|fg))\" \\\n r\"_((?P<date_time>\\d{13})|(?P<guide_star_attempt_id>\\d{1}))\"\n\n # Build list of filename types\n filename_types = [\n stage_1_and_2,\n stage_2c,\n stage_3_target_id,\n stage_3_source_id,\n stage_3_target_id_epoch,\n stage_3_source_id_epoch,\n time_series,\n guider]\n\n filename_type_names = [\n 'stage_1_and_2',\n 'stage_2c',\n 'stage_3_target_id',\n 'stage_3_source_id',\n 'stage_3_target_id_epoch',\n 'stage_3_source_id_epoch',\n 'time_series',\n 'guider'\n ]\n\n # Try to parse the filename\n for filename_type, filename_type_name in zip(filename_types, filename_type_names):\n\n # If full filename, try using suffix\n if not file_root_name:\n filename_type += r\"_(?P<suffix>{}).*\".format('|'.join(FILE_SUFFIX_TYPES))\n # If not, make sure the provided regex matches the entire filename root\n else:\n filename_type += r\"$\"\n\n elements = re.compile(filename_type)\n jwst_file = elements.match(filename)\n\n # Stop when you find a format that matches\n if jwst_file is not None:\n name_match = filename_type_name\n break\n\n try:\n # Convert the regex match to a dictionary\n filename_dict = jwst_file.groupdict()\n\n # Add the filename type to that dict\n filename_dict['filename_type'] = name_match\n\n # Also, add the instrument if not already there\n if 'instrument' not in filename_dict.keys():\n if name_match == 'guider':\n filename_dict['instrument'] = 'fgs'\n elif 'detector' in filename_dict.keys():\n filename_dict['instrument'] = JWST_INSTRUMENT_NAMES_SHORTHAND[\n filename_dict['detector'][:3]\n ]\n\n # Raise error if unable to parse the filename\n except AttributeError:\n jdox_url = 'https://jwst-docs.stsci.edu/display/JDAT/' \\\n 'File+Naming+Conventions+and+Data+Products'\n raise ValueError(\n 
'Provided file {} does not follow JWST naming conventions. '\n 'See {} for further information.'.format(filename, jdox_url)\n )\n\n return filename_dict\n\n\ndef filesystem_path(filename):\n \"\"\"Return the full path to a given file in the filesystem\n\n Parameters\n ----------\n filename : str\n File to locate (e.g. ``jw86600006001_02101_00008_guider1_cal.fits``)\n\n Returns\n -------\n full_path : str\n Full path to the given file, including filename\n \"\"\"\n\n filesystem_base = get_config()[\"filesystem\"]\n\n # Subdirectory name is based on the proposal ID\n subdir = 'jw{}'.format(filename_parser(filename)['program_id'])\n full_path = os.path.join(filesystem_base, subdir, filename)\n\n # Check to see if the file exists\n if os.path.isfile(full_path):\n return full_path\n else:\n raise FileNotFoundError(\n '{} is not in the predicted location: {}'.format(filename, full_path)\n )\n\n\ndef get_base_url():\n \"\"\"Return the beginning part of the URL to the ``jwql`` web app\n based on which user is running the software.\n\n If the admin account is running the code, the ``base_url`` is\n assumed to be the production URL. If not, the ``base_url`` is\n assumed to be local.\n\n Returns\n -------\n base_url : str\n The beginning part of the URL to the ``jwql`` web app\n \"\"\"\n\n username = getpass.getuser()\n if username == get_config()['admin_account']:\n base_url = 'https://dljwql.stsci.edu'\n else:\n base_url = 'http://127.0.0.1:8000'\n\n return base_url\n\n\ndef get_config():\n \"\"\"Return a dictionary that holds the contents of the ``jwql``\n config file.\n\n Returns\n -------\n settings : dict\n A dictionary that holds the contents of the config file.\n \"\"\"\n config_file_location = os.path.join(__location__, 'config.json')\n\n # Make sure the file exists\n if not os.path.isfile(config_file_location):\n raise FileNotFoundError('The JWQL package requires a configuration file (config.json) '\n 'to be placed within the jwql/utils directory. '\n 'This file is missing. Please read the relevant wiki page '\n '(https://github.com/spacetelescope/jwql/wiki/'\n 'Config-file) for more information.')\n\n with open(config_file_location, 'r') as config_file_object:\n try:\n # Load it with JSON\n settings = json.load(config_file_object)\n except json.JSONDecodeError as e:\n # Raise a more helpful error if there is a formatting problem\n raise ValueError('Incorrectly formatted config.json file. '\n 'Please fix JSON formatting: {}'.format(e))\n\n # Ensure the file has all the needed entries with expected data types\n _validate_config(settings)\n\n return settings\n\n\ndef check_config_for_key(key):\n \"\"\"Check that the config.json file contains the specified key\n and that the entry is not empty\n\n Parameters\n ----------\n key : str\n The configuration file key to verify\n \"\"\"\n try:\n get_config()[key]\n except KeyError:\n raise KeyError(\n 'The key `{}` is not present in config.json. Please add it.'.format(key)\n + ' See the relevant wiki page (https://github.com/spacetelescope/'\n 'jwql/wiki/Config-file) for more information.'\n )\n\n if get_config()[key] == \"\":\n raise ValueError(\n 'Please complete the `{}` field in your config.json. 
'.format(key)\n + ' See the relevant wiki page (https://github.com/spacetelescope/'\n 'jwql/wiki/Config-file) for more information.'\n )\n\n\ndef _validate_config(config_file_dict):\n \"\"\"Check that the config.json file contains all the needed entries with\n expected data types\n\n Parameters\n ----------\n config_file_dict : dict\n The configuration JSON file loaded as a dictionary\n\n Notes\n -----\n See here for more information on JSON schemas:\n https://json-schema.org/learn/getting-started-step-by-step.html\n \"\"\"\n # Define the schema for config.json\n schema = {\n \"type\": \"object\", # Must be a JSON object\n \"properties\": { # List all the possible entries and their types\n \"connection_string\": {\"type\": \"string\"},\n \"database\": {\n \"type\": \"object\",\n \"properties\": {\n \"engine\": {\"type\": \"string\"},\n \"name\": {\"type\": \"string\"},\n \"user\": {\"type\": \"string\"},\n \"password\": {\"type\": \"string\"},\n \"host\": {\"type\": \"string\"},\n \"port\": {\"type\": \"string\"}\n },\n \"required\": ['engine', 'name', 'user', 'password', 'host', 'port']\n },\n \"filesystem\": {\"type\": \"string\"},\n \"preview_image_filesystem\": {\"type\": \"string\"},\n \"thumbnail_filesystem\": {\"type\": \"string\"},\n \"outputs\": {\"type\": \"string\"},\n \"jwql_dir\": {\"type\": \"string\"},\n \"admin_account\": {\"type\": \"string\"},\n \"log_dir\": {\"type\": \"string\"},\n \"test_dir\": {\"type\": \"string\"},\n \"test_data\": {\"type\": \"string\"},\n \"setup_file\": {\"type\": \"string\"},\n \"auth_mast\": {\"type\": \"string\"},\n \"client_id\": {\"type\": \"string\"},\n \"client_secret\": {\"type\": \"string\"},\n \"mast_token\": {\"type\": \"string\"},\n },\n # List which entries are needed (all of them)\n \"required\": [\"connection_string\", \"database\", \"filesystem\",\n \"preview_image_filesystem\", \"thumbnail_filesystem\",\n \"outputs\", \"jwql_dir\", \"admin_account\", \"log_dir\",\n \"test_dir\", \"test_data\", \"setup_file\", \"auth_mast\",\n \"client_id\", \"client_secret\", \"mast_token\"]\n }\n\n # Test that the provided config file dict matches the schema\n try:\n jsonschema.validate(instance=config_file_dict, schema=schema)\n except jsonschema.ValidationError as e:\n raise jsonschema.ValidationError(\n 'Provided config.json does not match the ' + \\\n 'required JSON schema: {}'.format(e.message)\n )\n\n\ndef initialize_instrument_monitor(module):\n \"\"\"Configures a log file for the instrument monitor run and\n captures the start time of the monitor\n\n Parameters\n ----------\n module : str\n The module name (e.g. ``dark_monitor``)\n\n Returns\n -------\n start_time : datetime object\n The start time of the monitor\n log_file : str\n The path to where the log file is stored\n \"\"\"\n\n from jwql.utils.logging_functions import configure_logging\n\n start_time = datetime.datetime.now()\n log_file = configure_logging(module)\n\n return start_time, log_file\n\n\ndef update_monitor_table(module, start_time, log_file):\n \"\"\"Update the ``monitor`` database table with information about\n the instrument monitor run\n\n Parameters\n ----------\n module : str\n The module name (e.g. 
``dark_monitor``)\n start_time : datetime object\n The start time of the monitor\n log_file : str\n The path to where the log file is stored\n \"\"\"\n\n from jwql.database.database_interface import Monitor\n\n new_entry = {}\n new_entry['monitor_name'] = module\n new_entry['start_time'] = start_time\n new_entry['end_time'] = datetime.datetime.now()\n new_entry['log_file'] = os.path.basename(log_file)\n\n Monitor.__table__.insert().execute(new_entry)\n", "path": "jwql/utils/utils.py"}]} |
gh_patches_debug_133 | rasdani/github-patches | git_diff | weni-ai__bothub-engine-76 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Next link in pagination broken in production
The links start with ```https://bothub/```, but the correct prefix is ```https://bothub.it/```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup, find_packages
2
3 with open('requirements.txt') as fp:
4 install_requires = fp.read()
5 install_requires = list(
6 filter(lambda x: len(x) > 0, install_requires.split('\n')))
7
8 setup(
9 name='bothub',
10 version='1.7.1',
11 description='bothub',
12 packages=find_packages(),
13 install_requires=install_requires,
14 python_requires='>=3.6',
15 )
16
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@
setup(
name='bothub',
- version='1.7.1',
+ version='1.7.2',
description='bothub',
packages=find_packages(),
install_requires=install_requires,
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -7,7 +7,7 @@\n \n setup(\n name='bothub',\n- version='1.7.1',\n+ version='1.7.2',\n description='bothub',\n packages=find_packages(),\n install_requires=install_requires,\n", "issue": "Next link in pagination broken in production\nThe links starts with ```https://bothub/```, correct is ```https://bothub.it/```\n", "before_files": [{"content": "from setuptools import setup, find_packages\n\nwith open('requirements.txt') as fp:\n install_requires = fp.read()\ninstall_requires = list(\n filter(lambda x: len(x) > 0, install_requires.split('\\n')))\n\nsetup(\n name='bothub',\n version='1.7.1',\n description='bothub',\n packages=find_packages(),\n install_requires=install_requires,\n python_requires='>=3.6',\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup, find_packages\n\nwith open('requirements.txt') as fp:\n install_requires = fp.read()\ninstall_requires = list(\n filter(lambda x: len(x) > 0, install_requires.split('\\n')))\n\nsetup(\n name='bothub',\n version='1.7.2',\n description='bothub',\n packages=find_packages(),\n install_requires=install_requires,\n python_requires='>=3.6',\n)\n", "path": "setup.py"}]} |
gh_patches_debug_134 | rasdani/github-patches | git_diff | engnadeau__pybotics-751 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Create a way to add your own arm model [FEATURE]
## User Story
<!-- A clear and concise description of what the problem is. -->
I want to add my own arm configuration to the list of pre-trained models.
## Potential Solutions
<!-- A clear and concise description of what you want to happen. -->
If there was a comment next to each line of one of the arrays containing the pre-trained model saying what exactly each value was supposed to represent, that would help.
<!-- A clear and concise description of any alternative solutions or features you've considered. -->
I tried looking at the spec sheets of the arms and matching up values but I couldn't figure much out.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pybotics/predefined_models.py`
Content:
```
1 """Predefined robot models."""
2 import numpy as np # type: ignore
3
4
5 def kuka_lbr_iiwa_7() -> np.ndarray: # pragma: no cover
6 """Get KUKA LBR iiwa 7 MDH model."""
7 return np.array(
8 [
9 [0, 0, 0, 340],
10 [-np.pi / 2, 0, 0, 0],
11 [np.pi / 2, 0, 0, 400],
12 [np.pi / 2, 0, 0, 0],
13 [-np.pi / 2, 0, 0, 400],
14 [-np.pi / 2, 0, 0, 0],
15 [np.pi / 2, 0, 0, 126],
16 ]
17 )
18
19
20 def mecademic_meca500() -> np.ndarray: # pragma: no cover
21 """Get Meca500 MDH model."""
22 return np.array(
23 [
24 [0, 0, 0, 135],
25 [-np.pi / 2, 0, -np.pi / 2, 0],
26 [0, 135, 0, 0],
27 [-np.pi / 2, 38, 0, 120],
28 [np.pi / 2, 0, 0, 0],
29 [-np.pi / 2, 0, np.pi, 72],
30 ]
31 )
32
33
34 def puma560() -> np.ndarray: # pragma: no cover
35 """Get PUMA560 MDH model."""
36 return np.array(
37 [
38 [0, 0, 0, 0],
39 [-np.pi / 2, 0, 0, 0],
40 [0, 612.7, 0, 0],
41 [0, 571.6, 0, 163.9],
42 [-np.pi / 2, 0, 0, 115.7],
43 [np.pi / 2, 0, np.pi, 92.2],
44 ]
45 )
46
47
48 def ur10() -> np.ndarray: # pragma: no cover
49 """Get UR10 MDH model."""
50 return np.array(
51 [
52 [0, 0, 0, 118],
53 [np.pi / 2, 0, np.pi, 0],
54 [0, 612.7, 0, 0],
55 [0, 571.6, 0, 163.9],
56 [-np.pi / 2, 0, 0, 115.7],
57 [np.pi / 2, 0, np.pi, 92.2],
58 ]
59 )
60
61
62 def abb_irb120() -> np.ndarray: # pragma: no cover
63 """Get ABB irb120 MDH model."""
64 return np.array(
65 [
66 [0, 0, 0, 290],
67 [-np.pi / 2, 0, -np.pi / 2, 0],
68 [0, 270, 0, 0],
69 [-np.pi / 2, 70, 0, 302],
70 [np.pi / 2, 0, 0, 0],
71 [-np.pi / 2, 0, np.pi, 72],
72 ]
73 )
74
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pybotics/predefined_models.py b/pybotics/predefined_models.py
--- a/pybotics/predefined_models.py
+++ b/pybotics/predefined_models.py
@@ -1,4 +1,8 @@
-"""Predefined robot models."""
+"""Predefined robot models.
+
+These models correspond to the Modified Denavit–Hartenberg parameters:
+https://en.wikipedia.org/wiki/Denavit%E2%80%93Hartenberg_parameters
+"""
import numpy as np # type: ignore
| {"golden_diff": "diff --git a/pybotics/predefined_models.py b/pybotics/predefined_models.py\n--- a/pybotics/predefined_models.py\n+++ b/pybotics/predefined_models.py\n@@ -1,4 +1,8 @@\n-\"\"\"Predefined robot models.\"\"\"\n+\"\"\"Predefined robot models.\n+\n+These models correspond to the Modified Denavit\u2013Hartenberg parameters:\n+https://en.wikipedia.org/wiki/Denavit%E2%80%93Hartenberg_parameters\n+\"\"\"\n import numpy as np # type: ignore\n", "issue": "Create a way to add your own arm model[FEATURE]\n## User Story\r\n\r\n<!-- A clear and concise description of what the problem is. \r\nI want to add my own arm configuration to the list of pre-trained models.\r\n\r\n## Potential Solutions\r\n\r\n<!-- A clear and concise description of what you want to happen. -->\r\nIf there was a comment next to each line of one of the arrays containing the pre-trained model saying what exactly each value was supposed to represent, that would help.\r\n<!-- A clear and concise description of any alternative solutions or features you've considered. -->\r\nI tried looking at the spec sheets of the arms and matching up values but I couldn't figure much out. \r\n\r\n\n", "before_files": [{"content": "\"\"\"Predefined robot models.\"\"\"\nimport numpy as np # type: ignore\n\n\ndef kuka_lbr_iiwa_7() -> np.ndarray: # pragma: no cover\n \"\"\"Get KUKA LBR iiwa 7 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 340],\n [-np.pi / 2, 0, 0, 0],\n [np.pi / 2, 0, 0, 400],\n [np.pi / 2, 0, 0, 0],\n [-np.pi / 2, 0, 0, 400],\n [-np.pi / 2, 0, 0, 0],\n [np.pi / 2, 0, 0, 126],\n ]\n )\n\n\ndef mecademic_meca500() -> np.ndarray: # pragma: no cover\n \"\"\"Get Meca500 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 135],\n [-np.pi / 2, 0, -np.pi / 2, 0],\n [0, 135, 0, 0],\n [-np.pi / 2, 38, 0, 120],\n [np.pi / 2, 0, 0, 0],\n [-np.pi / 2, 0, np.pi, 72],\n ]\n )\n\n\ndef puma560() -> np.ndarray: # pragma: no cover\n \"\"\"Get PUMA560 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 0],\n [-np.pi / 2, 0, 0, 0],\n [0, 612.7, 0, 0],\n [0, 571.6, 0, 163.9],\n [-np.pi / 2, 0, 0, 115.7],\n [np.pi / 2, 0, np.pi, 92.2],\n ]\n )\n\n\ndef ur10() -> np.ndarray: # pragma: no cover\n \"\"\"Get UR10 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 118],\n [np.pi / 2, 0, np.pi, 0],\n [0, 612.7, 0, 0],\n [0, 571.6, 0, 163.9],\n [-np.pi / 2, 0, 0, 115.7],\n [np.pi / 2, 0, np.pi, 92.2],\n ]\n )\n\n\ndef abb_irb120() -> np.ndarray: # pragma: no cover\n \"\"\"Get ABB irb120 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 290],\n [-np.pi / 2, 0, -np.pi / 2, 0],\n [0, 270, 0, 0],\n [-np.pi / 2, 70, 0, 302],\n [np.pi / 2, 0, 0, 0],\n [-np.pi / 2, 0, np.pi, 72],\n ]\n )\n", "path": "pybotics/predefined_models.py"}], "after_files": [{"content": "\"\"\"Predefined robot models.\n\nThese models correspond to the Modified Denavit\u2013Hartenberg parameters:\nhttps://en.wikipedia.org/wiki/Denavit%E2%80%93Hartenberg_parameters\n\"\"\"\nimport numpy as np # type: ignore\n\n\ndef kuka_lbr_iiwa_7() -> np.ndarray: # pragma: no cover\n \"\"\"Get KUKA LBR iiwa 7 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 340],\n [-np.pi / 2, 0, 0, 0],\n [np.pi / 2, 0, 0, 400],\n [np.pi / 2, 0, 0, 0],\n [-np.pi / 2, 0, 0, 400],\n [-np.pi / 2, 0, 0, 0],\n [np.pi / 2, 0, 0, 126],\n ]\n )\n\n\ndef mecademic_meca500() -> np.ndarray: # pragma: no cover\n \"\"\"Get Meca500 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 135],\n [-np.pi / 2, 0, -np.pi / 2, 0],\n [0, 135, 0, 0],\n [-np.pi / 2, 38, 0, 120],\n [np.pi / 2, 0, 0, 0],\n [-np.pi / 2, 0, 
np.pi, 72],\n ]\n )\n\n\ndef puma560() -> np.ndarray: # pragma: no cover\n \"\"\"Get PUMA560 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 0],\n [-np.pi / 2, 0, 0, 0],\n [0, 612.7, 0, 0],\n [0, 571.6, 0, 163.9],\n [-np.pi / 2, 0, 0, 115.7],\n [np.pi / 2, 0, np.pi, 92.2],\n ]\n )\n\n\ndef ur10() -> np.ndarray: # pragma: no cover\n \"\"\"Get UR10 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 118],\n [np.pi / 2, 0, np.pi, 0],\n [0, 612.7, 0, 0],\n [0, 571.6, 0, 163.9],\n [-np.pi / 2, 0, 0, 115.7],\n [np.pi / 2, 0, np.pi, 92.2],\n ]\n )\n\n\ndef abb_irb120() -> np.ndarray: # pragma: no cover\n \"\"\"Get ABB irb120 MDH model.\"\"\"\n return np.array(\n [\n [0, 0, 0, 290],\n [-np.pi / 2, 0, -np.pi / 2, 0],\n [0, 270, 0, 0],\n [-np.pi / 2, 70, 0, 302],\n [np.pi / 2, 0, 0, 0],\n [-np.pi / 2, 0, np.pi, 72],\n ]\n )\n", "path": "pybotics/predefined_models.py"}]} |
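On the question in the pybotics record above — what each number in the predefined arrays means — the models, together with the Modified DH reference added by the fix, read as one row per joint in the order `[alpha, a, theta, d]`: link twist (rad), link length (mm), joint-angle offset (rad), link offset (mm). The array below is a hedged sketch of a custom arm written in that convention; the joint values are invented for illustration and do not describe a real robot.

```python
# Hedged example: a user-defined arm expressed as a Modified DH parameter
# array in the same layout as the predefined pybotics models. All numbers
# are made up for illustration.
import numpy as np

# one row per joint: [alpha (rad), a (mm), theta offset (rad), d (mm)]
my_arm = np.array(
    [
        [0, 0, 0, 250],             # joint 1: 250 mm base offset along z
        [-np.pi / 2, 0, 0, 0],      # joint 2: -90 deg twist between joint axes
        [0, 300, 0, 0],             # joint 3: 300 mm link length along x
        [-np.pi / 2, 50, 0, 150],   # joint 4: twist plus 50 mm / 150 mm offsets
        [np.pi / 2, 0, 0, 0],       # joint 5: +90 deg twist, no offsets
        [-np.pi / 2, 0, np.pi, 80], # joint 6: flipped tool frame, 80 mm flange offset
    ]
)

print(my_arm.shape)  # (6, 4): six joints, four MDH parameters per joint
```

From there the array could presumably be handed to pybotics the same way the predefined arrays are, but that step is outside what this record shows.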
gh_patches_debug_135 | rasdani/github-patches | git_diff | searx__searx-3091 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Qwant engine QAnon results
**Version of Searx**
1.0.0-297-f2f72575.
**How did you install Searx?**
Docker.
**What happened?**
When Qwant has no results for something, it displays tons of results containing the word “Q”, including QAnon related stuff.
**How To Reproduce**
1. Search for `DNTWLOCK` on Qwant.
2. Qwant will show no results.
3. Search for `DNTWLOCK` on Searx.
4. Searx will return a lot of results containing the letter Q alone.
**Expected behavior**
No results should show if Qwant has no results, and of course, no QAnon stuff either.
**Screenshots & Logs**


--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/engines/qwant.py`
Content:
```
1 # SPDX-License-Identifier: AGPL-3.0-or-later
2 # lint: pylint
3 """Qwant (Web, News, Images, Videos)
4
5 This engine uses the Qwant API (https://api.qwant.com/v3). The API is
6 undocumented but can be reverse engineered by reading the network log of
7 https://www.qwant.com/ queries.
8
9 This implementation is used by different qwant engines in the settings.yml::
10
11 - name: qwant
12 categories: general
13 ...
14 - name: qwant news
15 categories: news
16 ...
17 - name: qwant images
18 categories: images
19 ...
20 - name: qwant videos
21 categories: videos
22 ...
23
24 """
25
26 from datetime import (
27 datetime,
28 timedelta,
29 )
30 from json import loads
31 from urllib.parse import urlencode
32 from flask_babel import gettext
33
34 from searx.utils import match_language
35 from searx.exceptions import SearxEngineAPIException
36 from searx.network import raise_for_httperror
37
38
39 # about
40 about = {
41 "website": 'https://www.qwant.com/',
42 "wikidata_id": 'Q14657870',
43 "official_api_documentation": None,
44 "use_official_api": True,
45 "require_api_key": False,
46 "results": 'JSON',
47 }
48
49 # engine dependent config
50 categories = []
51 paging = True
52 supported_languages_url = about['website']
53
54 category_to_keyword = {
55 'general': 'web',
56 'news': 'news',
57 'images': 'images',
58 'videos': 'videos',
59 }
60
61 # search-url
62 url = 'https://api.qwant.com/v3/search/{keyword}?q={query}&count={count}&offset={offset}'
63
64
65 def request(query, params):
66 """Qwant search request"""
67 keyword = category_to_keyword[categories[0]]
68 count = 10 # web: count must be equal to 10
69
70 if keyword == 'images':
71 count = 50
72 offset = (params['pageno'] - 1) * count
73 # count + offset must be lower than 250
74 offset = min(offset, 199)
75 else:
76 offset = (params['pageno'] - 1) * count
77 # count + offset must be lower than 50
78 offset = min(offset, 40)
79
80 params['url'] = url.format(
81 keyword=keyword,
82 query=urlencode({'q': query}),
83 offset=offset,
84 count=count,
85 )
86
87 # add language tag
88 if params['language'] == 'all':
89 params['url'] += '&locale=en_us'
90 else:
91 language = match_language(
92 params['language'],
93 # pylint: disable=undefined-variable
94 supported_languages,
95 language_aliases,
96 )
97 params['url'] += '&locale=' + language.replace('-', '_').lower()
98
99 params['raise_for_httperror'] = False
100 return params
101
102
103 def response(resp):
104 """Get response from Qwant's search request"""
105 # pylint: disable=too-many-locals, too-many-branches, too-many-statements
106
107 keyword = category_to_keyword[categories[0]]
108 results = []
109
110 # load JSON result
111 search_results = loads(resp.text)
112 data = search_results.get('data', {})
113
114 # check for an API error
115 if search_results.get('status') != 'success':
116 msg = ",".join(data.get('message', ['unknown', ]))
117 raise SearxEngineAPIException('API error::' + msg)
118
119 # raise for other errors
120 raise_for_httperror(resp)
121
122 if keyword == 'web':
123 # The WEB query contains a list named 'mainline'. This list can contain
124 # different result types (e.g. mainline[0]['type'] returns type of the
125 # result items in mainline[0]['items']
126 mainline = data.get('result', {}).get('items', {}).get('mainline', {})
127 else:
128 # Queries on News, Images and Videos do not have a list named 'mainline'
129 # in the response. The result items are directly in the list
130 # result['items'].
131 mainline = data.get('result', {}).get('items', [])
132 mainline = [
133 {'type': keyword, 'items': mainline},
134 ]
135
136 # return empty array if there are no results
137 if not mainline:
138 return []
139
140 for row in mainline:
141
142 mainline_type = row.get('type', 'web')
143 if mainline_type != keyword:
144 continue
145
146 if mainline_type == 'ads':
147 # ignore adds
148 continue
149
150 mainline_items = row.get('items', [])
151 for item in mainline_items:
152
153 title = item.get('title', None)
154 res_url = item.get('url', None)
155
156 if mainline_type == 'web':
157 content = item['desc']
158 results.append({
159 'title': title,
160 'url': res_url,
161 'content': content,
162 })
163
164 elif mainline_type == 'news':
165
166 pub_date = item['date']
167 if pub_date is not None:
168 pub_date = datetime.fromtimestamp(pub_date)
169 news_media = item.get('media', [])
170 img_src = None
171 if news_media:
172 img_src = news_media[0].get('pict', {}).get('url', None)
173 results.append({
174 'title': title,
175 'url': res_url,
176 'publishedDate': pub_date,
177 'img_src': img_src,
178 })
179
180 elif mainline_type == 'images':
181 thumbnail = item['thumbnail']
182 img_src = item['media']
183 results.append({
184 'title': title,
185 'url': res_url,
186 'template': 'images.html',
187 'thumbnail_src': thumbnail,
188 'img_src': img_src,
189 })
190
191 elif mainline_type == 'videos':
192 # some videos do not have a description: while qwant-video
193 # returns an empty string, such video from a qwant-web query
194 # miss the 'desc' key.
195 d, s, c = item.get('desc'), item.get('source'), item.get('channel')
196 content_parts = []
197 if d:
198 content_parts.append(d)
199 if s:
200 content_parts.append("%s: %s " % (gettext("Source"), s))
201 if c:
202 content_parts.append("%s: %s " % (gettext("Channel"), c))
203 content = ' // '.join(content_parts)
204 length = item['duration']
205 if length is not None:
206 length = timedelta(milliseconds=length)
207 pub_date = item['date']
208 if pub_date is not None:
209 pub_date = datetime.fromtimestamp(pub_date)
210 thumbnail = item['thumbnail']
211 # from some locations (DE and others?) the s2 link do
212 # response a 'Please wait ..' but does not deliver the thumbnail
213 thumbnail = thumbnail.replace(
214 'https://s2.qwant.com',
215 'https://s1.qwant.com', 1
216 )
217 results.append({
218 'title': title,
219 'url': res_url,
220 'content': content,
221 'publishedDate': pub_date,
222 'thumbnail': thumbnail,
223 'template': 'videos.html',
224 'length': length,
225 })
226
227 return results
228
229
230 # get supported languages from their site
231 def _fetch_supported_languages(resp):
232 # list of regions is embedded in page as a js object
233 response_text = resp.text
234 response_text = response_text[response_text.find('INITIAL_PROPS'):]
235 response_text = response_text[response_text.find('{'):response_text.find('</script>')]
236
237 regions_json = loads(response_text)
238
239 supported_languages = []
240 for country, langs in regions_json['locales'].items():
241 for lang in langs['langs']:
242 lang_code = "{lang}-{country}".format(lang=lang, country=country)
243 supported_languages.append(lang_code)
244
245 return supported_languages
246
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/searx/engines/qwant.py b/searx/engines/qwant.py
--- a/searx/engines/qwant.py
+++ b/searx/engines/qwant.py
@@ -59,7 +59,7 @@
}
# search-url
-url = 'https://api.qwant.com/v3/search/{keyword}?q={query}&count={count}&offset={offset}'
+url = 'https://api.qwant.com/v3/search/{keyword}?{query}&count={count}&offset={offset}'
def request(query, params):
| {"golden_diff": "diff --git a/searx/engines/qwant.py b/searx/engines/qwant.py\n--- a/searx/engines/qwant.py\n+++ b/searx/engines/qwant.py\n@@ -59,7 +59,7 @@\n }\n \n # search-url\n-url = 'https://api.qwant.com/v3/search/{keyword}?q={query}&count={count}&offset={offset}'\n+url = 'https://api.qwant.com/v3/search/{keyword}?{query}&count={count}&offset={offset}'\n \n \n def request(query, params):\n", "issue": "Qwant engine QAnon results\n**Version of Searx**\r\n1.0.0-297-f2f72575.\r\n\r\n**How did you install Searx?**\r\nDocker.\r\n\r\n**What happened?**\r\nWhen Qwant has no results for something, it displays tons of results containing the world \u201cQ\u201d, including QAnon related stuff.\r\n\r\n**How To Reproduce**\r\n1. Search for `DNTWLOCK` on Qwant.\r\n2. Qwant will show no results.\r\n3. Search for `DNTWLOCK` on Searx.\r\n4. Searx will return a lot of results containing the letter Q alone.\r\n\r\n**Expected behavior**\r\nNo results should show if Qwant has no results, and of course, no QAnon stuff either.\r\n\r\n**Screenshots & Logs**\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n# lint: pylint\n\"\"\"Qwant (Web, News, Images, Videos)\n\nThis engine uses the Qwant API (https://api.qwant.com/v3). The API is\nundocumented but can be reverse engineered by reading the network log of\nhttps://www.qwant.com/ queries.\n\nThis implementation is used by different qwant engines in the settings.yml::\n\n - name: qwant\n categories: general\n ...\n - name: qwant news\n categories: news\n ...\n - name: qwant images\n categories: images\n ...\n - name: qwant videos\n categories: videos\n ...\n\n\"\"\"\n\nfrom datetime import (\n datetime,\n timedelta,\n)\nfrom json import loads\nfrom urllib.parse import urlencode\nfrom flask_babel import gettext\n\nfrom searx.utils import match_language\nfrom searx.exceptions import SearxEngineAPIException\nfrom searx.network import raise_for_httperror\n\n\n# about\nabout = {\n \"website\": 'https://www.qwant.com/',\n \"wikidata_id\": 'Q14657870',\n \"official_api_documentation\": None,\n \"use_official_api\": True,\n \"require_api_key\": False,\n \"results\": 'JSON',\n}\n\n# engine dependent config\ncategories = []\npaging = True\nsupported_languages_url = about['website']\n\ncategory_to_keyword = {\n 'general': 'web',\n 'news': 'news',\n 'images': 'images',\n 'videos': 'videos',\n}\n\n# search-url\nurl = 'https://api.qwant.com/v3/search/{keyword}?q={query}&count={count}&offset={offset}'\n\n\ndef request(query, params):\n \"\"\"Qwant search request\"\"\"\n keyword = category_to_keyword[categories[0]]\n count = 10 # web: count must be equal to 10\n\n if keyword == 'images':\n count = 50\n offset = (params['pageno'] - 1) * count\n # count + offset must be lower than 250\n offset = min(offset, 199)\n else:\n offset = (params['pageno'] - 1) * count\n # count + offset must be lower than 50\n offset = min(offset, 40)\n\n params['url'] = url.format(\n keyword=keyword,\n query=urlencode({'q': query}),\n offset=offset,\n count=count,\n )\n\n # add language tag\n if params['language'] == 'all':\n params['url'] += '&locale=en_us'\n else:\n language = match_language(\n params['language'],\n # pylint: disable=undefined-variable\n supported_languages,\n language_aliases,\n )\n params['url'] += '&locale=' + language.replace('-', '_').lower()\n\n params['raise_for_httperror'] = False\n return params\n\n\ndef response(resp):\n \"\"\"Get response from Qwant's search request\"\"\"\n # pylint: disable=too-many-locals, 
too-many-branches, too-many-statements\n\n keyword = category_to_keyword[categories[0]]\n results = []\n\n # load JSON result\n search_results = loads(resp.text)\n data = search_results.get('data', {})\n\n # check for an API error\n if search_results.get('status') != 'success':\n msg = \",\".join(data.get('message', ['unknown', ]))\n raise SearxEngineAPIException('API error::' + msg)\n\n # raise for other errors\n raise_for_httperror(resp)\n\n if keyword == 'web':\n # The WEB query contains a list named 'mainline'. This list can contain\n # different result types (e.g. mainline[0]['type'] returns type of the\n # result items in mainline[0]['items']\n mainline = data.get('result', {}).get('items', {}).get('mainline', {})\n else:\n # Queries on News, Images and Videos do not have a list named 'mainline'\n # in the response. The result items are directly in the list\n # result['items'].\n mainline = data.get('result', {}).get('items', [])\n mainline = [\n {'type': keyword, 'items': mainline},\n ]\n\n # return empty array if there are no results\n if not mainline:\n return []\n\n for row in mainline:\n\n mainline_type = row.get('type', 'web')\n if mainline_type != keyword:\n continue\n\n if mainline_type == 'ads':\n # ignore adds\n continue\n\n mainline_items = row.get('items', [])\n for item in mainline_items:\n\n title = item.get('title', None)\n res_url = item.get('url', None)\n\n if mainline_type == 'web':\n content = item['desc']\n results.append({\n 'title': title,\n 'url': res_url,\n 'content': content,\n })\n\n elif mainline_type == 'news':\n\n pub_date = item['date']\n if pub_date is not None:\n pub_date = datetime.fromtimestamp(pub_date)\n news_media = item.get('media', [])\n img_src = None\n if news_media:\n img_src = news_media[0].get('pict', {}).get('url', None)\n results.append({\n 'title': title,\n 'url': res_url,\n 'publishedDate': pub_date,\n 'img_src': img_src,\n })\n\n elif mainline_type == 'images':\n thumbnail = item['thumbnail']\n img_src = item['media']\n results.append({\n 'title': title,\n 'url': res_url,\n 'template': 'images.html',\n 'thumbnail_src': thumbnail,\n 'img_src': img_src,\n })\n\n elif mainline_type == 'videos':\n # some videos do not have a description: while qwant-video\n # returns an empty string, such video from a qwant-web query\n # miss the 'desc' key.\n d, s, c = item.get('desc'), item.get('source'), item.get('channel')\n content_parts = []\n if d:\n content_parts.append(d)\n if s:\n content_parts.append(\"%s: %s \" % (gettext(\"Source\"), s))\n if c:\n content_parts.append(\"%s: %s \" % (gettext(\"Channel\"), c))\n content = ' // '.join(content_parts)\n length = item['duration']\n if length is not None:\n length = timedelta(milliseconds=length)\n pub_date = item['date']\n if pub_date is not None:\n pub_date = datetime.fromtimestamp(pub_date)\n thumbnail = item['thumbnail']\n # from some locations (DE and others?) the s2 link do\n # response a 'Please wait ..' 
but does not deliver the thumbnail\n thumbnail = thumbnail.replace(\n 'https://s2.qwant.com',\n 'https://s1.qwant.com', 1\n )\n results.append({\n 'title': title,\n 'url': res_url,\n 'content': content,\n 'publishedDate': pub_date,\n 'thumbnail': thumbnail,\n 'template': 'videos.html',\n 'length': length,\n })\n\n return results\n\n\n# get supported languages from their site\ndef _fetch_supported_languages(resp):\n # list of regions is embedded in page as a js object\n response_text = resp.text\n response_text = response_text[response_text.find('INITIAL_PROPS'):]\n response_text = response_text[response_text.find('{'):response_text.find('</script>')]\n\n regions_json = loads(response_text)\n\n supported_languages = []\n for country, langs in regions_json['locales'].items():\n for lang in langs['langs']:\n lang_code = \"{lang}-{country}\".format(lang=lang, country=country)\n supported_languages.append(lang_code)\n\n return supported_languages\n", "path": "searx/engines/qwant.py"}], "after_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n# lint: pylint\n\"\"\"Qwant (Web, News, Images, Videos)\n\nThis engine uses the Qwant API (https://api.qwant.com/v3). The API is\nundocumented but can be reverse engineered by reading the network log of\nhttps://www.qwant.com/ queries.\n\nThis implementation is used by different qwant engines in the settings.yml::\n\n - name: qwant\n categories: general\n ...\n - name: qwant news\n categories: news\n ...\n - name: qwant images\n categories: images\n ...\n - name: qwant videos\n categories: videos\n ...\n\n\"\"\"\n\nfrom datetime import (\n datetime,\n timedelta,\n)\nfrom json import loads\nfrom urllib.parse import urlencode\nfrom flask_babel import gettext\n\nfrom searx.utils import match_language\nfrom searx.exceptions import SearxEngineAPIException\nfrom searx.network import raise_for_httperror\n\n\n# about\nabout = {\n \"website\": 'https://www.qwant.com/',\n \"wikidata_id\": 'Q14657870',\n \"official_api_documentation\": None,\n \"use_official_api\": True,\n \"require_api_key\": False,\n \"results\": 'JSON',\n}\n\n# engine dependent config\ncategories = []\npaging = True\nsupported_languages_url = about['website']\n\ncategory_to_keyword = {\n 'general': 'web',\n 'news': 'news',\n 'images': 'images',\n 'videos': 'videos',\n}\n\n# search-url\nurl = 'https://api.qwant.com/v3/search/{keyword}?{query}&count={count}&offset={offset}'\n\n\ndef request(query, params):\n \"\"\"Qwant search request\"\"\"\n keyword = category_to_keyword[categories[0]]\n count = 10 # web: count must be equal to 10\n\n if keyword == 'images':\n count = 50\n offset = (params['pageno'] - 1) * count\n # count + offset must be lower than 250\n offset = min(offset, 199)\n else:\n offset = (params['pageno'] - 1) * count\n # count + offset must be lower than 50\n offset = min(offset, 40)\n\n params['url'] = url.format(\n keyword=keyword,\n query=urlencode({'q': query}),\n offset=offset,\n count=count,\n )\n\n # add language tag\n if params['language'] == 'all':\n params['url'] += '&locale=en_us'\n else:\n language = match_language(\n params['language'],\n # pylint: disable=undefined-variable\n supported_languages,\n language_aliases,\n )\n params['url'] += '&locale=' + language.replace('-', '_').lower()\n\n params['raise_for_httperror'] = False\n return params\n\n\ndef response(resp):\n \"\"\"Get response from Qwant's search request\"\"\"\n # pylint: disable=too-many-locals, too-many-branches, too-many-statements\n\n keyword = category_to_keyword[categories[0]]\n results = 
[]\n\n # load JSON result\n search_results = loads(resp.text)\n data = search_results.get('data', {})\n\n # check for an API error\n if search_results.get('status') != 'success':\n msg = \",\".join(data.get('message', ['unknown', ]))\n raise SearxEngineAPIException('API error::' + msg)\n\n # raise for other errors\n raise_for_httperror(resp)\n\n if keyword == 'web':\n # The WEB query contains a list named 'mainline'. This list can contain\n # different result types (e.g. mainline[0]['type'] returns type of the\n # result items in mainline[0]['items']\n mainline = data.get('result', {}).get('items', {}).get('mainline', {})\n else:\n # Queries on News, Images and Videos do not have a list named 'mainline'\n # in the response. The result items are directly in the list\n # result['items'].\n mainline = data.get('result', {}).get('items', [])\n mainline = [\n {'type': keyword, 'items': mainline},\n ]\n\n # return empty array if there are no results\n if not mainline:\n return []\n\n for row in mainline:\n\n mainline_type = row.get('type', 'web')\n if mainline_type != keyword:\n continue\n\n if mainline_type == 'ads':\n # ignore adds\n continue\n\n mainline_items = row.get('items', [])\n for item in mainline_items:\n\n title = item.get('title', None)\n res_url = item.get('url', None)\n\n if mainline_type == 'web':\n content = item['desc']\n results.append({\n 'title': title,\n 'url': res_url,\n 'content': content,\n })\n\n elif mainline_type == 'news':\n\n pub_date = item['date']\n if pub_date is not None:\n pub_date = datetime.fromtimestamp(pub_date)\n news_media = item.get('media', [])\n img_src = None\n if news_media:\n img_src = news_media[0].get('pict', {}).get('url', None)\n results.append({\n 'title': title,\n 'url': res_url,\n 'publishedDate': pub_date,\n 'img_src': img_src,\n })\n\n elif mainline_type == 'images':\n thumbnail = item['thumbnail']\n img_src = item['media']\n results.append({\n 'title': title,\n 'url': res_url,\n 'template': 'images.html',\n 'thumbnail_src': thumbnail,\n 'img_src': img_src,\n })\n\n elif mainline_type == 'videos':\n # some videos do not have a description: while qwant-video\n # returns an empty string, such video from a qwant-web query\n # miss the 'desc' key.\n d, s, c = item.get('desc'), item.get('source'), item.get('channel')\n content_parts = []\n if d:\n content_parts.append(d)\n if s:\n content_parts.append(\"%s: %s \" % (gettext(\"Source\"), s))\n if c:\n content_parts.append(\"%s: %s \" % (gettext(\"Channel\"), c))\n content = ' // '.join(content_parts)\n length = item['duration']\n if length is not None:\n length = timedelta(milliseconds=length)\n pub_date = item['date']\n if pub_date is not None:\n pub_date = datetime.fromtimestamp(pub_date)\n thumbnail = item['thumbnail']\n # from some locations (DE and others?) the s2 link do\n # response a 'Please wait ..' 
but does not deliver the thumbnail\n thumbnail = thumbnail.replace(\n 'https://s2.qwant.com',\n 'https://s1.qwant.com', 1\n )\n results.append({\n 'title': title,\n 'url': res_url,\n 'content': content,\n 'publishedDate': pub_date,\n 'thumbnail': thumbnail,\n 'template': 'videos.html',\n 'length': length,\n })\n\n return results\n\n\n# get supported languages from their site\ndef _fetch_supported_languages(resp):\n # list of regions is embedded in page as a js object\n response_text = resp.text\n response_text = response_text[response_text.find('INITIAL_PROPS'):]\n response_text = response_text[response_text.find('{'):response_text.find('</script>')]\n\n regions_json = loads(response_text)\n\n supported_languages = []\n for country, langs in regions_json['locales'].items():\n for lang in langs['langs']:\n lang_code = \"{lang}-{country}\".format(lang=lang, country=country)\n supported_languages.append(lang_code)\n\n return supported_languages\n", "path": "searx/engines/qwant.py"}]} |
gh_patches_debug_136 | rasdani/github-patches | git_diff | litestar-org__litestar-2330 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
StaticFilesConfig and virtual directories
I'm trying to write a ``FileSystemProtocol`` to load files from package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks whether the given directories exist on the local filesystem.
That assumption does not hold in general, especially for any kind of virtual filesystem (e.g. a zipped package). I think this check should be relaxed to support virtual filesystems.
https://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32
--- END ISSUE ---
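To make the failure mode described in the issue concrete, here is a minimal, self-contained sketch (not part of the original report) of a read-only file system backed by package data via `importlib.resources`. The class name, method shape, and the `myapp.static` package are illustrative assumptions rather than Starlite's actual `FileSystemProtocol`; the point is that the "directory" being served never exists as a path on disk, so a `DirectoryPath`-based validation rejects it even though its files are perfectly readable:

```python
from importlib import resources


class PackageFileSystem:
    """Serves files bundled inside a Python package (possibly a zipped one)."""

    def __init__(self, package: str) -> None:
        # e.g. "myapp.static" -- a package name, not a directory on the local disk
        self.package = package

    def open(self, path: str, mode: str = "rb"):
        # importlib.resources.files() also works for zipped/virtual packages,
        # where os.path.isdir() and pydantic's DirectoryPath check would fail.
        return resources.files(self.package).joinpath(path).open(mode)


# Hypothetical usage: fs.open("index.html") streams the bundled file, yet there is
# no "myapp.static" directory on disk for a DirectoryPath validator to accept.
fs = PackageFileSystem("myapp.static")
```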
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `litestar/openapi/spec/enums.py`
Content:
```
1 from enum import Enum
2
3 __all__ = ("OpenAPIFormat", "OpenAPIType")
4
5
6 class OpenAPIFormat(str, Enum):
7 """Formats extracted from: https://datatracker.ietf.org/doc/html/draft-bhutton-json-schema-validation-00#page-13"""
8
9 DATE = "date"
10 DATE_TIME = "date-time"
11 TIME = "time"
12 DURATION = "duration"
13 URL = "url"
14 EMAIL = "email"
15 IDN_EMAIL = "idn-email"
16 HOST_NAME = "hostname"
17 IDN_HOST_NAME = "idn-hostname"
18 IPV4 = "ipv4"
19 IPV6 = "ipv6"
20 URI = "uri"
21 URI_REFERENCE = "uri-reference"
22 URI_TEMPLATE = "uri-template"
23 JSON_POINTER = "json-pointer"
24 RELATIVE_JSON_POINTER = "relative-json-pointer"
25 IRI = "iri-reference"
26 IRI_REFERENCE = "iri-reference" # noqa: PIE796
27 UUID = "uuid"
28 REGEX = "regex"
29
30
31 class OpenAPIType(str, Enum):
32 """An OopenAPI type."""
33
34 ARRAY = "array"
35 BOOLEAN = "boolean"
36 INTEGER = "integer"
37 NULL = "null"
38 NUMBER = "number"
39 OBJECT = "object"
40 STRING = "string"
41
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/litestar/openapi/spec/enums.py b/litestar/openapi/spec/enums.py
--- a/litestar/openapi/spec/enums.py
+++ b/litestar/openapi/spec/enums.py
@@ -26,6 +26,7 @@
IRI_REFERENCE = "iri-reference" # noqa: PIE796
UUID = "uuid"
REGEX = "regex"
+ BINARY = "binary"
class OpenAPIType(str, Enum):
| {"golden_diff": "diff --git a/litestar/openapi/spec/enums.py b/litestar/openapi/spec/enums.py\n--- a/litestar/openapi/spec/enums.py\n+++ b/litestar/openapi/spec/enums.py\n@@ -26,6 +26,7 @@\n IRI_REFERENCE = \"iri-reference\" # noqa: PIE796\n UUID = \"uuid\"\n REGEX = \"regex\"\n+ BINARY = \"binary\"\n \n \n class OpenAPIType(str, Enum):\n", "issue": "StaticFilesConfig and virtual directories\nI'm trying to write a ``FileSystemProtocol`` to load files from the package data using [importlib_resources](https://importlib-resources.readthedocs.io/en/latest/using.html#). But because ``directories`` is defined as ``DirectoryPath``, pydantic checks if the given directories exist in the local filesystem. \r\n\r\nThis is not generally true, especially in any kind of virtual filesystem (e.g. a zipped package). I think this condition should be relaxed to support virtual filesystems.\r\n\r\nhttps://github.com/starlite-api/starlite/blob/9bb6dcd57c10a591377cf8e3a537e9292566d5b9/starlite/config/static_files.py#L32\n", "before_files": [{"content": "from enum import Enum\n\n__all__ = (\"OpenAPIFormat\", \"OpenAPIType\")\n\n\nclass OpenAPIFormat(str, Enum):\n \"\"\"Formats extracted from: https://datatracker.ietf.org/doc/html/draft-bhutton-json-schema-validation-00#page-13\"\"\"\n\n DATE = \"date\"\n DATE_TIME = \"date-time\"\n TIME = \"time\"\n DURATION = \"duration\"\n URL = \"url\"\n EMAIL = \"email\"\n IDN_EMAIL = \"idn-email\"\n HOST_NAME = \"hostname\"\n IDN_HOST_NAME = \"idn-hostname\"\n IPV4 = \"ipv4\"\n IPV6 = \"ipv6\"\n URI = \"uri\"\n URI_REFERENCE = \"uri-reference\"\n URI_TEMPLATE = \"uri-template\"\n JSON_POINTER = \"json-pointer\"\n RELATIVE_JSON_POINTER = \"relative-json-pointer\"\n IRI = \"iri-reference\"\n IRI_REFERENCE = \"iri-reference\" # noqa: PIE796\n UUID = \"uuid\"\n REGEX = \"regex\"\n\n\nclass OpenAPIType(str, Enum):\n \"\"\"An OopenAPI type.\"\"\"\n\n ARRAY = \"array\"\n BOOLEAN = \"boolean\"\n INTEGER = \"integer\"\n NULL = \"null\"\n NUMBER = \"number\"\n OBJECT = \"object\"\n STRING = \"string\"\n", "path": "litestar/openapi/spec/enums.py"}], "after_files": [{"content": "from enum import Enum\n\n__all__ = (\"OpenAPIFormat\", \"OpenAPIType\")\n\n\nclass OpenAPIFormat(str, Enum):\n \"\"\"Formats extracted from: https://datatracker.ietf.org/doc/html/draft-bhutton-json-schema-validation-00#page-13\"\"\"\n\n DATE = \"date\"\n DATE_TIME = \"date-time\"\n TIME = \"time\"\n DURATION = \"duration\"\n URL = \"url\"\n EMAIL = \"email\"\n IDN_EMAIL = \"idn-email\"\n HOST_NAME = \"hostname\"\n IDN_HOST_NAME = \"idn-hostname\"\n IPV4 = \"ipv4\"\n IPV6 = \"ipv6\"\n URI = \"uri\"\n URI_REFERENCE = \"uri-reference\"\n URI_TEMPLATE = \"uri-template\"\n JSON_POINTER = \"json-pointer\"\n RELATIVE_JSON_POINTER = \"relative-json-pointer\"\n IRI = \"iri-reference\"\n IRI_REFERENCE = \"iri-reference\" # noqa: PIE796\n UUID = \"uuid\"\n REGEX = \"regex\"\n BINARY = \"binary\"\n\n\nclass OpenAPIType(str, Enum):\n \"\"\"An OopenAPI type.\"\"\"\n\n ARRAY = \"array\"\n BOOLEAN = \"boolean\"\n INTEGER = \"integer\"\n NULL = \"null\"\n NUMBER = \"number\"\n OBJECT = \"object\"\n STRING = \"string\"\n", "path": "litestar/openapi/spec/enums.py"}]} |
gh_patches_debug_137 | rasdani/github-patches | git_diff | conda-forge__conda-smithy-1727 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Python 3 regression: Undefined Jinja2 variables get rendered as empty string in linting
### Solution to issue cannot be found in the documentation.
- [X] I checked the documentation.
### Issue
For linting, undefined Jinja2 variables are rendered via `conda_smithy.utils.NullUndefined`. That class defines a `__unicode__` method that returns the variable's name, which is useful for leaving a clear placeholder where variables will later be filled in from the variants in `conda_build_config.yaml` during the actual build. However, `NullUndefined` doesn't override the `__str__` method of Jinja's own `Undefined`, which returns an empty string.
In effect, linting in a Python 2 environment renders, e.g. `- {{ libjpeg }}` as `- libjpeg`, but in a Python 3 environment, we get `- ` which becomes `None` in the `requirements_section` dictionary.
### Installed packages
```shell
-
```
### Environment info
```shell
-
```
--- END ISSUE ---
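To see the regression in isolation, the following small sketch (written for this note, not taken from conda-smithy) contrasts a subclass that only defines `__unicode__` with one that overrides `__str__`; on Python 3 only the latter keeps the variable name in the rendered output, which matches the behaviour described in the issue:

```python
import jinja2


class OnlyUnicode(jinja2.Undefined):
    def __unicode__(self):  # never consulted by Python 3
        return self._undefined_name


class WithStr(jinja2.Undefined):
    def __str__(self):  # used by Python 3 when the template is rendered
        return self._undefined_name


template = "- {{ libjpeg }}"
print(jinja2.Environment(undefined=OnlyUnicode).from_string(template).render())  # "- "
print(jinja2.Environment(undefined=WithStr).from_string(template).render())      # "- libjpeg"
```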
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conda_smithy/utils.py`
Content:
```
1 import shutil
2 import tempfile
3 import io
4 import jinja2
5 import datetime
6 import time
7 import os
8 import sys
9 from pathlib import Path
10 from collections import defaultdict
11 from contextlib import contextmanager
12
13 import ruamel.yaml
14
15
16 def get_feedstock_name_from_meta(meta):
17 """Resolve the feedtstock name from the parsed meta.yaml."""
18 if "feedstock-name" in meta.meta["extra"]:
19 return meta.meta["extra"]["feedstock-name"]
20 elif "parent_recipe" in meta.meta["extra"]:
21 return meta.meta["extra"]["parent_recipe"]["name"]
22 else:
23 return meta.name()
24
25
26 def get_feedstock_about_from_meta(meta) -> dict:
27 """Fetch the feedtstock about from the parsed meta.yaml."""
28 # it turns out that conda_build would not preserve the feedstock about:
29 # - if a subpackage does not have about, it uses the feedstock's
30 # - if a subpackage has about, it's used as is
31 # therefore we need to parse the yaml again just to get the about section...
32 if "parent_recipe" in meta.meta["extra"]:
33 recipe_meta = os.path.join(
34 meta.meta["extra"]["parent_recipe"]["path"], "meta.yaml"
35 )
36 with io.open(recipe_meta, "rt") as fh:
37 content = render_meta_yaml("".join(fh))
38 meta = get_yaml().load(content)
39 return dict(meta["about"])
40 else:
41 # no parent recipe for any reason, use self's about
42 return dict(meta.meta["about"])
43
44
45 def get_yaml():
46 # define global yaml API
47 # roundrip-loader and allowing duplicate keys
48 # for handling # [filter] / # [not filter]
49 # Don't use a global variable for this as a global
50 # variable will make conda-smithy thread unsafe.
51 yaml = ruamel.yaml.YAML(typ="rt")
52 yaml.allow_duplicate_keys = True
53 return yaml
54
55
56 @contextmanager
57 def tmp_directory():
58 tmp_dir = tempfile.mkdtemp("_recipe")
59 yield tmp_dir
60 shutil.rmtree(tmp_dir)
61
62
63 class NullUndefined(jinja2.Undefined):
64 def __unicode__(self):
65 return self._undefined_name
66
67 def __getattr__(self, name):
68 return "{}.{}".format(self, name)
69
70 def __getitem__(self, name):
71 return '{}["{}"]'.format(self, name)
72
73
74 class MockOS(dict):
75 def __init__(self):
76 self.environ = defaultdict(lambda: "")
77 self.sep = "/"
78
79
80 def stub_compatible_pin(*args, **kwargs):
81 return f"compatible_pin {args[0]}"
82
83
84 def stub_subpackage_pin(*args, **kwargs):
85 return f"subpackage_pin {args[0]}"
86
87
88 def render_meta_yaml(text):
89 env = jinja2.Environment(undefined=NullUndefined)
90
91 # stub out cb3 jinja2 functions - they are not important for linting
92 # if we don't stub them out, the ruamel.yaml load fails to interpret them
93 # we can't just use conda-build's api.render functionality, because it would apply selectors
94 env.globals.update(
95 dict(
96 compiler=lambda x: x + "_compiler_stub",
97 pin_subpackage=stub_subpackage_pin,
98 pin_compatible=stub_compatible_pin,
99 cdt=lambda *args, **kwargs: "cdt_stub",
100 load_file_regex=lambda *args, **kwargs: defaultdict(lambda: ""),
101 datetime=datetime,
102 time=time,
103 target_platform="linux-64",
104 mpi="mpi",
105 )
106 )
107 mockos = MockOS()
108 py_ver = "3.7"
109 context = {"os": mockos, "environ": mockos.environ, "PY_VER": py_ver}
110 content = env.from_string(text).render(context)
111 return content
112
113
114 @contextmanager
115 def update_conda_forge_config(forge_yaml):
116 """Utility method used to update conda forge configuration files
117
118 Uage:
119 >>> with update_conda_forge_config(somepath) as cfg:
120 ... cfg['foo'] = 'bar'
121 """
122 if os.path.exists(forge_yaml):
123 with open(forge_yaml, "r") as fh:
124 code = get_yaml().load(fh)
125 else:
126 code = {}
127
128 # Code could come in as an empty list.
129 if not code:
130 code = {}
131
132 yield code
133
134 get_yaml().dump(code, Path(forge_yaml))
135
136
137 def merge_dict(src, dest):
138 """Recursive merge dictionary"""
139 for key, value in src.items():
140 if isinstance(value, dict):
141 # get node or create one
142 node = dest.setdefault(key, {})
143 merge_dict(value, node)
144 else:
145 dest[key] = value
146
147 return dest
148
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conda_smithy/utils.py b/conda_smithy/utils.py
--- a/conda_smithy/utils.py
+++ b/conda_smithy/utils.py
@@ -61,7 +61,7 @@
class NullUndefined(jinja2.Undefined):
- def __unicode__(self):
+ def __str__(self):
return self._undefined_name
def __getattr__(self, name):
| {"golden_diff": "diff --git a/conda_smithy/utils.py b/conda_smithy/utils.py\n--- a/conda_smithy/utils.py\n+++ b/conda_smithy/utils.py\n@@ -61,7 +61,7 @@\n \n \n class NullUndefined(jinja2.Undefined):\n- def __unicode__(self):\n+ def __str__(self):\n return self._undefined_name\n \n def __getattr__(self, name):\n", "issue": "Python 3 regression: Undefined Jinja2 variables get rendered as empty string in linting\n### Solution to issue cannot be found in the documentation.\n\n- [X] I checked the documentation.\n\n### Issue\n\nFor linting, undefined Jinja2 variables get rendered by `conda_smithy.utils.NullUndefined`. That class contains a `__unicode__` method that returns the name of the variable. This is useful to put a clear placeholder where variables will be filled by variants from `conda_build_config.yaml` during the actual build. However, `NullUndefined` doesn't overwrite the `__str__` method of Jinja's own `Undefined`, which returns an empty string.\r\n\r\nIn effect, linting in a Python 2 environment renders, e.g. `- {{ libjpeg }}` as `- libjpeg`, but in a Python 3 environment, we get `- ` which becomes `None` in the `requirements_section` dictionary.\n\n### Installed packages\n\n```shell\n-\n```\n\n\n### Environment info\n\n```shell\n-\n```\n\n", "before_files": [{"content": "import shutil\nimport tempfile\nimport io\nimport jinja2\nimport datetime\nimport time\nimport os\nimport sys\nfrom pathlib import Path\nfrom collections import defaultdict\nfrom contextlib import contextmanager\n\nimport ruamel.yaml\n\n\ndef get_feedstock_name_from_meta(meta):\n \"\"\"Resolve the feedtstock name from the parsed meta.yaml.\"\"\"\n if \"feedstock-name\" in meta.meta[\"extra\"]:\n return meta.meta[\"extra\"][\"feedstock-name\"]\n elif \"parent_recipe\" in meta.meta[\"extra\"]:\n return meta.meta[\"extra\"][\"parent_recipe\"][\"name\"]\n else:\n return meta.name()\n\n\ndef get_feedstock_about_from_meta(meta) -> dict:\n \"\"\"Fetch the feedtstock about from the parsed meta.yaml.\"\"\"\n # it turns out that conda_build would not preserve the feedstock about:\n # - if a subpackage does not have about, it uses the feedstock's\n # - if a subpackage has about, it's used as is\n # therefore we need to parse the yaml again just to get the about section...\n if \"parent_recipe\" in meta.meta[\"extra\"]:\n recipe_meta = os.path.join(\n meta.meta[\"extra\"][\"parent_recipe\"][\"path\"], \"meta.yaml\"\n )\n with io.open(recipe_meta, \"rt\") as fh:\n content = render_meta_yaml(\"\".join(fh))\n meta = get_yaml().load(content)\n return dict(meta[\"about\"])\n else:\n # no parent recipe for any reason, use self's about\n return dict(meta.meta[\"about\"])\n\n\ndef get_yaml():\n # define global yaml API\n # roundrip-loader and allowing duplicate keys\n # for handling # [filter] / # [not filter]\n # Don't use a global variable for this as a global\n # variable will make conda-smithy thread unsafe.\n yaml = ruamel.yaml.YAML(typ=\"rt\")\n yaml.allow_duplicate_keys = True\n return yaml\n\n\n@contextmanager\ndef tmp_directory():\n tmp_dir = tempfile.mkdtemp(\"_recipe\")\n yield tmp_dir\n shutil.rmtree(tmp_dir)\n\n\nclass NullUndefined(jinja2.Undefined):\n def __unicode__(self):\n return self._undefined_name\n\n def __getattr__(self, name):\n return \"{}.{}\".format(self, name)\n\n def __getitem__(self, name):\n return '{}[\"{}\"]'.format(self, name)\n\n\nclass MockOS(dict):\n def __init__(self):\n self.environ = defaultdict(lambda: \"\")\n self.sep = \"/\"\n\n\ndef stub_compatible_pin(*args, **kwargs):\n return 
f\"compatible_pin {args[0]}\"\n\n\ndef stub_subpackage_pin(*args, **kwargs):\n return f\"subpackage_pin {args[0]}\"\n\n\ndef render_meta_yaml(text):\n env = jinja2.Environment(undefined=NullUndefined)\n\n # stub out cb3 jinja2 functions - they are not important for linting\n # if we don't stub them out, the ruamel.yaml load fails to interpret them\n # we can't just use conda-build's api.render functionality, because it would apply selectors\n env.globals.update(\n dict(\n compiler=lambda x: x + \"_compiler_stub\",\n pin_subpackage=stub_subpackage_pin,\n pin_compatible=stub_compatible_pin,\n cdt=lambda *args, **kwargs: \"cdt_stub\",\n load_file_regex=lambda *args, **kwargs: defaultdict(lambda: \"\"),\n datetime=datetime,\n time=time,\n target_platform=\"linux-64\",\n mpi=\"mpi\",\n )\n )\n mockos = MockOS()\n py_ver = \"3.7\"\n context = {\"os\": mockos, \"environ\": mockos.environ, \"PY_VER\": py_ver}\n content = env.from_string(text).render(context)\n return content\n\n\n@contextmanager\ndef update_conda_forge_config(forge_yaml):\n \"\"\"Utility method used to update conda forge configuration files\n\n Uage:\n >>> with update_conda_forge_config(somepath) as cfg:\n ... cfg['foo'] = 'bar'\n \"\"\"\n if os.path.exists(forge_yaml):\n with open(forge_yaml, \"r\") as fh:\n code = get_yaml().load(fh)\n else:\n code = {}\n\n # Code could come in as an empty list.\n if not code:\n code = {}\n\n yield code\n\n get_yaml().dump(code, Path(forge_yaml))\n\n\ndef merge_dict(src, dest):\n \"\"\"Recursive merge dictionary\"\"\"\n for key, value in src.items():\n if isinstance(value, dict):\n # get node or create one\n node = dest.setdefault(key, {})\n merge_dict(value, node)\n else:\n dest[key] = value\n\n return dest\n", "path": "conda_smithy/utils.py"}], "after_files": [{"content": "import shutil\nimport tempfile\nimport io\nimport jinja2\nimport datetime\nimport time\nimport os\nimport sys\nfrom pathlib import Path\nfrom collections import defaultdict\nfrom contextlib import contextmanager\n\nimport ruamel.yaml\n\n\ndef get_feedstock_name_from_meta(meta):\n \"\"\"Resolve the feedtstock name from the parsed meta.yaml.\"\"\"\n if \"feedstock-name\" in meta.meta[\"extra\"]:\n return meta.meta[\"extra\"][\"feedstock-name\"]\n elif \"parent_recipe\" in meta.meta[\"extra\"]:\n return meta.meta[\"extra\"][\"parent_recipe\"][\"name\"]\n else:\n return meta.name()\n\n\ndef get_feedstock_about_from_meta(meta) -> dict:\n \"\"\"Fetch the feedtstock about from the parsed meta.yaml.\"\"\"\n # it turns out that conda_build would not preserve the feedstock about:\n # - if a subpackage does not have about, it uses the feedstock's\n # - if a subpackage has about, it's used as is\n # therefore we need to parse the yaml again just to get the about section...\n if \"parent_recipe\" in meta.meta[\"extra\"]:\n recipe_meta = os.path.join(\n meta.meta[\"extra\"][\"parent_recipe\"][\"path\"], \"meta.yaml\"\n )\n with io.open(recipe_meta, \"rt\") as fh:\n content = render_meta_yaml(\"\".join(fh))\n meta = get_yaml().load(content)\n return dict(meta[\"about\"])\n else:\n # no parent recipe for any reason, use self's about\n return dict(meta.meta[\"about\"])\n\n\ndef get_yaml():\n # define global yaml API\n # roundrip-loader and allowing duplicate keys\n # for handling # [filter] / # [not filter]\n # Don't use a global variable for this as a global\n # variable will make conda-smithy thread unsafe.\n yaml = ruamel.yaml.YAML(typ=\"rt\")\n yaml.allow_duplicate_keys = True\n return yaml\n\n\n@contextmanager\ndef tmp_directory():\n 
tmp_dir = tempfile.mkdtemp(\"_recipe\")\n yield tmp_dir\n shutil.rmtree(tmp_dir)\n\n\nclass NullUndefined(jinja2.Undefined):\n def __str__(self):\n return self._undefined_name\n\n def __getattr__(self, name):\n return \"{}.{}\".format(self, name)\n\n def __getitem__(self, name):\n return '{}[\"{}\"]'.format(self, name)\n\n\nclass MockOS(dict):\n def __init__(self):\n self.environ = defaultdict(lambda: \"\")\n self.sep = \"/\"\n\n\ndef stub_compatible_pin(*args, **kwargs):\n return f\"compatible_pin {args[0]}\"\n\n\ndef stub_subpackage_pin(*args, **kwargs):\n return f\"subpackage_pin {args[0]}\"\n\n\ndef render_meta_yaml(text):\n env = jinja2.Environment(undefined=NullUndefined)\n\n # stub out cb3 jinja2 functions - they are not important for linting\n # if we don't stub them out, the ruamel.yaml load fails to interpret them\n # we can't just use conda-build's api.render functionality, because it would apply selectors\n env.globals.update(\n dict(\n compiler=lambda x: x + \"_compiler_stub\",\n pin_subpackage=stub_subpackage_pin,\n pin_compatible=stub_compatible_pin,\n cdt=lambda *args, **kwargs: \"cdt_stub\",\n load_file_regex=lambda *args, **kwargs: defaultdict(lambda: \"\"),\n datetime=datetime,\n time=time,\n target_platform=\"linux-64\",\n mpi=\"mpi\",\n )\n )\n mockos = MockOS()\n py_ver = \"3.7\"\n context = {\"os\": mockos, \"environ\": mockos.environ, \"PY_VER\": py_ver}\n content = env.from_string(text).render(context)\n return content\n\n\n@contextmanager\ndef update_conda_forge_config(forge_yaml):\n \"\"\"Utility method used to update conda forge configuration files\n\n Uage:\n >>> with update_conda_forge_config(somepath) as cfg:\n ... cfg['foo'] = 'bar'\n \"\"\"\n if os.path.exists(forge_yaml):\n with open(forge_yaml, \"r\") as fh:\n code = get_yaml().load(fh)\n else:\n code = {}\n\n # Code could come in as an empty list.\n if not code:\n code = {}\n\n yield code\n\n get_yaml().dump(code, Path(forge_yaml))\n\n\ndef merge_dict(src, dest):\n \"\"\"Recursive merge dictionary\"\"\"\n for key, value in src.items():\n if isinstance(value, dict):\n # get node or create one\n node = dest.setdefault(key, {})\n merge_dict(value, node)\n else:\n dest[key] = value\n\n return dest\n", "path": "conda_smithy/utils.py"}]} |
gh_patches_debug_138 | rasdani/github-patches | git_diff | ydataai__ydata-profiling-80 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
This call to matplotlib.use() has no effect because the backend has already
/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/pandas_profiling/base.py:20: UserWarning:
This call to matplotlib.use() has no effect because the backend has already
been chosen; matplotlib.use() must be called *before* pylab, matplotlib.pyplot,
or matplotlib.backends is imported for the first time.
The backend was *originally* set to 'module://ipykernel.pylab.backend_inline' by the following code:
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/runpy.py", line 174, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/runpy.py", line 72, in _run_code
exec code in run_globals
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel_launcher.py", line 16, in <module>
app.launch_new_instance()
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/traitlets/config/application.py", line 658, in launch_instance
app.start()
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/kernelapp.py", line 477, in start
ioloop.IOLoop.instance().start()
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/zmq/eventloop/ioloop.py", line 177, in start
super(ZMQIOLoop, self).start()
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/tornado/ioloop.py", line 888, in start
handler_func(fd_obj, events)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/tornado/stack_context.py", line 277, in null_wrapper
return fn(*args, **kwargs)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/zmq/eventloop/zmqstream.py", line 440, in _handle_events
self._handle_recv()
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/zmq/eventloop/zmqstream.py", line 472, in _handle_recv
self._run_callback(callback, msg)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/zmq/eventloop/zmqstream.py", line 414, in _run_callback
callback(*args, **kwargs)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/tornado/stack_context.py", line 277, in null_wrapper
return fn(*args, **kwargs)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/kernelbase.py", line 283, in dispatcher
return self.dispatch_shell(stream, msg)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/kernelbase.py", line 235, in dispatch_shell
handler(stream, idents, msg)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/kernelbase.py", line 399, in execute_request
user_expressions, allow_stdin)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/ipkernel.py", line 196, in do_execute
res = shell.run_cell(code, store_history=store_history, silent=silent)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/zmqshell.py", line 533, in run_cell
return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.py", line 2718, in run_cell
interactivity=interactivity, compiler=compiler, result=result)
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.py", line 2822, in run_ast_nodes
if self.run_code(code, result):
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.py", line 2882, in run_code
exec(code_obj, self.user_global_ns, self.user_ns)
File "<ipython-input-2-47d9d0ad501d>", line 8, in <module>
import matplotlib.pyplot as plt
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/matplotlib/pyplot.py", line 69, in <module>
from matplotlib.backends import pylab_setup
File "/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/matplotlib/backends/__init__.py", line 14, in <module>
line for line in traceback.format_stack()
matplotlib.use('Agg')
--- END ISSUE ---
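For reference, the warning quoted above is about import ordering: a backend can only be chosen before `matplotlib.pyplot` is imported anywhere in the process, so a library-level `matplotlib.use('Agg')` call that runs after a notebook kernel has already imported pyplot is reported as having no effect. A minimal sketch (not from pandas-profiling) of the ordering that avoids the warning:

```python
import matplotlib

matplotlib.use("Agg")  # must run before the first pyplot import in the process

import matplotlib.pyplot as plt  # noqa: E402

fig, ax = plt.subplots()
ax.hist([1, 2, 2, 3, 3, 3])
fig.savefig("hist.png")
```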
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pandas_profiling/base.py`
Content:
```
1 from __future__ import division
2
3 import sys
4
5 import itertools
6
7 try:
8 from StringIO import BytesIO
9 except ImportError:
10 from io import BytesIO
11
12 try:
13 from urllib import quote
14 except ImportError:
15 from urllib.parse import quote
16
17 import base64
18
19 import matplotlib
20 matplotlib.use('Agg')
21
22 import numpy as np
23 import pandas as pd
24 import pandas_profiling.formatters as formatters, pandas_profiling.templates as templates
25 from matplotlib import pyplot as plt
26 from pkg_resources import resource_filename
27 import six
28 import multiprocessing
29 from functools import partial
30 from distutils.version import LooseVersion
31
32
33 def pretty_name(x):
34 x *= 100
35 if x == int(x):
36 return '%.0f%%' % x
37 else:
38 return '%.1f%%' % x
39
40
41 def get_vartype(data):
42 # TODO: Shall not be computed several times
43 distinct_count=data.nunique(dropna=False)
44 leng=len(data)
45 if distinct_count <=1:
46 return 'CONST'
47 elif pd.api.types.is_bool_dtype(data):
48 return 'BOOL'
49 elif pd.api.types.is_numeric_dtype(data):
50 return 'NUM'
51 elif pd.api.types.is_datetime64_dtype(data):
52 return 'DATE'
53 elif distinct_count==leng:
54 return 'UNIQUE'
55 else:
56 return 'CAT'
57
58
59 def describe_numeric_1d(series, **kwargs):
60 stats = {'mean': series.mean(), 'std': series.std(), 'variance': series.var(), 'min': series.min(),
61 'max': series.max()}
62 stats['range'] = stats['max'] - stats['min']
63
64 for x in np.array([0.05, 0.25, 0.5, 0.75, 0.95]):
65 stats[pretty_name(x)] = series.dropna().quantile(x) # The dropna() is a workaround for https://github.com/pydata/pandas/issues/13098
66 stats['iqr'] = stats['75%'] - stats['25%']
67 stats['kurtosis'] = series.kurt()
68 stats['skewness'] = series.skew()
69 stats['sum'] = series.sum()
70 stats['mad'] = series.mad()
71 stats['cv'] = stats['std'] / stats['mean'] if stats['mean'] else np.NaN
72 stats['type'] = "NUM"
73 stats['n_zeros'] = (len(series) - np.count_nonzero(series))
74 stats['p_zeros'] = stats['n_zeros'] / len(series)
75 # Histograms
76 stats['histogram'] = histogram(series, **kwargs)
77 stats['mini_histogram'] = mini_histogram(series, **kwargs)
78 return pd.Series(stats, name=series.name)
79
80
81 def _plot_histogram(series, bins=10, figsize=(6, 4), facecolor='#337ab7'):
82 """Plot an histogram from the data and return the AxesSubplot object.
83
84 Parameters
85 ----------
86 series: Series, default None
87 The data to plot
88 figsize: a tuple (width, height) in inches, default (6,4)
89 The size of the figure.
90 facecolor: str
91 The color code.
92
93 Returns
94 -------
95 matplotlib.AxesSubplot, The plot.
96 """
97 if get_vartype(series) == 'DATE':
98 # TODO: These calls should be merged
99 fig = plt.figure(figsize=figsize)
100 plot = fig.add_subplot(111)
101 plot.set_ylabel('Frequency')
102 try:
103 plot.hist(series.values, facecolor=facecolor, bins=bins)
104 except TypeError: # matplotlib 1.4 can't plot dates so will show empty plot instead
105 pass
106 else:
107 plot = series.plot(kind='hist', figsize=figsize,
108 facecolor=facecolor,
109 bins=bins) # TODO when running on server, send this off to a different thread
110 return plot
111
112
113 def histogram(series, **kwargs):
114 """Plot an histogram of the data.
115
116 Parameters
117 ----------
118 series: Series, default None
119 The data to plot.
120
121 Returns
122 -------
123 str, The resulting image encoded as a string.
124 """
125 imgdata = BytesIO()
126 plot = _plot_histogram(series, **kwargs)
127 plot.figure.subplots_adjust(left=0.15, right=0.95, top=0.9, bottom=0.1, wspace=0, hspace=0)
128 plot.figure.savefig(imgdata)
129 imgdata.seek(0)
130 result_string = 'data:image/png;base64,' + quote(base64.b64encode(imgdata.getvalue()))
131 # TODO Think about writing this to disk instead of caching them in strings
132 plt.close(plot.figure)
133 return result_string
134
135
136 def mini_histogram(series, **kwargs):
137 """Plot a small (mini) histogram of the data.
138
139 Parameters
140 ----------
141 series: Series, default None
142 The data to plot.
143
144 Returns
145 -------
146 str, The resulting image encoded as a string.
147 """
148 imgdata = BytesIO()
149 plot = _plot_histogram(series, figsize=(2, 0.75), **kwargs)
150 plot.axes.get_yaxis().set_visible(False)
151
152 if LooseVersion(matplotlib.__version__) <= '1.5.9':
153 plot.set_axis_bgcolor("w")
154 else:
155 plot.set_facecolor("w")
156
157 xticks = plot.xaxis.get_major_ticks()
158 for tick in xticks[1:-1]:
159 tick.set_visible(False)
160 tick.label.set_visible(False)
161 for tick in (xticks[0], xticks[-1]):
162 tick.label.set_fontsize(8)
163 plot.figure.subplots_adjust(left=0.15, right=0.85, top=1, bottom=0.35, wspace=0, hspace=0)
164 plot.figure.savefig(imgdata)
165 imgdata.seek(0)
166 result_string = 'data:image/png;base64,' + quote(base64.b64encode(imgdata.getvalue()))
167 plt.close(plot.figure)
168 return result_string
169
170
171 def describe_date_1d(series):
172 stats = {'min': series.min(), 'max': series.max()}
173 stats['range'] = stats['max'] - stats['min']
174 stats['type'] = "DATE"
175 stats['histogram'] = histogram(series)
176 stats['mini_histogram'] = mini_histogram(series)
177 return pd.Series(stats, name=series.name)
178
179
180 def describe_categorical_1d(data):
181 # Only run if at least 1 non-missing value
182 objcounts = data.value_counts()
183 top, freq = objcounts.index[0], objcounts.iloc[0]
184 names = []
185 result = []
186
187 if get_vartype(data) == 'CAT':
188 names += ['top', 'freq', 'type']
189 result += [top, freq, 'CAT']
190
191 return pd.Series(result, index=names, name=data.name)
192
193 def describe_boolean_1d(data):
194 objcounts = data.value_counts()
195 top, freq = objcounts.index[0], objcounts.iloc[0]
196 # The mean of boolean is an interesting information
197 mean = data.mean()
198 names = []
199 result = []
200 names += ['top', 'freq', 'type', 'mean']
201 result += [top, freq, 'BOOL', mean]
202
203 return pd.Series(result, index=names, name=data.name)
204
205 def describe_constant_1d(data):
206 return pd.Series(['CONST'], index=['type'], name=data.name)
207
208
209 def describe_unique_1d(data):
210 return pd.Series(['UNIQUE'], index=['type'], name=data.name)
211
212
213 def describe_1d(data, **kwargs):
214 leng = len(data) # number of observations in the Series
215 count = data.count() # number of non-NaN observations in the Series
216
217 # Replace infinite values with NaNs to avoid issues with
218 # histograms later.
219 data.replace(to_replace=[np.inf, np.NINF, np.PINF], value=np.nan, inplace=True)
220
221 n_infinite = count - data.count() # number of infinte observations in the Series
222
223 distinct_count = data.nunique(dropna=False) # number of unique elements in the Series
224 if count > distinct_count > 1:
225 mode = data.mode().iloc[0]
226 else:
227 mode = data[0]
228
229 results_data = {'count': count,
230 'distinct_count': distinct_count,
231 'p_missing': 1 - count / leng,
232 'n_missing': leng - count,
233 'p_infinite': n_infinite / leng,
234 'n_infinite': n_infinite,
235 'is_unique': distinct_count == leng,
236 'mode': mode,
237 'p_unique': distinct_count / leng}
238 try:
239 # pandas 0.17 onwards
240 results_data['memorysize'] = data.memory_usage()
241 except:
242 results_data['memorysize'] = 0
243
244 result = pd.Series(results_data, name=data.name)
245
246 vartype = get_vartype(data)
247 if vartype == 'CONST':
248 result = result.append(describe_constant_1d(data))
249 elif vartype == 'BOOL':
250 result = result.append(describe_boolean_1d(data, **kwargs))
251 elif vartype == 'NUM':
252 result = result.append(describe_numeric_1d(data, **kwargs))
253 elif vartype == 'DATE':
254 result = result.append(describe_date_1d(data, **kwargs))
255 elif vartype == 'UNIQUE':
256 result = result.append(describe_unique_1d(data, **kwargs))
257 else:
258 result = result.append(describe_categorical_1d(data))
259 return result
260
261
262 def multiprocess_func(x, **kwargs):
263 return x[0], describe_1d(x[1], **kwargs)
264
265
266 def describe(df, bins=10, check_correlation=True, correlation_overrides=None, pool_size=multiprocessing.cpu_count(), **kwargs):
267 """
268 Generates a object containing summary statistics for a given DataFrame
269 :param df: DataFrame to be analyzed
270 :param bins: Number of bins in histogram
271 :param check_correlation: Flag, set to False to skip correlation checks.
272 :param correlation_overrides: Variable names not to be rejected because they are correlated
273 :param pool_size: Number of workers in thread pool
274 :return: Dictionary containing
275 table: general statistics on the DataFrame
276 variables: summary statistics for each variable
277 freq: frequency table
278 """
279
280 if not isinstance(df, pd.DataFrame):
281 raise TypeError("df must be of type pandas.DataFrame")
282 if df.empty:
283 raise ValueError("df can not be empty")
284
285 try:
286 # reset matplotlib style before use
287 # Fails in matplotlib 1.4.x so plot might look bad
288 matplotlib.style.use("default")
289 except:
290 pass
291
292 matplotlib.style.use(resource_filename(__name__, "pandas_profiling.mplstyle"))
293
294 if not pd.Index(np.arange(0, len(df))).equals(df.index):
295 # Treat index as any other column
296 df = df.reset_index()
297
298 # Describe all variables in a univariate way
299 pool = multiprocessing.Pool(pool_size)
300 local_multiprocess_func = partial(multiprocess_func, **kwargs)
301 ldesc = {col: s for col, s in pool.map(local_multiprocess_func, df.iteritems())}
302 pool.close()
303
304 # Check correlations between variable
305 if check_correlation is True:
306 ''' TODO: corr(x,y) > 0.9 and corr(y,z) > 0.9 does not imply corr(x,z) > 0.9
307 If x~y and y~z but not x~z, it would be better to delete only y
308 Better way would be to find out which variable causes the highest increase in multicollinearity.
309 '''
310 corr = df.corr()
311 for x, corr_x in corr.iterrows():
312 if correlation_overrides and x in correlation_overrides:
313 continue
314
315 for y, corr in corr_x.iteritems():
316 if x == y: break
317
318 if corr > 0.9:
319 ldesc[x] = pd.Series(['CORR', y, corr], index=['type', 'correlation_var', 'correlation'])
320
321 categorical_variables = [(name, data) for (name, data) in df.iteritems() if get_vartype(data)=='CAT']
322 for (name1, data1), (name2, data2) in itertools.combinations(categorical_variables, 2):
323 if correlation_overrides and name1 in correlation_overrides:
324 continue
325
326 confusion_matrix=pd.crosstab(data1,data2)
327 if confusion_matrix.values.diagonal().sum() == len(df):
328 ldesc[name1] = pd.Series(['RECODED', name2], index=['type', 'correlation_var'])
329
330 # Convert ldesc to a DataFrame
331 names = []
332 ldesc_indexes = sorted([x.index for x in ldesc.values()], key=len)
333 for idxnames in ldesc_indexes:
334 for name in idxnames:
335 if name not in names:
336 names.append(name)
337 variable_stats = pd.concat(ldesc, join_axes=pd.Index([names]), axis=1)
338 variable_stats.columns.names = df.columns.names
339
340 # General statistics
341 table_stats = {'n': len(df), 'nvar': len(df.columns)}
342 table_stats['total_missing'] = variable_stats.loc['n_missing'].sum() / (table_stats['n'] * table_stats['nvar'])
343 table_stats['n_duplicates'] = sum(df.duplicated())
344
345 memsize = df.memory_usage(index=True).sum()
346 table_stats['memsize'] = formatters.fmt_bytesize(memsize)
347 table_stats['recordsize'] = formatters.fmt_bytesize(memsize / table_stats['n'])
348
349 table_stats.update({k: 0 for k in ("NUM", "DATE", "CONST", "CAT", "UNIQUE", "CORR", "RECODED", "BOOL")})
350 table_stats.update(dict(variable_stats.loc['type'].value_counts()))
351 table_stats['REJECTED'] = table_stats['CONST'] + table_stats['CORR'] + table_stats['RECODED']
352
353 return {'table': table_stats, 'variables': variable_stats.T, 'freq': {k: df[k].value_counts() for k in df.columns}}
354
355
356 def to_html(sample, stats_object):
357 """Generate a HTML report from summary statistics and a given sample.
358
359 Parameters
360 ----------
361 sample: DataFrame containing the sample you want to print
362 stats_object: Dictionary containing summary statistics. Should be generated with an appropriate describe() function
363
364 Returns
365 -------
366 str, containing profile report in HTML format
367 """
368
369 n_obs = stats_object['table']['n']
370
371 value_formatters = formatters.value_formatters
372 row_formatters = formatters.row_formatters
373
374 if not isinstance(sample, pd.DataFrame):
375 raise TypeError("sample must be of type pandas.DataFrame")
376
377 if not isinstance(stats_object, dict):
378 raise TypeError("stats_object must be of type dict. Did you generate this using the pandas_profiling.describe() function?")
379
380 if set(stats_object.keys()) != {'table', 'variables', 'freq'}:
381 raise TypeError("stats_object badly formatted. Did you generate this using the pandas_profiling-eda.describe() function?")
382
383 def fmt(value, name):
384 if pd.isnull(value):
385 return ""
386 if name in value_formatters:
387 return value_formatters[name](value)
388 elif isinstance(value, float):
389 return value_formatters[formatters.DEFAULT_FLOAT_FORMATTER](value)
390 else:
391 if sys.version_info.major == 3:
392 return str(value)
393 else:
394 return unicode(value)
395
396 def _format_row(freq, label, max_freq, row_template, n, extra_class=''):
397 width = int(freq / max_freq * 99) + 1
398 if width > 20:
399 label_in_bar = freq
400 label_after_bar = ""
401 else:
402 label_in_bar = " "
403 label_after_bar = freq
404
405 return row_template.render(label=label,
406 width=width,
407 count=freq,
408 percentage='{:2.1f}'.format(freq / n * 100),
409 extra_class=extra_class,
410 label_in_bar=label_in_bar,
411 label_after_bar=label_after_bar)
412
413 def freq_table(freqtable, n, table_template, row_template, max_number_to_print, nb_col=6):
414
415 freq_rows_html = u''
416
417 if max_number_to_print > n:
418 max_number_to_print=n
419
420 if max_number_to_print < len(freqtable):
421 freq_other = sum(freqtable.iloc[max_number_to_print:])
422 min_freq = freqtable.values[max_number_to_print]
423 else:
424 freq_other = 0
425 min_freq = 0
426
427 freq_missing = n - sum(freqtable)
428 max_freq = max(freqtable.values[0], freq_other, freq_missing)
429
430 # TODO: Correctly sort missing and other
431
432 for label, freq in six.iteritems(freqtable.iloc[0:max_number_to_print]):
433 freq_rows_html += _format_row(freq, label, max_freq, row_template, n)
434
435 if freq_other > min_freq:
436 freq_rows_html += _format_row(freq_other,
437 "Other values (%s)" % (freqtable.count() - max_number_to_print), max_freq, row_template, n,
438 extra_class='other')
439
440 if freq_missing > min_freq:
441 freq_rows_html += _format_row(freq_missing, "(Missing)", max_freq, row_template, n, extra_class='missing')
442
443 return table_template.render(rows=freq_rows_html, varid=hash(idx), nb_col=nb_col)
444
445 def extreme_obs_table(freqtable, table_template, row_template, number_to_print, n, ascending = True):
446 if ascending:
447 obs_to_print = freqtable.sort_index().iloc[:number_to_print]
448 else:
449 obs_to_print = freqtable.sort_index().iloc[-number_to_print:]
450
451 freq_rows_html = ''
452 max_freq = max(obs_to_print.values)
453
454 for label, freq in six.iteritems(obs_to_print):
455 freq_rows_html += _format_row(freq, label, max_freq, row_template, n)
456
457 return table_template.render(rows=freq_rows_html)
458
459 # Variables
460 rows_html = u""
461 messages = []
462
463 for idx, row in stats_object['variables'].iterrows():
464
465 formatted_values = {'varname': idx, 'varid': hash(idx)}
466 row_classes = {}
467
468 for col, value in six.iteritems(row):
469 formatted_values[col] = fmt(value, col)
470
471 for col in set(row.index) & six.viewkeys(row_formatters):
472 row_classes[col] = row_formatters[col](row[col])
473 if row_classes[col] == "alert" and col in templates.messages:
474 messages.append(templates.messages[col].format(formatted_values, varname = formatters.fmt_varname(idx)))
475
476 if row['type'] in {'CAT', 'BOOL'}:
477 formatted_values['minifreqtable'] = freq_table(stats_object['freq'][idx], n_obs,
478 templates.template('mini_freq_table'),
479 templates.template('mini_freq_table_row'),
480 3,
481 templates.mini_freq_table_nb_col[row['type']])
482
483 if row['distinct_count'] > 50:
484 messages.append(templates.messages['HIGH_CARDINALITY'].format(formatted_values, varname = formatters.fmt_varname(idx)))
485 row_classes['distinct_count'] = "alert"
486 else:
487 row_classes['distinct_count'] = ""
488
489 if row['type'] == 'UNIQUE':
490 obs = stats_object['freq'][idx].index
491
492 formatted_values['firstn'] = pd.DataFrame(obs[0:3], columns=["First 3 values"]).to_html(classes="example_values", index=False)
493 formatted_values['lastn'] = pd.DataFrame(obs[-3:], columns=["Last 3 values"]).to_html(classes="example_values", index=False)
494
495 if row['type'] in {'CORR', 'CONST', 'RECODED'}:
496 formatted_values['varname'] = formatters.fmt_varname(idx)
497 messages.append(templates.messages[row['type']].format(formatted_values))
498 else:
499 formatted_values['freqtable'] = freq_table(stats_object['freq'][idx], n_obs,
500 templates.template('freq_table'), templates.template('freq_table_row'), 10)
501 formatted_values['firstn_expanded'] = extreme_obs_table(stats_object['freq'][idx], templates.template('freq_table'), templates.template('freq_table_row'), 5, n_obs, ascending = True)
502 formatted_values['lastn_expanded'] = extreme_obs_table(stats_object['freq'][idx], templates.template('freq_table'), templates.template('freq_table_row'), 5, n_obs, ascending = False)
503
504 rows_html += templates.row_templates_dict[row['type']].render(values=formatted_values, row_classes=row_classes)
505
506 # Overview
507 formatted_values = {k: fmt(v, k) for k, v in six.iteritems(stats_object['table'])}
508
509 row_classes={}
510 for col in six.viewkeys(stats_object['table']) & six.viewkeys(row_formatters):
511 row_classes[col] = row_formatters[col](stats_object['table'][col])
512 if row_classes[col] == "alert" and col in templates.messages:
513 messages.append(templates.messages[col].format(formatted_values, varname = formatters.fmt_varname(idx)))
514
515 messages_html = u''
516 for msg in messages:
517 messages_html += templates.message_row.format(message=msg)
518
519 overview_html = templates.template('overview').render(values=formatted_values, row_classes = row_classes, messages=messages_html)
520
521 # Sample
522
523 sample_html = templates.template('sample').render(sample_table_html=sample.to_html(classes="sample"))
524 # TODO: should be done in the template
525 return templates.template('base').render({'overview_html': overview_html, 'rows_html': rows_html, 'sample_html': sample_html})
526
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pandas_profiling/base.py b/pandas_profiling/base.py
--- a/pandas_profiling/base.py
+++ b/pandas_profiling/base.py
@@ -17,7 +17,8 @@
import base64
import matplotlib
-matplotlib.use('Agg')
+# Fix #68, this call is not needed and brings side effects in some use cases
+# matplotlib.use('Agg')
import numpy as np
import pandas as pd
| {"golden_diff": "diff --git a/pandas_profiling/base.py b/pandas_profiling/base.py\n--- a/pandas_profiling/base.py\n+++ b/pandas_profiling/base.py\n@@ -17,7 +17,8 @@\n import base64\n \n import matplotlib\n-matplotlib.use('Agg')\n+# Fix #68, this call is not needed and brings side effects in some use cases\n+# matplotlib.use('Agg')\n \n import numpy as np\n import pandas as pd\n", "issue": "This call to matplotlib.use() has no effect because the backend has already\n/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/pandas_profiling/base.py:20: UserWarning: \r\nThis call to matplotlib.use() has no effect because the backend has already\r\nbeen chosen; matplotlib.use() must be called *before* pylab, matplotlib.pyplot,\r\nor matplotlib.backends is imported for the first time.\r\n\r\nThe backend was *originally* set to 'module://ipykernel.pylab.backend_inline' by the following code:\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/runpy.py\", line 174, in _run_module_as_main\r\n \"__main__\", fname, loader, pkg_name)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/runpy.py\", line 72, in _run_code\r\n exec code in run_globals\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel_launcher.py\", line 16, in <module>\r\n app.launch_new_instance()\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/traitlets/config/application.py\", line 658, in launch_instance\r\n app.start()\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/kernelapp.py\", line 477, in start\r\n ioloop.IOLoop.instance().start()\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/zmq/eventloop/ioloop.py\", line 177, in start\r\n super(ZMQIOLoop, self).start()\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/tornado/ioloop.py\", line 888, in start\r\n handler_func(fd_obj, events)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/tornado/stack_context.py\", line 277, in null_wrapper\r\n return fn(*args, **kwargs)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/zmq/eventloop/zmqstream.py\", line 440, in _handle_events\r\n self._handle_recv()\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/zmq/eventloop/zmqstream.py\", line 472, in _handle_recv\r\n self._run_callback(callback, msg)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/zmq/eventloop/zmqstream.py\", line 414, in _run_callback\r\n callback(*args, **kwargs)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/tornado/stack_context.py\", line 277, in null_wrapper\r\n return fn(*args, **kwargs)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/kernelbase.py\", line 283, in dispatcher\r\n return self.dispatch_shell(stream, msg)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/kernelbase.py\", line 235, in dispatch_shell\r\n handler(stream, idents, msg)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/kernelbase.py\", line 399, in execute_request\r\n user_expressions, allow_stdin)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/ipkernel.py\", line 196, in do_execute\r\n res = shell.run_cell(code, 
store_history=store_history, silent=silent)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/ipykernel/zmqshell.py\", line 533, in run_cell\r\n return super(ZMQInteractiveShell, self).run_cell(*args, **kwargs)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.py\", line 2718, in run_cell\r\n interactivity=interactivity, compiler=compiler, result=result)\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.py\", line 2822, in run_ast_nodes\r\n if self.run_code(code, result):\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.py\", line 2882, in run_code\r\n exec(code_obj, self.user_global_ns, self.user_ns)\r\n File \"<ipython-input-2-47d9d0ad501d>\", line 8, in <module>\r\n import matplotlib.pyplot as plt\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/matplotlib/pyplot.py\", line 69, in <module>\r\n from matplotlib.backends import pylab_setup\r\n File \"/home/flash1/work/software/python/anaconda2/lib/python2.7/site-packages/matplotlib/backends/__init__.py\", line 14, in <module>\r\n line for line in traceback.format_stack()\r\n\r\n\r\n matplotlib.use('Agg')\n", "before_files": [{"content": "from __future__ import division\n\nimport sys\n\nimport itertools\n\ntry:\n from StringIO import BytesIO\nexcept ImportError:\n from io import BytesIO\n\ntry:\n from urllib import quote\nexcept ImportError:\n from urllib.parse import quote\n\nimport base64\n\nimport matplotlib\nmatplotlib.use('Agg')\n\nimport numpy as np\nimport pandas as pd\nimport pandas_profiling.formatters as formatters, pandas_profiling.templates as templates\nfrom matplotlib import pyplot as plt\nfrom pkg_resources import resource_filename\nimport six\nimport multiprocessing\nfrom functools import partial\nfrom distutils.version import LooseVersion\n\n\ndef pretty_name(x):\n x *= 100\n if x == int(x):\n return '%.0f%%' % x\n else:\n return '%.1f%%' % x\n\n\ndef get_vartype(data):\n # TODO: Shall not be computed several times\n distinct_count=data.nunique(dropna=False)\n leng=len(data)\n if distinct_count <=1:\n return 'CONST'\n elif pd.api.types.is_bool_dtype(data):\n return 'BOOL'\n elif pd.api.types.is_numeric_dtype(data):\n return 'NUM'\n elif pd.api.types.is_datetime64_dtype(data):\n return 'DATE'\n elif distinct_count==leng:\n return 'UNIQUE'\n else:\n return 'CAT'\n\n\ndef describe_numeric_1d(series, **kwargs):\n stats = {'mean': series.mean(), 'std': series.std(), 'variance': series.var(), 'min': series.min(),\n 'max': series.max()}\n stats['range'] = stats['max'] - stats['min']\n\n for x in np.array([0.05, 0.25, 0.5, 0.75, 0.95]):\n stats[pretty_name(x)] = series.dropna().quantile(x) # The dropna() is a workaround for https://github.com/pydata/pandas/issues/13098\n stats['iqr'] = stats['75%'] - stats['25%']\n stats['kurtosis'] = series.kurt()\n stats['skewness'] = series.skew()\n stats['sum'] = series.sum()\n stats['mad'] = series.mad()\n stats['cv'] = stats['std'] / stats['mean'] if stats['mean'] else np.NaN\n stats['type'] = \"NUM\"\n stats['n_zeros'] = (len(series) - np.count_nonzero(series))\n stats['p_zeros'] = stats['n_zeros'] / len(series)\n # Histograms\n stats['histogram'] = histogram(series, **kwargs)\n stats['mini_histogram'] = mini_histogram(series, **kwargs)\n return pd.Series(stats, name=series.name)\n\n\ndef _plot_histogram(series, bins=10, figsize=(6, 4), 
facecolor='#337ab7'):\n \"\"\"Plot an histogram from the data and return the AxesSubplot object.\n\n Parameters\n ----------\n series: Series, default None\n The data to plot\n figsize: a tuple (width, height) in inches, default (6,4)\n The size of the figure.\n facecolor: str\n The color code.\n\n Returns\n -------\n matplotlib.AxesSubplot, The plot.\n \"\"\"\n if get_vartype(series) == 'DATE':\n # TODO: These calls should be merged\n fig = plt.figure(figsize=figsize)\n plot = fig.add_subplot(111)\n plot.set_ylabel('Frequency')\n try:\n plot.hist(series.values, facecolor=facecolor, bins=bins)\n except TypeError: # matplotlib 1.4 can't plot dates so will show empty plot instead\n pass\n else:\n plot = series.plot(kind='hist', figsize=figsize,\n facecolor=facecolor,\n bins=bins) # TODO when running on server, send this off to a different thread\n return plot\n\n\ndef histogram(series, **kwargs):\n \"\"\"Plot an histogram of the data.\n\n Parameters\n ----------\n series: Series, default None\n The data to plot.\n\n Returns\n -------\n str, The resulting image encoded as a string.\n \"\"\"\n imgdata = BytesIO()\n plot = _plot_histogram(series, **kwargs)\n plot.figure.subplots_adjust(left=0.15, right=0.95, top=0.9, bottom=0.1, wspace=0, hspace=0)\n plot.figure.savefig(imgdata)\n imgdata.seek(0)\n result_string = 'data:image/png;base64,' + quote(base64.b64encode(imgdata.getvalue()))\n # TODO Think about writing this to disk instead of caching them in strings\n plt.close(plot.figure)\n return result_string\n\n\ndef mini_histogram(series, **kwargs):\n \"\"\"Plot a small (mini) histogram of the data.\n\n Parameters\n ----------\n series: Series, default None\n The data to plot.\n\n Returns\n -------\n str, The resulting image encoded as a string.\n \"\"\"\n imgdata = BytesIO()\n plot = _plot_histogram(series, figsize=(2, 0.75), **kwargs)\n plot.axes.get_yaxis().set_visible(False)\n\n if LooseVersion(matplotlib.__version__) <= '1.5.9':\n plot.set_axis_bgcolor(\"w\")\n else:\n plot.set_facecolor(\"w\")\n\n xticks = plot.xaxis.get_major_ticks()\n for tick in xticks[1:-1]:\n tick.set_visible(False)\n tick.label.set_visible(False)\n for tick in (xticks[0], xticks[-1]):\n tick.label.set_fontsize(8)\n plot.figure.subplots_adjust(left=0.15, right=0.85, top=1, bottom=0.35, wspace=0, hspace=0)\n plot.figure.savefig(imgdata)\n imgdata.seek(0)\n result_string = 'data:image/png;base64,' + quote(base64.b64encode(imgdata.getvalue()))\n plt.close(plot.figure)\n return result_string\n\n\ndef describe_date_1d(series):\n stats = {'min': series.min(), 'max': series.max()}\n stats['range'] = stats['max'] - stats['min']\n stats['type'] = \"DATE\"\n stats['histogram'] = histogram(series)\n stats['mini_histogram'] = mini_histogram(series)\n return pd.Series(stats, name=series.name)\n\n\ndef describe_categorical_1d(data):\n # Only run if at least 1 non-missing value\n objcounts = data.value_counts()\n top, freq = objcounts.index[0], objcounts.iloc[0]\n names = []\n result = []\n\n if get_vartype(data) == 'CAT':\n names += ['top', 'freq', 'type']\n result += [top, freq, 'CAT']\n\n return pd.Series(result, index=names, name=data.name)\n\ndef describe_boolean_1d(data):\n objcounts = data.value_counts()\n top, freq = objcounts.index[0], objcounts.iloc[0]\n # The mean of boolean is an interesting information\n mean = data.mean()\n names = []\n result = []\n names += ['top', 'freq', 'type', 'mean']\n result += [top, freq, 'BOOL', mean]\n\n return pd.Series(result, index=names, name=data.name)\n\ndef 
describe_constant_1d(data):\n return pd.Series(['CONST'], index=['type'], name=data.name)\n\n\ndef describe_unique_1d(data):\n return pd.Series(['UNIQUE'], index=['type'], name=data.name)\n\n\ndef describe_1d(data, **kwargs):\n leng = len(data) # number of observations in the Series\n count = data.count() # number of non-NaN observations in the Series\n\n # Replace infinite values with NaNs to avoid issues with\n # histograms later.\n data.replace(to_replace=[np.inf, np.NINF, np.PINF], value=np.nan, inplace=True)\n\n n_infinite = count - data.count() # number of infinte observations in the Series\n\n distinct_count = data.nunique(dropna=False) # number of unique elements in the Series\n if count > distinct_count > 1:\n mode = data.mode().iloc[0]\n else:\n mode = data[0]\n\n results_data = {'count': count,\n 'distinct_count': distinct_count,\n 'p_missing': 1 - count / leng,\n 'n_missing': leng - count,\n 'p_infinite': n_infinite / leng,\n 'n_infinite': n_infinite,\n 'is_unique': distinct_count == leng,\n 'mode': mode,\n 'p_unique': distinct_count / leng}\n try:\n # pandas 0.17 onwards\n results_data['memorysize'] = data.memory_usage()\n except:\n results_data['memorysize'] = 0\n\n result = pd.Series(results_data, name=data.name)\n\n vartype = get_vartype(data)\n if vartype == 'CONST':\n result = result.append(describe_constant_1d(data))\n elif vartype == 'BOOL':\n result = result.append(describe_boolean_1d(data, **kwargs))\n elif vartype == 'NUM':\n result = result.append(describe_numeric_1d(data, **kwargs))\n elif vartype == 'DATE':\n result = result.append(describe_date_1d(data, **kwargs))\n elif vartype == 'UNIQUE':\n result = result.append(describe_unique_1d(data, **kwargs))\n else:\n result = result.append(describe_categorical_1d(data))\n return result\n\n\ndef multiprocess_func(x, **kwargs):\n return x[0], describe_1d(x[1], **kwargs)\n\n\ndef describe(df, bins=10, check_correlation=True, correlation_overrides=None, pool_size=multiprocessing.cpu_count(), **kwargs):\n \"\"\"\n Generates a object containing summary statistics for a given DataFrame\n :param df: DataFrame to be analyzed\n :param bins: Number of bins in histogram\n :param check_correlation: Flag, set to False to skip correlation checks.\n :param correlation_overrides: Variable names not to be rejected because they are correlated\n :param pool_size: Number of workers in thread pool\n :return: Dictionary containing\n table: general statistics on the DataFrame\n variables: summary statistics for each variable\n freq: frequency table\n \"\"\"\n\n if not isinstance(df, pd.DataFrame):\n raise TypeError(\"df must be of type pandas.DataFrame\")\n if df.empty:\n raise ValueError(\"df can not be empty\")\n\n try:\n # reset matplotlib style before use\n # Fails in matplotlib 1.4.x so plot might look bad\n matplotlib.style.use(\"default\")\n except:\n pass\n\n matplotlib.style.use(resource_filename(__name__, \"pandas_profiling.mplstyle\"))\n\n if not pd.Index(np.arange(0, len(df))).equals(df.index):\n # Treat index as any other column\n df = df.reset_index()\n\n # Describe all variables in a univariate way\n pool = multiprocessing.Pool(pool_size)\n local_multiprocess_func = partial(multiprocess_func, **kwargs)\n ldesc = {col: s for col, s in pool.map(local_multiprocess_func, df.iteritems())}\n pool.close()\n\n # Check correlations between variable\n if check_correlation is True:\n ''' TODO: corr(x,y) > 0.9 and corr(y,z) > 0.9 does not imply corr(x,z) > 0.9\n If x~y and y~z but not x~z, it would be better to delete only y\n Better way 
would be to find out which variable causes the highest increase in multicollinearity.\n '''\n corr = df.corr()\n for x, corr_x in corr.iterrows():\n if correlation_overrides and x in correlation_overrides:\n continue\n\n for y, corr in corr_x.iteritems():\n if x == y: break\n\n if corr > 0.9:\n ldesc[x] = pd.Series(['CORR', y, corr], index=['type', 'correlation_var', 'correlation'])\n\n categorical_variables = [(name, data) for (name, data) in df.iteritems() if get_vartype(data)=='CAT']\n for (name1, data1), (name2, data2) in itertools.combinations(categorical_variables, 2):\n if correlation_overrides and name1 in correlation_overrides:\n continue\n\n confusion_matrix=pd.crosstab(data1,data2)\n if confusion_matrix.values.diagonal().sum() == len(df):\n ldesc[name1] = pd.Series(['RECODED', name2], index=['type', 'correlation_var'])\n\n # Convert ldesc to a DataFrame\n names = []\n ldesc_indexes = sorted([x.index for x in ldesc.values()], key=len)\n for idxnames in ldesc_indexes:\n for name in idxnames:\n if name not in names:\n names.append(name)\n variable_stats = pd.concat(ldesc, join_axes=pd.Index([names]), axis=1)\n variable_stats.columns.names = df.columns.names\n\n # General statistics\n table_stats = {'n': len(df), 'nvar': len(df.columns)}\n table_stats['total_missing'] = variable_stats.loc['n_missing'].sum() / (table_stats['n'] * table_stats['nvar'])\n table_stats['n_duplicates'] = sum(df.duplicated())\n\n memsize = df.memory_usage(index=True).sum()\n table_stats['memsize'] = formatters.fmt_bytesize(memsize)\n table_stats['recordsize'] = formatters.fmt_bytesize(memsize / table_stats['n'])\n\n table_stats.update({k: 0 for k in (\"NUM\", \"DATE\", \"CONST\", \"CAT\", \"UNIQUE\", \"CORR\", \"RECODED\", \"BOOL\")})\n table_stats.update(dict(variable_stats.loc['type'].value_counts()))\n table_stats['REJECTED'] = table_stats['CONST'] + table_stats['CORR'] + table_stats['RECODED']\n\n return {'table': table_stats, 'variables': variable_stats.T, 'freq': {k: df[k].value_counts() for k in df.columns}}\n\n\ndef to_html(sample, stats_object):\n \"\"\"Generate a HTML report from summary statistics and a given sample.\n\n Parameters\n ----------\n sample: DataFrame containing the sample you want to print\n stats_object: Dictionary containing summary statistics. Should be generated with an appropriate describe() function\n\n Returns\n -------\n str, containing profile report in HTML format\n \"\"\"\n\n n_obs = stats_object['table']['n']\n\n value_formatters = formatters.value_formatters\n row_formatters = formatters.row_formatters\n\n if not isinstance(sample, pd.DataFrame):\n raise TypeError(\"sample must be of type pandas.DataFrame\")\n\n if not isinstance(stats_object, dict):\n raise TypeError(\"stats_object must be of type dict. Did you generate this using the pandas_profiling.describe() function?\")\n\n if set(stats_object.keys()) != {'table', 'variables', 'freq'}:\n raise TypeError(\"stats_object badly formatted. 
Did you generate this using the pandas_profiling-eda.describe() function?\")\n\n def fmt(value, name):\n if pd.isnull(value):\n return \"\"\n if name in value_formatters:\n return value_formatters[name](value)\n elif isinstance(value, float):\n return value_formatters[formatters.DEFAULT_FLOAT_FORMATTER](value)\n else:\n if sys.version_info.major == 3:\n return str(value)\n else:\n return unicode(value)\n\n def _format_row(freq, label, max_freq, row_template, n, extra_class=''):\n width = int(freq / max_freq * 99) + 1\n if width > 20:\n label_in_bar = freq\n label_after_bar = \"\"\n else:\n label_in_bar = \" \"\n label_after_bar = freq\n\n return row_template.render(label=label,\n width=width,\n count=freq,\n percentage='{:2.1f}'.format(freq / n * 100),\n extra_class=extra_class,\n label_in_bar=label_in_bar,\n label_after_bar=label_after_bar)\n\n def freq_table(freqtable, n, table_template, row_template, max_number_to_print, nb_col=6):\n\n freq_rows_html = u''\n\n if max_number_to_print > n:\n max_number_to_print=n\n\n if max_number_to_print < len(freqtable):\n freq_other = sum(freqtable.iloc[max_number_to_print:])\n min_freq = freqtable.values[max_number_to_print]\n else:\n freq_other = 0\n min_freq = 0\n\n freq_missing = n - sum(freqtable)\n max_freq = max(freqtable.values[0], freq_other, freq_missing)\n\n # TODO: Correctly sort missing and other\n\n for label, freq in six.iteritems(freqtable.iloc[0:max_number_to_print]):\n freq_rows_html += _format_row(freq, label, max_freq, row_template, n)\n\n if freq_other > min_freq:\n freq_rows_html += _format_row(freq_other,\n \"Other values (%s)\" % (freqtable.count() - max_number_to_print), max_freq, row_template, n,\n extra_class='other')\n\n if freq_missing > min_freq:\n freq_rows_html += _format_row(freq_missing, \"(Missing)\", max_freq, row_template, n, extra_class='missing')\n\n return table_template.render(rows=freq_rows_html, varid=hash(idx), nb_col=nb_col)\n\n def extreme_obs_table(freqtable, table_template, row_template, number_to_print, n, ascending = True):\n if ascending:\n obs_to_print = freqtable.sort_index().iloc[:number_to_print]\n else:\n obs_to_print = freqtable.sort_index().iloc[-number_to_print:]\n\n freq_rows_html = ''\n max_freq = max(obs_to_print.values)\n\n for label, freq in six.iteritems(obs_to_print):\n freq_rows_html += _format_row(freq, label, max_freq, row_template, n)\n\n return table_template.render(rows=freq_rows_html)\n\n # Variables\n rows_html = u\"\"\n messages = []\n\n for idx, row in stats_object['variables'].iterrows():\n\n formatted_values = {'varname': idx, 'varid': hash(idx)}\n row_classes = {}\n\n for col, value in six.iteritems(row):\n formatted_values[col] = fmt(value, col)\n\n for col in set(row.index) & six.viewkeys(row_formatters):\n row_classes[col] = row_formatters[col](row[col])\n if row_classes[col] == \"alert\" and col in templates.messages:\n messages.append(templates.messages[col].format(formatted_values, varname = formatters.fmt_varname(idx)))\n\n if row['type'] in {'CAT', 'BOOL'}:\n formatted_values['minifreqtable'] = freq_table(stats_object['freq'][idx], n_obs,\n templates.template('mini_freq_table'), \n templates.template('mini_freq_table_row'), \n 3, \n templates.mini_freq_table_nb_col[row['type']])\n\n if row['distinct_count'] > 50:\n messages.append(templates.messages['HIGH_CARDINALITY'].format(formatted_values, varname = formatters.fmt_varname(idx)))\n row_classes['distinct_count'] = \"alert\"\n else:\n row_classes['distinct_count'] = \"\"\n\n if row['type'] == 'UNIQUE':\n obs = 
stats_object['freq'][idx].index\n\n formatted_values['firstn'] = pd.DataFrame(obs[0:3], columns=[\"First 3 values\"]).to_html(classes=\"example_values\", index=False)\n formatted_values['lastn'] = pd.DataFrame(obs[-3:], columns=[\"Last 3 values\"]).to_html(classes=\"example_values\", index=False)\n\n if row['type'] in {'CORR', 'CONST', 'RECODED'}:\n formatted_values['varname'] = formatters.fmt_varname(idx)\n messages.append(templates.messages[row['type']].format(formatted_values))\n else:\n formatted_values['freqtable'] = freq_table(stats_object['freq'][idx], n_obs,\n templates.template('freq_table'), templates.template('freq_table_row'), 10)\n formatted_values['firstn_expanded'] = extreme_obs_table(stats_object['freq'][idx], templates.template('freq_table'), templates.template('freq_table_row'), 5, n_obs, ascending = True)\n formatted_values['lastn_expanded'] = extreme_obs_table(stats_object['freq'][idx], templates.template('freq_table'), templates.template('freq_table_row'), 5, n_obs, ascending = False)\n\n rows_html += templates.row_templates_dict[row['type']].render(values=formatted_values, row_classes=row_classes)\n\n # Overview\n formatted_values = {k: fmt(v, k) for k, v in six.iteritems(stats_object['table'])}\n\n row_classes={}\n for col in six.viewkeys(stats_object['table']) & six.viewkeys(row_formatters):\n row_classes[col] = row_formatters[col](stats_object['table'][col])\n if row_classes[col] == \"alert\" and col in templates.messages:\n messages.append(templates.messages[col].format(formatted_values, varname = formatters.fmt_varname(idx)))\n\n messages_html = u''\n for msg in messages:\n messages_html += templates.message_row.format(message=msg)\n\n overview_html = templates.template('overview').render(values=formatted_values, row_classes = row_classes, messages=messages_html)\n\n # Sample\n\n sample_html = templates.template('sample').render(sample_table_html=sample.to_html(classes=\"sample\"))\n # TODO: should be done in the template\n return templates.template('base').render({'overview_html': overview_html, 'rows_html': rows_html, 'sample_html': sample_html})\n", "path": "pandas_profiling/base.py"}], "after_files": [{"content": "from __future__ import division\n\nimport sys\n\nimport itertools\n\ntry:\n from StringIO import BytesIO\nexcept ImportError:\n from io import BytesIO\n\ntry:\n from urllib import quote\nexcept ImportError:\n from urllib.parse import quote\n\nimport base64\n\nimport matplotlib\n# Fix #68, this call is not needed and brings side effects in some use cases\n# matplotlib.use('Agg')\n\nimport numpy as np\nimport pandas as pd\nimport pandas_profiling.formatters as formatters, pandas_profiling.templates as templates\nfrom matplotlib import pyplot as plt\nfrom pkg_resources import resource_filename\nimport six\nimport multiprocessing\nfrom functools import partial\nfrom distutils.version import LooseVersion\n\n\ndef pretty_name(x):\n x *= 100\n if x == int(x):\n return '%.0f%%' % x\n else:\n return '%.1f%%' % x\n\n\ndef get_vartype(data):\n # TODO: Shall not be computed several times\n distinct_count=data.nunique(dropna=False)\n leng=len(data)\n if distinct_count <=1:\n return 'CONST'\n elif pd.api.types.is_bool_dtype(data):\n return 'BOOL'\n elif pd.api.types.is_numeric_dtype(data):\n return 'NUM'\n elif pd.api.types.is_datetime64_dtype(data):\n return 'DATE'\n elif distinct_count==leng:\n return 'UNIQUE'\n else:\n return 'CAT'\n\n\ndef describe_numeric_1d(series, **kwargs):\n stats = {'mean': series.mean(), 'std': series.std(), 'variance': series.var(), 
'min': series.min(),\n 'max': series.max()}\n stats['range'] = stats['max'] - stats['min']\n\n for x in np.array([0.05, 0.25, 0.5, 0.75, 0.95]):\n stats[pretty_name(x)] = series.dropna().quantile(x) # The dropna() is a workaround for https://github.com/pydata/pandas/issues/13098\n stats['iqr'] = stats['75%'] - stats['25%']\n stats['kurtosis'] = series.kurt()\n stats['skewness'] = series.skew()\n stats['sum'] = series.sum()\n stats['mad'] = series.mad()\n stats['cv'] = stats['std'] / stats['mean'] if stats['mean'] else np.NaN\n stats['type'] = \"NUM\"\n stats['n_zeros'] = (len(series) - np.count_nonzero(series))\n stats['p_zeros'] = stats['n_zeros'] / len(series)\n # Histograms\n stats['histogram'] = histogram(series, **kwargs)\n stats['mini_histogram'] = mini_histogram(series, **kwargs)\n return pd.Series(stats, name=series.name)\n\n\ndef _plot_histogram(series, bins=10, figsize=(6, 4), facecolor='#337ab7'):\n \"\"\"Plot an histogram from the data and return the AxesSubplot object.\n\n Parameters\n ----------\n series: Series, default None\n The data to plot\n figsize: a tuple (width, height) in inches, default (6,4)\n The size of the figure.\n facecolor: str\n The color code.\n\n Returns\n -------\n matplotlib.AxesSubplot, The plot.\n \"\"\"\n if get_vartype(series) == 'DATE':\n # TODO: These calls should be merged\n fig = plt.figure(figsize=figsize)\n plot = fig.add_subplot(111)\n plot.set_ylabel('Frequency')\n try:\n plot.hist(series.values, facecolor=facecolor, bins=bins)\n except TypeError: # matplotlib 1.4 can't plot dates so will show empty plot instead\n pass\n else:\n plot = series.plot(kind='hist', figsize=figsize,\n facecolor=facecolor,\n bins=bins) # TODO when running on server, send this off to a different thread\n return plot\n\n\ndef histogram(series, **kwargs):\n \"\"\"Plot an histogram of the data.\n\n Parameters\n ----------\n series: Series, default None\n The data to plot.\n\n Returns\n -------\n str, The resulting image encoded as a string.\n \"\"\"\n imgdata = BytesIO()\n plot = _plot_histogram(series, **kwargs)\n plot.figure.subplots_adjust(left=0.15, right=0.95, top=0.9, bottom=0.1, wspace=0, hspace=0)\n plot.figure.savefig(imgdata)\n imgdata.seek(0)\n result_string = 'data:image/png;base64,' + quote(base64.b64encode(imgdata.getvalue()))\n # TODO Think about writing this to disk instead of caching them in strings\n plt.close(plot.figure)\n return result_string\n\n\ndef mini_histogram(series, **kwargs):\n \"\"\"Plot a small (mini) histogram of the data.\n\n Parameters\n ----------\n series: Series, default None\n The data to plot.\n\n Returns\n -------\n str, The resulting image encoded as a string.\n \"\"\"\n imgdata = BytesIO()\n plot = _plot_histogram(series, figsize=(2, 0.75), **kwargs)\n plot.axes.get_yaxis().set_visible(False)\n\n if LooseVersion(matplotlib.__version__) <= '1.5.9':\n plot.set_axis_bgcolor(\"w\")\n else:\n plot.set_facecolor(\"w\")\n\n xticks = plot.xaxis.get_major_ticks()\n for tick in xticks[1:-1]:\n tick.set_visible(False)\n tick.label.set_visible(False)\n for tick in (xticks[0], xticks[-1]):\n tick.label.set_fontsize(8)\n plot.figure.subplots_adjust(left=0.15, right=0.85, top=1, bottom=0.35, wspace=0, hspace=0)\n plot.figure.savefig(imgdata)\n imgdata.seek(0)\n result_string = 'data:image/png;base64,' + quote(base64.b64encode(imgdata.getvalue()))\n plt.close(plot.figure)\n return result_string\n\n\ndef describe_date_1d(series):\n stats = {'min': series.min(), 'max': series.max()}\n stats['range'] = stats['max'] - stats['min']\n stats['type'] 
= \"DATE\"\n stats['histogram'] = histogram(series)\n stats['mini_histogram'] = mini_histogram(series)\n return pd.Series(stats, name=series.name)\n\n\ndef describe_categorical_1d(data):\n # Only run if at least 1 non-missing value\n objcounts = data.value_counts()\n top, freq = objcounts.index[0], objcounts.iloc[0]\n names = []\n result = []\n\n if get_vartype(data) == 'CAT':\n names += ['top', 'freq', 'type']\n result += [top, freq, 'CAT']\n\n return pd.Series(result, index=names, name=data.name)\n\ndef describe_boolean_1d(data):\n objcounts = data.value_counts()\n top, freq = objcounts.index[0], objcounts.iloc[0]\n # The mean of boolean is an interesting information\n mean = data.mean()\n names = []\n result = []\n names += ['top', 'freq', 'type', 'mean']\n result += [top, freq, 'BOOL', mean]\n\n return pd.Series(result, index=names, name=data.name)\n\ndef describe_constant_1d(data):\n return pd.Series(['CONST'], index=['type'], name=data.name)\n\n\ndef describe_unique_1d(data):\n return pd.Series(['UNIQUE'], index=['type'], name=data.name)\n\n\ndef describe_1d(data, **kwargs):\n leng = len(data) # number of observations in the Series\n count = data.count() # number of non-NaN observations in the Series\n\n # Replace infinite values with NaNs to avoid issues with\n # histograms later.\n data.replace(to_replace=[np.inf, np.NINF, np.PINF], value=np.nan, inplace=True)\n\n n_infinite = count - data.count() # number of infinte observations in the Series\n\n distinct_count = data.nunique(dropna=False) # number of unique elements in the Series\n if count > distinct_count > 1:\n mode = data.mode().iloc[0]\n else:\n mode = data[0]\n\n results_data = {'count': count,\n 'distinct_count': distinct_count,\n 'p_missing': 1 - count / leng,\n 'n_missing': leng - count,\n 'p_infinite': n_infinite / leng,\n 'n_infinite': n_infinite,\n 'is_unique': distinct_count == leng,\n 'mode': mode,\n 'p_unique': distinct_count / leng}\n try:\n # pandas 0.17 onwards\n results_data['memorysize'] = data.memory_usage()\n except:\n results_data['memorysize'] = 0\n\n result = pd.Series(results_data, name=data.name)\n\n vartype = get_vartype(data)\n if vartype == 'CONST':\n result = result.append(describe_constant_1d(data))\n elif vartype == 'BOOL':\n result = result.append(describe_boolean_1d(data, **kwargs))\n elif vartype == 'NUM':\n result = result.append(describe_numeric_1d(data, **kwargs))\n elif vartype == 'DATE':\n result = result.append(describe_date_1d(data, **kwargs))\n elif vartype == 'UNIQUE':\n result = result.append(describe_unique_1d(data, **kwargs))\n else:\n result = result.append(describe_categorical_1d(data))\n return result\n\n\ndef multiprocess_func(x, **kwargs):\n return x[0], describe_1d(x[1], **kwargs)\n\n\ndef describe(df, bins=10, check_correlation=True, correlation_overrides=None, pool_size=multiprocessing.cpu_count(), **kwargs):\n \"\"\"\n Generates a object containing summary statistics for a given DataFrame\n :param df: DataFrame to be analyzed\n :param bins: Number of bins in histogram\n :param check_correlation: Flag, set to False to skip correlation checks.\n :param correlation_overrides: Variable names not to be rejected because they are correlated\n :param pool_size: Number of workers in thread pool\n :return: Dictionary containing\n table: general statistics on the DataFrame\n variables: summary statistics for each variable\n freq: frequency table\n \"\"\"\n\n if not isinstance(df, pd.DataFrame):\n raise TypeError(\"df must be of type pandas.DataFrame\")\n if df.empty:\n raise 
ValueError(\"df can not be empty\")\n\n try:\n # reset matplotlib style before use\n # Fails in matplotlib 1.4.x so plot might look bad\n matplotlib.style.use(\"default\")\n except:\n pass\n\n matplotlib.style.use(resource_filename(__name__, \"pandas_profiling.mplstyle\"))\n\n if not pd.Index(np.arange(0, len(df))).equals(df.index):\n # Treat index as any other column\n df = df.reset_index()\n\n # Describe all variables in a univariate way\n pool = multiprocessing.Pool(pool_size)\n local_multiprocess_func = partial(multiprocess_func, **kwargs)\n ldesc = {col: s for col, s in pool.map(local_multiprocess_func, df.iteritems())}\n pool.close()\n\n # Check correlations between variable\n if check_correlation is True:\n ''' TODO: corr(x,y) > 0.9 and corr(y,z) > 0.9 does not imply corr(x,z) > 0.9\n If x~y and y~z but not x~z, it would be better to delete only y\n Better way would be to find out which variable causes the highest increase in multicollinearity.\n '''\n corr = df.corr()\n for x, corr_x in corr.iterrows():\n if correlation_overrides and x in correlation_overrides:\n continue\n\n for y, corr in corr_x.iteritems():\n if x == y: break\n\n if corr > 0.9:\n ldesc[x] = pd.Series(['CORR', y, corr], index=['type', 'correlation_var', 'correlation'])\n\n categorical_variables = [(name, data) for (name, data) in df.iteritems() if get_vartype(data)=='CAT']\n for (name1, data1), (name2, data2) in itertools.combinations(categorical_variables, 2):\n if correlation_overrides and name1 in correlation_overrides:\n continue\n\n confusion_matrix=pd.crosstab(data1,data2)\n if confusion_matrix.values.diagonal().sum() == len(df):\n ldesc[name1] = pd.Series(['RECODED', name2], index=['type', 'correlation_var'])\n\n # Convert ldesc to a DataFrame\n names = []\n ldesc_indexes = sorted([x.index for x in ldesc.values()], key=len)\n for idxnames in ldesc_indexes:\n for name in idxnames:\n if name not in names:\n names.append(name)\n variable_stats = pd.concat(ldesc, join_axes=pd.Index([names]), axis=1)\n variable_stats.columns.names = df.columns.names\n\n # General statistics\n table_stats = {'n': len(df), 'nvar': len(df.columns)}\n table_stats['total_missing'] = variable_stats.loc['n_missing'].sum() / (table_stats['n'] * table_stats['nvar'])\n table_stats['n_duplicates'] = sum(df.duplicated())\n\n memsize = df.memory_usage(index=True).sum()\n table_stats['memsize'] = formatters.fmt_bytesize(memsize)\n table_stats['recordsize'] = formatters.fmt_bytesize(memsize / table_stats['n'])\n\n table_stats.update({k: 0 for k in (\"NUM\", \"DATE\", \"CONST\", \"CAT\", \"UNIQUE\", \"CORR\", \"RECODED\", \"BOOL\")})\n table_stats.update(dict(variable_stats.loc['type'].value_counts()))\n table_stats['REJECTED'] = table_stats['CONST'] + table_stats['CORR'] + table_stats['RECODED']\n\n return {'table': table_stats, 'variables': variable_stats.T, 'freq': {k: df[k].value_counts() for k in df.columns}}\n\n\ndef to_html(sample, stats_object):\n \"\"\"Generate a HTML report from summary statistics and a given sample.\n\n Parameters\n ----------\n sample: DataFrame containing the sample you want to print\n stats_object: Dictionary containing summary statistics. 
Should be generated with an appropriate describe() function\n\n Returns\n -------\n str, containing profile report in HTML format\n \"\"\"\n\n n_obs = stats_object['table']['n']\n\n value_formatters = formatters.value_formatters\n row_formatters = formatters.row_formatters\n\n if not isinstance(sample, pd.DataFrame):\n raise TypeError(\"sample must be of type pandas.DataFrame\")\n\n if not isinstance(stats_object, dict):\n raise TypeError(\"stats_object must be of type dict. Did you generate this using the pandas_profiling.describe() function?\")\n\n if set(stats_object.keys()) != {'table', 'variables', 'freq'}:\n raise TypeError(\"stats_object badly formatted. Did you generate this using the pandas_profiling-eda.describe() function?\")\n\n def fmt(value, name):\n if pd.isnull(value):\n return \"\"\n if name in value_formatters:\n return value_formatters[name](value)\n elif isinstance(value, float):\n return value_formatters[formatters.DEFAULT_FLOAT_FORMATTER](value)\n else:\n if sys.version_info.major == 3:\n return str(value)\n else:\n return unicode(value)\n\n def _format_row(freq, label, max_freq, row_template, n, extra_class=''):\n width = int(freq / max_freq * 99) + 1\n if width > 20:\n label_in_bar = freq\n label_after_bar = \"\"\n else:\n label_in_bar = \" \"\n label_after_bar = freq\n\n return row_template.render(label=label,\n width=width,\n count=freq,\n percentage='{:2.1f}'.format(freq / n * 100),\n extra_class=extra_class,\n label_in_bar=label_in_bar,\n label_after_bar=label_after_bar)\n\n def freq_table(freqtable, n, table_template, row_template, max_number_to_print, nb_col=6):\n\n freq_rows_html = u''\n\n if max_number_to_print > n:\n max_number_to_print=n\n\n if max_number_to_print < len(freqtable):\n freq_other = sum(freqtable.iloc[max_number_to_print:])\n min_freq = freqtable.values[max_number_to_print]\n else:\n freq_other = 0\n min_freq = 0\n\n freq_missing = n - sum(freqtable)\n max_freq = max(freqtable.values[0], freq_other, freq_missing)\n\n # TODO: Correctly sort missing and other\n\n for label, freq in six.iteritems(freqtable.iloc[0:max_number_to_print]):\n freq_rows_html += _format_row(freq, label, max_freq, row_template, n)\n\n if freq_other > min_freq:\n freq_rows_html += _format_row(freq_other,\n \"Other values (%s)\" % (freqtable.count() - max_number_to_print), max_freq, row_template, n,\n extra_class='other')\n\n if freq_missing > min_freq:\n freq_rows_html += _format_row(freq_missing, \"(Missing)\", max_freq, row_template, n, extra_class='missing')\n\n return table_template.render(rows=freq_rows_html, varid=hash(idx), nb_col=nb_col)\n\n def extreme_obs_table(freqtable, table_template, row_template, number_to_print, n, ascending = True):\n if ascending:\n obs_to_print = freqtable.sort_index().iloc[:number_to_print]\n else:\n obs_to_print = freqtable.sort_index().iloc[-number_to_print:]\n\n freq_rows_html = ''\n max_freq = max(obs_to_print.values)\n\n for label, freq in six.iteritems(obs_to_print):\n freq_rows_html += _format_row(freq, label, max_freq, row_template, n)\n\n return table_template.render(rows=freq_rows_html)\n\n # Variables\n rows_html = u\"\"\n messages = []\n\n for idx, row in stats_object['variables'].iterrows():\n\n formatted_values = {'varname': idx, 'varid': hash(idx)}\n row_classes = {}\n\n for col, value in six.iteritems(row):\n formatted_values[col] = fmt(value, col)\n\n for col in set(row.index) & six.viewkeys(row_formatters):\n row_classes[col] = row_formatters[col](row[col])\n if row_classes[col] == \"alert\" and col in 
templates.messages:\n messages.append(templates.messages[col].format(formatted_values, varname = formatters.fmt_varname(idx)))\n\n if row['type'] in {'CAT', 'BOOL'}:\n formatted_values['minifreqtable'] = freq_table(stats_object['freq'][idx], n_obs,\n templates.template('mini_freq_table'), \n templates.template('mini_freq_table_row'), \n 3, \n templates.mini_freq_table_nb_col[row['type']])\n\n if row['distinct_count'] > 50:\n messages.append(templates.messages['HIGH_CARDINALITY'].format(formatted_values, varname = formatters.fmt_varname(idx)))\n row_classes['distinct_count'] = \"alert\"\n else:\n row_classes['distinct_count'] = \"\"\n\n if row['type'] == 'UNIQUE':\n obs = stats_object['freq'][idx].index\n\n formatted_values['firstn'] = pd.DataFrame(obs[0:3], columns=[\"First 3 values\"]).to_html(classes=\"example_values\", index=False)\n formatted_values['lastn'] = pd.DataFrame(obs[-3:], columns=[\"Last 3 values\"]).to_html(classes=\"example_values\", index=False)\n\n if row['type'] in {'CORR', 'CONST', 'RECODED'}:\n formatted_values['varname'] = formatters.fmt_varname(idx)\n messages.append(templates.messages[row['type']].format(formatted_values))\n else:\n formatted_values['freqtable'] = freq_table(stats_object['freq'][idx], n_obs,\n templates.template('freq_table'), templates.template('freq_table_row'), 10)\n formatted_values['firstn_expanded'] = extreme_obs_table(stats_object['freq'][idx], templates.template('freq_table'), templates.template('freq_table_row'), 5, n_obs, ascending = True)\n formatted_values['lastn_expanded'] = extreme_obs_table(stats_object['freq'][idx], templates.template('freq_table'), templates.template('freq_table_row'), 5, n_obs, ascending = False)\n\n rows_html += templates.row_templates_dict[row['type']].render(values=formatted_values, row_classes=row_classes)\n\n # Overview\n formatted_values = {k: fmt(v, k) for k, v in six.iteritems(stats_object['table'])}\n\n row_classes={}\n for col in six.viewkeys(stats_object['table']) & six.viewkeys(row_formatters):\n row_classes[col] = row_formatters[col](stats_object['table'][col])\n if row_classes[col] == \"alert\" and col in templates.messages:\n messages.append(templates.messages[col].format(formatted_values, varname = formatters.fmt_varname(idx)))\n\n messages_html = u''\n for msg in messages:\n messages_html += templates.message_row.format(message=msg)\n\n overview_html = templates.template('overview').render(values=formatted_values, row_classes = row_classes, messages=messages_html)\n\n # Sample\n\n sample_html = templates.template('sample').render(sample_table_html=sample.to_html(classes=\"sample\"))\n # TODO: should be done in the template\n return templates.template('base').render({'overview_html': overview_html, 'rows_html': rows_html, 'sample_html': sample_html})\n", "path": "pandas_profiling/base.py"}]} |
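The golden diff in the record above only comments out the unconditional `matplotlib.use('Agg')` call; it does not relocate it. A minimal sketch of the gentler pattern that change implies — assuming an opt-in environment flag, which is an illustrative name and not taken from the record — looks like this:

```python
import os

# Force a non-interactive backend only when explicitly requested (e.g. a headless CI job),
# instead of unconditionally at import time as the original base.py did.
if os.environ.get("PROFILING_HEADLESS"):  # hypothetical opt-in flag, not from the record
    import matplotlib
    matplotlib.use("Agg")  # must run before matplotlib.pyplot is imported to take effect

import matplotlib.pyplot as plt  # backend selection is already settled at this point
```

Deferring the backend choice to the caller avoids the "call has no effect" warning reported in the issue when pyplot was already imported with an inline backend.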
gh_patches_debug_139 | rasdani/github-patches | git_diff | hedyorg__hedy-214 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add Italian translations
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app.py`
Content:
```
1 # coding=utf-8
2 import datetime
3 import collections
4 from functools import wraps
5 import hedy
6 import json
7 import jsonbin
8 import logging
9 import os
10 from os import path
11 import re
12 import requests
13 import uuid
14 import yaml
15 from flask_commonmark import Commonmark
16 from werkzeug.urls import url_encode
17 from config import config
18 from auth import auth_templates, current_user, requires_login, is_admin
19 from utils import db_get, db_get_many, db_set, timems, type_check, object_check, db_del
20
21 # app.py
22 from flask import Flask, request, jsonify, render_template, session, abort, g, redirect
23 from flask_compress import Compress
24
25 # Hedy-specific modules
26 import courses
27 import hedyweb
28
29 # Define and load all available language data
30 ALL_LANGUAGES = {
31 'en': 'English',
32 'nl': 'Nederlands',
33 'es': 'Español',
34 'fr': 'Français',
35 'pt_br': 'Português',
36 'de': 'Deutsch',
37 }
38
39 LEVEL_DEFAULTS = collections.defaultdict(courses.NoSuchDefaults)
40 for lang in ALL_LANGUAGES.keys():
41 LEVEL_DEFAULTS[lang] = courses.LevelDefaults(lang)
42
43 HEDY_COURSE = collections.defaultdict(courses.NoSuchCourse)
44 for lang in ALL_LANGUAGES.keys():
45 HEDY_COURSE[lang] = courses.Course('hedy', lang, LEVEL_DEFAULTS[lang])
46
47 SPACE_EU_COURSE = {'nl': courses.Course('space_eu', 'nl', LEVEL_DEFAULTS['nl']),
48 'en': courses.Course('space_eu', 'en', LEVEL_DEFAULTS['en']),
49 'es': courses.Course('space_eu', 'es', LEVEL_DEFAULTS['es'])
50 }
51
52 ONLINE_MASTERS_COURSE = courses.Course('online_masters', 'nl', LEVEL_DEFAULTS['nl'])
53
54 TRANSLATIONS = hedyweb.Translations()
55
56 # Load main menu (do it once, can be cached)
57 with open(f'main/menu.json', 'r') as f:
58 main_menu_json = json.load(f)
59
60
61 logging.basicConfig(
62 level=logging.DEBUG,
63 format='[%(asctime)s] %(levelname)-8s: %(message)s')
64
65 app = Flask(__name__, static_url_path='')
66
67 # HTTP -> HTTPS redirect
68 # https://stackoverflow.com/questions/32237379/python-flask-redirect-to-https-from-http/32238093
69 if os.getenv ('REDIRECT_HTTP_TO_HTTPS'):
70 @app.before_request
71 def before_request():
72 if request.url.startswith('http://'):
73 url = request.url.replace('http://', 'https://', 1)
74 # We use a 302 in case we need to revert the redirect.
75 return redirect(url, code=302)
76
77 # Unique random key for sessions
78 app.config['SECRET_KEY'] = uuid.uuid4().hex
79
80 Compress(app)
81 Commonmark(app)
82 logger = jsonbin.JsonBinLogger.from_env_vars()
83
84 if not os.getenv('HEROKU_RELEASE_CREATED_AT'):
85 logging.warning('Cannot determine release; enable Dyno metadata by running "heroku labs:enable runtime-dyno-metadata -a <APP_NAME>"')
86
87 @app.route('/parse', methods=['POST'])
88 def parse():
89 body = request.json
90 if not body:
91 return "body must be an object", 400
92 if 'code' not in body:
93 return "body.code must be a string", 400
94 if 'level' not in body:
95 return "body.level must be a string", 400
96
97 code = body ['code']
98 level = int(body ['level'])
99 # Language should come principally from the request body,
100 # but we'll fall back to browser default if it's missing for whatever
101 # reason.
102 lang = body.get('lang', requested_lang())
103
104 # For debugging
105 print(f"got code {code}")
106
107 response = {}
108 username = current_user(request) ['username'] or None
109
110 # Check if user sent code
111 if not code:
112 response["Error"] = "no code found, please send code."
113     # if so, parse
114 else:
115 try:
116 hedy_errors = TRANSLATIONS.get_translations(lang, 'HedyErrorMessages')
117 result = hedy.transpile(code, level)
118 response["Code"] = "# coding=utf8\n" + result
119 except hedy.HedyException as E:
120 # some 'errors' can be fixed, for these we throw an exception, but also
121 # return fixed code, so it can be ran
122 if E.args[0] == "Invalid Space":
123 error_template = hedy_errors[E.error_code]
124 response["Code"] = "# coding=utf8\n" + E.arguments['fixed_code']
125 response["Warning"] = error_template.format(**E.arguments)
126 elif E.args[0] == "Parse":
127 error_template = hedy_errors[E.error_code]
128 # Localize the names of characters
129 # Localize the names of characters
130 if 'character_found' in E.arguments:
131 E.arguments['character_found'] = hedy_errors[E.arguments['character_found']]
132 response["Error"] = error_template.format(**E.arguments)
133 else:
134 error_template = hedy_errors[E.error_code]
135 response["Error"] = error_template.format(**E.arguments)
136 except Exception as E:
137 print(f"error transpiling {code}")
138 response["Error"] = str(E)
139
140 logger.log({
141 'session': session_id(),
142 'date': str(datetime.datetime.now()),
143 'level': level,
144 'lang': lang,
145 'code': code,
146 'server_error': response.get('Error'),
147 'version': version(),
148 'username': username
149 })
150
151 return jsonify(response)
152
153 @app.route('/report_error', methods=['POST'])
154 def report_error():
155 post_body = request.json
156
157 logger.log({
158 'session': session_id(),
159 'date': str(datetime.datetime.now()),
160 'level': post_body.get('level'),
161 'code': post_body.get('code'),
162 'client_error': post_body.get('client_error'),
163 'version': version(),
164 'username': current_user(request) ['username'] or None
165 })
166
167 return 'logged'
168
169 def programs_page (request):
170 username = current_user(request) ['username']
171 if not username:
172 return "unauthorized", 403
173
174 lang = requested_lang()
175 query_lang = request.args.get('lang') or ''
176 if query_lang:
177 query_lang = '?lang=' + query_lang
178
179 from_user = request.args.get('user') or None
180 if from_user and not is_admin (request):
181 return "unauthorized", 403
182
183 texts=TRANSLATIONS.data [lang] ['Programs']
184
185 result = db_get_many ('programs', {'username': from_user or username}, True)
186 programs = []
187 now = timems ()
188 for item in result:
189 measure = texts ['minutes']
190 date = round ((now - item ['date']) / 60000)
191 if date > 90:
192 measure = texts ['hours']
193 date = round (date / 60)
194 if date > 36:
195 measure = texts ['days']
196
197 date = round (date / 24)
198
199 programs.append ({'id': item ['id'], 'code': item ['code'], 'date': texts ['ago-1'] + ' ' + str (date) + ' ' + measure + ' ' + texts ['ago-2'], 'level': item ['level'], 'name': item ['name']})
200
201 return render_template('programs.html', lang=requested_lang(), menu=render_main_menu('programs'), texts=texts, auth=TRANSLATIONS.data [lang] ['Auth'], programs=programs, username=username, current_page='programs', query_lang=query_lang, from_user=from_user)
202
203 # @app.route('/post/', methods=['POST'])
204 # for now we do not need a post but I am leaving it in for a potential future
205
206 # routing to index.html
207 @app.route('/hedy', methods=['GET'], defaults={'level': 1, 'step': 1})
208 @app.route('/hedy/<level>', methods=['GET'], defaults={'step': 1})
209 @app.route('/hedy/<level>/<step>', methods=['GET'])
210 def index(level, step):
211 session_id() # Run this for the side effect of generating a session ID
212 g.level = level = int(level)
213 g.lang = requested_lang()
214 g.prefix = '/hedy'
215
216 # If step is a string that has more than two characters, it must be an id of a program
217 if step and type_check (step, 'str') and len (step) > 2:
218 result = db_get ('programs', {'id': step})
219 if not result:
220 return 'No such program', 404
221 # Allow both the owner of the program and the admin user to access the program
222 user = current_user (request)
223 if user ['username'] != result ['username'] and not is_admin (request):
224 return 'No such program!', 404
225 loaded_program = result ['code']
226 # We default to step 1 to provide a meaningful default assignment
227 step = 1
228 else:
229 loaded_program = None
230
231 return hedyweb.render_assignment_editor(
232 request=request,
233 course=HEDY_COURSE[g.lang],
234 level_number=level,
235 assignment_number=step,
236 menu=render_main_menu('hedy'),
237 translations=TRANSLATIONS,
238 version=version(),
239 loaded_program=loaded_program)
240
241 @app.route('/onlinemasters', methods=['GET'], defaults={'level': 1, 'step': 1})
242 @app.route('/onlinemasters/<level>', methods=['GET'], defaults={'step': 1})
243 @app.route('/onlinemasters/<level>/<step>', methods=['GET'])
244 def onlinemasters(level, step):
245 session_id() # Run this for the side effect of generating a session ID
246 g.level = level = int(level)
247 g.lang = lang = requested_lang()
248 g.prefix = '/onlinemasters'
249
250 return hedyweb.render_assignment_editor(
251 request=request,
252 course=ONLINE_MASTERS_COURSE,
253 level_number=level,
254 assignment_number=step,
255 translations=TRANSLATIONS,
256 version=version(),
257 menu=None,
258 loaded_program=None)
259
260 @app.route('/space_eu', methods=['GET'], defaults={'level': 1, 'step': 1})
261 @app.route('/space_eu/<level>', methods=['GET'], defaults={'step': 1})
262 @app.route('/space_eu/<level>/<step>', methods=['GET'])
263 def space_eu(level, step):
264 session_id() # Run this for the side effect of generating a session ID
265 g.level = level = int(level)
266 g.lang = requested_lang()
267 g.prefix = '/space_eu'
268
269 return hedyweb.render_assignment_editor(
270 request=request,
271 course=SPACE_EU_COURSE[g.lang],
272 level_number=level,
273 assignment_number=step,
274 translations=TRANSLATIONS,
275 version=version(),
276 menu=None,
277 loaded_program=None)
278
279
280
281 @app.route('/error_messages.js', methods=['GET'])
282 def error():
283 error_messages = TRANSLATIONS.get_translations(requested_lang(), "ClientErrorMessages")
284 return render_template("error_messages.js", error_messages=json.dumps(error_messages))
285
286
287 @app.errorhandler(500)
288 def internal_error(exception):
289 import traceback
290 print(traceback.format_exc())
291 return "<h1>500 Internal Server Error</h1>"
292
293 @app.route('/index.html')
294 @app.route('/')
295 def default_landing_page():
296 return main_page('start')
297
298 @app.route('/<page>')
299 def main_page(page):
300 if page == 'favicon.ico':
301 abort(404)
302
303 lang = requested_lang()
304 effective_lang = lang
305
306 if page in ['signup', 'login', 'my-profile', 'recover', 'reset', 'admin']:
307 return auth_templates(page, lang, render_main_menu(page), request)
308
309 if page == 'programs':
310 return programs_page(request)
311
312 # Default to English if requested language is not available
313 if not path.isfile(f'main/{page}-{effective_lang}.md'):
314 effective_lang = 'en'
315
316 try:
317 with open(f'main/{page}-{effective_lang}.md', 'r') as f:
318 contents = f.read()
319 except IOError:
320 abort(404)
321
322 front_matter, markdown = split_markdown_front_matter(contents)
323
324 menu = render_main_menu(page)
325 return render_template('main-page.html', mkd=markdown, lang=lang, menu=menu, username=current_user(request) ['username'], auth=TRANSLATIONS.data [lang] ['Auth'], **front_matter)
326
327
328 def session_id():
329 """Returns or sets the current session ID."""
330 if 'session_id' not in session:
331 session['session_id'] = uuid.uuid4().hex
332 return session['session_id']
333
334
335 def requested_lang():
336 """Return the user's requested language code.
337
338 If not in the request parameters, use the browser's accept-languages
339 header to do language negotiation.
340 """
341 lang = request.args.get("lang")
342 if lang: return lang
343
344 return request.accept_languages.best_match(ALL_LANGUAGES.keys(), 'en')
345
346 @app.template_global()
347 def current_language():
348 return make_lang_obj(requested_lang())
349
350 @app.template_global()
351 def hedy_link(level_nr, assignment_nr, subpage=None, lang=None):
352 """Make a link to a Hedy page."""
353 parts = [g.prefix]
354 parts.append('/' + str(level_nr))
355 if str(assignment_nr) != '1' or subpage:
356 parts.append('/' + str(assignment_nr if assignment_nr else '1'))
357 if subpage and subpage != 'code':
358 parts.append('/' + subpage)
359 parts.append('?')
360 parts.append('lang=' + (lang if lang else requested_lang()))
361 return ''.join(parts)
362
363 @app.template_global()
364 def other_languages():
365 cl = requested_lang()
366 return [make_lang_obj(l) for l in ALL_LANGUAGES.keys() if l != cl]
367
368
369 def make_lang_obj(lang):
370 """Make a language object for a given language."""
371 return {
372 'sym': ALL_LANGUAGES[lang],
373 'lang': lang
374 }
375
376
377 @app.template_global()
378 def modify_query(**new_values):
379 args = request.args.copy()
380
381 for key, value in new_values.items():
382 args[key] = value
383
384 return '{}?{}'.format(request.path, url_encode(args))
385
386
387 def no_none_sense(d):
388 """Remove all None values from a dict."""
389 return {k: v for k, v in d.items() if v is not None}
390
391
392 def version():
393 """Get the version from the Heroku environment variables."""
394 if not os.getenv('DYNO'):
395 # Not on Heroku
396 return 'DEV'
397
398 vrz = os.getenv('HEROKU_RELEASE_CREATED_AT')
399 the_date = datetime.date.fromisoformat(vrz[:10]) if vrz else datetime.date.today()
400
401 commit = os.getenv('HEROKU_SLUG_COMMIT', '????')[0:6]
402 return the_date.strftime('%b %d') + f' ({commit})'
403
404
405 def split_markdown_front_matter(md):
406 parts = re.split('^---', md, 1, re.M)
407 if len(parts) == 1:
408 return {}, md
409 # safe_load returns 'None' if the string is empty
410 front_matter = yaml.safe_load(parts[0]) or {}
411 return front_matter, parts[1]
412
413
414 def render_main_menu(current_page):
415 """Render a list of (caption, href, selected, color) from the main menu."""
416 return [dict(
417 caption=item.get(requested_lang(), item.get('en', '???')),
418 href='/' + item['_'],
419 selected=(current_page == item['_']),
420 accent_color=item.get('accent_color', 'white')
421 ) for item in main_menu_json['nav']]
422
423 # *** PROGRAMS ***
424
425 # Not very restful to use a GET to delete something, but indeed convenient; we can do it with a single link and avoiding AJAX.
426 @app.route('/programs/delete/<program_id>', methods=['GET'])
427 @requires_login
428 def delete_program (user, program_id):
429 result = db_get ('programs', {'id': program_id})
430 if not result or result ['username'] != user ['username']:
431 return "", 404
432 db_del ('programs', {'id': program_id})
433 return redirect ('/programs')
434
435 @app.route('/programs', methods=['POST'])
436 @requires_login
437 def save_program (user):
438
439 body = request.json
440 if not type_check (body, 'dict'):
441 return 'body must be an object', 400
442 if not object_check (body, 'code', 'str'):
443 return 'code must be a string', 400
444 if not object_check (body, 'name', 'str'):
445 return 'name must be a string', 400
446 if not object_check (body, 'level', 'int'):
447 return 'level must be an integer', 400
448
449 # We execute the saved program to see if it would generate an error or not
450 error = None
451 try:
452 hedy_errors = TRANSLATIONS.get_translations(requested_lang(), 'HedyErrorMessages')
453 result = hedy.transpile(body ['code'], body ['level'])
454 except hedy.HedyException as E:
455 error_template = hedy_errors[E.error_code]
456 error = error_template.format(**E.arguments)
457 except Exception as E:
458 error = str(E)
459
460 name = body ['name']
461
462     # We check if a program with a name `xyz` exists in the database for the username. If it does, we check whether `xyz (1)` exists, and so on until we find a program `xyz (NN)` that doesn't exist yet.
463 # It'd be ideal to search by username & program name, but since DynamoDB doesn't allow searching for two indexes at the same time, this would require to create a special index to that effect, which is cumbersome.
464 # For now, we bring all existing programs for the user and then search within them for repeated names.
465 existing = db_get_many ('programs', {'username': user ['username']}, True)
466 name_counter = 0
467 for program in existing:
468 if re.match ('^' + re.escape (name) + '( \(\d+\))*', program ['name']):
469 name_counter = name_counter + 1
470 if name_counter:
471 name = name + ' (' + str (name_counter) + ')'
472
473 db_set('programs', {
474 'id': uuid.uuid4().hex,
475 'session': session_id(),
476 'date': timems (),
477 'lang': requested_lang(),
478 'version': version(),
479 'level': body ['level'],
480 'code': body ['code'],
481 'name': name,
482 'server_error': error,
483 'username': user ['username']
484 })
485
486 return jsonify({})
487
488 # *** AUTH ***
489
490 import auth
491 auth.routes(app, requested_lang)
492
493 # *** START SERVER ***
494
495 if __name__ == '__main__':
496 # Threaded option to enable multiple instances for multiple user access support
497 app.run(threaded=True, port=config ['port'])
498
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
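A patch written in this format can be sanity-checked before it is handed in by asking `git apply --check` whether it would apply cleanly against the repository. The helper below is a minimal sketch under that assumption; the function name, the temporary-file handling and the `repo_dir` parameter are illustrative and not part of the task description.

```python
import subprocess
import tempfile

def patch_applies(repo_dir: str, patch_text: str) -> bool:
    """Return True when `git apply --check` accepts the patch without modifying any files."""
    with tempfile.NamedTemporaryFile("w", suffix=".diff", delete=False) as handle:
        handle.write(patch_text)
        patch_path = handle.name
    result = subprocess.run(
        ["git", "apply", "--check", patch_path],
        cwd=repo_dir,
        capture_output=True,
        text=True,
    )
    return result.returncode == 0
```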
| diff --git a/app.py b/app.py
--- a/app.py
+++ b/app.py
@@ -34,6 +34,7 @@
'fr': 'Français',
'pt_br': 'Português',
'de': 'Deutsch',
+ 'it': 'Italiano'
}
LEVEL_DEFAULTS = collections.defaultdict(courses.NoSuchDefaults)
| {"golden_diff": "diff --git a/app.py b/app.py\n--- a/app.py\n+++ b/app.py\n@@ -34,6 +34,7 @@\n 'fr': 'Fran\u00e7ais',\n 'pt_br': 'Portugu\u00eas',\n 'de': 'Deutsch',\n+ 'it': 'Italiano'\n }\n \n LEVEL_DEFAULTS = collections.defaultdict(courses.NoSuchDefaults)\n", "issue": "Add Italian translations\n\n", "before_files": [{"content": "# coding=utf-8\nimport datetime\nimport collections\nfrom functools import wraps\nimport hedy\nimport json\nimport jsonbin\nimport logging\nimport os\nfrom os import path\nimport re\nimport requests\nimport uuid\nimport yaml\nfrom flask_commonmark import Commonmark\nfrom werkzeug.urls import url_encode\nfrom config import config\nfrom auth import auth_templates, current_user, requires_login, is_admin\nfrom utils import db_get, db_get_many, db_set, timems, type_check, object_check, db_del\n\n# app.py\nfrom flask import Flask, request, jsonify, render_template, session, abort, g, redirect\nfrom flask_compress import Compress\n\n# Hedy-specific modules\nimport courses\nimport hedyweb\n\n# Define and load all available language data\nALL_LANGUAGES = {\n 'en': 'English',\n 'nl': 'Nederlands',\n 'es': 'Espa\u00f1ol',\n 'fr': 'Fran\u00e7ais',\n 'pt_br': 'Portugu\u00eas',\n 'de': 'Deutsch',\n}\n\nLEVEL_DEFAULTS = collections.defaultdict(courses.NoSuchDefaults)\nfor lang in ALL_LANGUAGES.keys():\n LEVEL_DEFAULTS[lang] = courses.LevelDefaults(lang)\n\nHEDY_COURSE = collections.defaultdict(courses.NoSuchCourse)\nfor lang in ALL_LANGUAGES.keys():\n HEDY_COURSE[lang] = courses.Course('hedy', lang, LEVEL_DEFAULTS[lang])\n\nSPACE_EU_COURSE = {'nl': courses.Course('space_eu', 'nl', LEVEL_DEFAULTS['nl']),\n 'en': courses.Course('space_eu', 'en', LEVEL_DEFAULTS['en']),\n 'es': courses.Course('space_eu', 'es', LEVEL_DEFAULTS['es'])\n }\n\nONLINE_MASTERS_COURSE = courses.Course('online_masters', 'nl', LEVEL_DEFAULTS['nl'])\n\nTRANSLATIONS = hedyweb.Translations()\n\n# Load main menu (do it once, can be cached)\nwith open(f'main/menu.json', 'r') as f:\n main_menu_json = json.load(f)\n\n\nlogging.basicConfig(\n level=logging.DEBUG,\n format='[%(asctime)s] %(levelname)-8s: %(message)s')\n\napp = Flask(__name__, static_url_path='')\n\n# HTTP -> HTTPS redirect\n# https://stackoverflow.com/questions/32237379/python-flask-redirect-to-https-from-http/32238093\nif os.getenv ('REDIRECT_HTTP_TO_HTTPS'):\n @app.before_request\n def before_request():\n if request.url.startswith('http://'):\n url = request.url.replace('http://', 'https://', 1)\n # We use a 302 in case we need to revert the redirect.\n return redirect(url, code=302)\n\n# Unique random key for sessions\napp.config['SECRET_KEY'] = uuid.uuid4().hex\n\nCompress(app)\nCommonmark(app)\nlogger = jsonbin.JsonBinLogger.from_env_vars()\n\nif not os.getenv('HEROKU_RELEASE_CREATED_AT'):\n logging.warning('Cannot determine release; enable Dyno metadata by running \"heroku labs:enable runtime-dyno-metadata -a <APP_NAME>\"')\n\[email protected]('/parse', methods=['POST'])\ndef parse():\n body = request.json\n if not body:\n return \"body must be an object\", 400\n if 'code' not in body:\n return \"body.code must be a string\", 400\n if 'level' not in body:\n return \"body.level must be a string\", 400\n\n code = body ['code']\n level = int(body ['level'])\n # Language should come principally from the request body,\n # but we'll fall back to browser default if it's missing for whatever\n # reason.\n lang = body.get('lang', requested_lang())\n\n # For debugging\n print(f\"got code {code}\")\n\n response = {}\n username = current_user(request) 
['username'] or None\n\n # Check if user sent code\n if not code:\n response[\"Error\"] = \"no code found, please send code.\"\n # is so, parse\n else:\n try:\n hedy_errors = TRANSLATIONS.get_translations(lang, 'HedyErrorMessages')\n result = hedy.transpile(code, level)\n response[\"Code\"] = \"# coding=utf8\\n\" + result\n except hedy.HedyException as E:\n # some 'errors' can be fixed, for these we throw an exception, but also\n # return fixed code, so it can be ran\n if E.args[0] == \"Invalid Space\":\n error_template = hedy_errors[E.error_code]\n response[\"Code\"] = \"# coding=utf8\\n\" + E.arguments['fixed_code']\n response[\"Warning\"] = error_template.format(**E.arguments)\n elif E.args[0] == \"Parse\":\n error_template = hedy_errors[E.error_code]\n # Localize the names of characters\n # Localize the names of characters\n if 'character_found' in E.arguments:\n E.arguments['character_found'] = hedy_errors[E.arguments['character_found']]\n response[\"Error\"] = error_template.format(**E.arguments)\n else:\n error_template = hedy_errors[E.error_code]\n response[\"Error\"] = error_template.format(**E.arguments)\n except Exception as E:\n print(f\"error transpiling {code}\")\n response[\"Error\"] = str(E)\n\n logger.log({\n 'session': session_id(),\n 'date': str(datetime.datetime.now()),\n 'level': level,\n 'lang': lang,\n 'code': code,\n 'server_error': response.get('Error'),\n 'version': version(),\n 'username': username\n })\n\n return jsonify(response)\n\[email protected]('/report_error', methods=['POST'])\ndef report_error():\n post_body = request.json\n\n logger.log({\n 'session': session_id(),\n 'date': str(datetime.datetime.now()),\n 'level': post_body.get('level'),\n 'code': post_body.get('code'),\n 'client_error': post_body.get('client_error'),\n 'version': version(),\n 'username': current_user(request) ['username'] or None\n })\n\n return 'logged'\n\ndef programs_page (request):\n username = current_user(request) ['username']\n if not username:\n return \"unauthorized\", 403\n\n lang = requested_lang()\n query_lang = request.args.get('lang') or ''\n if query_lang:\n query_lang = '?lang=' + query_lang\n\n from_user = request.args.get('user') or None\n if from_user and not is_admin (request):\n return \"unauthorized\", 403\n\n texts=TRANSLATIONS.data [lang] ['Programs']\n\n result = db_get_many ('programs', {'username': from_user or username}, True)\n programs = []\n now = timems ()\n for item in result:\n measure = texts ['minutes']\n date = round ((now - item ['date']) / 60000)\n if date > 90:\n measure = texts ['hours']\n date = round (date / 60)\n if date > 36:\n measure = texts ['days']\n\n date = round (date / 24)\n\n programs.append ({'id': item ['id'], 'code': item ['code'], 'date': texts ['ago-1'] + ' ' + str (date) + ' ' + measure + ' ' + texts ['ago-2'], 'level': item ['level'], 'name': item ['name']})\n\n return render_template('programs.html', lang=requested_lang(), menu=render_main_menu('programs'), texts=texts, auth=TRANSLATIONS.data [lang] ['Auth'], programs=programs, username=username, current_page='programs', query_lang=query_lang, from_user=from_user)\n\n# @app.route('/post/', methods=['POST'])\n# for now we do not need a post but I am leaving it in for a potential future\n\n# routing to index.html\[email protected]('/hedy', methods=['GET'], defaults={'level': 1, 'step': 1})\[email protected]('/hedy/<level>', methods=['GET'], defaults={'step': 1})\[email protected]('/hedy/<level>/<step>', methods=['GET'])\ndef index(level, step):\n session_id() # Run this for 
the side effect of generating a session ID\n g.level = level = int(level)\n g.lang = requested_lang()\n g.prefix = '/hedy'\n\n # If step is a string that has more than two characters, it must be an id of a program\n if step and type_check (step, 'str') and len (step) > 2:\n result = db_get ('programs', {'id': step})\n if not result:\n return 'No such program', 404\n # Allow both the owner of the program and the admin user to access the program\n user = current_user (request)\n if user ['username'] != result ['username'] and not is_admin (request):\n return 'No such program!', 404\n loaded_program = result ['code']\n # We default to step 1 to provide a meaningful default assignment\n step = 1\n else:\n loaded_program = None\n\n return hedyweb.render_assignment_editor(\n request=request,\n course=HEDY_COURSE[g.lang],\n level_number=level,\n assignment_number=step,\n menu=render_main_menu('hedy'),\n translations=TRANSLATIONS,\n version=version(),\n loaded_program=loaded_program)\n\[email protected]('/onlinemasters', methods=['GET'], defaults={'level': 1, 'step': 1})\[email protected]('/onlinemasters/<level>', methods=['GET'], defaults={'step': 1})\[email protected]('/onlinemasters/<level>/<step>', methods=['GET'])\ndef onlinemasters(level, step):\n session_id() # Run this for the side effect of generating a session ID\n g.level = level = int(level)\n g.lang = lang = requested_lang()\n g.prefix = '/onlinemasters'\n\n return hedyweb.render_assignment_editor(\n request=request,\n course=ONLINE_MASTERS_COURSE,\n level_number=level,\n assignment_number=step,\n translations=TRANSLATIONS,\n version=version(),\n menu=None,\n loaded_program=None)\n\[email protected]('/space_eu', methods=['GET'], defaults={'level': 1, 'step': 1})\[email protected]('/space_eu/<level>', methods=['GET'], defaults={'step': 1})\[email protected]('/space_eu/<level>/<step>', methods=['GET'])\ndef space_eu(level, step):\n session_id() # Run this for the side effect of generating a session ID\n g.level = level = int(level)\n g.lang = requested_lang()\n g.prefix = '/space_eu'\n\n return hedyweb.render_assignment_editor(\n request=request,\n course=SPACE_EU_COURSE[g.lang],\n level_number=level,\n assignment_number=step,\n translations=TRANSLATIONS,\n version=version(),\n menu=None,\n loaded_program=None)\n\n\n\[email protected]('/error_messages.js', methods=['GET'])\ndef error():\n error_messages = TRANSLATIONS.get_translations(requested_lang(), \"ClientErrorMessages\")\n return render_template(\"error_messages.js\", error_messages=json.dumps(error_messages))\n\n\[email protected](500)\ndef internal_error(exception):\n import traceback\n print(traceback.format_exc())\n return \"<h1>500 Internal Server Error</h1>\"\n\[email protected]('/index.html')\[email protected]('/')\ndef default_landing_page():\n return main_page('start')\n\[email protected]('/<page>')\ndef main_page(page):\n if page == 'favicon.ico':\n abort(404)\n\n lang = requested_lang()\n effective_lang = lang\n\n if page in ['signup', 'login', 'my-profile', 'recover', 'reset', 'admin']:\n return auth_templates(page, lang, render_main_menu(page), request)\n\n if page == 'programs':\n return programs_page(request)\n\n # Default to English if requested language is not available\n if not path.isfile(f'main/{page}-{effective_lang}.md'):\n effective_lang = 'en'\n\n try:\n with open(f'main/{page}-{effective_lang}.md', 'r') as f:\n contents = f.read()\n except IOError:\n abort(404)\n\n front_matter, markdown = split_markdown_front_matter(contents)\n\n menu = 
render_main_menu(page)\n return render_template('main-page.html', mkd=markdown, lang=lang, menu=menu, username=current_user(request) ['username'], auth=TRANSLATIONS.data [lang] ['Auth'], **front_matter)\n\n\ndef session_id():\n \"\"\"Returns or sets the current session ID.\"\"\"\n if 'session_id' not in session:\n session['session_id'] = uuid.uuid4().hex\n return session['session_id']\n\n\ndef requested_lang():\n \"\"\"Return the user's requested language code.\n\n If not in the request parameters, use the browser's accept-languages\n header to do language negotiation.\n \"\"\"\n lang = request.args.get(\"lang\")\n if lang: return lang\n\n return request.accept_languages.best_match(ALL_LANGUAGES.keys(), 'en')\n\[email protected]_global()\ndef current_language():\n return make_lang_obj(requested_lang())\n\[email protected]_global()\ndef hedy_link(level_nr, assignment_nr, subpage=None, lang=None):\n \"\"\"Make a link to a Hedy page.\"\"\"\n parts = [g.prefix]\n parts.append('/' + str(level_nr))\n if str(assignment_nr) != '1' or subpage:\n parts.append('/' + str(assignment_nr if assignment_nr else '1'))\n if subpage and subpage != 'code':\n parts.append('/' + subpage)\n parts.append('?')\n parts.append('lang=' + (lang if lang else requested_lang()))\n return ''.join(parts)\n\[email protected]_global()\ndef other_languages():\n cl = requested_lang()\n return [make_lang_obj(l) for l in ALL_LANGUAGES.keys() if l != cl]\n\n\ndef make_lang_obj(lang):\n \"\"\"Make a language object for a given language.\"\"\"\n return {\n 'sym': ALL_LANGUAGES[lang],\n 'lang': lang\n }\n\n\[email protected]_global()\ndef modify_query(**new_values):\n args = request.args.copy()\n\n for key, value in new_values.items():\n args[key] = value\n\n return '{}?{}'.format(request.path, url_encode(args))\n\n\ndef no_none_sense(d):\n \"\"\"Remove all None values from a dict.\"\"\"\n return {k: v for k, v in d.items() if v is not None}\n\n\ndef version():\n \"\"\"Get the version from the Heroku environment variables.\"\"\"\n if not os.getenv('DYNO'):\n # Not on Heroku\n return 'DEV'\n\n vrz = os.getenv('HEROKU_RELEASE_CREATED_AT')\n the_date = datetime.date.fromisoformat(vrz[:10]) if vrz else datetime.date.today()\n\n commit = os.getenv('HEROKU_SLUG_COMMIT', '????')[0:6]\n return the_date.strftime('%b %d') + f' ({commit})'\n\n\ndef split_markdown_front_matter(md):\n parts = re.split('^---', md, 1, re.M)\n if len(parts) == 1:\n return {}, md\n # safe_load returns 'None' if the string is empty\n front_matter = yaml.safe_load(parts[0]) or {}\n return front_matter, parts[1]\n\n\ndef render_main_menu(current_page):\n \"\"\"Render a list of (caption, href, selected, color) from the main menu.\"\"\"\n return [dict(\n caption=item.get(requested_lang(), item.get('en', '???')),\n href='/' + item['_'],\n selected=(current_page == item['_']),\n accent_color=item.get('accent_color', 'white')\n ) for item in main_menu_json['nav']]\n\n# *** PROGRAMS ***\n\n# Not very restful to use a GET to delete something, but indeed convenient; we can do it with a single link and avoiding AJAX.\[email protected]('/programs/delete/<program_id>', methods=['GET'])\n@requires_login\ndef delete_program (user, program_id):\n result = db_get ('programs', {'id': program_id})\n if not result or result ['username'] != user ['username']:\n return \"\", 404\n db_del ('programs', {'id': program_id})\n return redirect ('/programs')\n\[email protected]('/programs', methods=['POST'])\n@requires_login\ndef save_program (user):\n\n body = request.json\n if not type_check 
(body, 'dict'):\n return 'body must be an object', 400\n if not object_check (body, 'code', 'str'):\n return 'code must be a string', 400\n if not object_check (body, 'name', 'str'):\n return 'name must be a string', 400\n if not object_check (body, 'level', 'int'):\n return 'level must be an integer', 400\n\n # We execute the saved program to see if it would generate an error or not\n error = None\n try:\n hedy_errors = TRANSLATIONS.get_translations(requested_lang(), 'HedyErrorMessages')\n result = hedy.transpile(body ['code'], body ['level'])\n except hedy.HedyException as E:\n error_template = hedy_errors[E.error_code]\n error = error_template.format(**E.arguments)\n except Exception as E:\n error = str(E)\n\n name = body ['name']\n\n # We check if a program with a name `xyz` exists in the database for the username. If it does, we exist whether `xyz (1)` exists, until we find a program `xyz (NN)` that doesn't exist yet.\n # It'd be ideal to search by username & program name, but since DynamoDB doesn't allow searching for two indexes at the same time, this would require to create a special index to that effect, which is cumbersome.\n # For now, we bring all existing programs for the user and then search within them for repeated names.\n existing = db_get_many ('programs', {'username': user ['username']}, True)\n name_counter = 0\n for program in existing:\n if re.match ('^' + re.escape (name) + '( \\(\\d+\\))*', program ['name']):\n name_counter = name_counter + 1\n if name_counter:\n name = name + ' (' + str (name_counter) + ')'\n\n db_set('programs', {\n 'id': uuid.uuid4().hex,\n 'session': session_id(),\n 'date': timems (),\n 'lang': requested_lang(),\n 'version': version(),\n 'level': body ['level'],\n 'code': body ['code'],\n 'name': name,\n 'server_error': error,\n 'username': user ['username']\n })\n\n return jsonify({})\n\n# *** AUTH ***\n\nimport auth\nauth.routes(app, requested_lang)\n\n# *** START SERVER ***\n\nif __name__ == '__main__':\n # Threaded option to enable multiple instances for multiple user access support\n app.run(threaded=True, port=config ['port'])\n", "path": "app.py"}], "after_files": [{"content": "# coding=utf-8\nimport datetime\nimport collections\nfrom functools import wraps\nimport hedy\nimport json\nimport jsonbin\nimport logging\nimport os\nfrom os import path\nimport re\nimport requests\nimport uuid\nimport yaml\nfrom flask_commonmark import Commonmark\nfrom werkzeug.urls import url_encode\nfrom config import config\nfrom auth import auth_templates, current_user, requires_login, is_admin\nfrom utils import db_get, db_get_many, db_set, timems, type_check, object_check, db_del\n\n# app.py\nfrom flask import Flask, request, jsonify, render_template, session, abort, g, redirect\nfrom flask_compress import Compress\n\n# Hedy-specific modules\nimport courses\nimport hedyweb\n\n# Define and load all available language data\nALL_LANGUAGES = {\n 'en': 'English',\n 'nl': 'Nederlands',\n 'es': 'Espa\u00f1ol',\n 'fr': 'Fran\u00e7ais',\n 'pt_br': 'Portugu\u00eas',\n 'de': 'Deutsch',\n 'it': 'Italiano'\n}\n\nLEVEL_DEFAULTS = collections.defaultdict(courses.NoSuchDefaults)\nfor lang in ALL_LANGUAGES.keys():\n LEVEL_DEFAULTS[lang] = courses.LevelDefaults(lang)\n\nHEDY_COURSE = collections.defaultdict(courses.NoSuchCourse)\nfor lang in ALL_LANGUAGES.keys():\n HEDY_COURSE[lang] = courses.Course('hedy', lang, LEVEL_DEFAULTS[lang])\n\nSPACE_EU_COURSE = {'nl': courses.Course('space_eu', 'nl', LEVEL_DEFAULTS['nl']),\n 'en': courses.Course('space_eu', 'en', 
LEVEL_DEFAULTS['en']),\n 'es': courses.Course('space_eu', 'es', LEVEL_DEFAULTS['es'])\n }\n\nONLINE_MASTERS_COURSE = courses.Course('online_masters', 'nl', LEVEL_DEFAULTS['nl'])\n\nTRANSLATIONS = hedyweb.Translations()\n\n# Load main menu (do it once, can be cached)\nwith open(f'main/menu.json', 'r') as f:\n main_menu_json = json.load(f)\n\n\nlogging.basicConfig(\n level=logging.DEBUG,\n format='[%(asctime)s] %(levelname)-8s: %(message)s')\n\napp = Flask(__name__, static_url_path='')\n\n# HTTP -> HTTPS redirect\n# https://stackoverflow.com/questions/32237379/python-flask-redirect-to-https-from-http/32238093\nif os.getenv ('REDIRECT_HTTP_TO_HTTPS'):\n @app.before_request\n def before_request():\n if request.url.startswith('http://'):\n url = request.url.replace('http://', 'https://', 1)\n # We use a 302 in case we need to revert the redirect.\n return redirect(url, code=302)\n\n# Unique random key for sessions\napp.config['SECRET_KEY'] = uuid.uuid4().hex\n\nCompress(app)\nCommonmark(app)\nlogger = jsonbin.JsonBinLogger.from_env_vars()\n\nif not os.getenv('HEROKU_RELEASE_CREATED_AT'):\n logging.warning('Cannot determine release; enable Dyno metadata by running \"heroku labs:enable runtime-dyno-metadata -a <APP_NAME>\"')\n\[email protected]('/parse', methods=['POST'])\ndef parse():\n body = request.json\n if not body:\n return \"body must be an object\", 400\n if 'code' not in body:\n return \"body.code must be a string\", 400\n if 'level' not in body:\n return \"body.level must be a string\", 400\n\n code = body ['code']\n level = int(body ['level'])\n # Language should come principally from the request body,\n # but we'll fall back to browser default if it's missing for whatever\n # reason.\n lang = body.get('lang', requested_lang())\n\n # For debugging\n print(f\"got code {code}\")\n\n response = {}\n username = current_user(request) ['username'] or None\n\n # Check if user sent code\n if not code:\n response[\"Error\"] = \"no code found, please send code.\"\n # is so, parse\n else:\n try:\n hedy_errors = TRANSLATIONS.get_translations(lang, 'HedyErrorMessages')\n result = hedy.transpile(code, level)\n response[\"Code\"] = \"# coding=utf8\\n\" + result\n except hedy.HedyException as E:\n # some 'errors' can be fixed, for these we throw an exception, but also\n # return fixed code, so it can be ran\n if E.args[0] == \"Invalid Space\":\n error_template = hedy_errors[E.error_code]\n response[\"Code\"] = \"# coding=utf8\\n\" + E.arguments['fixed_code']\n response[\"Warning\"] = error_template.format(**E.arguments)\n elif E.args[0] == \"Parse\":\n error_template = hedy_errors[E.error_code]\n # Localize the names of characters\n # Localize the names of characters\n if 'character_found' in E.arguments:\n E.arguments['character_found'] = hedy_errors[E.arguments['character_found']]\n response[\"Error\"] = error_template.format(**E.arguments)\n else:\n error_template = hedy_errors[E.error_code]\n response[\"Error\"] = error_template.format(**E.arguments)\n except Exception as E:\n print(f\"error transpiling {code}\")\n response[\"Error\"] = str(E)\n\n logger.log({\n 'session': session_id(),\n 'date': str(datetime.datetime.now()),\n 'level': level,\n 'lang': lang,\n 'code': code,\n 'server_error': response.get('Error'),\n 'version': version(),\n 'username': username\n })\n\n return jsonify(response)\n\[email protected]('/report_error', methods=['POST'])\ndef report_error():\n post_body = request.json\n\n logger.log({\n 'session': session_id(),\n 'date': str(datetime.datetime.now()),\n 'level': 
post_body.get('level'),\n 'code': post_body.get('code'),\n 'client_error': post_body.get('client_error'),\n 'version': version(),\n 'username': current_user(request) ['username'] or None\n })\n\n return 'logged'\n\ndef programs_page (request):\n username = current_user(request) ['username']\n if not username:\n return \"unauthorized\", 403\n\n lang = requested_lang()\n query_lang = request.args.get('lang') or ''\n if query_lang:\n query_lang = '?lang=' + query_lang\n\n from_user = request.args.get('user') or None\n if from_user and not is_admin (request):\n return \"unauthorized\", 403\n\n texts=TRANSLATIONS.data [lang] ['Programs']\n\n result = db_get_many ('programs', {'username': from_user or username}, True)\n programs = []\n now = timems ()\n for item in result:\n measure = texts ['minutes']\n date = round ((now - item ['date']) / 60000)\n if date > 90:\n measure = texts ['hours']\n date = round (date / 60)\n if date > 36:\n measure = texts ['days']\n\n date = round (date / 24)\n\n programs.append ({'id': item ['id'], 'code': item ['code'], 'date': texts ['ago-1'] + ' ' + str (date) + ' ' + measure + ' ' + texts ['ago-2'], 'level': item ['level'], 'name': item ['name']})\n\n return render_template('programs.html', lang=requested_lang(), menu=render_main_menu('programs'), texts=texts, auth=TRANSLATIONS.data [lang] ['Auth'], programs=programs, username=username, current_page='programs', query_lang=query_lang, from_user=from_user)\n\n# @app.route('/post/', methods=['POST'])\n# for now we do not need a post but I am leaving it in for a potential future\n\n# routing to index.html\[email protected]('/hedy', methods=['GET'], defaults={'level': 1, 'step': 1})\[email protected]('/hedy/<level>', methods=['GET'], defaults={'step': 1})\[email protected]('/hedy/<level>/<step>', methods=['GET'])\ndef index(level, step):\n session_id() # Run this for the side effect of generating a session ID\n g.level = level = int(level)\n g.lang = requested_lang()\n g.prefix = '/hedy'\n\n # If step is a string that has more than two characters, it must be an id of a program\n if step and type_check (step, 'str') and len (step) > 2:\n result = db_get ('programs', {'id': step})\n if not result:\n return 'No such program', 404\n # Allow both the owner of the program and the admin user to access the program\n user = current_user (request)\n if user ['username'] != result ['username'] and not is_admin (request):\n return 'No such program!', 404\n loaded_program = result ['code']\n # We default to step 1 to provide a meaningful default assignment\n step = 1\n else:\n loaded_program = None\n\n return hedyweb.render_assignment_editor(\n request=request,\n course=HEDY_COURSE[g.lang],\n level_number=level,\n assignment_number=step,\n menu=render_main_menu('hedy'),\n translations=TRANSLATIONS,\n version=version(),\n loaded_program=loaded_program)\n\[email protected]('/onlinemasters', methods=['GET'], defaults={'level': 1, 'step': 1})\[email protected]('/onlinemasters/<level>', methods=['GET'], defaults={'step': 1})\[email protected]('/onlinemasters/<level>/<step>', methods=['GET'])\ndef onlinemasters(level, step):\n session_id() # Run this for the side effect of generating a session ID\n g.level = level = int(level)\n g.lang = lang = requested_lang()\n g.prefix = '/onlinemasters'\n\n return hedyweb.render_assignment_editor(\n request=request,\n course=ONLINE_MASTERS_COURSE,\n level_number=level,\n assignment_number=step,\n translations=TRANSLATIONS,\n version=version(),\n menu=None,\n loaded_program=None)\n\[email 
protected]('/space_eu', methods=['GET'], defaults={'level': 1, 'step': 1})\[email protected]('/space_eu/<level>', methods=['GET'], defaults={'step': 1})\[email protected]('/space_eu/<level>/<step>', methods=['GET'])\ndef space_eu(level, step):\n session_id() # Run this for the side effect of generating a session ID\n g.level = level = int(level)\n g.lang = requested_lang()\n g.prefix = '/space_eu'\n\n return hedyweb.render_assignment_editor(\n request=request,\n course=SPACE_EU_COURSE[g.lang],\n level_number=level,\n assignment_number=step,\n translations=TRANSLATIONS,\n version=version(),\n menu=None,\n loaded_program=None)\n\n\n\[email protected]('/error_messages.js', methods=['GET'])\ndef error():\n error_messages = TRANSLATIONS.get_translations(requested_lang(), \"ClientErrorMessages\")\n return render_template(\"error_messages.js\", error_messages=json.dumps(error_messages))\n\n\[email protected](500)\ndef internal_error(exception):\n import traceback\n print(traceback.format_exc())\n return \"<h1>500 Internal Server Error</h1>\"\n\[email protected]('/index.html')\[email protected]('/')\ndef default_landing_page():\n return main_page('start')\n\[email protected]('/<page>')\ndef main_page(page):\n if page == 'favicon.ico':\n abort(404)\n\n lang = requested_lang()\n effective_lang = lang\n\n if page in ['signup', 'login', 'my-profile', 'recover', 'reset', 'admin']:\n return auth_templates(page, lang, render_main_menu(page), request)\n\n if page == 'programs':\n return programs_page(request)\n\n # Default to English if requested language is not available\n if not path.isfile(f'main/{page}-{effective_lang}.md'):\n effective_lang = 'en'\n\n try:\n with open(f'main/{page}-{effective_lang}.md', 'r') as f:\n contents = f.read()\n except IOError:\n abort(404)\n\n front_matter, markdown = split_markdown_front_matter(contents)\n\n menu = render_main_menu(page)\n return render_template('main-page.html', mkd=markdown, lang=lang, menu=menu, username=current_user(request) ['username'], auth=TRANSLATIONS.data [lang] ['Auth'], **front_matter)\n\n\ndef session_id():\n \"\"\"Returns or sets the current session ID.\"\"\"\n if 'session_id' not in session:\n session['session_id'] = uuid.uuid4().hex\n return session['session_id']\n\n\ndef requested_lang():\n \"\"\"Return the user's requested language code.\n\n If not in the request parameters, use the browser's accept-languages\n header to do language negotiation.\n \"\"\"\n lang = request.args.get(\"lang\")\n if lang: return lang\n\n return request.accept_languages.best_match(ALL_LANGUAGES.keys(), 'en')\n\[email protected]_global()\ndef current_language():\n return make_lang_obj(requested_lang())\n\[email protected]_global()\ndef hedy_link(level_nr, assignment_nr, subpage=None, lang=None):\n \"\"\"Make a link to a Hedy page.\"\"\"\n parts = [g.prefix]\n parts.append('/' + str(level_nr))\n if str(assignment_nr) != '1' or subpage:\n parts.append('/' + str(assignment_nr if assignment_nr else '1'))\n if subpage and subpage != 'code':\n parts.append('/' + subpage)\n parts.append('?')\n parts.append('lang=' + (lang if lang else requested_lang()))\n return ''.join(parts)\n\[email protected]_global()\ndef other_languages():\n cl = requested_lang()\n return [make_lang_obj(l) for l in ALL_LANGUAGES.keys() if l != cl]\n\n\ndef make_lang_obj(lang):\n \"\"\"Make a language object for a given language.\"\"\"\n return {\n 'sym': ALL_LANGUAGES[lang],\n 'lang': lang\n }\n\n\[email protected]_global()\ndef modify_query(**new_values):\n args = request.args.copy()\n\n for 
key, value in new_values.items():\n args[key] = value\n\n return '{}?{}'.format(request.path, url_encode(args))\n\n\ndef no_none_sense(d):\n \"\"\"Remove all None values from a dict.\"\"\"\n return {k: v for k, v in d.items() if v is not None}\n\n\ndef version():\n \"\"\"Get the version from the Heroku environment variables.\"\"\"\n if not os.getenv('DYNO'):\n # Not on Heroku\n return 'DEV'\n\n vrz = os.getenv('HEROKU_RELEASE_CREATED_AT')\n the_date = datetime.date.fromisoformat(vrz[:10]) if vrz else datetime.date.today()\n\n commit = os.getenv('HEROKU_SLUG_COMMIT', '????')[0:6]\n return the_date.strftime('%b %d') + f' ({commit})'\n\n\ndef split_markdown_front_matter(md):\n parts = re.split('^---', md, 1, re.M)\n if len(parts) == 1:\n return {}, md\n # safe_load returns 'None' if the string is empty\n front_matter = yaml.safe_load(parts[0]) or {}\n return front_matter, parts[1]\n\n\ndef render_main_menu(current_page):\n \"\"\"Render a list of (caption, href, selected, color) from the main menu.\"\"\"\n return [dict(\n caption=item.get(requested_lang(), item.get('en', '???')),\n href='/' + item['_'],\n selected=(current_page == item['_']),\n accent_color=item.get('accent_color', 'white')\n ) for item in main_menu_json['nav']]\n\n# *** PROGRAMS ***\n\n# Not very restful to use a GET to delete something, but indeed convenient; we can do it with a single link and avoiding AJAX.\[email protected]('/programs/delete/<program_id>', methods=['GET'])\n@requires_login\ndef delete_program (user, program_id):\n result = db_get ('programs', {'id': program_id})\n if not result or result ['username'] != user ['username']:\n return \"\", 404\n db_del ('programs', {'id': program_id})\n return redirect ('/programs')\n\[email protected]('/programs', methods=['POST'])\n@requires_login\ndef save_program (user):\n\n body = request.json\n if not type_check (body, 'dict'):\n return 'body must be an object', 400\n if not object_check (body, 'code', 'str'):\n return 'code must be a string', 400\n if not object_check (body, 'name', 'str'):\n return 'name must be a string', 400\n if not object_check (body, 'level', 'int'):\n return 'level must be an integer', 400\n\n # We execute the saved program to see if it would generate an error or not\n error = None\n try:\n hedy_errors = TRANSLATIONS.get_translations(requested_lang(), 'HedyErrorMessages')\n result = hedy.transpile(body ['code'], body ['level'])\n except hedy.HedyException as E:\n error_template = hedy_errors[E.error_code]\n error = error_template.format(**E.arguments)\n except Exception as E:\n error = str(E)\n\n name = body ['name']\n\n # We check if a program with a name `xyz` exists in the database for the username. 
If it does, we exist whether `xyz (1)` exists, until we find a program `xyz (NN)` that doesn't exist yet.\n # It'd be ideal to search by username & program name, but since DynamoDB doesn't allow searching for two indexes at the same time, this would require to create a special index to that effect, which is cumbersome.\n # For now, we bring all existing programs for the user and then search within them for repeated names.\n existing = db_get_many ('programs', {'username': user ['username']}, True)\n name_counter = 0\n for program in existing:\n if re.match ('^' + re.escape (name) + '( \\(\\d+\\))*', program ['name']):\n name_counter = name_counter + 1\n if name_counter:\n name = name + ' (' + str (name_counter) + ')'\n\n db_set('programs', {\n 'id': uuid.uuid4().hex,\n 'session': session_id(),\n 'date': timems (),\n 'lang': requested_lang(),\n 'version': version(),\n 'level': body ['level'],\n 'code': body ['code'],\n 'name': name,\n 'server_error': error,\n 'username': user ['username']\n })\n\n return jsonify({})\n\n# *** AUTH ***\n\nimport auth\nauth.routes(app, requested_lang)\n\n# *** START SERVER ***\n\nif __name__ == '__main__':\n # Threaded option to enable multiple instances for multiple user access support\n app.run(threaded=True, port=config ['port'])\n", "path": "app.py"}]} |
gh_patches_debug_140 | rasdani/github-patches | git_diff | doccano__doccano-1280 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ModuleNotFoundError: No module named 'dj_rest_auth'
<!-- Before submitting an issue, make sure to check the docs and closed issues and FAQ to see if any of the solutions work for you. https://github.com/doccano/doccano/wiki/Frequently-Asked-Questions -->
I was using `pip install` to install doccano, due to my lack of knowledge about Docker, and I ran into the following problem:
(To sum up, the module `dj_rest_auth` was not found when setting up the database and creating the admin user.)
```bash
(pytorch) D:\pythonwork\NLP\grad>doccano
Setup Database.
Traceback (most recent call last):
File "d:\anaconda3\envs\pytorch\lib\site-packages\app\manage.py", line 15, in <module>
execute_from_command_line(sys.argv)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 401, in execute_from_command_line
utility.execute()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 377, in execute
django.setup()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\__init__.py", line 24, in setup
apps.populate(settings.INSTALLED_APPS)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\registry.py", line 91, in populate
app_config = AppConfig.create(entry)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\config.py", line 90, in create
module = import_module(entry)
File "d:\anaconda3\envs\pytorch\lib\importlib\__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 965, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'dj_rest_auth'
Traceback (most recent call last):
File "d:\anaconda3\envs\pytorch\lib\site-packages\app\manage.py", line 15, in <module>
execute_from_command_line(sys.argv)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 401, in execute_from_command_line
utility.execute()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 377, in execute
django.setup()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\__init__.py", line 24, in setup
apps.populate(settings.INSTALLED_APPS)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\registry.py", line 91, in populate
app_config = AppConfig.create(entry)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\config.py", line 90, in create
module = import_module(entry)
File "d:\anaconda3\envs\pytorch\lib\importlib\__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 965, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'dj_rest_auth'
Traceback (most recent call last):
File "d:\anaconda3\envs\pytorch\lib\site-packages\app\manage.py", line 15, in <module>
execute_from_command_line(sys.argv)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 401, in execute_from_command_line
utility.execute()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 377, in execute
django.setup()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\__init__.py", line 24, in setup
apps.populate(settings.INSTALLED_APPS)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\registry.py", line 91, in populate
app_config = AppConfig.create(entry)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\config.py", line 90, in create
module = import_module(entry)
File "d:\anaconda3\envs\pytorch\lib\importlib\__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 965, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'dj_rest_auth'
Create admin user.
Traceback (most recent call last):
File "d:\anaconda3\envs\pytorch\lib\site-packages\app\manage.py", line 15, in <module>
execute_from_command_line(sys.argv)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 401, in execute_from_command_line
utility.execute()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 377, in execute
django.setup()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\__init__.py", line 24, in setup
apps.populate(settings.INSTALLED_APPS)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\registry.py", line 91, in populate
app_config = AppConfig.create(entry)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\config.py", line 90, in create
module = import_module(entry)
File "d:\anaconda3\envs\pytorch\lib\importlib\__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 965, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'dj_rest_auth'
Starting server with port 8000.
Exception in thread django-main-thread:
Traceback (most recent call last):
File "d:\anaconda3\envs\pytorch\lib\threading.py", line 926, in _bootstrap_inner
self.run()
File "d:\anaconda3\envs\pytorch\lib\threading.py", line 870, in run
self._target(*self._args, **self._kwargs)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 53, in wrapper
fn(*args, **kwargs)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\commands\runserver.py", line 110, in inner_run
autoreload.raise_last_exception()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 76, in raise_last_exception
raise _exception[1]
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 357, in execute
autoreload.check_errors(django.setup)()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 53, in wrapper
fn(*args, **kwargs)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\__init__.py", line 24, in setup
apps.populate(settings.INSTALLED_APPS)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\registry.py", line 91, in populate
app_config = AppConfig.create(entry)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\apps\config.py", line 90, in create
module = import_module(entry)
File "d:\anaconda3\envs\pytorch\lib\importlib\__init__.py", line 127, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
File "<frozen importlib._bootstrap>", line 1006, in _gcd_import
File "<frozen importlib._bootstrap>", line 983, in _find_and_load
File "<frozen importlib._bootstrap>", line 965, in _find_and_load_unlocked
ModuleNotFoundError: No module named 'dj_rest_auth'
Traceback (most recent call last):
File "d:\anaconda3\envs\pytorch\lib\site-packages\app\manage.py", line 15, in <module>
execute_from_command_line(sys.argv)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 401, in execute_from_command_line
utility.execute()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\base.py", line 330, in run_from_argv
self.execute(*args, **cmd_options)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\commands\runserver.py", line 61, in execute
super().execute(*args, **options)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\base.py", line 371, in execute
output = self.handle(*args, **options)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\commands\runserver.py", line 96, in handle
self.run(**options)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\core\management\commands\runserver.py", line 103, in run
autoreload.run_with_reloader(self.inner_run, **options)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 618, in run_with_reloader
start_django(reloader, main_func, *args, **kwargs)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 603, in start_django
reloader.run(django_main_thread)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 318, in run
self.run_loop()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 324, in run_loop
next(ticker)
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 364, in tick
for filepath, mtime in self.snapshot_files():
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 380, in snapshot_files
for file in self.watched_files():
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 278, in watched_files
yield from iter_all_python_module_files()
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 105, in iter_all_python_module_files
return iter_modules_and_files(modules, frozenset(_error_files))
File "d:\anaconda3\envs\pytorch\lib\site-packages\django\utils\autoreload.py", line 141, in iter_modules_and_files
resolved_path = path.resolve(strict=True).absolute()
File "d:\anaconda3\envs\pytorch\lib\pathlib.py", line 1166, in resolve
s = self._flavour.resolve(self, strict=strict)
File "d:\anaconda3\envs\pytorch\lib\pathlib.py", line 200, in resolve
return self._ext_to_normal(_getfinalpathname(s))
OSError: [WinError 123] 文件名、目录名或卷标语法不正确。: '<frozen importlib._bootstrap>'
```
(The Chinese Windows error message translates to: "The filename, directory name, or volume label syntax is incorrect.")
It seemed that something was wrong with `File "<frozen importlib._bootstrap>"`, but I cannot find where that file is.
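
The `<frozen importlib._bootstrap>` frames are just Python's import machinery; the more likely cause is that the `dj_rest_auth` package itself is missing from the environment. A minimal check you can run (assuming the module is provided by the PyPI package `dj-rest-auth`):

```python
# Quick check inside the affected environment for the missing dependency.
import importlib.util

if importlib.util.find_spec("dj_rest_auth") is None:
    # django.setup() fails for exactly this reason while importing INSTALLED_APPS.
    print("dj_rest_auth is not importable; it is distributed as the "
          "'dj-rest-auth' package on PyPI.")
```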
Your Environment
---------
<!-- Include details of your environment.-->
* Operating System: Windows
* Python Version Used: 3.7.10
* When you install doccano: 2021.03.30
* How did you install doccano (Heroku button etc): pip install
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 import io
4 import os
5
6 from setuptools import find_packages, setup
7
8 NAME = 'doccano'
9 DESCRIPTION = 'doccano, text annotation tool for machine learning practitioners'
10 URL = 'https://github.com/doccano/doccano'
11 EMAIL = '[email protected]'
12 AUTHOR = 'Hironsan'
13 LICENSE = 'MIT'
14
15 here = os.path.abspath(os.path.dirname(__file__))
16 with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
17 long_description = '\n' + f.read()
18
19 required = [
20 'apache-libcloud>=3.2.0',
21 'colour>=0.1.5',
22 'conllu>=4.2.2',
23 'dj-database-url>=0.5.0',
24 'django-cors-headers>=3.5.0',
25 'django-filter>=2.4.0',
26 'django-rest-polymorphic>=0.1.9',
27 'djangorestframework-csv>=2.1.0',
28 'djangorestframework-xml>=2.0.0',
29 'drf-yasg>=1.20.0',
30 'environs>=9.2.0',
31 'furl>=2.1.0',
32 'pyexcel>=0.6.6',
33 'pyexcel-xlsx>=0.6.0',
34 'python-jose>=3.2.0',
35 'seqeval>=1.2.2',
36 'social-auth-app-django>=4.0.0',
37 'whitenoise>=5.2.0',
38 'auto-labeling-pipeline>=0.1.12'
39 ]
40
41 setup(
42 name=NAME,
43 use_scm_version=True,
44 setup_requires=['setuptools_scm'],
45 description=DESCRIPTION,
46 long_description=long_description,
47 long_description_content_type='text/markdown',
48 author=AUTHOR,
49 author_email=EMAIL,
50 url=URL,
51 packages=find_packages(exclude=('*.tests',)),
52 entry_points={
53 'console_scripts': [
54 'doccano = app.doccano.doccano:main'
55 ]
56 },
57 install_requires=required,
58 extras_require={
59 'postgresql': ['psycopg2-binary>=2.8.6'],
60 'mssql': ['django-mssql-backend>=2.8.1'],
61 },
62 include_package_data=True,
63 license=LICENSE,
64 classifiers=[
65 'License :: OSI Approved :: MIT License',
66 'Programming Language :: Python',
67 'Programming Language :: Python :: 3.6',
68 'Programming Language :: Python :: 3.7',
69 'Programming Language :: Python :: 3.8',
70 'Programming Language :: Python :: Implementation :: CPython',
71 'Programming Language :: Python :: Implementation :: PyPy'
72 ],
73 )
74
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,8 @@
'seqeval>=1.2.2',
'social-auth-app-django>=4.0.0',
'whitenoise>=5.2.0',
- 'auto-labeling-pipeline>=0.1.12'
+ 'auto-labeling-pipeline>=0.1.12',
+ 'dj-rest-auth>=2.1.4'
]
setup(
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -35,7 +35,8 @@\n 'seqeval>=1.2.2',\n 'social-auth-app-django>=4.0.0',\n 'whitenoise>=5.2.0',\n- 'auto-labeling-pipeline>=0.1.12'\n+ 'auto-labeling-pipeline>=0.1.12',\n+ 'dj-rest-auth>=2.1.4'\n ]\n \n setup(\n", "issue": "ModuleNotFoundError: No module named 'dj_rest_auth'\n<!-- Before submitting an issue, make sure to check the docs and closed issues and FAQ to see if any of the solutions work for you. https://github.com/doccano/doccano/wiki/Frequently-Asked-Questions -->\r\n\r\nI was using `pip install` to install doccano, which is due to my lack of knowledge about docker. And I run into the following problem: \r\n(To sum up, module `dj_rest_auth` was not found when setting up database and createing admin user.)\r\n\r\n```bash\r\n(pytorch) D:\\pythonwork\\NLP\\grad>doccano \r\nSetup Database. \r\nTraceback (most recent call last): \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\app\\manage.py\", line 15, in <module> \r\n execute_from_command_line(sys.argv) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 401, in execute_from_command_line \r\n utility.execute() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 377, in execute \r\n django.setup() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\__init__.py\", line 24, in setup \r\n apps.populate(settings.INSTALLED_APPS) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\registry.py\", line 91, in populate \r\n app_config = AppConfig.create(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\config.py\", line 90, in create \r\n module = import_module(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\importlib\\__init__.py\", line 127, in import_module \r\n return _bootstrap._gcd_import(name[level:], package, level) \r\n File \"<frozen importlib._bootstrap>\", line 1006, in _gcd_import \r\n File \"<frozen importlib._bootstrap>\", line 983, in _find_and_load \r\n File \"<frozen importlib._bootstrap>\", line 965, in _find_and_load_unlocked \r\nModuleNotFoundError: No module named 'dj_rest_auth' \r\nTraceback (most recent call last): \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\app\\manage.py\", line 15, in <module> \r\n execute_from_command_line(sys.argv) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 401, in execute_from_command_line \r\n utility.execute() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 377, in execute \r\n django.setup() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\__init__.py\", line 24, in setup \r\n apps.populate(settings.INSTALLED_APPS) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\registry.py\", line 91, in populate \r\n app_config = AppConfig.create(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\config.py\", line 90, in create \r\n module = import_module(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\importlib\\__init__.py\", line 127, in import_module \r\n return _bootstrap._gcd_import(name[level:], package, level) \r\n File \"<frozen importlib._bootstrap>\", line 1006, in _gcd_import \r\n File \"<frozen importlib._bootstrap>\", line 983, in _find_and_load \r\n File \"<frozen 
importlib._bootstrap>\", line 965, in _find_and_load_unlocked \r\nModuleNotFoundError: No module named 'dj_rest_auth' \r\nTraceback (most recent call last): \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\app\\manage.py\", line 15, in <module> \r\n execute_from_command_line(sys.argv) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 401, in execute_from_command_line \r\n utility.execute() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 377, in execute \r\n django.setup() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\__init__.py\", line 24, in setup \r\n apps.populate(settings.INSTALLED_APPS) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\registry.py\", line 91, in populate \r\n app_config = AppConfig.create(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\config.py\", line 90, in create \r\n module = import_module(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\importlib\\__init__.py\", line 127, in import_module \r\n return _bootstrap._gcd_import(name[level:], package, level) \r\n File \"<frozen importlib._bootstrap>\", line 1006, in _gcd_import \r\n File \"<frozen importlib._bootstrap>\", line 983, in _find_and_load \r\n File \"<frozen importlib._bootstrap>\", line 965, in _find_and_load_unlocked \r\nModuleNotFoundError: No module named 'dj_rest_auth' \r\nCreate admin user. \r\nTraceback (most recent call last): \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\app\\manage.py\", line 15, in <module> \r\n execute_from_command_line(sys.argv) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 401, in execute_from_command_line \r\n utility.execute() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 377, in execute \r\n django.setup() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\__init__.py\", line 24, in setup \r\n apps.populate(settings.INSTALLED_APPS) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\registry.py\", line 91, in populate \r\n app_config = AppConfig.create(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\config.py\", line 90, in create \r\n module = import_module(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\importlib\\__init__.py\", line 127, in import_module \r\n return _bootstrap._gcd_import(name[level:], package, level) \r\n File \"<frozen importlib._bootstrap>\", line 1006, in _gcd_import \r\n File \"<frozen importlib._bootstrap>\", line 983, in _find_and_load \r\n File \"<frozen importlib._bootstrap>\", line 965, in _find_and_load_unlocked \r\nModuleNotFoundError: No module named 'dj_rest_auth' \r\nStarting server with port 8000. 
\r\nException in thread django-main-thread: \r\nTraceback (most recent call last): \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\threading.py\", line 926, in _bootstrap_inner \r\n self.run() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\threading.py\", line 870, in run \r\n self._target(*self._args, **self._kwargs) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 53, in wrapper \r\n fn(*args, **kwargs) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\commands\\runserver.py\", line 110, in inner_run \r\n autoreload.raise_last_exception() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 76, in raise_last_exception \r\n raise _exception[1] \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 357, in execute \r\n autoreload.check_errors(django.setup)() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 53, in wrapper \r\n fn(*args, **kwargs) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\__init__.py\", line 24, in setup \r\n apps.populate(settings.INSTALLED_APPS) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\registry.py\", line 91, in populate \r\n app_config = AppConfig.create(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\apps\\config.py\", line 90, in create \r\n module = import_module(entry) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\importlib\\__init__.py\", line 127, in import_module \r\n return _bootstrap._gcd_import(name[level:], package, level) \r\n File \"<frozen importlib._bootstrap>\", line 1006, in _gcd_import \r\n File \"<frozen importlib._bootstrap>\", line 983, in _find_and_load \r\n File \"<frozen importlib._bootstrap>\", line 965, in _find_and_load_unlocked \r\nModuleNotFoundError: No module named 'dj_rest_auth' \r\n \r\nTraceback (most recent call last): \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\app\\manage.py\", line 15, in <module> \r\n execute_from_command_line(sys.argv) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 401, in execute_from_command_line \r\n utility.execute() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\__init__.py\", line 395, in execute \r\n self.fetch_command(subcommand).run_from_argv(self.argv) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\base.py\", line 330, in run_from_argv \r\n self.execute(*args, **cmd_options) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\commands\\runserver.py\", line 61, in execute \r\n super().execute(*args, **options) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\base.py\", line 371, in execute \r\n output = self.handle(*args, **options) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\commands\\runserver.py\", line 96, in handle \r\n self.run(**options) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\core\\management\\commands\\runserver.py\", line 103, in run \r\n autoreload.run_with_reloader(self.inner_run, **options) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 618, in run_with_reloader \r\n start_django(reloader, main_func, *args, **kwargs) 
\r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 603, in start_django \r\n reloader.run(django_main_thread) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 318, in run \r\n self.run_loop() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 324, in run_loop \r\n next(ticker) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 364, in tick \r\n for filepath, mtime in self.snapshot_files(): \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 380, in snapshot_files \r\n for file in self.watched_files(): \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 278, in watched_files \r\n yield from iter_all_python_module_files() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 105, in iter_all_python_module_files \r\n return iter_modules_and_files(modules, frozenset(_error_files)) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\site-packages\\django\\utils\\autoreload.py\", line 141, in iter_modules_and_files \r\n resolved_path = path.resolve(strict=True).absolute() \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\pathlib.py\", line 1166, in resolve \r\n s = self._flavour.resolve(self, strict=strict) \r\n File \"d:\\anaconda3\\envs\\pytorch\\lib\\pathlib.py\", line 200, in resolve \r\n return self._ext_to_normal(_getfinalpathname(s)) \r\nOSError: [WinError 123] \u6587\u4ef6\u540d\u3001\u76ee\u5f55\u540d\u6216\u5377\u6807\u8bed\u6cd5\u4e0d\u6b63\u786e\u3002: '<frozen importlib._bootstrap>'\r\n```\r\nIt seemed to be something wrong with `File <frozen importlib._bootstrap>`, but I cannot find the position of it. 
\r\n\r\nYour Environment\r\n---------\r\n<!-- Include details of your environment.-->\r\n* Operating System: Windows\r\n* Python Version Used: 3.7.10\r\n* When you install doccano: 2021.03.30\r\n* How did you install doccano (Heroku button etc): pip install\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport io\nimport os\n\nfrom setuptools import find_packages, setup\n\nNAME = 'doccano'\nDESCRIPTION = 'doccano, text annotation tool for machine learning practitioners'\nURL = 'https://github.com/doccano/doccano'\nEMAIL = '[email protected]'\nAUTHOR = 'Hironsan'\nLICENSE = 'MIT'\n\nhere = os.path.abspath(os.path.dirname(__file__))\nwith io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = '\\n' + f.read()\n\nrequired = [\n 'apache-libcloud>=3.2.0',\n 'colour>=0.1.5',\n 'conllu>=4.2.2',\n 'dj-database-url>=0.5.0',\n 'django-cors-headers>=3.5.0',\n 'django-filter>=2.4.0',\n 'django-rest-polymorphic>=0.1.9',\n 'djangorestframework-csv>=2.1.0',\n 'djangorestframework-xml>=2.0.0',\n 'drf-yasg>=1.20.0',\n 'environs>=9.2.0',\n 'furl>=2.1.0',\n 'pyexcel>=0.6.6',\n 'pyexcel-xlsx>=0.6.0',\n 'python-jose>=3.2.0',\n 'seqeval>=1.2.2',\n 'social-auth-app-django>=4.0.0',\n 'whitenoise>=5.2.0',\n 'auto-labeling-pipeline>=0.1.12'\n]\n\nsetup(\n name=NAME,\n use_scm_version=True,\n setup_requires=['setuptools_scm'],\n description=DESCRIPTION,\n long_description=long_description,\n long_description_content_type='text/markdown',\n author=AUTHOR,\n author_email=EMAIL,\n url=URL,\n packages=find_packages(exclude=('*.tests',)),\n entry_points={\n 'console_scripts': [\n 'doccano = app.doccano.doccano:main'\n ]\n },\n install_requires=required,\n extras_require={\n 'postgresql': ['psycopg2-binary>=2.8.6'],\n 'mssql': ['django-mssql-backend>=2.8.1'],\n },\n include_package_data=True,\n license=LICENSE,\n classifiers=[\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy'\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\nimport io\nimport os\n\nfrom setuptools import find_packages, setup\n\nNAME = 'doccano'\nDESCRIPTION = 'doccano, text annotation tool for machine learning practitioners'\nURL = 'https://github.com/doccano/doccano'\nEMAIL = '[email protected]'\nAUTHOR = 'Hironsan'\nLICENSE = 'MIT'\n\nhere = os.path.abspath(os.path.dirname(__file__))\nwith io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = '\\n' + f.read()\n\nrequired = [\n 'apache-libcloud>=3.2.0',\n 'colour>=0.1.5',\n 'conllu>=4.2.2',\n 'dj-database-url>=0.5.0',\n 'django-cors-headers>=3.5.0',\n 'django-filter>=2.4.0',\n 'django-rest-polymorphic>=0.1.9',\n 'djangorestframework-csv>=2.1.0',\n 'djangorestframework-xml>=2.0.0',\n 'drf-yasg>=1.20.0',\n 'environs>=9.2.0',\n 'furl>=2.1.0',\n 'pyexcel>=0.6.6',\n 'pyexcel-xlsx>=0.6.0',\n 'python-jose>=3.2.0',\n 'seqeval>=1.2.2',\n 'social-auth-app-django>=4.0.0',\n 'whitenoise>=5.2.0',\n 'auto-labeling-pipeline>=0.1.12',\n 'dj-rest-auth>=2.1.4'\n]\n\nsetup(\n name=NAME,\n use_scm_version=True,\n setup_requires=['setuptools_scm'],\n description=DESCRIPTION,\n long_description=long_description,\n long_description_content_type='text/markdown',\n author=AUTHOR,\n 
author_email=EMAIL,\n url=URL,\n packages=find_packages(exclude=('*.tests',)),\n entry_points={\n 'console_scripts': [\n 'doccano = app.doccano.doccano:main'\n ]\n },\n install_requires=required,\n extras_require={\n 'postgresql': ['psycopg2-binary>=2.8.6'],\n 'mssql': ['django-mssql-backend>=2.8.1'],\n },\n include_package_data=True,\n license=LICENSE,\n classifiers=[\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3.8',\n 'Programming Language :: Python :: Implementation :: CPython',\n 'Programming Language :: Python :: Implementation :: PyPy'\n ],\n)\n", "path": "setup.py"}]} |
gh_patches_debug_141 | rasdani/github-patches | git_diff | dask__dask-4903 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
keyerror in dask.config.rename when working with distributed
Calling `dask-worker` with `dask==1.2.2` and `distributed==1.28.1`, I get the following error:
```
Traceback (most recent call last):
  File "/opt/conda/envs/worker/bin/dask-worker", line 7, in <module>
    from distributed.cli.dask_worker import go
  File "/opt/conda/envs/worker/lib/python3.6/site-packages/distributed/__init__.py", line 3, in <module>
    from . import config
  File "/opt/conda/envs/worker/lib/python3.6/site-packages/distributed/config.py", line 55, in <module>
    dask.config.rename(aliases)
  File "/opt/conda/envs/worker/lib/python3.6/site-packages/dask/config.py", line 451, in rename
    del config[k]  # TODO: support nested keys
KeyError: 'tick-maximum-delay'
```
It looks like this was introduced by #4742 when `dask.config.rename` was not properly adjusted, so that it can run into these KeyErrors. Current state of the function:
```python
def rename(aliases, config=config):
    """ Rename old keys to new keys
    This helps migrate older configuration versions over time
    """
    old = []
    new = {}
    for o, n in aliases.items():
        value = get(o, None, config=config)
        if value is not None:
            old.append(o)
            new[n] = value
    for k in old:
        del config[k]  # TODO: support nested keys
    set(new, config=config)
```
Suggest changing the 3rd to last line to `del config[canonical_name(k, config)]`. Will file an appropriate PR.
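
To make the failure mode concrete, here is a small sketch: it assumes the alias key `tick-maximum-delay` from the traceback above and uses the `get` and `canonical_name` helpers quoted from `dask/config.py` further down.

```python
import dask.config as dc

# A config where the value ended up under the '_' spelling (e.g. set via code or env vars).
config = {'tick_maximum_delay': '5s'}

# get() canonicalizes the key, so the hyphenated alias still finds the value ...
assert dc.get('tick-maximum-delay', None, config=config) == '5s'

# ... but rename() then executes `del config['tick-maximum-delay']`, which raises
# KeyError because the key actually stored is 'tick_maximum_delay'.

# The suggested fix resolves the stored spelling before deleting:
del config[dc.canonical_name('tick-maximum-delay', config)]
assert 'tick_maximum_delay' not in config
```

With `rename` changed accordingly (`del config[canonical_name(k, config)]`), the `dask-worker` startup path above should no longer hit the KeyError.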
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `dask/config.py`
Content:
```
1 from __future__ import print_function, division, absolute_import
2
3 import ast
4 import os
5 import sys
6 import threading
7 try:
8 import yaml
9 except ImportError:
10 yaml = None
11
12 from .compatibility import makedirs, builtins, Mapping
13
14
15 no_default = '__no_default__'
16
17
18 paths = [
19 os.getenv('DASK_ROOT_CONFIG', '/etc/dask'),
20 os.path.join(sys.prefix, 'etc', 'dask'),
21 os.path.join(os.path.expanduser('~'), '.config', 'dask'),
22 os.path.join(os.path.expanduser('~'), '.dask')
23 ]
24
25 if 'DASK_CONFIG' in os.environ:
26 PATH = os.environ['DASK_CONFIG']
27 paths.append(PATH)
28 else:
29 PATH = os.path.join(os.path.expanduser('~'), '.config', 'dask')
30
31
32 global_config = config = {}
33
34
35 config_lock = threading.Lock()
36
37
38 defaults = []
39
40
41 def canonical_name(k, config):
42 """Return the canonical name for a key.
43
44 Handles user choice of '-' or '_' conventions by standardizing on whichever
45 version was set first. If a key already exists in either hyphen or
46 underscore form, the existing version is the canonical name. If neither
47 version exists the original key is used as is.
48 """
49 try:
50 if k in config:
51 return k
52 except TypeError:
53 # config is not a mapping, return the same name as provided
54 return k
55
56 altk = k.replace('_', '-') if '_' in k else k.replace('-', '_')
57
58 if altk in config:
59 return altk
60
61 return k
62
63
64 def update(old, new, priority='new'):
65 """ Update a nested dictionary with values from another
66
67 This is like dict.update except that it smoothly merges nested values
68
69 This operates in-place and modifies old
70
71 Parameters
72 ----------
73 priority: string {'old', 'new'}
74 If new (default) then the new dictionary has preference.
75 Otherwise the old dictionary does.
76
77 Examples
78 --------
79 >>> a = {'x': 1, 'y': {'a': 2}}
80 >>> b = {'x': 2, 'y': {'b': 3}}
81 >>> update(a, b) # doctest: +SKIP
82 {'x': 2, 'y': {'a': 2, 'b': 3}}
83
84 >>> a = {'x': 1, 'y': {'a': 2}}
85 >>> b = {'x': 2, 'y': {'b': 3}}
86 >>> update(a, b, priority='old') # doctest: +SKIP
87 {'x': 1, 'y': {'a': 2, 'b': 3}}
88
89 See Also
90 --------
91 dask.config.merge
92 """
93 for k, v in new.items():
94 k = canonical_name(k, old)
95
96 if isinstance(v, Mapping):
97 if k not in old or old[k] is None:
98 old[k] = {}
99 update(old[k], v, priority=priority)
100 else:
101 if priority == 'new' or k not in old:
102 old[k] = v
103
104 return old
105
106
107 def merge(*dicts):
108 """ Update a sequence of nested dictionaries
109
110 This prefers the values in the latter dictionaries to those in the former
111
112 Examples
113 --------
114 >>> a = {'x': 1, 'y': {'a': 2}}
115 >>> b = {'y': {'b': 3}}
116 >>> merge(a, b) # doctest: +SKIP
117 {'x': 1, 'y': {'a': 2, 'b': 3}}
118
119 See Also
120 --------
121 dask.config.update
122 """
123 result = {}
124 for d in dicts:
125 update(result, d)
126 return result
127
128
129 def collect_yaml(paths=paths):
130 """ Collect configuration from yaml files
131
132 This searches through a list of paths, expands to find all yaml or json
133 files, and then parses each file.
134 """
135 # Find all paths
136 file_paths = []
137 for path in paths:
138 if os.path.exists(path):
139 if os.path.isdir(path):
140 try:
141 file_paths.extend(sorted([
142 os.path.join(path, p)
143 for p in os.listdir(path)
144 if os.path.splitext(p)[1].lower() in ('.json', '.yaml', '.yml')
145 ]))
146 except OSError:
147 # Ignore permission errors
148 pass
149 else:
150 file_paths.append(path)
151
152 configs = []
153
154 # Parse yaml files
155 for path in file_paths:
156 try:
157 with open(path) as f:
158 data = yaml.safe_load(f.read()) or {}
159 configs.append(data)
160 except (OSError, IOError):
161 # Ignore permission errors
162 pass
163
164 return configs
165
166
167 def collect_env(env=None):
168 """ Collect config from environment variables
169
170 This grabs environment variables of the form "DASK_FOO__BAR_BAZ=123" and
171 turns these into config variables of the form ``{"foo": {"bar-baz": 123}}``
172 It transforms the key and value in the following way:
173
174 - Lower-cases the key text
175 - Treats ``__`` (double-underscore) as nested access
176 - Calls ``ast.literal_eval`` on the value
177 """
178 if env is None:
179 env = os.environ
180 d = {}
181 for name, value in env.items():
182 if name.startswith('DASK_'):
183 varname = name[5:].lower().replace('__', '.')
184 try:
185 d[varname] = ast.literal_eval(value)
186 except (SyntaxError, ValueError):
187 d[varname] = value
188
189 result = {}
190 set(d, config=result)
191
192 return result
193
194
195 def ensure_file(
196 source,
197 destination=None,
198 comment=True):
199 """
200 Copy file to default location if it does not already exist
201
202 This tries to move a default configuration file to a default location if
203 if does not already exist. It also comments out that file by default.
204
205 This is to be used by downstream modules (like dask.distributed) that may
206 have default configuration files that they wish to include in the default
207 configuration path.
208
209 Parameters
210 ----------
211 source : string, filename
212 Source configuration file, typically within a source directory.
213 destination : string, directory
214 Destination directory. Configurable by ``DASK_CONFIG`` environment
215 variable, falling back to ~/.config/dask.
216 comment : bool, True by default
217 Whether or not to comment out the config file when copying.
218 """
219 if destination is None:
220 destination = PATH
221
222 # destination is a file and already exists, never overwrite
223 if os.path.isfile(destination):
224 return
225
226 # If destination is not an existing file, interpret as a directory,
227 # use the source basename as the filename
228 directory = destination
229 destination = os.path.join(directory, os.path.basename(source))
230
231 try:
232 if not os.path.exists(destination):
233 makedirs(directory, exist_ok=True)
234
235 # Atomically create destination. Parallel testing discovered
236 # a race condition where a process can be busy creating the
237 # destination while another process reads an empty config file.
238 tmp = '%s.tmp.%d' % (destination, os.getpid())
239 with open(source) as f:
240 lines = list(f)
241
242 if comment:
243 lines = ['# ' + line
244 if line.strip() and not line.startswith('#')
245 else line
246 for line in lines]
247
248 with open(tmp, 'w') as f:
249 f.write(''.join(lines))
250
251 try:
252 os.rename(tmp, destination)
253 except OSError:
254 os.remove(tmp)
255 except (IOError, OSError):
256 pass
257
258
259 class set(object):
260 """ Temporarily set configuration values within a context manager
261
262 Examples
263 --------
264 >>> import dask
265 >>> with dask.config.set({'foo': 123}):
266 ... pass
267
268 See Also
269 --------
270 dask.config.get
271 """
272 def __init__(self, arg=None, config=config, lock=config_lock, **kwargs):
273 if arg and not kwargs:
274 kwargs = arg
275
276 with lock:
277 self.config = config
278 self.old = {}
279
280 for key, value in kwargs.items():
281 self._assign(key.split('.'), value, config, old=self.old)
282
283 def __enter__(self):
284 return self.config
285
286 def __exit__(self, type, value, traceback):
287 for keys, value in self.old.items():
288 if value == '--delete--':
289 d = self.config
290 try:
291 while len(keys) > 1:
292 d = d[keys[0]]
293 keys = keys[1:]
294 del d[keys[0]]
295 except KeyError:
296 pass
297 else:
298 self._assign(keys, value, self.config)
299
300 @classmethod
301 def _assign(cls, keys, value, d, old=None, path=[]):
302 """ Assign value into a nested configuration dictionary
303
304 Optionally record the old values in old
305
306 Parameters
307 ----------
308 keys: Sequence[str]
309 The nested path of keys to assign the value, similar to toolz.put_in
310 value: object
311 d: dict
312 The part of the nested dictionary into which we want to assign the
313 value
314 old: dict, optional
315 If provided this will hold the old values
316 path: List[str]
317 Used internally to hold the path of old values
318 """
319 key = canonical_name(keys[0], d)
320 if len(keys) == 1:
321 if old is not None:
322 path_key = tuple(path + [key])
323 if key in d:
324 old[path_key] = d[key]
325 else:
326 old[path_key] = '--delete--'
327 d[key] = value
328 else:
329 if key not in d:
330 d[key] = {}
331 if old is not None:
332 old[tuple(path + [key])] = '--delete--'
333 old = None
334 cls._assign(keys[1:], value, d[key], path=path + [key], old=old)
335
336
337 def collect(paths=paths, env=None):
338 """
339 Collect configuration from paths and environment variables
340
341 Parameters
342 ----------
343 paths : List[str]
344 A list of paths to search for yaml config files
345
346 env : dict
347 The system environment variables
348
349 Returns
350 -------
351 config: dict
352
353 See Also
354 --------
355 dask.config.refresh: collect configuration and update into primary config
356 """
357 if env is None:
358 env = os.environ
359 configs = []
360
361 if yaml:
362 configs.extend(collect_yaml(paths=paths))
363
364 configs.append(collect_env(env=env))
365
366 return merge(*configs)
367
368
369 def refresh(config=config, defaults=defaults, **kwargs):
370 """
371 Update configuration by re-reading yaml files and env variables
372
373 This mutates the global dask.config.config, or the config parameter if
374 passed in.
375
376 This goes through the following stages:
377
378 1. Clearing out all old configuration
379 2. Updating from the stored defaults from downstream libraries
380 (see update_defaults)
381 3. Updating from yaml files and environment variables
382
383 Note that some functionality only checks configuration once at startup and
384 may not change behavior, even if configuration changes. It is recommended
385 to restart your python process if convenient to ensure that new
386 configuration changes take place.
387
388 See Also
389 --------
390 dask.config.collect: for parameters
391 dask.config.update_defaults
392 """
393 config.clear()
394
395 for d in defaults:
396 update(config, d, priority='old')
397
398 update(config, collect(**kwargs))
399
400
401 def get(key, default=no_default, config=config):
402 """
403 Get elements from global config
404
405 Use '.' for nested access
406
407 Examples
408 --------
409 >>> from dask import config
410 >>> config.get('foo') # doctest: +SKIP
411 {'x': 1, 'y': 2}
412
413 >>> config.get('foo.x') # doctest: +SKIP
414 1
415
416 >>> config.get('foo.x.y', default=123) # doctest: +SKIP
417 123
418
419 See Also
420 --------
421 dask.config.set
422 """
423 keys = key.split('.')
424 result = config
425 for k in keys:
426 k = canonical_name(k, result)
427 try:
428 result = result[k]
429 except (TypeError, IndexError, KeyError):
430 if default is not no_default:
431 return default
432 else:
433 raise
434 return result
435
436
437 def rename(aliases, config=config):
438 """ Rename old keys to new keys
439
440 This helps migrate older configuration versions over time
441 """
442 old = []
443 new = {}
444 for o, n in aliases.items():
445 value = get(o, None, config=config)
446 if value is not None:
447 old.append(o)
448 new[n] = value
449
450 for k in old:
451 del config[k] # TODO: support nested keys
452
453 set(new, config=config)
454
455
456 def update_defaults(new, config=config, defaults=defaults):
457 """ Add a new set of defaults to the configuration
458
459 It does two things:
460
461 1. Add the defaults to a global collection to be used by refresh later
462 2. Updates the global config with the new configuration
463 prioritizing older values over newer ones
464 """
465 defaults.append(new)
466 update(config, new, priority='old')
467
468
469 def expand_environment_variables(config):
470 ''' Expand environment variables in a nested config dictionary
471
472 This function will recursively search through any nested dictionaries
473 and/or lists.
474
475 Parameters
476 ----------
477 config : dict, iterable, or str
478 Input object to search for environment variables
479
480 Returns
481 -------
482 config : same type as input
483
484 Examples
485 --------
486 >>> expand_environment_variables({'x': [1, 2, '$USER']}) # doctest: +SKIP
487 {'x': [1, 2, 'my-username']}
488 '''
489 if isinstance(config, Mapping):
490 return {k: expand_environment_variables(v) for k, v in config.items()}
491 elif isinstance(config, str):
492 return os.path.expandvars(config)
493 elif isinstance(config, (list, tuple, builtins.set)):
494 return type(config)([expand_environment_variables(v) for v in config])
495 else:
496 return config
497
498
499 refresh()
500
501
502 if yaml:
503 fn = os.path.join(os.path.dirname(__file__), "dask.yaml")
504 ensure_file(source=fn)
505
506 with open(fn) as f:
507 _defaults = yaml.safe_load(f)
508
509 update_defaults(_defaults)
510 del fn, _defaults
511
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/dask/config.py b/dask/config.py
--- a/dask/config.py
+++ b/dask/config.py
@@ -448,7 +448,7 @@
new[n] = value
for k in old:
- del config[k] # TODO: support nested keys
+ del config[canonical_name(k, config)] # TODO: support nested keys
set(new, config=config)
| {"golden_diff": "diff --git a/dask/config.py b/dask/config.py\n--- a/dask/config.py\n+++ b/dask/config.py\n@@ -448,7 +448,7 @@\n new[n] = value\n \n for k in old:\n- del config[k] # TODO: support nested keys\n+ del config[canonical_name(k, config)] # TODO: support nested keys\n \n set(new, config=config)\n", "issue": "keyerror in dask.config.rename when working with distributed\ncalling `dask-worker` using `dask==1.2.2`, `distributed==1.28.1`, I get the following error:\r\n\r\n`Traceback (most recent call last): File \"/opt/conda/envs/worker/bin/dask-worker\", line 7, in <module> from distributed.cli.dask_worker import go File \"/opt/conda/envs/worker/lib/python3.6/site-packages/distributed/__init__.py\", line 3, in <module> from . import config File \"/opt/conda/envs/worker/lib/python3.6/site-packages/distributed/config.py\", line 55, in <module> dask.config.rename(aliases) File \"/opt/conda/envs/worker/lib/python3.6/site-packages/dask/config.py\", line 451, in rename del config[k] # TODO: support nested keys KeyError: 'tick-maximum-delay'`\r\n\r\nIt looks like this was introduced by #4742 when `dask.config.rename` was not properly adjusted, so that it can run into these KeyErrors. Current state of the function:\r\n\r\n```def rename(aliases, config=config):\r\n \"\"\" Rename old keys to new keys\r\n This helps migrate older configuration versions over time\r\n \"\"\"\r\n old = []\r\n new = {}\r\n for o, n in aliases.items():\r\n value = get(o, None, config=config)\r\n if value is not None:\r\n old.append(o)\r\n new[n] = value\r\n\r\n for k in old:\r\n del config[k] # TODO: support nested keys\r\n\r\n set(new, config=config)\r\n```\r\n\r\nSuggest changing the 3rd to last line to `del config[canonical_name(k, config)]`. Will file an appropriate PR.\n", "before_files": [{"content": "from __future__ import print_function, division, absolute_import\n\nimport ast\nimport os\nimport sys\nimport threading\ntry:\n import yaml\nexcept ImportError:\n yaml = None\n\nfrom .compatibility import makedirs, builtins, Mapping\n\n\nno_default = '__no_default__'\n\n\npaths = [\n os.getenv('DASK_ROOT_CONFIG', '/etc/dask'),\n os.path.join(sys.prefix, 'etc', 'dask'),\n os.path.join(os.path.expanduser('~'), '.config', 'dask'),\n os.path.join(os.path.expanduser('~'), '.dask')\n]\n\nif 'DASK_CONFIG' in os.environ:\n PATH = os.environ['DASK_CONFIG']\n paths.append(PATH)\nelse:\n PATH = os.path.join(os.path.expanduser('~'), '.config', 'dask')\n\n\nglobal_config = config = {}\n\n\nconfig_lock = threading.Lock()\n\n\ndefaults = []\n\n\ndef canonical_name(k, config):\n \"\"\"Return the canonical name for a key.\n\n Handles user choice of '-' or '_' conventions by standardizing on whichever\n version was set first. If a key already exists in either hyphen or\n underscore form, the existing version is the canonical name. 
If neither\n version exists the original key is used as is.\n \"\"\"\n try:\n if k in config:\n return k\n except TypeError:\n # config is not a mapping, return the same name as provided\n return k\n\n altk = k.replace('_', '-') if '_' in k else k.replace('-', '_')\n\n if altk in config:\n return altk\n\n return k\n\n\ndef update(old, new, priority='new'):\n \"\"\" Update a nested dictionary with values from another\n\n This is like dict.update except that it smoothly merges nested values\n\n This operates in-place and modifies old\n\n Parameters\n ----------\n priority: string {'old', 'new'}\n If new (default) then the new dictionary has preference.\n Otherwise the old dictionary does.\n\n Examples\n --------\n >>> a = {'x': 1, 'y': {'a': 2}}\n >>> b = {'x': 2, 'y': {'b': 3}}\n >>> update(a, b) # doctest: +SKIP\n {'x': 2, 'y': {'a': 2, 'b': 3}}\n\n >>> a = {'x': 1, 'y': {'a': 2}}\n >>> b = {'x': 2, 'y': {'b': 3}}\n >>> update(a, b, priority='old') # doctest: +SKIP\n {'x': 1, 'y': {'a': 2, 'b': 3}}\n\n See Also\n --------\n dask.config.merge\n \"\"\"\n for k, v in new.items():\n k = canonical_name(k, old)\n\n if isinstance(v, Mapping):\n if k not in old or old[k] is None:\n old[k] = {}\n update(old[k], v, priority=priority)\n else:\n if priority == 'new' or k not in old:\n old[k] = v\n\n return old\n\n\ndef merge(*dicts):\n \"\"\" Update a sequence of nested dictionaries\n\n This prefers the values in the latter dictionaries to those in the former\n\n Examples\n --------\n >>> a = {'x': 1, 'y': {'a': 2}}\n >>> b = {'y': {'b': 3}}\n >>> merge(a, b) # doctest: +SKIP\n {'x': 1, 'y': {'a': 2, 'b': 3}}\n\n See Also\n --------\n dask.config.update\n \"\"\"\n result = {}\n for d in dicts:\n update(result, d)\n return result\n\n\ndef collect_yaml(paths=paths):\n \"\"\" Collect configuration from yaml files\n\n This searches through a list of paths, expands to find all yaml or json\n files, and then parses each file.\n \"\"\"\n # Find all paths\n file_paths = []\n for path in paths:\n if os.path.exists(path):\n if os.path.isdir(path):\n try:\n file_paths.extend(sorted([\n os.path.join(path, p)\n for p in os.listdir(path)\n if os.path.splitext(p)[1].lower() in ('.json', '.yaml', '.yml')\n ]))\n except OSError:\n # Ignore permission errors\n pass\n else:\n file_paths.append(path)\n\n configs = []\n\n # Parse yaml files\n for path in file_paths:\n try:\n with open(path) as f:\n data = yaml.safe_load(f.read()) or {}\n configs.append(data)\n except (OSError, IOError):\n # Ignore permission errors\n pass\n\n return configs\n\n\ndef collect_env(env=None):\n \"\"\" Collect config from environment variables\n\n This grabs environment variables of the form \"DASK_FOO__BAR_BAZ=123\" and\n turns these into config variables of the form ``{\"foo\": {\"bar-baz\": 123}}``\n It transforms the key and value in the following way:\n\n - Lower-cases the key text\n - Treats ``__`` (double-underscore) as nested access\n - Calls ``ast.literal_eval`` on the value\n \"\"\"\n if env is None:\n env = os.environ\n d = {}\n for name, value in env.items():\n if name.startswith('DASK_'):\n varname = name[5:].lower().replace('__', '.')\n try:\n d[varname] = ast.literal_eval(value)\n except (SyntaxError, ValueError):\n d[varname] = value\n\n result = {}\n set(d, config=result)\n\n return result\n\n\ndef ensure_file(\n source,\n destination=None,\n comment=True):\n \"\"\"\n Copy file to default location if it does not already exist\n\n This tries to move a default configuration file to a default location if\n if does not already 
exist. It also comments out that file by default.\n\n This is to be used by downstream modules (like dask.distributed) that may\n have default configuration files that they wish to include in the default\n configuration path.\n\n Parameters\n ----------\n source : string, filename\n Source configuration file, typically within a source directory.\n destination : string, directory\n Destination directory. Configurable by ``DASK_CONFIG`` environment\n variable, falling back to ~/.config/dask.\n comment : bool, True by default\n Whether or not to comment out the config file when copying.\n \"\"\"\n if destination is None:\n destination = PATH\n\n # destination is a file and already exists, never overwrite\n if os.path.isfile(destination):\n return\n\n # If destination is not an existing file, interpret as a directory,\n # use the source basename as the filename\n directory = destination\n destination = os.path.join(directory, os.path.basename(source))\n\n try:\n if not os.path.exists(destination):\n makedirs(directory, exist_ok=True)\n\n # Atomically create destination. Parallel testing discovered\n # a race condition where a process can be busy creating the\n # destination while another process reads an empty config file.\n tmp = '%s.tmp.%d' % (destination, os.getpid())\n with open(source) as f:\n lines = list(f)\n\n if comment:\n lines = ['# ' + line\n if line.strip() and not line.startswith('#')\n else line\n for line in lines]\n\n with open(tmp, 'w') as f:\n f.write(''.join(lines))\n\n try:\n os.rename(tmp, destination)\n except OSError:\n os.remove(tmp)\n except (IOError, OSError):\n pass\n\n\nclass set(object):\n \"\"\" Temporarily set configuration values within a context manager\n\n Examples\n --------\n >>> import dask\n >>> with dask.config.set({'foo': 123}):\n ... 
pass\n\n See Also\n --------\n dask.config.get\n \"\"\"\n def __init__(self, arg=None, config=config, lock=config_lock, **kwargs):\n if arg and not kwargs:\n kwargs = arg\n\n with lock:\n self.config = config\n self.old = {}\n\n for key, value in kwargs.items():\n self._assign(key.split('.'), value, config, old=self.old)\n\n def __enter__(self):\n return self.config\n\n def __exit__(self, type, value, traceback):\n for keys, value in self.old.items():\n if value == '--delete--':\n d = self.config\n try:\n while len(keys) > 1:\n d = d[keys[0]]\n keys = keys[1:]\n del d[keys[0]]\n except KeyError:\n pass\n else:\n self._assign(keys, value, self.config)\n\n @classmethod\n def _assign(cls, keys, value, d, old=None, path=[]):\n \"\"\" Assign value into a nested configuration dictionary\n\n Optionally record the old values in old\n\n Parameters\n ----------\n keys: Sequence[str]\n The nested path of keys to assign the value, similar to toolz.put_in\n value: object\n d: dict\n The part of the nested dictionary into which we want to assign the\n value\n old: dict, optional\n If provided this will hold the old values\n path: List[str]\n Used internally to hold the path of old values\n \"\"\"\n key = canonical_name(keys[0], d)\n if len(keys) == 1:\n if old is not None:\n path_key = tuple(path + [key])\n if key in d:\n old[path_key] = d[key]\n else:\n old[path_key] = '--delete--'\n d[key] = value\n else:\n if key not in d:\n d[key] = {}\n if old is not None:\n old[tuple(path + [key])] = '--delete--'\n old = None\n cls._assign(keys[1:], value, d[key], path=path + [key], old=old)\n\n\ndef collect(paths=paths, env=None):\n \"\"\"\n Collect configuration from paths and environment variables\n\n Parameters\n ----------\n paths : List[str]\n A list of paths to search for yaml config files\n\n env : dict\n The system environment variables\n\n Returns\n -------\n config: dict\n\n See Also\n --------\n dask.config.refresh: collect configuration and update into primary config\n \"\"\"\n if env is None:\n env = os.environ\n configs = []\n\n if yaml:\n configs.extend(collect_yaml(paths=paths))\n\n configs.append(collect_env(env=env))\n\n return merge(*configs)\n\n\ndef refresh(config=config, defaults=defaults, **kwargs):\n \"\"\"\n Update configuration by re-reading yaml files and env variables\n\n This mutates the global dask.config.config, or the config parameter if\n passed in.\n\n This goes through the following stages:\n\n 1. Clearing out all old configuration\n 2. Updating from the stored defaults from downstream libraries\n (see update_defaults)\n 3. Updating from yaml files and environment variables\n\n Note that some functionality only checks configuration once at startup and\n may not change behavior, even if configuration changes. It is recommended\n to restart your python process if convenient to ensure that new\n configuration changes take place.\n\n See Also\n --------\n dask.config.collect: for parameters\n dask.config.update_defaults\n \"\"\"\n config.clear()\n\n for d in defaults:\n update(config, d, priority='old')\n\n update(config, collect(**kwargs))\n\n\ndef get(key, default=no_default, config=config):\n \"\"\"\n Get elements from global config\n\n Use '.' 
for nested access\n\n Examples\n --------\n >>> from dask import config\n >>> config.get('foo') # doctest: +SKIP\n {'x': 1, 'y': 2}\n\n >>> config.get('foo.x') # doctest: +SKIP\n 1\n\n >>> config.get('foo.x.y', default=123) # doctest: +SKIP\n 123\n\n See Also\n --------\n dask.config.set\n \"\"\"\n keys = key.split('.')\n result = config\n for k in keys:\n k = canonical_name(k, result)\n try:\n result = result[k]\n except (TypeError, IndexError, KeyError):\n if default is not no_default:\n return default\n else:\n raise\n return result\n\n\ndef rename(aliases, config=config):\n \"\"\" Rename old keys to new keys\n\n This helps migrate older configuration versions over time\n \"\"\"\n old = []\n new = {}\n for o, n in aliases.items():\n value = get(o, None, config=config)\n if value is not None:\n old.append(o)\n new[n] = value\n\n for k in old:\n del config[k] # TODO: support nested keys\n\n set(new, config=config)\n\n\ndef update_defaults(new, config=config, defaults=defaults):\n \"\"\" Add a new set of defaults to the configuration\n\n It does two things:\n\n 1. Add the defaults to a global collection to be used by refresh later\n 2. Updates the global config with the new configuration\n prioritizing older values over newer ones\n \"\"\"\n defaults.append(new)\n update(config, new, priority='old')\n\n\ndef expand_environment_variables(config):\n ''' Expand environment variables in a nested config dictionary\n\n This function will recursively search through any nested dictionaries\n and/or lists.\n\n Parameters\n ----------\n config : dict, iterable, or str\n Input object to search for environment variables\n\n Returns\n -------\n config : same type as input\n\n Examples\n --------\n >>> expand_environment_variables({'x': [1, 2, '$USER']}) # doctest: +SKIP\n {'x': [1, 2, 'my-username']}\n '''\n if isinstance(config, Mapping):\n return {k: expand_environment_variables(v) for k, v in config.items()}\n elif isinstance(config, str):\n return os.path.expandvars(config)\n elif isinstance(config, (list, tuple, builtins.set)):\n return type(config)([expand_environment_variables(v) for v in config])\n else:\n return config\n\n\nrefresh()\n\n\nif yaml:\n fn = os.path.join(os.path.dirname(__file__), \"dask.yaml\")\n ensure_file(source=fn)\n\n with open(fn) as f:\n _defaults = yaml.safe_load(f)\n\n update_defaults(_defaults)\n del fn, _defaults\n", "path": "dask/config.py"}], "after_files": [{"content": "from __future__ import print_function, division, absolute_import\n\nimport ast\nimport os\nimport sys\nimport threading\ntry:\n import yaml\nexcept ImportError:\n yaml = None\n\nfrom .compatibility import makedirs, builtins, Mapping\n\n\nno_default = '__no_default__'\n\n\npaths = [\n os.getenv('DASK_ROOT_CONFIG', '/etc/dask'),\n os.path.join(sys.prefix, 'etc', 'dask'),\n os.path.join(os.path.expanduser('~'), '.config', 'dask'),\n os.path.join(os.path.expanduser('~'), '.dask')\n]\n\nif 'DASK_CONFIG' in os.environ:\n PATH = os.environ['DASK_CONFIG']\n paths.append(PATH)\nelse:\n PATH = os.path.join(os.path.expanduser('~'), '.config', 'dask')\n\n\nglobal_config = config = {}\n\n\nconfig_lock = threading.Lock()\n\n\ndefaults = []\n\n\ndef canonical_name(k, config):\n \"\"\"Return the canonical name for a key.\n\n Handles user choice of '-' or '_' conventions by standardizing on whichever\n version was set first. If a key already exists in either hyphen or\n underscore form, the existing version is the canonical name. 
If neither\n version exists the original key is used as is.\n \"\"\"\n try:\n if k in config:\n return k\n except TypeError:\n # config is not a mapping, return the same name as provided\n return k\n\n altk = k.replace('_', '-') if '_' in k else k.replace('-', '_')\n\n if altk in config:\n return altk\n\n return k\n\n\ndef update(old, new, priority='new'):\n \"\"\" Update a nested dictionary with values from another\n\n This is like dict.update except that it smoothly merges nested values\n\n This operates in-place and modifies old\n\n Parameters\n ----------\n priority: string {'old', 'new'}\n If new (default) then the new dictionary has preference.\n Otherwise the old dictionary does.\n\n Examples\n --------\n >>> a = {'x': 1, 'y': {'a': 2}}\n >>> b = {'x': 2, 'y': {'b': 3}}\n >>> update(a, b) # doctest: +SKIP\n {'x': 2, 'y': {'a': 2, 'b': 3}}\n\n >>> a = {'x': 1, 'y': {'a': 2}}\n >>> b = {'x': 2, 'y': {'b': 3}}\n >>> update(a, b, priority='old') # doctest: +SKIP\n {'x': 1, 'y': {'a': 2, 'b': 3}}\n\n See Also\n --------\n dask.config.merge\n \"\"\"\n for k, v in new.items():\n k = canonical_name(k, old)\n\n if isinstance(v, Mapping):\n if k not in old or old[k] is None:\n old[k] = {}\n update(old[k], v, priority=priority)\n else:\n if priority == 'new' or k not in old:\n old[k] = v\n\n return old\n\n\ndef merge(*dicts):\n \"\"\" Update a sequence of nested dictionaries\n\n This prefers the values in the latter dictionaries to those in the former\n\n Examples\n --------\n >>> a = {'x': 1, 'y': {'a': 2}}\n >>> b = {'y': {'b': 3}}\n >>> merge(a, b) # doctest: +SKIP\n {'x': 1, 'y': {'a': 2, 'b': 3}}\n\n See Also\n --------\n dask.config.update\n \"\"\"\n result = {}\n for d in dicts:\n update(result, d)\n return result\n\n\ndef collect_yaml(paths=paths):\n \"\"\" Collect configuration from yaml files\n\n This searches through a list of paths, expands to find all yaml or json\n files, and then parses each file.\n \"\"\"\n # Find all paths\n file_paths = []\n for path in paths:\n if os.path.exists(path):\n if os.path.isdir(path):\n try:\n file_paths.extend(sorted([\n os.path.join(path, p)\n for p in os.listdir(path)\n if os.path.splitext(p)[1].lower() in ('.json', '.yaml', '.yml')\n ]))\n except OSError:\n # Ignore permission errors\n pass\n else:\n file_paths.append(path)\n\n configs = []\n\n # Parse yaml files\n for path in file_paths:\n try:\n with open(path) as f:\n data = yaml.safe_load(f.read()) or {}\n configs.append(data)\n except (OSError, IOError):\n # Ignore permission errors\n pass\n\n return configs\n\n\ndef collect_env(env=None):\n \"\"\" Collect config from environment variables\n\n This grabs environment variables of the form \"DASK_FOO__BAR_BAZ=123\" and\n turns these into config variables of the form ``{\"foo\": {\"bar-baz\": 123}}``\n It transforms the key and value in the following way:\n\n - Lower-cases the key text\n - Treats ``__`` (double-underscore) as nested access\n - Calls ``ast.literal_eval`` on the value\n \"\"\"\n if env is None:\n env = os.environ\n d = {}\n for name, value in env.items():\n if name.startswith('DASK_'):\n varname = name[5:].lower().replace('__', '.')\n try:\n d[varname] = ast.literal_eval(value)\n except (SyntaxError, ValueError):\n d[varname] = value\n\n result = {}\n set(d, config=result)\n\n return result\n\n\ndef ensure_file(\n source,\n destination=None,\n comment=True):\n \"\"\"\n Copy file to default location if it does not already exist\n\n This tries to move a default configuration file to a default location if\n if does not already 
exist. It also comments out that file by default.\n\n This is to be used by downstream modules (like dask.distributed) that may\n have default configuration files that they wish to include in the default\n configuration path.\n\n Parameters\n ----------\n source : string, filename\n Source configuration file, typically within a source directory.\n destination : string, directory\n Destination directory. Configurable by ``DASK_CONFIG`` environment\n variable, falling back to ~/.config/dask.\n comment : bool, True by default\n Whether or not to comment out the config file when copying.\n \"\"\"\n if destination is None:\n destination = PATH\n\n # destination is a file and already exists, never overwrite\n if os.path.isfile(destination):\n return\n\n # If destination is not an existing file, interpret as a directory,\n # use the source basename as the filename\n directory = destination\n destination = os.path.join(directory, os.path.basename(source))\n\n try:\n if not os.path.exists(destination):\n makedirs(directory, exist_ok=True)\n\n # Atomically create destination. Parallel testing discovered\n # a race condition where a process can be busy creating the\n # destination while another process reads an empty config file.\n tmp = '%s.tmp.%d' % (destination, os.getpid())\n with open(source) as f:\n lines = list(f)\n\n if comment:\n lines = ['# ' + line\n if line.strip() and not line.startswith('#')\n else line\n for line in lines]\n\n with open(tmp, 'w') as f:\n f.write(''.join(lines))\n\n try:\n os.rename(tmp, destination)\n except OSError:\n os.remove(tmp)\n except (IOError, OSError):\n pass\n\n\nclass set(object):\n \"\"\" Temporarily set configuration values within a context manager\n\n Examples\n --------\n >>> import dask\n >>> with dask.config.set({'foo': 123}):\n ... 
pass\n\n See Also\n --------\n dask.config.get\n \"\"\"\n def __init__(self, arg=None, config=config, lock=config_lock, **kwargs):\n if arg and not kwargs:\n kwargs = arg\n\n with lock:\n self.config = config\n self.old = {}\n\n for key, value in kwargs.items():\n self._assign(key.split('.'), value, config, old=self.old)\n\n def __enter__(self):\n return self.config\n\n def __exit__(self, type, value, traceback):\n for keys, value in self.old.items():\n if value == '--delete--':\n d = self.config\n try:\n while len(keys) > 1:\n d = d[keys[0]]\n keys = keys[1:]\n del d[keys[0]]\n except KeyError:\n pass\n else:\n self._assign(keys, value, self.config)\n\n @classmethod\n def _assign(cls, keys, value, d, old=None, path=[]):\n \"\"\" Assign value into a nested configuration dictionary\n\n Optionally record the old values in old\n\n Parameters\n ----------\n keys: Sequence[str]\n The nested path of keys to assign the value, similar to toolz.put_in\n value: object\n d: dict\n The part of the nested dictionary into which we want to assign the\n value\n old: dict, optional\n If provided this will hold the old values\n path: List[str]\n Used internally to hold the path of old values\n \"\"\"\n key = canonical_name(keys[0], d)\n if len(keys) == 1:\n if old is not None:\n path_key = tuple(path + [key])\n if key in d:\n old[path_key] = d[key]\n else:\n old[path_key] = '--delete--'\n d[key] = value\n else:\n if key not in d:\n d[key] = {}\n if old is not None:\n old[tuple(path + [key])] = '--delete--'\n old = None\n cls._assign(keys[1:], value, d[key], path=path + [key], old=old)\n\n\ndef collect(paths=paths, env=None):\n \"\"\"\n Collect configuration from paths and environment variables\n\n Parameters\n ----------\n paths : List[str]\n A list of paths to search for yaml config files\n\n env : dict\n The system environment variables\n\n Returns\n -------\n config: dict\n\n See Also\n --------\n dask.config.refresh: collect configuration and update into primary config\n \"\"\"\n if env is None:\n env = os.environ\n configs = []\n\n if yaml:\n configs.extend(collect_yaml(paths=paths))\n\n configs.append(collect_env(env=env))\n\n return merge(*configs)\n\n\ndef refresh(config=config, defaults=defaults, **kwargs):\n \"\"\"\n Update configuration by re-reading yaml files and env variables\n\n This mutates the global dask.config.config, or the config parameter if\n passed in.\n\n This goes through the following stages:\n\n 1. Clearing out all old configuration\n 2. Updating from the stored defaults from downstream libraries\n (see update_defaults)\n 3. Updating from yaml files and environment variables\n\n Note that some functionality only checks configuration once at startup and\n may not change behavior, even if configuration changes. It is recommended\n to restart your python process if convenient to ensure that new\n configuration changes take place.\n\n See Also\n --------\n dask.config.collect: for parameters\n dask.config.update_defaults\n \"\"\"\n config.clear()\n\n for d in defaults:\n update(config, d, priority='old')\n\n update(config, collect(**kwargs))\n\n\ndef get(key, default=no_default, config=config):\n \"\"\"\n Get elements from global config\n\n Use '.' 
for nested access\n\n Examples\n --------\n >>> from dask import config\n >>> config.get('foo') # doctest: +SKIP\n {'x': 1, 'y': 2}\n\n >>> config.get('foo.x') # doctest: +SKIP\n 1\n\n >>> config.get('foo.x.y', default=123) # doctest: +SKIP\n 123\n\n See Also\n --------\n dask.config.set\n \"\"\"\n keys = key.split('.')\n result = config\n for k in keys:\n k = canonical_name(k, result)\n try:\n result = result[k]\n except (TypeError, IndexError, KeyError):\n if default is not no_default:\n return default\n else:\n raise\n return result\n\n\ndef rename(aliases, config=config):\n \"\"\" Rename old keys to new keys\n\n This helps migrate older configuration versions over time\n \"\"\"\n old = []\n new = {}\n for o, n in aliases.items():\n value = get(o, None, config=config)\n if value is not None:\n old.append(o)\n new[n] = value\n\n for k in old:\n del config[canonical_name(k, config)] # TODO: support nested keys\n\n set(new, config=config)\n\n\ndef update_defaults(new, config=config, defaults=defaults):\n \"\"\" Add a new set of defaults to the configuration\n\n It does two things:\n\n 1. Add the defaults to a global collection to be used by refresh later\n 2. Updates the global config with the new configuration\n prioritizing older values over newer ones\n \"\"\"\n defaults.append(new)\n update(config, new, priority='old')\n\n\ndef expand_environment_variables(config):\n ''' Expand environment variables in a nested config dictionary\n\n This function will recursively search through any nested dictionaries\n and/or lists.\n\n Parameters\n ----------\n config : dict, iterable, or str\n Input object to search for environment variables\n\n Returns\n -------\n config : same type as input\n\n Examples\n --------\n >>> expand_environment_variables({'x': [1, 2, '$USER']}) # doctest: +SKIP\n {'x': [1, 2, 'my-username']}\n '''\n if isinstance(config, Mapping):\n return {k: expand_environment_variables(v) for k, v in config.items()}\n elif isinstance(config, str):\n return os.path.expandvars(config)\n elif isinstance(config, (list, tuple, builtins.set)):\n return type(config)([expand_environment_variables(v) for v in config])\n else:\n return config\n\n\nrefresh()\n\n\nif yaml:\n fn = os.path.join(os.path.dirname(__file__), \"dask.yaml\")\n ensure_file(source=fn)\n\n with open(fn) as f:\n _defaults = yaml.safe_load(f)\n\n update_defaults(_defaults)\n del fn, _defaults\n", "path": "dask/config.py"}]} |
gh_patches_debug_142 | rasdani/github-patches | git_diff | statsmodels__statsmodels-1001 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
BLD data_files for stats.libqsturng
originally reported in #831
I did not manage to get the test and data files to install in libqsturng.
I worked around this for the required test data file in PR #905
However copyright and one txt file are in the sdist but are missing in the installed package
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 """
2 Much of the build system code was adapted from work done by the pandas
3 developers [1], which was in turn based on work done in pyzmq [2] and lxml [3].
4
5 [1] http://pandas.pydata.org
6 [2] http://zeromq.github.io/pyzmq/
7 [3] http://lxml.de/
8 """
9
10 import os
11 from os.path import splitext, basename, join as pjoin
12 import sys
13 import subprocess
14 import re
15
16 # may need to work around setuptools bug by providing a fake Pyrex
17 try:
18 import Cython
19 sys.path.insert(0, pjoin(os.path.dirname(__file__), "fake_pyrex"))
20 except ImportError:
21 pass
22
23 # try bootstrapping setuptools if it doesn't exist
24 try:
25 import pkg_resources
26 try:
27 pkg_resources.require("setuptools>=0.6c5")
28 except pkg_resources.VersionConflict:
29 from ez_setup import use_setuptools
30 use_setuptools(version="0.6c5")
31 from setuptools import setup, Command, find_packages
32 _have_setuptools = True
33 except ImportError:
34 # no setuptools installed
35 from distutils.core import setup, Command
36 _have_setuptools = False
37
38 setuptools_kwargs = {}
39 if sys.version_info[0] >= 3:
40 setuptools_kwargs = {'use_2to3': True,
41 'zip_safe': False,
42 #'use_2to3_exclude_fixers': [],
43 }
44 if not _have_setuptools:
45 sys.exit("need setuptools/distribute for Py3k"
46 "\n$ pip install distribute")
47
48 else:
49 setuptools_kwargs = {
50 'install_requires': [],
51 'zip_safe': False,
52 }
53
54 if not _have_setuptools:
55 setuptools_kwargs = {}
56
57 curdir = os.path.abspath(os.path.dirname(__file__))
58 README = open(pjoin(curdir, "README.txt")).read()
59 CHANGES = open(pjoin(curdir, "CHANGES.txt")).read()
60
61 DISTNAME = 'statsmodels'
62 DESCRIPTION = 'Statistical computations and models for use with SciPy'
63 LONG_DESCRIPTION = README + '\n\n' + CHANGES
64 MAINTAINER = 'Skipper Seabold, Josef Perktold'
65 MAINTAINER_EMAIL ='[email protected]'
66 URL = 'http://statsmodels.sourceforge.net/'
67 LICENSE = 'BSD License'
68 DOWNLOAD_URL = ''
69
70 from distutils.extension import Extension
71 from distutils.command.build import build
72 from distutils.command.sdist import sdist
73 from distutils.command.build_ext import build_ext as _build_ext
74
75 try:
76 from Cython.Distutils import build_ext as _build_ext
77 # from Cython.Distutils import Extension # to get pyrex debugging symbols
78 cython = True
79 except ImportError:
80 cython = False
81
82
83 class build_ext(_build_ext):
84 def build_extensions(self):
85 numpy_incl = pkg_resources.resource_filename('numpy', 'core/include')
86
87 for ext in self.extensions:
88 if (hasattr(ext, 'include_dirs') and
89 not numpy_incl in ext.include_dirs):
90 ext.include_dirs.append(numpy_incl)
91 _build_ext.build_extensions(self)
92
93
94 def strip_rc(version):
95 return re.sub(r"rc\d+$", "", version)
96
97 def check_dependency_versions(min_versions):
98 """
99 Don't let setuptools do this. It's rude.
100
101 Just makes sure it can import the packages and if not, stops the build
102 process.
103 """
104 from distutils.version import StrictVersion
105 try:
106 from numpy.version import short_version as npversion
107 except ImportError:
108 raise ImportError("statsmodels requires numpy")
109 try:
110 from scipy.version import short_version as spversion
111 except ImportError:
112 try: # scipy 0.7.0
113 from scipy.version import version as spversion
114 except ImportError:
115 raise ImportError("statsmodels requires scipy")
116 try:
117 from pandas.version import version as pversion
118 except ImportError:
119 raise ImportError("statsmodels requires pandas")
120 try:
121 from patsy import __version__ as patsy_version
122 except ImportError:
123 raise ImportError("statsmodels requires patsy. http://patsy.readthedocs.org")
124
125 try:
126 assert StrictVersion(strip_rc(npversion)) >= min_versions['numpy']
127 except AssertionError:
128 raise ImportError("Numpy version is %s. Requires >= %s" %
129 (npversion, min_versions['numpy']))
130 try:
131 assert StrictVersion(strip_rc(spversion)) >= min_versions['scipy']
132 except AssertionError:
133 raise ImportError("Scipy version is %s. Requires >= %s" %
134 (spversion, min_versions['scipy']))
135 try:
136 #NOTE: not sure how robust this regex is but it at least allows
137 # double digit version numbering
138 pversion = re.match("\d*\.\d*\.\d*", pversion).group()
139 assert StrictVersion(pversion) >= min_versions['pandas']
140 except AssertionError:
141 raise ImportError("Pandas version is %s. Requires >= %s" %
142 (pversion, min_versions['pandas']))
143
144 try: # patsy dev looks like 0.1.0+dev
145 pversion = re.match("\d*\.\d*\.\d*", patsy_version).group()
146 assert StrictVersion(pversion) >= min_versions['patsy']
147 except AssertionError:
148 raise ImportError("Patsy version is %s. Requires >= %s" %
149 (pversion, min_versions["patsy"]))
150
151
152 MAJ = 0
153 MIN = 5
154 REV = 0
155 ISRELEASED = False
156 VERSION = '%d.%d.%d' % (MAJ,MIN,REV)
157
158 classifiers = [ 'Development Status :: 4 - Beta',
159 'Environment :: Console',
160 'Programming Language :: Python :: 2.5',
161 'Programming Language :: Python :: 2.6',
162 'Programming Language :: Python :: 2.7',
163 'Programming Language :: Python :: 3.2',
164 'Operating System :: OS Independent',
165 'Intended Audience :: Developers',
166 'Intended Audience :: Science/Research',
167 'License :: OSI Approved :: BSD License',
168 'Topic :: Scientific/Engineering']
169
170 # Return the git revision as a string
171 def git_version():
172 def _minimal_ext_cmd(cmd):
173 # construct minimal environment
174 env = {}
175 for k in ['SYSTEMROOT', 'PATH']:
176 v = os.environ.get(k)
177 if v is not None:
178 env[k] = v
179 # LANGUAGE is used on win32
180 env['LANGUAGE'] = 'C'
181 env['LANG'] = 'C'
182 env['LC_ALL'] = 'C'
183 out = subprocess.Popen(" ".join(cmd), stdout = subprocess.PIPE, env=env,
184 shell=True).communicate()[0]
185 return out
186
187 try:
188 out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])
189 GIT_REVISION = out.strip().decode('ascii')
190 except OSError:
191 GIT_REVISION = "Unknown"
192
193 return GIT_REVISION
194
195 def write_version_py(filename=pjoin(curdir, 'statsmodels/version.py')):
196 cnt = "\n".join(["",
197 "# THIS FILE IS GENERATED FROM SETUP.PY",
198 "short_version = '%(version)s'",
199 "version = '%(version)s'",
200 "full_version = '%(full_version)s'",
201 "git_revision = '%(git_revision)s'",
202 "release = %(isrelease)s", "",
203 "if not release:",
204 " version = full_version"])
205 # Adding the git rev number needs to be done inside write_version_py(),
206 # otherwise the import of numpy.version messes up the build under Python 3.
207 FULLVERSION = VERSION
208 dowrite = True
209 if os.path.exists('.git'):
210 GIT_REVISION = git_version()
211 elif os.path.exists(filename):
212 # must be a source distribution, use existing version file
213 try:
214 from statsmodels.version import git_revision as GIT_REVISION
215 except ImportError:
216 dowrite = False
217 else:
218 GIT_REVISION = "Unknown"
219
220 if not ISRELEASED:
221 FULLVERSION += '.dev-' + GIT_REVISION[:7]
222
223
224 if dowrite:
225 try:
226 a = open(filename, 'w')
227 a.write(cnt % {'version': VERSION,
228 'full_version' : FULLVERSION,
229 'git_revision' : GIT_REVISION,
230 'isrelease': str(ISRELEASED)})
231 finally:
232 a.close()
233
234 try:
235 from distutils.command.build_py import build_py_2to3 as build_py
236 except ImportError:
237 # 2.x
238 from distutils.command.build_py import build_py
239
240
241 class CleanCommand(Command):
242 """Custom distutils command to clean the .so and .pyc files."""
243
244 user_options = [("all", "a", "")]
245
246 def initialize_options(self):
247 self.all = True
248 self._clean_me = []
249 self._clean_trees = []
250 self._clean_exclude = ["bspline_ext.c",
251 "bspline_impl.c"]
252
253 for root, dirs, files in list(os.walk('statsmodels')):
254 for f in files:
255 if f in self._clean_exclude:
256 continue
257 if os.path.splitext(f)[-1] in ('.pyc', '.so', '.o',
258 '.pyo',
259 '.pyd', '.c', '.orig'):
260 self._clean_me.append(pjoin(root, f))
261 for d in dirs:
262 if d == '__pycache__':
263 self._clean_trees.append(pjoin(root, d))
264
265 for d in ('build',):
266 if os.path.exists(d):
267 self._clean_trees.append(d)
268
269 def finalize_options(self):
270 pass
271
272 def run(self):
273 for clean_me in self._clean_me:
274 try:
275 os.unlink(clean_me)
276 except Exception:
277 pass
278 for clean_tree in self._clean_trees:
279 try:
280 import shutil
281 shutil.rmtree(clean_tree)
282 except Exception:
283 pass
284
285
286 class CheckSDist(sdist):
287 """Custom sdist that ensures Cython has compiled all pyx files to c."""
288
289 _pyxfiles = ['statsmodels/nonparametric/linbin.pyx',
290 'statsmodels/nonparametric/_smoothers_lowess.pyx',
291 'statsmodels/tsa/kalmanf/kalman_loglike.pyx']
292
293 def initialize_options(self):
294 sdist.initialize_options(self)
295
296 '''
297 self._pyxfiles = []
298 for root, dirs, files in os.walk('statsmodels'):
299 for f in files:
300 if f.endswith('.pyx'):
301 self._pyxfiles.append(pjoin(root, f))
302 '''
303
304 def run(self):
305 if 'cython' in cmdclass:
306 self.run_command('cython')
307 else:
308 for pyxfile in self._pyxfiles:
309 cfile = pyxfile[:-3] + 'c'
310 msg = "C-source file '%s' not found." % (cfile) +\
311 " Run 'setup.py cython' before sdist."
312 assert os.path.isfile(cfile), msg
313 sdist.run(self)
314
315
316 class CheckingBuildExt(build_ext):
317 """Subclass build_ext to get clearer report if Cython is necessary."""
318
319 def check_cython_extensions(self, extensions):
320 for ext in extensions:
321 for src in ext.sources:
322 if not os.path.exists(src):
323 raise Exception("""Cython-generated file '%s' not found.
324 Cython is required to compile statsmodels from a development branch.
325 Please install Cython or download a source release of statsmodels.
326 """ % src)
327
328 def build_extensions(self):
329 self.check_cython_extensions(self.extensions)
330 build_ext.build_extensions(self)
331
332
333 class CythonCommand(build_ext):
334 """Custom distutils command subclassed from Cython.Distutils.build_ext
335 to compile pyx->c, and stop there. All this does is override the
336 C-compile method build_extension() with a no-op."""
337 def build_extension(self, ext):
338 pass
339
340
341 class DummyBuildSrc(Command):
342 """ numpy's build_src command interferes with Cython's build_ext.
343 """
344 user_options = []
345
346 def initialize_options(self):
347 self.py_modules_dict = {}
348
349 def finalize_options(self):
350 pass
351
352 def run(self):
353 pass
354
355
356 cmdclass = {'clean': CleanCommand,
357 'build': build,
358 'sdist': CheckSDist}
359
360 if cython:
361 suffix = ".pyx"
362 cmdclass["build_ext"] = CheckingBuildExt
363 cmdclass["cython"] = CythonCommand
364 else:
365 suffix = ".c"
366 cmdclass["build_src"] = DummyBuildSrc
367 cmdclass["build_ext"] = CheckingBuildExt
368
369 lib_depends = []
370
371 def srcpath(name=None, suffix='.pyx', subdir='src'):
372 return pjoin('statsmodels', subdir, name + suffix)
373
374 if suffix == ".pyx":
375 lib_depends = [srcpath(f, suffix=".pyx") for f in lib_depends]
376 else:
377 lib_depends = []
378
379 common_include = []
380
381 # some linux distros require it
382 libraries = ['m'] if 'win32' not in sys.platform else []
383
384 ext_data = dict(
385 kalman_loglike = {"pyxfile" : "tsa/kalmanf/kalman_loglike",
386 "depends" : [],
387 "sources" : []},
388
389 linbin = {"pyxfile" : "nonparametric/linbin",
390 "depends" : [],
391 "sources" : []},
392 _smoothers_lowess = {"pyxfile" : "nonparametric/_smoothers_lowess",
393 "depends" : [],
394 "sources" : []}
395 )
396
397 def pxd(name):
398 return os.path.abspath(pjoin('pandas', name + '.pxd'))
399
400 extensions = []
401 for name, data in ext_data.items():
402 sources = [srcpath(data['pyxfile'], suffix=suffix, subdir='')]
403 pxds = [pxd(x) for x in data.get('pxdfiles', [])]
404 destdir = ".".join(os.path.dirname(data["pyxfile"]).split("/"))
405 if suffix == '.pyx' and pxds:
406 sources.extend(pxds)
407
408 sources.extend(data.get('sources', []))
409
410 include = data.get('include', common_include)
411
412 obj = Extension('statsmodels.%s.%s' % (destdir, name),
413 sources=sources,
414 depends=data.get('depends', []),
415 include_dirs=include)
416
417 extensions.append(obj)
418
419 if suffix == '.pyx' and 'setuptools' in sys.modules:
420 # undo dumb setuptools bug clobbering .pyx sources back to .c
421 for ext in extensions:
422 if ext.sources[0].endswith('.c'):
423 root, _ = os.path.splitext(ext.sources[0])
424 ext.sources[0] = root + suffix
425
426 if _have_setuptools:
427 setuptools_kwargs["test_suite"] = "nose.collector"
428
429 try:
430 from os.path import relpath
431 except ImportError: # python 2.5
432
433 def relpath(path, start=os.curdir):
434 """Return a relative version of a path"""
435 if not path:
436 raise ValueError("no path specified")
437 start_list = os.path.abspath(start).split(os.path.sep)
438 path_list = os.path.abspath(path).split(os.path.sep)
439 # Work out how much of the filepath is shared by start and path.
440 i = len(os.path.commonprefix([start_list, path_list]))
441 rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
442 if not rel_list:
443 return os.curdir
444 return pjoin(*rel_list)
445
446 def get_data_files():
447 sep = os.path.sep
448 # install the datasets
449 data_files = {}
450 root = pjoin(curdir, "statsmodels", "datasets")
451 for i in os.listdir(root):
452 if i is "tests":
453 continue
454 path = pjoin(root, i)
455 if os.path.isdir(path):
456 data_files.update({relpath(path).replace(sep, ".") : ["*.csv",
457 "*.dta"]})
458 # add all the tests and results files
459 for r, ds, fs in os.walk(pjoin(curdir, "statsmodels")):
460 if r.endswith('results') and 'sandbox' not in r:
461 data_files.update({relpath(r).replace(sep, ".") : ["*.csv",
462 "*.txt"]})
463
464 return data_files
465
466 if __name__ == "__main__":
467 if os.path.exists('MANIFEST'):
468 os.unlink('MANIFEST')
469
470 min_versions = {
471 'numpy' : '1.4.0',
472 'scipy' : '0.7.0',
473 'pandas' : '0.7.1',
474 'patsy' : '0.1.0',
475 }
476 if sys.version_info[0] == 3 and sys.version_info[1] >= 3:
477 # 3.3 needs numpy 1.7+
478 min_versions.update({"numpy" : "1.7.0b2"})
479
480 check_dependency_versions(min_versions)
481 write_version_py()
482
483 # this adds *.csv and *.dta files in datasets folders
484 # and *.csv and *.txt files in test/results folders
485 package_data = get_data_files()
486 packages = find_packages()
487 packages.append("statsmodels.tsa.vector_ar.data")
488
489 package_data["statsmodels.datasets.tests"].append("*.zip")
490 package_data["statsmodels.iolib.tests.results"].append("*.dta")
491 package_data["statsmodels.stats.tests.results"].append("*.json")
492 package_data["statsmodels.tsa.vector_ar.tests.results"].append("*.npz")
493 # data files that don't follow the tests/results pattern. should fix.
494 package_data.update({"statsmodels.stats.tests" : ["*.txt"]})
495 # the next two are in the sdist, but I don't manage to get them installed
496 package_data.update({"statsmodels.stats.libqstrung" :
497 ["*.r", "*.txt", "*.dat"]})
498 package_data.update({"statsmodels.stats.libqstrung.tests" :
499 ["*.csv", "*.dat"]})
500 package_data.update({"statsmodels.tsa.vector_ar.data" : ["*.dat"]})
501 package_data.update({"statsmodels.tsa.vector_ar.data" : ["*.dat"]})
502 # Why are we installing this stuff?
503
504 #TODO: deal with this. Not sure if it ever worked for bdists
505 #('docs/build/htmlhelp/statsmodelsdoc.chm',
506 # 'statsmodels/statsmodelsdoc.chm')
507
508 setup(name = DISTNAME,
509 version = VERSION,
510 maintainer = MAINTAINER,
511 ext_modules = extensions,
512 maintainer_email = MAINTAINER_EMAIL,
513 description = DESCRIPTION,
514 license = LICENSE,
515 url = URL,
516 download_url = DOWNLOAD_URL,
517 long_description = LONG_DESCRIPTION,
518 classifiers = classifiers,
519 platforms = 'any',
520 cmdclass = cmdclass,
521 packages = packages,
522 package_data = package_data,
523 **setuptools_kwargs)
524
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -520,4 +520,5 @@
cmdclass = cmdclass,
packages = packages,
package_data = package_data,
+ include_package_data=True,
**setuptools_kwargs)
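
For context on why the one-line fix above works: `include_package_data=True` tells setuptools to also install package data files declared in `MANIFEST.in` (or picked up from version control), instead of relying only on the `package_data` mapping built by `get_data_files()`. A minimal sketch of the mechanism, using a hypothetical project layout rather than the statsmodels tree:

```python
# Hypothetical minimal project, for illustration only (not the statsmodels tree).
#
# MANIFEST.in would contain a line such as:
#   recursive-include example/datasets *.csv *.dta
#
# With include_package_data=True, files matched by MANIFEST.in that live inside
# a package directory are installed along with the package, in addition to the
# patterns listed explicitly in package_data.
from setuptools import setup, find_packages

setup(
    name="example",
    version="0.1",
    packages=find_packages(),
    package_data={"example.datasets": ["*.csv", "*.dta"]},
    include_package_data=True,
)
```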
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -520,4 +520,5 @@\n cmdclass = cmdclass,\n packages = packages,\n package_data = package_data,\n+ include_package_data=True,\n **setuptools_kwargs)\n", "issue": "BLD data_files for stats.libqsturng\noriginally reported in #831\n\nI did not manage to get the test and data files to install in libqsturng.\n\nI worked around this for the required test data file in PR #905\n\nHowever copyright and one txt file are in the sdist but are missing in the installed package\n\n", "before_files": [{"content": "\"\"\"\nMuch of the build system code was adapted from work done by the pandas\ndevelopers [1], which was in turn based on work done in pyzmq [2] and lxml [3].\n\n[1] http://pandas.pydata.org\n[2] http://zeromq.github.io/pyzmq/\n[3] http://lxml.de/\n\"\"\"\n\nimport os\nfrom os.path import splitext, basename, join as pjoin\nimport sys\nimport subprocess\nimport re\n\n# may need to work around setuptools bug by providing a fake Pyrex\ntry:\n import Cython\n sys.path.insert(0, pjoin(os.path.dirname(__file__), \"fake_pyrex\"))\nexcept ImportError:\n pass\n\n# try bootstrapping setuptools if it doesn't exist\ntry:\n import pkg_resources\n try:\n pkg_resources.require(\"setuptools>=0.6c5\")\n except pkg_resources.VersionConflict:\n from ez_setup import use_setuptools\n use_setuptools(version=\"0.6c5\")\n from setuptools import setup, Command, find_packages\n _have_setuptools = True\nexcept ImportError:\n # no setuptools installed\n from distutils.core import setup, Command\n _have_setuptools = False\n\nsetuptools_kwargs = {}\nif sys.version_info[0] >= 3:\n setuptools_kwargs = {'use_2to3': True,\n 'zip_safe': False,\n #'use_2to3_exclude_fixers': [],\n }\n if not _have_setuptools:\n sys.exit(\"need setuptools/distribute for Py3k\"\n \"\\n$ pip install distribute\")\n\nelse:\n setuptools_kwargs = {\n 'install_requires': [],\n 'zip_safe': False,\n }\n\n if not _have_setuptools:\n setuptools_kwargs = {}\n\ncurdir = os.path.abspath(os.path.dirname(__file__))\nREADME = open(pjoin(curdir, \"README.txt\")).read()\nCHANGES = open(pjoin(curdir, \"CHANGES.txt\")).read()\n\nDISTNAME = 'statsmodels'\nDESCRIPTION = 'Statistical computations and models for use with SciPy'\nLONG_DESCRIPTION = README + '\\n\\n' + CHANGES\nMAINTAINER = 'Skipper Seabold, Josef Perktold'\nMAINTAINER_EMAIL ='[email protected]'\nURL = 'http://statsmodels.sourceforge.net/'\nLICENSE = 'BSD License'\nDOWNLOAD_URL = ''\n\nfrom distutils.extension import Extension\nfrom distutils.command.build import build\nfrom distutils.command.sdist import sdist\nfrom distutils.command.build_ext import build_ext as _build_ext\n\ntry:\n from Cython.Distutils import build_ext as _build_ext\n # from Cython.Distutils import Extension # to get pyrex debugging symbols\n cython = True\nexcept ImportError:\n cython = False\n\n\nclass build_ext(_build_ext):\n def build_extensions(self):\n numpy_incl = pkg_resources.resource_filename('numpy', 'core/include')\n\n for ext in self.extensions:\n if (hasattr(ext, 'include_dirs') and\n not numpy_incl in ext.include_dirs):\n ext.include_dirs.append(numpy_incl)\n _build_ext.build_extensions(self)\n\n\ndef strip_rc(version):\n return re.sub(r\"rc\\d+$\", \"\", version)\n\ndef check_dependency_versions(min_versions):\n \"\"\"\n Don't let setuptools do this. 
It's rude.\n\n Just makes sure it can import the packages and if not, stops the build\n process.\n \"\"\"\n from distutils.version import StrictVersion\n try:\n from numpy.version import short_version as npversion\n except ImportError:\n raise ImportError(\"statsmodels requires numpy\")\n try:\n from scipy.version import short_version as spversion\n except ImportError:\n try: # scipy 0.7.0\n from scipy.version import version as spversion\n except ImportError:\n raise ImportError(\"statsmodels requires scipy\")\n try:\n from pandas.version import version as pversion\n except ImportError:\n raise ImportError(\"statsmodels requires pandas\")\n try:\n from patsy import __version__ as patsy_version\n except ImportError:\n raise ImportError(\"statsmodels requires patsy. http://patsy.readthedocs.org\")\n\n try:\n assert StrictVersion(strip_rc(npversion)) >= min_versions['numpy']\n except AssertionError:\n raise ImportError(\"Numpy version is %s. Requires >= %s\" %\n (npversion, min_versions['numpy']))\n try:\n assert StrictVersion(strip_rc(spversion)) >= min_versions['scipy']\n except AssertionError:\n raise ImportError(\"Scipy version is %s. Requires >= %s\" %\n (spversion, min_versions['scipy']))\n try:\n #NOTE: not sure how robust this regex is but it at least allows\n # double digit version numbering\n pversion = re.match(\"\\d*\\.\\d*\\.\\d*\", pversion).group()\n assert StrictVersion(pversion) >= min_versions['pandas']\n except AssertionError:\n raise ImportError(\"Pandas version is %s. Requires >= %s\" %\n (pversion, min_versions['pandas']))\n\n try: # patsy dev looks like 0.1.0+dev\n pversion = re.match(\"\\d*\\.\\d*\\.\\d*\", patsy_version).group()\n assert StrictVersion(pversion) >= min_versions['patsy']\n except AssertionError:\n raise ImportError(\"Patsy version is %s. 
Requires >= %s\" %\n (pversion, min_versions[\"patsy\"]))\n\n\nMAJ = 0\nMIN = 5\nREV = 0\nISRELEASED = False\nVERSION = '%d.%d.%d' % (MAJ,MIN,REV)\n\nclassifiers = [ 'Development Status :: 4 - Beta',\n 'Environment :: Console',\n 'Programming Language :: Python :: 2.5',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.2',\n 'Operating System :: OS Independent',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Topic :: Scientific/Engineering']\n\n# Return the git revision as a string\ndef git_version():\n def _minimal_ext_cmd(cmd):\n # construct minimal environment\n env = {}\n for k in ['SYSTEMROOT', 'PATH']:\n v = os.environ.get(k)\n if v is not None:\n env[k] = v\n # LANGUAGE is used on win32\n env['LANGUAGE'] = 'C'\n env['LANG'] = 'C'\n env['LC_ALL'] = 'C'\n out = subprocess.Popen(\" \".join(cmd), stdout = subprocess.PIPE, env=env,\n shell=True).communicate()[0]\n return out\n\n try:\n out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])\n GIT_REVISION = out.strip().decode('ascii')\n except OSError:\n GIT_REVISION = \"Unknown\"\n\n return GIT_REVISION\n\ndef write_version_py(filename=pjoin(curdir, 'statsmodels/version.py')):\n cnt = \"\\n\".join([\"\",\n \"# THIS FILE IS GENERATED FROM SETUP.PY\",\n \"short_version = '%(version)s'\",\n \"version = '%(version)s'\",\n \"full_version = '%(full_version)s'\",\n \"git_revision = '%(git_revision)s'\",\n \"release = %(isrelease)s\", \"\",\n \"if not release:\",\n \" version = full_version\"])\n # Adding the git rev number needs to be done inside write_version_py(),\n # otherwise the import of numpy.version messes up the build under Python 3.\n FULLVERSION = VERSION\n dowrite = True\n if os.path.exists('.git'):\n GIT_REVISION = git_version()\n elif os.path.exists(filename):\n # must be a source distribution, use existing version file\n try:\n from statsmodels.version import git_revision as GIT_REVISION\n except ImportError:\n dowrite = False\n else:\n GIT_REVISION = \"Unknown\"\n\n if not ISRELEASED:\n FULLVERSION += '.dev-' + GIT_REVISION[:7]\n\n\n if dowrite:\n try:\n a = open(filename, 'w')\n a.write(cnt % {'version': VERSION,\n 'full_version' : FULLVERSION,\n 'git_revision' : GIT_REVISION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\ntry:\n from distutils.command.build_py import build_py_2to3 as build_py\nexcept ImportError:\n # 2.x\n from distutils.command.build_py import build_py\n\n\nclass CleanCommand(Command):\n \"\"\"Custom distutils command to clean the .so and .pyc files.\"\"\"\n\n user_options = [(\"all\", \"a\", \"\")]\n\n def initialize_options(self):\n self.all = True\n self._clean_me = []\n self._clean_trees = []\n self._clean_exclude = [\"bspline_ext.c\",\n \"bspline_impl.c\"]\n\n for root, dirs, files in list(os.walk('statsmodels')):\n for f in files:\n if f in self._clean_exclude:\n continue\n if os.path.splitext(f)[-1] in ('.pyc', '.so', '.o',\n '.pyo',\n '.pyd', '.c', '.orig'):\n self._clean_me.append(pjoin(root, f))\n for d in dirs:\n if d == '__pycache__':\n self._clean_trees.append(pjoin(root, d))\n\n for d in ('build',):\n if os.path.exists(d):\n self._clean_trees.append(d)\n\n def finalize_options(self):\n pass\n\n def run(self):\n for clean_me in self._clean_me:\n try:\n os.unlink(clean_me)\n except Exception:\n pass\n for clean_tree in self._clean_trees:\n try:\n import shutil\n shutil.rmtree(clean_tree)\n except Exception:\n pass\n\n\nclass 
CheckSDist(sdist):\n \"\"\"Custom sdist that ensures Cython has compiled all pyx files to c.\"\"\"\n\n _pyxfiles = ['statsmodels/nonparametric/linbin.pyx',\n 'statsmodels/nonparametric/_smoothers_lowess.pyx',\n 'statsmodels/tsa/kalmanf/kalman_loglike.pyx']\n\n def initialize_options(self):\n sdist.initialize_options(self)\n\n '''\n self._pyxfiles = []\n for root, dirs, files in os.walk('statsmodels'):\n for f in files:\n if f.endswith('.pyx'):\n self._pyxfiles.append(pjoin(root, f))\n '''\n\n def run(self):\n if 'cython' in cmdclass:\n self.run_command('cython')\n else:\n for pyxfile in self._pyxfiles:\n cfile = pyxfile[:-3] + 'c'\n msg = \"C-source file '%s' not found.\" % (cfile) +\\\n \" Run 'setup.py cython' before sdist.\"\n assert os.path.isfile(cfile), msg\n sdist.run(self)\n\n\nclass CheckingBuildExt(build_ext):\n \"\"\"Subclass build_ext to get clearer report if Cython is necessary.\"\"\"\n\n def check_cython_extensions(self, extensions):\n for ext in extensions:\n for src in ext.sources:\n if not os.path.exists(src):\n raise Exception(\"\"\"Cython-generated file '%s' not found.\n Cython is required to compile statsmodels from a development branch.\n Please install Cython or download a source release of statsmodels.\n \"\"\" % src)\n\n def build_extensions(self):\n self.check_cython_extensions(self.extensions)\n build_ext.build_extensions(self)\n\n\nclass CythonCommand(build_ext):\n \"\"\"Custom distutils command subclassed from Cython.Distutils.build_ext\n to compile pyx->c, and stop there. All this does is override the\n C-compile method build_extension() with a no-op.\"\"\"\n def build_extension(self, ext):\n pass\n\n\nclass DummyBuildSrc(Command):\n \"\"\" numpy's build_src command interferes with Cython's build_ext.\n \"\"\"\n user_options = []\n\n def initialize_options(self):\n self.py_modules_dict = {}\n\n def finalize_options(self):\n pass\n\n def run(self):\n pass\n\n\ncmdclass = {'clean': CleanCommand,\n 'build': build,\n 'sdist': CheckSDist}\n\nif cython:\n suffix = \".pyx\"\n cmdclass[\"build_ext\"] = CheckingBuildExt\n cmdclass[\"cython\"] = CythonCommand\nelse:\n suffix = \".c\"\n cmdclass[\"build_src\"] = DummyBuildSrc\n cmdclass[\"build_ext\"] = CheckingBuildExt\n\nlib_depends = []\n\ndef srcpath(name=None, suffix='.pyx', subdir='src'):\n return pjoin('statsmodels', subdir, name + suffix)\n\nif suffix == \".pyx\":\n lib_depends = [srcpath(f, suffix=\".pyx\") for f in lib_depends]\nelse:\n lib_depends = []\n\ncommon_include = []\n\n# some linux distros require it\nlibraries = ['m'] if 'win32' not in sys.platform else []\n\next_data = dict(\n kalman_loglike = {\"pyxfile\" : \"tsa/kalmanf/kalman_loglike\",\n \"depends\" : [],\n \"sources\" : []},\n\n linbin = {\"pyxfile\" : \"nonparametric/linbin\",\n \"depends\" : [],\n \"sources\" : []},\n _smoothers_lowess = {\"pyxfile\" : \"nonparametric/_smoothers_lowess\",\n \"depends\" : [],\n \"sources\" : []}\n )\n\ndef pxd(name):\n return os.path.abspath(pjoin('pandas', name + '.pxd'))\n\nextensions = []\nfor name, data in ext_data.items():\n sources = [srcpath(data['pyxfile'], suffix=suffix, subdir='')]\n pxds = [pxd(x) for x in data.get('pxdfiles', [])]\n destdir = \".\".join(os.path.dirname(data[\"pyxfile\"]).split(\"/\"))\n if suffix == '.pyx' and pxds:\n sources.extend(pxds)\n\n sources.extend(data.get('sources', []))\n\n include = data.get('include', common_include)\n\n obj = Extension('statsmodels.%s.%s' % (destdir, name),\n sources=sources,\n depends=data.get('depends', []),\n include_dirs=include)\n\n 
extensions.append(obj)\n\nif suffix == '.pyx' and 'setuptools' in sys.modules:\n # undo dumb setuptools bug clobbering .pyx sources back to .c\n for ext in extensions:\n if ext.sources[0].endswith('.c'):\n root, _ = os.path.splitext(ext.sources[0])\n ext.sources[0] = root + suffix\n\nif _have_setuptools:\n setuptools_kwargs[\"test_suite\"] = \"nose.collector\"\n\ntry:\n from os.path import relpath\nexcept ImportError: # python 2.5\n\n def relpath(path, start=os.curdir):\n \"\"\"Return a relative version of a path\"\"\"\n if not path:\n raise ValueError(\"no path specified\")\n start_list = os.path.abspath(start).split(os.path.sep)\n path_list = os.path.abspath(path).split(os.path.sep)\n # Work out how much of the filepath is shared by start and path.\n i = len(os.path.commonprefix([start_list, path_list]))\n rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]\n if not rel_list:\n return os.curdir\n return pjoin(*rel_list)\n\ndef get_data_files():\n sep = os.path.sep\n # install the datasets\n data_files = {}\n root = pjoin(curdir, \"statsmodels\", \"datasets\")\n for i in os.listdir(root):\n if i is \"tests\":\n continue\n path = pjoin(root, i)\n if os.path.isdir(path):\n data_files.update({relpath(path).replace(sep, \".\") : [\"*.csv\",\n \"*.dta\"]})\n # add all the tests and results files\n for r, ds, fs in os.walk(pjoin(curdir, \"statsmodels\")):\n if r.endswith('results') and 'sandbox' not in r:\n data_files.update({relpath(r).replace(sep, \".\") : [\"*.csv\",\n \"*.txt\"]})\n\n return data_files\n\nif __name__ == \"__main__\":\n if os.path.exists('MANIFEST'):\n os.unlink('MANIFEST')\n\n min_versions = {\n 'numpy' : '1.4.0',\n 'scipy' : '0.7.0',\n 'pandas' : '0.7.1',\n 'patsy' : '0.1.0',\n }\n if sys.version_info[0] == 3 and sys.version_info[1] >= 3:\n # 3.3 needs numpy 1.7+\n min_versions.update({\"numpy\" : \"1.7.0b2\"})\n\n check_dependency_versions(min_versions)\n write_version_py()\n\n # this adds *.csv and *.dta files in datasets folders\n # and *.csv and *.txt files in test/results folders\n package_data = get_data_files()\n packages = find_packages()\n packages.append(\"statsmodels.tsa.vector_ar.data\")\n\n package_data[\"statsmodels.datasets.tests\"].append(\"*.zip\")\n package_data[\"statsmodels.iolib.tests.results\"].append(\"*.dta\")\n package_data[\"statsmodels.stats.tests.results\"].append(\"*.json\")\n package_data[\"statsmodels.tsa.vector_ar.tests.results\"].append(\"*.npz\")\n # data files that don't follow the tests/results pattern. should fix.\n package_data.update({\"statsmodels.stats.tests\" : [\"*.txt\"]})\n # the next two are in the sdist, but I don't manage to get them installed\n package_data.update({\"statsmodels.stats.libqstrung\" :\n [\"*.r\", \"*.txt\", \"*.dat\"]})\n package_data.update({\"statsmodels.stats.libqstrung.tests\" :\n [\"*.csv\", \"*.dat\"]})\n package_data.update({\"statsmodels.tsa.vector_ar.data\" : [\"*.dat\"]})\n package_data.update({\"statsmodels.tsa.vector_ar.data\" : [\"*.dat\"]})\n # Why are we installing this stuff?\n\n #TODO: deal with this. 
Not sure if it ever worked for bdists\n #('docs/build/htmlhelp/statsmodelsdoc.chm',\n # 'statsmodels/statsmodelsdoc.chm')\n\n setup(name = DISTNAME,\n version = VERSION,\n maintainer = MAINTAINER,\n ext_modules = extensions,\n maintainer_email = MAINTAINER_EMAIL,\n description = DESCRIPTION,\n license = LICENSE,\n url = URL,\n download_url = DOWNLOAD_URL,\n long_description = LONG_DESCRIPTION,\n classifiers = classifiers,\n platforms = 'any',\n cmdclass = cmdclass,\n packages = packages,\n package_data = package_data,\n **setuptools_kwargs)\n", "path": "setup.py"}], "after_files": [{"content": "\"\"\"\nMuch of the build system code was adapted from work done by the pandas\ndevelopers [1], which was in turn based on work done in pyzmq [2] and lxml [3].\n\n[1] http://pandas.pydata.org\n[2] http://zeromq.github.io/pyzmq/\n[3] http://lxml.de/\n\"\"\"\n\nimport os\nfrom os.path import splitext, basename, join as pjoin\nimport sys\nimport subprocess\nimport re\n\n# may need to work around setuptools bug by providing a fake Pyrex\ntry:\n import Cython\n sys.path.insert(0, pjoin(os.path.dirname(__file__), \"fake_pyrex\"))\nexcept ImportError:\n pass\n\n# try bootstrapping setuptools if it doesn't exist\ntry:\n import pkg_resources\n try:\n pkg_resources.require(\"setuptools>=0.6c5\")\n except pkg_resources.VersionConflict:\n from ez_setup import use_setuptools\n use_setuptools(version=\"0.6c5\")\n from setuptools import setup, Command, find_packages\n _have_setuptools = True\nexcept ImportError:\n # no setuptools installed\n from distutils.core import setup, Command\n _have_setuptools = False\n\nsetuptools_kwargs = {}\nif sys.version_info[0] >= 3:\n setuptools_kwargs = {'use_2to3': True,\n 'zip_safe': False,\n #'use_2to3_exclude_fixers': [],\n }\n if not _have_setuptools:\n sys.exit(\"need setuptools/distribute for Py3k\"\n \"\\n$ pip install distribute\")\n\nelse:\n setuptools_kwargs = {\n 'install_requires': [],\n 'zip_safe': False,\n }\n\n if not _have_setuptools:\n setuptools_kwargs = {}\n\ncurdir = os.path.abspath(os.path.dirname(__file__))\nREADME = open(pjoin(curdir, \"README.txt\")).read()\nCHANGES = open(pjoin(curdir, \"CHANGES.txt\")).read()\n\nDISTNAME = 'statsmodels'\nDESCRIPTION = 'Statistical computations and models for use with SciPy'\nLONG_DESCRIPTION = README + '\\n\\n' + CHANGES\nMAINTAINER = 'Skipper Seabold, Josef Perktold'\nMAINTAINER_EMAIL ='[email protected]'\nURL = 'http://statsmodels.sourceforge.net/'\nLICENSE = 'BSD License'\nDOWNLOAD_URL = ''\n\nfrom distutils.extension import Extension\nfrom distutils.command.build import build\nfrom distutils.command.sdist import sdist\nfrom distutils.command.build_ext import build_ext as _build_ext\n\ntry:\n from Cython.Distutils import build_ext as _build_ext\n # from Cython.Distutils import Extension # to get pyrex debugging symbols\n cython = True\nexcept ImportError:\n cython = False\n\n\nclass build_ext(_build_ext):\n def build_extensions(self):\n numpy_incl = pkg_resources.resource_filename('numpy', 'core/include')\n\n for ext in self.extensions:\n if (hasattr(ext, 'include_dirs') and\n not numpy_incl in ext.include_dirs):\n ext.include_dirs.append(numpy_incl)\n _build_ext.build_extensions(self)\n\n\ndef strip_rc(version):\n return re.sub(r\"rc\\d+$\", \"\", version)\n\ndef check_dependency_versions(min_versions):\n \"\"\"\n Don't let setuptools do this. 
It's rude.\n\n Just makes sure it can import the packages and if not, stops the build\n process.\n \"\"\"\n from distutils.version import StrictVersion\n try:\n from numpy.version import short_version as npversion\n except ImportError:\n raise ImportError(\"statsmodels requires numpy\")\n try:\n from scipy.version import short_version as spversion\n except ImportError:\n try: # scipy 0.7.0\n from scipy.version import version as spversion\n except ImportError:\n raise ImportError(\"statsmodels requires scipy\")\n try:\n from pandas.version import version as pversion\n except ImportError:\n raise ImportError(\"statsmodels requires pandas\")\n try:\n from patsy import __version__ as patsy_version\n except ImportError:\n raise ImportError(\"statsmodels requires patsy. http://patsy.readthedocs.org\")\n\n try:\n assert StrictVersion(strip_rc(npversion)) >= min_versions['numpy']\n except AssertionError:\n raise ImportError(\"Numpy version is %s. Requires >= %s\" %\n (npversion, min_versions['numpy']))\n try:\n assert StrictVersion(strip_rc(spversion)) >= min_versions['scipy']\n except AssertionError:\n raise ImportError(\"Scipy version is %s. Requires >= %s\" %\n (spversion, min_versions['scipy']))\n try:\n #NOTE: not sure how robust this regex is but it at least allows\n # double digit version numbering\n pversion = re.match(\"\\d*\\.\\d*\\.\\d*\", pversion).group()\n assert StrictVersion(pversion) >= min_versions['pandas']\n except AssertionError:\n raise ImportError(\"Pandas version is %s. Requires >= %s\" %\n (pversion, min_versions['pandas']))\n\n try: # patsy dev looks like 0.1.0+dev\n pversion = re.match(\"\\d*\\.\\d*\\.\\d*\", patsy_version).group()\n assert StrictVersion(pversion) >= min_versions['patsy']\n except AssertionError:\n raise ImportError(\"Patsy version is %s. 
Requires >= %s\" %\n (pversion, min_versions[\"patsy\"]))\n\n\nMAJ = 0\nMIN = 5\nREV = 0\nISRELEASED = False\nVERSION = '%d.%d.%d' % (MAJ,MIN,REV)\n\nclassifiers = [ 'Development Status :: 4 - Beta',\n 'Environment :: Console',\n 'Programming Language :: Python :: 2.5',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.2',\n 'Operating System :: OS Independent',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Topic :: Scientific/Engineering']\n\n# Return the git revision as a string\ndef git_version():\n def _minimal_ext_cmd(cmd):\n # construct minimal environment\n env = {}\n for k in ['SYSTEMROOT', 'PATH']:\n v = os.environ.get(k)\n if v is not None:\n env[k] = v\n # LANGUAGE is used on win32\n env['LANGUAGE'] = 'C'\n env['LANG'] = 'C'\n env['LC_ALL'] = 'C'\n out = subprocess.Popen(\" \".join(cmd), stdout = subprocess.PIPE, env=env,\n shell=True).communicate()[0]\n return out\n\n try:\n out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])\n GIT_REVISION = out.strip().decode('ascii')\n except OSError:\n GIT_REVISION = \"Unknown\"\n\n return GIT_REVISION\n\ndef write_version_py(filename=pjoin(curdir, 'statsmodels/version.py')):\n cnt = \"\\n\".join([\"\",\n \"# THIS FILE IS GENERATED FROM SETUP.PY\",\n \"short_version = '%(version)s'\",\n \"version = '%(version)s'\",\n \"full_version = '%(full_version)s'\",\n \"git_revision = '%(git_revision)s'\",\n \"release = %(isrelease)s\", \"\",\n \"if not release:\",\n \" version = full_version\"])\n # Adding the git rev number needs to be done inside write_version_py(),\n # otherwise the import of numpy.version messes up the build under Python 3.\n FULLVERSION = VERSION\n dowrite = True\n if os.path.exists('.git'):\n GIT_REVISION = git_version()\n elif os.path.exists(filename):\n # must be a source distribution, use existing version file\n try:\n from statsmodels.version import git_revision as GIT_REVISION\n except ImportError:\n dowrite = False\n else:\n GIT_REVISION = \"Unknown\"\n\n if not ISRELEASED:\n FULLVERSION += '.dev-' + GIT_REVISION[:7]\n\n\n if dowrite:\n try:\n a = open(filename, 'w')\n a.write(cnt % {'version': VERSION,\n 'full_version' : FULLVERSION,\n 'git_revision' : GIT_REVISION,\n 'isrelease': str(ISRELEASED)})\n finally:\n a.close()\n\ntry:\n from distutils.command.build_py import build_py_2to3 as build_py\nexcept ImportError:\n # 2.x\n from distutils.command.build_py import build_py\n\n\nclass CleanCommand(Command):\n \"\"\"Custom distutils command to clean the .so and .pyc files.\"\"\"\n\n user_options = [(\"all\", \"a\", \"\")]\n\n def initialize_options(self):\n self.all = True\n self._clean_me = []\n self._clean_trees = []\n self._clean_exclude = [\"bspline_ext.c\",\n \"bspline_impl.c\"]\n\n for root, dirs, files in list(os.walk('statsmodels')):\n for f in files:\n if f in self._clean_exclude:\n continue\n if os.path.splitext(f)[-1] in ('.pyc', '.so', '.o',\n '.pyo',\n '.pyd', '.c', '.orig'):\n self._clean_me.append(pjoin(root, f))\n for d in dirs:\n if d == '__pycache__':\n self._clean_trees.append(pjoin(root, d))\n\n for d in ('build',):\n if os.path.exists(d):\n self._clean_trees.append(d)\n\n def finalize_options(self):\n pass\n\n def run(self):\n for clean_me in self._clean_me:\n try:\n os.unlink(clean_me)\n except Exception:\n pass\n for clean_tree in self._clean_trees:\n try:\n import shutil\n shutil.rmtree(clean_tree)\n except Exception:\n pass\n\n\nclass 
CheckSDist(sdist):\n \"\"\"Custom sdist that ensures Cython has compiled all pyx files to c.\"\"\"\n\n _pyxfiles = ['statsmodels/nonparametric/linbin.pyx',\n 'statsmodels/nonparametric/_smoothers_lowess.pyx',\n 'statsmodels/tsa/kalmanf/kalman_loglike.pyx']\n\n def initialize_options(self):\n sdist.initialize_options(self)\n\n '''\n self._pyxfiles = []\n for root, dirs, files in os.walk('statsmodels'):\n for f in files:\n if f.endswith('.pyx'):\n self._pyxfiles.append(pjoin(root, f))\n '''\n\n def run(self):\n if 'cython' in cmdclass:\n self.run_command('cython')\n else:\n for pyxfile in self._pyxfiles:\n cfile = pyxfile[:-3] + 'c'\n msg = \"C-source file '%s' not found.\" % (cfile) +\\\n \" Run 'setup.py cython' before sdist.\"\n assert os.path.isfile(cfile), msg\n sdist.run(self)\n\n\nclass CheckingBuildExt(build_ext):\n \"\"\"Subclass build_ext to get clearer report if Cython is necessary.\"\"\"\n\n def check_cython_extensions(self, extensions):\n for ext in extensions:\n for src in ext.sources:\n if not os.path.exists(src):\n raise Exception(\"\"\"Cython-generated file '%s' not found.\n Cython is required to compile statsmodels from a development branch.\n Please install Cython or download a source release of statsmodels.\n \"\"\" % src)\n\n def build_extensions(self):\n self.check_cython_extensions(self.extensions)\n build_ext.build_extensions(self)\n\n\nclass CythonCommand(build_ext):\n \"\"\"Custom distutils command subclassed from Cython.Distutils.build_ext\n to compile pyx->c, and stop there. All this does is override the\n C-compile method build_extension() with a no-op.\"\"\"\n def build_extension(self, ext):\n pass\n\n\nclass DummyBuildSrc(Command):\n \"\"\" numpy's build_src command interferes with Cython's build_ext.\n \"\"\"\n user_options = []\n\n def initialize_options(self):\n self.py_modules_dict = {}\n\n def finalize_options(self):\n pass\n\n def run(self):\n pass\n\n\ncmdclass = {'clean': CleanCommand,\n 'build': build,\n 'sdist': CheckSDist}\n\nif cython:\n suffix = \".pyx\"\n cmdclass[\"build_ext\"] = CheckingBuildExt\n cmdclass[\"cython\"] = CythonCommand\nelse:\n suffix = \".c\"\n cmdclass[\"build_src\"] = DummyBuildSrc\n cmdclass[\"build_ext\"] = CheckingBuildExt\n\nlib_depends = []\n\ndef srcpath(name=None, suffix='.pyx', subdir='src'):\n return pjoin('statsmodels', subdir, name + suffix)\n\nif suffix == \".pyx\":\n lib_depends = [srcpath(f, suffix=\".pyx\") for f in lib_depends]\nelse:\n lib_depends = []\n\ncommon_include = []\n\n# some linux distros require it\nlibraries = ['m'] if 'win32' not in sys.platform else []\n\next_data = dict(\n kalman_loglike = {\"pyxfile\" : \"tsa/kalmanf/kalman_loglike\",\n \"depends\" : [],\n \"sources\" : []},\n\n linbin = {\"pyxfile\" : \"nonparametric/linbin\",\n \"depends\" : [],\n \"sources\" : []},\n _smoothers_lowess = {\"pyxfile\" : \"nonparametric/_smoothers_lowess\",\n \"depends\" : [],\n \"sources\" : []}\n )\n\ndef pxd(name):\n return os.path.abspath(pjoin('pandas', name + '.pxd'))\n\nextensions = []\nfor name, data in ext_data.items():\n sources = [srcpath(data['pyxfile'], suffix=suffix, subdir='')]\n pxds = [pxd(x) for x in data.get('pxdfiles', [])]\n destdir = \".\".join(os.path.dirname(data[\"pyxfile\"]).split(\"/\"))\n if suffix == '.pyx' and pxds:\n sources.extend(pxds)\n\n sources.extend(data.get('sources', []))\n\n include = data.get('include', common_include)\n\n obj = Extension('statsmodels.%s.%s' % (destdir, name),\n sources=sources,\n depends=data.get('depends', []),\n include_dirs=include)\n\n 
extensions.append(obj)\n\nif suffix == '.pyx' and 'setuptools' in sys.modules:\n # undo dumb setuptools bug clobbering .pyx sources back to .c\n for ext in extensions:\n if ext.sources[0].endswith('.c'):\n root, _ = os.path.splitext(ext.sources[0])\n ext.sources[0] = root + suffix\n\nif _have_setuptools:\n setuptools_kwargs[\"test_suite\"] = \"nose.collector\"\n\ntry:\n from os.path import relpath\nexcept ImportError: # python 2.5\n\n def relpath(path, start=os.curdir):\n \"\"\"Return a relative version of a path\"\"\"\n if not path:\n raise ValueError(\"no path specified\")\n start_list = os.path.abspath(start).split(os.path.sep)\n path_list = os.path.abspath(path).split(os.path.sep)\n # Work out how much of the filepath is shared by start and path.\n i = len(os.path.commonprefix([start_list, path_list]))\n rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]\n if not rel_list:\n return os.curdir\n return pjoin(*rel_list)\n\ndef get_data_files():\n sep = os.path.sep\n # install the datasets\n data_files = {}\n root = pjoin(curdir, \"statsmodels\", \"datasets\")\n for i in os.listdir(root):\n if i is \"tests\":\n continue\n path = pjoin(root, i)\n if os.path.isdir(path):\n data_files.update({relpath(path).replace(sep, \".\") : [\"*.csv\",\n \"*.dta\"]})\n # add all the tests and results files\n for r, ds, fs in os.walk(pjoin(curdir, \"statsmodels\")):\n if r.endswith('results') and 'sandbox' not in r:\n data_files.update({relpath(r).replace(sep, \".\") : [\"*.csv\",\n \"*.txt\"]})\n\n return data_files\n\nif __name__ == \"__main__\":\n if os.path.exists('MANIFEST'):\n os.unlink('MANIFEST')\n\n min_versions = {\n 'numpy' : '1.4.0',\n 'scipy' : '0.7.0',\n 'pandas' : '0.7.1',\n 'patsy' : '0.1.0',\n }\n if sys.version_info[0] == 3 and sys.version_info[1] >= 3:\n # 3.3 needs numpy 1.7+\n min_versions.update({\"numpy\" : \"1.7.0b2\"})\n\n check_dependency_versions(min_versions)\n write_version_py()\n\n # this adds *.csv and *.dta files in datasets folders\n # and *.csv and *.txt files in test/results folders\n package_data = get_data_files()\n packages = find_packages()\n packages.append(\"statsmodels.tsa.vector_ar.data\")\n\n package_data[\"statsmodels.datasets.tests\"].append(\"*.zip\")\n package_data[\"statsmodels.iolib.tests.results\"].append(\"*.dta\")\n package_data[\"statsmodels.stats.tests.results\"].append(\"*.json\")\n package_data[\"statsmodels.tsa.vector_ar.tests.results\"].append(\"*.npz\")\n # data files that don't follow the tests/results pattern. should fix.\n package_data.update({\"statsmodels.stats.tests\" : [\"*.txt\"]})\n # the next two are in the sdist, but I don't manage to get them installed\n package_data.update({\"statsmodels.stats.libqstrung\" :\n [\"*.r\", \"*.txt\", \"*.dat\"]})\n package_data.update({\"statsmodels.stats.libqstrung.tests\" :\n [\"*.csv\", \"*.dat\"]})\n package_data.update({\"statsmodels.tsa.vector_ar.data\" : [\"*.dat\"]})\n package_data.update({\"statsmodels.tsa.vector_ar.data\" : [\"*.dat\"]})\n # Why are we installing this stuff?\n\n #TODO: deal with this. 
Not sure if it ever worked for bdists\n #('docs/build/htmlhelp/statsmodelsdoc.chm',\n # 'statsmodels/statsmodelsdoc.chm')\n\n setup(name = DISTNAME,\n version = VERSION,\n maintainer = MAINTAINER,\n ext_modules = extensions,\n maintainer_email = MAINTAINER_EMAIL,\n description = DESCRIPTION,\n license = LICENSE,\n url = URL,\n download_url = DOWNLOAD_URL,\n long_description = LONG_DESCRIPTION,\n classifiers = classifiers,\n platforms = 'any',\n cmdclass = cmdclass,\n packages = packages,\n package_data = package_data,\n include_package_data=True,\n **setuptools_kwargs)\n", "path": "setup.py"}]} |
gh_patches_debug_143 | rasdani/github-patches | git_diff | liberapay__liberapay.com-1785 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
The username change warning isn't clear enough
<https://mastodonten.de/@scroom/100724362756380264>:
> [@Liberapay](https://mastodon.xyz/@Liberapay) I have a question about existing links to donation accounts: I changed my name and then I got a hint that I had to change the donation links. So far the old link still works. Will it be turned off at some point?
>
> https://liberapay.com/~32819/donate
--- END ISSUE ---
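
The `~32819` form in the quoted link is an id-based URL, which stays valid across renames; username-based URLs are the ones the warning is about. A generic sketch of that idea, using a stand-in lookup table — this is not Liberapay's actual routing code:

```python
# Generic illustration of id-based profile URLs surviving a username change.
# `participants` is a stand-in for a database lookup; none of this is Liberapay code.
participants = {32819: {"username": "new-name"}}

def resolve_profile(path_segment):
    """'~32819' keeps resolving after a rename; 'old-name' may not."""
    if path_segment.startswith("~"):
        return participants.get(int(path_segment[1:]))
    return next((p for p in participants.values()
                 if p["username"] == path_segment), None)

print(resolve_profile("~32819"))    # found regardless of the current username
print(resolve_profile("old-name"))  # None once the username has changed
```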
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `liberapay/constants.py`
Content:
```
1 from collections import defaultdict, namedtuple, OrderedDict
2 from datetime import date, datetime, timedelta
3 from decimal import Decimal, ROUND_FLOOR, ROUND_HALF_UP, ROUND_UP
4 import re
5
6 from babel.numbers import get_currency_precision
7 from mangopay.utils import Money
8 from markupsafe import Markup
9 from pando.utils import utc
10
11
12 def ordered_set(keys):
13 return OrderedDict((k, None) for k in keys)
14
15
16 def check_bits(bits):
17 assert len(set(bits)) == len(bits) # no duplicates
18 assert not [b for b in bits if '{0:b}'.format(b).count('1') != 1] # single bit
19
20
21 Event = namedtuple('Event', 'name bit title')
22
23
24 class Fees(namedtuple('Fees', ('var', 'fix'))):
25 VAT = Decimal('0.17') # 17% (Luxembourg rate)
26 VAT_1 = VAT + 1
27
28 @property
29 def with_vat(self):
30 r = (self.var * self.VAT_1 * 100, self.fix * self.VAT_1)
31 return r[0] if not r[1] else r[1].round_up() if not r[0] else r
32
33
34 def to_precision(x, precision, rounding=ROUND_HALF_UP):
35 """Round `x` to keep only `precision` of its most significant digits.
36
37 >>> to_precision(Decimal('0.0086820'), 2)
38 Decimal('0.0087')
39 >>> to_precision(Decimal('13567.89'), 3)
40 Decimal('13600')
41 >>> to_precision(Decimal('0.000'), 4)
42 Decimal('0')
43 """
44 if x == 0:
45 return Decimal(0)
46 log10 = x.log10().to_integral(ROUND_FLOOR)
47 # round
48 factor = Decimal(10) ** (log10 + 1)
49 r = (x / factor).quantize(Decimal(10) ** -precision, rounding=rounding) * factor
50 # remove trailing zeros
51 r = r.quantize(Decimal(10) ** (log10 - precision + 1))
52 return r
53
54
55 def convert_symbolic_amount(amount, target_currency, precision=2, rounding=ROUND_HALF_UP):
56 from liberapay.website import website
57 rate = website.currency_exchange_rates[('EUR', target_currency)]
58 minimum = Money.MINIMUMS[target_currency].amount
59 return max(
60 to_precision(amount * rate, precision, rounding).quantize(minimum, rounding),
61 minimum
62 )
63
64
65 class MoneyAutoConvertDict(defaultdict):
66
67 def __init__(self, *args, **kw):
68 super(MoneyAutoConvertDict, self).__init__(None, *args, **kw)
69
70 def __missing__(self, currency):
71 r = Money(convert_symbolic_amount(self['EUR'].amount, currency, 1), currency)
72 self[currency] = r
73 return r
74
75
76 StandardTip = namedtuple('StandardTip', 'label weekly monthly yearly')
77
78
79 _ = lambda a: a
80
81 ASCII_ALLOWED_IN_USERNAME = set("0123456789"
82 "abcdefghijklmnopqrstuvwxyz"
83 "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
84 "-_.")
85
86 AVATAR_QUERY = '?s=160&d=404'
87 AVATAR_SOURCES = (
88 'libravatar bitbucket facebook github gitlab google mastodon pleroma twitch twitter youtube'
89 ).split()
90
91 BASE64URL_CHARS = set('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_')
92
93 BIRTHDAY = date(2015, 5, 22)
94
95 CARD_BRANDS = {
96 'amex': 'American Express',
97 'diners': 'Diners Club',
98 'discover': 'Discover',
99 'jcb': 'JCB',
100 'mastercard': 'Mastercard',
101 'unionpay': 'UnionPay',
102 'visa': 'Visa',
103 'unknown': '',
104 }
105
106 CURRENCIES = ordered_set([
107 'EUR', 'USD',
108 'AUD', 'BGN', 'BRL', 'CAD', 'CHF', 'CNY', 'CZK', 'DKK', 'GBP', 'HKD', 'HRK',
109 'HUF', 'IDR', 'ILS', 'INR', 'ISK', 'JPY', 'KRW', 'MXN', 'MYR', 'NOK', 'NZD',
110 'PHP', 'PLN', 'RON', 'RUB', 'SEK', 'SGD', 'THB', 'TRY', 'ZAR'
111 ])
112
113 D_CENT = Decimal('0.01')
114 D_MAX = Decimal('999999999999.99')
115 D_ZERO = Decimal('0.00')
116
117 class _DonationLimits(defaultdict):
118 def __missing__(self, currency):
119 minimum = Money.MINIMUMS[currency].amount
120 eur_weekly_amounts = DONATION_LIMITS_EUR_USD['weekly']
121 converted_weekly_amounts = (
122 convert_symbolic_amount(eur_weekly_amounts[0], currency),
123 convert_symbolic_amount(eur_weekly_amounts[1], currency)
124 )
125 r = {
126 'weekly': tuple(Money(x, currency) for x in converted_weekly_amounts),
127 'monthly': tuple(
128 Money((x * Decimal(52) / Decimal(12)).quantize(minimum, rounding=ROUND_UP), currency)
129 for x in converted_weekly_amounts
130 ),
131 'yearly': tuple(Money(x * Decimal(52), currency) for x in converted_weekly_amounts),
132 }
133 self[currency] = r
134 return r
135
136 DONATION_LIMITS_WEEKLY_EUR_USD = (Decimal('0.01'), Decimal('100.00'))
137 DONATION_LIMITS_EUR_USD = {
138 'weekly': DONATION_LIMITS_WEEKLY_EUR_USD,
139 'monthly': tuple((x * Decimal(52) / Decimal(12)).quantize(D_CENT, rounding=ROUND_UP)
140 for x in DONATION_LIMITS_WEEKLY_EUR_USD),
141 'yearly': tuple(x * Decimal(52) for x in DONATION_LIMITS_WEEKLY_EUR_USD),
142 }
143 DONATION_LIMITS = _DonationLimits(None, {
144 'EUR': {k: (Money(v[0], 'EUR'), Money(v[1], 'EUR')) for k, v in DONATION_LIMITS_EUR_USD.items()},
145 'USD': {k: (Money(v[0], 'USD'), Money(v[1], 'USD')) for k, v in DONATION_LIMITS_EUR_USD.items()},
146 })
147
148 DOMAIN_RE = re.compile(r'''
149 ^
150 ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+
151 [a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?
152 $
153 ''', re.VERBOSE)
154
155 ELSEWHERE_ACTIONS = {'connect', 'lock', 'unlock'}
156
157 EMAIL_VERIFICATION_TIMEOUT = timedelta(hours=24)
158 EMAIL_RE = re.compile(r'''
159 # This is the regexp used by MangoPay (as of February 2017).
160 # It rejects some valid but exotic addresses.
161 # https://en.wikipedia.org/wiki/Email_address
162 ^
163 [a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+(\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)*
164 @
165 ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\.)+[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?
166 $
167 ''', re.VERBOSE)
168
169 EPOCH = datetime(1970, 1, 1, 0, 0, 0, 0, utc)
170
171 EUROZONE = set("AT BE CY DE EE ES FI FR GR IE IT LT LU LV MT NL PT SI SK".split())
172 SEPA = EUROZONE | set("AD BG CH CZ DK GB GI HR HU IS LI MC NO PL RO SE VA".split())
173
174 EVENTS = [
175 Event('income', 1, _("Every week as long as I am receiving donations")),
176 Event('donate_reminder', 2, _("When it's time to renew my donations")),
177 Event('pledgee_joined', 16, _("When someone I pledge to joins Liberapay")),
178 Event('team_invite', 32, _("When someone invites me to join a team")),
179 Event('payin_failed', 2**11, _("When a payment I initiated fails")),
180 Event('payin_succeeded', 2**12, _("When a payment I initiated succeeds")),
181 Event('payin_refund_initiated', 2**13, _("When money is being refunded back to me")),
182 Event('upcoming_debit', 2**14, _("When an automatic donation renewal payment is upcoming")),
183 Event('missing_route', 2**15, _("When I no longer have any valid payment instrument")),
184 Event('renewal_aborted', 2**16, _("When a donation renewal payment has been aborted")),
185 ]
186 check_bits([e.bit for e in EVENTS])
187 EVENTS = OrderedDict((e.name, e) for e in EVENTS)
188 EVENTS_S = ' '.join(EVENTS.keys())
189
190 # https://www.mangopay.com/pricing/
191 FEE_PAYIN_BANK_WIRE = Fees(Decimal('0.005'), 0) # 0.5%
192 FEE_PAYIN_CARD = {
193 'EUR': Fees(Decimal('0.018'), Money('0.18', 'EUR')), # 1.8% + €0.18
194 'USD': Fees(Decimal('0.025'), Money('0.30', 'USD')), # 2.5% + $0.30
195 }
196 FEE_PAYIN_DIRECT_DEBIT = {
197 'EUR': Fees(0, Money('0.50', 'EUR')), # €0.50
198 'GBP': Fees(0, Money('0.50', 'GBP')), # £0.50
199 }
200 FEE_PAYOUT = {
201 'EUR': {
202 'domestic': (SEPA, Fees(0, 0)),
203 'foreign': Fees(0, 0),
204 },
205 'GBP': {
206 'domestic': ({'GB'}, Fees(0, Money('0.45', 'GBP'))),
207 'foreign': Fees(0, Money('1.90', 'GBP')),
208 },
209 'USD': {
210 '*': Fees(0, Money('3.00', 'USD')),
211 },
212 }
213 FEE_PAYOUT_WARN = Decimal('0.03') # warn user when fee exceeds 3%
214
215 HTML_A = Markup('<a href="%s">%s</a>')
216
217 IDENTITY_FIELDS = set("""
218 birthdate headquarters_address name nationality occupation organization_name
219 postal_address
220 """.split())
221
222 INVOICE_DOC_MAX_SIZE = 5000000
223 INVOICE_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'png']
224 INVOICE_DOCS_LIMIT = 25
225
226 INVOICE_NATURES = {
227 'expense': _("Expense Report"),
228 }
229
230 INVOICE_STATUSES = {
231 'pre': _("Draft"),
232 'new': _("Sent (awaiting approval)"),
233 'retracted': _("Retracted"),
234 'accepted': _("Accepted (awaiting payment)"),
235 'paid': _("Paid"),
236 'rejected': _("Rejected"),
237 }
238
239 # https://docs.mangopay.com/api-references/kyc-rules/
240 KYC_DOC_MAX_SIZE = 7000000
241 KYC_DOC_MAX_SIZE_MB = int(KYC_DOC_MAX_SIZE / 1000000)
242 KYC_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'gif', 'png']
243 KYC_DOCS_EXTS_STR = ', '.join(KYC_DOCS_EXTS)
244 KYC_INCOME_THRESHOLDS = [(i, Money(a, 'EUR')) for i, a in (
245 (1, 18000),
246 (2, 30000),
247 (3, 50000),
248 (4, 80000),
249 (5, 120000),
250 (6, 120000),
251 )]
252 KYC_PAYIN_YEARLY_THRESHOLD = Money('2500', 'EUR')
253 KYC_PAYOUT_YEARLY_THRESHOLD = Money('1000', 'EUR')
254
255 LAUNCH_TIME = datetime(2016, 2, 3, 12, 50, 0, 0, utc)
256
257 PARTICIPANT_KINDS = {
258 'individual': _("Individual"),
259 'organization': _("Organization"),
260 'group': _("Team"),
261 }
262
263 PASSWORD_MIN_SIZE = 8
264 PASSWORD_MAX_SIZE = 150
265
266 PAYIN_BANK_WIRE_MIN = {k: Money('2.00', k) for k in ('EUR', 'USD')} # fee ≈ 0.99%
267 PAYIN_BANK_WIRE_TARGET = {k: Money('5.00', k) for k in ('EUR', 'USD')} # fee ≈ 0.6%
268 PAYIN_BANK_WIRE_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}
269 PAYIN_CARD_MIN = {
270 'EUR': Money('15.00', 'EUR'), # fee ≈ 3.5%
271 'USD': Money('20.00', 'USD'), # fee ≈ 4.58%
272 }
273 PAYIN_CARD_TARGET = {
274 'EUR': Money('92.00', 'EUR'), # fee ≈ 2.33%
275 'USD': Money('95.00', 'USD'), # fee ≈ 3.27%
276 }
277 PAYIN_CARD_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}
278 PAYIN_DIRECT_DEBIT_COUNTRIES = {
279 # https://support.gocardless.com/hc/en-gb/articles/115005758445
280 'EUR': EUROZONE | set("MC SM".split()),
281 }
282 PAYIN_DIRECT_DEBIT_MIN_EUR_GBP = Decimal('15.00') # fee ≈ 3.78%
283 PAYIN_DIRECT_DEBIT_MIN = {
284 'EUR': Money(PAYIN_DIRECT_DEBIT_MIN_EUR_GBP, 'EUR'),
285 'GBP': Money(PAYIN_DIRECT_DEBIT_MIN_EUR_GBP, 'GBP'),
286 }
287 PAYIN_DIRECT_DEBIT_TARGET_EUR_GBP = Decimal('99.00') # fee ≈ 0.59%
288 PAYIN_DIRECT_DEBIT_TARGET = {
289 'EUR': Money(PAYIN_DIRECT_DEBIT_TARGET_EUR_GBP, 'EUR'),
290 'GBP': Money(PAYIN_DIRECT_DEBIT_TARGET_EUR_GBP, 'GBP'),
291 }
292 PAYIN_DIRECT_DEBIT_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}
293
294 PAYIN_AMOUNTS = {
295 'paypal': {
296 'min_acceptable': MoneyAutoConvertDict({ # fee > 10%
297 'EUR': Money('2.00', 'EUR'),
298 'USD': Money('2.00', 'USD'),
299 }),
300 'min_recommended': MoneyAutoConvertDict({ # fee < 8%
301 'EUR': Money('10.00', 'EUR'),
302 'USD': Money('12.00', 'USD'),
303 }),
304 'low_fee': MoneyAutoConvertDict({ # fee < 6%
305 'EUR': Money('40.00', 'EUR'),
306 'USD': Money('48.00', 'USD'),
307 }),
308 'max_acceptable': MoneyAutoConvertDict({
309 'EUR': Money('5000.00', 'EUR'),
310 'USD': Money('5000.00', 'USD'),
311 }),
312 },
313 'stripe': {
314 'min_acceptable': MoneyAutoConvertDict({ # fee > 10%
315 'EUR': Money('2.00', 'EUR'),
316 'USD': Money('2.00', 'USD'),
317 }),
318 'min_recommended': MoneyAutoConvertDict({ # fee < 8%
319 'EUR': Money('10.00', 'EUR'),
320 'USD': Money('12.00', 'USD'),
321 }),
322 'low_fee': MoneyAutoConvertDict({ # fee < 6%
323 'EUR': Money('40.00', 'EUR'),
324 'USD': Money('48.00', 'USD'),
325 }),
326 'max_acceptable': MoneyAutoConvertDict({
327 'EUR': Money('5000.00', 'EUR'),
328 'USD': Money('5000.00', 'USD'),
329 }),
330 },
331 }
332
333 PAYMENT_METHODS = {
334 'mango-ba': _("Direct Debit"),
335 'mango-bw': _("Bank Wire"),
336 'mango-cc': _("Credit Card"),
337 'paypal': "PayPal",
338 'stripe-card': _("Credit/Debit Card"),
339 'stripe-sdd': _("Direct Debit"),
340 }
341 PAYMENT_SLUGS = {
342 'mango-ba': 'direct-debit',
343 'mango-bw': 'bankwire',
344 'mango-cc': 'card',
345 }
346
347 PAYOUT_COUNTRIES = {
348 'paypal': set("""
349 AD AE AG AI AL AM AN AO AR AT AU AW AZ BA BB BE BF BG BH BI BJ BM BN BO
350 BR BS BT BW BY BZ C2 CA CD CG CH CI CK CL CM CO CR CV CY CZ DE DJ DK DM
351 DO DZ EC EE EG ER ES ET FI FJ FK FM FO FR GA GD GE GF GI GL GM GN GP GR
352 GT GW GY HK HN HR HU ID IE IL IN IS IT JM JO JP KE KG KH KI KM KN KR KW
353 KY KZ LA LC LI LK LS LT LU LV MA MC MD ME MG MH MK ML MN MQ MR MS MT MU
354 MV MW MX MY MZ NA NC NE NF NG NI NL NO NP NR NU NZ OM PA PE PF PG PH PL
355 PM PN PT PW PY QA RE RO RS RU RW SA SB SC SE SG SH SI SJ SK SL SM SN SO
356 SR ST SV SZ TC TD TG TH TJ TM TN TO TT TT TT TT TV TW TZ UA UG GB US UY
357 VA VC VE VG VN VU WF WS YE YT ZA ZM ZW
358 PR
359 """.split()), # https://www.paypal.com/us/webapps/mpp/country-worldwide
360
361 'stripe': set("""
362 AT AU BE BG CA CH CY CZ DE DK EE ES FI FR GB GR HK IE IT JP LT LU LV MT
363 MX MY NL NO NZ PL PT RO SE SG SI SK US
364 PR
365 """.split()), # https://stripe.com/global
366 }
367
368 # https://developer.paypal.com/docs/api/reference/currency-codes/
369 PAYPAL_CURRENCIES = set("""
370 AUD CAD CHF CZK DKK EUR GBP HKD HUF ILS JPY MXN NOK NZD PHP PLN RUB SEK SGD
371 THB TWD USD
372 """.split())
373
374 PERIOD_CONVERSION_MAP = {
375 ('weekly', 'weekly'): Decimal(1),
376 ('monthly', 'weekly'): Decimal(12) / Decimal(52),
377 ('yearly', 'weekly'): Decimal(1) / Decimal(52),
378 ('weekly', 'monthly'): Decimal(52) / Decimal(12),
379 ('monthly', 'monthly'): Decimal(1),
380 ('yearly', 'monthly'): Decimal(1) / Decimal(12),
381 ('weekly', 'yearly'): Decimal(52),
382 ('monthly', 'yearly'): Decimal(12),
383 ('yearly', 'yearly'): Decimal(1),
384 }
385
386 PERIOD_CONVERSION_RATES = {
387 'weekly': Decimal(1),
388 'monthly': Decimal(12) / Decimal(52),
389 'yearly': Decimal(1) / Decimal(52),
390 }
391
392 POSTAL_ADDRESS_KEYS = (
393 'AddressLine1', 'AddressLine2', 'City', 'Region', 'PostalCode', 'Country'
394 )
395 POSTAL_ADDRESS_KEYS_LIBERAPAY = (
396 'country', 'region', 'city', 'postal_code', 'local_address'
397 )
398 POSTAL_ADDRESS_KEYS_STRIPE = (
399 'line1', 'line2', 'city', 'state', 'postal_code', 'country'
400 )
401
402 PRIVACY_FIELDS = OrderedDict([
403 ('hide_giving', (_("Hide total giving from others."), False)),
404 ('hide_receiving', (_("Hide total receiving from others."), False)),
405 ('hide_from_search', (_("Hide this profile from search results on Liberapay."), True)),
406 ('profile_noindex', (_("Tell web search engines not to index this profile."), True)),
407 ('hide_from_lists', (_("Prevent this profile from being listed on Liberapay."), True)),
408 ])
409 PRIVACY_FIELDS_S = ' '.join(PRIVACY_FIELDS.keys())
410
411 PRIVILEGES = dict(admin=1, run_payday=2)
412 check_bits(list(PRIVILEGES.values()))
413
414 PROFILE_VISIBILITY_ATTRS = ('profile_noindex', 'hide_from_lists', 'hide_from_search')
415
416 PUBLIC_NAME_MAX_SIZE = 64
417
418 QUARANTINE = timedelta(weeks=0)
419
420 RATE_LIMITS = {
421 'add_email.source': (5, 60*60*24), # 5 per day
422 'add_email.target': (2, 60*60*24), # 2 per day
423 'admin.http-unsafe': (10, 60*60*24), # 10 per day
424 'change_currency': (4, 60*60*24*7), # 4 per week
425 'change_password': (7, 60*60*24*7), # 7 per week
426 'change_username': (7, 60*60*24*7), # 7 per week
427 'check_password': (25, 60*60*24*7), # 25 per week
428 'elsewhere-lookup.ip-addr': (5, 20), # 5 per 20 seconds
429 'email.bypass_error': (2, 60*60*24*7), # 2 per week
430 'email.unblacklist.source': (5, 60*60*24*7), # 5 per week
431 'email.unblacklist.target': (3, 60*60*24*7), # 3 per week
432 'http-query.ip-addr': (10, 10), # 10 per 10 seconds
433 'http-query.user': (10, 10), # 10 per 10 seconds
434 'http-unsafe.ip-addr': (10, 10), # 10 per 10 seconds
435 'http-unsafe.user': (10, 10), # 10 per 10 seconds
436 'insert_identity': (7, 60*60*24*7), # 7 per week
437 'log-in.country': (10, 60), # 10 per minute per country
438 'log-in.email': (10, 60*60*24), # 10 per day
439 'log-in.email.not-verified': (2, 60*60*24), # 2 per day
440 'log-in.email.verified': (10, 60*60*24), # 10 per day
441 'log-in.ip-addr': (5, 5*60), # 5 per 5 minutes per IP address
442 'log-in.password': (3, 60*60), # 3 per hour
443 'make_team': (5, 60*60*24*7), # 5 per week
444 'payin.from-user': (15, 60*60*24*7), # 15 per week
445 'payin.from-ip-addr': (15, 60*60*24*7), # 15 per week
446 'refetch_elsewhere_data': (1, 60*60*24*7), # retry after one week
447 'refetch_repos': (1, 60*60*24), # retry after one day
448 'sign-up.email': (1, 5*60), # this is used to detect near-simultaneous requests,
449 # so 5 minutes should be plenty enough
450 'sign-up.ip-addr': (5, 60*60), # 5 per hour per IP address
451 'sign-up.ip-net': (15, 60*60), # 15 per hour per IP network
452 'sign-up.country': (5, 5*60), # 5 per 5 minutes per country
453 'sign-up.ip-version': (15, 5*60), # 15 per 5 minutes per IP version
454 }
455
456 SAFE_METHODS = {'GET', 'HEAD', 'OPTIONS'}
457
458 SESSION = 'session'
459 SESSION_REFRESH = timedelta(hours=1)
460 SESSION_TIMEOUT = timedelta(hours=6)
461
462
463 def make_standard_tip(label, weekly, currency):
464 precision = get_currency_precision(currency)
465 minimum = D_CENT if precision == 2 else Decimal(10) ** (-precision)
466 return StandardTip(
467 label,
468 Money(weekly, currency),
469 Money((weekly / PERIOD_CONVERSION_RATES['monthly']).quantize(minimum), currency),
470 Money((weekly / PERIOD_CONVERSION_RATES['yearly']).quantize(minimum), currency),
471 )
472
473
474 class _StandardTips(defaultdict):
475 def __missing__(self, currency):
476 r = [
477 make_standard_tip(
478 label, convert_symbolic_amount(weekly, currency), currency
479 ) for label, weekly in STANDARD_TIPS_EUR_USD
480 ]
481 self[currency] = r
482 return r
483
484
485 STANDARD_TIPS_EUR_USD = (
486 (_("Symbolic"), Decimal('0.01')),
487 (_("Small"), Decimal('0.25')),
488 (_("Medium"), Decimal('1.00')),
489 (_("Large"), Decimal('5.00')),
490 (_("Maximum"), DONATION_LIMITS_EUR_USD['weekly'][1]),
491 )
492 STANDARD_TIPS = _StandardTips(None, {
493 'EUR': [make_standard_tip(label, weekly, 'EUR') for label, weekly in STANDARD_TIPS_EUR_USD],
494 'USD': [make_standard_tip(label, weekly, 'USD') for label, weekly in STANDARD_TIPS_EUR_USD],
495 })
496
497 SUMMARY_MAX_SIZE = 100
498
499 TAKE_THROTTLING_THRESHOLD = MoneyAutoConvertDict(
500 {k: Money('1.00', k) for k in ('EUR', 'USD')}
501 )
502
503 USERNAME_MAX_SIZE = 32
504 USERNAME_SUFFIX_BLACKLIST = set('.txt .html .htm .json .xml'.split())
505
506 del _
507
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/liberapay/constants.py b/liberapay/constants.py
--- a/liberapay/constants.py
+++ b/liberapay/constants.py
@@ -330,6 +330,10 @@
},
}
+PAYIN_SETTLEMENT_DELAYS = {
+ 'stripe-sdd': timedelta(days=6),
+}
+
PAYMENT_METHODS = {
'mango-ba': _("Direct Debit"),
'mango-bw': _("Bank Wire"),
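
The patch above only introduces the `PAYIN_SETTLEMENT_DELAYS` table; a plausible consumer would add the delay to a payin's creation time to estimate when funds settle. The helper below is a hedged sketch of such usage and is not taken from the Liberapay codebase:

```python
# Illustrative only: how a settlement-delay table like the one added above
# could be consumed. `expected_settlement` is a hypothetical helper.
from datetime import datetime, timedelta, timezone

PAYIN_SETTLEMENT_DELAYS = {
    'stripe-sdd': timedelta(days=6),  # SEPA direct debits settle slowly
}

def expected_settlement(payin_method, initiated_at):
    """Methods without an entry (cards, PayPal) fall back to a zero delay."""
    return initiated_at + PAYIN_SETTLEMENT_DELAYS.get(payin_method, timedelta(0))

print(expected_settlement('stripe-sdd', datetime(2020, 1, 1, tzinfo=timezone.utc)))
# 2020-01-07 00:00:00+00:00
```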
| {"golden_diff": "diff --git a/liberapay/constants.py b/liberapay/constants.py\n--- a/liberapay/constants.py\n+++ b/liberapay/constants.py\n@@ -330,6 +330,10 @@\n },\n }\n \n+PAYIN_SETTLEMENT_DELAYS = {\n+ 'stripe-sdd': timedelta(days=6),\n+}\n+\n PAYMENT_METHODS = {\n 'mango-ba': _(\"Direct Debit\"),\n 'mango-bw': _(\"Bank Wire\"),\n", "issue": "The username change warning isn't clear enough\n<https://mastodonten.de/@scroom/100724362756380264>:\r\n\r\n> [@Liberapay](https://mastodon.xyz/@Liberapay) I have a question about existing links to donation accounts: I changed my name and then I got a hint that I had to change the donation links. So far the old link still works. Will it be turned off at some point?\r\n>\r\n> https://liberapay.com/~32819/donate\n", "before_files": [{"content": "from collections import defaultdict, namedtuple, OrderedDict\nfrom datetime import date, datetime, timedelta\nfrom decimal import Decimal, ROUND_FLOOR, ROUND_HALF_UP, ROUND_UP\nimport re\n\nfrom babel.numbers import get_currency_precision\nfrom mangopay.utils import Money\nfrom markupsafe import Markup\nfrom pando.utils import utc\n\n\ndef ordered_set(keys):\n return OrderedDict((k, None) for k in keys)\n\n\ndef check_bits(bits):\n assert len(set(bits)) == len(bits) # no duplicates\n assert not [b for b in bits if '{0:b}'.format(b).count('1') != 1] # single bit\n\n\nEvent = namedtuple('Event', 'name bit title')\n\n\nclass Fees(namedtuple('Fees', ('var', 'fix'))):\n VAT = Decimal('0.17') # 17% (Luxembourg rate)\n VAT_1 = VAT + 1\n\n @property\n def with_vat(self):\n r = (self.var * self.VAT_1 * 100, self.fix * self.VAT_1)\n return r[0] if not r[1] else r[1].round_up() if not r[0] else r\n\n\ndef to_precision(x, precision, rounding=ROUND_HALF_UP):\n \"\"\"Round `x` to keep only `precision` of its most significant digits.\n\n >>> to_precision(Decimal('0.0086820'), 2)\n Decimal('0.0087')\n >>> to_precision(Decimal('13567.89'), 3)\n Decimal('13600')\n >>> to_precision(Decimal('0.000'), 4)\n Decimal('0')\n \"\"\"\n if x == 0:\n return Decimal(0)\n log10 = x.log10().to_integral(ROUND_FLOOR)\n # round\n factor = Decimal(10) ** (log10 + 1)\n r = (x / factor).quantize(Decimal(10) ** -precision, rounding=rounding) * factor\n # remove trailing zeros\n r = r.quantize(Decimal(10) ** (log10 - precision + 1))\n return r\n\n\ndef convert_symbolic_amount(amount, target_currency, precision=2, rounding=ROUND_HALF_UP):\n from liberapay.website import website\n rate = website.currency_exchange_rates[('EUR', target_currency)]\n minimum = Money.MINIMUMS[target_currency].amount\n return max(\n to_precision(amount * rate, precision, rounding).quantize(minimum, rounding),\n minimum\n )\n\n\nclass MoneyAutoConvertDict(defaultdict):\n\n def __init__(self, *args, **kw):\n super(MoneyAutoConvertDict, self).__init__(None, *args, **kw)\n\n def __missing__(self, currency):\n r = Money(convert_symbolic_amount(self['EUR'].amount, currency, 1), currency)\n self[currency] = r\n return r\n\n\nStandardTip = namedtuple('StandardTip', 'label weekly monthly yearly')\n\n\n_ = lambda a: a\n\nASCII_ALLOWED_IN_USERNAME = set(\"0123456789\"\n \"abcdefghijklmnopqrstuvwxyz\"\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n \"-_.\")\n\nAVATAR_QUERY = '?s=160&d=404'\nAVATAR_SOURCES = (\n 'libravatar bitbucket facebook github gitlab google mastodon pleroma twitch twitter youtube'\n).split()\n\nBASE64URL_CHARS = set('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_')\n\nBIRTHDAY = date(2015, 5, 22)\n\nCARD_BRANDS = {\n 'amex': 'American Express',\n 'diners': 
'Diners Club',\n 'discover': 'Discover',\n 'jcb': 'JCB',\n 'mastercard': 'Mastercard',\n 'unionpay': 'UnionPay',\n 'visa': 'Visa',\n 'unknown': '',\n}\n\nCURRENCIES = ordered_set([\n 'EUR', 'USD',\n 'AUD', 'BGN', 'BRL', 'CAD', 'CHF', 'CNY', 'CZK', 'DKK', 'GBP', 'HKD', 'HRK',\n 'HUF', 'IDR', 'ILS', 'INR', 'ISK', 'JPY', 'KRW', 'MXN', 'MYR', 'NOK', 'NZD',\n 'PHP', 'PLN', 'RON', 'RUB', 'SEK', 'SGD', 'THB', 'TRY', 'ZAR'\n])\n\nD_CENT = Decimal('0.01')\nD_MAX = Decimal('999999999999.99')\nD_ZERO = Decimal('0.00')\n\nclass _DonationLimits(defaultdict):\n def __missing__(self, currency):\n minimum = Money.MINIMUMS[currency].amount\n eur_weekly_amounts = DONATION_LIMITS_EUR_USD['weekly']\n converted_weekly_amounts = (\n convert_symbolic_amount(eur_weekly_amounts[0], currency),\n convert_symbolic_amount(eur_weekly_amounts[1], currency)\n )\n r = {\n 'weekly': tuple(Money(x, currency) for x in converted_weekly_amounts),\n 'monthly': tuple(\n Money((x * Decimal(52) / Decimal(12)).quantize(minimum, rounding=ROUND_UP), currency)\n for x in converted_weekly_amounts\n ),\n 'yearly': tuple(Money(x * Decimal(52), currency) for x in converted_weekly_amounts),\n }\n self[currency] = r\n return r\n\nDONATION_LIMITS_WEEKLY_EUR_USD = (Decimal('0.01'), Decimal('100.00'))\nDONATION_LIMITS_EUR_USD = {\n 'weekly': DONATION_LIMITS_WEEKLY_EUR_USD,\n 'monthly': tuple((x * Decimal(52) / Decimal(12)).quantize(D_CENT, rounding=ROUND_UP)\n for x in DONATION_LIMITS_WEEKLY_EUR_USD),\n 'yearly': tuple(x * Decimal(52) for x in DONATION_LIMITS_WEEKLY_EUR_USD),\n}\nDONATION_LIMITS = _DonationLimits(None, {\n 'EUR': {k: (Money(v[0], 'EUR'), Money(v[1], 'EUR')) for k, v in DONATION_LIMITS_EUR_USD.items()},\n 'USD': {k: (Money(v[0], 'USD'), Money(v[1], 'USD')) for k, v in DONATION_LIMITS_EUR_USD.items()},\n})\n\nDOMAIN_RE = re.compile(r'''\n ^\n ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+\n [a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\n $\n''', re.VERBOSE)\n\nELSEWHERE_ACTIONS = {'connect', 'lock', 'unlock'}\n\nEMAIL_VERIFICATION_TIMEOUT = timedelta(hours=24)\nEMAIL_RE = re.compile(r'''\n # This is the regexp used by MangoPay (as of February 2017).\n # It rejects some valid but exotic addresses.\n # https://en.wikipedia.org/wiki/Email_address\n ^\n [a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+(\\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)*\n @\n ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\n $\n''', re.VERBOSE)\n\nEPOCH = datetime(1970, 1, 1, 0, 0, 0, 0, utc)\n\nEUROZONE = set(\"AT BE CY DE EE ES FI FR GR IE IT LT LU LV MT NL PT SI SK\".split())\nSEPA = EUROZONE | set(\"AD BG CH CZ DK GB GI HR HU IS LI MC NO PL RO SE VA\".split())\n\nEVENTS = [\n Event('income', 1, _(\"Every week as long as I am receiving donations\")),\n Event('donate_reminder', 2, _(\"When it's time to renew my donations\")),\n Event('pledgee_joined', 16, _(\"When someone I pledge to joins Liberapay\")),\n Event('team_invite', 32, _(\"When someone invites me to join a team\")),\n Event('payin_failed', 2**11, _(\"When a payment I initiated fails\")),\n Event('payin_succeeded', 2**12, _(\"When a payment I initiated succeeds\")),\n Event('payin_refund_initiated', 2**13, _(\"When money is being refunded back to me\")),\n Event('upcoming_debit', 2**14, _(\"When an automatic donation renewal payment is upcoming\")),\n Event('missing_route', 2**15, _(\"When I no longer have any valid payment instrument\")),\n Event('renewal_aborted', 2**16, _(\"When a donation renewal payment has been aborted\")),\n]\ncheck_bits([e.bit for e in EVENTS])\nEVENTS = 
OrderedDict((e.name, e) for e in EVENTS)\nEVENTS_S = ' '.join(EVENTS.keys())\n\n# https://www.mangopay.com/pricing/\nFEE_PAYIN_BANK_WIRE = Fees(Decimal('0.005'), 0) # 0.5%\nFEE_PAYIN_CARD = {\n 'EUR': Fees(Decimal('0.018'), Money('0.18', 'EUR')), # 1.8% + \u20ac0.18\n 'USD': Fees(Decimal('0.025'), Money('0.30', 'USD')), # 2.5% + $0.30\n}\nFEE_PAYIN_DIRECT_DEBIT = {\n 'EUR': Fees(0, Money('0.50', 'EUR')), # \u20ac0.50\n 'GBP': Fees(0, Money('0.50', 'GBP')), # \u00a30.50\n}\nFEE_PAYOUT = {\n 'EUR': {\n 'domestic': (SEPA, Fees(0, 0)),\n 'foreign': Fees(0, 0),\n },\n 'GBP': {\n 'domestic': ({'GB'}, Fees(0, Money('0.45', 'GBP'))),\n 'foreign': Fees(0, Money('1.90', 'GBP')),\n },\n 'USD': {\n '*': Fees(0, Money('3.00', 'USD')),\n },\n}\nFEE_PAYOUT_WARN = Decimal('0.03') # warn user when fee exceeds 3%\n\nHTML_A = Markup('<a href=\"%s\">%s</a>')\n\nIDENTITY_FIELDS = set(\"\"\"\n birthdate headquarters_address name nationality occupation organization_name\n postal_address\n\"\"\".split())\n\nINVOICE_DOC_MAX_SIZE = 5000000\nINVOICE_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'png']\nINVOICE_DOCS_LIMIT = 25\n\nINVOICE_NATURES = {\n 'expense': _(\"Expense Report\"),\n}\n\nINVOICE_STATUSES = {\n 'pre': _(\"Draft\"),\n 'new': _(\"Sent (awaiting approval)\"),\n 'retracted': _(\"Retracted\"),\n 'accepted': _(\"Accepted (awaiting payment)\"),\n 'paid': _(\"Paid\"),\n 'rejected': _(\"Rejected\"),\n}\n\n# https://docs.mangopay.com/api-references/kyc-rules/\nKYC_DOC_MAX_SIZE = 7000000\nKYC_DOC_MAX_SIZE_MB = int(KYC_DOC_MAX_SIZE / 1000000)\nKYC_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'gif', 'png']\nKYC_DOCS_EXTS_STR = ', '.join(KYC_DOCS_EXTS)\nKYC_INCOME_THRESHOLDS = [(i, Money(a, 'EUR')) for i, a in (\n (1, 18000),\n (2, 30000),\n (3, 50000),\n (4, 80000),\n (5, 120000),\n (6, 120000),\n)]\nKYC_PAYIN_YEARLY_THRESHOLD = Money('2500', 'EUR')\nKYC_PAYOUT_YEARLY_THRESHOLD = Money('1000', 'EUR')\n\nLAUNCH_TIME = datetime(2016, 2, 3, 12, 50, 0, 0, utc)\n\nPARTICIPANT_KINDS = {\n 'individual': _(\"Individual\"),\n 'organization': _(\"Organization\"),\n 'group': _(\"Team\"),\n}\n\nPASSWORD_MIN_SIZE = 8\nPASSWORD_MAX_SIZE = 150\n\nPAYIN_BANK_WIRE_MIN = {k: Money('2.00', k) for k in ('EUR', 'USD')} # fee \u2248 0.99%\nPAYIN_BANK_WIRE_TARGET = {k: Money('5.00', k) for k in ('EUR', 'USD')} # fee \u2248 0.6%\nPAYIN_BANK_WIRE_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}\nPAYIN_CARD_MIN = {\n 'EUR': Money('15.00', 'EUR'), # fee \u2248 3.5%\n 'USD': Money('20.00', 'USD'), # fee \u2248 4.58%\n}\nPAYIN_CARD_TARGET = {\n 'EUR': Money('92.00', 'EUR'), # fee \u2248 2.33%\n 'USD': Money('95.00', 'USD'), # fee \u2248 3.27%\n}\nPAYIN_CARD_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}\nPAYIN_DIRECT_DEBIT_COUNTRIES = {\n # https://support.gocardless.com/hc/en-gb/articles/115005758445\n 'EUR': EUROZONE | set(\"MC SM\".split()),\n}\nPAYIN_DIRECT_DEBIT_MIN_EUR_GBP = Decimal('15.00') # fee \u2248 3.78%\nPAYIN_DIRECT_DEBIT_MIN = {\n 'EUR': Money(PAYIN_DIRECT_DEBIT_MIN_EUR_GBP, 'EUR'),\n 'GBP': Money(PAYIN_DIRECT_DEBIT_MIN_EUR_GBP, 'GBP'),\n}\nPAYIN_DIRECT_DEBIT_TARGET_EUR_GBP = Decimal('99.00') # fee \u2248 0.59%\nPAYIN_DIRECT_DEBIT_TARGET = {\n 'EUR': Money(PAYIN_DIRECT_DEBIT_TARGET_EUR_GBP, 'EUR'),\n 'GBP': Money(PAYIN_DIRECT_DEBIT_TARGET_EUR_GBP, 'GBP'),\n}\nPAYIN_DIRECT_DEBIT_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}\n\nPAYIN_AMOUNTS = {\n 'paypal': {\n 'min_acceptable': MoneyAutoConvertDict({ # fee > 10%\n 'EUR': Money('2.00', 'EUR'),\n 'USD': Money('2.00', 'USD'),\n }),\n 'min_recommended': MoneyAutoConvertDict({ # 
fee < 8%\n 'EUR': Money('10.00', 'EUR'),\n 'USD': Money('12.00', 'USD'),\n }),\n 'low_fee': MoneyAutoConvertDict({ # fee < 6%\n 'EUR': Money('40.00', 'EUR'),\n 'USD': Money('48.00', 'USD'),\n }),\n 'max_acceptable': MoneyAutoConvertDict({\n 'EUR': Money('5000.00', 'EUR'),\n 'USD': Money('5000.00', 'USD'),\n }),\n },\n 'stripe': {\n 'min_acceptable': MoneyAutoConvertDict({ # fee > 10%\n 'EUR': Money('2.00', 'EUR'),\n 'USD': Money('2.00', 'USD'),\n }),\n 'min_recommended': MoneyAutoConvertDict({ # fee < 8%\n 'EUR': Money('10.00', 'EUR'),\n 'USD': Money('12.00', 'USD'),\n }),\n 'low_fee': MoneyAutoConvertDict({ # fee < 6%\n 'EUR': Money('40.00', 'EUR'),\n 'USD': Money('48.00', 'USD'),\n }),\n 'max_acceptable': MoneyAutoConvertDict({\n 'EUR': Money('5000.00', 'EUR'),\n 'USD': Money('5000.00', 'USD'),\n }),\n },\n}\n\nPAYMENT_METHODS = {\n 'mango-ba': _(\"Direct Debit\"),\n 'mango-bw': _(\"Bank Wire\"),\n 'mango-cc': _(\"Credit Card\"),\n 'paypal': \"PayPal\",\n 'stripe-card': _(\"Credit/Debit Card\"),\n 'stripe-sdd': _(\"Direct Debit\"),\n}\nPAYMENT_SLUGS = {\n 'mango-ba': 'direct-debit',\n 'mango-bw': 'bankwire',\n 'mango-cc': 'card',\n}\n\nPAYOUT_COUNTRIES = {\n 'paypal': set(\"\"\"\n AD AE AG AI AL AM AN AO AR AT AU AW AZ BA BB BE BF BG BH BI BJ BM BN BO\n BR BS BT BW BY BZ C2 CA CD CG CH CI CK CL CM CO CR CV CY CZ DE DJ DK DM\n DO DZ EC EE EG ER ES ET FI FJ FK FM FO FR GA GD GE GF GI GL GM GN GP GR\n GT GW GY HK HN HR HU ID IE IL IN IS IT JM JO JP KE KG KH KI KM KN KR KW\n KY KZ LA LC LI LK LS LT LU LV MA MC MD ME MG MH MK ML MN MQ MR MS MT MU\n MV MW MX MY MZ NA NC NE NF NG NI NL NO NP NR NU NZ OM PA PE PF PG PH PL\n PM PN PT PW PY QA RE RO RS RU RW SA SB SC SE SG SH SI SJ SK SL SM SN SO\n SR ST SV SZ TC TD TG TH TJ TM TN TO TT TT TT TT TV TW TZ UA UG GB US UY\n VA VC VE VG VN VU WF WS YE YT ZA ZM ZW\n PR\n \"\"\".split()), # https://www.paypal.com/us/webapps/mpp/country-worldwide\n\n 'stripe': set(\"\"\"\n AT AU BE BG CA CH CY CZ DE DK EE ES FI FR GB GR HK IE IT JP LT LU LV MT\n MX MY NL NO NZ PL PT RO SE SG SI SK US\n PR\n \"\"\".split()), # https://stripe.com/global\n}\n\n# https://developer.paypal.com/docs/api/reference/currency-codes/\nPAYPAL_CURRENCIES = set(\"\"\"\n AUD CAD CHF CZK DKK EUR GBP HKD HUF ILS JPY MXN NOK NZD PHP PLN RUB SEK SGD\n THB TWD USD\n\"\"\".split())\n\nPERIOD_CONVERSION_MAP = {\n ('weekly', 'weekly'): Decimal(1),\n ('monthly', 'weekly'): Decimal(12) / Decimal(52),\n ('yearly', 'weekly'): Decimal(1) / Decimal(52),\n ('weekly', 'monthly'): Decimal(52) / Decimal(12),\n ('monthly', 'monthly'): Decimal(1),\n ('yearly', 'monthly'): Decimal(1) / Decimal(12),\n ('weekly', 'yearly'): Decimal(52),\n ('monthly', 'yearly'): Decimal(12),\n ('yearly', 'yearly'): Decimal(1),\n}\n\nPERIOD_CONVERSION_RATES = {\n 'weekly': Decimal(1),\n 'monthly': Decimal(12) / Decimal(52),\n 'yearly': Decimal(1) / Decimal(52),\n}\n\nPOSTAL_ADDRESS_KEYS = (\n 'AddressLine1', 'AddressLine2', 'City', 'Region', 'PostalCode', 'Country'\n)\nPOSTAL_ADDRESS_KEYS_LIBERAPAY = (\n 'country', 'region', 'city', 'postal_code', 'local_address'\n)\nPOSTAL_ADDRESS_KEYS_STRIPE = (\n 'line1', 'line2', 'city', 'state', 'postal_code', 'country'\n)\n\nPRIVACY_FIELDS = OrderedDict([\n ('hide_giving', (_(\"Hide total giving from others.\"), False)),\n ('hide_receiving', (_(\"Hide total receiving from others.\"), False)),\n ('hide_from_search', (_(\"Hide this profile from search results on Liberapay.\"), True)),\n ('profile_noindex', (_(\"Tell web search engines not to index this profile.\"), True)),\n 
('hide_from_lists', (_(\"Prevent this profile from being listed on Liberapay.\"), True)),\n])\nPRIVACY_FIELDS_S = ' '.join(PRIVACY_FIELDS.keys())\n\nPRIVILEGES = dict(admin=1, run_payday=2)\ncheck_bits(list(PRIVILEGES.values()))\n\nPROFILE_VISIBILITY_ATTRS = ('profile_noindex', 'hide_from_lists', 'hide_from_search')\n\nPUBLIC_NAME_MAX_SIZE = 64\n\nQUARANTINE = timedelta(weeks=0)\n\nRATE_LIMITS = {\n 'add_email.source': (5, 60*60*24), # 5 per day\n 'add_email.target': (2, 60*60*24), # 2 per day\n 'admin.http-unsafe': (10, 60*60*24), # 10 per day\n 'change_currency': (4, 60*60*24*7), # 4 per week\n 'change_password': (7, 60*60*24*7), # 7 per week\n 'change_username': (7, 60*60*24*7), # 7 per week\n 'check_password': (25, 60*60*24*7), # 25 per week\n 'elsewhere-lookup.ip-addr': (5, 20), # 5 per 20 seconds\n 'email.bypass_error': (2, 60*60*24*7), # 2 per week\n 'email.unblacklist.source': (5, 60*60*24*7), # 5 per week\n 'email.unblacklist.target': (3, 60*60*24*7), # 3 per week\n 'http-query.ip-addr': (10, 10), # 10 per 10 seconds\n 'http-query.user': (10, 10), # 10 per 10 seconds\n 'http-unsafe.ip-addr': (10, 10), # 10 per 10 seconds\n 'http-unsafe.user': (10, 10), # 10 per 10 seconds\n 'insert_identity': (7, 60*60*24*7), # 7 per week\n 'log-in.country': (10, 60), # 10 per minute per country\n 'log-in.email': (10, 60*60*24), # 10 per day\n 'log-in.email.not-verified': (2, 60*60*24), # 2 per day\n 'log-in.email.verified': (10, 60*60*24), # 10 per day\n 'log-in.ip-addr': (5, 5*60), # 5 per 5 minutes per IP address\n 'log-in.password': (3, 60*60), # 3 per hour\n 'make_team': (5, 60*60*24*7), # 5 per week\n 'payin.from-user': (15, 60*60*24*7), # 15 per week\n 'payin.from-ip-addr': (15, 60*60*24*7), # 15 per week\n 'refetch_elsewhere_data': (1, 60*60*24*7), # retry after one week\n 'refetch_repos': (1, 60*60*24), # retry after one day\n 'sign-up.email': (1, 5*60), # this is used to detect near-simultaneous requests,\n # so 5 minutes should be plenty enough\n 'sign-up.ip-addr': (5, 60*60), # 5 per hour per IP address\n 'sign-up.ip-net': (15, 60*60), # 15 per hour per IP network\n 'sign-up.country': (5, 5*60), # 5 per 5 minutes per country\n 'sign-up.ip-version': (15, 5*60), # 15 per 5 minutes per IP version\n}\n\nSAFE_METHODS = {'GET', 'HEAD', 'OPTIONS'}\n\nSESSION = 'session'\nSESSION_REFRESH = timedelta(hours=1)\nSESSION_TIMEOUT = timedelta(hours=6)\n\n\ndef make_standard_tip(label, weekly, currency):\n precision = get_currency_precision(currency)\n minimum = D_CENT if precision == 2 else Decimal(10) ** (-precision)\n return StandardTip(\n label,\n Money(weekly, currency),\n Money((weekly / PERIOD_CONVERSION_RATES['monthly']).quantize(minimum), currency),\n Money((weekly / PERIOD_CONVERSION_RATES['yearly']).quantize(minimum), currency),\n )\n\n\nclass _StandardTips(defaultdict):\n def __missing__(self, currency):\n r = [\n make_standard_tip(\n label, convert_symbolic_amount(weekly, currency), currency\n ) for label, weekly in STANDARD_TIPS_EUR_USD\n ]\n self[currency] = r\n return r\n\n\nSTANDARD_TIPS_EUR_USD = (\n (_(\"Symbolic\"), Decimal('0.01')),\n (_(\"Small\"), Decimal('0.25')),\n (_(\"Medium\"), Decimal('1.00')),\n (_(\"Large\"), Decimal('5.00')),\n (_(\"Maximum\"), DONATION_LIMITS_EUR_USD['weekly'][1]),\n)\nSTANDARD_TIPS = _StandardTips(None, {\n 'EUR': [make_standard_tip(label, weekly, 'EUR') for label, weekly in STANDARD_TIPS_EUR_USD],\n 'USD': [make_standard_tip(label, weekly, 'USD') for label, weekly in STANDARD_TIPS_EUR_USD],\n})\n\nSUMMARY_MAX_SIZE = 100\n\nTAKE_THROTTLING_THRESHOLD 
= MoneyAutoConvertDict(\n {k: Money('1.00', k) for k in ('EUR', 'USD')}\n)\n\nUSERNAME_MAX_SIZE = 32\nUSERNAME_SUFFIX_BLACKLIST = set('.txt .html .htm .json .xml'.split())\n\ndel _\n", "path": "liberapay/constants.py"}], "after_files": [{"content": "from collections import defaultdict, namedtuple, OrderedDict\nfrom datetime import date, datetime, timedelta\nfrom decimal import Decimal, ROUND_FLOOR, ROUND_HALF_UP, ROUND_UP\nimport re\n\nfrom babel.numbers import get_currency_precision\nfrom mangopay.utils import Money\nfrom markupsafe import Markup\nfrom pando.utils import utc\n\n\ndef ordered_set(keys):\n return OrderedDict((k, None) for k in keys)\n\n\ndef check_bits(bits):\n assert len(set(bits)) == len(bits) # no duplicates\n assert not [b for b in bits if '{0:b}'.format(b).count('1') != 1] # single bit\n\n\nEvent = namedtuple('Event', 'name bit title')\n\n\nclass Fees(namedtuple('Fees', ('var', 'fix'))):\n VAT = Decimal('0.17') # 17% (Luxembourg rate)\n VAT_1 = VAT + 1\n\n @property\n def with_vat(self):\n r = (self.var * self.VAT_1 * 100, self.fix * self.VAT_1)\n return r[0] if not r[1] else r[1].round_up() if not r[0] else r\n\n\ndef to_precision(x, precision, rounding=ROUND_HALF_UP):\n \"\"\"Round `x` to keep only `precision` of its most significant digits.\n\n >>> to_precision(Decimal('0.0086820'), 2)\n Decimal('0.0087')\n >>> to_precision(Decimal('13567.89'), 3)\n Decimal('13600')\n >>> to_precision(Decimal('0.000'), 4)\n Decimal('0')\n \"\"\"\n if x == 0:\n return Decimal(0)\n log10 = x.log10().to_integral(ROUND_FLOOR)\n # round\n factor = Decimal(10) ** (log10 + 1)\n r = (x / factor).quantize(Decimal(10) ** -precision, rounding=rounding) * factor\n # remove trailing zeros\n r = r.quantize(Decimal(10) ** (log10 - precision + 1))\n return r\n\n\ndef convert_symbolic_amount(amount, target_currency, precision=2, rounding=ROUND_HALF_UP):\n from liberapay.website import website\n rate = website.currency_exchange_rates[('EUR', target_currency)]\n minimum = Money.MINIMUMS[target_currency].amount\n return max(\n to_precision(amount * rate, precision, rounding).quantize(minimum, rounding),\n minimum\n )\n\n\nclass MoneyAutoConvertDict(defaultdict):\n\n def __init__(self, *args, **kw):\n super(MoneyAutoConvertDict, self).__init__(None, *args, **kw)\n\n def __missing__(self, currency):\n r = Money(convert_symbolic_amount(self['EUR'].amount, currency, 1), currency)\n self[currency] = r\n return r\n\n\nStandardTip = namedtuple('StandardTip', 'label weekly monthly yearly')\n\n\n_ = lambda a: a\n\nASCII_ALLOWED_IN_USERNAME = set(\"0123456789\"\n \"abcdefghijklmnopqrstuvwxyz\"\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZ\"\n \"-_.\")\n\nAVATAR_QUERY = '?s=160&d=404'\nAVATAR_SOURCES = (\n 'libravatar bitbucket facebook github gitlab google mastodon pleroma twitch twitter youtube'\n).split()\n\nBASE64URL_CHARS = set('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_')\n\nBIRTHDAY = date(2015, 5, 22)\n\nCARD_BRANDS = {\n 'amex': 'American Express',\n 'diners': 'Diners Club',\n 'discover': 'Discover',\n 'jcb': 'JCB',\n 'mastercard': 'Mastercard',\n 'unionpay': 'UnionPay',\n 'visa': 'Visa',\n 'unknown': '',\n}\n\nCURRENCIES = ordered_set([\n 'EUR', 'USD',\n 'AUD', 'BGN', 'BRL', 'CAD', 'CHF', 'CNY', 'CZK', 'DKK', 'GBP', 'HKD', 'HRK',\n 'HUF', 'IDR', 'ILS', 'INR', 'ISK', 'JPY', 'KRW', 'MXN', 'MYR', 'NOK', 'NZD',\n 'PHP', 'PLN', 'RON', 'RUB', 'SEK', 'SGD', 'THB', 'TRY', 'ZAR'\n])\n\nD_CENT = Decimal('0.01')\nD_MAX = Decimal('999999999999.99')\nD_ZERO = Decimal('0.00')\n\nclass 
_DonationLimits(defaultdict):\n def __missing__(self, currency):\n minimum = Money.MINIMUMS[currency].amount\n eur_weekly_amounts = DONATION_LIMITS_EUR_USD['weekly']\n converted_weekly_amounts = (\n convert_symbolic_amount(eur_weekly_amounts[0], currency),\n convert_symbolic_amount(eur_weekly_amounts[1], currency)\n )\n r = {\n 'weekly': tuple(Money(x, currency) for x in converted_weekly_amounts),\n 'monthly': tuple(\n Money((x * Decimal(52) / Decimal(12)).quantize(minimum, rounding=ROUND_UP), currency)\n for x in converted_weekly_amounts\n ),\n 'yearly': tuple(Money(x * Decimal(52), currency) for x in converted_weekly_amounts),\n }\n self[currency] = r\n return r\n\nDONATION_LIMITS_WEEKLY_EUR_USD = (Decimal('0.01'), Decimal('100.00'))\nDONATION_LIMITS_EUR_USD = {\n 'weekly': DONATION_LIMITS_WEEKLY_EUR_USD,\n 'monthly': tuple((x * Decimal(52) / Decimal(12)).quantize(D_CENT, rounding=ROUND_UP)\n for x in DONATION_LIMITS_WEEKLY_EUR_USD),\n 'yearly': tuple(x * Decimal(52) for x in DONATION_LIMITS_WEEKLY_EUR_USD),\n}\nDONATION_LIMITS = _DonationLimits(None, {\n 'EUR': {k: (Money(v[0], 'EUR'), Money(v[1], 'EUR')) for k, v in DONATION_LIMITS_EUR_USD.items()},\n 'USD': {k: (Money(v[0], 'USD'), Money(v[1], 'USD')) for k, v in DONATION_LIMITS_EUR_USD.items()},\n})\n\nDOMAIN_RE = re.compile(r'''\n ^\n ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+\n [a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\n $\n''', re.VERBOSE)\n\nELSEWHERE_ACTIONS = {'connect', 'lock', 'unlock'}\n\nEMAIL_VERIFICATION_TIMEOUT = timedelta(hours=24)\nEMAIL_RE = re.compile(r'''\n # This is the regexp used by MangoPay (as of February 2017).\n # It rejects some valid but exotic addresses.\n # https://en.wikipedia.org/wiki/Email_address\n ^\n [a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+(\\.[a-zA-Z0-9!#$%&'*+/=?^_`{|}~-]+)*\n @\n ([a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\\.)+[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?\n $\n''', re.VERBOSE)\n\nEPOCH = datetime(1970, 1, 1, 0, 0, 0, 0, utc)\n\nEUROZONE = set(\"AT BE CY DE EE ES FI FR GR IE IT LT LU LV MT NL PT SI SK\".split())\nSEPA = EUROZONE | set(\"AD BG CH CZ DK GB GI HR HU IS LI MC NO PL RO SE VA\".split())\n\nEVENTS = [\n Event('income', 1, _(\"Every week as long as I am receiving donations\")),\n Event('donate_reminder', 2, _(\"When it's time to renew my donations\")),\n Event('pledgee_joined', 16, _(\"When someone I pledge to joins Liberapay\")),\n Event('team_invite', 32, _(\"When someone invites me to join a team\")),\n Event('payin_failed', 2**11, _(\"When a payment I initiated fails\")),\n Event('payin_succeeded', 2**12, _(\"When a payment I initiated succeeds\")),\n Event('payin_refund_initiated', 2**13, _(\"When money is being refunded back to me\")),\n Event('upcoming_debit', 2**14, _(\"When an automatic donation renewal payment is upcoming\")),\n Event('missing_route', 2**15, _(\"When I no longer have any valid payment instrument\")),\n Event('renewal_aborted', 2**16, _(\"When a donation renewal payment has been aborted\")),\n]\ncheck_bits([e.bit for e in EVENTS])\nEVENTS = OrderedDict((e.name, e) for e in EVENTS)\nEVENTS_S = ' '.join(EVENTS.keys())\n\n# https://www.mangopay.com/pricing/\nFEE_PAYIN_BANK_WIRE = Fees(Decimal('0.005'), 0) # 0.5%\nFEE_PAYIN_CARD = {\n 'EUR': Fees(Decimal('0.018'), Money('0.18', 'EUR')), # 1.8% + \u20ac0.18\n 'USD': Fees(Decimal('0.025'), Money('0.30', 'USD')), # 2.5% + $0.30\n}\nFEE_PAYIN_DIRECT_DEBIT = {\n 'EUR': Fees(0, Money('0.50', 'EUR')), # \u20ac0.50\n 'GBP': Fees(0, Money('0.50', 'GBP')), # \u00a30.50\n}\nFEE_PAYOUT = {\n 'EUR': {\n 'domestic': (SEPA, Fees(0, 
0)),\n 'foreign': Fees(0, 0),\n },\n 'GBP': {\n 'domestic': ({'GB'}, Fees(0, Money('0.45', 'GBP'))),\n 'foreign': Fees(0, Money('1.90', 'GBP')),\n },\n 'USD': {\n '*': Fees(0, Money('3.00', 'USD')),\n },\n}\nFEE_PAYOUT_WARN = Decimal('0.03') # warn user when fee exceeds 3%\n\nHTML_A = Markup('<a href=\"%s\">%s</a>')\n\nIDENTITY_FIELDS = set(\"\"\"\n birthdate headquarters_address name nationality occupation organization_name\n postal_address\n\"\"\".split())\n\nINVOICE_DOC_MAX_SIZE = 5000000\nINVOICE_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'png']\nINVOICE_DOCS_LIMIT = 25\n\nINVOICE_NATURES = {\n 'expense': _(\"Expense Report\"),\n}\n\nINVOICE_STATUSES = {\n 'pre': _(\"Draft\"),\n 'new': _(\"Sent (awaiting approval)\"),\n 'retracted': _(\"Retracted\"),\n 'accepted': _(\"Accepted (awaiting payment)\"),\n 'paid': _(\"Paid\"),\n 'rejected': _(\"Rejected\"),\n}\n\n# https://docs.mangopay.com/api-references/kyc-rules/\nKYC_DOC_MAX_SIZE = 7000000\nKYC_DOC_MAX_SIZE_MB = int(KYC_DOC_MAX_SIZE / 1000000)\nKYC_DOCS_EXTS = ['pdf', 'jpeg', 'jpg', 'gif', 'png']\nKYC_DOCS_EXTS_STR = ', '.join(KYC_DOCS_EXTS)\nKYC_INCOME_THRESHOLDS = [(i, Money(a, 'EUR')) for i, a in (\n (1, 18000),\n (2, 30000),\n (3, 50000),\n (4, 80000),\n (5, 120000),\n (6, 120000),\n)]\nKYC_PAYIN_YEARLY_THRESHOLD = Money('2500', 'EUR')\nKYC_PAYOUT_YEARLY_THRESHOLD = Money('1000', 'EUR')\n\nLAUNCH_TIME = datetime(2016, 2, 3, 12, 50, 0, 0, utc)\n\nPARTICIPANT_KINDS = {\n 'individual': _(\"Individual\"),\n 'organization': _(\"Organization\"),\n 'group': _(\"Team\"),\n}\n\nPASSWORD_MIN_SIZE = 8\nPASSWORD_MAX_SIZE = 150\n\nPAYIN_BANK_WIRE_MIN = {k: Money('2.00', k) for k in ('EUR', 'USD')} # fee \u2248 0.99%\nPAYIN_BANK_WIRE_TARGET = {k: Money('5.00', k) for k in ('EUR', 'USD')} # fee \u2248 0.6%\nPAYIN_BANK_WIRE_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}\nPAYIN_CARD_MIN = {\n 'EUR': Money('15.00', 'EUR'), # fee \u2248 3.5%\n 'USD': Money('20.00', 'USD'), # fee \u2248 4.58%\n}\nPAYIN_CARD_TARGET = {\n 'EUR': Money('92.00', 'EUR'), # fee \u2248 2.33%\n 'USD': Money('95.00', 'USD'), # fee \u2248 3.27%\n}\nPAYIN_CARD_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}\nPAYIN_DIRECT_DEBIT_COUNTRIES = {\n # https://support.gocardless.com/hc/en-gb/articles/115005758445\n 'EUR': EUROZONE | set(\"MC SM\".split()),\n}\nPAYIN_DIRECT_DEBIT_MIN_EUR_GBP = Decimal('15.00') # fee \u2248 3.78%\nPAYIN_DIRECT_DEBIT_MIN = {\n 'EUR': Money(PAYIN_DIRECT_DEBIT_MIN_EUR_GBP, 'EUR'),\n 'GBP': Money(PAYIN_DIRECT_DEBIT_MIN_EUR_GBP, 'GBP'),\n}\nPAYIN_DIRECT_DEBIT_TARGET_EUR_GBP = Decimal('99.00') # fee \u2248 0.59%\nPAYIN_DIRECT_DEBIT_TARGET = {\n 'EUR': Money(PAYIN_DIRECT_DEBIT_TARGET_EUR_GBP, 'EUR'),\n 'GBP': Money(PAYIN_DIRECT_DEBIT_TARGET_EUR_GBP, 'GBP'),\n}\nPAYIN_DIRECT_DEBIT_MAX = {k: Money('2500.00', k) for k in ('EUR', 'USD')}\n\nPAYIN_AMOUNTS = {\n 'paypal': {\n 'min_acceptable': MoneyAutoConvertDict({ # fee > 10%\n 'EUR': Money('2.00', 'EUR'),\n 'USD': Money('2.00', 'USD'),\n }),\n 'min_recommended': MoneyAutoConvertDict({ # fee < 8%\n 'EUR': Money('10.00', 'EUR'),\n 'USD': Money('12.00', 'USD'),\n }),\n 'low_fee': MoneyAutoConvertDict({ # fee < 6%\n 'EUR': Money('40.00', 'EUR'),\n 'USD': Money('48.00', 'USD'),\n }),\n 'max_acceptable': MoneyAutoConvertDict({\n 'EUR': Money('5000.00', 'EUR'),\n 'USD': Money('5000.00', 'USD'),\n }),\n },\n 'stripe': {\n 'min_acceptable': MoneyAutoConvertDict({ # fee > 10%\n 'EUR': Money('2.00', 'EUR'),\n 'USD': Money('2.00', 'USD'),\n }),\n 'min_recommended': MoneyAutoConvertDict({ # fee < 8%\n 'EUR': 
Money('10.00', 'EUR'),\n 'USD': Money('12.00', 'USD'),\n }),\n 'low_fee': MoneyAutoConvertDict({ # fee < 6%\n 'EUR': Money('40.00', 'EUR'),\n 'USD': Money('48.00', 'USD'),\n }),\n 'max_acceptable': MoneyAutoConvertDict({\n 'EUR': Money('5000.00', 'EUR'),\n 'USD': Money('5000.00', 'USD'),\n }),\n },\n}\n\nPAYIN_SETTLEMENT_DELAYS = {\n 'stripe-sdd': timedelta(days=6),\n}\n\nPAYMENT_METHODS = {\n 'mango-ba': _(\"Direct Debit\"),\n 'mango-bw': _(\"Bank Wire\"),\n 'mango-cc': _(\"Credit Card\"),\n 'paypal': \"PayPal\",\n 'stripe-card': _(\"Credit/Debit Card\"),\n 'stripe-sdd': _(\"Direct Debit\"),\n}\nPAYMENT_SLUGS = {\n 'mango-ba': 'direct-debit',\n 'mango-bw': 'bankwire',\n 'mango-cc': 'card',\n}\n\nPAYOUT_COUNTRIES = {\n 'paypal': set(\"\"\"\n AD AE AG AI AL AM AN AO AR AT AU AW AZ BA BB BE BF BG BH BI BJ BM BN BO\n BR BS BT BW BY BZ C2 CA CD CG CH CI CK CL CM CO CR CV CY CZ DE DJ DK DM\n DO DZ EC EE EG ER ES ET FI FJ FK FM FO FR GA GD GE GF GI GL GM GN GP GR\n GT GW GY HK HN HR HU ID IE IL IN IS IT JM JO JP KE KG KH KI KM KN KR KW\n KY KZ LA LC LI LK LS LT LU LV MA MC MD ME MG MH MK ML MN MQ MR MS MT MU\n MV MW MX MY MZ NA NC NE NF NG NI NL NO NP NR NU NZ OM PA PE PF PG PH PL\n PM PN PT PW PY QA RE RO RS RU RW SA SB SC SE SG SH SI SJ SK SL SM SN SO\n SR ST SV SZ TC TD TG TH TJ TM TN TO TT TT TT TT TV TW TZ UA UG GB US UY\n VA VC VE VG VN VU WF WS YE YT ZA ZM ZW\n PR\n \"\"\".split()), # https://www.paypal.com/us/webapps/mpp/country-worldwide\n\n 'stripe': set(\"\"\"\n AT AU BE BG CA CH CY CZ DE DK EE ES FI FR GB GR HK IE IT JP LT LU LV MT\n MX MY NL NO NZ PL PT RO SE SG SI SK US\n PR\n \"\"\".split()), # https://stripe.com/global\n}\n\n# https://developer.paypal.com/docs/api/reference/currency-codes/\nPAYPAL_CURRENCIES = set(\"\"\"\n AUD CAD CHF CZK DKK EUR GBP HKD HUF ILS JPY MXN NOK NZD PHP PLN RUB SEK SGD\n THB TWD USD\n\"\"\".split())\n\nPERIOD_CONVERSION_MAP = {\n ('weekly', 'weekly'): Decimal(1),\n ('monthly', 'weekly'): Decimal(12) / Decimal(52),\n ('yearly', 'weekly'): Decimal(1) / Decimal(52),\n ('weekly', 'monthly'): Decimal(52) / Decimal(12),\n ('monthly', 'monthly'): Decimal(1),\n ('yearly', 'monthly'): Decimal(1) / Decimal(12),\n ('weekly', 'yearly'): Decimal(52),\n ('monthly', 'yearly'): Decimal(12),\n ('yearly', 'yearly'): Decimal(1),\n}\n\nPERIOD_CONVERSION_RATES = {\n 'weekly': Decimal(1),\n 'monthly': Decimal(12) / Decimal(52),\n 'yearly': Decimal(1) / Decimal(52),\n}\n\nPOSTAL_ADDRESS_KEYS = (\n 'AddressLine1', 'AddressLine2', 'City', 'Region', 'PostalCode', 'Country'\n)\nPOSTAL_ADDRESS_KEYS_LIBERAPAY = (\n 'country', 'region', 'city', 'postal_code', 'local_address'\n)\nPOSTAL_ADDRESS_KEYS_STRIPE = (\n 'line1', 'line2', 'city', 'state', 'postal_code', 'country'\n)\n\nPRIVACY_FIELDS = OrderedDict([\n ('hide_giving', (_(\"Hide total giving from others.\"), False)),\n ('hide_receiving', (_(\"Hide total receiving from others.\"), False)),\n ('hide_from_search', (_(\"Hide this profile from search results on Liberapay.\"), True)),\n ('profile_noindex', (_(\"Tell web search engines not to index this profile.\"), True)),\n ('hide_from_lists', (_(\"Prevent this profile from being listed on Liberapay.\"), True)),\n])\nPRIVACY_FIELDS_S = ' '.join(PRIVACY_FIELDS.keys())\n\nPRIVILEGES = dict(admin=1, run_payday=2)\ncheck_bits(list(PRIVILEGES.values()))\n\nPROFILE_VISIBILITY_ATTRS = ('profile_noindex', 'hide_from_lists', 'hide_from_search')\n\nPUBLIC_NAME_MAX_SIZE = 64\n\nQUARANTINE = timedelta(weeks=0)\n\nRATE_LIMITS = {\n 'add_email.source': (5, 60*60*24), # 5 per day\n 
'add_email.target': (2, 60*60*24), # 2 per day\n 'admin.http-unsafe': (10, 60*60*24), # 10 per day\n 'change_currency': (4, 60*60*24*7), # 4 per week\n 'change_password': (7, 60*60*24*7), # 7 per week\n 'change_username': (7, 60*60*24*7), # 7 per week\n 'check_password': (25, 60*60*24*7), # 25 per week\n 'elsewhere-lookup.ip-addr': (5, 20), # 5 per 20 seconds\n 'email.bypass_error': (2, 60*60*24*7), # 2 per week\n 'email.unblacklist.source': (5, 60*60*24*7), # 5 per week\n 'email.unblacklist.target': (3, 60*60*24*7), # 3 per week\n 'http-query.ip-addr': (10, 10), # 10 per 10 seconds\n 'http-query.user': (10, 10), # 10 per 10 seconds\n 'http-unsafe.ip-addr': (10, 10), # 10 per 10 seconds\n 'http-unsafe.user': (10, 10), # 10 per 10 seconds\n 'insert_identity': (7, 60*60*24*7), # 7 per week\n 'log-in.country': (10, 60), # 10 per minute per country\n 'log-in.email': (10, 60*60*24), # 10 per day\n 'log-in.email.not-verified': (2, 60*60*24), # 2 per day\n 'log-in.email.verified': (10, 60*60*24), # 10 per day\n 'log-in.ip-addr': (5, 5*60), # 5 per 5 minutes per IP address\n 'log-in.password': (3, 60*60), # 3 per hour\n 'make_team': (5, 60*60*24*7), # 5 per week\n 'payin.from-user': (15, 60*60*24*7), # 15 per week\n 'payin.from-ip-addr': (15, 60*60*24*7), # 15 per week\n 'refetch_elsewhere_data': (1, 60*60*24*7), # retry after one week\n 'refetch_repos': (1, 60*60*24), # retry after one day\n 'sign-up.email': (1, 5*60), # this is used to detect near-simultaneous requests,\n # so 5 minutes should be plenty enough\n 'sign-up.ip-addr': (5, 60*60), # 5 per hour per IP address\n 'sign-up.ip-net': (15, 60*60), # 15 per hour per IP network\n 'sign-up.country': (5, 5*60), # 5 per 5 minutes per country\n 'sign-up.ip-version': (15, 5*60), # 15 per 5 minutes per IP version\n}\n\nSAFE_METHODS = {'GET', 'HEAD', 'OPTIONS'}\n\nSESSION = 'session'\nSESSION_REFRESH = timedelta(hours=1)\nSESSION_TIMEOUT = timedelta(hours=6)\n\n\ndef make_standard_tip(label, weekly, currency):\n precision = get_currency_precision(currency)\n minimum = D_CENT if precision == 2 else Decimal(10) ** (-precision)\n return StandardTip(\n label,\n Money(weekly, currency),\n Money((weekly / PERIOD_CONVERSION_RATES['monthly']).quantize(minimum), currency),\n Money((weekly / PERIOD_CONVERSION_RATES['yearly']).quantize(minimum), currency),\n )\n\n\nclass _StandardTips(defaultdict):\n def __missing__(self, currency):\n r = [\n make_standard_tip(\n label, convert_symbolic_amount(weekly, currency), currency\n ) for label, weekly in STANDARD_TIPS_EUR_USD\n ]\n self[currency] = r\n return r\n\n\nSTANDARD_TIPS_EUR_USD = (\n (_(\"Symbolic\"), Decimal('0.01')),\n (_(\"Small\"), Decimal('0.25')),\n (_(\"Medium\"), Decimal('1.00')),\n (_(\"Large\"), Decimal('5.00')),\n (_(\"Maximum\"), DONATION_LIMITS_EUR_USD['weekly'][1]),\n)\nSTANDARD_TIPS = _StandardTips(None, {\n 'EUR': [make_standard_tip(label, weekly, 'EUR') for label, weekly in STANDARD_TIPS_EUR_USD],\n 'USD': [make_standard_tip(label, weekly, 'USD') for label, weekly in STANDARD_TIPS_EUR_USD],\n})\n\nSUMMARY_MAX_SIZE = 100\n\nTAKE_THROTTLING_THRESHOLD = MoneyAutoConvertDict(\n {k: Money('1.00', k) for k in ('EUR', 'USD')}\n)\n\nUSERNAME_MAX_SIZE = 32\nUSERNAME_SUFFIX_BLACKLIST = set('.txt .html .htm .json .xml'.split())\n\ndel _\n", "path": "liberapay/constants.py"}]} |
gh_patches_debug_144 | rasdani/github-patches | git_diff | cocotb__cocotb-3179 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
CI fail "ValueError: Ghdl: Simulator does not support Verilog"
We get a GHDL failure in the release CI pipeline on master:
Log: https://github.com/cocotb/cocotb/actions/runs/3816250240/jobs/6491822293
```
nox > Running simulator-specific tests against a simulator SIM=ghdl, HDL_TOPLEVEL_LANG=vhdl, TOPLEVEL_LANG=vhdl, VHDL_GPI_INTERFACE=vpi
nox > pytest -v -k simulator_required
============================= test session starts ==============================
platform linux -- Python 3.8.15, pytest-7.2.0, pluggy-1.0.0 -- /home/runner/work/cocotb/cocotb/.nox/release_test_sim-sim-ghdl-toplevel_lang-vhdl-gpi_interface-vpi/bin/python
cachedir: .pytest_cache
rootdir: /home/runner/work/cocotb/cocotb, configfile: setup.cfg, testpaths: tests/pytest, examples/simple_dff
collecting ... collected 109 items / 100 deselected / 9 selected
tests/pytest/test_cocotb.py::test_cocotb FAILED [ 11%]
tests/pytest/test_parallel_cocotb.py::test_cocotb_parallel_compile FAILED [ 22%]
tests/pytest/test_parallel_cocotb.py::test_cocotb_parallel[0] FAILED [ 33%]
tests/pytest/test_parallel_cocotb.py::test_cocotb_parallel[1] FAILED [ 44%]
tests/pytest/test_parallel_cocotb.py::test_cocotb_parallel[2] FAILED [ 55%]
tests/pytest/test_parallel_cocotb.py::test_cocotb_parallel[3] FAILED [ 66%]
tests/pytest/test_runner.py::test_runner[parameters0] FAILED [ 77%]
tests/pytest/test_runner.py::test_runner[parameters1] FAILED [ 88%]
examples/simple_dff/test_dff.py::test_simple_dff_runner FAILED [100%]
```
Looks like somehow the chosen toplevel language doesn't make it to the runner.
--- END ISSUE ---
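
For additional context on the hypothesis in the issue: nox only exports environment variables to a child process when they are passed explicitly via `session.run(..., env=...)`; a mapping that is merely built inside the session function has no effect on subprocesses. The sketch below is illustrative only — it is not one of the repository's files — and assumes nox's standard `Session.run` API:

```python
# Illustrative sketch only -- not a file from the repository under test.
import nox


@nox.session
def sketch(session: nox.Session) -> None:
    # Environment intended for a simulator-specific test run.
    env = {"SIM": "ghdl", "TOPLEVEL_LANG": "vhdl", "HDL_TOPLEVEL_LANG": "vhdl"}

    # Without env=, the child process only sees what nox already exports
    # (the inherited shell environment plus any session.env entries), so
    # SIM/TOPLEVEL_LANG stay unset and downstream tooling falls back to
    # its defaults.
    session.run("python", "-c", "import os; print(os.getenv('SIM'))")

    # With env=, nox merges the mapping into the subprocess environment and
    # the variables become visible to the child (and anything it spawns).
    session.run("python", "-c", "import os; print(os.getenv('SIM'))", env=env)
```

This isolation is deliberate on nox's part — each `session.run` call states its own environment — so a configuration dict that is computed but never handed to a run call is silently dropped.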
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `noxfile.py`
Content:
```
1 # Copyright cocotb contributors
2 # Licensed under the Revised BSD License, see LICENSE for details.
3 # SPDX-License-Identifier: BSD-3-Clause
4 import glob
5 import os
6 import shutil
7 import sys
8 from contextlib import suppress
9 from pathlib import Path
10 from typing import Dict, List, Optional, Tuple
11
12 import nox
13
14 # Sessions run by default if nox is called without further arguments.
15 nox.options.sessions = ["dev_test"]
16
17 test_deps = ["pytest"]
18 coverage_deps = ["coverage", "pytest-cov"]
19 # gcovr 5.1 has an issue parsing some gcov files, so pin to 5.0. See
20 # https://github.com/gcovr/gcovr/issues/596
21 # When using gcovr 5.0, deprecated jinja2.Markup was removed in 3.1, so an
22 # Exception is raised during html report generation.
23 # See https://github.com/gcovr/gcovr/pull/576
24 # These issues are fixed on gcovr master branch, so next release should work.
25 coverage_report_deps = ["coverage", "jinja2<3.1", "gcovr==5.0"]
26
27 dev_deps = [
28 "black",
29 "isort",
30 "mypy",
31 "pre-commit",
32 "nox",
33 "flake8",
34 "clang-format",
35 ]
36
37 #
38 # Helpers for use within this file.
39 #
40
41
42 def simulator_support_matrix() -> List[Tuple[str, str, str]]:
43 """
44 Get a list of supported simulator/toplevel-language/GPI-interface tuples.
45 """
46
47 # Simulators with support for VHDL through VHPI, and Verilog through VPI.
48 standard = [
49 (sim, toplevel_lang, gpi_interface)
50 for sim in ("activehdl", "rivierapro", "xcelium")
51 for toplevel_lang in ("verilog", "vhdl")
52 for gpi_interface in ("vpi", "vhpi")
53 if (toplevel_lang, gpi_interface) in (("verilog", "vpi"), ("vhdl", "vhpi"))
54 ]
55
56 # Special-case simulators.
57 special = [
58 ("cvc", "verilog", "vpi"),
59 ("ghdl", "vhdl", "vpi"),
60 ("icarus", "verilog", "vpi"),
61 ("questa", "verilog", "vpi"),
62 ("questa", "vhdl", "fli"),
63 ("questa", "vhdl", "vhpi"),
64 ("verilator", "verilog", "vpi"),
65 ("vcs", "verilog", "vpi"),
66 ]
67
68 return standard + special
69
70
71 def env_vars_for_test(
72 sim: Optional[str], toplevel_lang: Optional[str], gpi_interface: Optional[str]
73 ) -> Dict[str, str]:
74 """Prepare the environment variables controlling the test run."""
75 e = {}
76 if sim is not None:
77 e["SIM"] = sim
78
79 if os.getenv("TOPLEVEL_LANG") is not None:
80 e["HDL_TOPLEVEL_LANG"] = os.getenv("TOPLEVEL_LANG")
81
82 if toplevel_lang is not None:
83 e["TOPLEVEL_LANG"] = toplevel_lang
84 e["HDL_TOPLEVEL_LANG"] = toplevel_lang
85
86 assert not (toplevel_lang == "verilog" and gpi_interface != "vpi")
87 if toplevel_lang == "vhdl" and gpi_interface is not None:
88 e["VHDL_GPI_INTERFACE"] = gpi_interface
89
90 return e
91
92
93 def stringify_dict(d: Dict[str, str]) -> str:
94 return ", ".join(f"{k}={v}" for k, v in d.items())
95
96
97 def configure_env_for_dev_build(session: nox.session) -> None:
98 """Set environment variables for a development build.
99
100 - Enable coverage collection.
101 - Build with more aggressive error checking.
102 """
103 session.env["CFLAGS"] = "-Werror -Wno-deprecated-declarations -g --coverage"
104 session.env["COCOTB_LIBRARY_COVERAGE"] = "1"
105 session.env["CXXFLAGS"] = "-Werror"
106 session.env["LDFLAGS"] = "--coverage"
107
108
109 #
110 # Development pipeline
111 #
112 # - Use nox to build an sdist; no separate build step is required.
113 # - Run tests against the installed sdist.
114 # - Collect coverage.
115 #
116
117
118 @nox.session
119 def dev_build(session: nox.Session) -> None:
120 session.warn("No building is necessary for development sessions.")
121
122
123 @nox.session
124 def dev_test(session: nox.Session) -> None:
125 """Run all development tests as configured through environment variables."""
126
127 dev_test_sim(session, sim=None, toplevel_lang=None, gpi_interface=None)
128 dev_test_nosim(session)
129 dev_coverage_combine(session)
130
131
132 @nox.session
133 @nox.parametrize("sim,toplevel_lang,gpi_interface", simulator_support_matrix())
134 def dev_test_sim(
135 session: nox.Session,
136 sim: Optional[str],
137 toplevel_lang: Optional[str],
138 gpi_interface: Optional[str],
139 ) -> None:
140 """Test a development version of cocotb against a simulator."""
141
142 configure_env_for_dev_build(session)
143
144 session.run("pip", "install", *test_deps, *coverage_deps)
145
146 # Editable installs break C/C++ coverage collection; don't use them.
147 # C/C++ coverage collection requires that the object files produced by the
148 # compiler are not moved around, otherwise the gcno and gcda files produced
149 # at compile and runtime, respectively, are located in the wrong
150 # directories. Depending on the version of the Python install machinery
151 # editable builds are done in a directory in /tmp, which is removed after
152 # the build completes, taking all gcno files with them, as well as the path
153 # to place the gcda files.
154 session.run("pip", "install", ".")
155
156 env = env_vars_for_test(sim, toplevel_lang, gpi_interface)
157 config_str = stringify_dict(env)
158
159 # Remove a potentially existing coverage file from a previous run for the
160 # same test configuration. Use a filename *not* starting with `.coverage.`,
161 # as coverage.py assumes ownership over these files and deleted them at
162 # will.
163 coverage_file = Path(f".cov.test.sim-{sim}-{toplevel_lang}-{gpi_interface}")
164 with suppress(FileNotFoundError):
165 coverage_file.unlink()
166
167 session.log(f"Running 'make test' against a simulator {config_str}")
168 session.run("make", "clean", "test", external=True, env=env)
169
170 session.log(f"Running simulator-specific tests against a simulator {config_str}")
171 session.run(
172 "pytest",
173 "-v",
174 "--cov=cocotb",
175 "--cov-branch",
176 # Don't display coverage report here
177 "--cov-report=",
178 "-k",
179 "simulator_required",
180 env=env,
181 )
182 Path(".coverage").rename(".coverage.pytest")
183
184 session.log(f"All tests passed with configuration {config_str}!")
185
186 # Combine coverage produced during the test runs, and place it in a file
187 # with a name specific to this invocation of dev_test_sim().
188 coverage_files = glob.glob("**/.coverage.cocotb", recursive=True)
189 if not coverage_files:
190 session.error(
191 "No coverage files found. Something went wrong during the test execution."
192 )
193 coverage_files.append(".coverage.pytest")
194 session.run("coverage", "combine", "--append", *coverage_files)
195 Path(".coverage").rename(coverage_file)
196
197 session.log(f"Stored Python coverage for this test run in {coverage_file}.")
198
199
200 @nox.session
201 def dev_test_nosim(session: nox.Session) -> None:
202 """Run the simulator-agnostic tests against a cocotb development version."""
203
204 configure_env_for_dev_build(session)
205
206 session.run("pip", "install", *test_deps, *coverage_deps)
207 session.run("pip", "install", "-e", ".")
208
209 # Remove a potentially existing coverage file from a previous run for the
210 # same test configuration. Use a filename *not* starting with `.coverage.`,
211 # as coverage.py assumes ownership over these files and deleted them at
212 # will.
213 coverage_file = Path(".cov.test.nosim")
214 with suppress(FileNotFoundError):
215 coverage_file.unlink()
216
217 # Run pytest with the default configuration in setup.cfg.
218 session.log("Running simulator-agnostic tests with pytest")
219 session.run(
220 "pytest",
221 "-v",
222 "--cov=cocotb",
223 "--cov-branch",
224 # Don't display coverage report here
225 "--cov-report=",
226 "-k",
227 "not simulator_required",
228 )
229
230 # Run pytest for files which can only be tested in the source tree, not in
231 # the installed binary (otherwise we get an "import file mismatch" error
232 # from pytest).
233 session.log("Running simulator-agnostic tests in the source tree with pytest")
234 pytest_sourcetree = [
235 "cocotb/utils.py",
236 "cocotb/binary.py",
237 "cocotb/types/",
238 "cocotb/_sim_versions.py",
239 ]
240 session.run(
241 "pytest",
242 "-v",
243 "--doctest-modules",
244 "--cov=cocotb",
245 "--cov-branch",
246 # Don't display coverage report here
247 "--cov-report=",
248 # Append to the .coverage file created in the previous pytest
249 # invocation in this session.
250 "--cov-append",
251 "-k",
252 "not simulator_required",
253 *pytest_sourcetree,
254 )
255
256 session.log("All tests passed!")
257
258 # Rename the .coverage file to make it unique to the session.
259 Path(".coverage").rename(coverage_file)
260
261 session.log(f"Stored Python coverage for this test run in {coverage_file}.")
262
263
264 @nox.session
265 def dev_coverage_combine(session: nox.Session) -> None:
266 """Combine coverage from previous dev_* runs into a .coverage file."""
267 session.run("pip", "install", *coverage_report_deps)
268
269 coverage_files = glob.glob("**/.cov.test.*", recursive=True)
270 session.run("coverage", "combine", *coverage_files)
271 assert Path(".coverage").is_file()
272
273 session.log("Wrote combined coverage database for all tests to '.coverage'.")
274
275 session.notify("dev_coverage_report")
276
277
278 @nox.session
279 def dev_coverage_report(session: nox.Session) -> None:
280 """Report coverage results."""
281 session.run("pip", "install", *coverage_report_deps)
282
283 # Produce Cobertura XML coverage reports.
284 session.log("Producing Python and C/C++ coverage in Cobertura XML format")
285
286 coverage_python_xml = Path(".python_coverage.xml")
287 session.run("coverage", "xml", "-o", str(coverage_python_xml))
288 assert coverage_python_xml.is_file()
289
290 coverage_cpp_xml = Path(".cpp_coverage.xml")
291 session.run(
292 "gcovr",
293 "--xml",
294 "--output",
295 str(coverage_cpp_xml),
296 ".",
297 )
298 assert coverage_cpp_xml.is_file()
299
300 session.log(
301 f"Cobertura XML files written to {str(coverage_cpp_xml)!r} (C/C++) and {str(coverage_python_xml)!r} (Python)"
302 )
303
304 # Report human-readable coverage.
305 session.log("Python coverage")
306 session.run("coverage", "report")
307
308 session.log("Library coverage")
309 session.run("gcovr", "--print-summary", "--txt")
310
311
312 #
313 # Release pipeline.
314 #
315 # - Clean out the dist directory.
316 # - Build wheels (release builds).
317 # - Install cocotb from wheel.
318 # - Run tests against cocotb installed from the wheel.
319 #
320 # The release pipeline does not collect coverage, and does not run doctests.
321 #
322
323 # Directory containing the distribution artifacts (sdist and bdist).
324 dist_dir = "dist"
325
326
327 @nox.session
328 def release_clean(session: nox.Session) -> None:
329 """Remove all build artifacts from the dist directory."""
330 shutil.rmtree(dist_dir, ignore_errors=True)
331
332
333 @nox.session
334 def release_build(session: nox.Session) -> None:
335 """Build a release (sdist and bdist)."""
336 session.notify("release_build_bdist")
337 session.notify("release_build_sdist")
338
339
340 @nox.session
341 def release_build_bdist(session: nox.Session) -> None:
342 """Build a binary distribution (wheels) on the current operating system."""
343
344 # Pin a version to ensure reproducible builds.
345 session.run("pip", "install", "cibuildwheel==2.11.2")
346
347 # cibuildwheel only auto-detects the platform if it runs on a CI server.
348 # Do the auto-detect manually to enable local runs.
349 if sys.platform.startswith("linux"):
350 platform = "linux"
351 elif sys.platform == "darwin":
352 platform = "macos"
353 elif sys.platform == "win32":
354 platform = "windows"
355 else:
356 session.error(f"Unknown platform: {sys.platform!r}")
357
358 session.log("Building binary distribution (wheels)")
359 session.run(
360 "cibuildwheel",
361 "--platform",
362 platform,
363 "--output-dir",
364 dist_dir,
365 )
366
367 session.log(
368 f"Binary distribution in release mode for {platform!r} built into {dist_dir!r}"
369 )
370
371
372 @nox.session
373 def release_build_sdist(session: nox.Session) -> None:
374 """Build the source distribution."""
375
376 session.run("pip", "install", "build")
377
378 session.log("Building source distribution (sdist)")
379 session.run("python", "-m", "build", "--sdist", "--outdir", dist_dir, ".")
380
381 session.log(f"Source distribution in release mode built into {dist_dir!r}")
382
383
384 @nox.session
385 def release_test_sdist(session: nox.Session) -> None:
386 """Build and install the sdist."""
387
388 # Find the sdist to install.
389 sdists = list(Path(dist_dir).glob("cocotb-*.tar.gz"))
390 if len(sdists) == 0:
391 session.error(
392 f"No *.tar.gz sdist file found in {dist_dir!r} "
393 f"Run the 'release_build' session first."
394 )
395 if len(sdists) > 1:
396 session.error(
397 f"More than one potential sdist found in the {dist_dir!r} "
398 f"directory. Run the 'release_clean' session first!"
399 )
400 sdist_path = sdists[0]
401 assert sdist_path.is_file()
402
403 session.log("Installing cocotb from sdist, which includes the build step")
404 session.run(
405 "pip",
406 "install",
407 str(sdist_path),
408 )
409
410 session.log("Running cocotb-config as basic installation smoke test")
411 session.run("cocotb-config", "--version")
412
413
414 def release_install(session: nox.Session) -> None:
415 """Helper: Install cocotb from wheels and also install test dependencies."""
416
417 # We have to disable the use of the PyPi index when installing cocotb to
418 # guarantee that the wheels in dist are being used. But without an index
419 # pip cannot find the dependencies, which need to be installed from PyPi.
420 # Work around that by explicitly installing the dependencies first from
421 # PyPi, and then installing cocotb itself from the local dist directory.
422
423 session.log("Installing cocotb dependencies from PyPi")
424 session.run("pip", "install", "find_libpython")
425
426 session.log(f"Installing cocotb from wheels in {dist_dir!r}")
427 session.run(
428 "pip",
429 "install",
430 "--force-reinstall",
431 "--only-binary",
432 "cocotb",
433 "--no-index",
434 "--no-dependencies",
435 "--find-links",
436 dist_dir,
437 "cocotb",
438 )
439
440 session.log("Running cocotb-config as basic installation smoke test")
441 session.run("cocotb-config", "--version")
442
443 session.log("Installing test dependencies")
444 session.run("pip", "install", *test_deps)
445
446
447 @nox.session
448 @nox.parametrize("sim,toplevel_lang,gpi_interface", simulator_support_matrix())
449 def release_test_sim(
450 session: nox.Session, sim: str, toplevel_lang: str, gpi_interface: str
451 ) -> None:
452 """Test a release version of cocotb against a simulator."""
453
454 release_install(session)
455
456 env = env_vars_for_test(sim, toplevel_lang, gpi_interface)
457 config_str = stringify_dict(env)
458
459 session.log(f"Running tests against a simulator: {config_str}")
460 session.run("make", "clean", "test", external=True, env=env)
461
462 session.log(f"Running simulator-specific tests against a simulator {config_str}")
463 session.run(
464 "pytest",
465 "-v",
466 "-k",
467 "simulator_required",
468 )
469
470 session.log(f"All tests passed with configuration {config_str}!")
471
472
473 @nox.session
474 def release_test_nosim(session: nox.Session) -> None:
475 """Run the simulator-agnostic tests against a cocotb release."""
476
477 release_install(session)
478
479 session.log("Running simulator-agnostic tests")
480 session.run(
481 "pytest",
482 "-v",
483 "-k",
484 "not simulator_required",
485 )
486
487 session.log("All tests passed!")
488
489
490 @nox.session
491 def docs(session: nox.Session) -> None:
492 """invoke sphinx-build to build the HTML docs"""
493 session.run("pip", "install", "-r", "documentation/requirements.txt")
494 session.run("pip", "install", "-e", ".")
495 outdir = session.cache_dir / "docs_out"
496 session.run(
497 "sphinx-build", "./documentation/source", str(outdir), "--color", "-b", "html"
498 )
499 index = (outdir / "index.html").resolve().as_uri()
500 session.log(f"Documentation is available at {index}")
501
502
503 @nox.session
504 def docs_linkcheck(session: nox.Session) -> None:
505 """invoke sphinx-build to linkcheck the docs"""
506 session.run("pip", "install", "-r", "documentation/requirements.txt")
507 session.run("pip", "install", "-e", ".")
508 outdir = session.cache_dir / "docs_out"
509 session.run(
510 "sphinx-build",
511 "./documentation/source",
512 str(outdir),
513 "--color",
514 "-b",
515 "linkcheck",
516 )
517
518
519 @nox.session
520 def docs_spelling(session: nox.Session) -> None:
521 """invoke sphinx-build to spellcheck the docs"""
522 session.run("pip", "install", "-r", "documentation/requirements.txt")
523 session.run("pip", "install", "-e", ".")
524 outdir = session.cache_dir / "docs_out"
525 session.run(
526 "sphinx-build",
527 "./documentation/source",
528 str(outdir),
529 "--color",
530 "-b",
531 "spelling",
532 )
533
534
535 @nox.session(reuse_venv=True)
536 def dev(session: nox.Session) -> None:
537 """Build a development environment and optionally run a command given as extra args"""
538
539 configure_env_for_dev_build(session)
540
541 session.run("pip", "install", *test_deps)
542 session.run("pip", "install", *dev_deps)
543 session.run("pip", "install", "-e", ".")
544 if session.posargs:
545 session.run(*session.posargs, external=True)
546
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/noxfile.py b/noxfile.py
--- a/noxfile.py
+++ b/noxfile.py
@@ -465,6 +465,7 @@
"-v",
"-k",
"simulator_required",
+ env=env,
)
session.log(f"All tests passed with configuration {config_str}!")
| {"golden_diff": "diff --git a/noxfile.py b/noxfile.py\n--- a/noxfile.py\n+++ b/noxfile.py\n@@ -465,6 +465,7 @@\n \"-v\",\n \"-k\",\n \"simulator_required\",\n+ env=env,\n )\n \n session.log(f\"All tests passed with configuration {config_str}!\")\n", "issue": "CI fail \"ValueError: Ghdl: Simulator does not support Verilog\"\nWe get a GHDL failure in the release CI pipeline on master:\r\n\r\nLog: https://github.com/cocotb/cocotb/actions/runs/3816250240/jobs/6491822293\r\n\r\n```\r\nnox > Running simulator-specific tests against a simulator SIM=ghdl, HDL_TOPLEVEL_LANG=vhdl, TOPLEVEL_LANG=vhdl, VHDL_GPI_INTERFACE=vpi\r\nnox > pytest -v -k simulator_required\r\n============================= test session starts ==============================\r\nplatform linux -- Python 3.8.15, pytest-7.2.0, pluggy-1.0.0 -- /home/runner/work/cocotb/cocotb/.nox/release_test_sim-sim-ghdl-toplevel_lang-vhdl-gpi_interface-vpi/bin/python\r\ncachedir: .pytest_cache\r\nrootdir: /home/runner/work/cocotb/cocotb, configfile: setup.cfg, testpaths: tests/pytest, examples/simple_dff\r\ncollecting ... collected 109 items / 100 deselected / 9 selected\r\n\r\ntests/pytest/test_cocotb.py::test_cocotb FAILED [ 11%]\r\ntests/pytest/test_parallel_cocotb.py::test_cocotb_parallel_compile FAILED [ 22%]\r\ntests/pytest/test_parallel_cocotb.py::test_cocotb_parallel[0] FAILED [ 33%]\r\ntests/pytest/test_parallel_cocotb.py::test_cocotb_parallel[1] FAILED [ 44%]\r\ntests/pytest/test_parallel_cocotb.py::test_cocotb_parallel[2] FAILED [ 55%]\r\ntests/pytest/test_parallel_cocotb.py::test_cocotb_parallel[3] FAILED [ 66%]\r\ntests/pytest/test_runner.py::test_runner[parameters0] FAILED [ 77%]\r\ntests/pytest/test_runner.py::test_runner[parameters1] FAILED [ 88%]\r\nexamples/simple_dff/test_dff.py::test_simple_dff_runner FAILED [100%]\r\n```\r\n\r\nLooks like somehow the chosen toplevel language doesn't make it to the runner.\n", "before_files": [{"content": "# Copyright cocotb contributors\n# Licensed under the Revised BSD License, see LICENSE for details.\n# SPDX-License-Identifier: BSD-3-Clause\nimport glob\nimport os\nimport shutil\nimport sys\nfrom contextlib import suppress\nfrom pathlib import Path\nfrom typing import Dict, List, Optional, Tuple\n\nimport nox\n\n# Sessions run by default if nox is called without further arguments.\nnox.options.sessions = [\"dev_test\"]\n\ntest_deps = [\"pytest\"]\ncoverage_deps = [\"coverage\", \"pytest-cov\"]\n# gcovr 5.1 has an issue parsing some gcov files, so pin to 5.0. 
See\n# https://github.com/gcovr/gcovr/issues/596\n# When using gcovr 5.0, deprecated jinja2.Markup was removed in 3.1, so an\n# Exception is raised during html report generation.\n# See https://github.com/gcovr/gcovr/pull/576\n# These issues are fixed on gcovr master branch, so next release should work.\ncoverage_report_deps = [\"coverage\", \"jinja2<3.1\", \"gcovr==5.0\"]\n\ndev_deps = [\n \"black\",\n \"isort\",\n \"mypy\",\n \"pre-commit\",\n \"nox\",\n \"flake8\",\n \"clang-format\",\n]\n\n#\n# Helpers for use within this file.\n#\n\n\ndef simulator_support_matrix() -> List[Tuple[str, str, str]]:\n \"\"\"\n Get a list of supported simulator/toplevel-language/GPI-interface tuples.\n \"\"\"\n\n # Simulators with support for VHDL through VHPI, and Verilog through VPI.\n standard = [\n (sim, toplevel_lang, gpi_interface)\n for sim in (\"activehdl\", \"rivierapro\", \"xcelium\")\n for toplevel_lang in (\"verilog\", \"vhdl\")\n for gpi_interface in (\"vpi\", \"vhpi\")\n if (toplevel_lang, gpi_interface) in ((\"verilog\", \"vpi\"), (\"vhdl\", \"vhpi\"))\n ]\n\n # Special-case simulators.\n special = [\n (\"cvc\", \"verilog\", \"vpi\"),\n (\"ghdl\", \"vhdl\", \"vpi\"),\n (\"icarus\", \"verilog\", \"vpi\"),\n (\"questa\", \"verilog\", \"vpi\"),\n (\"questa\", \"vhdl\", \"fli\"),\n (\"questa\", \"vhdl\", \"vhpi\"),\n (\"verilator\", \"verilog\", \"vpi\"),\n (\"vcs\", \"verilog\", \"vpi\"),\n ]\n\n return standard + special\n\n\ndef env_vars_for_test(\n sim: Optional[str], toplevel_lang: Optional[str], gpi_interface: Optional[str]\n) -> Dict[str, str]:\n \"\"\"Prepare the environment variables controlling the test run.\"\"\"\n e = {}\n if sim is not None:\n e[\"SIM\"] = sim\n\n if os.getenv(\"TOPLEVEL_LANG\") is not None:\n e[\"HDL_TOPLEVEL_LANG\"] = os.getenv(\"TOPLEVEL_LANG\")\n\n if toplevel_lang is not None:\n e[\"TOPLEVEL_LANG\"] = toplevel_lang\n e[\"HDL_TOPLEVEL_LANG\"] = toplevel_lang\n\n assert not (toplevel_lang == \"verilog\" and gpi_interface != \"vpi\")\n if toplevel_lang == \"vhdl\" and gpi_interface is not None:\n e[\"VHDL_GPI_INTERFACE\"] = gpi_interface\n\n return e\n\n\ndef stringify_dict(d: Dict[str, str]) -> str:\n return \", \".join(f\"{k}={v}\" for k, v in d.items())\n\n\ndef configure_env_for_dev_build(session: nox.session) -> None:\n \"\"\"Set environment variables for a development build.\n\n - Enable coverage collection.\n - Build with more aggressive error checking.\n \"\"\"\n session.env[\"CFLAGS\"] = \"-Werror -Wno-deprecated-declarations -g --coverage\"\n session.env[\"COCOTB_LIBRARY_COVERAGE\"] = \"1\"\n session.env[\"CXXFLAGS\"] = \"-Werror\"\n session.env[\"LDFLAGS\"] = \"--coverage\"\n\n\n#\n# Development pipeline\n#\n# - Use nox to build an sdist; no separate build step is required.\n# - Run tests against the installed sdist.\n# - Collect coverage.\n#\n\n\[email protected]\ndef dev_build(session: nox.Session) -> None:\n session.warn(\"No building is necessary for development sessions.\")\n\n\[email protected]\ndef dev_test(session: nox.Session) -> None:\n \"\"\"Run all development tests as configured through environment variables.\"\"\"\n\n dev_test_sim(session, sim=None, toplevel_lang=None, gpi_interface=None)\n dev_test_nosim(session)\n dev_coverage_combine(session)\n\n\[email protected]\[email protected](\"sim,toplevel_lang,gpi_interface\", simulator_support_matrix())\ndef dev_test_sim(\n session: nox.Session,\n sim: Optional[str],\n toplevel_lang: Optional[str],\n gpi_interface: Optional[str],\n) -> None:\n \"\"\"Test a development version of cocotb against 
a simulator.\"\"\"\n\n configure_env_for_dev_build(session)\n\n session.run(\"pip\", \"install\", *test_deps, *coverage_deps)\n\n # Editable installs break C/C++ coverage collection; don't use them.\n # C/C++ coverage collection requires that the object files produced by the\n # compiler are not moved around, otherwise the gcno and gcda files produced\n # at compile and runtime, respectively, are located in the wrong\n # directories. Depending on the version of the Python install machinery\n # editable builds are done in a directory in /tmp, which is removed after\n # the build completes, taking all gcno files with them, as well as the path\n # to place the gcda files.\n session.run(\"pip\", \"install\", \".\")\n\n env = env_vars_for_test(sim, toplevel_lang, gpi_interface)\n config_str = stringify_dict(env)\n\n # Remove a potentially existing coverage file from a previous run for the\n # same test configuration. Use a filename *not* starting with `.coverage.`,\n # as coverage.py assumes ownership over these files and deleted them at\n # will.\n coverage_file = Path(f\".cov.test.sim-{sim}-{toplevel_lang}-{gpi_interface}\")\n with suppress(FileNotFoundError):\n coverage_file.unlink()\n\n session.log(f\"Running 'make test' against a simulator {config_str}\")\n session.run(\"make\", \"clean\", \"test\", external=True, env=env)\n\n session.log(f\"Running simulator-specific tests against a simulator {config_str}\")\n session.run(\n \"pytest\",\n \"-v\",\n \"--cov=cocotb\",\n \"--cov-branch\",\n # Don't display coverage report here\n \"--cov-report=\",\n \"-k\",\n \"simulator_required\",\n env=env,\n )\n Path(\".coverage\").rename(\".coverage.pytest\")\n\n session.log(f\"All tests passed with configuration {config_str}!\")\n\n # Combine coverage produced during the test runs, and place it in a file\n # with a name specific to this invocation of dev_test_sim().\n coverage_files = glob.glob(\"**/.coverage.cocotb\", recursive=True)\n if not coverage_files:\n session.error(\n \"No coverage files found. Something went wrong during the test execution.\"\n )\n coverage_files.append(\".coverage.pytest\")\n session.run(\"coverage\", \"combine\", \"--append\", *coverage_files)\n Path(\".coverage\").rename(coverage_file)\n\n session.log(f\"Stored Python coverage for this test run in {coverage_file}.\")\n\n\[email protected]\ndef dev_test_nosim(session: nox.Session) -> None:\n \"\"\"Run the simulator-agnostic tests against a cocotb development version.\"\"\"\n\n configure_env_for_dev_build(session)\n\n session.run(\"pip\", \"install\", *test_deps, *coverage_deps)\n session.run(\"pip\", \"install\", \"-e\", \".\")\n\n # Remove a potentially existing coverage file from a previous run for the\n # same test configuration. 
Use a filename *not* starting with `.coverage.`,\n # as coverage.py assumes ownership over these files and deleted them at\n # will.\n coverage_file = Path(\".cov.test.nosim\")\n with suppress(FileNotFoundError):\n coverage_file.unlink()\n\n # Run pytest with the default configuration in setup.cfg.\n session.log(\"Running simulator-agnostic tests with pytest\")\n session.run(\n \"pytest\",\n \"-v\",\n \"--cov=cocotb\",\n \"--cov-branch\",\n # Don't display coverage report here\n \"--cov-report=\",\n \"-k\",\n \"not simulator_required\",\n )\n\n # Run pytest for files which can only be tested in the source tree, not in\n # the installed binary (otherwise we get an \"import file mismatch\" error\n # from pytest).\n session.log(\"Running simulator-agnostic tests in the source tree with pytest\")\n pytest_sourcetree = [\n \"cocotb/utils.py\",\n \"cocotb/binary.py\",\n \"cocotb/types/\",\n \"cocotb/_sim_versions.py\",\n ]\n session.run(\n \"pytest\",\n \"-v\",\n \"--doctest-modules\",\n \"--cov=cocotb\",\n \"--cov-branch\",\n # Don't display coverage report here\n \"--cov-report=\",\n # Append to the .coverage file created in the previous pytest\n # invocation in this session.\n \"--cov-append\",\n \"-k\",\n \"not simulator_required\",\n *pytest_sourcetree,\n )\n\n session.log(\"All tests passed!\")\n\n # Rename the .coverage file to make it unique to the session.\n Path(\".coverage\").rename(coverage_file)\n\n session.log(f\"Stored Python coverage for this test run in {coverage_file}.\")\n\n\[email protected]\ndef dev_coverage_combine(session: nox.Session) -> None:\n \"\"\"Combine coverage from previous dev_* runs into a .coverage file.\"\"\"\n session.run(\"pip\", \"install\", *coverage_report_deps)\n\n coverage_files = glob.glob(\"**/.cov.test.*\", recursive=True)\n session.run(\"coverage\", \"combine\", *coverage_files)\n assert Path(\".coverage\").is_file()\n\n session.log(\"Wrote combined coverage database for all tests to '.coverage'.\")\n\n session.notify(\"dev_coverage_report\")\n\n\[email protected]\ndef dev_coverage_report(session: nox.Session) -> None:\n \"\"\"Report coverage results.\"\"\"\n session.run(\"pip\", \"install\", *coverage_report_deps)\n\n # Produce Cobertura XML coverage reports.\n session.log(\"Producing Python and C/C++ coverage in Cobertura XML format\")\n\n coverage_python_xml = Path(\".python_coverage.xml\")\n session.run(\"coverage\", \"xml\", \"-o\", str(coverage_python_xml))\n assert coverage_python_xml.is_file()\n\n coverage_cpp_xml = Path(\".cpp_coverage.xml\")\n session.run(\n \"gcovr\",\n \"--xml\",\n \"--output\",\n str(coverage_cpp_xml),\n \".\",\n )\n assert coverage_cpp_xml.is_file()\n\n session.log(\n f\"Cobertura XML files written to {str(coverage_cpp_xml)!r} (C/C++) and {str(coverage_python_xml)!r} (Python)\"\n )\n\n # Report human-readable coverage.\n session.log(\"Python coverage\")\n session.run(\"coverage\", \"report\")\n\n session.log(\"Library coverage\")\n session.run(\"gcovr\", \"--print-summary\", \"--txt\")\n\n\n#\n# Release pipeline.\n#\n# - Clean out the dist directory.\n# - Build wheels (release builds).\n# - Install cocotb from wheel.\n# - Run tests against cocotb installed from the wheel.\n#\n# The release pipeline does not collect coverage, and does not run doctests.\n#\n\n# Directory containing the distribution artifacts (sdist and bdist).\ndist_dir = \"dist\"\n\n\[email protected]\ndef release_clean(session: nox.Session) -> None:\n \"\"\"Remove all build artifacts from the dist directory.\"\"\"\n shutil.rmtree(dist_dir, 
ignore_errors=True)\n\n\[email protected]\ndef release_build(session: nox.Session) -> None:\n \"\"\"Build a release (sdist and bdist).\"\"\"\n session.notify(\"release_build_bdist\")\n session.notify(\"release_build_sdist\")\n\n\[email protected]\ndef release_build_bdist(session: nox.Session) -> None:\n \"\"\"Build a binary distribution (wheels) on the current operating system.\"\"\"\n\n # Pin a version to ensure reproducible builds.\n session.run(\"pip\", \"install\", \"cibuildwheel==2.11.2\")\n\n # cibuildwheel only auto-detects the platform if it runs on a CI server.\n # Do the auto-detect manually to enable local runs.\n if sys.platform.startswith(\"linux\"):\n platform = \"linux\"\n elif sys.platform == \"darwin\":\n platform = \"macos\"\n elif sys.platform == \"win32\":\n platform = \"windows\"\n else:\n session.error(f\"Unknown platform: {sys.platform!r}\")\n\n session.log(\"Building binary distribution (wheels)\")\n session.run(\n \"cibuildwheel\",\n \"--platform\",\n platform,\n \"--output-dir\",\n dist_dir,\n )\n\n session.log(\n f\"Binary distribution in release mode for {platform!r} built into {dist_dir!r}\"\n )\n\n\[email protected]\ndef release_build_sdist(session: nox.Session) -> None:\n \"\"\"Build the source distribution.\"\"\"\n\n session.run(\"pip\", \"install\", \"build\")\n\n session.log(\"Building source distribution (sdist)\")\n session.run(\"python\", \"-m\", \"build\", \"--sdist\", \"--outdir\", dist_dir, \".\")\n\n session.log(f\"Source distribution in release mode built into {dist_dir!r}\")\n\n\[email protected]\ndef release_test_sdist(session: nox.Session) -> None:\n \"\"\"Build and install the sdist.\"\"\"\n\n # Find the sdist to install.\n sdists = list(Path(dist_dir).glob(\"cocotb-*.tar.gz\"))\n if len(sdists) == 0:\n session.error(\n f\"No *.tar.gz sdist file found in {dist_dir!r} \"\n f\"Run the 'release_build' session first.\"\n )\n if len(sdists) > 1:\n session.error(\n f\"More than one potential sdist found in the {dist_dir!r} \"\n f\"directory. Run the 'release_clean' session first!\"\n )\n sdist_path = sdists[0]\n assert sdist_path.is_file()\n\n session.log(\"Installing cocotb from sdist, which includes the build step\")\n session.run(\n \"pip\",\n \"install\",\n str(sdist_path),\n )\n\n session.log(\"Running cocotb-config as basic installation smoke test\")\n session.run(\"cocotb-config\", \"--version\")\n\n\ndef release_install(session: nox.Session) -> None:\n \"\"\"Helper: Install cocotb from wheels and also install test dependencies.\"\"\"\n\n # We have to disable the use of the PyPi index when installing cocotb to\n # guarantee that the wheels in dist are being used. 
But without an index\n # pip cannot find the dependencies, which need to be installed from PyPi.\n # Work around that by explicitly installing the dependencies first from\n # PyPi, and then installing cocotb itself from the local dist directory.\n\n session.log(\"Installing cocotb dependencies from PyPi\")\n session.run(\"pip\", \"install\", \"find_libpython\")\n\n session.log(f\"Installing cocotb from wheels in {dist_dir!r}\")\n session.run(\n \"pip\",\n \"install\",\n \"--force-reinstall\",\n \"--only-binary\",\n \"cocotb\",\n \"--no-index\",\n \"--no-dependencies\",\n \"--find-links\",\n dist_dir,\n \"cocotb\",\n )\n\n session.log(\"Running cocotb-config as basic installation smoke test\")\n session.run(\"cocotb-config\", \"--version\")\n\n session.log(\"Installing test dependencies\")\n session.run(\"pip\", \"install\", *test_deps)\n\n\[email protected]\[email protected](\"sim,toplevel_lang,gpi_interface\", simulator_support_matrix())\ndef release_test_sim(\n session: nox.Session, sim: str, toplevel_lang: str, gpi_interface: str\n) -> None:\n \"\"\"Test a release version of cocotb against a simulator.\"\"\"\n\n release_install(session)\n\n env = env_vars_for_test(sim, toplevel_lang, gpi_interface)\n config_str = stringify_dict(env)\n\n session.log(f\"Running tests against a simulator: {config_str}\")\n session.run(\"make\", \"clean\", \"test\", external=True, env=env)\n\n session.log(f\"Running simulator-specific tests against a simulator {config_str}\")\n session.run(\n \"pytest\",\n \"-v\",\n \"-k\",\n \"simulator_required\",\n )\n\n session.log(f\"All tests passed with configuration {config_str}!\")\n\n\[email protected]\ndef release_test_nosim(session: nox.Session) -> None:\n \"\"\"Run the simulator-agnostic tests against a cocotb release.\"\"\"\n\n release_install(session)\n\n session.log(\"Running simulator-agnostic tests\")\n session.run(\n \"pytest\",\n \"-v\",\n \"-k\",\n \"not simulator_required\",\n )\n\n session.log(\"All tests passed!\")\n\n\[email protected]\ndef docs(session: nox.Session) -> None:\n \"\"\"invoke sphinx-build to build the HTML docs\"\"\"\n session.run(\"pip\", \"install\", \"-r\", \"documentation/requirements.txt\")\n session.run(\"pip\", \"install\", \"-e\", \".\")\n outdir = session.cache_dir / \"docs_out\"\n session.run(\n \"sphinx-build\", \"./documentation/source\", str(outdir), \"--color\", \"-b\", \"html\"\n )\n index = (outdir / \"index.html\").resolve().as_uri()\n session.log(f\"Documentation is available at {index}\")\n\n\[email protected]\ndef docs_linkcheck(session: nox.Session) -> None:\n \"\"\"invoke sphinx-build to linkcheck the docs\"\"\"\n session.run(\"pip\", \"install\", \"-r\", \"documentation/requirements.txt\")\n session.run(\"pip\", \"install\", \"-e\", \".\")\n outdir = session.cache_dir / \"docs_out\"\n session.run(\n \"sphinx-build\",\n \"./documentation/source\",\n str(outdir),\n \"--color\",\n \"-b\",\n \"linkcheck\",\n )\n\n\[email protected]\ndef docs_spelling(session: nox.Session) -> None:\n \"\"\"invoke sphinx-build to spellcheck the docs\"\"\"\n session.run(\"pip\", \"install\", \"-r\", \"documentation/requirements.txt\")\n session.run(\"pip\", \"install\", \"-e\", \".\")\n outdir = session.cache_dir / \"docs_out\"\n session.run(\n \"sphinx-build\",\n \"./documentation/source\",\n str(outdir),\n \"--color\",\n \"-b\",\n \"spelling\",\n )\n\n\[email protected](reuse_venv=True)\ndef dev(session: nox.Session) -> None:\n \"\"\"Build a development environment and optionally run a command given as extra args\"\"\"\n\n 
configure_env_for_dev_build(session)\n\n session.run(\"pip\", \"install\", *test_deps)\n session.run(\"pip\", \"install\", *dev_deps)\n session.run(\"pip\", \"install\", \"-e\", \".\")\n if session.posargs:\n session.run(*session.posargs, external=True)\n", "path": "noxfile.py"}], "after_files": [{"content": "# Copyright cocotb contributors\n# Licensed under the Revised BSD License, see LICENSE for details.\n# SPDX-License-Identifier: BSD-3-Clause\nimport glob\nimport os\nimport shutil\nimport sys\nfrom contextlib import suppress\nfrom pathlib import Path\nfrom typing import Dict, List, Optional, Tuple\n\nimport nox\n\n# Sessions run by default if nox is called without further arguments.\nnox.options.sessions = [\"dev_test\"]\n\ntest_deps = [\"pytest\"]\ncoverage_deps = [\"coverage\", \"pytest-cov\"]\n# gcovr 5.1 has an issue parsing some gcov files, so pin to 5.0. See\n# https://github.com/gcovr/gcovr/issues/596\n# When using gcovr 5.0, deprecated jinja2.Markup was removed in 3.1, so an\n# Exception is raised during html report generation.\n# See https://github.com/gcovr/gcovr/pull/576\n# These issues are fixed on gcovr master branch, so next release should work.\ncoverage_report_deps = [\"coverage\", \"jinja2<3.1\", \"gcovr==5.0\"]\n\ndev_deps = [\n \"black\",\n \"isort\",\n \"mypy\",\n \"pre-commit\",\n \"nox\",\n \"flake8\",\n \"clang-format\",\n]\n\n#\n# Helpers for use within this file.\n#\n\n\ndef simulator_support_matrix() -> List[Tuple[str, str, str]]:\n \"\"\"\n Get a list of supported simulator/toplevel-language/GPI-interface tuples.\n \"\"\"\n\n # Simulators with support for VHDL through VHPI, and Verilog through VPI.\n standard = [\n (sim, toplevel_lang, gpi_interface)\n for sim in (\"activehdl\", \"rivierapro\", \"xcelium\")\n for toplevel_lang in (\"verilog\", \"vhdl\")\n for gpi_interface in (\"vpi\", \"vhpi\")\n if (toplevel_lang, gpi_interface) in ((\"verilog\", \"vpi\"), (\"vhdl\", \"vhpi\"))\n ]\n\n # Special-case simulators.\n special = [\n (\"cvc\", \"verilog\", \"vpi\"),\n (\"ghdl\", \"vhdl\", \"vpi\"),\n (\"icarus\", \"verilog\", \"vpi\"),\n (\"questa\", \"verilog\", \"vpi\"),\n (\"questa\", \"vhdl\", \"fli\"),\n (\"questa\", \"vhdl\", \"vhpi\"),\n (\"verilator\", \"verilog\", \"vpi\"),\n (\"vcs\", \"verilog\", \"vpi\"),\n ]\n\n return standard + special\n\n\ndef env_vars_for_test(\n sim: Optional[str], toplevel_lang: Optional[str], gpi_interface: Optional[str]\n) -> Dict[str, str]:\n \"\"\"Prepare the environment variables controlling the test run.\"\"\"\n e = {}\n if sim is not None:\n e[\"SIM\"] = sim\n\n if os.getenv(\"TOPLEVEL_LANG\") is not None:\n e[\"HDL_TOPLEVEL_LANG\"] = os.getenv(\"TOPLEVEL_LANG\")\n\n if toplevel_lang is not None:\n e[\"TOPLEVEL_LANG\"] = toplevel_lang\n e[\"HDL_TOPLEVEL_LANG\"] = toplevel_lang\n\n assert not (toplevel_lang == \"verilog\" and gpi_interface != \"vpi\")\n if toplevel_lang == \"vhdl\" and gpi_interface is not None:\n e[\"VHDL_GPI_INTERFACE\"] = gpi_interface\n\n return e\n\n\ndef stringify_dict(d: Dict[str, str]) -> str:\n return \", \".join(f\"{k}={v}\" for k, v in d.items())\n\n\ndef configure_env_for_dev_build(session: nox.session) -> None:\n \"\"\"Set environment variables for a development build.\n\n - Enable coverage collection.\n - Build with more aggressive error checking.\n \"\"\"\n session.env[\"CFLAGS\"] = \"-Werror -Wno-deprecated-declarations -g --coverage\"\n session.env[\"COCOTB_LIBRARY_COVERAGE\"] = \"1\"\n session.env[\"CXXFLAGS\"] = \"-Werror\"\n session.env[\"LDFLAGS\"] = \"--coverage\"\n\n\n#\n# 
Development pipeline\n#\n# - Use nox to build an sdist; no separate build step is required.\n# - Run tests against the installed sdist.\n# - Collect coverage.\n#\n\n\[email protected]\ndef dev_build(session: nox.Session) -> None:\n session.warn(\"No building is necessary for development sessions.\")\n\n\[email protected]\ndef dev_test(session: nox.Session) -> None:\n \"\"\"Run all development tests as configured through environment variables.\"\"\"\n\n dev_test_sim(session, sim=None, toplevel_lang=None, gpi_interface=None)\n dev_test_nosim(session)\n dev_coverage_combine(session)\n\n\[email protected]\[email protected](\"sim,toplevel_lang,gpi_interface\", simulator_support_matrix())\ndef dev_test_sim(\n session: nox.Session,\n sim: Optional[str],\n toplevel_lang: Optional[str],\n gpi_interface: Optional[str],\n) -> None:\n \"\"\"Test a development version of cocotb against a simulator.\"\"\"\n\n configure_env_for_dev_build(session)\n\n session.run(\"pip\", \"install\", *test_deps, *coverage_deps)\n\n # Editable installs break C/C++ coverage collection; don't use them.\n # C/C++ coverage collection requires that the object files produced by the\n # compiler are not moved around, otherwise the gcno and gcda files produced\n # at compile and runtime, respectively, are located in the wrong\n # directories. Depending on the version of the Python install machinery\n # editable builds are done in a directory in /tmp, which is removed after\n # the build completes, taking all gcno files with them, as well as the path\n # to place the gcda files.\n session.run(\"pip\", \"install\", \".\")\n\n env = env_vars_for_test(sim, toplevel_lang, gpi_interface)\n config_str = stringify_dict(env)\n\n # Remove a potentially existing coverage file from a previous run for the\n # same test configuration. Use a filename *not* starting with `.coverage.`,\n # as coverage.py assumes ownership over these files and deleted them at\n # will.\n coverage_file = Path(f\".cov.test.sim-{sim}-{toplevel_lang}-{gpi_interface}\")\n with suppress(FileNotFoundError):\n coverage_file.unlink()\n\n session.log(f\"Running 'make test' against a simulator {config_str}\")\n session.run(\"make\", \"clean\", \"test\", external=True, env=env)\n\n session.log(f\"Running simulator-specific tests against a simulator {config_str}\")\n session.run(\n \"pytest\",\n \"-v\",\n \"--cov=cocotb\",\n \"--cov-branch\",\n # Don't display coverage report here\n \"--cov-report=\",\n \"-k\",\n \"simulator_required\",\n env=env,\n )\n Path(\".coverage\").rename(\".coverage.pytest\")\n\n session.log(f\"All tests passed with configuration {config_str}!\")\n\n # Combine coverage produced during the test runs, and place it in a file\n # with a name specific to this invocation of dev_test_sim().\n coverage_files = glob.glob(\"**/.coverage.cocotb\", recursive=True)\n if not coverage_files:\n session.error(\n \"No coverage files found. 
Something went wrong during the test execution.\"\n )\n coverage_files.append(\".coverage.pytest\")\n session.run(\"coverage\", \"combine\", \"--append\", *coverage_files)\n Path(\".coverage\").rename(coverage_file)\n\n session.log(f\"Stored Python coverage for this test run in {coverage_file}.\")\n\n\[email protected]\ndef dev_test_nosim(session: nox.Session) -> None:\n \"\"\"Run the simulator-agnostic tests against a cocotb development version.\"\"\"\n\n configure_env_for_dev_build(session)\n\n session.run(\"pip\", \"install\", *test_deps, *coverage_deps)\n session.run(\"pip\", \"install\", \"-e\", \".\")\n\n # Remove a potentially existing coverage file from a previous run for the\n # same test configuration. Use a filename *not* starting with `.coverage.`,\n # as coverage.py assumes ownership over these files and deleted them at\n # will.\n coverage_file = Path(\".cov.test.nosim\")\n with suppress(FileNotFoundError):\n coverage_file.unlink()\n\n # Run pytest with the default configuration in setup.cfg.\n session.log(\"Running simulator-agnostic tests with pytest\")\n session.run(\n \"pytest\",\n \"-v\",\n \"--cov=cocotb\",\n \"--cov-branch\",\n # Don't display coverage report here\n \"--cov-report=\",\n \"-k\",\n \"not simulator_required\",\n )\n\n # Run pytest for files which can only be tested in the source tree, not in\n # the installed binary (otherwise we get an \"import file mismatch\" error\n # from pytest).\n session.log(\"Running simulator-agnostic tests in the source tree with pytest\")\n pytest_sourcetree = [\n \"cocotb/utils.py\",\n \"cocotb/binary.py\",\n \"cocotb/types/\",\n \"cocotb/_sim_versions.py\",\n ]\n session.run(\n \"pytest\",\n \"-v\",\n \"--doctest-modules\",\n \"--cov=cocotb\",\n \"--cov-branch\",\n # Don't display coverage report here\n \"--cov-report=\",\n # Append to the .coverage file created in the previous pytest\n # invocation in this session.\n \"--cov-append\",\n \"-k\",\n \"not simulator_required\",\n *pytest_sourcetree,\n )\n\n session.log(\"All tests passed!\")\n\n # Rename the .coverage file to make it unique to the session.\n Path(\".coverage\").rename(coverage_file)\n\n session.log(f\"Stored Python coverage for this test run in {coverage_file}.\")\n\n\[email protected]\ndef dev_coverage_combine(session: nox.Session) -> None:\n \"\"\"Combine coverage from previous dev_* runs into a .coverage file.\"\"\"\n session.run(\"pip\", \"install\", *coverage_report_deps)\n\n coverage_files = glob.glob(\"**/.cov.test.*\", recursive=True)\n session.run(\"coverage\", \"combine\", *coverage_files)\n assert Path(\".coverage\").is_file()\n\n session.log(\"Wrote combined coverage database for all tests to '.coverage'.\")\n\n session.notify(\"dev_coverage_report\")\n\n\[email protected]\ndef dev_coverage_report(session: nox.Session) -> None:\n \"\"\"Report coverage results.\"\"\"\n session.run(\"pip\", \"install\", *coverage_report_deps)\n\n # Produce Cobertura XML coverage reports.\n session.log(\"Producing Python and C/C++ coverage in Cobertura XML format\")\n\n coverage_python_xml = Path(\".python_coverage.xml\")\n session.run(\"coverage\", \"xml\", \"-o\", str(coverage_python_xml))\n assert coverage_python_xml.is_file()\n\n coverage_cpp_xml = Path(\".cpp_coverage.xml\")\n session.run(\n \"gcovr\",\n \"--xml\",\n \"--output\",\n str(coverage_cpp_xml),\n \".\",\n )\n assert coverage_cpp_xml.is_file()\n\n session.log(\n f\"Cobertura XML files written to {str(coverage_cpp_xml)!r} (C/C++) and {str(coverage_python_xml)!r} (Python)\"\n )\n\n # Report human-readable 
coverage.\n session.log(\"Python coverage\")\n session.run(\"coverage\", \"report\")\n\n session.log(\"Library coverage\")\n session.run(\"gcovr\", \"--print-summary\", \"--txt\")\n\n\n#\n# Release pipeline.\n#\n# - Clean out the dist directory.\n# - Build wheels (release builds).\n# - Install cocotb from wheel.\n# - Run tests against cocotb installed from the wheel.\n#\n# The release pipeline does not collect coverage, and does not run doctests.\n#\n\n# Directory containing the distribution artifacts (sdist and bdist).\ndist_dir = \"dist\"\n\n\[email protected]\ndef release_clean(session: nox.Session) -> None:\n \"\"\"Remove all build artifacts from the dist directory.\"\"\"\n shutil.rmtree(dist_dir, ignore_errors=True)\n\n\[email protected]\ndef release_build(session: nox.Session) -> None:\n \"\"\"Build a release (sdist and bdist).\"\"\"\n session.notify(\"release_build_bdist\")\n session.notify(\"release_build_sdist\")\n\n\[email protected]\ndef release_build_bdist(session: nox.Session) -> None:\n \"\"\"Build a binary distribution (wheels) on the current operating system.\"\"\"\n\n # Pin a version to ensure reproducible builds.\n session.run(\"pip\", \"install\", \"cibuildwheel==2.11.2\")\n\n # cibuildwheel only auto-detects the platform if it runs on a CI server.\n # Do the auto-detect manually to enable local runs.\n if sys.platform.startswith(\"linux\"):\n platform = \"linux\"\n elif sys.platform == \"darwin\":\n platform = \"macos\"\n elif sys.platform == \"win32\":\n platform = \"windows\"\n else:\n session.error(f\"Unknown platform: {sys.platform!r}\")\n\n session.log(\"Building binary distribution (wheels)\")\n session.run(\n \"cibuildwheel\",\n \"--platform\",\n platform,\n \"--output-dir\",\n dist_dir,\n )\n\n session.log(\n f\"Binary distribution in release mode for {platform!r} built into {dist_dir!r}\"\n )\n\n\[email protected]\ndef release_build_sdist(session: nox.Session) -> None:\n \"\"\"Build the source distribution.\"\"\"\n\n session.run(\"pip\", \"install\", \"build\")\n\n session.log(\"Building source distribution (sdist)\")\n session.run(\"python\", \"-m\", \"build\", \"--sdist\", \"--outdir\", dist_dir, \".\")\n\n session.log(f\"Source distribution in release mode built into {dist_dir!r}\")\n\n\[email protected]\ndef release_test_sdist(session: nox.Session) -> None:\n \"\"\"Build and install the sdist.\"\"\"\n\n # Find the sdist to install.\n sdists = list(Path(dist_dir).glob(\"cocotb-*.tar.gz\"))\n if len(sdists) == 0:\n session.error(\n f\"No *.tar.gz sdist file found in {dist_dir!r} \"\n f\"Run the 'release_build' session first.\"\n )\n if len(sdists) > 1:\n session.error(\n f\"More than one potential sdist found in the {dist_dir!r} \"\n f\"directory. Run the 'release_clean' session first!\"\n )\n sdist_path = sdists[0]\n assert sdist_path.is_file()\n\n session.log(\"Installing cocotb from sdist, which includes the build step\")\n session.run(\n \"pip\",\n \"install\",\n str(sdist_path),\n )\n\n session.log(\"Running cocotb-config as basic installation smoke test\")\n session.run(\"cocotb-config\", \"--version\")\n\n\ndef release_install(session: nox.Session) -> None:\n \"\"\"Helper: Install cocotb from wheels and also install test dependencies.\"\"\"\n\n # We have to disable the use of the PyPi index when installing cocotb to\n # guarantee that the wheels in dist are being used. 
But without an index\n # pip cannot find the dependencies, which need to be installed from PyPi.\n # Work around that by explicitly installing the dependencies first from\n # PyPi, and then installing cocotb itself from the local dist directory.\n\n session.log(\"Installing cocotb dependencies from PyPi\")\n session.run(\"pip\", \"install\", \"find_libpython\")\n\n session.log(f\"Installing cocotb from wheels in {dist_dir!r}\")\n session.run(\n \"pip\",\n \"install\",\n \"--force-reinstall\",\n \"--only-binary\",\n \"cocotb\",\n \"--no-index\",\n \"--no-dependencies\",\n \"--find-links\",\n dist_dir,\n \"cocotb\",\n )\n\n session.log(\"Running cocotb-config as basic installation smoke test\")\n session.run(\"cocotb-config\", \"--version\")\n\n session.log(\"Installing test dependencies\")\n session.run(\"pip\", \"install\", *test_deps)\n\n\[email protected]\[email protected](\"sim,toplevel_lang,gpi_interface\", simulator_support_matrix())\ndef release_test_sim(\n session: nox.Session, sim: str, toplevel_lang: str, gpi_interface: str\n) -> None:\n \"\"\"Test a release version of cocotb against a simulator.\"\"\"\n\n release_install(session)\n\n env = env_vars_for_test(sim, toplevel_lang, gpi_interface)\n config_str = stringify_dict(env)\n\n session.log(f\"Running tests against a simulator: {config_str}\")\n session.run(\"make\", \"clean\", \"test\", external=True, env=env)\n\n session.log(f\"Running simulator-specific tests against a simulator {config_str}\")\n session.run(\n \"pytest\",\n \"-v\",\n \"-k\",\n \"simulator_required\",\n env=env,\n )\n\n session.log(f\"All tests passed with configuration {config_str}!\")\n\n\[email protected]\ndef release_test_nosim(session: nox.Session) -> None:\n \"\"\"Run the simulator-agnostic tests against a cocotb release.\"\"\"\n\n release_install(session)\n\n session.log(\"Running simulator-agnostic tests\")\n session.run(\n \"pytest\",\n \"-v\",\n \"-k\",\n \"not simulator_required\",\n )\n\n session.log(\"All tests passed!\")\n\n\[email protected]\ndef docs(session: nox.Session) -> None:\n \"\"\"invoke sphinx-build to build the HTML docs\"\"\"\n session.run(\"pip\", \"install\", \"-r\", \"documentation/requirements.txt\")\n session.run(\"pip\", \"install\", \"-e\", \".\")\n outdir = session.cache_dir / \"docs_out\"\n session.run(\n \"sphinx-build\", \"./documentation/source\", str(outdir), \"--color\", \"-b\", \"html\"\n )\n index = (outdir / \"index.html\").resolve().as_uri()\n session.log(f\"Documentation is available at {index}\")\n\n\[email protected]\ndef docs_linkcheck(session: nox.Session) -> None:\n \"\"\"invoke sphinx-build to linkcheck the docs\"\"\"\n session.run(\"pip\", \"install\", \"-r\", \"documentation/requirements.txt\")\n session.run(\"pip\", \"install\", \"-e\", \".\")\n outdir = session.cache_dir / \"docs_out\"\n session.run(\n \"sphinx-build\",\n \"./documentation/source\",\n str(outdir),\n \"--color\",\n \"-b\",\n \"linkcheck\",\n )\n\n\[email protected]\ndef docs_spelling(session: nox.Session) -> None:\n \"\"\"invoke sphinx-build to spellcheck the docs\"\"\"\n session.run(\"pip\", \"install\", \"-r\", \"documentation/requirements.txt\")\n session.run(\"pip\", \"install\", \"-e\", \".\")\n outdir = session.cache_dir / \"docs_out\"\n session.run(\n \"sphinx-build\",\n \"./documentation/source\",\n str(outdir),\n \"--color\",\n \"-b\",\n \"spelling\",\n )\n\n\[email protected](reuse_venv=True)\ndef dev(session: nox.Session) -> None:\n \"\"\"Build a development environment and optionally run a command given as extra args\"\"\"\n\n 
configure_env_for_dev_build(session)\n\n session.run(\"pip\", \"install\", *test_deps)\n session.run(\"pip\", \"install\", *dev_deps)\n session.run(\"pip\", \"install\", \"-e\", \".\")\n if session.posargs:\n session.run(*session.posargs, external=True)\n", "path": "noxfile.py"}]} |
gh_patches_debug_145 | rasdani/github-patches | git_diff | wright-group__WrightTools-522 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
hide fit functionality
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `WrightTools/__init__.py`
Content:
```
1 """WrightTools init."""
2 # flake8: noqa
3
4
5 # --- import --------------------------------------------------------------------------------------
6
7
8 import sys as _sys
9
10 from .__version__ import *
11 from . import artists
12 from . import collection
13 from . import data
14 from . import diagrams
15 from . import fit
16 from . import kit
17 from . import units
18 from . import exceptions
19
20 from ._open import *
21 from .collection._collection import *
22 from .data._data import *
23
24
25 # --- rcparams ------------------------------------------------------------------------------------
26
27
28 if int(_sys.version.split('.')[0]) > 2:
29 artists.apply_rcparams('fast')
30
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/WrightTools/__init__.py b/WrightTools/__init__.py
--- a/WrightTools/__init__.py
+++ b/WrightTools/__init__.py
@@ -12,7 +12,6 @@
from . import collection
from . import data
from . import diagrams
-from . import fit
from . import kit
from . import units
from . import exceptions
| {"golden_diff": "diff --git a/WrightTools/__init__.py b/WrightTools/__init__.py\n--- a/WrightTools/__init__.py\n+++ b/WrightTools/__init__.py\n@@ -12,7 +12,6 @@\n from . import collection\n from . import data\n from . import diagrams\n-from . import fit\n from . import kit\n from . import units\n from . import exceptions\n", "issue": "hide fit functionality\n\n", "before_files": [{"content": "\"\"\"WrightTools init.\"\"\"\n# flake8: noqa\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nimport sys as _sys\n\nfrom .__version__ import *\nfrom . import artists\nfrom . import collection\nfrom . import data\nfrom . import diagrams\nfrom . import fit\nfrom . import kit\nfrom . import units\nfrom . import exceptions\n\nfrom ._open import *\nfrom .collection._collection import *\nfrom .data._data import *\n\n\n# --- rcparams ------------------------------------------------------------------------------------\n\n\nif int(_sys.version.split('.')[0]) > 2:\n artists.apply_rcparams('fast')\n", "path": "WrightTools/__init__.py"}], "after_files": [{"content": "\"\"\"WrightTools init.\"\"\"\n# flake8: noqa\n\n\n# --- import --------------------------------------------------------------------------------------\n\n\nimport sys as _sys\n\nfrom .__version__ import *\nfrom . import artists\nfrom . import collection\nfrom . import data\nfrom . import diagrams\nfrom . import kit\nfrom . import units\nfrom . import exceptions\n\nfrom ._open import *\nfrom .collection._collection import *\nfrom .data._data import *\n\n\n# --- rcparams ------------------------------------------------------------------------------------\n\n\nif int(_sys.version.split('.')[0]) > 2:\n artists.apply_rcparams('fast')\n", "path": "WrightTools/__init__.py"}]} |
gh_patches_debug_146 | rasdani/github-patches | git_diff | python-poetry__poetry-979 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
--no-root behavior is inverted on latest develop
[This](https://github.com/sdispater/poetry/commit/37ec1447b3508ee0bbdb41f8e5773ed5bfae0654#diff-427299ba040b8502b4d29846e595c2d0R59) should probably be `if self.option("no-root")`, to _not_ install the root package when `--no-root` is provided.
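To make the intended semantics concrete, here is a tiny standalone sketch of the desired logic (the helper name is hypothetical and not part of Poetry's API):

```python
def should_install_root(no_root_flag: bool) -> bool:
    """Sketch of the intended semantics: install the project itself
    unless the user explicitly passed --no-root."""
    return not no_root_flag

assert should_install_root(no_root_flag=False) is True   # default: install the root package
assert should_install_root(no_root_flag=True) is False   # --no-root: skip it
```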
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `poetry/console/commands/install.py`
Content:
```
1 import os
2
3 from .env_command import EnvCommand
4
5
6 class InstallCommand(EnvCommand):
7 """
8 Installs the project dependencies.
9
10 install
11 { --no-dev : Do not install dev dependencies. }
12 { --no-root : Do not install the root package (your project). }
13 { --dry-run : Outputs the operations but will not execute anything
14 (implicitly enables --verbose). }
15 { --E|extras=* : Extra sets of dependencies to install. }
16 { --develop=* : Install given packages in development mode. }
17 """
18
19 help = """The <info>install</info> command reads the <comment>poetry.lock</> file from
20 the current directory, processes it, and downloads and installs all the
21 libraries and dependencies outlined in that file. If the file does not
22 exist it will look for <comment>pyproject.toml</> and do the same.
23
24 <info>poetry install</info>
25 """
26
27 _loggers = ["poetry.repositories.pypi_repository"]
28
29 def handle(self):
30 from clikit.io import NullIO
31 from poetry.installation import Installer
32 from poetry.masonry.builders import SdistBuilder
33 from poetry.masonry.utils.module import ModuleOrPackageNotFound
34 from poetry.utils._compat import decode
35 from poetry.utils.env import NullEnv
36
37 installer = Installer(
38 self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool
39 )
40
41 extras = []
42 for extra in self.option("extras"):
43 if " " in extra:
44 extras += [e.strip() for e in extra.split(" ")]
45 else:
46 extras.append(extra)
47
48 installer.extras(extras)
49 installer.dev_mode(not self.option("no-dev"))
50 installer.develop(self.option("develop"))
51 installer.dry_run(self.option("dry-run"))
52 installer.verbose(self.option("verbose"))
53
54 return_code = installer.run()
55
56 if return_code != 0:
57 return return_code
58
59 if not self.option("no-root"):
60 return 0
61
62 try:
63 builder = SdistBuilder(self.poetry, NullEnv(), NullIO())
64 except ModuleOrPackageNotFound:
65 # This is likely due to the fact that the project is an application
66 # not following the structure expected by Poetry
67 # If this is a true error it will be picked up later by build anyway.
68 return 0
69
70 self.line(
71 " - Installing <info>{}</info> (<comment>{}</comment>)".format(
72 self.poetry.package.pretty_name, self.poetry.package.pretty_version
73 )
74 )
75
76 if self.option("dry-run"):
77 return 0
78
79 setup = self.poetry.file.parent / "setup.py"
80 has_setup = setup.exists()
81
82 if has_setup:
83 self.line("<warning>A setup.py file already exists. Using it.</warning>")
84 else:
85 with setup.open("w", encoding="utf-8") as f:
86 f.write(decode(builder.build_setup()))
87
88 try:
89 self.env.run("pip", "install", "-e", str(setup.parent), "--no-deps")
90 finally:
91 if not has_setup:
92 os.remove(str(setup))
93
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/poetry/console/commands/install.py b/poetry/console/commands/install.py
--- a/poetry/console/commands/install.py
+++ b/poetry/console/commands/install.py
@@ -56,7 +56,7 @@
if return_code != 0:
return return_code
- if not self.option("no-root"):
+ if self.option("no-root"):
return 0
try:
| {"golden_diff": "diff --git a/poetry/console/commands/install.py b/poetry/console/commands/install.py\n--- a/poetry/console/commands/install.py\n+++ b/poetry/console/commands/install.py\n@@ -56,7 +56,7 @@\n if return_code != 0:\n return return_code\n \n- if not self.option(\"no-root\"):\n+ if self.option(\"no-root\"):\n return 0\n \n try:\n", "issue": "--no-root behavior is inverted on latest develop\n[This](https://github.com/sdispater/poetry/commit/37ec1447b3508ee0bbdb41f8e5773ed5bfae0654#diff-427299ba040b8502b4d29846e595c2d0R59) should probably be `if self.option(\"no-root\")`, to _not_ install the root package when `--no-root` is provided.\n", "before_files": [{"content": "import os\n\nfrom .env_command import EnvCommand\n\n\nclass InstallCommand(EnvCommand):\n \"\"\"\n Installs the project dependencies.\n\n install\n { --no-dev : Do not install dev dependencies. }\n { --no-root : Do not install the root package (your project). }\n { --dry-run : Outputs the operations but will not execute anything\n (implicitly enables --verbose). }\n { --E|extras=* : Extra sets of dependencies to install. }\n { --develop=* : Install given packages in development mode. }\n \"\"\"\n\n help = \"\"\"The <info>install</info> command reads the <comment>poetry.lock</> file from\nthe current directory, processes it, and downloads and installs all the\nlibraries and dependencies outlined in that file. If the file does not\nexist it will look for <comment>pyproject.toml</> and do the same.\n\n<info>poetry install</info>\n\"\"\"\n\n _loggers = [\"poetry.repositories.pypi_repository\"]\n\n def handle(self):\n from clikit.io import NullIO\n from poetry.installation import Installer\n from poetry.masonry.builders import SdistBuilder\n from poetry.masonry.utils.module import ModuleOrPackageNotFound\n from poetry.utils._compat import decode\n from poetry.utils.env import NullEnv\n\n installer = Installer(\n self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool\n )\n\n extras = []\n for extra in self.option(\"extras\"):\n if \" \" in extra:\n extras += [e.strip() for e in extra.split(\" \")]\n else:\n extras.append(extra)\n\n installer.extras(extras)\n installer.dev_mode(not self.option(\"no-dev\"))\n installer.develop(self.option(\"develop\"))\n installer.dry_run(self.option(\"dry-run\"))\n installer.verbose(self.option(\"verbose\"))\n\n return_code = installer.run()\n\n if return_code != 0:\n return return_code\n\n if not self.option(\"no-root\"):\n return 0\n\n try:\n builder = SdistBuilder(self.poetry, NullEnv(), NullIO())\n except ModuleOrPackageNotFound:\n # This is likely due to the fact that the project is an application\n # not following the structure expected by Poetry\n # If this is a true error it will be picked up later by build anyway.\n return 0\n\n self.line(\n \" - Installing <info>{}</info> (<comment>{}</comment>)\".format(\n self.poetry.package.pretty_name, self.poetry.package.pretty_version\n )\n )\n\n if self.option(\"dry-run\"):\n return 0\n\n setup = self.poetry.file.parent / \"setup.py\"\n has_setup = setup.exists()\n\n if has_setup:\n self.line(\"<warning>A setup.py file already exists. 
Using it.</warning>\")\n else:\n with setup.open(\"w\", encoding=\"utf-8\") as f:\n f.write(decode(builder.build_setup()))\n\n try:\n self.env.run(\"pip\", \"install\", \"-e\", str(setup.parent), \"--no-deps\")\n finally:\n if not has_setup:\n os.remove(str(setup))\n", "path": "poetry/console/commands/install.py"}], "after_files": [{"content": "import os\n\nfrom .env_command import EnvCommand\n\n\nclass InstallCommand(EnvCommand):\n \"\"\"\n Installs the project dependencies.\n\n install\n { --no-dev : Do not install dev dependencies. }\n { --no-root : Do not install the root package (your project). }\n { --dry-run : Outputs the operations but will not execute anything\n (implicitly enables --verbose). }\n { --E|extras=* : Extra sets of dependencies to install. }\n { --develop=* : Install given packages in development mode. }\n \"\"\"\n\n help = \"\"\"The <info>install</info> command reads the <comment>poetry.lock</> file from\nthe current directory, processes it, and downloads and installs all the\nlibraries and dependencies outlined in that file. If the file does not\nexist it will look for <comment>pyproject.toml</> and do the same.\n\n<info>poetry install</info>\n\"\"\"\n\n _loggers = [\"poetry.repositories.pypi_repository\"]\n\n def handle(self):\n from clikit.io import NullIO\n from poetry.installation import Installer\n from poetry.masonry.builders import SdistBuilder\n from poetry.masonry.utils.module import ModuleOrPackageNotFound\n from poetry.utils._compat import decode\n from poetry.utils.env import NullEnv\n\n installer = Installer(\n self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool\n )\n\n extras = []\n for extra in self.option(\"extras\"):\n if \" \" in extra:\n extras += [e.strip() for e in extra.split(\" \")]\n else:\n extras.append(extra)\n\n installer.extras(extras)\n installer.dev_mode(not self.option(\"no-dev\"))\n installer.develop(self.option(\"develop\"))\n installer.dry_run(self.option(\"dry-run\"))\n installer.verbose(self.option(\"verbose\"))\n\n return_code = installer.run()\n\n if return_code != 0:\n return return_code\n\n if self.option(\"no-root\"):\n return 0\n\n try:\n builder = SdistBuilder(self.poetry, NullEnv(), NullIO())\n except ModuleOrPackageNotFound:\n # This is likely due to the fact that the project is an application\n # not following the structure expected by Poetry\n # If this is a true error it will be picked up later by build anyway.\n return 0\n\n self.line(\n \" - Installing <info>{}</info> (<comment>{}</comment>)\".format(\n self.poetry.package.pretty_name, self.poetry.package.pretty_version\n )\n )\n\n if self.option(\"dry-run\"):\n return 0\n\n setup = self.poetry.file.parent / \"setup.py\"\n has_setup = setup.exists()\n\n if has_setup:\n self.line(\"<warning>A setup.py file already exists. Using it.</warning>\")\n else:\n with setup.open(\"w\", encoding=\"utf-8\") as f:\n f.write(decode(builder.build_setup()))\n\n try:\n self.env.run(\"pip\", \"install\", \"-e\", str(setup.parent), \"--no-deps\")\n finally:\n if not has_setup:\n os.remove(str(setup))\n", "path": "poetry/console/commands/install.py"}]} |
gh_patches_debug_147 | rasdani/github-patches | git_diff | mit-ll-responsible-ai__hydra-zen-97 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
PEP 561 compatibility
Hi,
Would it be possible to make hydra-zen compliant with [PEP 561](https://www.python.org/dev/peps/pep-0561) by distributing a `py.typed` file with the package?
Currently I'm getting `Skipping analyzing "hydra_zen": found module but no type hints or library stubs` when I run mypy on a test file. Here are steps to reproduce this error:
```text
$ pip install hydra-zen mypy
...
Successfully installed PyYAML-5.4.1 antlr4-python3-runtime-4.8 hydra-core-1.1.1 hydra-zen-0.2.0 mypy-0.910 mypy-extensions-0.4.3 omegaconf-2.1.1 toml-0.10.2 typing-extensions-3.10.0.2
...
$ echo "from hydra_zen import builds" > tmp.py
$ mypy tmp.py
tmp.py:1: error: Skipping analyzing "hydra_zen": found module but no type hints or library stubs
tmp.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports
Found 1 error in 1 file (checked 1 source file)
```
I believe that adding an empty `py.typed` file to the `src/hydra_zen` directory (and modifying `setup.py` so that the `py.typed` file is distributed with the `hydra-zen` package) would make it possible for type checkers following PEP 561 to discover the type hints in `src`.
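As a rough sketch of the packaging side (a hypothetical fragment; the real `setup.py` carries much more metadata), the marker file could be declared like this:

```python
from setuptools import find_packages, setup

setup(
    name="hydra_zen",
    packages=find_packages(where="src", exclude=["tests", "tests.*"]),
    package_dir={"": "src"},
    # Ship the (empty) src/hydra_zen/py.typed marker in the sdist/wheel so that
    # PEP 561-aware type checkers such as mypy use the inline annotations.
    package_data={"hydra_zen": ["py.typed"]},
    zip_safe=False,  # PEP 561 recommends typed packages not be installed zipped
)
```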
(I'd be happy to submit a PR to this effect.)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # Copyright (c) 2021 Massachusetts Institute of Technology
2 # SPDX-License-Identifier: MIT
3
4 from setuptools import find_packages, setup
5
6 import versioneer
7
8 DISTNAME = "hydra_zen"
9 LICENSE = "MIT"
10 AUTHOR = "Justin Goodwin, Ryan Soklaski"
11 AUTHOR_EMAIL = "[email protected]"
12 URL = "https://github.com/mit-ll-responsible-ai/hydra_zen"
13 CLASSIFIERS = [
14 "Development Status :: 4 - Beta",
15 "License :: OSI Approved :: MIT License",
16 "Operating System :: OS Independent",
17 "Intended Audience :: Science/Research",
18 "Programming Language :: Python :: 3.6",
19 "Programming Language :: Python :: 3.7",
20 "Programming Language :: Python :: 3.8",
21 "Programming Language :: Python :: 3.9",
22 "Topic :: Scientific/Engineering",
23 ]
24 KEYWORDS = "machine learning research configuration scalable reproducible"
25 INSTALL_REQUIRES = [
26 "hydra-core >= 1.1.0",
27 "typing-extensions >= 3.7.4.1",
28 ]
29 TESTS_REQUIRE = [
30 "pytest >= 3.8",
31 "hypothesis >= 5.32.0",
32 ]
33
34 DESCRIPTION = "Utilities for making hydra scale to ML workflows"
35 LONG_DESCRIPTION = """
36 hydra-zen helps you configure your project using the power of Hydra, while enjoying the Zen of Python!
37
38 hydra-zen eliminates the boilerplate code that you write to configure, orchestrate, and organize the results of large-scale projects, such as machine learning experiments. It does so by providing Hydra-compatible tools that dynamically generate "structured configurations" of your code, and enables Python-centric workflows for running configured instances of your code.
39
40 hydra-zen offers:
41
42 - Functions for automatically and dynamically generating structured configs that can be used to fully or partially instantiate objects in your application.
43 - The ability to launch Hydra jobs, complete with parameter sweeps and multi-run configurations, from within a notebook or any other Python environment.
44 - Incisive type annotations that provide enriched context about your project's configurations to IDEs, type checkers, and other tooling.
45 - Runtime validation of configurations to catch mistakes before your application launches.
46 - Equal support for both object-oriented libraries (e.g., torch.nn) and functional ones (e.g., jax and numpy).
47
48 These functions and capabilities can be used to great effect alongside PyTorch Lightning to design boilerplate-free machine learning projects!
49 """
50
51
52 setup(
53 name=DISTNAME,
54 version=versioneer.get_version(),
55 cmdclass=versioneer.get_cmdclass(),
56 license=LICENSE,
57 author=AUTHOR,
58 author_email=AUTHOR_EMAIL,
59 classifiers=CLASSIFIERS,
60 keywords=KEYWORDS,
61 description=DESCRIPTION,
62 long_description=LONG_DESCRIPTION,
63 install_requires=INSTALL_REQUIRES,
64 tests_require=TESTS_REQUIRE,
65 url=URL,
66 download_url="https://github.com/mit-ll-responsible-ai/hydra-zen/tarball/"
67 + versioneer.get_version(),
68 python_requires=">=3.6",
69 packages=find_packages(where="src", exclude=["tests", "tests.*"]),
70 package_dir={"": "src"},
71 )
72
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -68,4 +68,5 @@
python_requires=">=3.6",
packages=find_packages(where="src", exclude=["tests", "tests.*"]),
package_dir={"": "src"},
+ package_data={"hydra_zen": ["py.typed"]}
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -68,4 +68,5 @@\n python_requires=\">=3.6\",\n packages=find_packages(where=\"src\", exclude=[\"tests\", \"tests.*\"]),\n package_dir={\"\": \"src\"},\n+ package_data={\"hydra_zen\": [\"py.typed\"]}\n )\n", "issue": "PEP 561 compatibility\nHi,\r\n\r\nWould it be possible to make hydra-zen compliant with [PEP 561](https://www.python.org/dev/peps/pep-0561) by distributing a `py.typed` file with the package?\r\n\r\nCurrently I'm getting `Skipping analyzing \"hydra_zen\": found module but no type hints or library stubs` when I run mypy on a test file. Here are steps to reproduce this error:\r\n```text\r\n$ pip install hydra-zen mypy\r\n...\r\nSuccessfully installed PyYAML-5.4.1 antlr4-python3-runtime-4.8 hydra-core-1.1.1 hydra-zen-0.2.0 mypy-0.910 mypy-extensions-0.4.3 omegaconf-2.1.1 toml-0.10.2 typing-extensions-3.10.0.2\r\n...\r\n$ echo \"from hydra_zen import builds\" > tmp.py\r\n$ mypy tmp.py\r\ntmp.py:1: error: Skipping analyzing \"hydra_zen\": found module but no type hints or library stubs\r\ntmp.py:1: note: See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports\r\nFound 1 error in 1 file (checked 1 source file)\r\n```\r\n\r\nI believe that adding an empty `py.typed` file to the `src/hydra_zen` directory (and modifying `setup.py` so that the `py.typed` file is distributed with the `hydra-zen` package) would make it possible for type checkers following PEP 561 to discover the type hints in `src`.\r\n(I'd be happy to submit a PR to this effect.)\n", "before_files": [{"content": "# Copyright (c) 2021 Massachusetts Institute of Technology\n# SPDX-License-Identifier: MIT\n\nfrom setuptools import find_packages, setup\n\nimport versioneer\n\nDISTNAME = \"hydra_zen\"\nLICENSE = \"MIT\"\nAUTHOR = \"Justin Goodwin, Ryan Soklaski\"\nAUTHOR_EMAIL = \"[email protected]\"\nURL = \"https://github.com/mit-ll-responsible-ai/hydra_zen\"\nCLASSIFIERS = [\n \"Development Status :: 4 - Beta\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Intended Audience :: Science/Research\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Topic :: Scientific/Engineering\",\n]\nKEYWORDS = \"machine learning research configuration scalable reproducible\"\nINSTALL_REQUIRES = [\n \"hydra-core >= 1.1.0\",\n \"typing-extensions >= 3.7.4.1\",\n]\nTESTS_REQUIRE = [\n \"pytest >= 3.8\",\n \"hypothesis >= 5.32.0\",\n]\n\nDESCRIPTION = \"Utilities for making hydra scale to ML workflows\"\nLONG_DESCRIPTION = \"\"\"\nhydra-zen helps you configure your project using the power of Hydra, while enjoying the Zen of Python!\n\nhydra-zen eliminates the boilerplate code that you write to configure, orchestrate, and organize the results of large-scale projects, such as machine learning experiments. 
It does so by providing Hydra-compatible tools that dynamically generate \"structured configurations\" of your code, and enables Python-centric workflows for running configured instances of your code.\n\nhydra-zen offers:\n\n - Functions for automatically and dynamically generating structured configs that can be used to fully or partially instantiate objects in your application.\n - The ability to launch Hydra jobs, complete with parameter sweeps and multi-run configurations, from within a notebook or any other Python environment.\n - Incisive type annotations that provide enriched context about your project's configurations to IDEs, type checkers, and other tooling.\n - Runtime validation of configurations to catch mistakes before your application launches.\n - Equal support for both object-oriented libraries (e.g., torch.nn) and functional ones (e.g., jax and numpy).\n\nThese functions and capabilities can be used to great effect alongside PyTorch Lightning to design boilerplate-free machine learning projects!\n\"\"\"\n\n\nsetup(\n name=DISTNAME,\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n license=LICENSE,\n author=AUTHOR,\n author_email=AUTHOR_EMAIL,\n classifiers=CLASSIFIERS,\n keywords=KEYWORDS,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n install_requires=INSTALL_REQUIRES,\n tests_require=TESTS_REQUIRE,\n url=URL,\n download_url=\"https://github.com/mit-ll-responsible-ai/hydra-zen/tarball/\"\n + versioneer.get_version(),\n python_requires=\">=3.6\",\n packages=find_packages(where=\"src\", exclude=[\"tests\", \"tests.*\"]),\n package_dir={\"\": \"src\"},\n)\n", "path": "setup.py"}], "after_files": [{"content": "# Copyright (c) 2021 Massachusetts Institute of Technology\n# SPDX-License-Identifier: MIT\n\nfrom setuptools import find_packages, setup\n\nimport versioneer\n\nDISTNAME = \"hydra_zen\"\nLICENSE = \"MIT\"\nAUTHOR = \"Justin Goodwin, Ryan Soklaski\"\nAUTHOR_EMAIL = \"[email protected]\"\nURL = \"https://github.com/mit-ll-responsible-ai/hydra_zen\"\nCLASSIFIERS = [\n \"Development Status :: 4 - Beta\",\n \"License :: OSI Approved :: MIT License\",\n \"Operating System :: OS Independent\",\n \"Intended Audience :: Science/Research\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Topic :: Scientific/Engineering\",\n]\nKEYWORDS = \"machine learning research configuration scalable reproducible\"\nINSTALL_REQUIRES = [\n \"hydra-core >= 1.1.0\",\n \"typing-extensions >= 3.7.4.1\",\n]\nTESTS_REQUIRE = [\n \"pytest >= 3.8\",\n \"hypothesis >= 5.32.0\",\n]\n\nDESCRIPTION = \"Utilities for making hydra scale to ML workflows\"\nLONG_DESCRIPTION = \"\"\"\nhydra-zen helps you configure your project using the power of Hydra, while enjoying the Zen of Python!\n\nhydra-zen eliminates the boilerplate code that you write to configure, orchestrate, and organize the results of large-scale projects, such as machine learning experiments. 
It does so by providing Hydra-compatible tools that dynamically generate \"structured configurations\" of your code, and enables Python-centric workflows for running configured instances of your code.\n\nhydra-zen offers:\n\n - Functions for automatically and dynamically generating structured configs that can be used to fully or partially instantiate objects in your application.\n - The ability to launch Hydra jobs, complete with parameter sweeps and multi-run configurations, from within a notebook or any other Python environment.\n - Incisive type annotations that provide enriched context about your project's configurations to IDEs, type checkers, and other tooling.\n - Runtime validation of configurations to catch mistakes before your application launches.\n - Equal support for both object-oriented libraries (e.g., torch.nn) and functional ones (e.g., jax and numpy).\n\nThese functions and capabilities can be used to great effect alongside PyTorch Lightning to design boilerplate-free machine learning projects!\n\"\"\"\n\n\nsetup(\n name=DISTNAME,\n version=versioneer.get_version(),\n cmdclass=versioneer.get_cmdclass(),\n license=LICENSE,\n author=AUTHOR,\n author_email=AUTHOR_EMAIL,\n classifiers=CLASSIFIERS,\n keywords=KEYWORDS,\n description=DESCRIPTION,\n long_description=LONG_DESCRIPTION,\n install_requires=INSTALL_REQUIRES,\n tests_require=TESTS_REQUIRE,\n url=URL,\n download_url=\"https://github.com/mit-ll-responsible-ai/hydra-zen/tarball/\"\n + versioneer.get_version(),\n python_requires=\">=3.6\",\n packages=find_packages(where=\"src\", exclude=[\"tests\", \"tests.*\"]),\n package_dir={\"\": \"src\"},\n package_data={\"hydra_zen\": [\"py.typed\"]}\n)\n", "path": "setup.py"}]} |
gh_patches_debug_148 | rasdani/github-patches | git_diff | nipy__nipype-3385 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
MathsCommand().inputs.out_file has to exist
When setting the `MathsCommand().inputs.out_file` argument, a Trait error is thrown because of the `exists=True` constraint:
https://github.com/nipy/nipype/blob/6a7837c0994367a5f34bb576bb0a97ec70669b8f/nipype/interfaces/fsl/maths.py#L44
I think this doesn't make any sense, because `out_file` is to be created by `MathsCommand()`.
In my case, I just want to get rid of SPM NaNs in a .nii file, but I also don't want fsl_maths to append a suffix to the filename - this is why I also provide `out_file` to `MathsCommand().inputs`.
A short example:
```
nan2zero = pe.MapNode(interface=MathsCommand(), name='nan2zero', iterfield=['in_file', 'out_file'])
nan2zero.inputs.nan2zeros = True
```
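For context, a slightly fuller version of that snippet might look like the following; the imports and file names are illustrative assumptions, not taken from the original report:

```python
import nipype.pipeline.engine as pe
from nipype.interfaces.fsl.maths import MathsCommand

# MapNode that zeroes NaNs while writing to caller-chosen output names,
# i.e. without letting MathsCommand append its default "_maths" suffix.
nan2zero = pe.MapNode(
    interface=MathsCommand(),
    name="nan2zero",
    iterfield=["in_file", "out_file"],
)
nan2zero.inputs.nan2zeros = True
nan2zero.inputs.in_file = ["con_0001.nii", "con_0002.nii"]               # example inputs
nan2zero.inputs.out_file = ["con_0001_nonan.nii", "con_0002_nonan.nii"]  # chosen names
```

(Under the behaviour described above, it is presumably the `out_file` values in a wiring like this that trip the `exists=True` validation.)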
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `nipype/interfaces/fsl/maths.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
3 # vi: set ft=python sts=4 ts=4 sw=4 et:
4 """
5 The maths module provides higher-level interfaces to some of the operations
6 that can be performed with the fslmaths command-line program.
7 """
8 import os
9 import numpy as np
10
11 from ..base import TraitedSpec, File, traits, InputMultiPath, isdefined
12 from .base import FSLCommand, FSLCommandInputSpec
13
14
15 class MathsInput(FSLCommandInputSpec):
16
17 in_file = File(
18 position=2, argstr="%s", exists=True, mandatory=True, desc="image to operate on"
19 )
20 out_file = File(
21 genfile=True, position=-2, argstr="%s", desc="image to write", hash_files=False
22 )
23 _dtypes = ["float", "char", "int", "short", "double", "input"]
24 internal_datatype = traits.Enum(
25 *_dtypes,
26 position=1,
27 argstr="-dt %s",
28 desc=("datatype to use for calculations " "(default is float)")
29 )
30 output_datatype = traits.Enum(
31 *_dtypes,
32 position=-1,
33 argstr="-odt %s",
34 desc=("datatype to use for output (default " "uses input type)")
35 )
36
37 nan2zeros = traits.Bool(
38 position=3, argstr="-nan", desc="change NaNs to zeros before doing anything"
39 )
40
41
42 class MathsOutput(TraitedSpec):
43
44 out_file = File(exists=True, desc="image written after calculations")
45
46
47 class MathsCommand(FSLCommand):
48
49 _cmd = "fslmaths"
50 input_spec = MathsInput
51 output_spec = MathsOutput
52 _suffix = "_maths"
53
54 def _list_outputs(self):
55 outputs = self.output_spec().get()
56 outputs["out_file"] = self.inputs.out_file
57 if not isdefined(self.inputs.out_file):
58 outputs["out_file"] = self._gen_fname(
59 self.inputs.in_file, suffix=self._suffix
60 )
61 outputs["out_file"] = os.path.abspath(outputs["out_file"])
62 return outputs
63
64 def _gen_filename(self, name):
65 if name == "out_file":
66 return self._list_outputs()["out_file"]
67 return None
68
69
70 class ChangeDataTypeInput(MathsInput):
71
72 _dtypes = ["float", "char", "int", "short", "double", "input"]
73 output_datatype = traits.Enum(
74 *_dtypes, position=-1, argstr="-odt %s", mandatory=True, desc="output data type"
75 )
76
77
78 class ChangeDataType(MathsCommand):
79 """Use fslmaths to change the datatype of an image."""
80
81 input_spec = ChangeDataTypeInput
82 _suffix = "_chdt"
83
84
85 class ThresholdInputSpec(MathsInput):
86
87 thresh = traits.Float(
88 mandatory=True, position=4, argstr="%s", desc="threshold value"
89 )
90 direction = traits.Enum(
91 "below",
92 "above",
93 usedefault=True,
94 desc="zero-out either below or above thresh value",
95 )
96 use_robust_range = traits.Bool(
97 desc="interpret thresh as percentage (0-100) of robust range"
98 )
99 use_nonzero_voxels = traits.Bool(
100 desc="use nonzero voxels to calculate robust range",
101 requires=["use_robust_range"],
102 )
103
104
105 class Threshold(MathsCommand):
106 """Use fslmaths to apply a threshold to an image in a variety of ways."""
107
108 input_spec = ThresholdInputSpec
109 _suffix = "_thresh"
110
111 def _format_arg(self, name, spec, value):
112 if name == "thresh":
113 arg = "-"
114 _si = self.inputs
115 if self.inputs.direction == "above":
116 arg += "u"
117 arg += "thr"
118 if isdefined(_si.use_robust_range) and _si.use_robust_range:
119 if isdefined(_si.use_nonzero_voxels) and _si.use_nonzero_voxels:
120 arg += "P"
121 else:
122 arg += "p"
123 arg += " %.10f" % value
124 return arg
125 return super(Threshold, self)._format_arg(name, spec, value)
126
127
128 class StdImageInput(MathsInput):
129
130 dimension = traits.Enum(
131 "T",
132 "X",
133 "Y",
134 "Z",
135 usedefault=True,
136 argstr="-%sstd",
137 position=4,
138 desc="dimension to standard deviate across",
139 )
140
141
142 class StdImage(MathsCommand):
143 """Use fslmaths to generate a standard deviation in an image across a given
144 dimension.
145 """
146
147 input_spec = StdImageInput
148 _suffix = "_std"
149
150
151 class MeanImageInput(MathsInput):
152
153 dimension = traits.Enum(
154 "T",
155 "X",
156 "Y",
157 "Z",
158 usedefault=True,
159 argstr="-%smean",
160 position=4,
161 desc="dimension to mean across",
162 )
163
164
165 class MeanImage(MathsCommand):
166 """Use fslmaths to generate a mean image across a given dimension."""
167
168 input_spec = MeanImageInput
169 _suffix = "_mean"
170
171
172 class MaxImageInput(MathsInput):
173
174 dimension = traits.Enum(
175 "T",
176 "X",
177 "Y",
178 "Z",
179 usedefault=True,
180 argstr="-%smax",
181 position=4,
182 desc="dimension to max across",
183 )
184
185
186 class MaxImage(MathsCommand):
187 """Use fslmaths to generate a max image across a given dimension.
188
189 Examples
190 --------
191 >>> from nipype.interfaces.fsl.maths import MaxImage
192 >>> maxer = MaxImage()
193 >>> maxer.inputs.in_file = "functional.nii" # doctest: +SKIP
194 >>> maxer.dimension = "T"
195 >>> maxer.cmdline # doctest: +SKIP
196 'fslmaths functional.nii -Tmax functional_max.nii'
197
198 """
199
200 input_spec = MaxImageInput
201 _suffix = "_max"
202
203
204 class PercentileImageInput(MathsInput):
205
206 dimension = traits.Enum(
207 "T",
208 "X",
209 "Y",
210 "Z",
211 usedefault=True,
212 argstr="-%sperc",
213 position=4,
214 desc="dimension to percentile across",
215 )
216 perc = traits.Range(
217 low=0,
218 high=100,
219 argstr="%f",
220 position=5,
221 desc=("nth percentile (0-100) of FULL RANGE " "across dimension"),
222 )
223
224
225 class PercentileImage(MathsCommand):
226 """Use fslmaths to generate a percentile image across a given dimension.
227
228 Examples
229 --------
230 >>> from nipype.interfaces.fsl.maths import MaxImage
231 >>> percer = PercentileImage()
232 >>> percer.inputs.in_file = "functional.nii" # doctest: +SKIP
233 >>> percer.dimension = "T"
234 >>> percer.perc = 90
235 >>> percer.cmdline # doctest: +SKIP
236 'fslmaths functional.nii -Tperc 90 functional_perc.nii'
237
238 """
239
240 input_spec = PercentileImageInput
241 _suffix = "_perc"
242
243
244 class MaxnImageInput(MathsInput):
245
246 dimension = traits.Enum(
247 "T",
248 "X",
249 "Y",
250 "Z",
251 usedefault=True,
252 argstr="-%smaxn",
253 position=4,
254 desc="dimension to index max across",
255 )
256
257
258 class MaxnImage(MathsCommand):
259 """Use fslmaths to generate an image of index of max across
260 a given dimension.
261
262 """
263
264 input_spec = MaxnImageInput
265 _suffix = "_maxn"
266
267
268 class MinImageInput(MathsInput):
269
270 dimension = traits.Enum(
271 "T",
272 "X",
273 "Y",
274 "Z",
275 usedefault=True,
276 argstr="-%smin",
277 position=4,
278 desc="dimension to min across",
279 )
280
281
282 class MinImage(MathsCommand):
283 """Use fslmaths to generate a minimum image across a given dimension."""
284
285 input_spec = MinImageInput
286 _suffix = "_min"
287
288
289 class MedianImageInput(MathsInput):
290
291 dimension = traits.Enum(
292 "T",
293 "X",
294 "Y",
295 "Z",
296 usedefault=True,
297 argstr="-%smedian",
298 position=4,
299 desc="dimension to median across",
300 )
301
302
303 class MedianImage(MathsCommand):
304 """Use fslmaths to generate a median image across a given dimension."""
305
306 input_spec = MedianImageInput
307 _suffix = "_median"
308
309
310 class AR1ImageInput(MathsInput):
311
312 dimension = traits.Enum(
313 "T",
314 "X",
315 "Y",
316 "Z",
317 usedefault=True,
318 argstr="-%sar1",
319 position=4,
320 desc=("dimension to find AR(1) coefficient" "across"),
321 )
322
323
324 class AR1Image(MathsCommand):
325 """Use fslmaths to generate an AR1 coefficient image across a
326 given dimension. (Should use -odt float and probably demean first)
327
328 """
329
330 input_spec = AR1ImageInput
331 _suffix = "_ar1"
332
333
334 class IsotropicSmoothInput(MathsInput):
335
336 fwhm = traits.Float(
337 mandatory=True,
338 xor=["sigma"],
339 position=4,
340 argstr="-s %.5f",
341 desc="fwhm of smoothing kernel [mm]",
342 )
343 sigma = traits.Float(
344 mandatory=True,
345 xor=["fwhm"],
346 position=4,
347 argstr="-s %.5f",
348 desc="sigma of smoothing kernel [mm]",
349 )
350
351
352 class IsotropicSmooth(MathsCommand):
353 """Use fslmaths to spatially smooth an image with a gaussian kernel."""
354
355 input_spec = IsotropicSmoothInput
356 _suffix = "_smooth"
357
358 def _format_arg(self, name, spec, value):
359 if name == "fwhm":
360 sigma = float(value) / np.sqrt(8 * np.log(2))
361 return spec.argstr % sigma
362 return super(IsotropicSmooth, self)._format_arg(name, spec, value)
363
364
365 class ApplyMaskInput(MathsInput):
366
367 mask_file = File(
368 exists=True,
369 mandatory=True,
370 argstr="-mas %s",
371 position=4,
372 desc="binary image defining mask space",
373 )
374
375
376 class ApplyMask(MathsCommand):
377 """Use fslmaths to apply a binary mask to another image."""
378
379 input_spec = ApplyMaskInput
380 _suffix = "_masked"
381
382
383 class KernelInput(MathsInput):
384
385 kernel_shape = traits.Enum(
386 "3D",
387 "2D",
388 "box",
389 "boxv",
390 "gauss",
391 "sphere",
392 "file",
393 argstr="-kernel %s",
394 position=4,
395 desc="kernel shape to use",
396 )
397 kernel_size = traits.Float(
398 argstr="%.4f",
399 position=5,
400 xor=["kernel_file"],
401 desc=(
402 "kernel size - voxels for box/boxv, mm " "for sphere, mm sigma for gauss"
403 ),
404 )
405 kernel_file = File(
406 exists=True,
407 argstr="%s",
408 position=5,
409 xor=["kernel_size"],
410 desc="use external file for kernel",
411 )
412
413
414 class DilateInput(KernelInput):
415
416 operation = traits.Enum(
417 "mean",
418 "modal",
419 "max",
420 argstr="-dil%s",
421 position=6,
422 mandatory=True,
423 desc="filtering operation to perfoem in dilation",
424 )
425
426
427 class DilateImage(MathsCommand):
428 """Use fslmaths to perform a spatial dilation of an image."""
429
430 input_spec = DilateInput
431 _suffix = "_dil"
432
433 def _format_arg(self, name, spec, value):
434 if name == "operation":
435 return spec.argstr % dict(mean="M", modal="D", max="F")[value]
436 return super(DilateImage, self)._format_arg(name, spec, value)
437
438
439 class ErodeInput(KernelInput):
440
441 minimum_filter = traits.Bool(
442 argstr="%s",
443 position=6,
444 usedefault=True,
445 default_value=False,
446 desc=("if true, minimum filter rather than " "erosion by zeroing-out"),
447 )
448
449
450 class ErodeImage(MathsCommand):
451 """Use fslmaths to perform a spatial erosion of an image."""
452
453 input_spec = ErodeInput
454 _suffix = "_ero"
455
456 def _format_arg(self, name, spec, value):
457 if name == "minimum_filter":
458 if value:
459 return "-eroF"
460 return "-ero"
461 return super(ErodeImage, self)._format_arg(name, spec, value)
462
463
464 class SpatialFilterInput(KernelInput):
465
466 operation = traits.Enum(
467 "mean",
468 "median",
469 "meanu",
470 argstr="-f%s",
471 position=6,
472 mandatory=True,
473 desc="operation to filter with",
474 )
475
476
477 class SpatialFilter(MathsCommand):
478 """Use fslmaths to spatially filter an image."""
479
480 input_spec = SpatialFilterInput
481 _suffix = "_filt"
482
483
484 class UnaryMathsInput(MathsInput):
485
486 operation = traits.Enum(
487 "exp",
488 "log",
489 "sin",
490 "cos",
491 "tan",
492 "asin",
493 "acos",
494 "atan",
495 "sqr",
496 "sqrt",
497 "recip",
498 "abs",
499 "bin",
500 "binv",
501 "fillh",
502 "fillh26",
503 "index",
504 "edge",
505 "nan",
506 "nanm",
507 "rand",
508 "randn",
509 "range",
510 argstr="-%s",
511 position=4,
512 mandatory=True,
513 desc="operation to perform",
514 )
515
516
517 class UnaryMaths(MathsCommand):
518 """Use fslmaths to perorm a variety of mathematical operations on an image."""
519
520 input_spec = UnaryMathsInput
521
522 def _list_outputs(self):
523 self._suffix = "_" + self.inputs.operation
524 return super(UnaryMaths, self)._list_outputs()
525
526
527 class BinaryMathsInput(MathsInput):
528
529 operation = traits.Enum(
530 "add",
531 "sub",
532 "mul",
533 "div",
534 "rem",
535 "max",
536 "min",
537 mandatory=True,
538 argstr="-%s",
539 position=4,
540 desc="operation to perform",
541 )
542 operand_file = File(
543 exists=True,
544 argstr="%s",
545 mandatory=True,
546 position=5,
547 xor=["operand_value"],
548 desc="second image to perform operation with",
549 )
550 operand_value = traits.Float(
551 argstr="%.8f",
552 mandatory=True,
553 position=5,
554 xor=["operand_file"],
555 desc="value to perform operation with",
556 )
557
558
559 class BinaryMaths(MathsCommand):
560 """Use fslmaths to perform mathematical operations using a second image or
561 a numeric value.
562
563 """
564
565 input_spec = BinaryMathsInput
566
567
568 class MultiImageMathsInput(MathsInput):
569
570 op_string = traits.String(
571 position=4,
572 argstr="%s",
573 mandatory=True,
574 desc=("python formatted string of operations " "to perform"),
575 )
576 operand_files = InputMultiPath(
577 File(exists=True),
578 mandatory=True,
579 desc=("list of file names to plug into op " "string"),
580 )
581
582
583 class MultiImageMaths(MathsCommand):
584 """Use fslmaths to perform a sequence of mathematical operations.
585
586 Examples
587 --------
588 >>> from nipype.interfaces.fsl import MultiImageMaths
589 >>> maths = MultiImageMaths()
590 >>> maths.inputs.in_file = "functional.nii"
591 >>> maths.inputs.op_string = "-add %s -mul -1 -div %s"
592 >>> maths.inputs.operand_files = ["functional2.nii", "functional3.nii"]
593 >>> maths.inputs.out_file = "functional4.nii"
594 >>> maths.cmdline
595 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii'
596
597 """
598
599 input_spec = MultiImageMathsInput
600
601 def _format_arg(self, name, spec, value):
602 if name == "op_string":
603 return value % tuple(self.inputs.operand_files)
604 return super(MultiImageMaths, self)._format_arg(name, spec, value)
605
606
607 class TemporalFilterInput(MathsInput):
608
609 lowpass_sigma = traits.Float(
610 -1,
611 argstr="%.6f",
612 position=5,
613 usedefault=True,
614 desc="lowpass filter sigma (in volumes)",
615 )
616 highpass_sigma = traits.Float(
617 -1,
618 argstr="-bptf %.6f",
619 position=4,
620 usedefault=True,
621 desc="highpass filter sigma (in volumes)",
622 )
623
624
625 class TemporalFilter(MathsCommand):
626 """Use fslmaths to apply a low, high, or bandpass temporal filter to a
627 timeseries.
628
629 """
630
631 input_spec = TemporalFilterInput
632 _suffix = "_filt"
633
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py
--- a/nipype/interfaces/fsl/maths.py
+++ b/nipype/interfaces/fsl/maths.py
@@ -41,7 +41,7 @@
class MathsOutput(TraitedSpec):
- out_file = File(exists=True, desc="image written after calculations")
+ out_file = File(desc="image written after calculations")
class MathsCommand(FSLCommand):
| {"golden_diff": "diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py\n--- a/nipype/interfaces/fsl/maths.py\n+++ b/nipype/interfaces/fsl/maths.py\n@@ -41,7 +41,7 @@\n \n class MathsOutput(TraitedSpec):\n \n- out_file = File(exists=True, desc=\"image written after calculations\")\n+ out_file = File(desc=\"image written after calculations\")\n \n \n class MathsCommand(FSLCommand):\n", "issue": "MathsCommand().inputs.out_file has to exist\nWhen setting `MathsCommand().inputs.out_file` argument, a Trait error is thrown because of the `exists=True`:\r\n\r\nhttps://github.com/nipy/nipype/blob/6a7837c0994367a5f34bb576bb0a97ec70669b8f/nipype/interfaces/fsl/maths.py#L44\r\n\r\nI think this doesn't make any sense, because `out_file` is to be created by `MathsCommand()`.\r\nIn my case, I just want to get rid of SPM nan's in an .nii file, but I also don't want fsl_maths to append a suffix to the filename - this is why I also provide `out_file` to `MathsCommand().inputs`.\r\nA short example:\r\n```\r\nnan2zero = pe.MapNode(interface=MathsCommand(), name='nan2zero', iterfield=['in_file', 'out_file'])\r\nnan2zero.inputs.nan2zeros = True\r\n\r\n```\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-\n# vi: set ft=python sts=4 ts=4 sw=4 et:\n\"\"\"\nThe maths module provides higher-level interfaces to some of the operations\nthat can be performed with the fslmaths command-line program.\n\"\"\"\nimport os\nimport numpy as np\n\nfrom ..base import TraitedSpec, File, traits, InputMultiPath, isdefined\nfrom .base import FSLCommand, FSLCommandInputSpec\n\n\nclass MathsInput(FSLCommandInputSpec):\n\n in_file = File(\n position=2, argstr=\"%s\", exists=True, mandatory=True, desc=\"image to operate on\"\n )\n out_file = File(\n genfile=True, position=-2, argstr=\"%s\", desc=\"image to write\", hash_files=False\n )\n _dtypes = [\"float\", \"char\", \"int\", \"short\", \"double\", \"input\"]\n internal_datatype = traits.Enum(\n *_dtypes,\n position=1,\n argstr=\"-dt %s\",\n desc=(\"datatype to use for calculations \" \"(default is float)\")\n )\n output_datatype = traits.Enum(\n *_dtypes,\n position=-1,\n argstr=\"-odt %s\",\n desc=(\"datatype to use for output (default \" \"uses input type)\")\n )\n\n nan2zeros = traits.Bool(\n position=3, argstr=\"-nan\", desc=\"change NaNs to zeros before doing anything\"\n )\n\n\nclass MathsOutput(TraitedSpec):\n\n out_file = File(exists=True, desc=\"image written after calculations\")\n\n\nclass MathsCommand(FSLCommand):\n\n _cmd = \"fslmaths\"\n input_spec = MathsInput\n output_spec = MathsOutput\n _suffix = \"_maths\"\n\n def _list_outputs(self):\n outputs = self.output_spec().get()\n outputs[\"out_file\"] = self.inputs.out_file\n if not isdefined(self.inputs.out_file):\n outputs[\"out_file\"] = self._gen_fname(\n self.inputs.in_file, suffix=self._suffix\n )\n outputs[\"out_file\"] = os.path.abspath(outputs[\"out_file\"])\n return outputs\n\n def _gen_filename(self, name):\n if name == \"out_file\":\n return self._list_outputs()[\"out_file\"]\n return None\n\n\nclass ChangeDataTypeInput(MathsInput):\n\n _dtypes = [\"float\", \"char\", \"int\", \"short\", \"double\", \"input\"]\n output_datatype = traits.Enum(\n *_dtypes, position=-1, argstr=\"-odt %s\", mandatory=True, desc=\"output data type\"\n )\n\n\nclass ChangeDataType(MathsCommand):\n \"\"\"Use fslmaths to change the datatype of an image.\"\"\"\n\n input_spec = ChangeDataTypeInput\n _suffix = \"_chdt\"\n\n\nclass 
ThresholdInputSpec(MathsInput):\n\n thresh = traits.Float(\n mandatory=True, position=4, argstr=\"%s\", desc=\"threshold value\"\n )\n direction = traits.Enum(\n \"below\",\n \"above\",\n usedefault=True,\n desc=\"zero-out either below or above thresh value\",\n )\n use_robust_range = traits.Bool(\n desc=\"interpret thresh as percentage (0-100) of robust range\"\n )\n use_nonzero_voxels = traits.Bool(\n desc=\"use nonzero voxels to calculate robust range\",\n requires=[\"use_robust_range\"],\n )\n\n\nclass Threshold(MathsCommand):\n \"\"\"Use fslmaths to apply a threshold to an image in a variety of ways.\"\"\"\n\n input_spec = ThresholdInputSpec\n _suffix = \"_thresh\"\n\n def _format_arg(self, name, spec, value):\n if name == \"thresh\":\n arg = \"-\"\n _si = self.inputs\n if self.inputs.direction == \"above\":\n arg += \"u\"\n arg += \"thr\"\n if isdefined(_si.use_robust_range) and _si.use_robust_range:\n if isdefined(_si.use_nonzero_voxels) and _si.use_nonzero_voxels:\n arg += \"P\"\n else:\n arg += \"p\"\n arg += \" %.10f\" % value\n return arg\n return super(Threshold, self)._format_arg(name, spec, value)\n\n\nclass StdImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%sstd\",\n position=4,\n desc=\"dimension to standard deviate across\",\n )\n\n\nclass StdImage(MathsCommand):\n \"\"\"Use fslmaths to generate a standard deviation in an image across a given\n dimension.\n \"\"\"\n\n input_spec = StdImageInput\n _suffix = \"_std\"\n\n\nclass MeanImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smean\",\n position=4,\n desc=\"dimension to mean across\",\n )\n\n\nclass MeanImage(MathsCommand):\n \"\"\"Use fslmaths to generate a mean image across a given dimension.\"\"\"\n\n input_spec = MeanImageInput\n _suffix = \"_mean\"\n\n\nclass MaxImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smax\",\n position=4,\n desc=\"dimension to max across\",\n )\n\n\nclass MaxImage(MathsCommand):\n \"\"\"Use fslmaths to generate a max image across a given dimension.\n\n Examples\n --------\n >>> from nipype.interfaces.fsl.maths import MaxImage\n >>> maxer = MaxImage()\n >>> maxer.inputs.in_file = \"functional.nii\" # doctest: +SKIP\n >>> maxer.dimension = \"T\"\n >>> maxer.cmdline # doctest: +SKIP\n 'fslmaths functional.nii -Tmax functional_max.nii'\n\n \"\"\"\n\n input_spec = MaxImageInput\n _suffix = \"_max\"\n\n\nclass PercentileImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%sperc\",\n position=4,\n desc=\"dimension to percentile across\",\n )\n perc = traits.Range(\n low=0,\n high=100,\n argstr=\"%f\",\n position=5,\n desc=(\"nth percentile (0-100) of FULL RANGE \" \"across dimension\"),\n )\n\n\nclass PercentileImage(MathsCommand):\n \"\"\"Use fslmaths to generate a percentile image across a given dimension.\n\n Examples\n --------\n >>> from nipype.interfaces.fsl.maths import MaxImage\n >>> percer = PercentileImage()\n >>> percer.inputs.in_file = \"functional.nii\" # doctest: +SKIP\n >>> percer.dimension = \"T\"\n >>> percer.perc = 90\n >>> percer.cmdline # doctest: +SKIP\n 'fslmaths functional.nii -Tperc 90 functional_perc.nii'\n\n \"\"\"\n\n input_spec = PercentileImageInput\n _suffix = \"_perc\"\n\n\nclass MaxnImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n 
usedefault=True,\n argstr=\"-%smaxn\",\n position=4,\n desc=\"dimension to index max across\",\n )\n\n\nclass MaxnImage(MathsCommand):\n \"\"\"Use fslmaths to generate an image of index of max across\n a given dimension.\n\n \"\"\"\n\n input_spec = MaxnImageInput\n _suffix = \"_maxn\"\n\n\nclass MinImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smin\",\n position=4,\n desc=\"dimension to min across\",\n )\n\n\nclass MinImage(MathsCommand):\n \"\"\"Use fslmaths to generate a minimum image across a given dimension.\"\"\"\n\n input_spec = MinImageInput\n _suffix = \"_min\"\n\n\nclass MedianImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smedian\",\n position=4,\n desc=\"dimension to median across\",\n )\n\n\nclass MedianImage(MathsCommand):\n \"\"\"Use fslmaths to generate a median image across a given dimension.\"\"\"\n\n input_spec = MedianImageInput\n _suffix = \"_median\"\n\n\nclass AR1ImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%sar1\",\n position=4,\n desc=(\"dimension to find AR(1) coefficient\" \"across\"),\n )\n\n\nclass AR1Image(MathsCommand):\n \"\"\"Use fslmaths to generate an AR1 coefficient image across a\n given dimension. (Should use -odt float and probably demean first)\n\n \"\"\"\n\n input_spec = AR1ImageInput\n _suffix = \"_ar1\"\n\n\nclass IsotropicSmoothInput(MathsInput):\n\n fwhm = traits.Float(\n mandatory=True,\n xor=[\"sigma\"],\n position=4,\n argstr=\"-s %.5f\",\n desc=\"fwhm of smoothing kernel [mm]\",\n )\n sigma = traits.Float(\n mandatory=True,\n xor=[\"fwhm\"],\n position=4,\n argstr=\"-s %.5f\",\n desc=\"sigma of smoothing kernel [mm]\",\n )\n\n\nclass IsotropicSmooth(MathsCommand):\n \"\"\"Use fslmaths to spatially smooth an image with a gaussian kernel.\"\"\"\n\n input_spec = IsotropicSmoothInput\n _suffix = \"_smooth\"\n\n def _format_arg(self, name, spec, value):\n if name == \"fwhm\":\n sigma = float(value) / np.sqrt(8 * np.log(2))\n return spec.argstr % sigma\n return super(IsotropicSmooth, self)._format_arg(name, spec, value)\n\n\nclass ApplyMaskInput(MathsInput):\n\n mask_file = File(\n exists=True,\n mandatory=True,\n argstr=\"-mas %s\",\n position=4,\n desc=\"binary image defining mask space\",\n )\n\n\nclass ApplyMask(MathsCommand):\n \"\"\"Use fslmaths to apply a binary mask to another image.\"\"\"\n\n input_spec = ApplyMaskInput\n _suffix = \"_masked\"\n\n\nclass KernelInput(MathsInput):\n\n kernel_shape = traits.Enum(\n \"3D\",\n \"2D\",\n \"box\",\n \"boxv\",\n \"gauss\",\n \"sphere\",\n \"file\",\n argstr=\"-kernel %s\",\n position=4,\n desc=\"kernel shape to use\",\n )\n kernel_size = traits.Float(\n argstr=\"%.4f\",\n position=5,\n xor=[\"kernel_file\"],\n desc=(\n \"kernel size - voxels for box/boxv, mm \" \"for sphere, mm sigma for gauss\"\n ),\n )\n kernel_file = File(\n exists=True,\n argstr=\"%s\",\n position=5,\n xor=[\"kernel_size\"],\n desc=\"use external file for kernel\",\n )\n\n\nclass DilateInput(KernelInput):\n\n operation = traits.Enum(\n \"mean\",\n \"modal\",\n \"max\",\n argstr=\"-dil%s\",\n position=6,\n mandatory=True,\n desc=\"filtering operation to perfoem in dilation\",\n )\n\n\nclass DilateImage(MathsCommand):\n \"\"\"Use fslmaths to perform a spatial dilation of an image.\"\"\"\n\n input_spec = DilateInput\n _suffix = \"_dil\"\n\n def _format_arg(self, name, spec, value):\n if name == 
\"operation\":\n return spec.argstr % dict(mean=\"M\", modal=\"D\", max=\"F\")[value]\n return super(DilateImage, self)._format_arg(name, spec, value)\n\n\nclass ErodeInput(KernelInput):\n\n minimum_filter = traits.Bool(\n argstr=\"%s\",\n position=6,\n usedefault=True,\n default_value=False,\n desc=(\"if true, minimum filter rather than \" \"erosion by zeroing-out\"),\n )\n\n\nclass ErodeImage(MathsCommand):\n \"\"\"Use fslmaths to perform a spatial erosion of an image.\"\"\"\n\n input_spec = ErodeInput\n _suffix = \"_ero\"\n\n def _format_arg(self, name, spec, value):\n if name == \"minimum_filter\":\n if value:\n return \"-eroF\"\n return \"-ero\"\n return super(ErodeImage, self)._format_arg(name, spec, value)\n\n\nclass SpatialFilterInput(KernelInput):\n\n operation = traits.Enum(\n \"mean\",\n \"median\",\n \"meanu\",\n argstr=\"-f%s\",\n position=6,\n mandatory=True,\n desc=\"operation to filter with\",\n )\n\n\nclass SpatialFilter(MathsCommand):\n \"\"\"Use fslmaths to spatially filter an image.\"\"\"\n\n input_spec = SpatialFilterInput\n _suffix = \"_filt\"\n\n\nclass UnaryMathsInput(MathsInput):\n\n operation = traits.Enum(\n \"exp\",\n \"log\",\n \"sin\",\n \"cos\",\n \"tan\",\n \"asin\",\n \"acos\",\n \"atan\",\n \"sqr\",\n \"sqrt\",\n \"recip\",\n \"abs\",\n \"bin\",\n \"binv\",\n \"fillh\",\n \"fillh26\",\n \"index\",\n \"edge\",\n \"nan\",\n \"nanm\",\n \"rand\",\n \"randn\",\n \"range\",\n argstr=\"-%s\",\n position=4,\n mandatory=True,\n desc=\"operation to perform\",\n )\n\n\nclass UnaryMaths(MathsCommand):\n \"\"\"Use fslmaths to perorm a variety of mathematical operations on an image.\"\"\"\n\n input_spec = UnaryMathsInput\n\n def _list_outputs(self):\n self._suffix = \"_\" + self.inputs.operation\n return super(UnaryMaths, self)._list_outputs()\n\n\nclass BinaryMathsInput(MathsInput):\n\n operation = traits.Enum(\n \"add\",\n \"sub\",\n \"mul\",\n \"div\",\n \"rem\",\n \"max\",\n \"min\",\n mandatory=True,\n argstr=\"-%s\",\n position=4,\n desc=\"operation to perform\",\n )\n operand_file = File(\n exists=True,\n argstr=\"%s\",\n mandatory=True,\n position=5,\n xor=[\"operand_value\"],\n desc=\"second image to perform operation with\",\n )\n operand_value = traits.Float(\n argstr=\"%.8f\",\n mandatory=True,\n position=5,\n xor=[\"operand_file\"],\n desc=\"value to perform operation with\",\n )\n\n\nclass BinaryMaths(MathsCommand):\n \"\"\"Use fslmaths to perform mathematical operations using a second image or\n a numeric value.\n\n \"\"\"\n\n input_spec = BinaryMathsInput\n\n\nclass MultiImageMathsInput(MathsInput):\n\n op_string = traits.String(\n position=4,\n argstr=\"%s\",\n mandatory=True,\n desc=(\"python formatted string of operations \" \"to perform\"),\n )\n operand_files = InputMultiPath(\n File(exists=True),\n mandatory=True,\n desc=(\"list of file names to plug into op \" \"string\"),\n )\n\n\nclass MultiImageMaths(MathsCommand):\n \"\"\"Use fslmaths to perform a sequence of mathematical operations.\n\n Examples\n --------\n >>> from nipype.interfaces.fsl import MultiImageMaths\n >>> maths = MultiImageMaths()\n >>> maths.inputs.in_file = \"functional.nii\"\n >>> maths.inputs.op_string = \"-add %s -mul -1 -div %s\"\n >>> maths.inputs.operand_files = [\"functional2.nii\", \"functional3.nii\"]\n >>> maths.inputs.out_file = \"functional4.nii\"\n >>> maths.cmdline\n 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii'\n\n \"\"\"\n\n input_spec = MultiImageMathsInput\n\n def _format_arg(self, name, spec, value):\n if name == 
\"op_string\":\n return value % tuple(self.inputs.operand_files)\n return super(MultiImageMaths, self)._format_arg(name, spec, value)\n\n\nclass TemporalFilterInput(MathsInput):\n\n lowpass_sigma = traits.Float(\n -1,\n argstr=\"%.6f\",\n position=5,\n usedefault=True,\n desc=\"lowpass filter sigma (in volumes)\",\n )\n highpass_sigma = traits.Float(\n -1,\n argstr=\"-bptf %.6f\",\n position=4,\n usedefault=True,\n desc=\"highpass filter sigma (in volumes)\",\n )\n\n\nclass TemporalFilter(MathsCommand):\n \"\"\"Use fslmaths to apply a low, high, or bandpass temporal filter to a\n timeseries.\n\n \"\"\"\n\n input_spec = TemporalFilterInput\n _suffix = \"_filt\"\n", "path": "nipype/interfaces/fsl/maths.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-\n# vi: set ft=python sts=4 ts=4 sw=4 et:\n\"\"\"\nThe maths module provides higher-level interfaces to some of the operations\nthat can be performed with the fslmaths command-line program.\n\"\"\"\nimport os\nimport numpy as np\n\nfrom ..base import TraitedSpec, File, traits, InputMultiPath, isdefined\nfrom .base import FSLCommand, FSLCommandInputSpec\n\n\nclass MathsInput(FSLCommandInputSpec):\n\n in_file = File(\n position=2, argstr=\"%s\", exists=True, mandatory=True, desc=\"image to operate on\"\n )\n out_file = File(\n genfile=True, position=-2, argstr=\"%s\", desc=\"image to write\", hash_files=False\n )\n _dtypes = [\"float\", \"char\", \"int\", \"short\", \"double\", \"input\"]\n internal_datatype = traits.Enum(\n *_dtypes,\n position=1,\n argstr=\"-dt %s\",\n desc=(\"datatype to use for calculations \" \"(default is float)\")\n )\n output_datatype = traits.Enum(\n *_dtypes,\n position=-1,\n argstr=\"-odt %s\",\n desc=(\"datatype to use for output (default \" \"uses input type)\")\n )\n\n nan2zeros = traits.Bool(\n position=3, argstr=\"-nan\", desc=\"change NaNs to zeros before doing anything\"\n )\n\n\nclass MathsOutput(TraitedSpec):\n\n out_file = File(desc=\"image written after calculations\")\n\n\nclass MathsCommand(FSLCommand):\n\n _cmd = \"fslmaths\"\n input_spec = MathsInput\n output_spec = MathsOutput\n _suffix = \"_maths\"\n\n def _list_outputs(self):\n outputs = self.output_spec().get()\n outputs[\"out_file\"] = self.inputs.out_file\n if not isdefined(self.inputs.out_file):\n outputs[\"out_file\"] = self._gen_fname(\n self.inputs.in_file, suffix=self._suffix\n )\n outputs[\"out_file\"] = os.path.abspath(outputs[\"out_file\"])\n return outputs\n\n def _gen_filename(self, name):\n if name == \"out_file\":\n return self._list_outputs()[\"out_file\"]\n return None\n\n\nclass ChangeDataTypeInput(MathsInput):\n\n _dtypes = [\"float\", \"char\", \"int\", \"short\", \"double\", \"input\"]\n output_datatype = traits.Enum(\n *_dtypes, position=-1, argstr=\"-odt %s\", mandatory=True, desc=\"output data type\"\n )\n\n\nclass ChangeDataType(MathsCommand):\n \"\"\"Use fslmaths to change the datatype of an image.\"\"\"\n\n input_spec = ChangeDataTypeInput\n _suffix = \"_chdt\"\n\n\nclass ThresholdInputSpec(MathsInput):\n\n thresh = traits.Float(\n mandatory=True, position=4, argstr=\"%s\", desc=\"threshold value\"\n )\n direction = traits.Enum(\n \"below\",\n \"above\",\n usedefault=True,\n desc=\"zero-out either below or above thresh value\",\n )\n use_robust_range = traits.Bool(\n desc=\"interpret thresh as percentage (0-100) of robust range\"\n )\n use_nonzero_voxels = traits.Bool(\n desc=\"use nonzero voxels to calculate robust range\",\n 
requires=[\"use_robust_range\"],\n )\n\n\nclass Threshold(MathsCommand):\n \"\"\"Use fslmaths to apply a threshold to an image in a variety of ways.\"\"\"\n\n input_spec = ThresholdInputSpec\n _suffix = \"_thresh\"\n\n def _format_arg(self, name, spec, value):\n if name == \"thresh\":\n arg = \"-\"\n _si = self.inputs\n if self.inputs.direction == \"above\":\n arg += \"u\"\n arg += \"thr\"\n if isdefined(_si.use_robust_range) and _si.use_robust_range:\n if isdefined(_si.use_nonzero_voxels) and _si.use_nonzero_voxels:\n arg += \"P\"\n else:\n arg += \"p\"\n arg += \" %.10f\" % value\n return arg\n return super(Threshold, self)._format_arg(name, spec, value)\n\n\nclass StdImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%sstd\",\n position=4,\n desc=\"dimension to standard deviate across\",\n )\n\n\nclass StdImage(MathsCommand):\n \"\"\"Use fslmaths to generate a standard deviation in an image across a given\n dimension.\n \"\"\"\n\n input_spec = StdImageInput\n _suffix = \"_std\"\n\n\nclass MeanImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smean\",\n position=4,\n desc=\"dimension to mean across\",\n )\n\n\nclass MeanImage(MathsCommand):\n \"\"\"Use fslmaths to generate a mean image across a given dimension.\"\"\"\n\n input_spec = MeanImageInput\n _suffix = \"_mean\"\n\n\nclass MaxImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smax\",\n position=4,\n desc=\"dimension to max across\",\n )\n\n\nclass MaxImage(MathsCommand):\n \"\"\"Use fslmaths to generate a max image across a given dimension.\n\n Examples\n --------\n >>> from nipype.interfaces.fsl.maths import MaxImage\n >>> maxer = MaxImage()\n >>> maxer.inputs.in_file = \"functional.nii\" # doctest: +SKIP\n >>> maxer.dimension = \"T\"\n >>> maxer.cmdline # doctest: +SKIP\n 'fslmaths functional.nii -Tmax functional_max.nii'\n\n \"\"\"\n\n input_spec = MaxImageInput\n _suffix = \"_max\"\n\n\nclass PercentileImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%sperc\",\n position=4,\n desc=\"dimension to percentile across\",\n )\n perc = traits.Range(\n low=0,\n high=100,\n argstr=\"%f\",\n position=5,\n desc=(\"nth percentile (0-100) of FULL RANGE \" \"across dimension\"),\n )\n\n\nclass PercentileImage(MathsCommand):\n \"\"\"Use fslmaths to generate a percentile image across a given dimension.\n\n Examples\n --------\n >>> from nipype.interfaces.fsl.maths import MaxImage\n >>> percer = PercentileImage()\n >>> percer.inputs.in_file = \"functional.nii\" # doctest: +SKIP\n >>> percer.dimension = \"T\"\n >>> percer.perc = 90\n >>> percer.cmdline # doctest: +SKIP\n 'fslmaths functional.nii -Tperc 90 functional_perc.nii'\n\n \"\"\"\n\n input_spec = PercentileImageInput\n _suffix = \"_perc\"\n\n\nclass MaxnImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smaxn\",\n position=4,\n desc=\"dimension to index max across\",\n )\n\n\nclass MaxnImage(MathsCommand):\n \"\"\"Use fslmaths to generate an image of index of max across\n a given dimension.\n\n \"\"\"\n\n input_spec = MaxnImageInput\n _suffix = \"_maxn\"\n\n\nclass MinImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smin\",\n position=4,\n desc=\"dimension to min 
across\",\n )\n\n\nclass MinImage(MathsCommand):\n \"\"\"Use fslmaths to generate a minimum image across a given dimension.\"\"\"\n\n input_spec = MinImageInput\n _suffix = \"_min\"\n\n\nclass MedianImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%smedian\",\n position=4,\n desc=\"dimension to median across\",\n )\n\n\nclass MedianImage(MathsCommand):\n \"\"\"Use fslmaths to generate a median image across a given dimension.\"\"\"\n\n input_spec = MedianImageInput\n _suffix = \"_median\"\n\n\nclass AR1ImageInput(MathsInput):\n\n dimension = traits.Enum(\n \"T\",\n \"X\",\n \"Y\",\n \"Z\",\n usedefault=True,\n argstr=\"-%sar1\",\n position=4,\n desc=(\"dimension to find AR(1) coefficient\" \"across\"),\n )\n\n\nclass AR1Image(MathsCommand):\n \"\"\"Use fslmaths to generate an AR1 coefficient image across a\n given dimension. (Should use -odt float and probably demean first)\n\n \"\"\"\n\n input_spec = AR1ImageInput\n _suffix = \"_ar1\"\n\n\nclass IsotropicSmoothInput(MathsInput):\n\n fwhm = traits.Float(\n mandatory=True,\n xor=[\"sigma\"],\n position=4,\n argstr=\"-s %.5f\",\n desc=\"fwhm of smoothing kernel [mm]\",\n )\n sigma = traits.Float(\n mandatory=True,\n xor=[\"fwhm\"],\n position=4,\n argstr=\"-s %.5f\",\n desc=\"sigma of smoothing kernel [mm]\",\n )\n\n\nclass IsotropicSmooth(MathsCommand):\n \"\"\"Use fslmaths to spatially smooth an image with a gaussian kernel.\"\"\"\n\n input_spec = IsotropicSmoothInput\n _suffix = \"_smooth\"\n\n def _format_arg(self, name, spec, value):\n if name == \"fwhm\":\n sigma = float(value) / np.sqrt(8 * np.log(2))\n return spec.argstr % sigma\n return super(IsotropicSmooth, self)._format_arg(name, spec, value)\n\n\nclass ApplyMaskInput(MathsInput):\n\n mask_file = File(\n exists=True,\n mandatory=True,\n argstr=\"-mas %s\",\n position=4,\n desc=\"binary image defining mask space\",\n )\n\n\nclass ApplyMask(MathsCommand):\n \"\"\"Use fslmaths to apply a binary mask to another image.\"\"\"\n\n input_spec = ApplyMaskInput\n _suffix = \"_masked\"\n\n\nclass KernelInput(MathsInput):\n\n kernel_shape = traits.Enum(\n \"3D\",\n \"2D\",\n \"box\",\n \"boxv\",\n \"gauss\",\n \"sphere\",\n \"file\",\n argstr=\"-kernel %s\",\n position=4,\n desc=\"kernel shape to use\",\n )\n kernel_size = traits.Float(\n argstr=\"%.4f\",\n position=5,\n xor=[\"kernel_file\"],\n desc=(\n \"kernel size - voxels for box/boxv, mm \" \"for sphere, mm sigma for gauss\"\n ),\n )\n kernel_file = File(\n exists=True,\n argstr=\"%s\",\n position=5,\n xor=[\"kernel_size\"],\n desc=\"use external file for kernel\",\n )\n\n\nclass DilateInput(KernelInput):\n\n operation = traits.Enum(\n \"mean\",\n \"modal\",\n \"max\",\n argstr=\"-dil%s\",\n position=6,\n mandatory=True,\n desc=\"filtering operation to perfoem in dilation\",\n )\n\n\nclass DilateImage(MathsCommand):\n \"\"\"Use fslmaths to perform a spatial dilation of an image.\"\"\"\n\n input_spec = DilateInput\n _suffix = \"_dil\"\n\n def _format_arg(self, name, spec, value):\n if name == \"operation\":\n return spec.argstr % dict(mean=\"M\", modal=\"D\", max=\"F\")[value]\n return super(DilateImage, self)._format_arg(name, spec, value)\n\n\nclass ErodeInput(KernelInput):\n\n minimum_filter = traits.Bool(\n argstr=\"%s\",\n position=6,\n usedefault=True,\n default_value=False,\n desc=(\"if true, minimum filter rather than \" \"erosion by zeroing-out\"),\n )\n\n\nclass ErodeImage(MathsCommand):\n \"\"\"Use fslmaths to perform a spatial erosion of an 
image.\"\"\"\n\n input_spec = ErodeInput\n _suffix = \"_ero\"\n\n def _format_arg(self, name, spec, value):\n if name == \"minimum_filter\":\n if value:\n return \"-eroF\"\n return \"-ero\"\n return super(ErodeImage, self)._format_arg(name, spec, value)\n\n\nclass SpatialFilterInput(KernelInput):\n\n operation = traits.Enum(\n \"mean\",\n \"median\",\n \"meanu\",\n argstr=\"-f%s\",\n position=6,\n mandatory=True,\n desc=\"operation to filter with\",\n )\n\n\nclass SpatialFilter(MathsCommand):\n \"\"\"Use fslmaths to spatially filter an image.\"\"\"\n\n input_spec = SpatialFilterInput\n _suffix = \"_filt\"\n\n\nclass UnaryMathsInput(MathsInput):\n\n operation = traits.Enum(\n \"exp\",\n \"log\",\n \"sin\",\n \"cos\",\n \"tan\",\n \"asin\",\n \"acos\",\n \"atan\",\n \"sqr\",\n \"sqrt\",\n \"recip\",\n \"abs\",\n \"bin\",\n \"binv\",\n \"fillh\",\n \"fillh26\",\n \"index\",\n \"edge\",\n \"nan\",\n \"nanm\",\n \"rand\",\n \"randn\",\n \"range\",\n argstr=\"-%s\",\n position=4,\n mandatory=True,\n desc=\"operation to perform\",\n )\n\n\nclass UnaryMaths(MathsCommand):\n \"\"\"Use fslmaths to perorm a variety of mathematical operations on an image.\"\"\"\n\n input_spec = UnaryMathsInput\n\n def _list_outputs(self):\n self._suffix = \"_\" + self.inputs.operation\n return super(UnaryMaths, self)._list_outputs()\n\n\nclass BinaryMathsInput(MathsInput):\n\n operation = traits.Enum(\n \"add\",\n \"sub\",\n \"mul\",\n \"div\",\n \"rem\",\n \"max\",\n \"min\",\n mandatory=True,\n argstr=\"-%s\",\n position=4,\n desc=\"operation to perform\",\n )\n operand_file = File(\n exists=True,\n argstr=\"%s\",\n mandatory=True,\n position=5,\n xor=[\"operand_value\"],\n desc=\"second image to perform operation with\",\n )\n operand_value = traits.Float(\n argstr=\"%.8f\",\n mandatory=True,\n position=5,\n xor=[\"operand_file\"],\n desc=\"value to perform operation with\",\n )\n\n\nclass BinaryMaths(MathsCommand):\n \"\"\"Use fslmaths to perform mathematical operations using a second image or\n a numeric value.\n\n \"\"\"\n\n input_spec = BinaryMathsInput\n\n\nclass MultiImageMathsInput(MathsInput):\n\n op_string = traits.String(\n position=4,\n argstr=\"%s\",\n mandatory=True,\n desc=(\"python formatted string of operations \" \"to perform\"),\n )\n operand_files = InputMultiPath(\n File(exists=True),\n mandatory=True,\n desc=(\"list of file names to plug into op \" \"string\"),\n )\n\n\nclass MultiImageMaths(MathsCommand):\n \"\"\"Use fslmaths to perform a sequence of mathematical operations.\n\n Examples\n --------\n >>> from nipype.interfaces.fsl import MultiImageMaths\n >>> maths = MultiImageMaths()\n >>> maths.inputs.in_file = \"functional.nii\"\n >>> maths.inputs.op_string = \"-add %s -mul -1 -div %s\"\n >>> maths.inputs.operand_files = [\"functional2.nii\", \"functional3.nii\"]\n >>> maths.inputs.out_file = \"functional4.nii\"\n >>> maths.cmdline\n 'fslmaths functional.nii -add functional2.nii -mul -1 -div functional3.nii functional4.nii'\n\n \"\"\"\n\n input_spec = MultiImageMathsInput\n\n def _format_arg(self, name, spec, value):\n if name == \"op_string\":\n return value % tuple(self.inputs.operand_files)\n return super(MultiImageMaths, self)._format_arg(name, spec, value)\n\n\nclass TemporalFilterInput(MathsInput):\n\n lowpass_sigma = traits.Float(\n -1,\n argstr=\"%.6f\",\n position=5,\n usedefault=True,\n desc=\"lowpass filter sigma (in volumes)\",\n )\n highpass_sigma = traits.Float(\n -1,\n argstr=\"-bptf %.6f\",\n position=4,\n usedefault=True,\n desc=\"highpass filter sigma (in volumes)\",\n 
)\n\n\nclass TemporalFilter(MathsCommand):\n \"\"\"Use fslmaths to apply a low, high, or bandpass temporal filter to a\n timeseries.\n\n \"\"\"\n\n input_spec = TemporalFilterInput\n _suffix = \"_filt\"\n", "path": "nipype/interfaces/fsl/maths.py"}]} |
gh_patches_debug_149 | rasdani/github-patches | git_diff | tensorflow__addons-897 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Nightly build break
**System information**
- TensorFlow version and how it was installed (source or binary): tf-nightly-**2.2.0.dev20200115**
- TensorFlow-Addons version and how it was installed (source or binary): tfa-nightly-**0.8.0.dev20200115**
**Describe the bug**
Hi, it looks like [this commit](https://github.com/tensorflow/addons/commit/3aae7732998cb233234a2948010b9aaafc24e920) causes the latest nightly build to fail on import
```
----> 1 import tensorflow_addons
/usr/local/lib/python3.6/dist-packages/tensorflow_addons/__init__.py in <module>()
30
31 # Cleanup symbols to avoid polluting namespace.
---> 32 del absolute_import
33 del division
34 del print_function
NameError: name 'absolute_import' is not defined
```
@seanpmorgan
**Code to reproduce the issue**
[colab](https://colab.research.google.com/drive/1fxRshVv0FPJNHdOqWC4GySjPJ_TdJTJU#scrollTo=TTC3gzRLRAvY)
--- END ISSUE ---
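The failure is easy to reproduce in isolation: in the `__init__.py` shown below, the `from __future__` import that these `del` statements were meant to clean up is no longer present, so the module tries to delete names that were never bound. A minimal stand-in (hypothetical file, not the actual package code):

```python
# repro.py - stand-in for the package __init__ after the offending commit
# from __future__ import absolute_import, division, print_function  # line no longer present

del absolute_import  # NameError: name 'absolute_import' is not defined
del division
del print_function
```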
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tensorflow_addons/__init__.py`
Content:
```
1 # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 # ==============================================================================
15 """Useful extra functionality for TensorFlow maintained by SIG-addons."""
16
17 # Local project imports
18 from tensorflow_addons import activations
19 from tensorflow_addons import callbacks
20 from tensorflow_addons import image
21 from tensorflow_addons import layers
22 from tensorflow_addons import losses
23 from tensorflow_addons import metrics
24 from tensorflow_addons import optimizers
25 from tensorflow_addons import rnn
26 from tensorflow_addons import seq2seq
27 from tensorflow_addons import text
28
29 from tensorflow_addons.version import __version__
30
31 # Cleanup symbols to avoid polluting namespace.
32 del absolute_import
33 del division
34 del print_function
35
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/tensorflow_addons/__init__.py b/tensorflow_addons/__init__.py
--- a/tensorflow_addons/__init__.py
+++ b/tensorflow_addons/__init__.py
@@ -27,8 +27,3 @@
from tensorflow_addons import text
from tensorflow_addons.version import __version__
-
-# Cleanup symbols to avoid polluting namespace.
-del absolute_import
-del division
-del print_function
| {"golden_diff": "diff --git a/tensorflow_addons/__init__.py b/tensorflow_addons/__init__.py\n--- a/tensorflow_addons/__init__.py\n+++ b/tensorflow_addons/__init__.py\n@@ -27,8 +27,3 @@\n from tensorflow_addons import text\n \n from tensorflow_addons.version import __version__\n-\n-# Cleanup symbols to avoid polluting namespace.\n-del absolute_import\n-del division\n-del print_function\n", "issue": "Nightly build break\n**System information**\r\n- TensorFlow version and how it was installed (source or binary): tf-nightly-**2.2.0.dev20200115** \r\n- TensorFlow-Addons version and how it was installed (source or binary): tfa-nightly-**0.8.0.dev20200115**\r\n\r\n**Describe the bug**\r\nHi, it looks like [this commit](https://github.com/tensorflow/addons/commit/3aae7732998cb233234a2948010b9aaafc24e920) causes the latest nightly build to fail on import\r\n\r\n```\r\n----> 1 import tensorflow_addons\r\n\r\n/usr/local/lib/python3.6/dist-packages/tensorflow_addons/__init__.py in <module>()\r\n 30 \r\n 31 # Cleanup symbols to avoid polluting namespace.\r\n---> 32 del absolute_import\r\n 33 del division\r\n 34 del print_function\r\n\r\nNameError: name 'absolute_import' is not defined\r\n```\r\n@seanpmorgan \r\n\r\n**Code to reproduce the issue**\r\n[colab](https://colab.research.google.com/drive/1fxRshVv0FPJNHdOqWC4GySjPJ_TdJTJU#scrollTo=TTC3gzRLRAvY)\r\n\n", "before_files": [{"content": "# Copyright 2019 The TensorFlow Authors. All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Useful extra functionality for TensorFlow maintained by SIG-addons.\"\"\"\n\n# Local project imports\nfrom tensorflow_addons import activations\nfrom tensorflow_addons import callbacks\nfrom tensorflow_addons import image\nfrom tensorflow_addons import layers\nfrom tensorflow_addons import losses\nfrom tensorflow_addons import metrics\nfrom tensorflow_addons import optimizers\nfrom tensorflow_addons import rnn\nfrom tensorflow_addons import seq2seq\nfrom tensorflow_addons import text\n\nfrom tensorflow_addons.version import __version__\n\n# Cleanup symbols to avoid polluting namespace.\ndel absolute_import\ndel division\ndel print_function\n", "path": "tensorflow_addons/__init__.py"}], "after_files": [{"content": "# Copyright 2019 The TensorFlow Authors. 
All Rights Reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n# ==============================================================================\n\"\"\"Useful extra functionality for TensorFlow maintained by SIG-addons.\"\"\"\n\n# Local project imports\nfrom tensorflow_addons import activations\nfrom tensorflow_addons import callbacks\nfrom tensorflow_addons import image\nfrom tensorflow_addons import layers\nfrom tensorflow_addons import losses\nfrom tensorflow_addons import metrics\nfrom tensorflow_addons import optimizers\nfrom tensorflow_addons import rnn\nfrom tensorflow_addons import seq2seq\nfrom tensorflow_addons import text\n\nfrom tensorflow_addons.version import __version__\n", "path": "tensorflow_addons/__init__.py"}]} |
gh_patches_debug_150 | rasdani/github-patches | git_diff | python-discord__bot-1404 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add an `attachments` rule
# Abstract
We should have an antispam rule filtering small bursts of images.
# Rationale
Currently, when a user posts 4 images in less than 10 seconds without any comment, the `duplicates` rule will trigger. While we still want to be informed when many images are posted, having the `duplicates` rule trigger doesn't make much sense. Besides, if different message content is given for each image, it will only trigger `burst` if more than 9 messages are sent in 10 seconds.
# Specification
- [ ] Make sure that the `duplicates` filter won't be triggered by messages with images. We can safely skip empty messages with attachments.
- [ ] Create an `images` filter based on `duplicates` that will trigger when more than 3 images are posted in less than 10 seconds. It should ignore the message content.
--- END ISSUE ---
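The second checklist item maps almost one-to-one onto the structure of the `duplicates` rule shown below. A sketch of what such a rule could look like; the module name, config keys, and exact thresholds are assumptions rather than the implementation that was eventually merged:

```python
# bot/rules/attachments.py (hypothetical): count attachments per author,
# ignoring message content entirely.
from typing import Dict, Iterable, List, Optional, Tuple

from discord import Member, Message


async def apply(
    last_message: Message, recent_messages: List[Message], config: Dict[str, int]
) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
    """Detects bursts of attachments sent by a single user."""
    relevant_messages = tuple(
        msg
        for msg in recent_messages
        if msg.author == last_message.author and len(msg.attachments) > 0
    )

    total_attachments = sum(len(msg.attachments) for msg in relevant_messages)

    if total_attachments > config['max']:
        return (
            f"sent {total_attachments} attachments in {config['interval']}s",
            (last_message.author,),
            relevant_messages
        )
    return None
```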
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bot/rules/duplicates.py`
Content:
```
1 from typing import Dict, Iterable, List, Optional, Tuple
2
3 from discord import Member, Message
4
5
6 async def apply(
7 last_message: Message, recent_messages: List[Message], config: Dict[str, int]
8 ) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:
9 """Detects duplicated messages sent by a single user."""
10 relevant_messages = tuple(
11 msg
12 for msg in recent_messages
13 if (
14 msg.author == last_message.author
15 and msg.content == last_message.content
16 )
17 )
18
19 total_duplicated = len(relevant_messages)
20
21 if total_duplicated > config['max']:
22 return (
23 f"sent {total_duplicated} duplicated messages in {config['interval']}s",
24 (last_message.author,),
25 relevant_messages
26 )
27 return None
28
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py
--- a/bot/rules/duplicates.py
+++ b/bot/rules/duplicates.py
@@ -13,6 +13,7 @@
if (
msg.author == last_message.author
and msg.content == last_message.content
+ and msg.content
)
)
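The added `and msg.content` clause works because attachment-only messages have an empty `content` string, which is falsy, so they no longer count as duplicates of one another. A quick standalone check of the predicate (illustrative values only):

```python
# Two attachment-only messages: identical but empty content no longer matches.
assert not ("" == "" and "")
# Identical non-empty content is still flagged as a duplicate.
assert ("spam" == "spam" and "spam")
```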
| {"golden_diff": "diff --git a/bot/rules/duplicates.py b/bot/rules/duplicates.py\n--- a/bot/rules/duplicates.py\n+++ b/bot/rules/duplicates.py\n@@ -13,6 +13,7 @@\n if (\n msg.author == last_message.author\n and msg.content == last_message.content\n+ and msg.content\n )\n )\n", "issue": "Add an `attachments` rule\n# Abstract \r\n\r\nWe should have an antispam rule filtering small burst of images. \r\n\r\n# Rationale\r\n\r\nCurrently, when a user posts 4 images in less than 10 seconds without any comment, the `duplicates` rule will trigger. While we still want to be informed when many images are posted, having the `duplicates` rule trigger doesn't make much sense. Besides, if different message content is given for each image, it will only trigger `burst` if more than 9 messages are sent in 10 seconds. \r\n\r\n# Specification\r\n\r\n- [ ] Make sure that the `duplicates` filter won't be triggered by messages with images. We can safely skip empty messages with attachments.\r\n- [ ] Create an `images` filter based on `duplicates` that will trigger when more than 3 images are posted in less than 10 seconds. It should ignore the message content. \n", "before_files": [{"content": "from typing import Dict, Iterable, List, Optional, Tuple\n\nfrom discord import Member, Message\n\n\nasync def apply(\n last_message: Message, recent_messages: List[Message], config: Dict[str, int]\n) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:\n \"\"\"Detects duplicated messages sent by a single user.\"\"\"\n relevant_messages = tuple(\n msg\n for msg in recent_messages\n if (\n msg.author == last_message.author\n and msg.content == last_message.content\n )\n )\n\n total_duplicated = len(relevant_messages)\n\n if total_duplicated > config['max']:\n return (\n f\"sent {total_duplicated} duplicated messages in {config['interval']}s\",\n (last_message.author,),\n relevant_messages\n )\n return None\n", "path": "bot/rules/duplicates.py"}], "after_files": [{"content": "from typing import Dict, Iterable, List, Optional, Tuple\n\nfrom discord import Member, Message\n\n\nasync def apply(\n last_message: Message, recent_messages: List[Message], config: Dict[str, int]\n) -> Optional[Tuple[str, Iterable[Member], Iterable[Message]]]:\n \"\"\"Detects duplicated messages sent by a single user.\"\"\"\n relevant_messages = tuple(\n msg\n for msg in recent_messages\n if (\n msg.author == last_message.author\n and msg.content == last_message.content\n and msg.content\n )\n )\n\n total_duplicated = len(relevant_messages)\n\n if total_duplicated > config['max']:\n return (\n f\"sent {total_duplicated} duplicated messages in {config['interval']}s\",\n (last_message.author,),\n relevant_messages\n )\n return None\n", "path": "bot/rules/duplicates.py"}]} |
gh_patches_debug_151 | rasdani/github-patches | git_diff | ethereum__consensus-specs-1102 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
BLS and testing
Decided I wanted to get this out to explain the current state of testing, and **collect feedback** (implementers please comment) on what you need from testing, and your feelings about BLS usage in tests.
# BLS and testing
The two pain-points to get a pretty (and large) set of test-vectors out for clients are:
- BLS Signature creation
- BLS Signature verification
And a side-issue, but one that is easily resolved:
*efficient creation of a genesis state*:
When BLS functionality is implemented in test-code (creation of signed deposits, and verification).
Solution would be to either cache it, or create it directly, without going through the spec functions (current temporary solution on experiment branch).
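Editor's note: as a rough sketch of the caching option (names are illustrative, not spec code), the expensive genesis construction can be memoised once and handed out as deep copies so tests cannot mutate each other's state:

```python
import copy
import functools
from typing import Any, Dict


def _build_genesis_state() -> Dict[str, Any]:
    # Stand-in for the expensive spec-level construction
    # (signed deposits plus the genesis state transition).
    return {"slot": 0, "validators": []}


@functools.lru_cache(maxsize=1)
def _genesis_state_once() -> Dict[str, Any]:
    return _build_genesis_state()


def genesis_state_copy() -> Dict[str, Any]:
    # Each test receives its own deep copy of the single cached state.
    return copy.deepcopy(_genesis_state_once())
```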
## Status
Talking about the status on [`spectest-deco` PR 1052](https://github.com/ethereum/eth2.0-specs/pull/1052) here, based on the `v06x` branch, where we are developing 0.6 improvements. (to be merged back into dev later)
### The testing pipeline currently looks like:
- py-spec, calls BLS stub
- test-helpers, don't create self-signed objects with valid signatures
- py-test code, unified with test-vector-creation (see [PR 1052](https://github.com/ethereum/eth2.0-specs/pull/1052))
- py-test runner to run spec-tests, purely for assertions
- test-generator running the spec-tests, passing `generator_mode=true` to each of them, making them output a test-vector.
### Pytests status:
- move from `tests/` to `eth2spec/test`, i.e. part of package
- removed use of `pytest`
- annotated with `@spec_test` or similar (see PR 1052)
- as part of test-generation effort, yay for shared effort:
- expanded in block-operation testing: [coverage checklist here](https://github.com/ethereum/eth2.0-specs/issues/927)
- slightly faster, less deep-copies
- stuck on BLS stub (no sig creation/verification)
### Test-generation status:
- BLS, SSZ-generic, SSZ-static, shuffling test generators still all in place and up to date (`v06x` branch)
- `operations` test-gen uses test-package ability to output test-vectors for each test-case
- but no valid signatures
- lack of a definition how to handle this signature problem as a test-consumer
- there are no signature-related testcases
- turning BLS off would effectively let you check conformance, but it's hacky, and not remotely a good practice to have even an option for...
- it's approx. ~140MB worth (iirc) of yaml encoded state-transitions, covering many edge-cases. Worth to get in the hands of implementers quick.
- `sanity` tests updated and can be cleanly used for test-generation, but require more work to define the format of the test-vectors, as there is more variety.
- `epoch` processing tests also updated, also can be used, not as complete as block-processing, lower priority.
## Possible ways forward:
- Simple but hacky: "turn BLS off for testing"
- No "BLS off", BLS ON on client side, but only partially on spec side. Rely on signature verification not being hit before anything else during testing
- valid test cases generated with valid signatures
- invalid test cases marked: does it error because of BLS? And runners should check the reason for aborting processing: if it doesn't match, the test should fail. Now these pytests don't need full BLS update work, and can be released somewhat quicker
- "BLS on", more work (~1 week)
- slower on test-generation, but we get the best kind of test-vectors: correct, BLS verification ON.
- blocker: what if a test case fails because of a signature error (test setup not creating the sig correctly), instead of a real assertion case. Spec will look correct, passes tests, but things are not right. We need to mark Sig-verification errors distinctly, so we can catch these problems when we turn BLS on in the pyspec. How: instead of `assert verify_...`, just `verify_...`, and make it raise a special `BLSVerificationError` (or something like that)
- We likely still want to mark tests as "signature related" or not, so implementers can catch it easily if their code is not aborting properly before signature verification, to assure invalid inputs are not costly.
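Editor's note: to make the distinct-error idea from the bullet above concrete, here is a minimal sketch (the helper and exception names are assumptions for illustration, not code from this repository):

```python
from typing import Callable


class BLSVerificationError(Exception):
    """Raised when a BLS signature check fails, so tooling can tell a broken
    test signature apart from a genuine assertion failure."""


def verify_or_raise(
    verify: Callable[[bytes, bytes, bytes, int], bool],
    pubkey: bytes,
    message_hash: bytes,
    signature: bytes,
    domain: int,
) -> None:
    # Instead of `assert verify(...)`, raise a dedicated exception type so the
    # failure reason stays unambiguous once BLS is switched on in the pyspec.
    if not verify(pubkey, message_hash, signature, domain):
        raise BLSVerificationError("BLS signature verification failed")
```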
A work-in-progress introduction of actual full BLS usage in the pytests is started here: [`tests-with-sigs` branch](https://github.com/ethereum/eth2.0-specs/tree/tests-with-sigs)
Suggestions welcome.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scripts/phase0/build_spec.py`
Content:
```
1 import sys
2 import function_puller
3
4
5 def build_phase0_spec(sourcefile, outfile):
6 code_lines = []
7 code_lines.append("""
8 from typing import (
9 Any,
10 Dict,
11 List,
12 NewType,
13 Tuple,
14 )
15 from eth2spec.utils.minimal_ssz import *
16 from eth2spec.utils.bls_stub import *
17
18 """)
19 for i in (1, 2, 3, 4, 8, 32, 48, 96):
20 code_lines.append("def int_to_bytes%d(x): return x.to_bytes(%d, 'little')" % (i, i))
21
22 code_lines.append("""
23
24 # stub, will get overwritten by real var
25 SLOTS_PER_EPOCH = 64
26
27
28 Slot = NewType('Slot', int) # uint64
29 Epoch = NewType('Epoch', int) # uint64
30 Shard = NewType('Shard', int) # uint64
31 ValidatorIndex = NewType('ValidatorIndex', int) # uint64
32 Gwei = NewType('Gwei', int) # uint64
33 Bytes32 = NewType('Bytes32', bytes) # bytes32
34 BLSPubkey = NewType('BLSPubkey', bytes) # bytes48
35 BLSSignature = NewType('BLSSignature', bytes) # bytes96
36 Store = None
37 """)
38
39 code_lines += function_puller.get_spec(sourcefile)
40
41 code_lines.append("""
42 # Monkey patch validator compute committee code
43 _compute_committee = compute_committee
44 committee_cache = {}
45
46
47 def compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:
48 param_hash = (hash_tree_root(indices), seed, index, count)
49
50 if param_hash in committee_cache:
51 return committee_cache[param_hash]
52 else:
53 ret = _compute_committee(indices, seed, index, count)
54 committee_cache[param_hash] = ret
55 return ret
56
57
58 # Monkey patch hash cache
59 _hash = hash
60 hash_cache = {}
61
62
63 def hash(x):
64 if x in hash_cache:
65 return hash_cache[x]
66 else:
67 ret = _hash(x)
68 hash_cache[x] = ret
69 return ret
70
71 # Access to overwrite spec constants based on configuration
72 def apply_constants_preset(preset: Dict[str, Any]):
73 global_vars = globals()
74 for k, v in preset.items():
75 global_vars[k] = v
76
77 # Deal with derived constants
78 global_vars['GENESIS_EPOCH'] = slot_to_epoch(GENESIS_SLOT)
79
80 # Initialize SSZ types again, to account for changed lengths
81 init_SSZ_types()
82 """)
83
84 with open(outfile, 'w') as out:
85 out.write("\n".join(code_lines))
86
87
88 if __name__ == '__main__':
89 if len(sys.argv) < 3:
90 print("Usage: <source phase0> <output phase0 pyspec>")
91 build_phase0_spec(sys.argv[1], sys.argv[2])
92
93
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scripts/phase0/build_spec.py b/scripts/phase0/build_spec.py
--- a/scripts/phase0/build_spec.py
+++ b/scripts/phase0/build_spec.py
@@ -13,7 +13,7 @@
Tuple,
)
from eth2spec.utils.minimal_ssz import *
-from eth2spec.utils.bls_stub import *
+from eth2spec.utils.bls import *
""")
for i in (1, 2, 3, 4, 8, 32, 48, 96):
| {"golden_diff": "diff --git a/scripts/phase0/build_spec.py b/scripts/phase0/build_spec.py\n--- a/scripts/phase0/build_spec.py\n+++ b/scripts/phase0/build_spec.py\n@@ -13,7 +13,7 @@\n Tuple,\n )\n from eth2spec.utils.minimal_ssz import *\n-from eth2spec.utils.bls_stub import *\n+from eth2spec.utils.bls import *\n \n \"\"\")\n for i in (1, 2, 3, 4, 8, 32, 48, 96):\n", "issue": "BLS and testing\nDecided I wanted to get this out to explain the current state of testing, and **collect feedback** (implementers please comment) on what you need from testing, and your feelings about BLS usage in tests.\r\n\r\n# BLS and testing\r\n\r\nThe two pain-points to get a pretty (and large) set of test-vectors out for clients are:\r\n- BLS Signature creation\r\n- BLS Signature verification\r\n\r\nAnd side-issue, but easily resolved:\r\n*efficient creation of a genesis state*:\r\nWhen BLS functionality is implemented in test-code (creation of signed deposits, and verification).\r\nSolution would be to either cache it, or create it directly, without going through the spec functions (current temporary solution on experiment branch).\r\n\r\n## Status\r\n\r\nTalking about the status on [`spectest-deco` PR 1052](https://github.com/ethereum/eth2.0-specs/pull/1052) here, based on the `v06x` branch, where we are developing 0.6 improvements. (to be merged back into dev later)\r\n\r\n### The testing pipeline currently looks like:\r\n\r\n- py-spec, calls BLS stub\r\n- test-helpers, don't create self-signed objects with valid signatures\r\n- py-test code, unified with test-vector-creation (see [PR 1052](https://github.com/ethereum/eth2.0-specs/pull/1052))\r\n- py-test runner to run spec-tests, purely for assertions\r\n- test-generator running the spec-tests, passing `generator_mode=true` to each of them, making them output a test-vector.\r\n\r\n### Pytests status:\r\n\r\n- move from `tests/` to `eth2spec/test`, i.e. part of package\r\n - removed use of `pytest`\r\n - annotated with `@spec_test` or similar (see PR 1052)\r\n- as part of test-generation effort, yay for shared effort:\r\n - expanded in block-operation testing: [coverage checklist here](https://github.com/ethereum/eth2.0-specs/issues/927)\r\n - slightly faster, less deep-copies\r\n- stuck on BLS stub (no sig creation/verification)\r\n\r\n### Test-generation status:\r\n\r\n- BLS, SSZ-generic, SSZ-static, shuffling test generators still all in place and up to date (`v06x` branch)\r\n- `operations` test-gen uses test-package ability to output test-vectors for each test-case\r\n - but no valid signatures\r\n - lack of a definition how to handle this signature problem as a test-consumer\r\n - there are no signature-related testcases\r\n - turning BLS off would effectively let you check conformance, but it's hacky, and not remotely a good practice to have even an option for...\r\n - it's approx. ~140MB worth (iirc) of yaml encoded state-transitions, covering many edge-cases. Worth to get in the hands of implementers quick.\r\n- `sanity` tests updated and can be cleanly used for test-generation, but requires more work to define the format of the test-vectors, as they is more variety.\r\n- `epoch` processing tests also updated, also can be used, not as complete as block-processing, lower priority.\r\n\r\n## Possible ways forward:\r\n\r\n- Simple but hacky: \"turn BLS off for testing\"\r\n- No \"BLS off\", BLS ON on client side, but only partially on spec side. 
Rely on signature verification not being hit before anything else during testing\r\n - valid test cases generated with valid signatures\r\n - invalid test cases marked: does it error because of BLS? And runners should check the reason for aborting processing: if it doesn't match, the test should fail. Now these pytests don't need full BLS update work, and can be released somewhat quicker\r\n- \"BLS on\", more work (~1 week)\r\n - slower on test-generation, but we get the best kind of test-vectors: correct, BLS verification ON.\r\n - blocker: what if a test case fails because of a signature error (test setup not creating the sig correctly), instead of a real assertion case. Spec will look correct, passes tests, but things are not right. We need to mark Sig-verification errors distinctly, so we can catch these problems when we turn BLS on in the pyspec. How: instead of `assert verify_...`, just `verify_...`, and make it raise a special `BLSVerificationError` (or something like that)\r\n - We likely still want to mark tests as \"signature related\" or not, so implementers can catch it easily if their code is not aborting properly before signature verification, to assure invalid inputs are not costly.\r\n\r\nA work-in-progress introduction of actual full BLS usage in the pytests is started here: [`tests-with-sigs` branch](https://github.com/ethereum/eth2.0-specs/tree/tests-with-sigs)\r\n\r\nSuggestions welcome.\r\n\r\n\r\n\r\n\r\n\n", "before_files": [{"content": "import sys\nimport function_puller\n\n\ndef build_phase0_spec(sourcefile, outfile):\n code_lines = []\n code_lines.append(\"\"\"\nfrom typing import (\n Any,\n Dict,\n List,\n NewType,\n Tuple,\n)\nfrom eth2spec.utils.minimal_ssz import *\nfrom eth2spec.utils.bls_stub import *\n\n\"\"\")\n for i in (1, 2, 3, 4, 8, 32, 48, 96):\n code_lines.append(\"def int_to_bytes%d(x): return x.to_bytes(%d, 'little')\" % (i, i))\n\n code_lines.append(\"\"\"\n\n# stub, will get overwritten by real var\nSLOTS_PER_EPOCH = 64\n\n\nSlot = NewType('Slot', int) # uint64\nEpoch = NewType('Epoch', int) # uint64\nShard = NewType('Shard', int) # uint64\nValidatorIndex = NewType('ValidatorIndex', int) # uint64\nGwei = NewType('Gwei', int) # uint64\nBytes32 = NewType('Bytes32', bytes) # bytes32\nBLSPubkey = NewType('BLSPubkey', bytes) # bytes48\nBLSSignature = NewType('BLSSignature', bytes) # bytes96\nStore = None\n\"\"\")\n\n code_lines += function_puller.get_spec(sourcefile)\n\n code_lines.append(\"\"\"\n# Monkey patch validator compute committee code\n_compute_committee = compute_committee\ncommittee_cache = {}\n\n\ndef compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:\n param_hash = (hash_tree_root(indices), seed, index, count)\n\n if param_hash in committee_cache:\n return committee_cache[param_hash]\n else:\n ret = _compute_committee(indices, seed, index, count)\n committee_cache[param_hash] = ret\n return ret\n\n\n# Monkey patch hash cache\n_hash = hash\nhash_cache = {}\n\n\ndef hash(x):\n if x in hash_cache:\n return hash_cache[x]\n else:\n ret = _hash(x)\n hash_cache[x] = ret\n return ret\n\n# Access to overwrite spec constants based on configuration\ndef apply_constants_preset(preset: Dict[str, Any]):\n global_vars = globals()\n for k, v in preset.items():\n global_vars[k] = v\n\n # Deal with derived constants\n global_vars['GENESIS_EPOCH'] = slot_to_epoch(GENESIS_SLOT)\n\n # Initialize SSZ types again, to account for changed lengths\n init_SSZ_types()\n\"\"\")\n\n with open(outfile, 'w') 
as out:\n out.write(\"\\n\".join(code_lines))\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 3:\n print(\"Usage: <source phase0> <output phase0 pyspec>\")\n build_phase0_spec(sys.argv[1], sys.argv[2])\n\n", "path": "scripts/phase0/build_spec.py"}], "after_files": [{"content": "import sys\nimport function_puller\n\n\ndef build_phase0_spec(sourcefile, outfile):\n code_lines = []\n code_lines.append(\"\"\"\nfrom typing import (\n Any,\n Dict,\n List,\n NewType,\n Tuple,\n)\nfrom eth2spec.utils.minimal_ssz import *\nfrom eth2spec.utils.bls import *\n\n\"\"\")\n for i in (1, 2, 3, 4, 8, 32, 48, 96):\n code_lines.append(\"def int_to_bytes%d(x): return x.to_bytes(%d, 'little')\" % (i, i))\n\n code_lines.append(\"\"\"\n\n# stub, will get overwritten by real var\nSLOTS_PER_EPOCH = 64\n\n\nSlot = NewType('Slot', int) # uint64\nEpoch = NewType('Epoch', int) # uint64\nShard = NewType('Shard', int) # uint64\nValidatorIndex = NewType('ValidatorIndex', int) # uint64\nGwei = NewType('Gwei', int) # uint64\nBytes32 = NewType('Bytes32', bytes) # bytes32\nBLSPubkey = NewType('BLSPubkey', bytes) # bytes48\nBLSSignature = NewType('BLSSignature', bytes) # bytes96\nStore = None\n\"\"\")\n\n code_lines += function_puller.get_spec(sourcefile)\n\n code_lines.append(\"\"\"\n# Monkey patch validator compute committee code\n_compute_committee = compute_committee\ncommittee_cache = {}\n\n\ndef compute_committee(indices: List[ValidatorIndex], seed: Bytes32, index: int, count: int) -> List[ValidatorIndex]:\n param_hash = (hash_tree_root(indices), seed, index, count)\n\n if param_hash in committee_cache:\n return committee_cache[param_hash]\n else:\n ret = _compute_committee(indices, seed, index, count)\n committee_cache[param_hash] = ret\n return ret\n\n\n# Monkey patch hash cache\n_hash = hash\nhash_cache = {}\n\n\ndef hash(x):\n if x in hash_cache:\n return hash_cache[x]\n else:\n ret = _hash(x)\n hash_cache[x] = ret\n return ret\n\n# Access to overwrite spec constants based on configuration\ndef apply_constants_preset(preset: Dict[str, Any]):\n global_vars = globals()\n for k, v in preset.items():\n global_vars[k] = v\n\n # Deal with derived constants\n global_vars['GENESIS_EPOCH'] = slot_to_epoch(GENESIS_SLOT)\n\n # Initialize SSZ types again, to account for changed lengths\n init_SSZ_types()\n\"\"\")\n\n with open(outfile, 'w') as out:\n out.write(\"\\n\".join(code_lines))\n\n\nif __name__ == '__main__':\n if len(sys.argv) < 3:\n print(\"Usage: <source phase0> <output phase0 pyspec>\")\n build_phase0_spec(sys.argv[1], sys.argv[2])\n\n", "path": "scripts/phase0/build_spec.py"}]} |
gh_patches_debug_152 | rasdani/github-patches | git_diff | pex-tool__pex-630 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 1.6.0
On the docket:
+ (longterm fix) unhandled AttributeError during pex bootstrapping with PEX_PATH #598
+ Vendor setuptools / wheel. #607
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = '1.5.3'
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = '1.5.3'
+__version__ = '1.6.0'
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = '1.5.3'\n+__version__ = '1.6.0'\n", "issue": "Release 1.6.0\nOn the docket:\r\n+ (longterm fix) unhandled AttributeError during pex bootstrapping with PEX_PATH #598\r\n+ Vendor setuptools / wheel. #607\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '1.5.3'\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '1.6.0'\n", "path": "pex/version.py"}]} |
gh_patches_debug_153 | rasdani/github-patches | git_diff | pex-tool__pex-916 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.6
On the docket:
+ [x] Don't delete the root `__init__.py` when devendoring. #915
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = '2.1.5'
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = '2.1.5'
+__version__ = '2.1.6'
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = '2.1.5'\n+__version__ = '2.1.6'\n", "issue": "Release 2.1.6\nOn the docket:\r\n+ [x] Don't delete the root `__init__.py` when devendoring. #915\r\n\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.1.5'\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.1.6'\n", "path": "pex/version.py"}]} |
gh_patches_debug_154 | rasdani/github-patches | git_diff | google-research__text-to-text-transfer-transformer-480 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Running hf_model.py
I am trying to run your models with [`hf_model`](https://github.com/google-research/text-to-text-transfer-transformer/blob/master/t5/models/hf_model.py). The current blocker is that the code uses `num_parallel_calls` [in multiple places](https://github.com/google-research/text-to-text-transfer-transformer/blob/master/t5/models/hf_model.py#L128); however, this function seems to be [deprecated](https://github.com/google-research/text-to-text-transfer-transformer/blob/838157d433995473e96b773c9c761b6aadf01e37/t5/data/preprocessors.py#L2651).
Wondering if there is a replacement for this function I can use as a quick fix.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `t5/version.py`
Content:
```
1 # Copyright 2020 The T5 Authors.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # Lint as: python3
16 r"""Separate file for storing the current version of T5.
17
18 Stored in a separate file so that setup.py can reference the version without
19 pulling in all the dependencies in __init__.py.
20 """
21 __version__ = '0.7.0'
22
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/t5/version.py b/t5/version.py
--- a/t5/version.py
+++ b/t5/version.py
@@ -18,4 +18,4 @@
Stored in a separate file so that setup.py can reference the version without
pulling in all the dependencies in __init__.py.
"""
-__version__ = '0.7.0'
+__version__ = '0.7.1'
| {"golden_diff": "diff --git a/t5/version.py b/t5/version.py\n--- a/t5/version.py\n+++ b/t5/version.py\n@@ -18,4 +18,4 @@\n Stored in a separate file so that setup.py can reference the version without\n pulling in all the dependencies in __init__.py.\n \"\"\"\n-__version__ = '0.7.0'\n+__version__ = '0.7.1'\n", "issue": "Running hf_model.py \nI am trying to run your models with [`hf_model`](https://github.com/google-research/text-to-text-transfer-transformer/blob/master/t5/models/hf_model.py). The current blocker issue is that the code is using `num_parallel_calls` in [in multiple places](https://github.com/google-research/text-to-text-transfer-transformer/blob/master/t5/models/hf_model.py#L128), however, this function seems to be [deprecated](https://github.com/google-research/text-to-text-transfer-transformer/blob/838157d433995473e96b773c9c761b6aadf01e37/t5/data/preprocessors.py#L2651).\r\n\r\nWondering if there is a replacement for this function I can use as a quick fix. \n", "before_files": [{"content": "# Copyright 2020 The T5 Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Lint as: python3\nr\"\"\"Separate file for storing the current version of T5.\n\nStored in a separate file so that setup.py can reference the version without\npulling in all the dependencies in __init__.py.\n\"\"\"\n__version__ = '0.7.0'\n", "path": "t5/version.py"}], "after_files": [{"content": "# Copyright 2020 The T5 Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# Lint as: python3\nr\"\"\"Separate file for storing the current version of T5.\n\nStored in a separate file so that setup.py can reference the version without\npulling in all the dependencies in __init__.py.\n\"\"\"\n__version__ = '0.7.1'\n", "path": "t5/version.py"}]} |
gh_patches_debug_155 | rasdani/github-patches | git_diff | comic__grand-challenge.org-3379 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Server error page won't render because of missing context
If a view throws a 500 error, the 500.html should get rendered. We recently updated the template to inherit from base.html, and now it will not render anymore because it is missing context variables (the 500 view is by default passed an empty context).
I'm unsure whether we should update the 500 view to add the missing context, or go back to not inheriting from base.html for the error views.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `app/config/urls/challenge_subdomain.py`
Content:
```
1 from django.conf import settings
2 from django.urls import include, path
3 from django.views.generic import TemplateView
4
5 from grandchallenge.challenges.views import ChallengeUpdate
6
7 urlpatterns = [
8 path(
9 "robots.txt",
10 TemplateView.as_view(
11 template_name="robots.txt", content_type="text/plain"
12 ),
13 name="subdomain_robots_txt",
14 ),
15 path(
16 "evaluation/",
17 include("grandchallenge.evaluation.urls", namespace="evaluation"),
18 ),
19 path("teams/", include("grandchallenge.teams.urls", namespace="teams")),
20 path(
21 "participants/",
22 include("grandchallenge.participants.urls", namespace="participants"),
23 ),
24 path("admins/", include("grandchallenge.admins.urls", namespace="admins")),
25 path("update/", ChallengeUpdate.as_view(), name="challenge-update"),
26 path("summernote/", include("django_summernote.urls")),
27 path("", include("grandchallenge.pages.urls", namespace="pages")),
28 ]
29
30 if settings.DEBUG and settings.ENABLE_DEBUG_TOOLBAR:
31 import debug_toolbar
32
33 urlpatterns = [
34 path("__debug__/", include(debug_toolbar.urls))
35 ] + urlpatterns
36
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/app/config/urls/challenge_subdomain.py b/app/config/urls/challenge_subdomain.py
--- a/app/config/urls/challenge_subdomain.py
+++ b/app/config/urls/challenge_subdomain.py
@@ -4,6 +4,9 @@
from grandchallenge.challenges.views import ChallengeUpdate
+handler500 = "grandchallenge.core.views.handler500"
+
+
urlpatterns = [
path(
"robots.txt",
| {"golden_diff": "diff --git a/app/config/urls/challenge_subdomain.py b/app/config/urls/challenge_subdomain.py\n--- a/app/config/urls/challenge_subdomain.py\n+++ b/app/config/urls/challenge_subdomain.py\n@@ -4,6 +4,9 @@\n \n from grandchallenge.challenges.views import ChallengeUpdate\n \n+handler500 = \"grandchallenge.core.views.handler500\"\n+\n+\n urlpatterns = [\n path(\n \"robots.txt\",\n", "issue": "Server error page won't render because of missing context\nIf a view throws a 500 error, the 500.html should get rendered. We recently updated the template to inherit from base.html, and now it will not render anymore because it is missing context variables (the 500 view is by default passed an empty context). \r\n\r\nI'm unsure if we should update the 500 view and add the missing context or if we should go back to not inheriting from base.html for the error views? \r\n\r\n\n", "before_files": [{"content": "from django.conf import settings\nfrom django.urls import include, path\nfrom django.views.generic import TemplateView\n\nfrom grandchallenge.challenges.views import ChallengeUpdate\n\nurlpatterns = [\n path(\n \"robots.txt\",\n TemplateView.as_view(\n template_name=\"robots.txt\", content_type=\"text/plain\"\n ),\n name=\"subdomain_robots_txt\",\n ),\n path(\n \"evaluation/\",\n include(\"grandchallenge.evaluation.urls\", namespace=\"evaluation\"),\n ),\n path(\"teams/\", include(\"grandchallenge.teams.urls\", namespace=\"teams\")),\n path(\n \"participants/\",\n include(\"grandchallenge.participants.urls\", namespace=\"participants\"),\n ),\n path(\"admins/\", include(\"grandchallenge.admins.urls\", namespace=\"admins\")),\n path(\"update/\", ChallengeUpdate.as_view(), name=\"challenge-update\"),\n path(\"summernote/\", include(\"django_summernote.urls\")),\n path(\"\", include(\"grandchallenge.pages.urls\", namespace=\"pages\")),\n]\n\nif settings.DEBUG and settings.ENABLE_DEBUG_TOOLBAR:\n import debug_toolbar\n\n urlpatterns = [\n path(\"__debug__/\", include(debug_toolbar.urls))\n ] + urlpatterns\n", "path": "app/config/urls/challenge_subdomain.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.urls import include, path\nfrom django.views.generic import TemplateView\n\nfrom grandchallenge.challenges.views import ChallengeUpdate\n\nhandler500 = \"grandchallenge.core.views.handler500\"\n\n\nurlpatterns = [\n path(\n \"robots.txt\",\n TemplateView.as_view(\n template_name=\"robots.txt\", content_type=\"text/plain\"\n ),\n name=\"subdomain_robots_txt\",\n ),\n path(\n \"evaluation/\",\n include(\"grandchallenge.evaluation.urls\", namespace=\"evaluation\"),\n ),\n path(\"teams/\", include(\"grandchallenge.teams.urls\", namespace=\"teams\")),\n path(\n \"participants/\",\n include(\"grandchallenge.participants.urls\", namespace=\"participants\"),\n ),\n path(\"admins/\", include(\"grandchallenge.admins.urls\", namespace=\"admins\")),\n path(\"update/\", ChallengeUpdate.as_view(), name=\"challenge-update\"),\n path(\"summernote/\", include(\"django_summernote.urls\")),\n path(\"\", include(\"grandchallenge.pages.urls\", namespace=\"pages\")),\n]\n\nif settings.DEBUG and settings.ENABLE_DEBUG_TOOLBAR:\n import debug_toolbar\n\n urlpatterns = [\n path(\"__debug__/\", include(debug_toolbar.urls))\n ] + urlpatterns\n", "path": "app/config/urls/challenge_subdomain.py"}]} |
gh_patches_debug_156 | rasdani/github-patches | git_diff | encode__httpx-1199 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
How about attaching the original traceback to exceptions raised in `map_exceptions()`?
Currently we use `map_exceptions()` to convert underlying library exceptions to `httpx` exceptions; when an expected exception is captured, we raise a new exception from `None`: 
https://github.com/encode/httpx/blob/655773e1c1b75895eda927d5a9d22a3b5b8f572d/httpx/_exceptions.py#L336-L359
This causes a traceback like this:
```
In [30]: httpx.get('http://notexisthost')
Traceback (most recent call last):
File "<ipython-input-30-7c88a36ce394>", line 1, in <module>
httpx.get('http://notexisthost')
File "D:\programs\anaconda3\lib\site-packages\httpx\_api.py", line 170, in get
trust_env=trust_env,
File "D:\programs\anaconda3\lib\site-packages\httpx\_api.py", line 96, in request
allow_redirects=allow_redirects,
File "D:\programs\anaconda3\lib\site-packages\httpx\_client.py", line 601, in request
request, auth=auth, allow_redirects=allow_redirects, timeout=timeout,
File "D:\programs\anaconda3\lib\site-packages\httpx\_client.py", line 621, in send
request, auth=auth, timeout=timeout, allow_redirects=allow_redirects,
File "D:\programs\anaconda3\lib\site-packages\httpx\_client.py", line 648, in send_handling_redirects
request, auth=auth, timeout=timeout, history=history
File "D:\programs\anaconda3\lib\site-packages\httpx\_client.py", line 684, in send_handling_auth
response = self.send_single_request(request, timeout)
File "D:\programs\anaconda3\lib\site-packages\httpx\_client.py", line 719, in send_single_request
timeout=timeout.as_dict(),
File "D:\programs\anaconda3\lib\site-packages\httpcore\_sync\http_proxy.py", line 99, in request
method, url, headers=headers, stream=stream, timeout=timeout
File "D:\programs\anaconda3\lib\site-packages\httpcore\_sync\http_proxy.py", line 150, in _forward_request
method, url, headers=headers, stream=stream, timeout=timeout
File "D:\programs\anaconda3\lib\site-packages\httpcore\_sync\connection.py", line 78, in request
return self.connection.request(method, url, headers, stream, timeout)
File "D:\programs\anaconda3\lib\site-packages\httpcore\_sync\http11.py", line 62, in request
) = self._receive_response(timeout)
File "D:\programs\anaconda3\lib\site-packages\httpcore\_sync\http11.py", line 115, in _receive_response
event = self._receive_event(timeout)
File "D:\programs\anaconda3\lib\site-packages\httpcore\_sync\http11.py", line 142, in _receive_event
event = self.h11_state.next_event()
File "D:\programs\anaconda3\lib\contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "D:\programs\anaconda3\lib\site-packages\httpcore\_exceptions.py", line 12, in map_exceptions
raise to_exc(exc) from None
ProtocolError: can't handle event type ConnectionClosed when role=SERVER and state=SEND_RESPONSE
```
Notice the last two frames from the traceback:
```
File "D:\programs\anaconda3\lib\contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "D:\programs\anaconda3\lib\site-packages\httpcore\_exceptions.py", line 12, in map_exceptions
raise to_exc(exc) from None
```
I find that it's not obvious to figure out where the root cause is. If we attach the original traceback to the newly raised exception like this:
```py
@contextlib.contextmanager
def map_exceptions(
mapping: typing.Mapping[typing.Type[Exception], typing.Type[Exception]],
**kwargs: typing.Any,
) -> typing.Iterator[None]:
try:
yield
except Exception as exc:
mapped_exc = None
for from_exc, to_exc in mapping.items():
if not isinstance(exc, from_exc):
continue
# We want to map to the most specific exception we can find.
# Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to
# `httpx.ReadTimeout`, not just `httpx.TimeoutException`.
if mapped_exc is None or issubclass(to_exc, mapped_exc):
mapped_exc = to_exc
if mapped_exc is None:
raise
message = str(exc)
raise mapped_exc(message, **kwargs).with_traceback(exc.__traceback__) from None # type: ignore
```
the traceback becomes:
```
In [4]: httpx.get('http://doestnotexist')
Traceback (most recent call last):
File "<ipython-input-4-74881dccb3f2>", line 1, in <module>
httpx.get('http://doestnotexist')
File "e:\projects\pycharm\httpx\httpx\_api.py", line 170, in get
trust_env=trust_env,
File "e:\projects\pycharm\httpx\httpx\_api.py", line 96, in request
allow_redirects=allow_redirects,
File "e:\projects\pycharm\httpx\httpx\_client.py", line 643, in request
request, auth=auth, allow_redirects=allow_redirects, timeout=timeout,
File "e:\projects\pycharm\httpx\httpx\_client.py", line 673, in send
request, auth=auth, timeout=timeout, allow_redirects=allow_redirects,
File "e:\projects\pycharm\httpx\httpx\_client.py", line 702, in _send_handling_redirects
request, auth=auth, timeout=timeout, history=history
File "e:\projects\pycharm\httpx\httpx\_client.py", line 738, in _send_handling_auth
response = self._send_single_request(request, timeout)
File "e:\projects\pycharm\httpx\httpx\_client.py", line 772, in _send_single_request
timeout=timeout.as_dict(),
File "D:\programs\anaconda3\lib\contextlib.py", line 130, in __exit__
self.gen.throw(type, value, traceback)
File "e:\projects\pycharm\httpx\httpx\_exceptions.py", line 359, in map_exceptions
raise mapped_exc(message, **kwargs).with_traceback(exc.__traceback__) from None # type: ignore
File "e:\projects\pycharm\httpx\httpx\_exceptions.py", line 342, in map_exceptions
yield
File "e:\projects\pycharm\httpx\httpx\_client.py", line 772, in _send_single_request
timeout=timeout.as_dict(),
File "E:\projects\pycharm\httpcore\httpcore\_sync\http_proxy.py", line 102, in request
method, url, headers=headers, stream=stream, timeout=timeout
File "E:\projects\pycharm\httpcore\httpcore\_sync\http_proxy.py", line 159, in _forward_request
method, url, headers=headers, stream=stream, timeout=timeout
File "E:\projects\pycharm\httpcore\httpcore\_sync\connection.py", line 96, in request
return self.connection.request(method, url, headers, stream, timeout)
File "E:\projects\pycharm\httpcore\httpcore\_sync\http11.py", line 73, in request
) = self._receive_response(timeout)
File "E:\projects\pycharm\httpcore\httpcore\_sync\http11.py", line 130, in _receive_response
event = self._receive_event(timeout)
File "E:\projects\pycharm\httpcore\httpcore\_sync\http11.py", line 160, in _receive_event
data = self.socket.read(self.READ_NUM_BYTES, timeout)
File "E:\projects\pycharm\httpcore\httpcore\_backends\sync.py", line 64, in read
raise ReadError("Server disconnected while attempting read")
ReadError: Server disconnected while attempting read
```
Maybe it would be easier to recognize the real problem?
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `httpx/_exceptions.py`
Content:
```
1 """
2 Our exception hierarchy:
3
4 * HTTPError
5 x RequestError
6 + TransportError
7 - TimeoutException
8 · ConnectTimeout
9 · ReadTimeout
10 · WriteTimeout
11 · PoolTimeout
12 - NetworkError
13 · ConnectError
14 · ReadError
15 · WriteError
16 · CloseError
17 - ProtocolError
18 · LocalProtocolError
19 · RemoteProtocolError
20 - ProxyError
21 - UnsupportedProtocol
22 + DecodingError
23 + TooManyRedirects
24 + RequestBodyUnavailable
25 x HTTPStatusError
26 * InvalidURL
27 * NotRedirectResponse
28 * CookieConflict
29 * StreamError
30 x StreamConsumed
31 x ResponseNotRead
32 x RequestNotRead
33 x ResponseClosed
34 """
35 import contextlib
36 import typing
37
38 import httpcore
39
40 if typing.TYPE_CHECKING:
41 from ._models import Request, Response # pragma: nocover
42
43
44 class HTTPError(Exception):
45 """
46 Base class for `RequestError` and `HTTPStatusError`.
47
48 Useful for `try...except` blocks when issuing a request,
49 and then calling `.raise_for_status()`.
50
51 For example:
52
53 ```
54 try:
55 response = httpx.get("https://www.example.com")
56 response.raise_for_status()
57 except httpx.HTTPError as exc:
58 print(f"HTTP Exception for {exc.request.url} - {exc.message}")
59 ```
60 """
61
62 def __init__(self, message: str, *, request: "Request") -> None:
63 super().__init__(message)
64 self.request = request
65
66
67 class RequestError(HTTPError):
68 """
69 Base class for all exceptions that may occur when issuing a `.request()`.
70 """
71
72 def __init__(self, message: str, *, request: "Request") -> None:
73 super().__init__(message, request=request)
74
75
76 class TransportError(RequestError):
77 """
78 Base class for all exceptions that occur at the level of the Transport API.
79
80 All of these exceptions also have an equivelent mapping in `httpcore`.
81 """
82
83
84 # Timeout exceptions...
85
86
87 class TimeoutException(TransportError):
88 """
89 The base class for timeout errors.
90
91 An operation has timed out.
92 """
93
94
95 class ConnectTimeout(TimeoutException):
96 """
97 Timed out while connecting to the host.
98 """
99
100
101 class ReadTimeout(TimeoutException):
102 """
103 Timed out while receiving data from the host.
104 """
105
106
107 class WriteTimeout(TimeoutException):
108 """
109 Timed out while sending data to the host.
110 """
111
112
113 class PoolTimeout(TimeoutException):
114 """
115 Timed out waiting to acquire a connection from the pool.
116 """
117
118
119 # Core networking exceptions...
120
121
122 class NetworkError(TransportError):
123 """
124 The base class for network-related errors.
125
126 An error occurred while interacting with the network.
127 """
128
129
130 class ReadError(NetworkError):
131 """
132 Failed to receive data from the network.
133 """
134
135
136 class WriteError(NetworkError):
137 """
138 Failed to send data through the network.
139 """
140
141
142 class ConnectError(NetworkError):
143 """
144 Failed to establish a connection.
145 """
146
147
148 class CloseError(NetworkError):
149 """
150 Failed to close a connection.
151 """
152
153
154 # Other transport exceptions...
155
156
157 class ProxyError(TransportError):
158 """
159 An error occurred while establishing a proxy connection.
160 """
161
162
163 class UnsupportedProtocol(TransportError):
164 """
165 Attempted to make a request to an unsupported protocol.
166
167 For example issuing a request to `ftp://www.example.com`.
168 """
169
170
171 class ProtocolError(TransportError):
172 """
173 The protocol was violated.
174 """
175
176
177 class LocalProtocolError(ProtocolError):
178 """
179 A protocol was violated by the client.
180
181 For example if the user instantiated a `Request` instance explicitly,
182 failed to include the mandatory `Host:` header, and then issued it directly
183 using `client.send()`.
184 """
185
186
187 class RemoteProtocolError(ProtocolError):
188 """
189 The protocol was violated by the server.
190
191 For exaample, returning malformed HTTP.
192 """
193
194
195 # Other request exceptions...
196
197
198 class DecodingError(RequestError):
199 """
200 Decoding of the response failed, due to a malformed encoding.
201 """
202
203
204 class TooManyRedirects(RequestError):
205 """
206 Too many redirects.
207 """
208
209
210 class RequestBodyUnavailable(RequestError):
211 """
212 Had to send the request again, but the request body was streaming, and is
213 no longer available.
214 """
215
216
217 # Client errors
218
219
220 class HTTPStatusError(HTTPError):
221 """
222 The response had an error HTTP status of 4xx or 5xx.
223
224 May be raised when calling `response.raise_for_status()`
225 """
226
227 def __init__(
228 self, message: str, *, request: "Request", response: "Response"
229 ) -> None:
230 super().__init__(message, request=request)
231 self.response = response
232
233
234 class InvalidURL(Exception):
235 """
236 URL is improperly formed or cannot be parsed.
237 """
238
239 def __init__(self, message: str) -> None:
240 super().__init__(message)
241
242
243 class NotRedirectResponse(Exception):
244 """
245 Response was not a redirect response.
246
247 May be raised if `response.next()` is called without first
248 properly checking `response.is_redirect`.
249 """
250
251 def __init__(self, message: str) -> None:
252 super().__init__(message)
253
254
255 class CookieConflict(Exception):
256 """
257 Attempted to lookup a cookie by name, but multiple cookies existed.
258
259 Can occur when calling `response.cookies.get(...)`.
260 """
261
262 def __init__(self, message: str) -> None:
263 super().__init__(message)
264
265
266 # Stream exceptions...
267
268 # These may occur as the result of a programming error, by accessing
269 # the request/response stream in an invalid manner.
270
271
272 class StreamError(Exception):
273 """
274 The base class for stream exceptions.
275
276 The developer made an error in accessing the request stream in
277 an invalid way.
278 """
279
280 def __init__(self, message: str) -> None:
281 super().__init__(message)
282
283
284 class StreamConsumed(StreamError):
285 """
286 Attempted to read or stream response content, but the content has already
287 been streamed.
288 """
289
290 def __init__(self) -> None:
291 message = (
292 "Attempted to read or stream response content, but the content has "
293 "already been streamed."
294 )
295 super().__init__(message)
296
297
298 class ResponseNotRead(StreamError):
299 """
300 Attempted to access response content, without having called `read()`
301 after a streaming response.
302 """
303
304 def __init__(self) -> None:
305 message = (
306 "Attempted to access response content, without having called `read()` "
307 "after a streaming response."
308 )
309 super().__init__(message)
310
311
312 class RequestNotRead(StreamError):
313 """
314 Attempted to access request content, without having called `read()`.
315 """
316
317 def __init__(self) -> None:
318 message = "Attempted to access request content, without having called `read()`."
319 super().__init__(message)
320
321
322 class ResponseClosed(StreamError):
323 """
324 Attempted to read or stream response content, but the request has been
325 closed.
326 """
327
328 def __init__(self) -> None:
329 message = (
330 "Attempted to read or stream response content, but the request has "
331 "been closed."
332 )
333 super().__init__(message)
334
335
336 @contextlib.contextmanager
337 def map_exceptions(
338 mapping: typing.Mapping[typing.Type[Exception], typing.Type[Exception]],
339 **kwargs: typing.Any,
340 ) -> typing.Iterator[None]:
341 try:
342 yield
343 except Exception as exc:
344 mapped_exc = None
345
346 for from_exc, to_exc in mapping.items():
347 if not isinstance(exc, from_exc):
348 continue
349 # We want to map to the most specific exception we can find.
350 # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to
351 # `httpx.ReadTimeout`, not just `httpx.TimeoutException`.
352 if mapped_exc is None or issubclass(to_exc, mapped_exc):
353 mapped_exc = to_exc
354
355 if mapped_exc is None:
356 raise
357
358 message = str(exc)
359 raise mapped_exc(message, **kwargs) from None # type: ignore
360
361
362 HTTPCORE_EXC_MAP = {
363 httpcore.TimeoutException: TimeoutException,
364 httpcore.ConnectTimeout: ConnectTimeout,
365 httpcore.ReadTimeout: ReadTimeout,
366 httpcore.WriteTimeout: WriteTimeout,
367 httpcore.PoolTimeout: PoolTimeout,
368 httpcore.NetworkError: NetworkError,
369 httpcore.ConnectError: ConnectError,
370 httpcore.ReadError: ReadError,
371 httpcore.WriteError: WriteError,
372 httpcore.CloseError: CloseError,
373 httpcore.ProxyError: ProxyError,
374 httpcore.UnsupportedProtocol: UnsupportedProtocol,
375 httpcore.ProtocolError: ProtocolError,
376 httpcore.LocalProtocolError: LocalProtocolError,
377 httpcore.RemoteProtocolError: RemoteProtocolError,
378 }
379
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/httpx/_exceptions.py b/httpx/_exceptions.py
--- a/httpx/_exceptions.py
+++ b/httpx/_exceptions.py
@@ -356,7 +356,7 @@
raise
message = str(exc)
- raise mapped_exc(message, **kwargs) from None # type: ignore
+ raise mapped_exc(message, **kwargs) from exc # type: ignore
HTTPCORE_EXC_MAP = {
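Editor's note: the merged change keeps the plain `raise ... from exc` form rather than the `.with_traceback(...)` variant proposed in the issue; both make the original failure visible, but `from exc` does it through exception chaining (`__cause__`) while `.with_traceback()` grafts the old traceback onto the new exception. A small, self-contained illustration of the chaining behaviour (generic names, unrelated to httpx internals):

```python
class LowLevelError(Exception):
    pass


class FriendlyError(Exception):
    pass


def convert(chain: bool) -> None:
    try:
        raise LowLevelError("server disconnected while attempting read")
    except LowLevelError as exc:
        if chain:
            # `from exc` records the original exception (and its traceback)
            # as __cause__, so it is printed beneath the new error.
            raise FriendlyError(str(exc)) from exc
        # `from None` suppresses the chain, hiding where the failure started.
        raise FriendlyError(str(exc)) from None


# convert(chain=True) prints both tracebacks; convert(chain=False) only the new one.
```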
| {"golden_diff": "diff --git a/httpx/_exceptions.py b/httpx/_exceptions.py\n--- a/httpx/_exceptions.py\n+++ b/httpx/_exceptions.py\n@@ -356,7 +356,7 @@\n raise\n \n message = str(exc)\n- raise mapped_exc(message, **kwargs) from None # type: ignore\n+ raise mapped_exc(message, **kwargs) from exc # type: ignore\n \n \n HTTPCORE_EXC_MAP = {\n", "issue": "How about attaching original traceback to exceptions raised in `map_exceptions()` ?\nCurrently we use `map_exceptions()` to covert underlying library exceptions to `httpx` exceptions, when captured an expected exception, we raise a new exception from `None`: \r\nhttps://github.com/encode/httpx/blob/655773e1c1b75895eda927d5a9d22a3b5b8f572d/httpx/_exceptions.py#L336-L359\r\n\r\nThis causes a traceback like this:\r\n```\r\nIn [30]: httpx.get('http://notexisthost')\r\nTraceback (most recent call last):\r\n File \"<ipython-input-30-7c88a36ce394>\", line 1, in <module>\r\n httpx.get('http://notexisthost')\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpx\\_api.py\", line 170, in get\r\n trust_env=trust_env,\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpx\\_api.py\", line 96, in request\r\n allow_redirects=allow_redirects,\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpx\\_client.py\", line 601, in request\r\n request, auth=auth, allow_redirects=allow_redirects, timeout=timeout,\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpx\\_client.py\", line 621, in send\r\n request, auth=auth, timeout=timeout, allow_redirects=allow_redirects,\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpx\\_client.py\", line 648, in send_handling_redirects\r\n request, auth=auth, timeout=timeout, history=history\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpx\\_client.py\", line 684, in send_handling_auth\r\n response = self.send_single_request(request, timeout)\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpx\\_client.py\", line 719, in send_single_request\r\n timeout=timeout.as_dict(),\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpcore\\_sync\\http_proxy.py\", line 99, in request\r\n method, url, headers=headers, stream=stream, timeout=timeout\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpcore\\_sync\\http_proxy.py\", line 150, in _forward_request\r\n method, url, headers=headers, stream=stream, timeout=timeout\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpcore\\_sync\\connection.py\", line 78, in request\r\n return self.connection.request(method, url, headers, stream, timeout)\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpcore\\_sync\\http11.py\", line 62, in request\r\n ) = self._receive_response(timeout)\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpcore\\_sync\\http11.py\", line 115, in _receive_response\r\n event = self._receive_event(timeout)\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpcore\\_sync\\http11.py\", line 142, in _receive_event\r\n event = self.h11_state.next_event()\r\n File \"D:\\programs\\anaconda3\\lib\\contextlib.py\", line 130, in __exit__\r\n self.gen.throw(type, value, traceback)\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpcore\\_exceptions.py\", line 12, in map_exceptions\r\n raise to_exc(exc) from None\r\nProtocolError: can't handle event type ConnectionClosed when role=SERVER and state=SEND_RESPONSE\r\n```\r\n\r\nNotice the last two frames from the traceback:\r\n```\r\nFile \"D:\\programs\\anaconda3\\lib\\contextlib.py\", line 130, in __exit__\r\n 
self.gen.throw(type, value, traceback)\r\n File \"D:\\programs\\anaconda3\\lib\\site-packages\\httpcore\\_exceptions.py\", line 12, in map_exceptions\r\n raise to_exc(exc) from None\r\n```\r\nI find that It's not obvious to figure out where's the root cause. If we attach the original traceback to the newly raised exception like this:\r\n```py\r\[email protected]\r\ndef map_exceptions(\r\n mapping: typing.Mapping[typing.Type[Exception], typing.Type[Exception]],\r\n **kwargs: typing.Any,\r\n) -> typing.Iterator[None]:\r\n try:\r\n yield\r\n except Exception as exc:\r\n mapped_exc = None\r\n\r\n for from_exc, to_exc in mapping.items():\r\n if not isinstance(exc, from_exc):\r\n continue\r\n # We want to map to the most specific exception we can find.\r\n # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to\r\n # `httpx.ReadTimeout`, not just `httpx.TimeoutException`.\r\n if mapped_exc is None or issubclass(to_exc, mapped_exc):\r\n mapped_exc = to_exc\r\n\r\n if mapped_exc is None:\r\n raise\r\n\r\n message = str(exc)\r\n raise mapped_exc(message, **kwargs).with_traceback(exc.__traceback__) from None # type: ignore\r\n```\r\nthe traceback becomes:\r\n```\r\nIn [4]: httpx.get('http://doestnotexist')\r\nTraceback (most recent call last):\r\n File \"<ipython-input-4-74881dccb3f2>\", line 1, in <module>\r\n httpx.get('http://doestnotexist')\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_api.py\", line 170, in get\r\n trust_env=trust_env,\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_api.py\", line 96, in request\r\n allow_redirects=allow_redirects,\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_client.py\", line 643, in request\r\n request, auth=auth, allow_redirects=allow_redirects, timeout=timeout,\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_client.py\", line 673, in send\r\n request, auth=auth, timeout=timeout, allow_redirects=allow_redirects,\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_client.py\", line 702, in _send_handling_redirects\r\n request, auth=auth, timeout=timeout, history=history\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_client.py\", line 738, in _send_handling_auth\r\n response = self._send_single_request(request, timeout)\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_client.py\", line 772, in _send_single_request\r\n timeout=timeout.as_dict(),\r\n File \"D:\\programs\\anaconda3\\lib\\contextlib.py\", line 130, in __exit__\r\n self.gen.throw(type, value, traceback)\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_exceptions.py\", line 359, in map_exceptions\r\n raise mapped_exc(message, **kwargs).with_traceback(exc.__traceback__) from None # type: ignore\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_exceptions.py\", line 342, in map_exceptions\r\n yield\r\n File \"e:\\projects\\pycharm\\httpx\\httpx\\_client.py\", line 772, in _send_single_request\r\n timeout=timeout.as_dict(),\r\n File \"E:\\projects\\pycharm\\httpcore\\httpcore\\_sync\\http_proxy.py\", line 102, in request\r\n method, url, headers=headers, stream=stream, timeout=timeout\r\n File \"E:\\projects\\pycharm\\httpcore\\httpcore\\_sync\\http_proxy.py\", line 159, in _forward_request\r\n method, url, headers=headers, stream=stream, timeout=timeout\r\n File \"E:\\projects\\pycharm\\httpcore\\httpcore\\_sync\\connection.py\", line 96, in request\r\n return self.connection.request(method, url, headers, stream, timeout)\r\n File \"E:\\projects\\pycharm\\httpcore\\httpcore\\_sync\\http11.py\", line 73, in request\r\n ) = self._receive_response(timeout)\r\n File 
\"E:\\projects\\pycharm\\httpcore\\httpcore\\_sync\\http11.py\", line 130, in _receive_response\r\n event = self._receive_event(timeout)\r\n File \"E:\\projects\\pycharm\\httpcore\\httpcore\\_sync\\http11.py\", line 160, in _receive_event\r\n data = self.socket.read(self.READ_NUM_BYTES, timeout)\r\n File \"E:\\projects\\pycharm\\httpcore\\httpcore\\_backends\\sync.py\", line 64, in read\r\n raise ReadError(\"Server disconnected while attempting read\")\r\nReadError: Server disconnected while attempting read\r\n```\r\nMay be it will be easier to realize the really problems?\n", "before_files": [{"content": "\"\"\"\nOur exception hierarchy:\n\n* HTTPError\n x RequestError\n + TransportError\n - TimeoutException\n \u00b7 ConnectTimeout\n \u00b7 ReadTimeout\n \u00b7 WriteTimeout\n \u00b7 PoolTimeout\n - NetworkError\n \u00b7 ConnectError\n \u00b7 ReadError\n \u00b7 WriteError\n \u00b7 CloseError\n - ProtocolError\n \u00b7 LocalProtocolError\n \u00b7 RemoteProtocolError\n - ProxyError\n - UnsupportedProtocol\n + DecodingError\n + TooManyRedirects\n + RequestBodyUnavailable\n x HTTPStatusError\n* InvalidURL\n* NotRedirectResponse\n* CookieConflict\n* StreamError\n x StreamConsumed\n x ResponseNotRead\n x RequestNotRead\n x ResponseClosed\n\"\"\"\nimport contextlib\nimport typing\n\nimport httpcore\n\nif typing.TYPE_CHECKING:\n from ._models import Request, Response # pragma: nocover\n\n\nclass HTTPError(Exception):\n \"\"\"\n Base class for `RequestError` and `HTTPStatusError`.\n\n Useful for `try...except` blocks when issuing a request,\n and then calling `.raise_for_status()`.\n\n For example:\n\n ```\n try:\n response = httpx.get(\"https://www.example.com\")\n response.raise_for_status()\n except httpx.HTTPError as exc:\n print(f\"HTTP Exception for {exc.request.url} - {exc.message}\")\n ```\n \"\"\"\n\n def __init__(self, message: str, *, request: \"Request\") -> None:\n super().__init__(message)\n self.request = request\n\n\nclass RequestError(HTTPError):\n \"\"\"\n Base class for all exceptions that may occur when issuing a `.request()`.\n \"\"\"\n\n def __init__(self, message: str, *, request: \"Request\") -> None:\n super().__init__(message, request=request)\n\n\nclass TransportError(RequestError):\n \"\"\"\n Base class for all exceptions that occur at the level of the Transport API.\n\n All of these exceptions also have an equivelent mapping in `httpcore`.\n \"\"\"\n\n\n# Timeout exceptions...\n\n\nclass TimeoutException(TransportError):\n \"\"\"\n The base class for timeout errors.\n\n An operation has timed out.\n \"\"\"\n\n\nclass ConnectTimeout(TimeoutException):\n \"\"\"\n Timed out while connecting to the host.\n \"\"\"\n\n\nclass ReadTimeout(TimeoutException):\n \"\"\"\n Timed out while receiving data from the host.\n \"\"\"\n\n\nclass WriteTimeout(TimeoutException):\n \"\"\"\n Timed out while sending data to the host.\n \"\"\"\n\n\nclass PoolTimeout(TimeoutException):\n \"\"\"\n Timed out waiting to acquire a connection from the pool.\n \"\"\"\n\n\n# Core networking exceptions...\n\n\nclass NetworkError(TransportError):\n \"\"\"\n The base class for network-related errors.\n\n An error occurred while interacting with the network.\n \"\"\"\n\n\nclass ReadError(NetworkError):\n \"\"\"\n Failed to receive data from the network.\n \"\"\"\n\n\nclass WriteError(NetworkError):\n \"\"\"\n Failed to send data through the network.\n \"\"\"\n\n\nclass ConnectError(NetworkError):\n \"\"\"\n Failed to establish a connection.\n \"\"\"\n\n\nclass CloseError(NetworkError):\n \"\"\"\n Failed to 
close a connection.\n \"\"\"\n\n\n# Other transport exceptions...\n\n\nclass ProxyError(TransportError):\n \"\"\"\n An error occurred while establishing a proxy connection.\n \"\"\"\n\n\nclass UnsupportedProtocol(TransportError):\n \"\"\"\n Attempted to make a request to an unsupported protocol.\n\n For example issuing a request to `ftp://www.example.com`.\n \"\"\"\n\n\nclass ProtocolError(TransportError):\n \"\"\"\n The protocol was violated.\n \"\"\"\n\n\nclass LocalProtocolError(ProtocolError):\n \"\"\"\n A protocol was violated by the client.\n\n For example if the user instantiated a `Request` instance explicitly,\n failed to include the mandatory `Host:` header, and then issued it directly\n using `client.send()`.\n \"\"\"\n\n\nclass RemoteProtocolError(ProtocolError):\n \"\"\"\n The protocol was violated by the server.\n\n For exaample, returning malformed HTTP.\n \"\"\"\n\n\n# Other request exceptions...\n\n\nclass DecodingError(RequestError):\n \"\"\"\n Decoding of the response failed, due to a malformed encoding.\n \"\"\"\n\n\nclass TooManyRedirects(RequestError):\n \"\"\"\n Too many redirects.\n \"\"\"\n\n\nclass RequestBodyUnavailable(RequestError):\n \"\"\"\n Had to send the request again, but the request body was streaming, and is\n no longer available.\n \"\"\"\n\n\n# Client errors\n\n\nclass HTTPStatusError(HTTPError):\n \"\"\"\n The response had an error HTTP status of 4xx or 5xx.\n\n May be raised when calling `response.raise_for_status()`\n \"\"\"\n\n def __init__(\n self, message: str, *, request: \"Request\", response: \"Response\"\n ) -> None:\n super().__init__(message, request=request)\n self.response = response\n\n\nclass InvalidURL(Exception):\n \"\"\"\n URL is improperly formed or cannot be parsed.\n \"\"\"\n\n def __init__(self, message: str) -> None:\n super().__init__(message)\n\n\nclass NotRedirectResponse(Exception):\n \"\"\"\n Response was not a redirect response.\n\n May be raised if `response.next()` is called without first\n properly checking `response.is_redirect`.\n \"\"\"\n\n def __init__(self, message: str) -> None:\n super().__init__(message)\n\n\nclass CookieConflict(Exception):\n \"\"\"\n Attempted to lookup a cookie by name, but multiple cookies existed.\n\n Can occur when calling `response.cookies.get(...)`.\n \"\"\"\n\n def __init__(self, message: str) -> None:\n super().__init__(message)\n\n\n# Stream exceptions...\n\n# These may occur as the result of a programming error, by accessing\n# the request/response stream in an invalid manner.\n\n\nclass StreamError(Exception):\n \"\"\"\n The base class for stream exceptions.\n\n The developer made an error in accessing the request stream in\n an invalid way.\n \"\"\"\n\n def __init__(self, message: str) -> None:\n super().__init__(message)\n\n\nclass StreamConsumed(StreamError):\n \"\"\"\n Attempted to read or stream response content, but the content has already\n been streamed.\n \"\"\"\n\n def __init__(self) -> None:\n message = (\n \"Attempted to read or stream response content, but the content has \"\n \"already been streamed.\"\n )\n super().__init__(message)\n\n\nclass ResponseNotRead(StreamError):\n \"\"\"\n Attempted to access response content, without having called `read()`\n after a streaming response.\n \"\"\"\n\n def __init__(self) -> None:\n message = (\n \"Attempted to access response content, without having called `read()` \"\n \"after a streaming response.\"\n )\n super().__init__(message)\n\n\nclass RequestNotRead(StreamError):\n \"\"\"\n Attempted to access request content, 
without having called `read()`.\n \"\"\"\n\n def __init__(self) -> None:\n message = \"Attempted to access request content, without having called `read()`.\"\n super().__init__(message)\n\n\nclass ResponseClosed(StreamError):\n \"\"\"\n Attempted to read or stream response content, but the request has been\n closed.\n \"\"\"\n\n def __init__(self) -> None:\n message = (\n \"Attempted to read or stream response content, but the request has \"\n \"been closed.\"\n )\n super().__init__(message)\n\n\[email protected]\ndef map_exceptions(\n mapping: typing.Mapping[typing.Type[Exception], typing.Type[Exception]],\n **kwargs: typing.Any,\n) -> typing.Iterator[None]:\n try:\n yield\n except Exception as exc:\n mapped_exc = None\n\n for from_exc, to_exc in mapping.items():\n if not isinstance(exc, from_exc):\n continue\n # We want to map to the most specific exception we can find.\n # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to\n # `httpx.ReadTimeout`, not just `httpx.TimeoutException`.\n if mapped_exc is None or issubclass(to_exc, mapped_exc):\n mapped_exc = to_exc\n\n if mapped_exc is None:\n raise\n\n message = str(exc)\n raise mapped_exc(message, **kwargs) from None # type: ignore\n\n\nHTTPCORE_EXC_MAP = {\n httpcore.TimeoutException: TimeoutException,\n httpcore.ConnectTimeout: ConnectTimeout,\n httpcore.ReadTimeout: ReadTimeout,\n httpcore.WriteTimeout: WriteTimeout,\n httpcore.PoolTimeout: PoolTimeout,\n httpcore.NetworkError: NetworkError,\n httpcore.ConnectError: ConnectError,\n httpcore.ReadError: ReadError,\n httpcore.WriteError: WriteError,\n httpcore.CloseError: CloseError,\n httpcore.ProxyError: ProxyError,\n httpcore.UnsupportedProtocol: UnsupportedProtocol,\n httpcore.ProtocolError: ProtocolError,\n httpcore.LocalProtocolError: LocalProtocolError,\n httpcore.RemoteProtocolError: RemoteProtocolError,\n}\n", "path": "httpx/_exceptions.py"}], "after_files": [{"content": "\"\"\"\nOur exception hierarchy:\n\n* HTTPError\n x RequestError\n + TransportError\n - TimeoutException\n \u00b7 ConnectTimeout\n \u00b7 ReadTimeout\n \u00b7 WriteTimeout\n \u00b7 PoolTimeout\n - NetworkError\n \u00b7 ConnectError\n \u00b7 ReadError\n \u00b7 WriteError\n \u00b7 CloseError\n - ProtocolError\n \u00b7 LocalProtocolError\n \u00b7 RemoteProtocolError\n - ProxyError\n - UnsupportedProtocol\n + DecodingError\n + TooManyRedirects\n + RequestBodyUnavailable\n x HTTPStatusError\n* InvalidURL\n* NotRedirectResponse\n* CookieConflict\n* StreamError\n x StreamConsumed\n x ResponseNotRead\n x RequestNotRead\n x ResponseClosed\n\"\"\"\nimport contextlib\nimport typing\n\nimport httpcore\n\nif typing.TYPE_CHECKING:\n from ._models import Request, Response # pragma: nocover\n\n\nclass HTTPError(Exception):\n \"\"\"\n Base class for `RequestError` and `HTTPStatusError`.\n\n Useful for `try...except` blocks when issuing a request,\n and then calling `.raise_for_status()`.\n\n For example:\n\n ```\n try:\n response = httpx.get(\"https://www.example.com\")\n response.raise_for_status()\n except httpx.HTTPError as exc:\n print(f\"HTTP Exception for {exc.request.url} - {exc.message}\")\n ```\n \"\"\"\n\n def __init__(self, message: str, *, request: \"Request\") -> None:\n super().__init__(message)\n self.request = request\n\n\nclass RequestError(HTTPError):\n \"\"\"\n Base class for all exceptions that may occur when issuing a `.request()`.\n \"\"\"\n\n def __init__(self, message: str, *, request: \"Request\") -> None:\n super().__init__(message, request=request)\n\n\nclass TransportError(RequestError):\n 
\"\"\"\n Base class for all exceptions that occur at the level of the Transport API.\n\n All of these exceptions also have an equivelent mapping in `httpcore`.\n \"\"\"\n\n\n# Timeout exceptions...\n\n\nclass TimeoutException(TransportError):\n \"\"\"\n The base class for timeout errors.\n\n An operation has timed out.\n \"\"\"\n\n\nclass ConnectTimeout(TimeoutException):\n \"\"\"\n Timed out while connecting to the host.\n \"\"\"\n\n\nclass ReadTimeout(TimeoutException):\n \"\"\"\n Timed out while receiving data from the host.\n \"\"\"\n\n\nclass WriteTimeout(TimeoutException):\n \"\"\"\n Timed out while sending data to the host.\n \"\"\"\n\n\nclass PoolTimeout(TimeoutException):\n \"\"\"\n Timed out waiting to acquire a connection from the pool.\n \"\"\"\n\n\n# Core networking exceptions...\n\n\nclass NetworkError(TransportError):\n \"\"\"\n The base class for network-related errors.\n\n An error occurred while interacting with the network.\n \"\"\"\n\n\nclass ReadError(NetworkError):\n \"\"\"\n Failed to receive data from the network.\n \"\"\"\n\n\nclass WriteError(NetworkError):\n \"\"\"\n Failed to send data through the network.\n \"\"\"\n\n\nclass ConnectError(NetworkError):\n \"\"\"\n Failed to establish a connection.\n \"\"\"\n\n\nclass CloseError(NetworkError):\n \"\"\"\n Failed to close a connection.\n \"\"\"\n\n\n# Other transport exceptions...\n\n\nclass ProxyError(TransportError):\n \"\"\"\n An error occurred while establishing a proxy connection.\n \"\"\"\n\n\nclass UnsupportedProtocol(TransportError):\n \"\"\"\n Attempted to make a request to an unsupported protocol.\n\n For example issuing a request to `ftp://www.example.com`.\n \"\"\"\n\n\nclass ProtocolError(TransportError):\n \"\"\"\n The protocol was violated.\n \"\"\"\n\n\nclass LocalProtocolError(ProtocolError):\n \"\"\"\n A protocol was violated by the client.\n\n For example if the user instantiated a `Request` instance explicitly,\n failed to include the mandatory `Host:` header, and then issued it directly\n using `client.send()`.\n \"\"\"\n\n\nclass RemoteProtocolError(ProtocolError):\n \"\"\"\n The protocol was violated by the server.\n\n For exaample, returning malformed HTTP.\n \"\"\"\n\n\n# Other request exceptions...\n\n\nclass DecodingError(RequestError):\n \"\"\"\n Decoding of the response failed, due to a malformed encoding.\n \"\"\"\n\n\nclass TooManyRedirects(RequestError):\n \"\"\"\n Too many redirects.\n \"\"\"\n\n\nclass RequestBodyUnavailable(RequestError):\n \"\"\"\n Had to send the request again, but the request body was streaming, and is\n no longer available.\n \"\"\"\n\n\n# Client errors\n\n\nclass HTTPStatusError(HTTPError):\n \"\"\"\n The response had an error HTTP status of 4xx or 5xx.\n\n May be raised when calling `response.raise_for_status()`\n \"\"\"\n\n def __init__(\n self, message: str, *, request: \"Request\", response: \"Response\"\n ) -> None:\n super().__init__(message, request=request)\n self.response = response\n\n\nclass InvalidURL(Exception):\n \"\"\"\n URL is improperly formed or cannot be parsed.\n \"\"\"\n\n def __init__(self, message: str) -> None:\n super().__init__(message)\n\n\nclass NotRedirectResponse(Exception):\n \"\"\"\n Response was not a redirect response.\n\n May be raised if `response.next()` is called without first\n properly checking `response.is_redirect`.\n \"\"\"\n\n def __init__(self, message: str) -> None:\n super().__init__(message)\n\n\nclass CookieConflict(Exception):\n \"\"\"\n Attempted to lookup a cookie by name, but multiple cookies existed.\n\n 
Can occur when calling `response.cookies.get(...)`.\n \"\"\"\n\n def __init__(self, message: str) -> None:\n super().__init__(message)\n\n\n# Stream exceptions...\n\n# These may occur as the result of a programming error, by accessing\n# the request/response stream in an invalid manner.\n\n\nclass StreamError(Exception):\n \"\"\"\n The base class for stream exceptions.\n\n The developer made an error in accessing the request stream in\n an invalid way.\n \"\"\"\n\n def __init__(self, message: str) -> None:\n super().__init__(message)\n\n\nclass StreamConsumed(StreamError):\n \"\"\"\n Attempted to read or stream response content, but the content has already\n been streamed.\n \"\"\"\n\n def __init__(self) -> None:\n message = (\n \"Attempted to read or stream response content, but the content has \"\n \"already been streamed.\"\n )\n super().__init__(message)\n\n\nclass ResponseNotRead(StreamError):\n \"\"\"\n Attempted to access response content, without having called `read()`\n after a streaming response.\n \"\"\"\n\n def __init__(self) -> None:\n message = (\n \"Attempted to access response content, without having called `read()` \"\n \"after a streaming response.\"\n )\n super().__init__(message)\n\n\nclass RequestNotRead(StreamError):\n \"\"\"\n Attempted to access request content, without having called `read()`.\n \"\"\"\n\n def __init__(self) -> None:\n message = \"Attempted to access request content, without having called `read()`.\"\n super().__init__(message)\n\n\nclass ResponseClosed(StreamError):\n \"\"\"\n Attempted to read or stream response content, but the request has been\n closed.\n \"\"\"\n\n def __init__(self) -> None:\n message = (\n \"Attempted to read or stream response content, but the request has \"\n \"been closed.\"\n )\n super().__init__(message)\n\n\[email protected]\ndef map_exceptions(\n mapping: typing.Mapping[typing.Type[Exception], typing.Type[Exception]],\n **kwargs: typing.Any,\n) -> typing.Iterator[None]:\n try:\n yield\n except Exception as exc:\n mapped_exc = None\n\n for from_exc, to_exc in mapping.items():\n if not isinstance(exc, from_exc):\n continue\n # We want to map to the most specific exception we can find.\n # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to\n # `httpx.ReadTimeout`, not just `httpx.TimeoutException`.\n if mapped_exc is None or issubclass(to_exc, mapped_exc):\n mapped_exc = to_exc\n\n if mapped_exc is None:\n raise\n\n message = str(exc)\n raise mapped_exc(message, **kwargs) from exc # type: ignore\n\n\nHTTPCORE_EXC_MAP = {\n httpcore.TimeoutException: TimeoutException,\n httpcore.ConnectTimeout: ConnectTimeout,\n httpcore.ReadTimeout: ReadTimeout,\n httpcore.WriteTimeout: WriteTimeout,\n httpcore.PoolTimeout: PoolTimeout,\n httpcore.NetworkError: NetworkError,\n httpcore.ConnectError: ConnectError,\n httpcore.ReadError: ReadError,\n httpcore.WriteError: WriteError,\n httpcore.CloseError: CloseError,\n httpcore.ProxyError: ProxyError,\n httpcore.UnsupportedProtocol: UnsupportedProtocol,\n httpcore.ProtocolError: ProtocolError,\n httpcore.LocalProtocolError: LocalProtocolError,\n httpcore.RemoteProtocolError: RemoteProtocolError,\n}\n", "path": "httpx/_exceptions.py"}]} |
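The golden diff for this record replaces `raise mapped_exc(message, **kwargs) from None` with `raise ... from exc`, which is the standard way to keep the underlying traceback visible through exception chaining. A minimal, self-contained sketch of the difference — both exception classes below are invented for illustration and are not the real httpx/httpcore types:

```python
import traceback


class UpstreamError(Exception):
    pass


class MappedError(Exception):
    pass


def low_level():
    raise UpstreamError("server disconnected while attempting read")


def with_chaining():
    try:
        low_level()
    except UpstreamError as exc:
        # __cause__ is set; the UpstreamError frames stay in the printed traceback.
        raise MappedError(str(exc)) from exc


def without_chaining():
    try:
        low_level()
    except UpstreamError as exc:
        # __suppress_context__ is set; only the MappedError frames are shown.
        raise MappedError(str(exc)) from None


if __name__ == "__main__":
    try:
        with_chaining()
    except MappedError:
        traceback.print_exc()  # includes the original UpstreamError section
```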
gh_patches_debug_157 | rasdani/github-patches | git_diff | astronomer__astro-sdk-1401 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Doc: Cross link to API reference page from Operators page
Currently there is no way to jump from an operator page such as https://astro-sdk-python.readthedocs.io/en/stable/astro/sql/operators/get_value_list.html (or any of the other operators listed on https://astro-sdk-python.readthedocs.io/en/stable/guides/operators.html) to the corresponding function/operator definition in the API reference at https://astro-sdk-python.readthedocs.io/en/stable/autoapi/index.html
We should cross-link them using https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#cross-referencing-syntax
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `python-sdk/src/astro/sql/__init__.py`
Content:
```
1 from airflow.configuration import conf
2 from airflow.decorators.base import get_unique_task_id
3 from airflow.models.xcom_arg import XComArg
4
5 from astro.sql.operators.append import AppendOperator, append
6 from astro.sql.operators.cleanup import CleanupOperator, cleanup
7 from astro.sql.operators.dataframe import DataframeOperator, dataframe
8 from astro.sql.operators.drop import DropTableOperator, drop_table
9 from astro.sql.operators.export_file import ExportFileOperator, export_file
10 from astro.sql.operators.load_file import LoadFileOperator, load_file
11 from astro.sql.operators.merge import MergeOperator, merge
12 from astro.sql.operators.raw_sql import RawSQLOperator, run_raw_sql
13 from astro.sql.operators.transform import TransformOperator, transform, transform_file
14 from astro.table import Metadata, Table
15
16 __all__ = [
17 "AppendOperator",
18 "append",
19 "CleanupOperator",
20 "cleanup",
21 "DataframeOperator",
22 "dataframe",
23 "DropTableOperator",
24 "drop_table",
25 "ExportFileOperator",
26 "export_file",
27 "LoadFileOperator",
28 "load_file",
29 "MergeOperator",
30 "merge",
31 "Metadata",
32 "run_raw_sql",
33 "Table",
34 "TransformOperator",
35 "transform_file",
36 "transform",
37 ]
38
39
40 def get_value_list(sql: str, conn_id: str, **kwargs) -> XComArg:
41 """
42 Execute a sql statement and return the result.
43 By default, the response size is less than equal to value of ``max_map_length`` conf.
44 You can call a callable handler to alter the response by default it call ``fetchall`` on database result set.
45
46
47 :param sql: sql query to execute.
48 If the sql query will return huge number of row then it can overload the XCOM.
49 also, If you are using output of this method to expand a task using dynamic task map then
50 it can create lots of parallel task. So it is advisable to limit your sql query statement.
51 :param conn_id: Airflow connection id. This connection id will be used to identify the database client
52 and connect with it at runtime
53 """
54 handler = kwargs.get("handler") or (lambda result_set: result_set.fetchall())
55 max_map_length = int(conf.get(section="core", key="max_map_length"))
56 op_kwargs = {
57 "handler": handler,
58 "response_limit": max_map_length,
59 }
60 task_id = kwargs.get("task_id") or get_unique_task_id(
61 "get_value_list", dag=kwargs.get("dag"), task_group=kwargs.get("task_group")
62 )
63 kwargs.update({"task_id": task_id})
64 return RawSQLOperator(
65 sql=sql, conn_id=conn_id, op_kwargs=op_kwargs, python_callable=(lambda *args: None), **kwargs
66 ).output
67
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/python-sdk/src/astro/sql/__init__.py b/python-sdk/src/astro/sql/__init__.py
--- a/python-sdk/src/astro/sql/__init__.py
+++ b/python-sdk/src/astro/sql/__init__.py
@@ -24,6 +24,7 @@
"drop_table",
"ExportFileOperator",
"export_file",
+ "get_value_list",
"LoadFileOperator",
"load_file",
"MergeOperator",
| {"golden_diff": "diff --git a/python-sdk/src/astro/sql/__init__.py b/python-sdk/src/astro/sql/__init__.py\n--- a/python-sdk/src/astro/sql/__init__.py\n+++ b/python-sdk/src/astro/sql/__init__.py\n@@ -24,6 +24,7 @@\n \"drop_table\",\n \"ExportFileOperator\",\n \"export_file\",\n+ \"get_value_list\",\n \"LoadFileOperator\",\n \"load_file\",\n \"MergeOperator\",\n", "issue": "Doc: Cross link to API reference page from Operators page\nCurrently there is no way to jump to Func/Operator definition from https://astro-sdk-python.readthedocs.io/en/stable/astro/sql/operators/get_value_list.html (and other operators listed on https://astro-sdk-python.readthedocs.io/en/stable/guides/operators.html) to their definition https://astro-sdk-python.readthedocs.io/en/stable/autoapi/index.html\r\n\r\nWe should cross-link them using https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#cross-referencing-syntax\n", "before_files": [{"content": "from airflow.configuration import conf\nfrom airflow.decorators.base import get_unique_task_id\nfrom airflow.models.xcom_arg import XComArg\n\nfrom astro.sql.operators.append import AppendOperator, append\nfrom astro.sql.operators.cleanup import CleanupOperator, cleanup\nfrom astro.sql.operators.dataframe import DataframeOperator, dataframe\nfrom astro.sql.operators.drop import DropTableOperator, drop_table\nfrom astro.sql.operators.export_file import ExportFileOperator, export_file\nfrom astro.sql.operators.load_file import LoadFileOperator, load_file\nfrom astro.sql.operators.merge import MergeOperator, merge\nfrom astro.sql.operators.raw_sql import RawSQLOperator, run_raw_sql\nfrom astro.sql.operators.transform import TransformOperator, transform, transform_file\nfrom astro.table import Metadata, Table\n\n__all__ = [\n \"AppendOperator\",\n \"append\",\n \"CleanupOperator\",\n \"cleanup\",\n \"DataframeOperator\",\n \"dataframe\",\n \"DropTableOperator\",\n \"drop_table\",\n \"ExportFileOperator\",\n \"export_file\",\n \"LoadFileOperator\",\n \"load_file\",\n \"MergeOperator\",\n \"merge\",\n \"Metadata\",\n \"run_raw_sql\",\n \"Table\",\n \"TransformOperator\",\n \"transform_file\",\n \"transform\",\n]\n\n\ndef get_value_list(sql: str, conn_id: str, **kwargs) -> XComArg:\n \"\"\"\n Execute a sql statement and return the result.\n By default, the response size is less than equal to value of ``max_map_length`` conf.\n You can call a callable handler to alter the response by default it call ``fetchall`` on database result set.\n\n\n :param sql: sql query to execute.\n If the sql query will return huge number of row then it can overload the XCOM.\n also, If you are using output of this method to expand a task using dynamic task map then\n it can create lots of parallel task. So it is advisable to limit your sql query statement.\n :param conn_id: Airflow connection id. 
This connection id will be used to identify the database client\n and connect with it at runtime\n \"\"\"\n handler = kwargs.get(\"handler\") or (lambda result_set: result_set.fetchall())\n max_map_length = int(conf.get(section=\"core\", key=\"max_map_length\"))\n op_kwargs = {\n \"handler\": handler,\n \"response_limit\": max_map_length,\n }\n task_id = kwargs.get(\"task_id\") or get_unique_task_id(\n \"get_value_list\", dag=kwargs.get(\"dag\"), task_group=kwargs.get(\"task_group\")\n )\n kwargs.update({\"task_id\": task_id})\n return RawSQLOperator(\n sql=sql, conn_id=conn_id, op_kwargs=op_kwargs, python_callable=(lambda *args: None), **kwargs\n ).output\n", "path": "python-sdk/src/astro/sql/__init__.py"}], "after_files": [{"content": "from airflow.configuration import conf\nfrom airflow.decorators.base import get_unique_task_id\nfrom airflow.models.xcom_arg import XComArg\n\nfrom astro.sql.operators.append import AppendOperator, append\nfrom astro.sql.operators.cleanup import CleanupOperator, cleanup\nfrom astro.sql.operators.dataframe import DataframeOperator, dataframe\nfrom astro.sql.operators.drop import DropTableOperator, drop_table\nfrom astro.sql.operators.export_file import ExportFileOperator, export_file\nfrom astro.sql.operators.load_file import LoadFileOperator, load_file\nfrom astro.sql.operators.merge import MergeOperator, merge\nfrom astro.sql.operators.raw_sql import RawSQLOperator, run_raw_sql\nfrom astro.sql.operators.transform import TransformOperator, transform, transform_file\nfrom astro.table import Metadata, Table\n\n__all__ = [\n \"AppendOperator\",\n \"append\",\n \"CleanupOperator\",\n \"cleanup\",\n \"DataframeOperator\",\n \"dataframe\",\n \"DropTableOperator\",\n \"drop_table\",\n \"ExportFileOperator\",\n \"export_file\",\n \"get_value_list\",\n \"LoadFileOperator\",\n \"load_file\",\n \"MergeOperator\",\n \"merge\",\n \"Metadata\",\n \"run_raw_sql\",\n \"Table\",\n \"TransformOperator\",\n \"transform_file\",\n \"transform\",\n]\n\n\ndef get_value_list(sql: str, conn_id: str, **kwargs) -> XComArg:\n \"\"\"\n Execute a sql statement and return the result.\n By default, the response size is less than equal to value of ``max_map_length`` conf.\n You can call a callable handler to alter the response by default it call ``fetchall`` on database result set.\n\n\n :param sql: sql query to execute.\n If the sql query will return huge number of row then it can overload the XCOM.\n also, If you are using output of this method to expand a task using dynamic task map then\n it can create lots of parallel task. So it is advisable to limit your sql query statement.\n :param conn_id: Airflow connection id. This connection id will be used to identify the database client\n and connect with it at runtime\n \"\"\"\n handler = kwargs.get(\"handler\") or (lambda result_set: result_set.fetchall())\n max_map_length = int(conf.get(section=\"core\", key=\"max_map_length\"))\n op_kwargs = {\n \"handler\": handler,\n \"response_limit\": max_map_length,\n }\n task_id = kwargs.get(\"task_id\") or get_unique_task_id(\n \"get_value_list\", dag=kwargs.get(\"dag\"), task_group=kwargs.get(\"task_group\")\n )\n kwargs.update({\"task_id\": task_id})\n return RawSQLOperator(\n sql=sql, conn_id=conn_id, op_kwargs=op_kwargs, python_callable=(lambda *args: None), **kwargs\n ).output\n", "path": "python-sdk/src/astro/sql/__init__.py"}]} |
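The one-line fix in this record adds `get_value_list` to the module's `__all__`. As a rough sketch of what `__all__` controls — assuming tooling that honours it, and using an invented module name — only the listed names are treated as the module's public, documentable surface:

```python
# mypkg_ops.py -- hypothetical module
__all__ = ["public_op"]  # names listed here form the public surface


def public_op():
    """Exported: 'from mypkg_ops import *' and __all__-aware doc tooling include it."""


def helper_op():
    """Omitted from __all__, so wildcard imports skip it and API-reference
    generators that honour __all__ will not document it."""
```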
gh_patches_debug_158 | rasdani/github-patches | git_diff | pex-tool__pex-991 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.12
On the docket:
- [x] A PEX_EXTRA_SYS_PATH runtime variable #989
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = '2.1.11'
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = '2.1.11'
+__version__ = '2.1.12'
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = '2.1.11'\n+__version__ = '2.1.12'\n", "issue": "Release 2.1.12\nOn the docket:\r\n- [x] A PEX_EXTRA_SYS_PATH runtime variable #989 \n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.1.11'\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = '2.1.12'\n", "path": "pex/version.py"}]} |
gh_patches_debug_159 | rasdani/github-patches | git_diff | TOMToolkit__tom_base-196 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Missing dataclasses
Following the tom_base install instructions, I pip installed the requirements.txt and then tried
> ./manage.py migrate
which ended with the following error:
File "<frozen importlib._bootstrap_external>", line 678, in exec_module
File "<frozen importlib._bootstrap>", line 205, in _call_with_frames_removed
File "/Users/rstreet/software/tom_base/tom_alerts/urls.py", line 3, in <module>
from tom_alerts.views import BrokerQueryCreateView, BrokerQueryListView, BrokerQueryUpdateView, RunQueryView
File "/Users/rstreet/software/tom_base/tom_alerts/views.py", line 3, in <module>
from tom_alerts.alerts import get_service_class, get_service_classes
File "/Users/rstreet/software/tom_base/tom_alerts/alerts.py", line 5, in <module>
from dataclasses import dataclass
ModuleNotFoundError: No module named 'dataclasses'
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 from setuptools import setup, find_packages
2 from os import path
3
4 here = path.abspath(path.dirname(__file__))
5 with open(path.join(here, 'README.md'), encoding='utf-8') as f:
6 long_description = f.read()
7
8 setup(
9 name='tomtoolkit',
10 version='1.1.0',
11 description='The TOM Toolkit and base modules',
12 long_description=long_description,
13 long_description_content_type='text/markdown',
14 url='https://tomtoolkit.github.io',
15 author='TOM Toolkit Project',
16 author_email='[email protected]',
17 classifiers=[
18 'Development Status :: 3 - Alpha',
19 'Intended Audience :: Science/Research',
20 'License :: OSI Approved :: BSD License',
21 'Operating System :: OS Independent',
22 'Programming Language :: Python :: 3',
23 'Programming Language :: Python :: 3.7',
24 'Topic :: Scientific/Engineering :: Astronomy',
25 'Topic :: Scientific/Engineering :: Physics'
26 ],
27 keywords=['tomtoolkit', 'astronomy', 'astrophysics', 'cosmology', 'science', 'fits', 'observatory'],
28 packages=find_packages(),
29 install_requires=[
30 'django',
31 'django-bootstrap4',
32 'django-extensions',
33 'django-filter',
34 'django-contrib-comments',
35 'django-gravatar2',
36 'django-crispy-forms',
37 'django-guardian',
38 'numpy',
39 'python-dateutil',
40 'requests',
41 'astroquery',
42 'astropy',
43 'astroplan',
44 'plotly',
45 'matplotlib',
46 'pillow',
47 'fits2image',
48 'specutils',
49 ],
50 extras_require={
51 'test': ['factory_boy']
52 },
53 include_package_data=True,
54 )
55
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -46,6 +46,7 @@
'pillow',
'fits2image',
'specutils',
+ "dataclasses; python_version < '3.7'",
],
extras_require={
'test': ['factory_boy']
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -46,6 +46,7 @@\n 'pillow',\n 'fits2image',\n 'specutils',\n+ \"dataclasses; python_version < '3.7'\",\n ],\n extras_require={\n 'test': ['factory_boy']\n", "issue": "Missing dataclasses\nFollowing the tom_base install instructions, I pip installed the requirements.txt and then tried \r\n> ./manage.py migrate\r\n\r\nwhich ended with the following error:\r\n File \"<frozen importlib._bootstrap_external>\", line 678, in exec_module\r\n File \"<frozen importlib._bootstrap>\", line 205, in _call_with_frames_removed\r\n File \"/Users/rstreet/software/tom_base/tom_alerts/urls.py\", line 3, in <module>\r\n from tom_alerts.views import BrokerQueryCreateView, BrokerQueryListView, BrokerQueryUpdateView, RunQueryView\r\n File \"/Users/rstreet/software/tom_base/tom_alerts/views.py\", line 3, in <module>\r\n from tom_alerts.alerts import get_service_class, get_service_classes\r\n File \"/Users/rstreet/software/tom_base/tom_alerts/alerts.py\", line 5, in <module>\r\n from dataclasses import dataclass\r\nModuleNotFoundError: No module named 'dataclasses'\r\n\n", "before_files": [{"content": "from setuptools import setup, find_packages\nfrom os import path\n\nhere = path.abspath(path.dirname(__file__))\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\nsetup(\n name='tomtoolkit',\n version='1.1.0',\n description='The TOM Toolkit and base modules',\n long_description=long_description,\n long_description_content_type='text/markdown',\n url='https://tomtoolkit.github.io',\n author='TOM Toolkit Project',\n author_email='[email protected]',\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Astronomy',\n 'Topic :: Scientific/Engineering :: Physics'\n ],\n keywords=['tomtoolkit', 'astronomy', 'astrophysics', 'cosmology', 'science', 'fits', 'observatory'],\n packages=find_packages(),\n install_requires=[\n 'django',\n 'django-bootstrap4',\n 'django-extensions',\n 'django-filter',\n 'django-contrib-comments',\n 'django-gravatar2',\n 'django-crispy-forms',\n 'django-guardian',\n 'numpy',\n 'python-dateutil',\n 'requests',\n 'astroquery',\n 'astropy',\n 'astroplan',\n 'plotly',\n 'matplotlib',\n 'pillow',\n 'fits2image',\n 'specutils',\n ],\n extras_require={\n 'test': ['factory_boy']\n },\n include_package_data=True,\n)\n", "path": "setup.py"}], "after_files": [{"content": "from setuptools import setup, find_packages\nfrom os import path\n\nhere = path.abspath(path.dirname(__file__))\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\nsetup(\n name='tomtoolkit',\n version='1.1.0',\n description='The TOM Toolkit and base modules',\n long_description=long_description,\n long_description_content_type='text/markdown',\n url='https://tomtoolkit.github.io',\n author='TOM Toolkit Project',\n author_email='[email protected]',\n classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: BSD License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.7',\n 'Topic :: Scientific/Engineering :: Astronomy',\n 'Topic :: Scientific/Engineering :: Physics'\n ],\n 
keywords=['tomtoolkit', 'astronomy', 'astrophysics', 'cosmology', 'science', 'fits', 'observatory'],\n packages=find_packages(),\n install_requires=[\n 'django',\n 'django-bootstrap4',\n 'django-extensions',\n 'django-filter',\n 'django-contrib-comments',\n 'django-gravatar2',\n 'django-crispy-forms',\n 'django-guardian',\n 'numpy',\n 'python-dateutil',\n 'requests',\n 'astroquery',\n 'astropy',\n 'astroplan',\n 'plotly',\n 'matplotlib',\n 'pillow',\n 'fits2image',\n 'specutils',\n \"dataclasses; python_version < '3.7'\",\n ],\n extras_require={\n 'test': ['factory_boy']\n },\n include_package_data=True,\n)\n", "path": "setup.py"}]} |
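The patch here relies on a PEP 508 environment marker so the `dataclasses` backport is pulled in only on interpreters that lack the stdlib module. A minimal, hypothetical `setup.py` showing the same pattern:

```python
from setuptools import setup, find_packages

setup(
    name="example-package",  # hypothetical project name
    version="0.1.0",
    packages=find_packages(),
    install_requires=[
        "requests",
        # PEP 508 environment marker: the backport is only installed on
        # interpreters older than 3.7, where stdlib 'dataclasses' is missing.
        "dataclasses; python_version < '3.7'",
    ],
)
```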
gh_patches_debug_160 | rasdani/github-patches | git_diff | pex-tool__pex-1590 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.64
On the docket:
+ [x] Pex does not support mac universal2 wheels #1587
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.63"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.63"
+__version__ = "2.1.64"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.63\"\n+__version__ = \"2.1.64\"\n", "issue": "Release 2.1.64\nOn the docket:\r\n+ [x] Pex does not support mac universal2 wheels #1587 \r\n\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.63\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.64\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_161 | rasdani/github-patches | git_diff | pex-tool__pex-1692 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.74
On the docket:
+ [x] Add support for locking VCS requirements. (#1687)
+ [x] Fix `--lock` for multiplatform via sdists. (#1689)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.73"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.73"
+__version__ = "2.1.74"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.73\"\n+__version__ = \"2.1.74\"\n", "issue": "Release 2.1.74\nOn the docket:\r\n+ [x] Add support for locking VCS requirements. (#1687)\r\n+ [x] Fix `--lock` for multiplatform via sdists. (#1689)\r\n\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.73\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.74\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_162 | rasdani/github-patches | git_diff | liqd__a4-meinberlin-382 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Order of poll answer choices mixed up after saving
The order of poll answer choices is mixed up after saving. Restoring the original order is not possible:

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `apps/polls/models.py`
Content:
```
1 from django.contrib.contenttypes.fields import GenericRelation
2 from django.db import models
3
4 from adhocracy4.comments import models as comment_models
5 from adhocracy4.models.base import UserGeneratedContentModel
6 from adhocracy4.modules import models as module_models
7
8 from . import validators
9
10
11 class Poll(module_models.Item):
12 comments = GenericRelation(comment_models.Comment,
13 related_query_name='poll',
14 object_id_field='object_pk')
15
16
17 class Question(models.Model):
18 label = models.CharField(max_length=255)
19 weight = models.SmallIntegerField()
20
21 poll = models.ForeignKey(
22 'Poll',
23 on_delete=models.CASCADE,
24 related_name='questions'
25 )
26
27 def user_choices_list(self, user):
28 if not user.is_authenticated():
29 return []
30
31 return self.choices\
32 .filter(votes__creator=user)\
33 .values_list('id', flat=True)
34
35 def __str__(self):
36 return self.label
37
38 class Meta:
39 ordering = ['weight']
40
41
42 class ChoiceQuerySet(models.QuerySet):
43
44 def annotate_vote_count(self):
45 return self.annotate(
46 vote_count=models.Count(
47 'votes'
48 )
49 )
50
51
52 class Choice(models.Model):
53 label = models.CharField(max_length=255)
54
55 question = models.ForeignKey(
56 'Question',
57 on_delete=models.CASCADE,
58 related_name='choices',
59 )
60
61 objects = ChoiceQuerySet.as_manager()
62
63 def __str__(self):
64 return '%s @%s' % (self.label, self.question)
65
66
67 class Vote(UserGeneratedContentModel):
68 choice = models.ForeignKey(
69 'Choice',
70 on_delete=models.CASCADE,
71 related_name='votes'
72 )
73
74 def validate_unique(self, exclude=None):
75 super(Vote, self).validate_unique(exclude)
76 validators.single_vote_per_user(self.creator,
77 self.choice.question,
78 self.pk)
79
80 # Make Vote instances behave like items for rule checking
81 @property
82 def module(self):
83 self.choice.question.poll.module
84
85 @property
86 def project(self):
87 return self.module.project
88
89 def __str__(self):
90 return '%s: %s' % (self.creator, self.choice)
91
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/apps/polls/models.py b/apps/polls/models.py
--- a/apps/polls/models.py
+++ b/apps/polls/models.py
@@ -60,6 +60,9 @@
objects = ChoiceQuerySet.as_manager()
+ class Meta:
+ ordering = ['id']
+
def __str__(self):
return '%s @%s' % (self.label, self.question)
| {"golden_diff": "diff --git a/apps/polls/models.py b/apps/polls/models.py\n--- a/apps/polls/models.py\n+++ b/apps/polls/models.py\n@@ -60,6 +60,9 @@\n \n objects = ChoiceQuerySet.as_manager()\n \n+ class Meta:\n+ ordering = ['id']\n+\n def __str__(self):\n return '%s @%s' % (self.label, self.question)\n", "issue": "Order of poll answer choices mixed up after saving\nThe order of poll answer choices is mixed up after saving. Restoring original order is not possible:\r\n\r\n\n", "before_files": [{"content": "from django.contrib.contenttypes.fields import GenericRelation\nfrom django.db import models\n\nfrom adhocracy4.comments import models as comment_models\nfrom adhocracy4.models.base import UserGeneratedContentModel\nfrom adhocracy4.modules import models as module_models\n\nfrom . import validators\n\n\nclass Poll(module_models.Item):\n comments = GenericRelation(comment_models.Comment,\n related_query_name='poll',\n object_id_field='object_pk')\n\n\nclass Question(models.Model):\n label = models.CharField(max_length=255)\n weight = models.SmallIntegerField()\n\n poll = models.ForeignKey(\n 'Poll',\n on_delete=models.CASCADE,\n related_name='questions'\n )\n\n def user_choices_list(self, user):\n if not user.is_authenticated():\n return []\n\n return self.choices\\\n .filter(votes__creator=user)\\\n .values_list('id', flat=True)\n\n def __str__(self):\n return self.label\n\n class Meta:\n ordering = ['weight']\n\n\nclass ChoiceQuerySet(models.QuerySet):\n\n def annotate_vote_count(self):\n return self.annotate(\n vote_count=models.Count(\n 'votes'\n )\n )\n\n\nclass Choice(models.Model):\n label = models.CharField(max_length=255)\n\n question = models.ForeignKey(\n 'Question',\n on_delete=models.CASCADE,\n related_name='choices',\n )\n\n objects = ChoiceQuerySet.as_manager()\n\n def __str__(self):\n return '%s @%s' % (self.label, self.question)\n\n\nclass Vote(UserGeneratedContentModel):\n choice = models.ForeignKey(\n 'Choice',\n on_delete=models.CASCADE,\n related_name='votes'\n )\n\n def validate_unique(self, exclude=None):\n super(Vote, self).validate_unique(exclude)\n validators.single_vote_per_user(self.creator,\n self.choice.question,\n self.pk)\n\n # Make Vote instances behave like items for rule checking\n @property\n def module(self):\n self.choice.question.poll.module\n\n @property\n def project(self):\n return self.module.project\n\n def __str__(self):\n return '%s: %s' % (self.creator, self.choice)\n", "path": "apps/polls/models.py"}], "after_files": [{"content": "from django.contrib.contenttypes.fields import GenericRelation\nfrom django.db import models\n\nfrom adhocracy4.comments import models as comment_models\nfrom adhocracy4.models.base import UserGeneratedContentModel\nfrom adhocracy4.modules import models as module_models\n\nfrom . 
import validators\n\n\nclass Poll(module_models.Item):\n comments = GenericRelation(comment_models.Comment,\n related_query_name='poll',\n object_id_field='object_pk')\n\n\nclass Question(models.Model):\n label = models.CharField(max_length=255)\n weight = models.SmallIntegerField()\n\n poll = models.ForeignKey(\n 'Poll',\n on_delete=models.CASCADE,\n related_name='questions'\n )\n\n def user_choices_list(self, user):\n if not user.is_authenticated():\n return []\n\n return self.choices\\\n .filter(votes__creator=user)\\\n .values_list('id', flat=True)\n\n def __str__(self):\n return self.label\n\n class Meta:\n ordering = ['weight']\n\n\nclass ChoiceQuerySet(models.QuerySet):\n\n def annotate_vote_count(self):\n return self.annotate(\n vote_count=models.Count(\n 'votes'\n )\n )\n\n\nclass Choice(models.Model):\n label = models.CharField(max_length=255)\n\n question = models.ForeignKey(\n 'Question',\n on_delete=models.CASCADE,\n related_name='choices',\n )\n\n objects = ChoiceQuerySet.as_manager()\n\n class Meta:\n ordering = ['id']\n\n def __str__(self):\n return '%s @%s' % (self.label, self.question)\n\n\nclass Vote(UserGeneratedContentModel):\n choice = models.ForeignKey(\n 'Choice',\n on_delete=models.CASCADE,\n related_name='votes'\n )\n\n def validate_unique(self, exclude=None):\n super(Vote, self).validate_unique(exclude)\n validators.single_vote_per_user(self.creator,\n self.choice.question,\n self.pk)\n\n # Make Vote instances behave like items for rule checking\n @property\n def module(self):\n self.choice.question.poll.module\n\n @property\n def project(self):\n return self.module.project\n\n def __str__(self):\n return '%s: %s' % (self.creator, self.choice)\n", "path": "apps/polls/models.py"}]} |
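The fix for this record pins a default ordering on the `Choice` model so querysets (and the forms built from them) come back in a stable order. A stripped-down sketch of the idea, with unrelated fields omitted:

```python
from django.db import models


class Choice(models.Model):
    label = models.CharField(max_length=255)

    class Meta:
        # Without a default ordering the database may return rows in any
        # order; ordering by primary key keeps the creation order stable.
        ordering = ["id"]
```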
gh_patches_debug_163 | rasdani/github-patches | git_diff | pex-tool__pex-1112 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.21
On the docket:
+ [x] "FileNotFoundError: [Errno 2] No such file or directory" in pex #1098
+ [x] Unclosed resource warning for `/dev/null` in PEX teardown. #1101
+ [x] Remove `--sources-directory` / `--resources-directory` distinction. #1100
+ [x] Invalid requirement, parse error at "'python_v' #940
+ [x] Pex skipping pandas activation #1017
+ [x] Changing vendored versions does not fully clean up previous version #1096
+ [x] Pex discards the current interpreter's PATH entry when it is a directory entry. #1109
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.20"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.20"
+__version__ = "2.1.21"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.20\"\n+__version__ = \"2.1.21\"\n", "issue": "Release 2.1.21\nOn the docket:\r\n+ [x] \"FileNotFoundError: [Errno 2] No such file or directory\" in pex #1098\r\n+ [x] Unclosed resource warning for `/dev/null` in PEX teardown. #1101\r\n+ [x] Remove `--sources-directory` / `--resources-directory` distinction. #1100\r\n+ [x] Invalid requirement, parse error at \"'python_v' #940\r\n+ [x] Pex skipping pandas activation #1017\r\n+ [x] Changing vendored versions does not fully clean up previous version #1096\r\n+ [x] Pex discards the current interpreter's PATH entry when it is a directory entry. #1109\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.20\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.21\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_164 | rasdani/github-patches | git_diff | pex-tool__pex-1720 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.79
On the docket:
+ [x] The --lock resolver only includes extras from the 1st encounter of a required project in its graph walk. #1717
+ [x] Support canonicalizing absolute paths in locks. (#1716)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.78"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.78"
+__version__ = "2.1.79"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.78\"\n+__version__ = \"2.1.79\"\n", "issue": "Release 2.1.79\nOn the docket:\r\n+ [x] The --lock resolver only includes extras from the 1st encounter of a required project in its graph walk. #1717 \r\n+ [x] Support canonicalizing absolute paths in locks. (#1716)\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.78\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.79\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_165 | rasdani/github-patches | git_diff | pex-tool__pex-1516 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.55
On the docket:
+ [x] Add official support for Python 3.10 (#1512)
+ [x] Always register global options. (#1511)
+ [x] Fix RTD generation by pinning docutils low. (#1509)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.54"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.54"
+__version__ = "2.1.55"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.54\"\n+__version__ = \"2.1.55\"\n", "issue": "Release 2.1.55\nOn the docket:\r\n+ [x] Add official support for Python 3.10 (#1512)\r\n+ [x] Always register global options. (#1511)\r\n+ [x] Fix RTD generation by pinning docutils low. (#1509)\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.54\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.55\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_166 | rasdani/github-patches | git_diff | pex-tool__pex-1725 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.80
On the docket:
+ [x] Support booting via `/bin/sh` with `--sh-boot`. (#1721)
+ [x] Fix more pathologic lock creation slowness. (#1723)
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.79"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.79"
+__version__ = "2.1.80"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.79\"\n+__version__ = \"2.1.80\"\n", "issue": "Release 2.1.80\nOn the docket:\r\n+ [x] Support booting via `/bin/sh` with `--sh-boot`. (#1721)\r\n+ [x] Fix more pathologic lock creation slowness. (#1723)\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.79\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.80\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_167 | rasdani/github-patches | git_diff | pex-tool__pex-1442 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.48
On the docket:
+ [x] Remove zipapp execution mode & introduce --layout. #1438
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.47"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.47"
+__version__ = "2.1.48"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.47\"\n+__version__ = \"2.1.48\"\n", "issue": "Release 2.1.48\nOn the docket:\r\n+ [x] Remove zipapp execution mode & introduce --layout. #1438 \n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.47\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.48\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_168 | rasdani/github-patches | git_diff | searxng__searxng-2862 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: bilibili engine is broken
<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF SearXNG -->
Something has changed, and now some fixes are needed to use the API successfully.
**Version of SearXNG, commit number if you are using on master branch and stipulate if you forked SearXNG**
Repository: https://github.com/searxng/searxng
Branch: master
Version: 2023.9.27+1a66d7467+dirty
<!-- If you are running on master branch using git execute this command
in order to fetch the latest commit ID:
```
git log -1
```
If you are using searxng-docker then look at the bottom of the SearXNG page
and check for the version after "Powered by SearXNG"
Please also stipulate if you are using a forked version of SearXNG and
include a link to the fork source code.
-->
**How did you install SearXNG?**
make run
<!-- Did you install SearXNG using the official wiki or using searxng-docker
or manually by executing the searx/webapp.py file? -->
**What happened?**
<!-- A clear and concise description of what the bug is. -->
**How To Reproduce**
<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Screenshots & Logs**
<!-- If applicable, add screenshots, logs to help explain your problem. -->
**Additional context**
<!-- Add any other context about the problem here. -->
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/engines/bilibili.py`
Content:
```
1 # SPDX-License-Identifier: AGPL-3.0-or-later
2 # lint: pylint
3 """Bilibili is a Chinese video sharing website.
4
5 .. _Bilibili: https://www.bilibili.com
6 """
7
8 import random
9 import string
10 from urllib.parse import urlencode
11 from datetime import datetime, timedelta
12
13 # Engine metadata
14 about = {
15 "website": "https://www.bilibili.com",
16 "wikidata_id": "Q3077586",
17 "official_api_documentation": None,
18 "use_official_api": False,
19 "require_api_key": False,
20 "results": "JSON",
21 }
22
23 # Engine configuration
24 paging = True
25 results_per_page = 20
26 categories = ["videos"]
27
28 # Search URL
29 base_url = "https://api.bilibili.com/x/web-interface/wbi/search/type"
30
31 cookie = {
32 "innersign": "0",
33 "buvid3": "".join(random.choice(string.hexdigits) for _ in range(16)) + "infoc",
34 "i-wanna-go-back": "-1",
35 "b_ut": "7",
36 "FEED_LIVE_VERSION": "V8",
37 "header_theme_version": "undefined",
38 "home_feed_column": "4",
39 }
40
41
42 def request(query, params):
43 query_params = {
44 "__refresh__": "true",
45 "page": params["pageno"],
46 "page_size": results_per_page,
47 "single_column": "0",
48 "keyword": query,
49 "search_type": "video",
50 }
51
52 params["url"] = f"{base_url}?{urlencode(query_params)}"
53 params["cookies"] = cookie
54
55 return params
56
57
58 # Format the video duration
59 def format_duration(duration):
60 minutes, seconds = map(int, duration.split(":"))
61 total_seconds = minutes * 60 + seconds
62
63 formatted_duration = str(timedelta(seconds=total_seconds))[2:] if 0 <= total_seconds < 3600 else ""
64
65 return formatted_duration
66
67
68 def response(resp):
69 search_res = resp.json()
70
71 results = []
72
73 for item in search_res.get("data", {}).get("result", []):
74 title = item["title"]
75 url = item["arcurl"]
76 thumbnail = item["pic"]
77 description = item["description"]
78 author = item["author"]
79 video_id = item["aid"]
80 unix_date = item["pubdate"]
81
82 formatted_date = datetime.utcfromtimestamp(unix_date)
83 formatted_duration = format_duration(item["duration"])
84 iframe_url = f"https://player.bilibili.com/player.html?aid={video_id}&high_quality=1&autoplay=false&danmaku=0"
85
86 results.append(
87 {
88 "title": title,
89 "url": url,
90 "content": description,
91 "author": author,
92 "publishedDate": formatted_date,
93 "length": formatted_duration,
94 "thumbnail": thumbnail,
95 "iframe_src": iframe_url,
96 "template": "videos.html",
97 }
98 )
99
100 return results
101
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/searx/engines/bilibili.py b/searx/engines/bilibili.py
--- a/searx/engines/bilibili.py
+++ b/searx/engines/bilibili.py
@@ -26,7 +26,7 @@
categories = ["videos"]
# Search URL
-base_url = "https://api.bilibili.com/x/web-interface/wbi/search/type"
+base_url = "https://api.bilibili.com/x/web-interface/search/type"
cookie = {
"innersign": "0",
| {"golden_diff": "diff --git a/searx/engines/bilibili.py b/searx/engines/bilibili.py\n--- a/searx/engines/bilibili.py\n+++ b/searx/engines/bilibili.py\n@@ -26,7 +26,7 @@\n categories = [\"videos\"]\n \n # Search URL\n-base_url = \"https://api.bilibili.com/x/web-interface/wbi/search/type\"\n+base_url = \"https://api.bilibili.com/x/web-interface/search/type\"\n \n cookie = {\n \"innersign\": \"0\",\n", "issue": "Bug: bilibili engine is broken\n<!-- PLEASE FILL THESE FIELDS, IT REALLY HELPS THE MAINTAINERS OF SearXNG -->\r\n\r\nSomething has changed, and now some fixes are needed to use the api successfully.\r\n\r\n**Version of SearXNG, commit number if you are using on master branch and stipulate if you forked SearXNG**\r\nRepository: https://github.com/searxng/searxng\r\nBranch: master\r\nVersion: 2023.9.27+1a66d7467+dirty\r\n<!-- If you are running on master branch using git execute this command\r\nin order to fetch the latest commit ID:\r\n```\r\ngit log -1\r\n``` \r\nIf you are using searxng-docker then look at the bottom of the SearXNG page\r\nand check for the version after \"Powered by SearXNG\"\r\n\r\nPlease also stipulate if you are using a forked version of SearXNG and\r\ninclude a link to the fork source code.\r\n-->\r\n**How did you install SearXNG?**\r\nmake run\r\n<!-- Did you install SearXNG using the official wiki or using searxng-docker\r\nor manually by executing the searx/webapp.py file? -->\r\n**What happened?**\r\n<!-- A clear and concise description of what the bug is. -->\r\n\r\n**How To Reproduce**\r\n<!-- How can we reproduce this issue? (as minimally and as precisely as possible) -->\r\n\r\n**Expected behavior**\r\n<!-- A clear and concise description of what you expected to happen. -->\r\n\r\n**Screenshots & Logs**\r\n<!-- If applicable, add screenshots, logs to help explain your problem. -->\r\n\r\n**Additional context**\r\n<!-- Add any other context about the problem here. -->\r\n\n", "before_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n# lint: pylint\n\"\"\"Bilibili is a Chinese video sharing website.\n\n.. 
_Bilibili: https://www.bilibili.com\n\"\"\"\n\nimport random\nimport string\nfrom urllib.parse import urlencode\nfrom datetime import datetime, timedelta\n\n# Engine metadata\nabout = {\n \"website\": \"https://www.bilibili.com\",\n \"wikidata_id\": \"Q3077586\",\n \"official_api_documentation\": None,\n \"use_official_api\": False,\n \"require_api_key\": False,\n \"results\": \"JSON\",\n}\n\n# Engine configuration\npaging = True\nresults_per_page = 20\ncategories = [\"videos\"]\n\n# Search URL\nbase_url = \"https://api.bilibili.com/x/web-interface/wbi/search/type\"\n\ncookie = {\n \"innersign\": \"0\",\n \"buvid3\": \"\".join(random.choice(string.hexdigits) for _ in range(16)) + \"infoc\",\n \"i-wanna-go-back\": \"-1\",\n \"b_ut\": \"7\",\n \"FEED_LIVE_VERSION\": \"V8\",\n \"header_theme_version\": \"undefined\",\n \"home_feed_column\": \"4\",\n}\n\n\ndef request(query, params):\n query_params = {\n \"__refresh__\": \"true\",\n \"page\": params[\"pageno\"],\n \"page_size\": results_per_page,\n \"single_column\": \"0\",\n \"keyword\": query,\n \"search_type\": \"video\",\n }\n\n params[\"url\"] = f\"{base_url}?{urlencode(query_params)}\"\n params[\"cookies\"] = cookie\n\n return params\n\n\n# Format the video duration\ndef format_duration(duration):\n minutes, seconds = map(int, duration.split(\":\"))\n total_seconds = minutes * 60 + seconds\n\n formatted_duration = str(timedelta(seconds=total_seconds))[2:] if 0 <= total_seconds < 3600 else \"\"\n\n return formatted_duration\n\n\ndef response(resp):\n search_res = resp.json()\n\n results = []\n\n for item in search_res.get(\"data\", {}).get(\"result\", []):\n title = item[\"title\"]\n url = item[\"arcurl\"]\n thumbnail = item[\"pic\"]\n description = item[\"description\"]\n author = item[\"author\"]\n video_id = item[\"aid\"]\n unix_date = item[\"pubdate\"]\n\n formatted_date = datetime.utcfromtimestamp(unix_date)\n formatted_duration = format_duration(item[\"duration\"])\n iframe_url = f\"https://player.bilibili.com/player.html?aid={video_id}&high_quality=1&autoplay=false&danmaku=0\"\n\n results.append(\n {\n \"title\": title,\n \"url\": url,\n \"content\": description,\n \"author\": author,\n \"publishedDate\": formatted_date,\n \"length\": formatted_duration,\n \"thumbnail\": thumbnail,\n \"iframe_src\": iframe_url,\n \"template\": \"videos.html\",\n }\n )\n\n return results\n", "path": "searx/engines/bilibili.py"}], "after_files": [{"content": "# SPDX-License-Identifier: AGPL-3.0-or-later\n# lint: pylint\n\"\"\"Bilibili is a Chinese video sharing website.\n\n.. 
_Bilibili: https://www.bilibili.com\n\"\"\"\n\nimport random\nimport string\nfrom urllib.parse import urlencode\nfrom datetime import datetime, timedelta\n\n# Engine metadata\nabout = {\n \"website\": \"https://www.bilibili.com\",\n \"wikidata_id\": \"Q3077586\",\n \"official_api_documentation\": None,\n \"use_official_api\": False,\n \"require_api_key\": False,\n \"results\": \"JSON\",\n}\n\n# Engine configuration\npaging = True\nresults_per_page = 20\ncategories = [\"videos\"]\n\n# Search URL\nbase_url = \"https://api.bilibili.com/x/web-interface/search/type\"\n\ncookie = {\n \"innersign\": \"0\",\n \"buvid3\": \"\".join(random.choice(string.hexdigits) for _ in range(16)) + \"infoc\",\n \"i-wanna-go-back\": \"-1\",\n \"b_ut\": \"7\",\n \"FEED_LIVE_VERSION\": \"V8\",\n \"header_theme_version\": \"undefined\",\n \"home_feed_column\": \"4\",\n}\n\n\ndef request(query, params):\n query_params = {\n \"__refresh__\": \"true\",\n \"page\": params[\"pageno\"],\n \"page_size\": results_per_page,\n \"single_column\": \"0\",\n \"keyword\": query,\n \"search_type\": \"video\",\n }\n\n params[\"url\"] = f\"{base_url}?{urlencode(query_params)}\"\n params[\"cookies\"] = cookie\n\n return params\n\n\n# Format the video duration\ndef format_duration(duration):\n minutes, seconds = map(int, duration.split(\":\"))\n total_seconds = minutes * 60 + seconds\n\n formatted_duration = str(timedelta(seconds=total_seconds))[2:] if 0 <= total_seconds < 3600 else \"\"\n\n return formatted_duration\n\n\ndef response(resp):\n search_res = resp.json()\n\n results = []\n\n for item in search_res.get(\"data\", {}).get(\"result\", []):\n title = item[\"title\"]\n url = item[\"arcurl\"]\n thumbnail = item[\"pic\"]\n description = item[\"description\"]\n author = item[\"author\"]\n video_id = item[\"aid\"]\n unix_date = item[\"pubdate\"]\n\n formatted_date = datetime.utcfromtimestamp(unix_date)\n formatted_duration = format_duration(item[\"duration\"])\n iframe_url = f\"https://player.bilibili.com/player.html?aid={video_id}&high_quality=1&autoplay=false&danmaku=0\"\n\n results.append(\n {\n \"title\": title,\n \"url\": url,\n \"content\": description,\n \"author\": author,\n \"publishedDate\": formatted_date,\n \"length\": formatted_duration,\n \"thumbnail\": thumbnail,\n \"iframe_src\": iframe_url,\n \"template\": \"videos.html\",\n }\n )\n\n return results\n", "path": "searx/engines/bilibili.py"}]} |
gh_patches_debug_169 | rasdani/github-patches | git_diff | DDMAL__CantusDB-1077 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Admin area, change chant page: "title" field should be hidden
The "title" field is only used for sequences and never for chants, so this field should be hidden from the Chant Change page in the Admin area.
Debra sent us a message asking us what this field was for, so since this is a simple fix, we should hide this field before it causes further confusion.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django/cantusdb_project/main_app/admin.py`
Content:
```
1 from django.contrib import admin
2 from main_app.models import *
3 from main_app.forms import (
4 AdminCenturyForm,
5 AdminChantForm,
6 AdminFeastForm,
7 AdminGenreForm,
8 AdminNotationForm,
9 AdminOfficeForm,
10 AdminProvenanceForm,
11 AdminRismSiglumForm,
12 AdminSegmentForm,
13 AdminSequenceForm,
14 AdminSourceForm,
15 )
16
17 # these fields should not be editable by all classes
18 EXCLUDE = (
19 "created_by",
20 "last_updated_by",
21 "json_info",
22 )
23
24
25 class BaseModelAdmin(admin.ModelAdmin):
26 exclude = EXCLUDE
27
28 # if an object is created in the admin interface, assign the user to the created_by field
29 # else if an object is updated in the admin interface, assign the user to the last_updated_by field
30 def save_model(self, request, obj, form, change):
31 if change:
32 obj.last_updated_by = request.user
33 else:
34 obj.created_by = request.user
35 super().save_model(request, obj, form, change)
36
37
38 class CenturyAdmin(BaseModelAdmin):
39 search_fields = ("name",)
40 form = AdminCenturyForm
41
42
43 class ChantAdmin(BaseModelAdmin):
44 @admin.display(description="Source Siglum")
45 def get_source_siglum(self, obj):
46 if obj.source:
47 return obj.source.siglum
48
49 list_display = (
50 "incipit",
51 "get_source_siglum",
52 "genre",
53 )
54 search_fields = (
55 "title",
56 "incipit",
57 "cantus_id",
58 "id",
59 )
60
61 readonly_fields = (
62 "date_created",
63 "date_updated",
64 )
65
66 list_filter = (
67 "genre",
68 "office",
69 )
70 exclude = EXCLUDE + (
71 "col1",
72 "col2",
73 "col3",
74 "next_chant",
75 "s_sequence",
76 "is_last_chant_in_feast",
77 "visible_status",
78 "date",
79 "volpiano_notes",
80 "volpiano_intervals",
81 )
82 form = AdminChantForm
83 raw_id_fields = (
84 "source",
85 "feast",
86 )
87 ordering = ("source__siglum",)
88
89
90 class FeastAdmin(BaseModelAdmin):
91 search_fields = (
92 "name",
93 "feast_code",
94 )
95 list_display = (
96 "name",
97 "month",
98 "day",
99 "feast_code",
100 )
101 form = AdminFeastForm
102
103
104 class GenreAdmin(BaseModelAdmin):
105 search_fields = ("name",)
106 form = AdminGenreForm
107
108
109 class NotationAdmin(BaseModelAdmin):
110 search_fields = ("name",)
111 form = AdminNotationForm
112
113
114 class OfficeAdmin(BaseModelAdmin):
115 search_fields = ("name",)
116 form = AdminOfficeForm
117
118
119 class ProvenanceAdmin(BaseModelAdmin):
120 search_fields = ("name",)
121 form = AdminProvenanceForm
122
123
124 class RismSiglumAdmin(BaseModelAdmin):
125 search_fields = ("name",)
126 form = AdminRismSiglumForm
127
128
129 class SegmentAdmin(BaseModelAdmin):
130 search_fields = ("name",)
131 form = AdminSegmentForm
132
133
134 class SequenceAdmin(BaseModelAdmin):
135 @admin.display(description="Source Siglum")
136 def get_source_siglum(self, obj):
137 if obj.source:
138 return obj.source.siglum
139
140 search_fields = (
141 "title",
142 "incipit",
143 "cantus_id",
144 "id",
145 )
146 exclude = EXCLUDE + (
147 "c_sequence",
148 "next_chant",
149 "is_last_chant_in_feast",
150 "visible_status",
151 )
152 list_display = ("incipit", "get_source_siglum", "genre")
153 list_filter = (
154 "genre",
155 "office",
156 )
157 raw_id_fields = (
158 "source",
159 "feast",
160 )
161 ordering = ("source__siglum",)
162 form = AdminSequenceForm
163
164
165 class SourceAdmin(BaseModelAdmin):
166 # These search fields are also available on the user-source inline relationship in the user admin page
167 search_fields = (
168 "siglum",
169 "title",
170 "id",
171 )
172 readonly_fields = (
173 "number_of_chants",
174 "number_of_melodies",
175 "date_created",
176 "date_updated",
177 )
178 # from the Django docs:
179 # Adding a ManyToManyField to this list will instead use a nifty unobtrusive JavaScript “filter” interface
180 # that allows searching within the options. The unselected and selected options appear in two boxes side by side.
181 filter_horizontal = (
182 "century",
183 "notation",
184 "current_editors",
185 "inventoried_by",
186 "full_text_entered_by",
187 "melodies_entered_by",
188 "proofreaders",
189 "other_editors",
190 )
191
192 list_display = (
193 "title",
194 "siglum",
195 "id",
196 )
197
198 list_filter = (
199 "full_source",
200 "segment",
201 "source_status",
202 "published",
203 "century",
204 )
205
206 ordering = ("siglum",)
207
208 form = AdminSourceForm
209
210
211 admin.site.register(Century, CenturyAdmin)
212 admin.site.register(Chant, ChantAdmin)
213 admin.site.register(Feast, FeastAdmin)
214 admin.site.register(Genre, GenreAdmin)
215 admin.site.register(Notation, NotationAdmin)
216 admin.site.register(Office, OfficeAdmin)
217 admin.site.register(Provenance, ProvenanceAdmin)
218 admin.site.register(RismSiglum, RismSiglumAdmin)
219 admin.site.register(Segment, SegmentAdmin)
220 admin.site.register(Sequence, SequenceAdmin)
221 admin.site.register(Source, SourceAdmin)
222
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django/cantusdb_project/main_app/admin.py b/django/cantusdb_project/main_app/admin.py
--- a/django/cantusdb_project/main_app/admin.py
+++ b/django/cantusdb_project/main_app/admin.py
@@ -78,6 +78,7 @@
"date",
"volpiano_notes",
"volpiano_intervals",
+ "title",
)
form = AdminChantForm
raw_id_fields = (
| {"golden_diff": "diff --git a/django/cantusdb_project/main_app/admin.py b/django/cantusdb_project/main_app/admin.py\n--- a/django/cantusdb_project/main_app/admin.py\n+++ b/django/cantusdb_project/main_app/admin.py\n@@ -78,6 +78,7 @@\n \"date\",\n \"volpiano_notes\",\n \"volpiano_intervals\",\n+ \"title\",\n )\n form = AdminChantForm\n raw_id_fields = (\n", "issue": "Admin area, change chant page: \"title\" field should be hidden\nThe \"title\" field is only used for sequences and never for chants, so this field should be hidden from the Chant Change page in the Admin area.\r\n\r\nDebra sent us a message asking us what this field was for, so since this is a simple fix, we should hide this field before it causes further confusion.\n", "before_files": [{"content": "from django.contrib import admin\nfrom main_app.models import *\nfrom main_app.forms import (\n AdminCenturyForm,\n AdminChantForm,\n AdminFeastForm,\n AdminGenreForm,\n AdminNotationForm,\n AdminOfficeForm,\n AdminProvenanceForm,\n AdminRismSiglumForm,\n AdminSegmentForm,\n AdminSequenceForm,\n AdminSourceForm,\n)\n\n# these fields should not be editable by all classes\nEXCLUDE = (\n \"created_by\",\n \"last_updated_by\",\n \"json_info\",\n)\n\n\nclass BaseModelAdmin(admin.ModelAdmin):\n exclude = EXCLUDE\n\n # if an object is created in the admin interface, assign the user to the created_by field\n # else if an object is updated in the admin interface, assign the user to the last_updated_by field\n def save_model(self, request, obj, form, change):\n if change:\n obj.last_updated_by = request.user\n else:\n obj.created_by = request.user\n super().save_model(request, obj, form, change)\n\n\nclass CenturyAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminCenturyForm\n\n\nclass ChantAdmin(BaseModelAdmin):\n @admin.display(description=\"Source Siglum\")\n def get_source_siglum(self, obj):\n if obj.source:\n return obj.source.siglum\n\n list_display = (\n \"incipit\",\n \"get_source_siglum\",\n \"genre\",\n )\n search_fields = (\n \"title\",\n \"incipit\",\n \"cantus_id\",\n \"id\",\n )\n\n readonly_fields = (\n \"date_created\",\n \"date_updated\",\n )\n\n list_filter = (\n \"genre\",\n \"office\",\n )\n exclude = EXCLUDE + (\n \"col1\",\n \"col2\",\n \"col3\",\n \"next_chant\",\n \"s_sequence\",\n \"is_last_chant_in_feast\",\n \"visible_status\",\n \"date\",\n \"volpiano_notes\",\n \"volpiano_intervals\",\n )\n form = AdminChantForm\n raw_id_fields = (\n \"source\",\n \"feast\",\n )\n ordering = (\"source__siglum\",)\n\n\nclass FeastAdmin(BaseModelAdmin):\n search_fields = (\n \"name\",\n \"feast_code\",\n )\n list_display = (\n \"name\",\n \"month\",\n \"day\",\n \"feast_code\",\n )\n form = AdminFeastForm\n\n\nclass GenreAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminGenreForm\n\n\nclass NotationAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminNotationForm\n\n\nclass OfficeAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminOfficeForm\n\n\nclass ProvenanceAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminProvenanceForm\n\n\nclass RismSiglumAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminRismSiglumForm\n\n\nclass SegmentAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminSegmentForm\n\n\nclass SequenceAdmin(BaseModelAdmin):\n @admin.display(description=\"Source Siglum\")\n def get_source_siglum(self, obj):\n if obj.source:\n return obj.source.siglum\n\n search_fields = (\n \"title\",\n \"incipit\",\n 
\"cantus_id\",\n \"id\",\n )\n exclude = EXCLUDE + (\n \"c_sequence\",\n \"next_chant\",\n \"is_last_chant_in_feast\",\n \"visible_status\",\n )\n list_display = (\"incipit\", \"get_source_siglum\", \"genre\")\n list_filter = (\n \"genre\",\n \"office\",\n )\n raw_id_fields = (\n \"source\",\n \"feast\",\n )\n ordering = (\"source__siglum\",)\n form = AdminSequenceForm\n\n\nclass SourceAdmin(BaseModelAdmin):\n # These search fields are also available on the user-source inline relationship in the user admin page\n search_fields = (\n \"siglum\",\n \"title\",\n \"id\",\n )\n readonly_fields = (\n \"number_of_chants\",\n \"number_of_melodies\",\n \"date_created\",\n \"date_updated\",\n )\n # from the Django docs:\n # Adding a ManyToManyField to this list will instead use a nifty unobtrusive JavaScript \u201cfilter\u201d interface\n # that allows searching within the options. The unselected and selected options appear in two boxes side by side.\n filter_horizontal = (\n \"century\",\n \"notation\",\n \"current_editors\",\n \"inventoried_by\",\n \"full_text_entered_by\",\n \"melodies_entered_by\",\n \"proofreaders\",\n \"other_editors\",\n )\n\n list_display = (\n \"title\",\n \"siglum\",\n \"id\",\n )\n\n list_filter = (\n \"full_source\",\n \"segment\",\n \"source_status\",\n \"published\",\n \"century\",\n )\n\n ordering = (\"siglum\",)\n\n form = AdminSourceForm\n\n\nadmin.site.register(Century, CenturyAdmin)\nadmin.site.register(Chant, ChantAdmin)\nadmin.site.register(Feast, FeastAdmin)\nadmin.site.register(Genre, GenreAdmin)\nadmin.site.register(Notation, NotationAdmin)\nadmin.site.register(Office, OfficeAdmin)\nadmin.site.register(Provenance, ProvenanceAdmin)\nadmin.site.register(RismSiglum, RismSiglumAdmin)\nadmin.site.register(Segment, SegmentAdmin)\nadmin.site.register(Sequence, SequenceAdmin)\nadmin.site.register(Source, SourceAdmin)\n", "path": "django/cantusdb_project/main_app/admin.py"}], "after_files": [{"content": "from django.contrib import admin\nfrom main_app.models import *\nfrom main_app.forms import (\n AdminCenturyForm,\n AdminChantForm,\n AdminFeastForm,\n AdminGenreForm,\n AdminNotationForm,\n AdminOfficeForm,\n AdminProvenanceForm,\n AdminRismSiglumForm,\n AdminSegmentForm,\n AdminSequenceForm,\n AdminSourceForm,\n)\n\n# these fields should not be editable by all classes\nEXCLUDE = (\n \"created_by\",\n \"last_updated_by\",\n \"json_info\",\n)\n\n\nclass BaseModelAdmin(admin.ModelAdmin):\n exclude = EXCLUDE\n\n # if an object is created in the admin interface, assign the user to the created_by field\n # else if an object is updated in the admin interface, assign the user to the last_updated_by field\n def save_model(self, request, obj, form, change):\n if change:\n obj.last_updated_by = request.user\n else:\n obj.created_by = request.user\n super().save_model(request, obj, form, change)\n\n\nclass CenturyAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminCenturyForm\n\n\nclass ChantAdmin(BaseModelAdmin):\n @admin.display(description=\"Source Siglum\")\n def get_source_siglum(self, obj):\n if obj.source:\n return obj.source.siglum\n\n list_display = (\n \"incipit\",\n \"get_source_siglum\",\n \"genre\",\n )\n search_fields = (\n \"title\",\n \"incipit\",\n \"cantus_id\",\n \"id\",\n )\n\n readonly_fields = (\n \"date_created\",\n \"date_updated\",\n )\n\n list_filter = (\n \"genre\",\n \"office\",\n )\n exclude = EXCLUDE + (\n \"col1\",\n \"col2\",\n \"col3\",\n \"next_chant\",\n \"s_sequence\",\n \"is_last_chant_in_feast\",\n \"visible_status\",\n 
\"date\",\n \"volpiano_notes\",\n \"volpiano_intervals\",\n \"title\",\n )\n form = AdminChantForm\n raw_id_fields = (\n \"source\",\n \"feast\",\n )\n ordering = (\"source__siglum\",)\n\n\nclass FeastAdmin(BaseModelAdmin):\n search_fields = (\n \"name\",\n \"feast_code\",\n )\n list_display = (\n \"name\",\n \"month\",\n \"day\",\n \"feast_code\",\n )\n form = AdminFeastForm\n\n\nclass GenreAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminGenreForm\n\n\nclass NotationAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminNotationForm\n\n\nclass OfficeAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminOfficeForm\n\n\nclass ProvenanceAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminProvenanceForm\n\n\nclass RismSiglumAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminRismSiglumForm\n\n\nclass SegmentAdmin(BaseModelAdmin):\n search_fields = (\"name\",)\n form = AdminSegmentForm\n\n\nclass SequenceAdmin(BaseModelAdmin):\n @admin.display(description=\"Source Siglum\")\n def get_source_siglum(self, obj):\n if obj.source:\n return obj.source.siglum\n\n search_fields = (\n \"title\",\n \"incipit\",\n \"cantus_id\",\n \"id\",\n )\n exclude = EXCLUDE + (\n \"c_sequence\",\n \"next_chant\",\n \"is_last_chant_in_feast\",\n \"visible_status\",\n )\n list_display = (\"incipit\", \"get_source_siglum\", \"genre\")\n list_filter = (\n \"genre\",\n \"office\",\n )\n raw_id_fields = (\n \"source\",\n \"feast\",\n )\n ordering = (\"source__siglum\",)\n form = AdminSequenceForm\n\n\nclass SourceAdmin(BaseModelAdmin):\n # These search fields are also available on the user-source inline relationship in the user admin page\n search_fields = (\n \"siglum\",\n \"title\",\n \"id\",\n )\n readonly_fields = (\n \"number_of_chants\",\n \"number_of_melodies\",\n \"date_created\",\n \"date_updated\",\n )\n # from the Django docs:\n # Adding a ManyToManyField to this list will instead use a nifty unobtrusive JavaScript \u201cfilter\u201d interface\n # that allows searching within the options. The unselected and selected options appear in two boxes side by side.\n filter_horizontal = (\n \"century\",\n \"notation\",\n \"current_editors\",\n \"inventoried_by\",\n \"full_text_entered_by\",\n \"melodies_entered_by\",\n \"proofreaders\",\n \"other_editors\",\n )\n\n list_display = (\n \"title\",\n \"siglum\",\n \"id\",\n )\n\n list_filter = (\n \"full_source\",\n \"segment\",\n \"source_status\",\n \"published\",\n \"century\",\n )\n\n ordering = (\"siglum\",)\n\n form = AdminSourceForm\n\n\nadmin.site.register(Century, CenturyAdmin)\nadmin.site.register(Chant, ChantAdmin)\nadmin.site.register(Feast, FeastAdmin)\nadmin.site.register(Genre, GenreAdmin)\nadmin.site.register(Notation, NotationAdmin)\nadmin.site.register(Office, OfficeAdmin)\nadmin.site.register(Provenance, ProvenanceAdmin)\nadmin.site.register(RismSiglum, RismSiglumAdmin)\nadmin.site.register(Segment, SegmentAdmin)\nadmin.site.register(Sequence, SequenceAdmin)\nadmin.site.register(Source, SourceAdmin)\n", "path": "django/cantusdb_project/main_app/admin.py"}]} |
gh_patches_debug_170 | rasdani/github-patches | git_diff | pex-tool__pex-1896 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.104
On the docket:
+ [x] Pull in Pip fixes. #1805
+ [x] pex fails to overwrite zipapp pex with loose pex #1879
+ [x] Make lock update sensitive to artifacts. #1887
+ [x] PEXBuilder is not robust to ephemeral .pyc compiles. #1889
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.103"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.103"
+__version__ = "2.1.104"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.103\"\n+__version__ = \"2.1.104\"\n", "issue": "Release 2.1.104\nOn the docket:\r\n+ [x] Pull in Pip fixes. #1805\r\n+ [x] pex fails to overwrite zipapp pex with loose pex #1879\r\n+ [x] Make lock update sensitive to artifacts. #1887 \r\n+ [x] PEXBuilder is not robust to ephemeral .pyc compiles. #1889 \n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.103\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.104\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_171 | rasdani/github-patches | git_diff | pex-tool__pex-1932 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Release 2.1.108
On the docket:
+ [x] Fix slow PEX boot time when there are many extras. #1929
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pex/version.py`
Content:
```
1 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
2 # Licensed under the Apache License, Version 2.0 (see LICENSE).
3
4 __version__ = "2.1.107"
5
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pex/version.py b/pex/version.py
--- a/pex/version.py
+++ b/pex/version.py
@@ -1,4 +1,4 @@
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
-__version__ = "2.1.107"
+__version__ = "2.1.108"
| {"golden_diff": "diff --git a/pex/version.py b/pex/version.py\n--- a/pex/version.py\n+++ b/pex/version.py\n@@ -1,4 +1,4 @@\n # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n # Licensed under the Apache License, Version 2.0 (see LICENSE).\n \n-__version__ = \"2.1.107\"\n+__version__ = \"2.1.108\"\n", "issue": "Release 2.1.108\nOn the docket:\r\n+ [x] Fix slow PEX boot time when there are many extras. #1929\n", "before_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.107\"\n", "path": "pex/version.py"}], "after_files": [{"content": "# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).\n# Licensed under the Apache License, Version 2.0 (see LICENSE).\n\n__version__ = \"2.1.108\"\n", "path": "pex/version.py"}]} |
gh_patches_debug_172 | rasdani/github-patches | git_diff | wemake-services__wemake-python-styleguide-2619 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
False positive WPS226 in f strings
### What's wrong
the line
f'query:"{query}"'
generates an AST node whose string value is " (a double quote),
so if we have several lines like this we get a tricky, false-positive violation of WPS226:
WPS226 Found string literal over-use: " > 3
### How it should be
An f-string should be considered as a single node.
### Flake8 version and plugins
{
"dependencies": [],
"platform": {
"python_implementation": "CPython",
"python_version": "3.9.9",
"system": "Linux"
},
"plugins": [
{
"is_local": false,
"plugin": "flake8-bandit",
"version": "3.0.0"
},
{
"is_local": false,
"plugin": "flake8-broken-line",
"version": "0.4.0"
},
{
"is_local": false,
"plugin": "flake8-bugbear",
"version": "22.10.27"
},
{
"is_local": false,
"plugin": "flake8-comprehensions",
"version": "3.10.1"
},
{
"is_local": false,
"plugin": "flake8-darglint",
"version": "1.8.1"
},
{
"is_local": false,
"plugin": "flake8-debugger",
"version": "4.1.2"
},
{
"is_local": false,
"plugin": "flake8-docstrings",
"version": "1.6.0, pydocstyle: 6.1.1"
},
{
"is_local": false,
"plugin": "flake8-eradicate",
"version": "1.4.0"
},
{
"is_local": false,
"plugin": "flake8-string-format",
"version": "0.3.0"
},
{
"is_local": false,
"plugin": "flake8_commas",
"version": "2.1.0"
},
{
"is_local": false,
"plugin": "flake8_isort",
"version": "4.2.0"
},
{
"is_local": false,
"plugin": "flake8_quotes",
"version": "3.3.1"
},
{
"is_local": false,
"plugin": "mccabe",
"version": "0.6.1"
},
{
"is_local": false,
"plugin": "naming",
"version": "0.12.1"
},
{
"is_local": false,
"plugin": "pycodestyle",
"version": "2.8.0"
},
{
"is_local": false,
"plugin": "pyflakes",
"version": "2.4.0"
},
{
"is_local": false,
"plugin": "rst-docstrings",
"version": "0.2.7"
},
{
"is_local": false,
"plugin": "wemake_python_styleguide",
"version": "0.16.1"
}
],
"version": "4.0.1"
}
### pip information
pip 22.3.1
(python 3.9)
absl-py==1.3.0
aiodns==3.0.0
aiohttp==3.8.1
aiosignal==1.3.1
aniso8601==7.0.0
asgiref==3.5.2
astor==0.8.1
astroid==2.12.12
astunparse==1.6.3
async-timeout==4.0.2
attrs==22.1.0
autoflake==1.4
bandit==1.7.4
black==22.3.0
boto3==1.23.4
botocore==1.26.10
Brotli==1.0.9
cachetools==5.0.0
cchardet==2.1.7
certifi==2022.9.24
cffi==1.15.1
cfgv==3.3.1
charset-normalizer==2.0.12
click==8.1.3
ConfigArgParse==1.5.3
coverage==6.5.0
darglint==1.8.1
Deprecated==1.2.13
dill==0.3.6
distlib==0.3.6
dj-database-url==1.0.0
dj-email-url==1.0.6
Django==4.0.6
django-cache-url==3.4.2
django-cors-headers==3.12.0
django-injector==0.2.5
django-stubs==1.13.0
django-stubs-ext==0.7.0
docutils==0.19
environs==9.5.0
eradicate==2.1.0
filelock==3.8.0
flake8==4.0.1
flake8-bandit==3.0.0
flake8-broken-line==0.4.0
flake8-bugbear==22.10.27
flake8-commas==2.1.0
flake8-comprehensions==3.10.1
flake8-debugger==4.1.2
flake8-docstrings==1.6.0
flake8-eradicate==1.4.0
flake8-isort==4.2.0
flake8-polyfill==1.0.2
flake8-quotes==3.3.1
flake8-rst-docstrings==0.2.7
flake8-string-format==0.3.0
Flask==2.2.2
Flask-BasicAuth==0.2.0
Flask-Cors==3.0.10
flatbuffers==1.12
frozenlist==1.3.3
gast==0.4.0
gevent==22.10.2
geventhttpclient==2.0.8
gitdb==4.0.9
GitPython==3.1.29
google-auth==2.14.1
google-auth-oauthlib==0.4.6
google-pasta==0.2.0
graphene==2.1.9
graphene-django==2.15.0
graphql-core==2.3.2
graphql-relay==2.0.1
greenlet==2.0.1
grpcio==1.50.0
gunicorn==20.1.0
h5py==3.7.0
hash-chunker==0.1.9
identify==2.5.8
idna==3.4
importlib-metadata==5.0.0
inflect==5.5.2
iniconfig==1.1.1
injector==0.20.1
isort==5.10.1
itsdangerous==2.1.2
Jinja2==3.1.2
jmespath==1.0.1
joblib==1.2.0
kazoo==2.8.0
keras==2.9.0
Keras-Preprocessing==1.1.2
lazy-object-proxy==1.8.0
libclang==14.0.6
locust==2.9.0
Markdown==3.4.1
MarkupSafe==2.1.1
marshmallow==3.19.0
mccabe==0.6.1
msgpack==1.0.4
multidict==6.0.2
mypy==0.990
mypy-extensions==0.4.3
mysqlclient==2.1.0
nodeenv==1.7.0
numpy==1.23.4
oauthlib==3.2.2
opt-einsum==3.3.0
packaging==21.3
pandas==1.4.2
pathspec==0.10.2
pbr==5.11.0
pep8-naming==0.12.1
platformdirs==2.5.4
pluggy==0.13.1
pre-commit==2.16.0
promise==2.3
protobuf==3.19.6
psutil==5.9.4
py==1.11.0
pyasn1==0.4.8
pyasn1-modules==0.2.8
pycares==4.2.2
pycodestyle==2.8.0
pycparser==2.21
pydocstyle==6.1.1
pyflakes==2.4.0
Pygments==2.13.0
pylint==2.15.4
pylint-django==2.5.3
pylint-plugin-utils==0.7
PyMySQL==1.0.2
pyparsing==3.0.9
pytest==6.2.4
pytest-cov==2.12.0
pytest-django==4.5.2
pytest-lazy-fixture==0.6.3
python-dateutil==2.8.2
python-dotenv==0.21.0
pytz==2022.6
PyYAML==6.0
pyzmq==22.3.0
redis==4.2.2
requests==2.27.1
requests-oauthlib==1.3.1
restructuredtext-lint==1.4.0
roundrobin==0.0.4
rsa==4.9
Rx==1.6.1
s3transfer==0.5.2
scikit-learn==1.0
scipy==1.9.3
singledispatch==3.7.0
six==1.16.0
smmap==5.0.0
snowballstemmer==2.2.0
SQLAlchemy==1.4.36
sqlparse==0.4.3
stevedore==4.1.1
tenacity==8.0.1
tensorboard==2.9.1
tensorboard-data-server==0.6.1
tensorboard-plugin-wit==1.8.1
tensorflow-cpu==2.9.1
tensorflow-estimator==2.9.0
tensorflow-io-gcs-filesystem==0.27.0
termcolor==2.1.0
text-unidecode==1.3
threadpoolctl==3.1.0
toml==0.10.2
tomli==2.0.1
tomlkit==0.11.6
tqdm==4.64.0
types-cachetools==5.0.1
types-pytz==2022.6.0.1
types-PyYAML==6.0.12.2
types-redis==4.2.5
types-requests==2.27.19
types-urllib3==1.26.25.3
typing_extensions==4.4.0
urllib3==1.26.12
virtualenv==20.16.7
wemake-python-styleguide==0.16.1
Werkzeug==2.2.2
wrapt==1.14.1
yarl==1.8.1
zipp==3.10.0
zope.event==4.5.0
zope.interface==5.5.1
### OS information
Ubuntu 22.04.1 LTS
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `wemake_python_styleguide/visitors/ast/complexity/overuses.py`
Content:
```
1 import ast
2 from collections import defaultdict
3 from typing import (
4 Callable,
5 ClassVar,
6 DefaultDict,
7 FrozenSet,
8 List,
9 Tuple,
10 Union,
11 )
12
13 from typing_extensions import TypeAlias, final
14
15 from wemake_python_styleguide.compat.aliases import FunctionNodes
16 from wemake_python_styleguide.logic import source, walk
17 from wemake_python_styleguide.logic.complexity import overuses
18 from wemake_python_styleguide.logic.tree import annotations
19 from wemake_python_styleguide.types import AnyNodes, AnyText, AnyTextPrimitive
20 from wemake_python_styleguide.violations import complexity
21 from wemake_python_styleguide.visitors import base, decorators
22
23 #: We use these types to store the number of nodes usage in different contexts.
24 _Expressions: TypeAlias = DefaultDict[str, List[ast.AST]]
25 _FunctionExpressions: TypeAlias = DefaultDict[ast.AST, _Expressions]
26 _StringConstants: TypeAlias = FrozenSet[Union[str, bytes]]
27
28
29 @final
30 @decorators.alias('visit_any_string', (
31 'visit_Str',
32 'visit_Bytes',
33 ))
34 class StringOveruseVisitor(base.BaseNodeVisitor):
35 """
36 Restricts repeated usage of the same string constant.
37
38 NB: Some short strings are ignored, as their use is very common and
39 forcing assignment would not make much sense (i.e. newlines, "",
40 comma, dot).
41 """
42
43 _ignored_string_constants: ClassVar[_StringConstants] = frozenset((
44 ' ',
45 '.',
46 ',',
47 '',
48 '\n',
49 '\r\n',
50 '\t',
51 '|',
52 b' ',
53 b'.',
54 b',',
55 b'',
56 b'\n',
57 b'\r\n',
58 b'\t',
59 ))
60
61 def __init__(self, *args, **kwargs) -> None:
62 """Inits the counter for constants."""
63 super().__init__(*args, **kwargs)
64 self._string_constants: DefaultDict[
65 AnyTextPrimitive, int,
66 ] = defaultdict(int)
67
68 def visit_any_string(self, node: AnyText) -> None:
69 """Restricts to over-use string constants."""
70 self._check_string_constant(node)
71 self.generic_visit(node)
72
73 def _check_string_constant(self, node: AnyText) -> None:
74 if annotations.is_annotation(node):
75 return
76
77 # Some strings are so common, that it makes no sense to check if
78 # they are overused.
79 if node.s in self._ignored_string_constants:
80 return
81
82 self._string_constants[node.s] += 1
83
84 def _post_visit(self) -> None:
85 for string, usage_count in self._string_constants.items():
86 if usage_count > self.options.max_string_usages:
87 self.add_violation(
88 complexity.OverusedStringViolation(
89 text=source.render_string(string) or "''",
90 baseline=self.options.max_string_usages,
91 ),
92 )
93
94
95 @final
96 class ExpressionOveruseVisitor(base.BaseNodeVisitor):
97 """Finds overused expressions."""
98
99 _expressions: ClassVar[AnyNodes] = (
100 # We do not treat `ast.Attribute`s as expressions
101 # because they are too widely used. That's a compromise.
102 ast.Assert,
103 ast.BoolOp,
104 ast.BinOp,
105 ast.UnaryOp,
106 ast.Call,
107 ast.Compare,
108 ast.Subscript,
109 ast.Lambda,
110
111 ast.DictComp,
112 ast.Dict,
113 ast.List,
114 ast.ListComp,
115 ast.Tuple,
116 ast.GeneratorExp,
117 ast.Set,
118 ast.SetComp,
119 )
120
121 _ignore_predicates: Tuple[Callable[[ast.AST], bool], ...] = (
122 overuses.is_decorator,
123 overuses.is_self,
124 annotations.is_annotation,
125 overuses.is_class_context,
126 overuses.is_super_call,
127 overuses.is_primitive,
128 overuses.is_unary_minus,
129 )
130
131 _msg: ClassVar[str] = '{0}; used {1}'
132
133 def __init__(self, *args, **kwargs) -> None:
134 """We need to track expression usage in functions and modules."""
135 super().__init__(*args, **kwargs)
136 self._module_expressions: _Expressions = defaultdict(list)
137 self._function_expressions: _FunctionExpressions = defaultdict(
138 lambda: defaultdict(list),
139 )
140
141 def visit(self, node: ast.AST) -> None:
142 """Visits all nodes in a module to find overused values."""
143 if isinstance(node, self._expressions):
144 self._add_expression(node)
145 self.generic_visit(node)
146
147 def _add_expression(self, node: ast.AST) -> None:
148 if any(ignore(node) for ignore in self._ignore_predicates):
149 return
150
151 source_code = source.node_to_string(node)
152 self._module_expressions[source_code].append(node)
153
154 maybe_function = walk.get_closest_parent(node, FunctionNodes)
155 if maybe_function is not None:
156 self._function_expressions[maybe_function][source_code].append(
157 node,
158 )
159
160 def _post_visit(self) -> None:
161 for mod_source, module_nodes in self._module_expressions.items():
162 if len(module_nodes) > self.options.max_module_expressions:
163 self.add_violation(
164 complexity.OverusedExpressionViolation(
165 module_nodes[0],
166 text=self._msg.format(mod_source, len(module_nodes)),
167 baseline=self.options.max_module_expressions,
168 ),
169 )
170
171 for function_contexts in self._function_expressions.values():
172 for src, function_nodes in function_contexts.items():
173 if len(function_nodes) > self.options.max_function_expressions:
174 self.add_violation(
175 complexity.OverusedExpressionViolation(
176 function_nodes[0],
177 text=self._msg.format(src, len(function_nodes)),
178 baseline=self.options.max_function_expressions,
179 ),
180 )
181
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/wemake_python_styleguide/visitors/ast/complexity/overuses.py b/wemake_python_styleguide/visitors/ast/complexity/overuses.py
--- a/wemake_python_styleguide/visitors/ast/complexity/overuses.py
+++ b/wemake_python_styleguide/visitors/ast/complexity/overuses.py
@@ -49,6 +49,10 @@
'\r\n',
'\t',
'|',
+ '"',
+ "'",
+ b'"',
+ b"'",
b' ',
b'.',
b',',
| {"golden_diff": "diff --git a/wemake_python_styleguide/visitors/ast/complexity/overuses.py b/wemake_python_styleguide/visitors/ast/complexity/overuses.py\n--- a/wemake_python_styleguide/visitors/ast/complexity/overuses.py\n+++ b/wemake_python_styleguide/visitors/ast/complexity/overuses.py\n@@ -49,6 +49,10 @@\n '\\r\\n',\n '\\t',\n '|',\n+ '\"',\n+ \"'\",\n+ b'\"',\n+ b\"'\",\n b' ',\n b'.',\n b',',\n", "issue": "False positive WPS226 in f strings\n### What's wrong\n\nline\r\nf'query:\"{query}\"'\r\nin ast node generate node with string value \" (double qoute)\r\n\r\nso if we have several lines like with we get tricky and false positive violation of WPS226\r\n\r\n WPS226 Found string literal over-use: \" > 3\r\n\n\n### How it should be\n\nf string should be considered as single node.\n\n### Flake8 version and plugins\n\n{\r\n \"dependencies\": [],\r\n \"platform\": {\r\n \"python_implementation\": \"CPython\",\r\n \"python_version\": \"3.9.9\",\r\n \"system\": \"Linux\"\r\n },\r\n \"plugins\": [\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-bandit\",\r\n \"version\": \"3.0.0\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-broken-line\",\r\n \"version\": \"0.4.0\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-bugbear\",\r\n \"version\": \"22.10.27\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-comprehensions\",\r\n \"version\": \"3.10.1\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-darglint\",\r\n \"version\": \"1.8.1\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-debugger\",\r\n \"version\": \"4.1.2\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-docstrings\",\r\n \"version\": \"1.6.0, pydocstyle: 6.1.1\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-eradicate\",\r\n \"version\": \"1.4.0\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8-string-format\",\r\n \"version\": \"0.3.0\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8_commas\",\r\n \"version\": \"2.1.0\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8_isort\",\r\n \"version\": \"4.2.0\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"flake8_quotes\",\r\n \"version\": \"3.3.1\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"mccabe\",\r\n \"version\": \"0.6.1\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"naming\",\r\n \"version\": \"0.12.1\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"pycodestyle\",\r\n \"version\": \"2.8.0\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"pyflakes\",\r\n \"version\": \"2.4.0\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"rst-docstrings\",\r\n \"version\": \"0.2.7\"\r\n },\r\n {\r\n \"is_local\": false,\r\n \"plugin\": \"wemake_python_styleguide\",\r\n \"version\": \"0.16.1\"\r\n }\r\n ],\r\n \"version\": \"4.0.1\"\r\n}\r\n\n\n### pip information\n\npip 22.3.1\r\n(python 
3.9)\r\nabsl-py==1.3.0\r\naiodns==3.0.0\r\naiohttp==3.8.1\r\naiosignal==1.3.1\r\naniso8601==7.0.0\r\nasgiref==3.5.2\r\nastor==0.8.1\r\nastroid==2.12.12\r\nastunparse==1.6.3\r\nasync-timeout==4.0.2\r\nattrs==22.1.0\r\nautoflake==1.4\r\nbandit==1.7.4\r\nblack==22.3.0\r\nboto3==1.23.4\r\nbotocore==1.26.10\r\nBrotli==1.0.9\r\ncachetools==5.0.0\r\ncchardet==2.1.7\r\ncertifi==2022.9.24\r\ncffi==1.15.1\r\ncfgv==3.3.1\r\ncharset-normalizer==2.0.12\r\nclick==8.1.3\r\nConfigArgParse==1.5.3\r\ncoverage==6.5.0\r\ndarglint==1.8.1\r\nDeprecated==1.2.13\r\ndill==0.3.6\r\ndistlib==0.3.6\r\ndj-database-url==1.0.0\r\ndj-email-url==1.0.6\r\nDjango==4.0.6\r\ndjango-cache-url==3.4.2\r\ndjango-cors-headers==3.12.0\r\ndjango-injector==0.2.5\r\ndjango-stubs==1.13.0\r\ndjango-stubs-ext==0.7.0\r\ndocutils==0.19\r\nenvirons==9.5.0\r\neradicate==2.1.0\r\nfilelock==3.8.0\r\nflake8==4.0.1\r\nflake8-bandit==3.0.0\r\nflake8-broken-line==0.4.0\r\nflake8-bugbear==22.10.27\r\nflake8-commas==2.1.0\r\nflake8-comprehensions==3.10.1\r\nflake8-debugger==4.1.2\r\nflake8-docstrings==1.6.0\r\nflake8-eradicate==1.4.0\r\nflake8-isort==4.2.0\r\nflake8-polyfill==1.0.2\r\nflake8-quotes==3.3.1\r\nflake8-rst-docstrings==0.2.7\r\nflake8-string-format==0.3.0\r\nFlask==2.2.2\r\nFlask-BasicAuth==0.2.0\r\nFlask-Cors==3.0.10\r\nflatbuffers==1.12\r\nfrozenlist==1.3.3\r\ngast==0.4.0\r\ngevent==22.10.2\r\ngeventhttpclient==2.0.8\r\ngitdb==4.0.9\r\nGitPython==3.1.29\r\ngoogle-auth==2.14.1\r\ngoogle-auth-oauthlib==0.4.6\r\ngoogle-pasta==0.2.0\r\ngraphene==2.1.9\r\ngraphene-django==2.15.0\r\ngraphql-core==2.3.2\r\ngraphql-relay==2.0.1\r\ngreenlet==2.0.1\r\ngrpcio==1.50.0\r\ngunicorn==20.1.0\r\nh5py==3.7.0\r\nhash-chunker==0.1.9\r\nidentify==2.5.8\r\nidna==3.4\r\nimportlib-metadata==5.0.0\r\ninflect==5.5.2\r\niniconfig==1.1.1\r\ninjector==0.20.1\r\nisort==5.10.1\r\nitsdangerous==2.1.2\r\nJinja2==3.1.2\r\njmespath==1.0.1\r\njoblib==1.2.0\r\nkazoo==2.8.0\r\nkeras==2.9.0\r\nKeras-Preprocessing==1.1.2\r\nlazy-object-proxy==1.8.0\r\nlibclang==14.0.6\r\nlocust==2.9.0\r\nMarkdown==3.4.1\r\nMarkupSafe==2.1.1\r\nmarshmallow==3.19.0\r\nmccabe==0.6.1\r\nmsgpack==1.0.4\r\nmultidict==6.0.2\r\nmypy==0.990\r\nmypy-extensions==0.4.3\r\nmysqlclient==2.1.0\r\nnodeenv==1.7.0\r\nnumpy==1.23.4\r\noauthlib==3.2.2\r\nopt-einsum==3.3.0\r\npackaging==21.3\r\npandas==1.4.2\r\npathspec==0.10.2\r\npbr==5.11.0\r\npep8-naming==0.12.1\r\nplatformdirs==2.5.4\r\npluggy==0.13.1\r\npre-commit==2.16.0\r\npromise==2.3\r\nprotobuf==3.19.6\r\npsutil==5.9.4\r\npy==1.11.0\r\npyasn1==0.4.8\r\npyasn1-modules==0.2.8\r\npycares==4.2.2\r\npycodestyle==2.8.0\r\npycparser==2.21\r\npydocstyle==6.1.1\r\npyflakes==2.4.0\r\nPygments==2.13.0\r\npylint==2.15.4\r\npylint-django==2.5.3\r\npylint-plugin-utils==0.7\r\nPyMySQL==1.0.2\r\npyparsing==3.0.9\r\npytest==6.2.4\r\npytest-cov==2.12.0\r\npytest-django==4.5.2\r\npytest-lazy-fixture==0.6.3\r\npython-dateutil==2.8.2\r\npython-dotenv==0.21.0\r\npytz==2022.6\r\nPyYAML==6.0\r\npyzmq==22.3.0\r\nredis==4.2.2\r\nrequests==2.27.1\r\nrequests-oauthlib==1.3.1\r\nrestructuredtext-lint==1.4.0\r\nroundrobin==0.0.4\r\nrsa==4.9\r\nRx==1.6.1\r\ns3transfer==0.5.2\r\nscikit-learn==1.0\r\nscipy==1.9.3\r\nsingledispatch==3.7.0\r\nsix==1.16.0\r\nsmmap==5.0.0\r\nsnowballstemmer==2.2.0\r\nSQLAlchemy==1.4.36\r\nsqlparse==0.4.3\r\nstevedore==4.1.1\r\ntenacity==8.0.1\r\ntensorboard==2.9.1\r\ntensorboard-data-server==0.6.1\r\ntensorboard-plugin-wit==1.8.1\r\ntensorflow-cpu==2.9.1\r\ntensorflow-estimator==2.9.0\r\ntensorflow-io-gcs-filesystem==0.27.0\r\ntermcolor==2.1.0\r\ntext-unid
ecode==1.3\r\nthreadpoolctl==3.1.0\r\ntoml==0.10.2\r\ntomli==2.0.1\r\ntomlkit==0.11.6\r\ntqdm==4.64.0\r\ntypes-cachetools==5.0.1\r\ntypes-pytz==2022.6.0.1\r\ntypes-PyYAML==6.0.12.2\r\ntypes-redis==4.2.5\r\ntypes-requests==2.27.19\r\ntypes-urllib3==1.26.25.3\r\ntyping_extensions==4.4.0\r\nurllib3==1.26.12\r\nvirtualenv==20.16.7\r\nwemake-python-styleguide==0.16.1\r\nWerkzeug==2.2.2\r\nwrapt==1.14.1\r\nyarl==1.8.1\r\nzipp==3.10.0\r\nzope.event==4.5.0\r\nzope.interface==5.5.1\r\n\n\n### OS information\n\nUbuntu 22.04.1 LTS\n", "before_files": [{"content": "import ast\nfrom collections import defaultdict\nfrom typing import (\n Callable,\n ClassVar,\n DefaultDict,\n FrozenSet,\n List,\n Tuple,\n Union,\n)\n\nfrom typing_extensions import TypeAlias, final\n\nfrom wemake_python_styleguide.compat.aliases import FunctionNodes\nfrom wemake_python_styleguide.logic import source, walk\nfrom wemake_python_styleguide.logic.complexity import overuses\nfrom wemake_python_styleguide.logic.tree import annotations\nfrom wemake_python_styleguide.types import AnyNodes, AnyText, AnyTextPrimitive\nfrom wemake_python_styleguide.violations import complexity\nfrom wemake_python_styleguide.visitors import base, decorators\n\n#: We use these types to store the number of nodes usage in different contexts.\n_Expressions: TypeAlias = DefaultDict[str, List[ast.AST]]\n_FunctionExpressions: TypeAlias = DefaultDict[ast.AST, _Expressions]\n_StringConstants: TypeAlias = FrozenSet[Union[str, bytes]]\n\n\n@final\[email protected]('visit_any_string', (\n 'visit_Str',\n 'visit_Bytes',\n))\nclass StringOveruseVisitor(base.BaseNodeVisitor):\n \"\"\"\n Restricts repeated usage of the same string constant.\n\n NB: Some short strings are ignored, as their use is very common and\n forcing assignment would not make much sense (i.e. newlines, \"\",\n comma, dot).\n \"\"\"\n\n _ignored_string_constants: ClassVar[_StringConstants] = frozenset((\n ' ',\n '.',\n ',',\n '',\n '\\n',\n '\\r\\n',\n '\\t',\n '|',\n b' ',\n b'.',\n b',',\n b'',\n b'\\n',\n b'\\r\\n',\n b'\\t',\n ))\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"Inits the counter for constants.\"\"\"\n super().__init__(*args, **kwargs)\n self._string_constants: DefaultDict[\n AnyTextPrimitive, int,\n ] = defaultdict(int)\n\n def visit_any_string(self, node: AnyText) -> None:\n \"\"\"Restricts to over-use string constants.\"\"\"\n self._check_string_constant(node)\n self.generic_visit(node)\n\n def _check_string_constant(self, node: AnyText) -> None:\n if annotations.is_annotation(node):\n return\n\n # Some strings are so common, that it makes no sense to check if\n # they are overused.\n if node.s in self._ignored_string_constants:\n return\n\n self._string_constants[node.s] += 1\n\n def _post_visit(self) -> None:\n for string, usage_count in self._string_constants.items():\n if usage_count > self.options.max_string_usages:\n self.add_violation(\n complexity.OverusedStringViolation(\n text=source.render_string(string) or \"''\",\n baseline=self.options.max_string_usages,\n ),\n )\n\n\n@final\nclass ExpressionOveruseVisitor(base.BaseNodeVisitor):\n \"\"\"Finds overused expressions.\"\"\"\n\n _expressions: ClassVar[AnyNodes] = (\n # We do not treat `ast.Attribute`s as expressions\n # because they are too widely used. 
That's a compromise.\n ast.Assert,\n ast.BoolOp,\n ast.BinOp,\n ast.UnaryOp,\n ast.Call,\n ast.Compare,\n ast.Subscript,\n ast.Lambda,\n\n ast.DictComp,\n ast.Dict,\n ast.List,\n ast.ListComp,\n ast.Tuple,\n ast.GeneratorExp,\n ast.Set,\n ast.SetComp,\n )\n\n _ignore_predicates: Tuple[Callable[[ast.AST], bool], ...] = (\n overuses.is_decorator,\n overuses.is_self,\n annotations.is_annotation,\n overuses.is_class_context,\n overuses.is_super_call,\n overuses.is_primitive,\n overuses.is_unary_minus,\n )\n\n _msg: ClassVar[str] = '{0}; used {1}'\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"We need to track expression usage in functions and modules.\"\"\"\n super().__init__(*args, **kwargs)\n self._module_expressions: _Expressions = defaultdict(list)\n self._function_expressions: _FunctionExpressions = defaultdict(\n lambda: defaultdict(list),\n )\n\n def visit(self, node: ast.AST) -> None:\n \"\"\"Visits all nodes in a module to find overused values.\"\"\"\n if isinstance(node, self._expressions):\n self._add_expression(node)\n self.generic_visit(node)\n\n def _add_expression(self, node: ast.AST) -> None:\n if any(ignore(node) for ignore in self._ignore_predicates):\n return\n\n source_code = source.node_to_string(node)\n self._module_expressions[source_code].append(node)\n\n maybe_function = walk.get_closest_parent(node, FunctionNodes)\n if maybe_function is not None:\n self._function_expressions[maybe_function][source_code].append(\n node,\n )\n\n def _post_visit(self) -> None:\n for mod_source, module_nodes in self._module_expressions.items():\n if len(module_nodes) > self.options.max_module_expressions:\n self.add_violation(\n complexity.OverusedExpressionViolation(\n module_nodes[0],\n text=self._msg.format(mod_source, len(module_nodes)),\n baseline=self.options.max_module_expressions,\n ),\n )\n\n for function_contexts in self._function_expressions.values():\n for src, function_nodes in function_contexts.items():\n if len(function_nodes) > self.options.max_function_expressions:\n self.add_violation(\n complexity.OverusedExpressionViolation(\n function_nodes[0],\n text=self._msg.format(src, len(function_nodes)),\n baseline=self.options.max_function_expressions,\n ),\n )\n", "path": "wemake_python_styleguide/visitors/ast/complexity/overuses.py"}], "after_files": [{"content": "import ast\nfrom collections import defaultdict\nfrom typing import (\n Callable,\n ClassVar,\n DefaultDict,\n FrozenSet,\n List,\n Tuple,\n Union,\n)\n\nfrom typing_extensions import TypeAlias, final\n\nfrom wemake_python_styleguide.compat.aliases import FunctionNodes\nfrom wemake_python_styleguide.logic import source, walk\nfrom wemake_python_styleguide.logic.complexity import overuses\nfrom wemake_python_styleguide.logic.tree import annotations\nfrom wemake_python_styleguide.types import AnyNodes, AnyText, AnyTextPrimitive\nfrom wemake_python_styleguide.violations import complexity\nfrom wemake_python_styleguide.visitors import base, decorators\n\n#: We use these types to store the number of nodes usage in different contexts.\n_Expressions: TypeAlias = DefaultDict[str, List[ast.AST]]\n_FunctionExpressions: TypeAlias = DefaultDict[ast.AST, _Expressions]\n_StringConstants: TypeAlias = FrozenSet[Union[str, bytes]]\n\n\n@final\[email protected]('visit_any_string', (\n 'visit_Str',\n 'visit_Bytes',\n))\nclass StringOveruseVisitor(base.BaseNodeVisitor):\n \"\"\"\n Restricts repeated usage of the same string constant.\n\n NB: Some short strings are ignored, as their use is very common and\n forcing assignment 
would not make much sense (i.e. newlines, \"\",\n comma, dot).\n \"\"\"\n\n _ignored_string_constants: ClassVar[_StringConstants] = frozenset((\n ' ',\n '.',\n ',',\n '',\n '\\n',\n '\\r\\n',\n '\\t',\n '|',\n '\"',\n \"'\",\n b'\"',\n b\"'\",\n b' ',\n b'.',\n b',',\n b'',\n b'\\n',\n b'\\r\\n',\n b'\\t',\n ))\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"Inits the counter for constants.\"\"\"\n super().__init__(*args, **kwargs)\n self._string_constants: DefaultDict[\n AnyTextPrimitive, int,\n ] = defaultdict(int)\n\n def visit_any_string(self, node: AnyText) -> None:\n \"\"\"Restricts to over-use string constants.\"\"\"\n self._check_string_constant(node)\n self.generic_visit(node)\n\n def _check_string_constant(self, node: AnyText) -> None:\n if annotations.is_annotation(node):\n return\n\n # Some strings are so common, that it makes no sense to check if\n # they are overused.\n if node.s in self._ignored_string_constants:\n return\n\n self._string_constants[node.s] += 1\n\n def _post_visit(self) -> None:\n for string, usage_count in self._string_constants.items():\n if usage_count > self.options.max_string_usages:\n self.add_violation(\n complexity.OverusedStringViolation(\n text=source.render_string(string) or \"''\",\n baseline=self.options.max_string_usages,\n ),\n )\n\n\n@final\nclass ExpressionOveruseVisitor(base.BaseNodeVisitor):\n \"\"\"Finds overused expressions.\"\"\"\n\n _expressions: ClassVar[AnyNodes] = (\n # We do not treat `ast.Attribute`s as expressions\n # because they are too widely used. That's a compromise.\n ast.Assert,\n ast.BoolOp,\n ast.BinOp,\n ast.UnaryOp,\n ast.Call,\n ast.Compare,\n ast.Subscript,\n ast.Lambda,\n\n ast.DictComp,\n ast.Dict,\n ast.List,\n ast.ListComp,\n ast.Tuple,\n ast.GeneratorExp,\n ast.Set,\n ast.SetComp,\n )\n\n _ignore_predicates: Tuple[Callable[[ast.AST], bool], ...] 
= (\n overuses.is_decorator,\n overuses.is_self,\n annotations.is_annotation,\n overuses.is_class_context,\n overuses.is_super_call,\n overuses.is_primitive,\n overuses.is_unary_minus,\n )\n\n _msg: ClassVar[str] = '{0}; used {1}'\n\n def __init__(self, *args, **kwargs) -> None:\n \"\"\"We need to track expression usage in functions and modules.\"\"\"\n super().__init__(*args, **kwargs)\n self._module_expressions: _Expressions = defaultdict(list)\n self._function_expressions: _FunctionExpressions = defaultdict(\n lambda: defaultdict(list),\n )\n\n def visit(self, node: ast.AST) -> None:\n \"\"\"Visits all nodes in a module to find overused values.\"\"\"\n if isinstance(node, self._expressions):\n self._add_expression(node)\n self.generic_visit(node)\n\n def _add_expression(self, node: ast.AST) -> None:\n if any(ignore(node) for ignore in self._ignore_predicates):\n return\n\n source_code = source.node_to_string(node)\n self._module_expressions[source_code].append(node)\n\n maybe_function = walk.get_closest_parent(node, FunctionNodes)\n if maybe_function is not None:\n self._function_expressions[maybe_function][source_code].append(\n node,\n )\n\n def _post_visit(self) -> None:\n for mod_source, module_nodes in self._module_expressions.items():\n if len(module_nodes) > self.options.max_module_expressions:\n self.add_violation(\n complexity.OverusedExpressionViolation(\n module_nodes[0],\n text=self._msg.format(mod_source, len(module_nodes)),\n baseline=self.options.max_module_expressions,\n ),\n )\n\n for function_contexts in self._function_expressions.values():\n for src, function_nodes in function_contexts.items():\n if len(function_nodes) > self.options.max_function_expressions:\n self.add_violation(\n complexity.OverusedExpressionViolation(\n function_nodes[0],\n text=self._msg.format(src, len(function_nodes)),\n baseline=self.options.max_function_expressions,\n ),\n )\n", "path": "wemake_python_styleguide/visitors/ast/complexity/overuses.py"}]} |
gh_patches_debug_173 | rasdani/github-patches | git_diff | LibraryOfCongress__concordia-535 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Set site time zone to US/Eastern
https://docs.djangoproject.com/en/2.1/ref/settings/#std:setting-TIME_ZONE
Use the Django `TIME_ZONE` setting so that user-facing timestamps are displayed in the US/Eastern time zone.
--- END ISSUE ---
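The requested change amounts to a one-line settings edit. As a minimal sketch of the relevant lines (Django's `TIME_ZONE` expects an IANA identifier, and "America/New_York" is the IANA name covering US/Eastern, including daylight saving time):

```python
# concordia/settings_template.py -- sketch of the relevant settings only
TIME_ZONE = "America/New_York"  # IANA name for US/Eastern, DST-aware

# USE_TZ stays enabled: datetimes are stored in UTC and only rendered
# in TIME_ZONE when shown to users.
USE_TZ = True
```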
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `concordia/settings_template.py`
Content:
```
1 # TODO: use correct copyright header
2 import os
3
4 from django.contrib import messages
5
6 import raven
7
8 # Build paths inside the project like this: os.path.join(SITE_ROOT_DIR, ...)
9 CONCORDIA_APP_DIR = os.path.abspath(os.path.dirname(__file__))
10 SITE_ROOT_DIR = os.path.dirname(CONCORDIA_APP_DIR)
11
12 # SECURITY WARNING: keep the secret key used in production secret!
13 SECRET_KEY = "django-secret-key"
14
15 CONCORDIA_ENVIRONMENT = os.environ.get("CONCORDIA_ENVIRONMENT", "development")
16
17 # Optional SMTP authentication information for EMAIL_HOST.
18 EMAIL_HOST_USER = ""
19 EMAIL_HOST_PASSWORD = ""
20 EMAIL_USE_TLS = False
21 DEFAULT_FROM_EMAIL = "[email protected]"
22
23 ALLOWED_HOSTS = ["*"]
24
25 DEBUG = False
26 CSRF_COOKIE_SECURE = False
27
28 AUTH_PASSWORD_VALIDATORS = []
29 EMAIL_BACKEND = "django.core.mail.backends.filebased.EmailBackend"
30 # EMAIL_FILE_PATH = os.path.join(SITE_ROOT_DIR, 'emails')
31 EMAIL_HOST = "localhost"
32 EMAIL_PORT = 25
33 LANGUAGE_CODE = "en-us"
34 LOGIN_REDIRECT_URL = "/"
35 LOGOUT_REDIRECT_URL = "/"
36 ROOT_URLCONF = "concordia.urls"
37 STATIC_ROOT = "static-files"
38 STATIC_URL = "/static/"
39 STATICFILES_DIRS = [
40 os.path.join(CONCORDIA_APP_DIR, "static"),
41 os.path.join(SITE_ROOT_DIR, "static"),
42 ]
43 TEMPLATE_DEBUG = False
44 TIME_ZONE = "UTC"
45 USE_I18N = True
46 USE_L10N = True
47 USE_TZ = True
48 WSGI_APPLICATION = "concordia.wsgi.application"
49
50 ADMIN_SITE = {"site_header": "Concordia Admin", "site_title": "Concordia"}
51
52 DATABASES = {
53 "default": {
54 "ENGINE": "django.db.backends.postgresql",
55 "NAME": "concordia",
56 "USER": "concordia",
57 "PASSWORD": os.getenv("POSTGRESQL_PW"),
58 "HOST": os.getenv("POSTGRESQL_HOST", "localhost"),
59 "PORT": "5432",
60 "CONN_MAX_AGE": 15 * 60, # Keep database connections open for 15 minutes
61 }
62 }
63
64
65 INSTALLED_APPS = [
66 "django.contrib.admin",
67 "django.contrib.auth",
68 "django.contrib.contenttypes",
69 "django.contrib.humanize",
70 "django.contrib.sessions",
71 "django.contrib.messages",
72 "django.contrib.sites",
73 "django.contrib.staticfiles",
74 "raven.contrib.django.raven_compat",
75 "maintenance_mode",
76 "bootstrap4",
77 "bittersweet",
78 "concordia.apps.ConcordiaAppConfig",
79 "exporter",
80 "importer",
81 "captcha",
82 "django_prometheus_metrics",
83 "robots",
84 ]
85
86 if DEBUG:
87 INSTALLED_APPS += ["django_extensions"]
88 INSTALLED_APPS += ["kombu.transport"]
89
90
91 MIDDLEWARE = [
92 "django_prometheus_metrics.middleware.PrometheusBeforeMiddleware",
93 "django.middleware.security.SecurityMiddleware",
94 # WhiteNoise serves static files efficiently:
95 "whitenoise.middleware.WhiteNoiseMiddleware",
96 "django.contrib.sessions.middleware.SessionMiddleware",
97 "django.middleware.common.CommonMiddleware",
98 "django.middleware.csrf.CsrfViewMiddleware",
99 "django.contrib.auth.middleware.AuthenticationMiddleware",
100 "django.contrib.messages.middleware.MessageMiddleware",
101 "django.middleware.clickjacking.XFrameOptionsMiddleware",
102 "maintenance_mode.middleware.MaintenanceModeMiddleware",
103 ]
104
105 TEMPLATES = [
106 {
107 "BACKEND": "django.template.backends.django.DjangoTemplates",
108 "DIRS": [
109 os.path.join(SITE_ROOT_DIR, "templates"),
110 os.path.join(CONCORDIA_APP_DIR, "templates"),
111 ],
112 "OPTIONS": {
113 "context_processors": [
114 "django.template.context_processors.debug",
115 "django.template.context_processors.request",
116 "django.contrib.auth.context_processors.auth",
117 "django.contrib.messages.context_processors.messages",
118 "django.template.context_processors.media",
119 # Concordia
120 "concordia.context_processors.system_configuration",
121 "concordia.context_processors.site_navigation",
122 ],
123 "loaders": [
124 "django.template.loaders.filesystem.Loader",
125 "django.template.loaders.app_directories.Loader",
126 ],
127 },
128 }
129 ]
130
131 MEMCACHED_ADDRESS = os.getenv("MEMCACHED_ADDRESS", "")
132 MEMCACHED_PORT = os.getenv("MEMCACHED_PORT", "")
133
134 CACHES = {
135 "default": {
136 "BACKEND": "django.core.cache.backends.memcached.MemcachedCache",
137 "LOCATION": "{}:{}".format(MEMCACHED_ADDRESS, MEMCACHED_PORT),
138 }
139 }
140
141 HAYSTACK_CONNECTIONS = {
142 "default": {
143 "ENGINE": "haystack.backends.whoosh_backend.WhooshEngine",
144 "PATH": os.path.join(os.path.dirname(__file__), "whoosh_index"),
145 }
146 }
147
148 # Celery settings
149 CELERY_BROKER_URL = "pyamqp://guest@rabbit"
150 CELERY_RESULT_BACKEND = "rpc://"
151
152 CELERY_ACCEPT_CONTENT = ["json"]
153 CELERY_TASK_SERIALIZER = "json"
154 CELERY_IMPORTS = ("importer.tasks",)
155
156 CELERY_BROKER_HEARTBEAT = 0
157 CELERY_BROKER_TRANSPORT_OPTIONS = {
158 "confirm_publish": True,
159 "max_retries": 3,
160 "interval_start": 0,
161 "interval_step": 0.2,
162 "interval_max": 0.5,
163 }
164
165 LOGGING = {
166 "version": 1,
167 "disable_existing_loggers": False,
168 "formatters": {
169 "long": {
170 "format": "[{asctime} {levelname} {name}:{lineno}] {message}",
171 "datefmt": "%Y-%m-%dT%H:%M:%S",
172 "style": "{",
173 },
174 "short": {
175 "format": "[{levelname} {name}] {message}",
176 "datefmt": "%Y-%m-%dT%H:%M:%S",
177 "style": "{",
178 },
179 },
180 "handlers": {
181 "stream": {
182 "class": "logging.StreamHandler",
183 "level": "INFO",
184 "formatter": "long",
185 },
186 "null": {"level": "DEBUG", "class": "logging.NullHandler"},
187 "file": {
188 "class": "logging.handlers.TimedRotatingFileHandler",
189 "level": "DEBUG",
190 "formatter": "long",
191 "filename": "{}/logs/concordia.log".format(SITE_ROOT_DIR),
192 "when": "H",
193 "interval": 3,
194 "backupCount": 16,
195 },
196 "celery": {
197 "level": "DEBUG",
198 "class": "logging.handlers.RotatingFileHandler",
199 "filename": "{}/logs/celery.log".format(SITE_ROOT_DIR),
200 "formatter": "long",
201 "maxBytes": 1024 * 1024 * 100, # 100 mb
202 },
203 "sentry": {
204 "level": "WARNING",
205 "class": "raven.contrib.django.raven_compat.handlers.SentryHandler",
206 },
207 },
208 "loggers": {
209 "django": {"handlers": ["file", "stream"], "level": "DEBUG", "propagate": True},
210 "celery": {"handlers": ["celery", "stream"], "level": "DEBUG"},
211 "sentry.errors": {"level": "INFO", "handlers": ["stream"], "propagate": False},
212 },
213 }
214
215
216 ################################################################################
217 # Django-specific settings above
218 ################################################################################
219
220 ACCOUNT_ACTIVATION_DAYS = 7
221
222 MEDIA_URL = "/media/"
223 MEDIA_ROOT = os.path.join(SITE_ROOT_DIR, "media")
224
225 LOGIN_URL = "login"
226
227 PASSWORD_VALIDATOR = (
228 "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
229 )
230
231 AUTH_PASSWORD_VALIDATORS = [
232 {"NAME": PASSWORD_VALIDATOR},
233 {
234 "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
235 "OPTIONS": {"min_length": 8},
236 },
237 {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
238 {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
239 {"NAME": "concordia.validators.complexity"},
240 ]
241
242 AUTHENTICATION_BACKENDS = [
243 "concordia.email_username_backend.EmailOrUsernameModelBackend"
244 ]
245
246 CAPTCHA_CHALLENGE_FUNCT = "captcha.helpers.random_char_challenge"
247 #: Anonymous sessions require captcha validation every day by default:
248 ANONYMOUS_CAPTCHA_VALIDATION_INTERVAL = 86400
249
250 STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
251 WHITENOISE_ROOT = os.path.join(SITE_ROOT_DIR, "static")
252
253 PASSWORD_RESET_TIMEOUT_DAYS = 1
254 ACCOUNT_ACTIVATION_DAYS = 1
255 REGISTRATION_OPEN = True # set to false to temporarily disable registrations
256
257 MESSAGE_STORAGE = "django.contrib.messages.storage.session.SessionStorage"
258
259 MESSAGE_TAGS = {messages.ERROR: "danger"}
260
261 SENTRY_DSN = os.environ.get("SENTRY_DSN", "")
262 SENTRY_PUBLIC_DSN = os.environ.get("SENTRY_PUBLIC_DSN", "")
263
264 RAVEN_CONFIG = {
265 "dsn": SENTRY_DSN,
266 "environment": CONCORDIA_ENVIRONMENT,
267 "release": raven.fetch_git_sha(SITE_ROOT_DIR),
268 }
269
270 # When the MAINTENANCE_MODE setting is true, this template will be used to
271 # generate a 503 response:
272 MAINTENANCE_MODE_TEMPLATE = "maintenance-mode.html"
273
274 # Names of special django.auth Groups
275 COMMUNITY_MANAGER_GROUP_NAME = "Community Managers"
276 NEWSLETTER_GROUP_NAME = "Newsletter"
277
278 # Django sites framework setting
279 SITE_ID = 1
280 ROBOTS_USE_SITEMAP = False
281 ROBOTS_USE_HOST = False
282
283 # django-bootstrap4 customization:
284 BOOTSTRAP4 = {"required_css_class": "form-group-required"}
285
286 # Transcription-related settings
287
288 #: Number of seconds an asset reservation is valid for
289 TRANSCRIPTION_RESERVATION_SECONDS = 5 * 60
290
291 #: Web cache policy settings
292 DEFAULT_PAGE_TTL = 5 * 60
293
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/concordia/settings_template.py b/concordia/settings_template.py
--- a/concordia/settings_template.py
+++ b/concordia/settings_template.py
@@ -41,7 +41,7 @@
os.path.join(SITE_ROOT_DIR, "static"),
]
TEMPLATE_DEBUG = False
-TIME_ZONE = "UTC"
+TIME_ZONE = "America/New_York"
USE_I18N = True
USE_L10N = True
USE_TZ = True
| {"golden_diff": "diff --git a/concordia/settings_template.py b/concordia/settings_template.py\n--- a/concordia/settings_template.py\n+++ b/concordia/settings_template.py\n@@ -41,7 +41,7 @@\n os.path.join(SITE_ROOT_DIR, \"static\"),\n ]\n TEMPLATE_DEBUG = False\n-TIME_ZONE = \"UTC\"\n+TIME_ZONE = \"America/New_York\"\n USE_I18N = True\n USE_L10N = True\n USE_TZ = True\n", "issue": "Set site time zone to US/Eastern\nhttps://docs.djangoproject.com/en/2.1/ref/settings/#std:setting-TIME_ZONE\r\n\r\nUse Django setting to change user-facing timestamps to use US/Eastern time zone.\n", "before_files": [{"content": "# TODO: use correct copyright header\nimport os\n\nfrom django.contrib import messages\n\nimport raven\n\n# Build paths inside the project like this: os.path.join(SITE_ROOT_DIR, ...)\nCONCORDIA_APP_DIR = os.path.abspath(os.path.dirname(__file__))\nSITE_ROOT_DIR = os.path.dirname(CONCORDIA_APP_DIR)\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = \"django-secret-key\"\n\nCONCORDIA_ENVIRONMENT = os.environ.get(\"CONCORDIA_ENVIRONMENT\", \"development\")\n\n# Optional SMTP authentication information for EMAIL_HOST.\nEMAIL_HOST_USER = \"\"\nEMAIL_HOST_PASSWORD = \"\"\nEMAIL_USE_TLS = False\nDEFAULT_FROM_EMAIL = \"[email protected]\"\n\nALLOWED_HOSTS = [\"*\"]\n\nDEBUG = False\nCSRF_COOKIE_SECURE = False\n\nAUTH_PASSWORD_VALIDATORS = []\nEMAIL_BACKEND = \"django.core.mail.backends.filebased.EmailBackend\"\n# EMAIL_FILE_PATH = os.path.join(SITE_ROOT_DIR, 'emails')\nEMAIL_HOST = \"localhost\"\nEMAIL_PORT = 25\nLANGUAGE_CODE = \"en-us\"\nLOGIN_REDIRECT_URL = \"/\"\nLOGOUT_REDIRECT_URL = \"/\"\nROOT_URLCONF = \"concordia.urls\"\nSTATIC_ROOT = \"static-files\"\nSTATIC_URL = \"/static/\"\nSTATICFILES_DIRS = [\n os.path.join(CONCORDIA_APP_DIR, \"static\"),\n os.path.join(SITE_ROOT_DIR, \"static\"),\n]\nTEMPLATE_DEBUG = False\nTIME_ZONE = \"UTC\"\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nWSGI_APPLICATION = \"concordia.wsgi.application\"\n\nADMIN_SITE = {\"site_header\": \"Concordia Admin\", \"site_title\": \"Concordia\"}\n\nDATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql\",\n \"NAME\": \"concordia\",\n \"USER\": \"concordia\",\n \"PASSWORD\": os.getenv(\"POSTGRESQL_PW\"),\n \"HOST\": os.getenv(\"POSTGRESQL_HOST\", \"localhost\"),\n \"PORT\": \"5432\",\n \"CONN_MAX_AGE\": 15 * 60, # Keep database connections open for 15 minutes\n }\n}\n\n\nINSTALLED_APPS = [\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.humanize\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.sites\",\n \"django.contrib.staticfiles\",\n \"raven.contrib.django.raven_compat\",\n \"maintenance_mode\",\n \"bootstrap4\",\n \"bittersweet\",\n \"concordia.apps.ConcordiaAppConfig\",\n \"exporter\",\n \"importer\",\n \"captcha\",\n \"django_prometheus_metrics\",\n \"robots\",\n]\n\nif DEBUG:\n INSTALLED_APPS += [\"django_extensions\"]\n INSTALLED_APPS += [\"kombu.transport\"]\n\n\nMIDDLEWARE = [\n \"django_prometheus_metrics.middleware.PrometheusBeforeMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n # WhiteNoise serves static files efficiently:\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n 
\"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"maintenance_mode.middleware.MaintenanceModeMiddleware\",\n]\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [\n os.path.join(SITE_ROOT_DIR, \"templates\"),\n os.path.join(CONCORDIA_APP_DIR, \"templates\"),\n ],\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"django.template.context_processors.media\",\n # Concordia\n \"concordia.context_processors.system_configuration\",\n \"concordia.context_processors.site_navigation\",\n ],\n \"loaders\": [\n \"django.template.loaders.filesystem.Loader\",\n \"django.template.loaders.app_directories.Loader\",\n ],\n },\n }\n]\n\nMEMCACHED_ADDRESS = os.getenv(\"MEMCACHED_ADDRESS\", \"\")\nMEMCACHED_PORT = os.getenv(\"MEMCACHED_PORT\", \"\")\n\nCACHES = {\n \"default\": {\n \"BACKEND\": \"django.core.cache.backends.memcached.MemcachedCache\",\n \"LOCATION\": \"{}:{}\".format(MEMCACHED_ADDRESS, MEMCACHED_PORT),\n }\n}\n\nHAYSTACK_CONNECTIONS = {\n \"default\": {\n \"ENGINE\": \"haystack.backends.whoosh_backend.WhooshEngine\",\n \"PATH\": os.path.join(os.path.dirname(__file__), \"whoosh_index\"),\n }\n}\n\n# Celery settings\nCELERY_BROKER_URL = \"pyamqp://guest@rabbit\"\nCELERY_RESULT_BACKEND = \"rpc://\"\n\nCELERY_ACCEPT_CONTENT = [\"json\"]\nCELERY_TASK_SERIALIZER = \"json\"\nCELERY_IMPORTS = (\"importer.tasks\",)\n\nCELERY_BROKER_HEARTBEAT = 0\nCELERY_BROKER_TRANSPORT_OPTIONS = {\n \"confirm_publish\": True,\n \"max_retries\": 3,\n \"interval_start\": 0,\n \"interval_step\": 0.2,\n \"interval_max\": 0.5,\n}\n\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"formatters\": {\n \"long\": {\n \"format\": \"[{asctime} {levelname} {name}:{lineno}] {message}\",\n \"datefmt\": \"%Y-%m-%dT%H:%M:%S\",\n \"style\": \"{\",\n },\n \"short\": {\n \"format\": \"[{levelname} {name}] {message}\",\n \"datefmt\": \"%Y-%m-%dT%H:%M:%S\",\n \"style\": \"{\",\n },\n },\n \"handlers\": {\n \"stream\": {\n \"class\": \"logging.StreamHandler\",\n \"level\": \"INFO\",\n \"formatter\": \"long\",\n },\n \"null\": {\"level\": \"DEBUG\", \"class\": \"logging.NullHandler\"},\n \"file\": {\n \"class\": \"logging.handlers.TimedRotatingFileHandler\",\n \"level\": \"DEBUG\",\n \"formatter\": \"long\",\n \"filename\": \"{}/logs/concordia.log\".format(SITE_ROOT_DIR),\n \"when\": \"H\",\n \"interval\": 3,\n \"backupCount\": 16,\n },\n \"celery\": {\n \"level\": \"DEBUG\",\n \"class\": \"logging.handlers.RotatingFileHandler\",\n \"filename\": \"{}/logs/celery.log\".format(SITE_ROOT_DIR),\n \"formatter\": \"long\",\n \"maxBytes\": 1024 * 1024 * 100, # 100 mb\n },\n \"sentry\": {\n \"level\": \"WARNING\",\n \"class\": \"raven.contrib.django.raven_compat.handlers.SentryHandler\",\n },\n },\n \"loggers\": {\n \"django\": {\"handlers\": [\"file\", \"stream\"], \"level\": \"DEBUG\", \"propagate\": True},\n \"celery\": {\"handlers\": [\"celery\", \"stream\"], \"level\": \"DEBUG\"},\n \"sentry.errors\": {\"level\": \"INFO\", \"handlers\": [\"stream\"], \"propagate\": False},\n },\n}\n\n\n################################################################################\n# Django-specific settings 
above\n################################################################################\n\nACCOUNT_ACTIVATION_DAYS = 7\n\nMEDIA_URL = \"/media/\"\nMEDIA_ROOT = os.path.join(SITE_ROOT_DIR, \"media\")\n\nLOGIN_URL = \"login\"\n\nPASSWORD_VALIDATOR = (\n \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\"\n)\n\nAUTH_PASSWORD_VALIDATORS = [\n {\"NAME\": PASSWORD_VALIDATOR},\n {\n \"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\",\n \"OPTIONS\": {\"min_length\": 8},\n },\n {\"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"},\n {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"},\n {\"NAME\": \"concordia.validators.complexity\"},\n]\n\nAUTHENTICATION_BACKENDS = [\n \"concordia.email_username_backend.EmailOrUsernameModelBackend\"\n]\n\nCAPTCHA_CHALLENGE_FUNCT = \"captcha.helpers.random_char_challenge\"\n#: Anonymous sessions require captcha validation every day by default:\nANONYMOUS_CAPTCHA_VALIDATION_INTERVAL = 86400\n\nSTATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\nWHITENOISE_ROOT = os.path.join(SITE_ROOT_DIR, \"static\")\n\nPASSWORD_RESET_TIMEOUT_DAYS = 1\nACCOUNT_ACTIVATION_DAYS = 1\nREGISTRATION_OPEN = True # set to false to temporarily disable registrations\n\nMESSAGE_STORAGE = \"django.contrib.messages.storage.session.SessionStorage\"\n\nMESSAGE_TAGS = {messages.ERROR: \"danger\"}\n\nSENTRY_DSN = os.environ.get(\"SENTRY_DSN\", \"\")\nSENTRY_PUBLIC_DSN = os.environ.get(\"SENTRY_PUBLIC_DSN\", \"\")\n\nRAVEN_CONFIG = {\n \"dsn\": SENTRY_DSN,\n \"environment\": CONCORDIA_ENVIRONMENT,\n \"release\": raven.fetch_git_sha(SITE_ROOT_DIR),\n}\n\n# When the MAINTENANCE_MODE setting is true, this template will be used to\n# generate a 503 response:\nMAINTENANCE_MODE_TEMPLATE = \"maintenance-mode.html\"\n\n# Names of special django.auth Groups\nCOMMUNITY_MANAGER_GROUP_NAME = \"Community Managers\"\nNEWSLETTER_GROUP_NAME = \"Newsletter\"\n\n# Django sites framework setting\nSITE_ID = 1\nROBOTS_USE_SITEMAP = False\nROBOTS_USE_HOST = False\n\n# django-bootstrap4 customization:\nBOOTSTRAP4 = {\"required_css_class\": \"form-group-required\"}\n\n# Transcription-related settings\n\n#: Number of seconds an asset reservation is valid for\nTRANSCRIPTION_RESERVATION_SECONDS = 5 * 60\n\n#: Web cache policy settings\nDEFAULT_PAGE_TTL = 5 * 60\n", "path": "concordia/settings_template.py"}], "after_files": [{"content": "# TODO: use correct copyright header\nimport os\n\nfrom django.contrib import messages\n\nimport raven\n\n# Build paths inside the project like this: os.path.join(SITE_ROOT_DIR, ...)\nCONCORDIA_APP_DIR = os.path.abspath(os.path.dirname(__file__))\nSITE_ROOT_DIR = os.path.dirname(CONCORDIA_APP_DIR)\n\n# SECURITY WARNING: keep the secret key used in production secret!\nSECRET_KEY = \"django-secret-key\"\n\nCONCORDIA_ENVIRONMENT = os.environ.get(\"CONCORDIA_ENVIRONMENT\", \"development\")\n\n# Optional SMTP authentication information for EMAIL_HOST.\nEMAIL_HOST_USER = \"\"\nEMAIL_HOST_PASSWORD = \"\"\nEMAIL_USE_TLS = False\nDEFAULT_FROM_EMAIL = \"[email protected]\"\n\nALLOWED_HOSTS = [\"*\"]\n\nDEBUG = False\nCSRF_COOKIE_SECURE = False\n\nAUTH_PASSWORD_VALIDATORS = []\nEMAIL_BACKEND = \"django.core.mail.backends.filebased.EmailBackend\"\n# EMAIL_FILE_PATH = os.path.join(SITE_ROOT_DIR, 'emails')\nEMAIL_HOST = \"localhost\"\nEMAIL_PORT = 25\nLANGUAGE_CODE = \"en-us\"\nLOGIN_REDIRECT_URL = \"/\"\nLOGOUT_REDIRECT_URL = \"/\"\nROOT_URLCONF = 
\"concordia.urls\"\nSTATIC_ROOT = \"static-files\"\nSTATIC_URL = \"/static/\"\nSTATICFILES_DIRS = [\n os.path.join(CONCORDIA_APP_DIR, \"static\"),\n os.path.join(SITE_ROOT_DIR, \"static\"),\n]\nTEMPLATE_DEBUG = False\nTIME_ZONE = \"America/New_York\"\nUSE_I18N = True\nUSE_L10N = True\nUSE_TZ = True\nWSGI_APPLICATION = \"concordia.wsgi.application\"\n\nADMIN_SITE = {\"site_header\": \"Concordia Admin\", \"site_title\": \"Concordia\"}\n\nDATABASES = {\n \"default\": {\n \"ENGINE\": \"django.db.backends.postgresql\",\n \"NAME\": \"concordia\",\n \"USER\": \"concordia\",\n \"PASSWORD\": os.getenv(\"POSTGRESQL_PW\"),\n \"HOST\": os.getenv(\"POSTGRESQL_HOST\", \"localhost\"),\n \"PORT\": \"5432\",\n \"CONN_MAX_AGE\": 15 * 60, # Keep database connections open for 15 minutes\n }\n}\n\n\nINSTALLED_APPS = [\n \"django.contrib.admin\",\n \"django.contrib.auth\",\n \"django.contrib.contenttypes\",\n \"django.contrib.humanize\",\n \"django.contrib.sessions\",\n \"django.contrib.messages\",\n \"django.contrib.sites\",\n \"django.contrib.staticfiles\",\n \"raven.contrib.django.raven_compat\",\n \"maintenance_mode\",\n \"bootstrap4\",\n \"bittersweet\",\n \"concordia.apps.ConcordiaAppConfig\",\n \"exporter\",\n \"importer\",\n \"captcha\",\n \"django_prometheus_metrics\",\n \"robots\",\n]\n\nif DEBUG:\n INSTALLED_APPS += [\"django_extensions\"]\n INSTALLED_APPS += [\"kombu.transport\"]\n\n\nMIDDLEWARE = [\n \"django_prometheus_metrics.middleware.PrometheusBeforeMiddleware\",\n \"django.middleware.security.SecurityMiddleware\",\n # WhiteNoise serves static files efficiently:\n \"whitenoise.middleware.WhiteNoiseMiddleware\",\n \"django.contrib.sessions.middleware.SessionMiddleware\",\n \"django.middleware.common.CommonMiddleware\",\n \"django.middleware.csrf.CsrfViewMiddleware\",\n \"django.contrib.auth.middleware.AuthenticationMiddleware\",\n \"django.contrib.messages.middleware.MessageMiddleware\",\n \"django.middleware.clickjacking.XFrameOptionsMiddleware\",\n \"maintenance_mode.middleware.MaintenanceModeMiddleware\",\n]\n\nTEMPLATES = [\n {\n \"BACKEND\": \"django.template.backends.django.DjangoTemplates\",\n \"DIRS\": [\n os.path.join(SITE_ROOT_DIR, \"templates\"),\n os.path.join(CONCORDIA_APP_DIR, \"templates\"),\n ],\n \"OPTIONS\": {\n \"context_processors\": [\n \"django.template.context_processors.debug\",\n \"django.template.context_processors.request\",\n \"django.contrib.auth.context_processors.auth\",\n \"django.contrib.messages.context_processors.messages\",\n \"django.template.context_processors.media\",\n # Concordia\n \"concordia.context_processors.system_configuration\",\n \"concordia.context_processors.site_navigation\",\n ],\n \"loaders\": [\n \"django.template.loaders.filesystem.Loader\",\n \"django.template.loaders.app_directories.Loader\",\n ],\n },\n }\n]\n\nMEMCACHED_ADDRESS = os.getenv(\"MEMCACHED_ADDRESS\", \"\")\nMEMCACHED_PORT = os.getenv(\"MEMCACHED_PORT\", \"\")\n\nCACHES = {\n \"default\": {\n \"BACKEND\": \"django.core.cache.backends.memcached.MemcachedCache\",\n \"LOCATION\": \"{}:{}\".format(MEMCACHED_ADDRESS, MEMCACHED_PORT),\n }\n}\n\nHAYSTACK_CONNECTIONS = {\n \"default\": {\n \"ENGINE\": \"haystack.backends.whoosh_backend.WhooshEngine\",\n \"PATH\": os.path.join(os.path.dirname(__file__), \"whoosh_index\"),\n }\n}\n\n# Celery settings\nCELERY_BROKER_URL = \"pyamqp://guest@rabbit\"\nCELERY_RESULT_BACKEND = \"rpc://\"\n\nCELERY_ACCEPT_CONTENT = [\"json\"]\nCELERY_TASK_SERIALIZER = \"json\"\nCELERY_IMPORTS = (\"importer.tasks\",)\n\nCELERY_BROKER_HEARTBEAT = 
0\nCELERY_BROKER_TRANSPORT_OPTIONS = {\n \"confirm_publish\": True,\n \"max_retries\": 3,\n \"interval_start\": 0,\n \"interval_step\": 0.2,\n \"interval_max\": 0.5,\n}\n\nLOGGING = {\n \"version\": 1,\n \"disable_existing_loggers\": False,\n \"formatters\": {\n \"long\": {\n \"format\": \"[{asctime} {levelname} {name}:{lineno}] {message}\",\n \"datefmt\": \"%Y-%m-%dT%H:%M:%S\",\n \"style\": \"{\",\n },\n \"short\": {\n \"format\": \"[{levelname} {name}] {message}\",\n \"datefmt\": \"%Y-%m-%dT%H:%M:%S\",\n \"style\": \"{\",\n },\n },\n \"handlers\": {\n \"stream\": {\n \"class\": \"logging.StreamHandler\",\n \"level\": \"INFO\",\n \"formatter\": \"long\",\n },\n \"null\": {\"level\": \"DEBUG\", \"class\": \"logging.NullHandler\"},\n \"file\": {\n \"class\": \"logging.handlers.TimedRotatingFileHandler\",\n \"level\": \"DEBUG\",\n \"formatter\": \"long\",\n \"filename\": \"{}/logs/concordia.log\".format(SITE_ROOT_DIR),\n \"when\": \"H\",\n \"interval\": 3,\n \"backupCount\": 16,\n },\n \"celery\": {\n \"level\": \"DEBUG\",\n \"class\": \"logging.handlers.RotatingFileHandler\",\n \"filename\": \"{}/logs/celery.log\".format(SITE_ROOT_DIR),\n \"formatter\": \"long\",\n \"maxBytes\": 1024 * 1024 * 100, # 100 mb\n },\n \"sentry\": {\n \"level\": \"WARNING\",\n \"class\": \"raven.contrib.django.raven_compat.handlers.SentryHandler\",\n },\n },\n \"loggers\": {\n \"django\": {\"handlers\": [\"file\", \"stream\"], \"level\": \"DEBUG\", \"propagate\": True},\n \"celery\": {\"handlers\": [\"celery\", \"stream\"], \"level\": \"DEBUG\"},\n \"sentry.errors\": {\"level\": \"INFO\", \"handlers\": [\"stream\"], \"propagate\": False},\n },\n}\n\n\n################################################################################\n# Django-specific settings above\n################################################################################\n\nACCOUNT_ACTIVATION_DAYS = 7\n\nMEDIA_URL = \"/media/\"\nMEDIA_ROOT = os.path.join(SITE_ROOT_DIR, \"media\")\n\nLOGIN_URL = \"login\"\n\nPASSWORD_VALIDATOR = (\n \"django.contrib.auth.password_validation.UserAttributeSimilarityValidator\"\n)\n\nAUTH_PASSWORD_VALIDATORS = [\n {\"NAME\": PASSWORD_VALIDATOR},\n {\n \"NAME\": \"django.contrib.auth.password_validation.MinimumLengthValidator\",\n \"OPTIONS\": {\"min_length\": 8},\n },\n {\"NAME\": \"django.contrib.auth.password_validation.CommonPasswordValidator\"},\n {\"NAME\": \"django.contrib.auth.password_validation.NumericPasswordValidator\"},\n {\"NAME\": \"concordia.validators.complexity\"},\n]\n\nAUTHENTICATION_BACKENDS = [\n \"concordia.email_username_backend.EmailOrUsernameModelBackend\"\n]\n\nCAPTCHA_CHALLENGE_FUNCT = \"captcha.helpers.random_char_challenge\"\n#: Anonymous sessions require captcha validation every day by default:\nANONYMOUS_CAPTCHA_VALIDATION_INTERVAL = 86400\n\nSTATICFILES_STORAGE = \"whitenoise.storage.CompressedManifestStaticFilesStorage\"\nWHITENOISE_ROOT = os.path.join(SITE_ROOT_DIR, \"static\")\n\nPASSWORD_RESET_TIMEOUT_DAYS = 1\nACCOUNT_ACTIVATION_DAYS = 1\nREGISTRATION_OPEN = True # set to false to temporarily disable registrations\n\nMESSAGE_STORAGE = \"django.contrib.messages.storage.session.SessionStorage\"\n\nMESSAGE_TAGS = {messages.ERROR: \"danger\"}\n\nSENTRY_DSN = os.environ.get(\"SENTRY_DSN\", \"\")\nSENTRY_PUBLIC_DSN = os.environ.get(\"SENTRY_PUBLIC_DSN\", \"\")\n\nRAVEN_CONFIG = {\n \"dsn\": SENTRY_DSN,\n \"environment\": CONCORDIA_ENVIRONMENT,\n \"release\": raven.fetch_git_sha(SITE_ROOT_DIR),\n}\n\n# When the MAINTENANCE_MODE setting is true, this template will be used to\n# 
generate a 503 response:\nMAINTENANCE_MODE_TEMPLATE = \"maintenance-mode.html\"\n\n# Names of special django.auth Groups\nCOMMUNITY_MANAGER_GROUP_NAME = \"Community Managers\"\nNEWSLETTER_GROUP_NAME = \"Newsletter\"\n\n# Django sites framework setting\nSITE_ID = 1\nROBOTS_USE_SITEMAP = False\nROBOTS_USE_HOST = False\n\n# django-bootstrap4 customization:\nBOOTSTRAP4 = {\"required_css_class\": \"form-group-required\"}\n\n# Transcription-related settings\n\n#: Number of seconds an asset reservation is valid for\nTRANSCRIPTION_RESERVATION_SECONDS = 5 * 60\n\n#: Web cache policy settings\nDEFAULT_PAGE_TTL = 5 * 60\n", "path": "concordia/settings_template.py"}]} |
gh_patches_debug_174 | rasdani/github-patches | git_diff | ivy-llc__ivy-12770 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
expm1
--- END ISSUE ---
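The one-word issue above is a request to add an `expm1` wrapper to this TensorFlow frontend module (`tf.math.expm1` computes `exp(x) - 1` while keeping precision for small `x`). A minimal sketch, assuming it mirrors the neighbouring `log1p` wrapper and that `ivy.expm1` is available in the functional API:

```python
# Sketch only: reuses the imports already present at the top of
# ivy/functional/frontends/tensorflow/math.py.
import ivy
from ivy.functional.frontends.tensorflow.func_wrapper import to_ivy_arrays_and_back


@to_ivy_arrays_and_back
def expm1(x, name=None):
    # Delegate to ivy's elementwise expm1, matching tf.math.expm1 semantics.
    return ivy.expm1(x)
```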
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `ivy/functional/frontends/tensorflow/math.py`
Content:
```
1 # global
2 import ivy
3 from ivy import with_supported_dtypes, with_unsupported_dtypes
4 from ivy.functional.frontends.tensorflow import check_tensorflow_casting
5 from ivy.functional.frontends.tensorflow.func_wrapper import (
6 to_ivy_arrays_and_back,
7 handle_tf_dtype,
8 to_ivy_dtype,
9 )
10
11
12 @with_supported_dtypes(
13 {"2.9.0 and below": ("float16", "float32", "float64", "complex64", "complex128")},
14 "tensorflow",
15 )
16 @to_ivy_arrays_and_back
17 def imag(input, name=None):
18 return ivy.imag(input)
19
20
21 @to_ivy_arrays_and_back
22 def accumulate_n(inputs, input_type=None, shape=None, dtype=None, name=None):
23 return ivy.astype(ivy.sum(ivy.array(inputs)), ivy.int64)
24
25
26 @to_ivy_arrays_and_back
27 def add(x, y, name=None):
28 x, y = check_tensorflow_casting(x, y)
29 return ivy.add(x, y)
30
31
32 @to_ivy_arrays_and_back
33 def exp(x, name=None):
34 return ivy.exp(x)
35
36
37 @to_ivy_arrays_and_back
38 def sqrt(x, name=None):
39 return ivy.sqrt(x)
40
41
42 @to_ivy_arrays_and_back
43 def negative(x, name=None):
44 return ivy.negative(x)
45
46
47 @to_ivy_arrays_and_back
48 def argmax(input, axis, output_type=None, name=None):
49 output_type = to_ivy_dtype(output_type)
50 if output_type in ["uint16", "int16", "int32", "int64"]:
51 return ivy.astype(ivy.argmax(input, axis=axis), output_type)
52 else:
53 return ivy.astype(ivy.argmax(input, axis=axis), "int64")
54
55
56 @to_ivy_arrays_and_back
57 def asinh(x, name="asinh"):
58 return ivy.asinh(x)
59
60
61 @handle_tf_dtype
62 @to_ivy_arrays_and_back
63 def confusion_matrix(
64 labels, predictions, num_classes=None, weights=None, dtype=ivy.int32, name=None
65 ):
66 labels = ivy.astype(
67 ivy.squeeze(ivy.array(labels), axis=None), ivy.int64, copy=False
68 )
69 predictions = ivy.astype(
70 ivy.squeeze(ivy.array(predictions), axis=None), ivy.int64, copy=False
71 )
72 # failsafe for (1,) array will be squeeze to 0-dim
73 labels = ivy.expand_dims(labels, axis=-1) if labels.ndim == 0 else labels
74 predictions = (
75 ivy.expand_dims(predictions, axis=-1) if predictions.ndim == 0 else predictions
76 )
77
78 # Sanity check (potential optimization)
79 ivy.utils.assertions.check_greater(
80 labels, 0, allow_equal=True, message="labels contains negative values"
81 )
82 ivy.utils.assertions.check_greater(
83 predictions, 0, allow_equal=True, message="predictions contains negative values"
84 )
85
86 if num_classes is None:
87 num_classes = max(ivy.max(labels), ivy.max(predictions)) + 1
88 else:
89 num_classes_int64 = ivy.astype(ivy.array(num_classes), ivy.int64, copy=False)
90 ivy.utils.assertions.check_less(
91 labels, num_classes_int64, message="labels out of bound"
92 )
93 ivy.utils.assertions.check_less(
94 predictions, num_classes_int64, message="predictions out of bound"
95 )
96
97 if weights is not None:
98 weights = ivy.array(weights)
99 ivy.utils.assertions.check_equal(
100 ivy.shape(predictions),
101 ivy.shape(weights),
102 message="weights shape do not match predictions",
103 )
104 weights = ivy.astype(weights, dtype, copy=False)
105
106 shape = ivy.stack([num_classes, num_classes])
107 indices = ivy.stack([labels, predictions], axis=1)
108 values = ivy.ones_like(predictions, dtype=dtype) if weights is None else weights
109 return ivy.scatter_nd(indices, values, shape=shape)
110
111
112 @handle_tf_dtype
113 @to_ivy_arrays_and_back
114 def count_nonzero(input, axis=None, keepdims=None, dtype=ivy.int64, name=None):
115 x = ivy.array(input)
116 if keepdims is None:
117 keepdims = False
118 zero = ivy.zeros(ivy.shape(x), dtype=x.dtype)
119 return ivy.astype(
120 ivy.sum(
121 ivy.astype(ivy.not_equal(x, zero), ivy.int64),
122 axis=axis,
123 keepdims=keepdims,
124 ),
125 dtype,
126 copy=False,
127 )
128
129
130 def cumprod(x, axis, exclusive=False, reverse=False, name=None):
131 return ivy.astype(
132 ivy.cumprod(x, axis=axis, exclusive=exclusive, reverse=reverse), x.dtype
133 )
134
135
136 def cumsum(x, axis, exclusive=False, reverse=False, name=None):
137 return ivy.astype(
138 ivy.cumsum(x, axis=axis, exclusive=exclusive, reverse=reverse), x.dtype
139 )
140
141
142 @to_ivy_arrays_and_back
143 def divide(x, y, name=None):
144 x, y = check_tensorflow_casting(x, y)
145 return ivy.divide(x, y)
146
147
148 @to_ivy_arrays_and_back
149 def divide_no_nan(x, y, name="divide_no_nan"):
150 x, y = check_tensorflow_casting(x, y)
151 return ivy.where(
152 y == 0,
153 ivy.array(0.0, dtype=ivy.promote_types(x.dtype, y.dtype)),
154 x / y,
155 )
156
157
158 @to_ivy_arrays_and_back
159 def maximum(x, y, name=None):
160 return ivy.maximum(x, y)
161
162
163 @to_ivy_arrays_and_back
164 def erfcinv(x, name="erfcinv"):
165 return 1 / (1 - ivy.erf(x))
166
167
168 @to_ivy_arrays_and_back
169 def is_inf(x, name=None):
170 return ivy.isinf(x)
171
172
173 @to_ivy_arrays_and_back
174 def is_non_decreasing(x, name="is_non_decreasing"):
175 if ivy.array(x).size < 2:
176 return ivy.array(True)
177 if ivy.array(x).size == 2:
178 return ivy.array([x[0] <= x[1]])
179 return ivy.all(ivy.less_equal(x, ivy.roll(x, -1)))
180
181
182 @to_ivy_arrays_and_back
183 def is_strictly_increasing(x, name="is_strictly_increasing"):
184 if ivy.array(x).size < 2:
185 return ivy.array(True)
186 if ivy.array(x).size == 2:
187 return ivy.array(x[0] < x[1])
188 return ivy.all(ivy.less(x, ivy.roll(x, -1)))
189
190
191 @to_ivy_arrays_and_back
192 def log_sigmoid(x, name=None):
193 return -ivy.softplus(-x)
194
195
196 @to_ivy_arrays_and_back
197 def logical_not(x, name="logical_not"):
198 return ivy.logical_not(x)
199
200
201 @to_ivy_arrays_and_back
202 def log1p(x, name=None):
203 return ivy.log1p(x)
204
205
206 @to_ivy_arrays_and_back
207 def logical_and(x, y, name="LogicalAnd"):
208 return ivy.logical_and(x, y)
209
210
211 @to_ivy_arrays_and_back
212 def logical_xor(x, y, name="LogicalXor"):
213 return ivy.logical_xor(x, y)
214
215
216 @to_ivy_arrays_and_back
217 def logical_or(x, y, name="logical_or"):
218 return ivy.logical_or(x, y)
219
220
221 @to_ivy_arrays_and_back
222 def multiply(x, y, name=None):
223 x, y = check_tensorflow_casting(x, y)
224 return ivy.multiply(x, y)
225
226
227 @to_ivy_arrays_and_back
228 def multiply_no_nan(x, y, name="multiply_no_nan"):
229 x, y = check_tensorflow_casting(x, y)
230 return ivy.where(
231 y == 0,
232 ivy.array(0.0, dtype=ivy.promote_types(x.dtype, y.dtype)),
233 x * y,
234 )
235
236
237 @to_ivy_arrays_and_back
238 def polyval(coeffs, x, name=None):
239 ivy.utils.assertions.check_isinstance(coeffs, list)
240 x = ivy.array(x)
241 if len(coeffs) < 1:
242 return ivy.zeros_like(x, dtype=x.dtype)
243 coeffs = [ivy.array(_) for _ in coeffs]
244 p = coeffs[0]
245 for c in coeffs[1:]:
246 p = c + p * x
247 return p
248
249
250 @to_ivy_arrays_and_back
251 def pow(x, y, name="pow"):
252 x, y = check_tensorflow_casting(x, y)
253 return ivy.pow(x, y)
254
255
256 @to_ivy_arrays_and_back
257 def reciprocal(x, name="reciprocal"):
258 return ivy.reciprocal(x)
259
260
261 @to_ivy_arrays_and_back
262 def reciprocal_no_nan(x, name="reciprocal_no_nan"):
263 return ivy.where(
264 x == 0,
265 ivy.array(0.0, dtype=x.dtype),
266 ivy.ones_like(x, dtype=x.dtype) / x,
267 )
268
269
270 @to_ivy_arrays_and_back
271 def reduce_all(input_tensor, axis=None, keepdims=False, name="reduce_all"):
272 return ivy.all(input_tensor, axis=axis, keepdims=keepdims)
273
274
275 @to_ivy_arrays_and_back
276 def reduce_any(input_tensor, axis=None, keepdims=False, name="reduce_any"):
277 return ivy.any(input_tensor, axis=axis, keepdims=keepdims)
278
279
280 @to_ivy_arrays_and_back
281 def reduce_euclidean_norm(
282 input_tensor, axis=None, keepdims=False, name="reduce_euclidean_norm"
283 ):
284 return ivy.vector_norm(
285 input_tensor, axis=axis, keepdims=keepdims, ord=2
286 ) # ord = '2' is the euclidean norm
287
288
289 @to_ivy_arrays_and_back
290 def reduce_logsumexp(input_tensor, axis=None, keepdims=False, name="reduce_logsumexp"):
291 # stable logsumexp trick
292 max_input_tensor = ivy.max(input_tensor, axis=axis, keepdims=True)
293 return (
294 ivy.log(
295 ivy.sum(
296 ivy.exp(input_tensor - max_input_tensor),
297 axis=axis,
298 keepdims=keepdims,
299 )
300 )
301 + max_input_tensor
302 ).astype(input_tensor.dtype)
303
304
305 @to_ivy_arrays_and_back
306 def reduce_max(input_tensor, axis=None, keepdims=False, name="reduce_max"):
307 return ivy.max(input_tensor, axis=axis, keepdims=keepdims)
308
309
310 @to_ivy_arrays_and_back
311 def reduce_mean(input_tensor, axis=None, keepdims=False, name="reduce_mean"):
312 if ivy.exists(axis):
313 axis = ivy.to_list(axis)
314 return ivy.mean(input_tensor, axis=axis, keepdims=keepdims)
315
316
317 @to_ivy_arrays_and_back
318 def reduce_min(input_tensor, axis=None, keepdims=False, name="reduce_min"):
319 return ivy.min(input_tensor, axis=axis, keepdims=keepdims)
320
321
322 @to_ivy_arrays_and_back
323 def reduce_prod(input_tensor, axis=None, keepdims=False, name="reduce_prod"):
324 return ivy.prod(input_tensor, axis=axis, keepdims=keepdims).astype(
325 input_tensor.dtype
326 )
327
328
329 @to_ivy_arrays_and_back
330 def reduce_std(input_tensor, axis=None, keepdims=False, name="reduce_std"):
331 return ivy.std(input_tensor, axis=axis, keepdims=keepdims)
332
333
334 @to_ivy_arrays_and_back
335 def reduce_sum(input_tensor, axis=None, keepdims=False, name="reduce_sum"):
336 return ivy.sum(input_tensor, axis=axis, keepdims=keepdims).astype(
337 input_tensor.dtype
338 )
339
340
341 @to_ivy_arrays_and_back
342 def reduce_variance(input_tensor, axis=None, keepdims=False, name="reduce_variance"):
343 return ivy.var(input_tensor, axis=axis, keepdims=keepdims)
344
345
346 @to_ivy_arrays_and_back
347 def scalar_mul(scalar, x, name="scalar_mul"):
348 scalar, x = check_tensorflow_casting(scalar, x)
349 return ivy.multiply(x, scalar).astype(x.dtype)
350
351
352 @to_ivy_arrays_and_back
353 def subtract(x, y, name=None):
354 x, y = check_tensorflow_casting(x, y)
355 return ivy.subtract(x, y)
356
357
358 @to_ivy_arrays_and_back
359 def squared_difference(x, y, name=None):
360 x, y = check_tensorflow_casting(x, y)
361 return ivy.square(ivy.subtract(x, y))
362
363
364 @with_supported_dtypes(
365 {
366 "2.9.0 and below": (
367 "bfloat16",
368 "float16",
369 "float32",
370 "float64",
371 "complex64",
372 "complex128",
373 )
374 },
375 "tensorflow",
376 )
377 @to_ivy_arrays_and_back
378 def sin(x, name=None):
379 return ivy.sin(x)
380
381
382 @to_ivy_arrays_and_back
383 def tan(x, name=None):
384 return ivy.tan(x)
385
386
387 @to_ivy_arrays_and_back
388 def unsorted_segment_mean(
389 data, segment_ids, num_segments, name="unsorted_segment_mean"
390 ):
391 ivy.utils.assertions.check_equal(list(segment_ids.shape), [list(data.shape)[0]])
392 x = ivy.zeros(tuple([num_segments] + (list(data.shape))[1:]))
393 count = ivy.zeros((num_segments,))
394 for i in range((segment_ids).shape[0]):
395 x[segment_ids[i]] = x[segment_ids[i]] + data[i]
396 count[segment_ids[i]] += 1
397 for j in range(num_segments):
398 x[j] = ivy.divide(x[j], count[j])
399 return x
400
401
402 @to_ivy_arrays_and_back
403 def unsorted_segment_sqrt_n(
404 data, segment_ids, num_segments, name="unsorted_segement_sqrt_n"
405 ):
406 ivy.utils.assertions.check_equal(list(segment_ids.shape), [list(data.shape)[0]])
407 x = ivy.zeros(tuple([num_segments] + (list(data.shape))[1:]))
408 count = ivy.zeros((num_segments,))
409 for i in range((segment_ids).shape[0]):
410 x[segment_ids[i]] = x[segment_ids[i]] + data[i]
411 count[segment_ids[i]] += 1
412 for j in range(num_segments):
413 x[j] = ivy.divide(x[j], ivy.sqrt(count[j]))
414 return x
415
416
417 @to_ivy_arrays_and_back
418 def zero_fraction(value, name="zero_fraction"):
419 zero = ivy.zeros(tuple(list(value.shape)), dtype=ivy.float32)
420 x = ivy.array(value, dtype=ivy.float32)
421 count_zero = ivy.sum(ivy.equal(x, zero))
422 count_nonzero = ivy.sum(ivy.not_equal(x, zero))
423 return ivy.divide(count_zero, ivy.add(count_zero, count_nonzero))
424
425
426 @to_ivy_arrays_and_back
427 def argmin(input, axis=None, output_type="int64", name=None):
428 output_type = to_ivy_dtype(output_type)
429 if output_type in ["int32", "int64"]:
430 return ivy.astype(ivy.argmin(input, axis=axis), output_type)
431 else:
432 return ivy.astype(ivy.argmin(input, axis=axis), "int64")
433
434
435 @to_ivy_arrays_and_back
436 def truediv(x, y, name="truediv"):
437 x, y = check_tensorflow_casting(x, y)
438 x_dtype = ivy.dtype(x)
439
440 if ivy.current_backend_str() == "torch":
441 if x_dtype in [ivy.int8, ivy.int16]:
442 return ivy.divide(ivy.astype(x, ivy.float32), ivy.astype(y, ivy.float32))
443 elif x_dtype in [ivy.int32, ivy.int64]:
444 return ivy.divide(ivy.astype(x, ivy.float64), ivy.astype(y, ivy.float64))
445 else:
446 if x_dtype in [ivy.int8, ivy.uint8, ivy.int16, ivy.uint16]:
447 return ivy.divide(ivy.astype(x, ivy.float32), ivy.astype(y, ivy.float32))
448 elif x_dtype in [ivy.int32, ivy.uint32, ivy.int64, ivy.uint64]:
449 return ivy.divide(ivy.astype(x, ivy.float64), ivy.astype(y, ivy.float64))
450 return ivy.divide(x, y)
451
452
453 @to_ivy_arrays_and_back
454 def equal(x, y, name=None):
455 x, y = check_tensorflow_casting(x, y)
456 return ivy.equal(x, y)
457
458
459 @to_ivy_arrays_and_back
460 def not_equal(x, y, name=None):
461 x, y = check_tensorflow_casting(x, y)
462 return ivy.not_equal(x, y)
463
464
465 @to_ivy_arrays_and_back
466 def floor(x, name=None):
467 return ivy.floor(x)
468
469
470 @to_ivy_arrays_and_back
471 def floordiv(x, y, name=None):
472 return ivy.floor_divide(x, y)
473
474
475 @to_ivy_arrays_and_back
476 def ceil(x, name=None):
477 return ivy.ceil(x)
478
479
480 @to_ivy_arrays_and_back
481 def round(x, name=None):
482 return ivy.round(x)
483
484
485 @to_ivy_arrays_and_back
486 def minimum(x, y, name=None):
487 return ivy.minimum(x, y)
488
489
490 @to_ivy_arrays_and_back
491 def sigmoid(x, name=None):
492 return ivy.sigmoid(x)
493
494
495 @with_supported_dtypes(
496 {"2.9.0 and below": ("float16", "float32", "float64", "complex64", "complex128")},
497 "tensorflow",
498 )
499 @to_ivy_arrays_and_back
500 def tanh(x, name=None):
501 return ivy.tanh(x)
502
503
504 @to_ivy_arrays_and_back
505 def rsqrt(x, name=None):
506 return ivy.reciprocal(ivy.sqrt(x))
507
508
509 @to_ivy_arrays_and_back
510 def nextafter(x1, x2, name=None):
511 return ivy.nextafter(x1, x2)
512
513
514 @with_unsupported_dtypes(
515 {
516 "1.2.0": ("float16", "complex64", "complex128"),
517 "1.8.0 and below": ("float16"),
518 "2.9.0 and below": ("int8", "int16", "uint8", "uint16", "uint32", "uint64"),
519 },
520 "tensorflow",
521 )
522 def abs(x, name=None):
523 return ivy.abs(x)
524
525
526 @to_ivy_arrays_and_back
527 def log_softmax(logits, axis=None):
528 return ivy.log_softmax(logits, axis=axis)
529
530
531 @to_ivy_arrays_and_back
532 def asin(x, name=None):
533 return ivy.asin(x)
534
535
536 @to_ivy_arrays_and_back
537 def acos(x, name="acos"):
538 return ivy.acos(x)
539
540
541 @to_ivy_arrays_and_back
542 def acosh(x, name="acosh"):
543 return ivy.acosh(x)
544
545
546 @to_ivy_arrays_and_back
547 def square(x, name=None):
548 return ivy.square(x)
549
550
551 @to_ivy_arrays_and_back
552 def is_nan(x, name=None):
553 return ivy.isnan(x)
554
555
556 @with_supported_dtypes(
557 {
558 "2.11.0 and below": ("bfloat16", "half", "float32", "float64"),
559 },
560 "tensorflow",
561 )
562 @to_ivy_arrays_and_back
563 def is_finite(x, name=None):
564 return ivy.isfinite(x)
565
566
567 @to_ivy_arrays_and_back
568 def atan(x, name=None):
569 return ivy.atan(x)
570
571
572 @to_ivy_arrays_and_back
573 def atan2(y, x, name=None):
574 return ivy.atan2(y, x)
575
576
577 @to_ivy_arrays_and_back
578 def log(x, name=None):
579 return ivy.log(x)
580
581
582 @to_ivy_arrays_and_back
583 def add_n(inputs, name=None):
584 return ivy.sum(inputs, dtype=inputs.dtype, axis=0)
585
586
587 @to_ivy_arrays_and_back
588 def floormod(x, y, name=None):
589 return ivy.remainder(x, y)
590
591
592 @to_ivy_arrays_and_back
593 def less_equal(x, y, name="LessEqual"):
594 x, y = check_tensorflow_casting(x, y)
595 return ivy.less_equal(x, y)
596
597
598 @to_ivy_arrays_and_back
599 def greater(x, y, name=None):
600 x, y = check_tensorflow_casting(x, y)
601 return ivy.greater(x, y)
602
603
604 @to_ivy_arrays_and_back
605 def less(x, y, name="None"):
606 x, y = check_tensorflow_casting(x, y)
607 return ivy.less(x, y)
608
609
610 @to_ivy_arrays_and_back
611 def cos(x, name=None):
612 return ivy.cos(x)
613
614
615 @to_ivy_arrays_and_back
616 def sinh(x, name=None):
617 return ivy.sinh(x)
618
619
620 @to_ivy_arrays_and_back
621 def softmax(logits, axis=-1):
622 return ivy.softmax(logits, axis=axis)
623
624
625 @to_ivy_arrays_and_back
626 def softplus(features, name=None):
627 return ivy.softplus(features)
628
629
630 @to_ivy_arrays_and_back
631 def xlogy(x, y, name=None):
632 return ivy.xlogy(x, y)
633
634
635 @to_ivy_arrays_and_back
636 def cosh(x, name=None):
637 return ivy.cosh(x)
638
639
640 @to_ivy_arrays_and_back
641 @with_supported_dtypes(
642 {
643 "2.11.0 and below": ("float32", "float64"),
644 },
645 "tensorflow",
646 )
647 def zeta(x, q, name=None):
648 return ivy.zeta(x, q)
649
650
651 @to_ivy_arrays_and_back
652 def greater_equal(x, y, name=None):
653 x, y = check_tensorflow_casting(x, y)
654 return ivy.greater_equal(x, y)
655
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/ivy/functional/frontends/tensorflow/math.py b/ivy/functional/frontends/tensorflow/math.py
--- a/ivy/functional/frontends/tensorflow/math.py
+++ b/ivy/functional/frontends/tensorflow/math.py
@@ -34,6 +34,11 @@
return ivy.exp(x)
+@to_ivy_arrays_and_back
+def expm1(x, name=None):
+ return ivy.expm1(x)
+
+
@to_ivy_arrays_and_back
def sqrt(x, name=None):
return ivy.sqrt(x)
| {"golden_diff": "diff --git a/ivy/functional/frontends/tensorflow/math.py b/ivy/functional/frontends/tensorflow/math.py\n--- a/ivy/functional/frontends/tensorflow/math.py\n+++ b/ivy/functional/frontends/tensorflow/math.py\n@@ -34,6 +34,11 @@\n return ivy.exp(x)\n \n \n+@to_ivy_arrays_and_back\n+def expm1(x, name=None):\n+ return ivy.expm1(x)\n+\n+\n @to_ivy_arrays_and_back\n def sqrt(x, name=None):\n return ivy.sqrt(x)\n", "issue": "expm1\n\n", "before_files": [{"content": "# global\nimport ivy\nfrom ivy import with_supported_dtypes, with_unsupported_dtypes\nfrom ivy.functional.frontends.tensorflow import check_tensorflow_casting\nfrom ivy.functional.frontends.tensorflow.func_wrapper import (\n to_ivy_arrays_and_back,\n handle_tf_dtype,\n to_ivy_dtype,\n)\n\n\n@with_supported_dtypes(\n {\"2.9.0 and below\": (\"float16\", \"float32\", \"float64\", \"complex64\", \"complex128\")},\n \"tensorflow\",\n)\n@to_ivy_arrays_and_back\ndef imag(input, name=None):\n return ivy.imag(input)\n\n\n@to_ivy_arrays_and_back\ndef accumulate_n(inputs, input_type=None, shape=None, dtype=None, name=None):\n return ivy.astype(ivy.sum(ivy.array(inputs)), ivy.int64)\n\n\n@to_ivy_arrays_and_back\ndef add(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.add(x, y)\n\n\n@to_ivy_arrays_and_back\ndef exp(x, name=None):\n return ivy.exp(x)\n\n\n@to_ivy_arrays_and_back\ndef sqrt(x, name=None):\n return ivy.sqrt(x)\n\n\n@to_ivy_arrays_and_back\ndef negative(x, name=None):\n return ivy.negative(x)\n\n\n@to_ivy_arrays_and_back\ndef argmax(input, axis, output_type=None, name=None):\n output_type = to_ivy_dtype(output_type)\n if output_type in [\"uint16\", \"int16\", \"int32\", \"int64\"]:\n return ivy.astype(ivy.argmax(input, axis=axis), output_type)\n else:\n return ivy.astype(ivy.argmax(input, axis=axis), \"int64\")\n\n\n@to_ivy_arrays_and_back\ndef asinh(x, name=\"asinh\"):\n return ivy.asinh(x)\n\n\n@handle_tf_dtype\n@to_ivy_arrays_and_back\ndef confusion_matrix(\n labels, predictions, num_classes=None, weights=None, dtype=ivy.int32, name=None\n):\n labels = ivy.astype(\n ivy.squeeze(ivy.array(labels), axis=None), ivy.int64, copy=False\n )\n predictions = ivy.astype(\n ivy.squeeze(ivy.array(predictions), axis=None), ivy.int64, copy=False\n )\n # failsafe for (1,) array will be squeeze to 0-dim\n labels = ivy.expand_dims(labels, axis=-1) if labels.ndim == 0 else labels\n predictions = (\n ivy.expand_dims(predictions, axis=-1) if predictions.ndim == 0 else predictions\n )\n\n # Sanity check (potential optimization)\n ivy.utils.assertions.check_greater(\n labels, 0, allow_equal=True, message=\"labels contains negative values\"\n )\n ivy.utils.assertions.check_greater(\n predictions, 0, allow_equal=True, message=\"predictions contains negative values\"\n )\n\n if num_classes is None:\n num_classes = max(ivy.max(labels), ivy.max(predictions)) + 1\n else:\n num_classes_int64 = ivy.astype(ivy.array(num_classes), ivy.int64, copy=False)\n ivy.utils.assertions.check_less(\n labels, num_classes_int64, message=\"labels out of bound\"\n )\n ivy.utils.assertions.check_less(\n predictions, num_classes_int64, message=\"predictions out of bound\"\n )\n\n if weights is not None:\n weights = ivy.array(weights)\n ivy.utils.assertions.check_equal(\n ivy.shape(predictions),\n ivy.shape(weights),\n message=\"weights shape do not match predictions\",\n )\n weights = ivy.astype(weights, dtype, copy=False)\n\n shape = ivy.stack([num_classes, num_classes])\n indices = ivy.stack([labels, predictions], axis=1)\n values = 
ivy.ones_like(predictions, dtype=dtype) if weights is None else weights\n return ivy.scatter_nd(indices, values, shape=shape)\n\n\n@handle_tf_dtype\n@to_ivy_arrays_and_back\ndef count_nonzero(input, axis=None, keepdims=None, dtype=ivy.int64, name=None):\n x = ivy.array(input)\n if keepdims is None:\n keepdims = False\n zero = ivy.zeros(ivy.shape(x), dtype=x.dtype)\n return ivy.astype(\n ivy.sum(\n ivy.astype(ivy.not_equal(x, zero), ivy.int64),\n axis=axis,\n keepdims=keepdims,\n ),\n dtype,\n copy=False,\n )\n\n\ndef cumprod(x, axis, exclusive=False, reverse=False, name=None):\n return ivy.astype(\n ivy.cumprod(x, axis=axis, exclusive=exclusive, reverse=reverse), x.dtype\n )\n\n\ndef cumsum(x, axis, exclusive=False, reverse=False, name=None):\n return ivy.astype(\n ivy.cumsum(x, axis=axis, exclusive=exclusive, reverse=reverse), x.dtype\n )\n\n\n@to_ivy_arrays_and_back\ndef divide(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.divide(x, y)\n\n\n@to_ivy_arrays_and_back\ndef divide_no_nan(x, y, name=\"divide_no_nan\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.where(\n y == 0,\n ivy.array(0.0, dtype=ivy.promote_types(x.dtype, y.dtype)),\n x / y,\n )\n\n\n@to_ivy_arrays_and_back\ndef maximum(x, y, name=None):\n return ivy.maximum(x, y)\n\n\n@to_ivy_arrays_and_back\ndef erfcinv(x, name=\"erfcinv\"):\n return 1 / (1 - ivy.erf(x))\n\n\n@to_ivy_arrays_and_back\ndef is_inf(x, name=None):\n return ivy.isinf(x)\n\n\n@to_ivy_arrays_and_back\ndef is_non_decreasing(x, name=\"is_non_decreasing\"):\n if ivy.array(x).size < 2:\n return ivy.array(True)\n if ivy.array(x).size == 2:\n return ivy.array([x[0] <= x[1]])\n return ivy.all(ivy.less_equal(x, ivy.roll(x, -1)))\n\n\n@to_ivy_arrays_and_back\ndef is_strictly_increasing(x, name=\"is_strictly_increasing\"):\n if ivy.array(x).size < 2:\n return ivy.array(True)\n if ivy.array(x).size == 2:\n return ivy.array(x[0] < x[1])\n return ivy.all(ivy.less(x, ivy.roll(x, -1)))\n\n\n@to_ivy_arrays_and_back\ndef log_sigmoid(x, name=None):\n return -ivy.softplus(-x)\n\n\n@to_ivy_arrays_and_back\ndef logical_not(x, name=\"logical_not\"):\n return ivy.logical_not(x)\n\n\n@to_ivy_arrays_and_back\ndef log1p(x, name=None):\n return ivy.log1p(x)\n\n\n@to_ivy_arrays_and_back\ndef logical_and(x, y, name=\"LogicalAnd\"):\n return ivy.logical_and(x, y)\n\n\n@to_ivy_arrays_and_back\ndef logical_xor(x, y, name=\"LogicalXor\"):\n return ivy.logical_xor(x, y)\n\n\n@to_ivy_arrays_and_back\ndef logical_or(x, y, name=\"logical_or\"):\n return ivy.logical_or(x, y)\n\n\n@to_ivy_arrays_and_back\ndef multiply(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.multiply(x, y)\n\n\n@to_ivy_arrays_and_back\ndef multiply_no_nan(x, y, name=\"multiply_no_nan\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.where(\n y == 0,\n ivy.array(0.0, dtype=ivy.promote_types(x.dtype, y.dtype)),\n x * y,\n )\n\n\n@to_ivy_arrays_and_back\ndef polyval(coeffs, x, name=None):\n ivy.utils.assertions.check_isinstance(coeffs, list)\n x = ivy.array(x)\n if len(coeffs) < 1:\n return ivy.zeros_like(x, dtype=x.dtype)\n coeffs = [ivy.array(_) for _ in coeffs]\n p = coeffs[0]\n for c in coeffs[1:]:\n p = c + p * x\n return p\n\n\n@to_ivy_arrays_and_back\ndef pow(x, y, name=\"pow\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.pow(x, y)\n\n\n@to_ivy_arrays_and_back\ndef reciprocal(x, name=\"reciprocal\"):\n return ivy.reciprocal(x)\n\n\n@to_ivy_arrays_and_back\ndef reciprocal_no_nan(x, name=\"reciprocal_no_nan\"):\n return ivy.where(\n x == 0,\n 
ivy.array(0.0, dtype=x.dtype),\n ivy.ones_like(x, dtype=x.dtype) / x,\n )\n\n\n@to_ivy_arrays_and_back\ndef reduce_all(input_tensor, axis=None, keepdims=False, name=\"reduce_all\"):\n return ivy.all(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_any(input_tensor, axis=None, keepdims=False, name=\"reduce_any\"):\n return ivy.any(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_euclidean_norm(\n input_tensor, axis=None, keepdims=False, name=\"reduce_euclidean_norm\"\n):\n return ivy.vector_norm(\n input_tensor, axis=axis, keepdims=keepdims, ord=2\n ) # ord = '2' is the euclidean norm\n\n\n@to_ivy_arrays_and_back\ndef reduce_logsumexp(input_tensor, axis=None, keepdims=False, name=\"reduce_logsumexp\"):\n # stable logsumexp trick\n max_input_tensor = ivy.max(input_tensor, axis=axis, keepdims=True)\n return (\n ivy.log(\n ivy.sum(\n ivy.exp(input_tensor - max_input_tensor),\n axis=axis,\n keepdims=keepdims,\n )\n )\n + max_input_tensor\n ).astype(input_tensor.dtype)\n\n\n@to_ivy_arrays_and_back\ndef reduce_max(input_tensor, axis=None, keepdims=False, name=\"reduce_max\"):\n return ivy.max(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_mean(input_tensor, axis=None, keepdims=False, name=\"reduce_mean\"):\n if ivy.exists(axis):\n axis = ivy.to_list(axis)\n return ivy.mean(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_min(input_tensor, axis=None, keepdims=False, name=\"reduce_min\"):\n return ivy.min(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_prod(input_tensor, axis=None, keepdims=False, name=\"reduce_prod\"):\n return ivy.prod(input_tensor, axis=axis, keepdims=keepdims).astype(\n input_tensor.dtype\n )\n\n\n@to_ivy_arrays_and_back\ndef reduce_std(input_tensor, axis=None, keepdims=False, name=\"reduce_std\"):\n return ivy.std(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_sum(input_tensor, axis=None, keepdims=False, name=\"reduce_sum\"):\n return ivy.sum(input_tensor, axis=axis, keepdims=keepdims).astype(\n input_tensor.dtype\n )\n\n\n@to_ivy_arrays_and_back\ndef reduce_variance(input_tensor, axis=None, keepdims=False, name=\"reduce_variance\"):\n return ivy.var(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef scalar_mul(scalar, x, name=\"scalar_mul\"):\n scalar, x = check_tensorflow_casting(scalar, x)\n return ivy.multiply(x, scalar).astype(x.dtype)\n\n\n@to_ivy_arrays_and_back\ndef subtract(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.subtract(x, y)\n\n\n@to_ivy_arrays_and_back\ndef squared_difference(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.square(ivy.subtract(x, y))\n\n\n@with_supported_dtypes(\n {\n \"2.9.0 and below\": (\n \"bfloat16\",\n \"float16\",\n \"float32\",\n \"float64\",\n \"complex64\",\n \"complex128\",\n )\n },\n \"tensorflow\",\n)\n@to_ivy_arrays_and_back\ndef sin(x, name=None):\n return ivy.sin(x)\n\n\n@to_ivy_arrays_and_back\ndef tan(x, name=None):\n return ivy.tan(x)\n\n\n@to_ivy_arrays_and_back\ndef unsorted_segment_mean(\n data, segment_ids, num_segments, name=\"unsorted_segment_mean\"\n):\n ivy.utils.assertions.check_equal(list(segment_ids.shape), [list(data.shape)[0]])\n x = ivy.zeros(tuple([num_segments] + (list(data.shape))[1:]))\n count = ivy.zeros((num_segments,))\n for i in range((segment_ids).shape[0]):\n x[segment_ids[i]] = x[segment_ids[i]] + data[i]\n 
count[segment_ids[i]] += 1\n for j in range(num_segments):\n x[j] = ivy.divide(x[j], count[j])\n return x\n\n\n@to_ivy_arrays_and_back\ndef unsorted_segment_sqrt_n(\n data, segment_ids, num_segments, name=\"unsorted_segement_sqrt_n\"\n):\n ivy.utils.assertions.check_equal(list(segment_ids.shape), [list(data.shape)[0]])\n x = ivy.zeros(tuple([num_segments] + (list(data.shape))[1:]))\n count = ivy.zeros((num_segments,))\n for i in range((segment_ids).shape[0]):\n x[segment_ids[i]] = x[segment_ids[i]] + data[i]\n count[segment_ids[i]] += 1\n for j in range(num_segments):\n x[j] = ivy.divide(x[j], ivy.sqrt(count[j]))\n return x\n\n\n@to_ivy_arrays_and_back\ndef zero_fraction(value, name=\"zero_fraction\"):\n zero = ivy.zeros(tuple(list(value.shape)), dtype=ivy.float32)\n x = ivy.array(value, dtype=ivy.float32)\n count_zero = ivy.sum(ivy.equal(x, zero))\n count_nonzero = ivy.sum(ivy.not_equal(x, zero))\n return ivy.divide(count_zero, ivy.add(count_zero, count_nonzero))\n\n\n@to_ivy_arrays_and_back\ndef argmin(input, axis=None, output_type=\"int64\", name=None):\n output_type = to_ivy_dtype(output_type)\n if output_type in [\"int32\", \"int64\"]:\n return ivy.astype(ivy.argmin(input, axis=axis), output_type)\n else:\n return ivy.astype(ivy.argmin(input, axis=axis), \"int64\")\n\n\n@to_ivy_arrays_and_back\ndef truediv(x, y, name=\"truediv\"):\n x, y = check_tensorflow_casting(x, y)\n x_dtype = ivy.dtype(x)\n\n if ivy.current_backend_str() == \"torch\":\n if x_dtype in [ivy.int8, ivy.int16]:\n return ivy.divide(ivy.astype(x, ivy.float32), ivy.astype(y, ivy.float32))\n elif x_dtype in [ivy.int32, ivy.int64]:\n return ivy.divide(ivy.astype(x, ivy.float64), ivy.astype(y, ivy.float64))\n else:\n if x_dtype in [ivy.int8, ivy.uint8, ivy.int16, ivy.uint16]:\n return ivy.divide(ivy.astype(x, ivy.float32), ivy.astype(y, ivy.float32))\n elif x_dtype in [ivy.int32, ivy.uint32, ivy.int64, ivy.uint64]:\n return ivy.divide(ivy.astype(x, ivy.float64), ivy.astype(y, ivy.float64))\n return ivy.divide(x, y)\n\n\n@to_ivy_arrays_and_back\ndef equal(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.equal(x, y)\n\n\n@to_ivy_arrays_and_back\ndef not_equal(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.not_equal(x, y)\n\n\n@to_ivy_arrays_and_back\ndef floor(x, name=None):\n return ivy.floor(x)\n\n\n@to_ivy_arrays_and_back\ndef floordiv(x, y, name=None):\n return ivy.floor_divide(x, y)\n\n\n@to_ivy_arrays_and_back\ndef ceil(x, name=None):\n return ivy.ceil(x)\n\n\n@to_ivy_arrays_and_back\ndef round(x, name=None):\n return ivy.round(x)\n\n\n@to_ivy_arrays_and_back\ndef minimum(x, y, name=None):\n return ivy.minimum(x, y)\n\n\n@to_ivy_arrays_and_back\ndef sigmoid(x, name=None):\n return ivy.sigmoid(x)\n\n\n@with_supported_dtypes(\n {\"2.9.0 and below\": (\"float16\", \"float32\", \"float64\", \"complex64\", \"complex128\")},\n \"tensorflow\",\n)\n@to_ivy_arrays_and_back\ndef tanh(x, name=None):\n return ivy.tanh(x)\n\n\n@to_ivy_arrays_and_back\ndef rsqrt(x, name=None):\n return ivy.reciprocal(ivy.sqrt(x))\n\n\n@to_ivy_arrays_and_back\ndef nextafter(x1, x2, name=None):\n return ivy.nextafter(x1, x2)\n\n\n@with_unsupported_dtypes(\n {\n \"1.2.0\": (\"float16\", \"complex64\", \"complex128\"),\n \"1.8.0 and below\": (\"float16\"),\n \"2.9.0 and below\": (\"int8\", \"int16\", \"uint8\", \"uint16\", \"uint32\", \"uint64\"),\n },\n \"tensorflow\",\n)\ndef abs(x, name=None):\n return ivy.abs(x)\n\n\n@to_ivy_arrays_and_back\ndef log_softmax(logits, axis=None):\n return 
ivy.log_softmax(logits, axis=axis)\n\n\n@to_ivy_arrays_and_back\ndef asin(x, name=None):\n return ivy.asin(x)\n\n\n@to_ivy_arrays_and_back\ndef acos(x, name=\"acos\"):\n return ivy.acos(x)\n\n\n@to_ivy_arrays_and_back\ndef acosh(x, name=\"acosh\"):\n return ivy.acosh(x)\n\n\n@to_ivy_arrays_and_back\ndef square(x, name=None):\n return ivy.square(x)\n\n\n@to_ivy_arrays_and_back\ndef is_nan(x, name=None):\n return ivy.isnan(x)\n\n\n@with_supported_dtypes(\n {\n \"2.11.0 and below\": (\"bfloat16\", \"half\", \"float32\", \"float64\"),\n },\n \"tensorflow\",\n)\n@to_ivy_arrays_and_back\ndef is_finite(x, name=None):\n return ivy.isfinite(x)\n\n\n@to_ivy_arrays_and_back\ndef atan(x, name=None):\n return ivy.atan(x)\n\n\n@to_ivy_arrays_and_back\ndef atan2(y, x, name=None):\n return ivy.atan2(y, x)\n\n\n@to_ivy_arrays_and_back\ndef log(x, name=None):\n return ivy.log(x)\n\n\n@to_ivy_arrays_and_back\ndef add_n(inputs, name=None):\n return ivy.sum(inputs, dtype=inputs.dtype, axis=0)\n\n\n@to_ivy_arrays_and_back\ndef floormod(x, y, name=None):\n return ivy.remainder(x, y)\n\n\n@to_ivy_arrays_and_back\ndef less_equal(x, y, name=\"LessEqual\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.less_equal(x, y)\n\n\n@to_ivy_arrays_and_back\ndef greater(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.greater(x, y)\n\n\n@to_ivy_arrays_and_back\ndef less(x, y, name=\"None\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.less(x, y)\n\n\n@to_ivy_arrays_and_back\ndef cos(x, name=None):\n return ivy.cos(x)\n\n\n@to_ivy_arrays_and_back\ndef sinh(x, name=None):\n return ivy.sinh(x)\n\n\n@to_ivy_arrays_and_back\ndef softmax(logits, axis=-1):\n return ivy.softmax(logits, axis=axis)\n\n\n@to_ivy_arrays_and_back\ndef softplus(features, name=None):\n return ivy.softplus(features)\n\n\n@to_ivy_arrays_and_back\ndef xlogy(x, y, name=None):\n return ivy.xlogy(x, y)\n\n\n@to_ivy_arrays_and_back\ndef cosh(x, name=None):\n return ivy.cosh(x)\n\n\n@to_ivy_arrays_and_back\n@with_supported_dtypes(\n {\n \"2.11.0 and below\": (\"float32\", \"float64\"),\n },\n \"tensorflow\",\n)\ndef zeta(x, q, name=None):\n return ivy.zeta(x, q)\n\n\n@to_ivy_arrays_and_back\ndef greater_equal(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.greater_equal(x, y)\n", "path": "ivy/functional/frontends/tensorflow/math.py"}], "after_files": [{"content": "# global\nimport ivy\nfrom ivy import with_supported_dtypes, with_unsupported_dtypes\nfrom ivy.functional.frontends.tensorflow import check_tensorflow_casting\nfrom ivy.functional.frontends.tensorflow.func_wrapper import (\n to_ivy_arrays_and_back,\n handle_tf_dtype,\n to_ivy_dtype,\n)\n\n\n@with_supported_dtypes(\n {\"2.9.0 and below\": (\"float16\", \"float32\", \"float64\", \"complex64\", \"complex128\")},\n \"tensorflow\",\n)\n@to_ivy_arrays_and_back\ndef imag(input, name=None):\n return ivy.imag(input)\n\n\n@to_ivy_arrays_and_back\ndef accumulate_n(inputs, input_type=None, shape=None, dtype=None, name=None):\n return ivy.astype(ivy.sum(ivy.array(inputs)), ivy.int64)\n\n\n@to_ivy_arrays_and_back\ndef add(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.add(x, y)\n\n\n@to_ivy_arrays_and_back\ndef exp(x, name=None):\n return ivy.exp(x)\n\n\n@to_ivy_arrays_and_back\ndef expm1(x, name=None):\n return ivy.expm1(x)\n\n\n@to_ivy_arrays_and_back\ndef sqrt(x, name=None):\n return ivy.sqrt(x)\n\n\n@to_ivy_arrays_and_back\ndef negative(x, name=None):\n return ivy.negative(x)\n\n\n@to_ivy_arrays_and_back\ndef argmax(input, axis, 
output_type=None, name=None):\n output_type = to_ivy_dtype(output_type)\n if output_type in [\"uint16\", \"int16\", \"int32\", \"int64\"]:\n return ivy.astype(ivy.argmax(input, axis=axis), output_type)\n else:\n return ivy.astype(ivy.argmax(input, axis=axis), \"int64\")\n\n\n@to_ivy_arrays_and_back\ndef asinh(x, name=\"asinh\"):\n return ivy.asinh(x)\n\n\n@handle_tf_dtype\n@to_ivy_arrays_and_back\ndef confusion_matrix(\n labels, predictions, num_classes=None, weights=None, dtype=ivy.int32, name=None\n):\n labels = ivy.astype(\n ivy.squeeze(ivy.array(labels), axis=None), ivy.int64, copy=False\n )\n predictions = ivy.astype(\n ivy.squeeze(ivy.array(predictions), axis=None), ivy.int64, copy=False\n )\n # failsafe for (1,) array will be squeeze to 0-dim\n labels = ivy.expand_dims(labels, axis=-1) if labels.ndim == 0 else labels\n predictions = (\n ivy.expand_dims(predictions, axis=-1) if predictions.ndim == 0 else predictions\n )\n\n # Sanity check (potential optimization)\n ivy.utils.assertions.check_greater(\n labels, 0, allow_equal=True, message=\"labels contains negative values\"\n )\n ivy.utils.assertions.check_greater(\n predictions, 0, allow_equal=True, message=\"predictions contains negative values\"\n )\n\n if num_classes is None:\n num_classes = max(ivy.max(labels), ivy.max(predictions)) + 1\n else:\n num_classes_int64 = ivy.astype(ivy.array(num_classes), ivy.int64, copy=False)\n ivy.utils.assertions.check_less(\n labels, num_classes_int64, message=\"labels out of bound\"\n )\n ivy.utils.assertions.check_less(\n predictions, num_classes_int64, message=\"predictions out of bound\"\n )\n\n if weights is not None:\n weights = ivy.array(weights)\n ivy.utils.assertions.check_equal(\n ivy.shape(predictions),\n ivy.shape(weights),\n message=\"weights shape do not match predictions\",\n )\n weights = ivy.astype(weights, dtype, copy=False)\n\n shape = ivy.stack([num_classes, num_classes])\n indices = ivy.stack([labels, predictions], axis=1)\n values = ivy.ones_like(predictions, dtype=dtype) if weights is None else weights\n return ivy.scatter_nd(indices, values, shape=shape)\n\n\n@handle_tf_dtype\n@to_ivy_arrays_and_back\ndef count_nonzero(input, axis=None, keepdims=None, dtype=ivy.int64, name=None):\n x = ivy.array(input)\n if keepdims is None:\n keepdims = False\n zero = ivy.zeros(ivy.shape(x), dtype=x.dtype)\n return ivy.astype(\n ivy.sum(\n ivy.astype(ivy.not_equal(x, zero), ivy.int64),\n axis=axis,\n keepdims=keepdims,\n ),\n dtype,\n copy=False,\n )\n\n\ndef cumprod(x, axis, exclusive=False, reverse=False, name=None):\n return ivy.astype(\n ivy.cumprod(x, axis=axis, exclusive=exclusive, reverse=reverse), x.dtype\n )\n\n\ndef cumsum(x, axis, exclusive=False, reverse=False, name=None):\n return ivy.astype(\n ivy.cumsum(x, axis=axis, exclusive=exclusive, reverse=reverse), x.dtype\n )\n\n\n@to_ivy_arrays_and_back\ndef divide(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.divide(x, y)\n\n\n@to_ivy_arrays_and_back\ndef divide_no_nan(x, y, name=\"divide_no_nan\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.where(\n y == 0,\n ivy.array(0.0, dtype=ivy.promote_types(x.dtype, y.dtype)),\n x / y,\n )\n\n\n@to_ivy_arrays_and_back\ndef maximum(x, y, name=None):\n return ivy.maximum(x, y)\n\n\n@to_ivy_arrays_and_back\ndef erfcinv(x, name=\"erfcinv\"):\n return 1 / (1 - ivy.erf(x))\n\n\n@to_ivy_arrays_and_back\ndef is_inf(x, name=None):\n return ivy.isinf(x)\n\n\n@to_ivy_arrays_and_back\ndef is_non_decreasing(x, name=\"is_non_decreasing\"):\n if ivy.array(x).size < 
2:\n return ivy.array(True)\n if ivy.array(x).size == 2:\n return ivy.array([x[0] <= x[1]])\n return ivy.all(ivy.less_equal(x, ivy.roll(x, -1)))\n\n\n@to_ivy_arrays_and_back\ndef is_strictly_increasing(x, name=\"is_strictly_increasing\"):\n if ivy.array(x).size < 2:\n return ivy.array(True)\n if ivy.array(x).size == 2:\n return ivy.array(x[0] < x[1])\n return ivy.all(ivy.less(x, ivy.roll(x, -1)))\n\n\n@to_ivy_arrays_and_back\ndef log_sigmoid(x, name=None):\n return -ivy.softplus(-x)\n\n\n@to_ivy_arrays_and_back\ndef logical_not(x, name=\"logical_not\"):\n return ivy.logical_not(x)\n\n\n@to_ivy_arrays_and_back\ndef log1p(x, name=None):\n return ivy.log1p(x)\n\n\n@to_ivy_arrays_and_back\ndef logical_and(x, y, name=\"LogicalAnd\"):\n return ivy.logical_and(x, y)\n\n\n@to_ivy_arrays_and_back\ndef logical_xor(x, y, name=\"LogicalXor\"):\n return ivy.logical_xor(x, y)\n\n\n@to_ivy_arrays_and_back\ndef logical_or(x, y, name=\"logical_or\"):\n return ivy.logical_or(x, y)\n\n\n@to_ivy_arrays_and_back\ndef multiply(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.multiply(x, y)\n\n\n@to_ivy_arrays_and_back\ndef multiply_no_nan(x, y, name=\"multiply_no_nan\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.where(\n y == 0,\n ivy.array(0.0, dtype=ivy.promote_types(x.dtype, y.dtype)),\n x * y,\n )\n\n\n@to_ivy_arrays_and_back\ndef polyval(coeffs, x, name=None):\n ivy.utils.assertions.check_isinstance(coeffs, list)\n x = ivy.array(x)\n if len(coeffs) < 1:\n return ivy.zeros_like(x, dtype=x.dtype)\n coeffs = [ivy.array(_) for _ in coeffs]\n p = coeffs[0]\n for c in coeffs[1:]:\n p = c + p * x\n return p\n\n\n@to_ivy_arrays_and_back\ndef pow(x, y, name=\"pow\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.pow(x, y)\n\n\n@to_ivy_arrays_and_back\ndef reciprocal(x, name=\"reciprocal\"):\n return ivy.reciprocal(x)\n\n\n@to_ivy_arrays_and_back\ndef reciprocal_no_nan(x, name=\"reciprocal_no_nan\"):\n return ivy.where(\n x == 0,\n ivy.array(0.0, dtype=x.dtype),\n ivy.ones_like(x, dtype=x.dtype) / x,\n )\n\n\n@to_ivy_arrays_and_back\ndef reduce_all(input_tensor, axis=None, keepdims=False, name=\"reduce_all\"):\n return ivy.all(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_any(input_tensor, axis=None, keepdims=False, name=\"reduce_any\"):\n return ivy.any(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_euclidean_norm(\n input_tensor, axis=None, keepdims=False, name=\"reduce_euclidean_norm\"\n):\n return ivy.vector_norm(\n input_tensor, axis=axis, keepdims=keepdims, ord=2\n ) # ord = '2' is the euclidean norm\n\n\n@to_ivy_arrays_and_back\ndef reduce_logsumexp(input_tensor, axis=None, keepdims=False, name=\"reduce_logsumexp\"):\n # stable logsumexp trick\n max_input_tensor = ivy.max(input_tensor, axis=axis, keepdims=True)\n return (\n ivy.log(\n ivy.sum(\n ivy.exp(input_tensor - max_input_tensor),\n axis=axis,\n keepdims=keepdims,\n )\n )\n + max_input_tensor\n ).astype(input_tensor.dtype)\n\n\n@to_ivy_arrays_and_back\ndef reduce_max(input_tensor, axis=None, keepdims=False, name=\"reduce_max\"):\n return ivy.max(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_mean(input_tensor, axis=None, keepdims=False, name=\"reduce_mean\"):\n if ivy.exists(axis):\n axis = ivy.to_list(axis)\n return ivy.mean(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_min(input_tensor, axis=None, keepdims=False, name=\"reduce_min\"):\n return 
ivy.min(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_prod(input_tensor, axis=None, keepdims=False, name=\"reduce_prod\"):\n return ivy.prod(input_tensor, axis=axis, keepdims=keepdims).astype(\n input_tensor.dtype\n )\n\n\n@to_ivy_arrays_and_back\ndef reduce_std(input_tensor, axis=None, keepdims=False, name=\"reduce_std\"):\n return ivy.std(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef reduce_sum(input_tensor, axis=None, keepdims=False, name=\"reduce_sum\"):\n return ivy.sum(input_tensor, axis=axis, keepdims=keepdims).astype(\n input_tensor.dtype\n )\n\n\n@to_ivy_arrays_and_back\ndef reduce_variance(input_tensor, axis=None, keepdims=False, name=\"reduce_variance\"):\n return ivy.var(input_tensor, axis=axis, keepdims=keepdims)\n\n\n@to_ivy_arrays_and_back\ndef scalar_mul(scalar, x, name=\"scalar_mul\"):\n scalar, x = check_tensorflow_casting(scalar, x)\n return ivy.multiply(x, scalar).astype(x.dtype)\n\n\n@to_ivy_arrays_and_back\ndef subtract(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.subtract(x, y)\n\n\n@to_ivy_arrays_and_back\ndef squared_difference(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.square(ivy.subtract(x, y))\n\n\n@with_supported_dtypes(\n {\n \"2.9.0 and below\": (\n \"bfloat16\",\n \"float16\",\n \"float32\",\n \"float64\",\n \"complex64\",\n \"complex128\",\n )\n },\n \"tensorflow\",\n)\n@to_ivy_arrays_and_back\ndef sin(x, name=None):\n return ivy.sin(x)\n\n\n@to_ivy_arrays_and_back\ndef tan(x, name=None):\n return ivy.tan(x)\n\n\n@to_ivy_arrays_and_back\ndef unsorted_segment_mean(\n data, segment_ids, num_segments, name=\"unsorted_segment_mean\"\n):\n ivy.utils.assertions.check_equal(list(segment_ids.shape), [list(data.shape)[0]])\n x = ivy.zeros(tuple([num_segments] + (list(data.shape))[1:]))\n count = ivy.zeros((num_segments,))\n for i in range((segment_ids).shape[0]):\n x[segment_ids[i]] = x[segment_ids[i]] + data[i]\n count[segment_ids[i]] += 1\n for j in range(num_segments):\n x[j] = ivy.divide(x[j], count[j])\n return x\n\n\n@to_ivy_arrays_and_back\ndef unsorted_segment_sqrt_n(\n data, segment_ids, num_segments, name=\"unsorted_segement_sqrt_n\"\n):\n ivy.utils.assertions.check_equal(list(segment_ids.shape), [list(data.shape)[0]])\n x = ivy.zeros(tuple([num_segments] + (list(data.shape))[1:]))\n count = ivy.zeros((num_segments,))\n for i in range((segment_ids).shape[0]):\n x[segment_ids[i]] = x[segment_ids[i]] + data[i]\n count[segment_ids[i]] += 1\n for j in range(num_segments):\n x[j] = ivy.divide(x[j], ivy.sqrt(count[j]))\n return x\n\n\n@to_ivy_arrays_and_back\ndef zero_fraction(value, name=\"zero_fraction\"):\n zero = ivy.zeros(tuple(list(value.shape)), dtype=ivy.float32)\n x = ivy.array(value, dtype=ivy.float32)\n count_zero = ivy.sum(ivy.equal(x, zero))\n count_nonzero = ivy.sum(ivy.not_equal(x, zero))\n return ivy.divide(count_zero, ivy.add(count_zero, count_nonzero))\n\n\n@to_ivy_arrays_and_back\ndef argmin(input, axis=None, output_type=\"int64\", name=None):\n output_type = to_ivy_dtype(output_type)\n if output_type in [\"int32\", \"int64\"]:\n return ivy.astype(ivy.argmin(input, axis=axis), output_type)\n else:\n return ivy.astype(ivy.argmin(input, axis=axis), \"int64\")\n\n\n@to_ivy_arrays_and_back\ndef truediv(x, y, name=\"truediv\"):\n x, y = check_tensorflow_casting(x, y)\n x_dtype = ivy.dtype(x)\n\n if ivy.current_backend_str() == \"torch\":\n if x_dtype in [ivy.int8, ivy.int16]:\n return ivy.divide(ivy.astype(x, ivy.float32), 
ivy.astype(y, ivy.float32))\n elif x_dtype in [ivy.int32, ivy.int64]:\n return ivy.divide(ivy.astype(x, ivy.float64), ivy.astype(y, ivy.float64))\n else:\n if x_dtype in [ivy.int8, ivy.uint8, ivy.int16, ivy.uint16]:\n return ivy.divide(ivy.astype(x, ivy.float32), ivy.astype(y, ivy.float32))\n elif x_dtype in [ivy.int32, ivy.uint32, ivy.int64, ivy.uint64]:\n return ivy.divide(ivy.astype(x, ivy.float64), ivy.astype(y, ivy.float64))\n return ivy.divide(x, y)\n\n\n@to_ivy_arrays_and_back\ndef equal(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.equal(x, y)\n\n\n@to_ivy_arrays_and_back\ndef not_equal(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.not_equal(x, y)\n\n\n@to_ivy_arrays_and_back\ndef floor(x, name=None):\n return ivy.floor(x)\n\n\n@to_ivy_arrays_and_back\ndef floordiv(x, y, name=None):\n return ivy.floor_divide(x, y)\n\n\n@to_ivy_arrays_and_back\ndef ceil(x, name=None):\n return ivy.ceil(x)\n\n\n@to_ivy_arrays_and_back\ndef round(x, name=None):\n return ivy.round(x)\n\n\n@to_ivy_arrays_and_back\ndef minimum(x, y, name=None):\n return ivy.minimum(x, y)\n\n\n@to_ivy_arrays_and_back\ndef sigmoid(x, name=None):\n return ivy.sigmoid(x)\n\n\n@with_supported_dtypes(\n {\"2.9.0 and below\": (\"float16\", \"float32\", \"float64\", \"complex64\", \"complex128\")},\n \"tensorflow\",\n)\n@to_ivy_arrays_and_back\ndef tanh(x, name=None):\n return ivy.tanh(x)\n\n\n@to_ivy_arrays_and_back\ndef rsqrt(x, name=None):\n return ivy.reciprocal(ivy.sqrt(x))\n\n\n@to_ivy_arrays_and_back\ndef nextafter(x1, x2, name=None):\n return ivy.nextafter(x1, x2)\n\n\n@with_unsupported_dtypes(\n {\n \"1.2.0\": (\"float16\", \"complex64\", \"complex128\"),\n \"1.8.0 and below\": (\"float16\"),\n \"2.9.0 and below\": (\"int8\", \"int16\", \"uint8\", \"uint16\", \"uint32\", \"uint64\"),\n },\n \"tensorflow\",\n)\ndef abs(x, name=None):\n return ivy.abs(x)\n\n\n@to_ivy_arrays_and_back\ndef log_softmax(logits, axis=None):\n return ivy.log_softmax(logits, axis=axis)\n\n\n@to_ivy_arrays_and_back\ndef asin(x, name=None):\n return ivy.asin(x)\n\n\n@to_ivy_arrays_and_back\ndef acos(x, name=\"acos\"):\n return ivy.acos(x)\n\n\n@to_ivy_arrays_and_back\ndef acosh(x, name=\"acosh\"):\n return ivy.acosh(x)\n\n\n@to_ivy_arrays_and_back\ndef square(x, name=None):\n return ivy.square(x)\n\n\n@to_ivy_arrays_and_back\ndef is_nan(x, name=None):\n return ivy.isnan(x)\n\n\n@with_supported_dtypes(\n {\n \"2.11.0 and below\": (\"bfloat16\", \"half\", \"float32\", \"float64\"),\n },\n \"tensorflow\",\n)\n@to_ivy_arrays_and_back\ndef is_finite(x, name=None):\n return ivy.isfinite(x)\n\n\n@to_ivy_arrays_and_back\ndef atan(x, name=None):\n return ivy.atan(x)\n\n\n@to_ivy_arrays_and_back\ndef atan2(y, x, name=None):\n return ivy.atan2(y, x)\n\n\n@to_ivy_arrays_and_back\ndef log(x, name=None):\n return ivy.log(x)\n\n\n@to_ivy_arrays_and_back\ndef add_n(inputs, name=None):\n return ivy.sum(inputs, dtype=inputs.dtype, axis=0)\n\n\n@to_ivy_arrays_and_back\ndef floormod(x, y, name=None):\n return ivy.remainder(x, y)\n\n\n@to_ivy_arrays_and_back\ndef less_equal(x, y, name=\"LessEqual\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.less_equal(x, y)\n\n\n@to_ivy_arrays_and_back\ndef greater(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.greater(x, y)\n\n\n@to_ivy_arrays_and_back\ndef less(x, y, name=\"None\"):\n x, y = check_tensorflow_casting(x, y)\n return ivy.less(x, y)\n\n\n@to_ivy_arrays_and_back\ndef cos(x, name=None):\n return 
ivy.cos(x)\n\n\n@to_ivy_arrays_and_back\ndef sinh(x, name=None):\n return ivy.sinh(x)\n\n\n@to_ivy_arrays_and_back\ndef softmax(logits, axis=-1):\n return ivy.softmax(logits, axis=axis)\n\n\n@to_ivy_arrays_and_back\ndef softplus(features, name=None):\n return ivy.softplus(features)\n\n\n@to_ivy_arrays_and_back\ndef xlogy(x, y, name=None):\n return ivy.xlogy(x, y)\n\n\n@to_ivy_arrays_and_back\ndef cosh(x, name=None):\n return ivy.cosh(x)\n\n\n@to_ivy_arrays_and_back\n@with_supported_dtypes(\n {\n \"2.11.0 and below\": (\"float32\", \"float64\"),\n },\n \"tensorflow\",\n)\ndef zeta(x, q, name=None):\n return ivy.zeta(x, q)\n\n\n@to_ivy_arrays_and_back\ndef greater_equal(x, y, name=None):\n x, y = check_tensorflow_casting(x, y)\n return ivy.greater_equal(x, y)\n", "path": "ivy/functional/frontends/tensorflow/math.py"}]} |
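A brief aside on the record above: `expm1(x)` is `exp(x) - 1` computed so as to avoid the cancellation error that the naive two-step form incurs for small `x`, and the accepted patch simply forwards the TensorFlow-frontend call to `ivy.expm1`. A small standard-library illustration of why the dedicated function exists (printed values are approximate):

```python
import math

x = 1e-10
print(math.expm1(x))    # ≈ 1.00000000005e-10 (accurate for tiny x)
print(math.exp(x) - 1)  # ≈ 1.0000000827e-10 (precision lost to cancellation)
```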
gh_patches_debug_175 | rasdani/github-patches | git_diff | pytorch__pytorch-2063 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
inplace division doesn't work in version 0.1.12_2
This is an IPython session. Note that the `id` doesn't remain the same for `/=` even though it does for `div_`.
```python
In [1]: import torch
In [2]: foo = torch.ones(3, 3)
In [3]: foo
Out[3]:
1 1 1
1 1 1
1 1 1
[torch.FloatTensor of size 3x3]
In [4]: id(foo)
Out[4]: 140493992350728
In [5]: foo /= 2
In [6]: foo
Out[6]:
0.5000 0.5000 0.5000
0.5000 0.5000 0.5000
0.5000 0.5000 0.5000
[torch.FloatTensor of size 3x3]
In [7]: id(foo)
Out[7]: 140493991465672
In [8]: foo = torch.ones(3, 3)
In [9]: id(foo)
Out[9]: 140492895866120
In [10]: foo.div_(2)
Out[10]:
0.5000 0.5000 0.5000
0.5000 0.5000 0.5000
0.5000 0.5000 0.5000
[torch.FloatTensor of size 3x3]
In [11]: id(foo)
Out[11]: 140492895866120
```
--- END ISSUE ---
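For readers of this record: the behaviour above follows from Python 3's operator dispatch. `foo /= 2` first looks for `__itruediv__`; when a class only defines `__idiv__` (the Python 2 slot) and `__truediv__`, the augmented assignment falls back to the out-of-place `__truediv__` and rebinds `foo` to a new object, which is why `id(foo)` changes while `div_` leaves it stable. A minimal, self-contained sketch of that dispatch — the `Box` class is purely illustrative and not part of torch:

```python
class Box:
    def __init__(self, value):
        self.value = value

    def __truediv__(self, other):
        # out-of-place: returns a brand-new object
        return Box(self.value / other)

    def __itruediv__(self, other):
        # in-place: mutates and returns self, so `/=` keeps the same object
        self.value /= other
        return self

b = Box(4.0)
before = id(b)
b /= 2
assert id(b) == before  # holds only because __itruediv__ is defined
```

Removing `__itruediv__` from the sketch makes `b /= 2` fall back to `__truediv__`, reproducing the rebinding seen in the session above.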
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torch/tensor.py`
Content:
```
1 import torch
2 import warnings
3 from . import _tensor_str
4 from ._utils import _type, _cuda, _range, _rebuild_tensor
5 import sys
6
7
8 class _TensorBase(object):
9 #: bool: True if this is a CUDA tensor
10 is_cuda = False
11 is_sparse = False
12
13 # NB: This implementation is CPU only; see THPTensor_(new) for the
14 # CUDA case, which handles constructing the tensor on the same GPU
15 # as this tensor.
16 def new(self, *args, **kwargs):
17 """Constructs a new tensor of the same data type."""
18 return self.__class__(*args, **kwargs)
19
20 def type_as(self, tensor):
21 """Returns this tensor cast to the type of the given tensor.
22
23 This is a no-op if the tensor is already of the correct type. This is
24 equivalent to::
25
26 self.type(tensor.type())
27
28 Params:
29 tensor (Tensor): the tensor which has the desired type
30 """
31 return self.type(tensor.type())
32
33 def cpu(self):
34 """Returns a CPU copy of this tensor if it's not already on the CPU"""
35 return self.type(getattr(torch, self.__class__.__name__))
36
37 def double(self):
38 """Casts this tensor to double type"""
39 return self.type(type(self).__module__ + '.DoubleTensor')
40
41 def float(self):
42 """Casts this tensor to float type"""
43 return self.type(type(self).__module__ + '.FloatTensor')
44
45 def half(self):
46 """Casts this tensor to half-precision float type"""
47 return self.type(type(self).__module__ + '.HalfTensor')
48
49 def long(self):
50 """Casts this tensor to long type"""
51 return self.type(type(self).__module__ + '.LongTensor')
52
53 def int(self):
54 """Casts this tensor to int type"""
55 return self.type(type(self).__module__ + '.IntTensor')
56
57 def short(self):
58 """Casts this tensor to short type"""
59 return self.type(type(self).__module__ + '.ShortTensor')
60
61 def char(self):
62 """Casts this tensor to char type"""
63 return self.type(type(self).__module__ + '.CharTensor')
64
65 def byte(self):
66 """Casts this tensor to byte type"""
67 return self.type(type(self).__module__ + '.ByteTensor')
68
69 def is_pinned(self):
70 """Returns true if this tensor resides in pinned memory"""
71 storage = self.storage()
72 return storage.is_pinned() if storage else False
73
74 def pin_memory(self):
75 """Copies the tensor to pinned memory, if it's not already pinned."""
76 if self.is_cuda:
77 raise TypeError("cannot pin '{0}' only CPU memory can be pinned"
78 .format(self.type()))
79 storage = self.storage()
80 if storage is None:
81 storage = (self.storage_type())()
82 return type(self)().set_(storage.pin_memory()).view_as(self)
83
84 def share_memory_(self):
85 """Moves the underlying storage to shared memory.
86
87 This is a no-op if the underlying storage is already in shared memory
88 and for CUDA tensors. Tensors in shared memory cannot be resized.
89 """
90 self.storage().share_memory_()
91 return self
92
93 def is_shared(self):
94 """Checks if tensor is in shared memory.
95
96 This is always ``True`` for CUDA tensors.
97 """
98 return self.storage().is_shared()
99
100 @property
101 def shape(self):
102 """Alias for .size()
103
104 Returns a torch.Size object, containing the dimensions of the tensor
105 """
106 return self.size()
107
108 def __deepcopy__(self, _memo):
109 memo = _memo.setdefault('torch', {})
110 if self._cdata in memo:
111 return memo[self._cdata]
112 new_storage = self.storage().__deepcopy__(_memo)
113 new_tensor = self.new()
114 new_tensor.set_(new_storage, self.storage_offset(), self.size(), self.stride())
115 memo[self._cdata] = new_tensor
116 return new_tensor
117
118 def __reduce__(self):
119 # NOTE: _rebuild_tensor does not call __setstate__
120 args = self.__getstate__()
121 return (_rebuild_tensor, args)
122
123 def __getstate__(self):
124 return (self.storage(),
125 self.storage_offset(),
126 tuple(self.size()),
127 self.stride())
128
129 def __setstate__(self, state):
130 self.set_(*state)
131
132 def __repr__(self):
133 return str(self)
134
135 def __str__(self):
136 # All strings are unicode in Python 3, while we have to encode unicode
137 # strings in Python2. If we can't, let python decide the best
138 # characters to replace unicode characters with.
139 if sys.version_info > (3,):
140 return _tensor_str._str(self)
141 else:
142 if hasattr(sys.stdout, 'encoding'):
143 return _tensor_str._str(self).encode(
144 sys.stdout.encoding or 'UTF-8', 'replace')
145 else:
146 return _tensor_str._str(self).encode('UTF-8', 'replace')
147
148 def __bool__(self):
149 if self.numel() == 0:
150 return False
151 raise RuntimeError("bool value of non-empty " + torch.typename(self) +
152 " objects is ambiguous")
153
154 __nonzero__ = __bool__
155
156 def __iter__(self):
157 if self.nelement() > 0:
158 return iter(map(lambda i: self.select(0, i), _range(self.size(0))))
159 else:
160 return iter([])
161
162 def split(self, split_size, dim=0):
163 """Splits this tensor into a tuple of tensors.
164
165 See :func:`torch.split`.
166 """
167 return torch.split(self, split_size, dim)
168
169 def chunk(self, n_chunks, dim=0):
170 """Splits this tensor into a tuple of tensors.
171
172 See :func:`torch.chunk`.
173 """
174 return torch.chunk(self, n_chunks, dim)
175
176 def matmul(self, other):
177 """Matrix product of two tensors.
178
179 See :func:`torch.matmul`."""
180 return torch.matmul(self, other)
181
182 def tolist(self):
183         """Returns a nested list representation of this tensor."""
184 dim = self.dim()
185 if dim == 1:
186 return [v for v in self]
187 elif dim > 0:
188 return [subt.tolist() for subt in self]
189 return []
190
191 def view_as(self, tensor):
192         """Returns this tensor viewed as the same size as the specified tensor.
193
194 This is equivalent to::
195
196 self.view(tensor.size())
197 """
198 return self.view(tensor.size())
199
200 def permute(self, *dims):
201 """Permute the dimensions of this tensor.
202
203 Args:
204 *dims (int...): The desired ordering of dimensions
205
206 Example:
207 >>> x = torch.randn(2, 3, 5)
208 >>> x.size()
209 torch.Size([2, 3, 5])
210 >>> x.permute(2, 0, 1).size()
211 torch.Size([5, 2, 3])
212 """
213 perm = list(dims)
214 tensor = self
215 n_dims = tensor.dim()
216 assert len(perm) == n_dims, 'Invalid permutation'
217 for i, p in enumerate(perm):
218 if p != i and p != -1:
219 j = i
220 while True:
221 assert 0 <= perm[j] and perm[j] < n_dims, 'Invalid permutation'
222 tensor = tensor.transpose(j, perm[j])
223 perm[j], j = -1, perm[j]
224 if perm[j] == i:
225 break
226 perm[j] = -1
227 return tensor
228
229 def expand_as(self, tensor):
230 """Expands this tensor to the size of the specified tensor.
231
232 This is equivalent to::
233
234 self.expand(tensor.size())
235 """
236 return self.expand(tensor.size())
237
238 def repeat(self, *sizes):
239 """Repeats this tensor along the specified dimensions.
240
241 Unlike :meth:`expand`, this function copies the tensor's data.
242
243 Args:
244 *sizes (torch.Size or int...): The number of times to repeat this
245 tensor along each dimension
246
247 Example:
248 >>> x = torch.Tensor([1, 2, 3])
249 >>> x.repeat(4, 2)
250 1 2 3 1 2 3
251 1 2 3 1 2 3
252 1 2 3 1 2 3
253 1 2 3 1 2 3
254 [torch.FloatTensor of size 4x6]
255 >>> x.repeat(4, 2, 1).size()
256 torch.Size([4, 2, 3])
257 """
258 # If args == (torch.Size,), then we need to unpack the tuple
259 if len(sizes) == 1 and isinstance(sizes[0], torch.Size):
260 sizes = sizes[0]
261 repeats = list(sizes)
262 result = self.new()
263 src = self.contiguous()
264
265 if len(repeats) < src.dim():
266 raise ValueError('Number of dimensions of repeat dims can not be '
267 'smaller than number of dimensions of tensor')
268
269 xtensor = src.new().set_(src)
270 xsize = list(xtensor.size())
271 for i in _range(len(repeats) - src.dim()):
272 xsize = [1] + xsize
273
274 size = torch.Size([a * b for a, b in zip(xsize, repeats)])
275 xtensor.resize_(torch.Size(xsize))
276 result.resize_(size)
277 urtensor = result.new(result)
278 for i in _range(xtensor.dim()):
279 urtensor = urtensor.unfold(i, xtensor.size(i), xtensor.size(i))
280 for i in _range(urtensor.dim() - xtensor.dim()):
281 xsize = [1] + xsize
282 xtensor.resize_(torch.Size(xsize))
283 xxtensor = xtensor.expand_as(urtensor)
284 urtensor.copy_(xxtensor)
285 return result
286
287 def masked_copy_(self, *args, **kwargs):
288 warnings.warn("masked_copy_ is deprecated and renamed to masked_scatter_, and will be removed in v0.3")
289 return self.masked_scatter_(*args, **kwargs)
290
291 # TODO: add tests for operators
292 def __add__(self, other):
293 return self.add(other)
294 __radd__ = __add__
295
296 def __iadd__(self, other):
297 return self.add_(other)
298
299 def __sub__(self, other):
300 return self.sub(other)
301
302 def __rsub__(self, other):
303 return self.new().resize_as_(self).fill_(other).add_(-1, self)
304
305 def __isub__(self, other):
306 return self.sub_(other)
307
308 def __mul__(self, other):
309 return self.mul(other)
310 __rmul__ = __mul__
311
312 def __imul__(self, other):
313 return self.mul_(other)
314
315 def __matmul__(self, other):
316 if not torch.is_tensor(other):
317 return NotImplemented
318 return self.matmul(other)
319
320 def __pow__(self, other):
321 return self.pow(other)
322
323 def __ipow__(self, other):
324 return self.pow_(other)
325
326 def __div__(self, other):
327 return self.div(other)
328 __truediv__ = __div__
329
330 def __rdiv__(self, other):
331 return self.new().resize_as_(self).fill_(other).div_(self)
332 __rtruediv__ = __rdiv__
333
334 def __idiv__(self, other):
335 return self.div_(other)
336
337 def __mod__(self, other):
338 return self.remainder(other)
339
340 def __neg__(self):
341 return self.neg()
342
343 def __eq__(self, other):
344 return self.eq(other)
345
346 def __ne__(self, other):
347 return self.ne(other)
348
349 def __lt__(self, other):
350 return self.lt(other)
351
352 def __le__(self, other):
353 return self.le(other)
354
355 def __gt__(self, other):
356 return self.gt(other)
357
358 def __ge__(self, other):
359 return self.ge(other)
360
361 # TODO: add native add or and xor in the libs
362 def __invert__(self):
363 if type(self).__name__ != 'ByteTensor':
364 raise RuntimeError('logical operations are supported on ByteTensors only')
365 return (1 - self)
366
367 def __hash__(self):
368 return id(self)
369
370     # provide user guidance when they inadvertently call autograd properties on a Tensor
371 @property
372 def data(self):
373 raise RuntimeError('cannot call .data on a torch.Tensor: did you intend to use autograd.Variable?')
374
375
376 _TensorBase.type = _type
377 _TensorBase.cuda = _cuda
378
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/torch/tensor.py b/torch/tensor.py
--- a/torch/tensor.py
+++ b/torch/tensor.py
@@ -333,6 +333,7 @@
def __idiv__(self, other):
return self.div_(other)
+ __itruediv__ = __idiv__
def __mod__(self, other):
return self.remainder(other)
| {"golden_diff": "diff --git a/torch/tensor.py b/torch/tensor.py\n--- a/torch/tensor.py\n+++ b/torch/tensor.py\n@@ -333,6 +333,7 @@\n \n def __idiv__(self, other):\n return self.div_(other)\n+ __itruediv__ = __idiv__\n \n def __mod__(self, other):\n return self.remainder(other)\n", "issue": "inplace division doesn't work in version 0.1.12_2\nThis is an ipython session. Note that the `id` doesn't remain the same for /= even though it works for div_\r\n\r\n```python\r\nIn [1]: import torch\r\n\r\nIn [2]: foo = torch.ones(3, 3)\r\n\r\nIn [3]: foo\r\nOut[3]: \r\n\r\n 1 1 1\r\n 1 1 1\r\n 1 1 1\r\n[torch.FloatTensor of size 3x3]\r\n\r\nIn [4]: id(foo)\r\nOut[4]: 140493992350728\r\n\r\nIn [5]: foo /= 2\r\n\r\nIn [6]: foo\r\nOut[6]: \r\n\r\n 0.5000 0.5000 0.5000\r\n 0.5000 0.5000 0.5000\r\n 0.5000 0.5000 0.5000\r\n[torch.FloatTensor of size 3x3]\r\n\r\nIn [7]: id(foo)\r\nOut[7]: 140493991465672\r\n\r\nIn [8]: foo = torch.ones(3, 3)\r\n\r\nIn [9]: id(foo)\r\nOut[9]: 140492895866120\r\n\r\nIn [10]: foo.div_(2)\r\nOut[10]: \r\n\r\n 0.5000 0.5000 0.5000\r\n 0.5000 0.5000 0.5000\r\n 0.5000 0.5000 0.5000\r\n[torch.FloatTensor of size 3x3]\r\n\r\nIn [11]: id(foo)\r\nOut[11]: 140492895866120\r\n```\n", "before_files": [{"content": "import torch\nimport warnings\nfrom . import _tensor_str\nfrom ._utils import _type, _cuda, _range, _rebuild_tensor\nimport sys\n\n\nclass _TensorBase(object):\n #: bool: True if this is a CUDA tensor\n is_cuda = False\n is_sparse = False\n\n # NB: This implementation is CPU only; see THPTensor_(new) for the\n # CUDA case, which handles constructing the tensor on the same GPU\n # as this tensor.\n def new(self, *args, **kwargs):\n \"\"\"Constructs a new tensor of the same data type.\"\"\"\n return self.__class__(*args, **kwargs)\n\n def type_as(self, tensor):\n \"\"\"Returns this tensor cast to the type of the given tensor.\n\n This is a no-op if the tensor is already of the correct type. 
This is\n equivalent to::\n\n self.type(tensor.type())\n\n Params:\n tensor (Tensor): the tensor which has the desired type\n \"\"\"\n return self.type(tensor.type())\n\n def cpu(self):\n \"\"\"Returns a CPU copy of this tensor if it's not already on the CPU\"\"\"\n return self.type(getattr(torch, self.__class__.__name__))\n\n def double(self):\n \"\"\"Casts this tensor to double type\"\"\"\n return self.type(type(self).__module__ + '.DoubleTensor')\n\n def float(self):\n \"\"\"Casts this tensor to float type\"\"\"\n return self.type(type(self).__module__ + '.FloatTensor')\n\n def half(self):\n \"\"\"Casts this tensor to half-precision float type\"\"\"\n return self.type(type(self).__module__ + '.HalfTensor')\n\n def long(self):\n \"\"\"Casts this tensor to long type\"\"\"\n return self.type(type(self).__module__ + '.LongTensor')\n\n def int(self):\n \"\"\"Casts this tensor to int type\"\"\"\n return self.type(type(self).__module__ + '.IntTensor')\n\n def short(self):\n \"\"\"Casts this tensor to short type\"\"\"\n return self.type(type(self).__module__ + '.ShortTensor')\n\n def char(self):\n \"\"\"Casts this tensor to char type\"\"\"\n return self.type(type(self).__module__ + '.CharTensor')\n\n def byte(self):\n \"\"\"Casts this tensor to byte type\"\"\"\n return self.type(type(self).__module__ + '.ByteTensor')\n\n def is_pinned(self):\n \"\"\"Returns true if this tensor resides in pinned memory\"\"\"\n storage = self.storage()\n return storage.is_pinned() if storage else False\n\n def pin_memory(self):\n \"\"\"Copies the tensor to pinned memory, if it's not already pinned.\"\"\"\n if self.is_cuda:\n raise TypeError(\"cannot pin '{0}' only CPU memory can be pinned\"\n .format(self.type()))\n storage = self.storage()\n if storage is None:\n storage = (self.storage_type())()\n return type(self)().set_(storage.pin_memory()).view_as(self)\n\n def share_memory_(self):\n \"\"\"Moves the underlying storage to shared memory.\n\n This is a no-op if the underlying storage is already in shared memory\n and for CUDA tensors. Tensors in shared memory cannot be resized.\n \"\"\"\n self.storage().share_memory_()\n return self\n\n def is_shared(self):\n \"\"\"Checks if tensor is in shared memory.\n\n This is always ``True`` for CUDA tensors.\n \"\"\"\n return self.storage().is_shared()\n\n @property\n def shape(self):\n \"\"\"Alias for .size()\n\n Returns a torch.Size object, containing the dimensions of the tensor\n \"\"\"\n return self.size()\n\n def __deepcopy__(self, _memo):\n memo = _memo.setdefault('torch', {})\n if self._cdata in memo:\n return memo[self._cdata]\n new_storage = self.storage().__deepcopy__(_memo)\n new_tensor = self.new()\n new_tensor.set_(new_storage, self.storage_offset(), self.size(), self.stride())\n memo[self._cdata] = new_tensor\n return new_tensor\n\n def __reduce__(self):\n # NOTE: _rebuild_tensor does not call __setstate__\n args = self.__getstate__()\n return (_rebuild_tensor, args)\n\n def __getstate__(self):\n return (self.storage(),\n self.storage_offset(),\n tuple(self.size()),\n self.stride())\n\n def __setstate__(self, state):\n self.set_(*state)\n\n def __repr__(self):\n return str(self)\n\n def __str__(self):\n # All strings are unicode in Python 3, while we have to encode unicode\n # strings in Python2. 
If we can't, let python decide the best\n # characters to replace unicode characters with.\n if sys.version_info > (3,):\n return _tensor_str._str(self)\n else:\n if hasattr(sys.stdout, 'encoding'):\n return _tensor_str._str(self).encode(\n sys.stdout.encoding or 'UTF-8', 'replace')\n else:\n return _tensor_str._str(self).encode('UTF-8', 'replace')\n\n def __bool__(self):\n if self.numel() == 0:\n return False\n raise RuntimeError(\"bool value of non-empty \" + torch.typename(self) +\n \" objects is ambiguous\")\n\n __nonzero__ = __bool__\n\n def __iter__(self):\n if self.nelement() > 0:\n return iter(map(lambda i: self.select(0, i), _range(self.size(0))))\n else:\n return iter([])\n\n def split(self, split_size, dim=0):\n \"\"\"Splits this tensor into a tuple of tensors.\n\n See :func:`torch.split`.\n \"\"\"\n return torch.split(self, split_size, dim)\n\n def chunk(self, n_chunks, dim=0):\n \"\"\"Splits this tensor into a tuple of tensors.\n\n See :func:`torch.chunk`.\n \"\"\"\n return torch.chunk(self, n_chunks, dim)\n\n def matmul(self, other):\n \"\"\"Matrix product of two tensors.\n\n See :func:`torch.matmul`.\"\"\"\n return torch.matmul(self, other)\n\n def tolist(self):\n \"\"\"Returns a nested list represenation of this tensor.\"\"\"\n dim = self.dim()\n if dim == 1:\n return [v for v in self]\n elif dim > 0:\n return [subt.tolist() for subt in self]\n return []\n\n def view_as(self, tensor):\n \"\"\"Returns this tensor viewed as the size as the specified tensor.\n\n This is equivalent to::\n\n self.view(tensor.size())\n \"\"\"\n return self.view(tensor.size())\n\n def permute(self, *dims):\n \"\"\"Permute the dimensions of this tensor.\n\n Args:\n *dims (int...): The desired ordering of dimensions\n\n Example:\n >>> x = torch.randn(2, 3, 5)\n >>> x.size()\n torch.Size([2, 3, 5])\n >>> x.permute(2, 0, 1).size()\n torch.Size([5, 2, 3])\n \"\"\"\n perm = list(dims)\n tensor = self\n n_dims = tensor.dim()\n assert len(perm) == n_dims, 'Invalid permutation'\n for i, p in enumerate(perm):\n if p != i and p != -1:\n j = i\n while True:\n assert 0 <= perm[j] and perm[j] < n_dims, 'Invalid permutation'\n tensor = tensor.transpose(j, perm[j])\n perm[j], j = -1, perm[j]\n if perm[j] == i:\n break\n perm[j] = -1\n return tensor\n\n def expand_as(self, tensor):\n \"\"\"Expands this tensor to the size of the specified tensor.\n\n This is equivalent to::\n\n self.expand(tensor.size())\n \"\"\"\n return self.expand(tensor.size())\n\n def repeat(self, *sizes):\n \"\"\"Repeats this tensor along the specified dimensions.\n\n Unlike :meth:`expand`, this function copies the tensor's data.\n\n Args:\n *sizes (torch.Size or int...): The number of times to repeat this\n tensor along each dimension\n\n Example:\n >>> x = torch.Tensor([1, 2, 3])\n >>> x.repeat(4, 2)\n 1 2 3 1 2 3\n 1 2 3 1 2 3\n 1 2 3 1 2 3\n 1 2 3 1 2 3\n [torch.FloatTensor of size 4x6]\n >>> x.repeat(4, 2, 1).size()\n torch.Size([4, 2, 3])\n \"\"\"\n # If args == (torch.Size,), then we need to unpack the tuple\n if len(sizes) == 1 and isinstance(sizes[0], torch.Size):\n sizes = sizes[0]\n repeats = list(sizes)\n result = self.new()\n src = self.contiguous()\n\n if len(repeats) < src.dim():\n raise ValueError('Number of dimensions of repeat dims can not be '\n 'smaller than number of dimensions of tensor')\n\n xtensor = src.new().set_(src)\n xsize = list(xtensor.size())\n for i in _range(len(repeats) - src.dim()):\n xsize = [1] + xsize\n\n size = torch.Size([a * b for a, b in zip(xsize, repeats)])\n xtensor.resize_(torch.Size(xsize))\n 
result.resize_(size)\n urtensor = result.new(result)\n for i in _range(xtensor.dim()):\n urtensor = urtensor.unfold(i, xtensor.size(i), xtensor.size(i))\n for i in _range(urtensor.dim() - xtensor.dim()):\n xsize = [1] + xsize\n xtensor.resize_(torch.Size(xsize))\n xxtensor = xtensor.expand_as(urtensor)\n urtensor.copy_(xxtensor)\n return result\n\n def masked_copy_(self, *args, **kwargs):\n warnings.warn(\"masked_copy_ is deprecated and renamed to masked_scatter_, and will be removed in v0.3\")\n return self.masked_scatter_(*args, **kwargs)\n\n # TODO: add tests for operators\n def __add__(self, other):\n return self.add(other)\n __radd__ = __add__\n\n def __iadd__(self, other):\n return self.add_(other)\n\n def __sub__(self, other):\n return self.sub(other)\n\n def __rsub__(self, other):\n return self.new().resize_as_(self).fill_(other).add_(-1, self)\n\n def __isub__(self, other):\n return self.sub_(other)\n\n def __mul__(self, other):\n return self.mul(other)\n __rmul__ = __mul__\n\n def __imul__(self, other):\n return self.mul_(other)\n\n def __matmul__(self, other):\n if not torch.is_tensor(other):\n return NotImplemented\n return self.matmul(other)\n\n def __pow__(self, other):\n return self.pow(other)\n\n def __ipow__(self, other):\n return self.pow_(other)\n\n def __div__(self, other):\n return self.div(other)\n __truediv__ = __div__\n\n def __rdiv__(self, other):\n return self.new().resize_as_(self).fill_(other).div_(self)\n __rtruediv__ = __rdiv__\n\n def __idiv__(self, other):\n return self.div_(other)\n\n def __mod__(self, other):\n return self.remainder(other)\n\n def __neg__(self):\n return self.neg()\n\n def __eq__(self, other):\n return self.eq(other)\n\n def __ne__(self, other):\n return self.ne(other)\n\n def __lt__(self, other):\n return self.lt(other)\n\n def __le__(self, other):\n return self.le(other)\n\n def __gt__(self, other):\n return self.gt(other)\n\n def __ge__(self, other):\n return self.ge(other)\n\n # TODO: add native add or and xor in the libs\n def __invert__(self):\n if type(self).__name__ != 'ByteTensor':\n raise RuntimeError('logical operations are supported on ByteTensors only')\n return (1 - self)\n\n def __hash__(self):\n return id(self)\n\n # provide user guidance when they inavertently call autograd properties on a Tensor\n @property\n def data(self):\n raise RuntimeError('cannot call .data on a torch.Tensor: did you intend to use autograd.Variable?')\n\n\n_TensorBase.type = _type\n_TensorBase.cuda = _cuda\n", "path": "torch/tensor.py"}], "after_files": [{"content": "import torch\nimport warnings\nfrom . import _tensor_str\nfrom ._utils import _type, _cuda, _range, _rebuild_tensor\nimport sys\n\n\nclass _TensorBase(object):\n #: bool: True if this is a CUDA tensor\n is_cuda = False\n is_sparse = False\n\n # NB: This implementation is CPU only; see THPTensor_(new) for the\n # CUDA case, which handles constructing the tensor on the same GPU\n # as this tensor.\n def new(self, *args, **kwargs):\n \"\"\"Constructs a new tensor of the same data type.\"\"\"\n return self.__class__(*args, **kwargs)\n\n def type_as(self, tensor):\n \"\"\"Returns this tensor cast to the type of the given tensor.\n\n This is a no-op if the tensor is already of the correct type. 
This is\n equivalent to::\n\n self.type(tensor.type())\n\n Params:\n tensor (Tensor): the tensor which has the desired type\n \"\"\"\n return self.type(tensor.type())\n\n def cpu(self):\n \"\"\"Returns a CPU copy of this tensor if it's not already on the CPU\"\"\"\n return self.type(getattr(torch, self.__class__.__name__))\n\n def double(self):\n \"\"\"Casts this tensor to double type\"\"\"\n return self.type(type(self).__module__ + '.DoubleTensor')\n\n def float(self):\n \"\"\"Casts this tensor to float type\"\"\"\n return self.type(type(self).__module__ + '.FloatTensor')\n\n def half(self):\n \"\"\"Casts this tensor to half-precision float type\"\"\"\n return self.type(type(self).__module__ + '.HalfTensor')\n\n def long(self):\n \"\"\"Casts this tensor to long type\"\"\"\n return self.type(type(self).__module__ + '.LongTensor')\n\n def int(self):\n \"\"\"Casts this tensor to int type\"\"\"\n return self.type(type(self).__module__ + '.IntTensor')\n\n def short(self):\n \"\"\"Casts this tensor to short type\"\"\"\n return self.type(type(self).__module__ + '.ShortTensor')\n\n def char(self):\n \"\"\"Casts this tensor to char type\"\"\"\n return self.type(type(self).__module__ + '.CharTensor')\n\n def byte(self):\n \"\"\"Casts this tensor to byte type\"\"\"\n return self.type(type(self).__module__ + '.ByteTensor')\n\n def is_pinned(self):\n \"\"\"Returns true if this tensor resides in pinned memory\"\"\"\n storage = self.storage()\n return storage.is_pinned() if storage else False\n\n def pin_memory(self):\n \"\"\"Copies the tensor to pinned memory, if it's not already pinned.\"\"\"\n if self.is_cuda:\n raise TypeError(\"cannot pin '{0}' only CPU memory can be pinned\"\n .format(self.type()))\n storage = self.storage()\n if storage is None:\n storage = (self.storage_type())()\n return type(self)().set_(storage.pin_memory()).view_as(self)\n\n def share_memory_(self):\n \"\"\"Moves the underlying storage to shared memory.\n\n This is a no-op if the underlying storage is already in shared memory\n and for CUDA tensors. Tensors in shared memory cannot be resized.\n \"\"\"\n self.storage().share_memory_()\n return self\n\n def is_shared(self):\n \"\"\"Checks if tensor is in shared memory.\n\n This is always ``True`` for CUDA tensors.\n \"\"\"\n return self.storage().is_shared()\n\n @property\n def shape(self):\n \"\"\"Alias for .size()\n\n Returns a torch.Size object, containing the dimensions of the tensor\n \"\"\"\n return self.size()\n\n def __deepcopy__(self, _memo):\n memo = _memo.setdefault('torch', {})\n if self._cdata in memo:\n return memo[self._cdata]\n new_storage = self.storage().__deepcopy__(_memo)\n new_tensor = self.new()\n new_tensor.set_(new_storage, self.storage_offset(), self.size(), self.stride())\n memo[self._cdata] = new_tensor\n return new_tensor\n\n def __reduce__(self):\n # NOTE: _rebuild_tensor does not call __setstate__\n args = self.__getstate__()\n return (_rebuild_tensor, args)\n\n def __getstate__(self):\n return (self.storage(),\n self.storage_offset(),\n tuple(self.size()),\n self.stride())\n\n def __setstate__(self, state):\n self.set_(*state)\n\n def __repr__(self):\n return str(self)\n\n def __str__(self):\n # All strings are unicode in Python 3, while we have to encode unicode\n # strings in Python2. 
If we can't, let python decide the best\n # characters to replace unicode characters with.\n if sys.version_info > (3,):\n return _tensor_str._str(self)\n else:\n if hasattr(sys.stdout, 'encoding'):\n return _tensor_str._str(self).encode(\n sys.stdout.encoding or 'UTF-8', 'replace')\n else:\n return _tensor_str._str(self).encode('UTF-8', 'replace')\n\n def __bool__(self):\n if self.numel() == 0:\n return False\n raise RuntimeError(\"bool value of non-empty \" + torch.typename(self) +\n \" objects is ambiguous\")\n\n __nonzero__ = __bool__\n\n def __iter__(self):\n if self.nelement() > 0:\n return iter(map(lambda i: self.select(0, i), _range(self.size(0))))\n else:\n return iter([])\n\n def split(self, split_size, dim=0):\n \"\"\"Splits this tensor into a tuple of tensors.\n\n See :func:`torch.split`.\n \"\"\"\n return torch.split(self, split_size, dim)\n\n def chunk(self, n_chunks, dim=0):\n \"\"\"Splits this tensor into a tuple of tensors.\n\n See :func:`torch.chunk`.\n \"\"\"\n return torch.chunk(self, n_chunks, dim)\n\n def matmul(self, other):\n \"\"\"Matrix product of two tensors.\n\n See :func:`torch.matmul`.\"\"\"\n return torch.matmul(self, other)\n\n def tolist(self):\n \"\"\"Returns a nested list represenation of this tensor.\"\"\"\n dim = self.dim()\n if dim == 1:\n return [v for v in self]\n elif dim > 0:\n return [subt.tolist() for subt in self]\n return []\n\n def view_as(self, tensor):\n \"\"\"Returns this tensor viewed as the size as the specified tensor.\n\n This is equivalent to::\n\n self.view(tensor.size())\n \"\"\"\n return self.view(tensor.size())\n\n def permute(self, *dims):\n \"\"\"Permute the dimensions of this tensor.\n\n Args:\n *dims (int...): The desired ordering of dimensions\n\n Example:\n >>> x = torch.randn(2, 3, 5)\n >>> x.size()\n torch.Size([2, 3, 5])\n >>> x.permute(2, 0, 1).size()\n torch.Size([5, 2, 3])\n \"\"\"\n perm = list(dims)\n tensor = self\n n_dims = tensor.dim()\n assert len(perm) == n_dims, 'Invalid permutation'\n for i, p in enumerate(perm):\n if p != i and p != -1:\n j = i\n while True:\n assert 0 <= perm[j] and perm[j] < n_dims, 'Invalid permutation'\n tensor = tensor.transpose(j, perm[j])\n perm[j], j = -1, perm[j]\n if perm[j] == i:\n break\n perm[j] = -1\n return tensor\n\n def expand_as(self, tensor):\n \"\"\"Expands this tensor to the size of the specified tensor.\n\n This is equivalent to::\n\n self.expand(tensor.size())\n \"\"\"\n return self.expand(tensor.size())\n\n def repeat(self, *sizes):\n \"\"\"Repeats this tensor along the specified dimensions.\n\n Unlike :meth:`expand`, this function copies the tensor's data.\n\n Args:\n *sizes (torch.Size or int...): The number of times to repeat this\n tensor along each dimension\n\n Example:\n >>> x = torch.Tensor([1, 2, 3])\n >>> x.repeat(4, 2)\n 1 2 3 1 2 3\n 1 2 3 1 2 3\n 1 2 3 1 2 3\n 1 2 3 1 2 3\n [torch.FloatTensor of size 4x6]\n >>> x.repeat(4, 2, 1).size()\n torch.Size([4, 2, 3])\n \"\"\"\n # If args == (torch.Size,), then we need to unpack the tuple\n if len(sizes) == 1 and isinstance(sizes[0], torch.Size):\n sizes = sizes[0]\n repeats = list(sizes)\n result = self.new()\n src = self.contiguous()\n\n if len(repeats) < src.dim():\n raise ValueError('Number of dimensions of repeat dims can not be '\n 'smaller than number of dimensions of tensor')\n\n xtensor = src.new().set_(src)\n xsize = list(xtensor.size())\n for i in _range(len(repeats) - src.dim()):\n xsize = [1] + xsize\n\n size = torch.Size([a * b for a, b in zip(xsize, repeats)])\n xtensor.resize_(torch.Size(xsize))\n 
result.resize_(size)\n urtensor = result.new(result)\n for i in _range(xtensor.dim()):\n urtensor = urtensor.unfold(i, xtensor.size(i), xtensor.size(i))\n for i in _range(urtensor.dim() - xtensor.dim()):\n xsize = [1] + xsize\n xtensor.resize_(torch.Size(xsize))\n xxtensor = xtensor.expand_as(urtensor)\n urtensor.copy_(xxtensor)\n return result\n\n def masked_copy_(self, *args, **kwargs):\n warnings.warn(\"masked_copy_ is deprecated and renamed to masked_scatter_, and will be removed in v0.3\")\n return self.masked_scatter_(*args, **kwargs)\n\n # TODO: add tests for operators\n def __add__(self, other):\n return self.add(other)\n __radd__ = __add__\n\n def __iadd__(self, other):\n return self.add_(other)\n\n def __sub__(self, other):\n return self.sub(other)\n\n def __rsub__(self, other):\n return self.new().resize_as_(self).fill_(other).add_(-1, self)\n\n def __isub__(self, other):\n return self.sub_(other)\n\n def __mul__(self, other):\n return self.mul(other)\n __rmul__ = __mul__\n\n def __imul__(self, other):\n return self.mul_(other)\n\n def __matmul__(self, other):\n if not torch.is_tensor(other):\n return NotImplemented\n return self.matmul(other)\n\n def __pow__(self, other):\n return self.pow(other)\n\n def __ipow__(self, other):\n return self.pow_(other)\n\n def __div__(self, other):\n return self.div(other)\n __truediv__ = __div__\n\n def __rdiv__(self, other):\n return self.new().resize_as_(self).fill_(other).div_(self)\n __rtruediv__ = __rdiv__\n\n def __idiv__(self, other):\n return self.div_(other)\n __itruediv__ = __idiv__\n\n def __mod__(self, other):\n return self.remainder(other)\n\n def __neg__(self):\n return self.neg()\n\n def __eq__(self, other):\n return self.eq(other)\n\n def __ne__(self, other):\n return self.ne(other)\n\n def __lt__(self, other):\n return self.lt(other)\n\n def __le__(self, other):\n return self.le(other)\n\n def __gt__(self, other):\n return self.gt(other)\n\n def __ge__(self, other):\n return self.ge(other)\n\n # TODO: add native add or and xor in the libs\n def __invert__(self):\n if type(self).__name__ != 'ByteTensor':\n raise RuntimeError('logical operations are supported on ByteTensors only')\n return (1 - self)\n\n def __hash__(self):\n return id(self)\n\n # provide user guidance when they inavertently call autograd properties on a Tensor\n @property\n def data(self):\n raise RuntimeError('cannot call .data on a torch.Tensor: did you intend to use autograd.Variable?')\n\n\n_TensorBase.type = _type\n_TensorBase.cuda = _cuda\n", "path": "torch/tensor.py"}]} |
gh_patches_debug_176 | rasdani/github-patches | git_diff | django-oscar__django-oscar-2404 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Basket.line_tax returns None
Due to the change in ad1094c1e8, if a line's tax is set to `Decimal('0.00')`, `Line.is_tax_known` will return `True`, but `Line.line_tax` will return `None`. Formerly in this case, `Line.line_tax` returned `Decimal('0.00')` instead of `None`. I'd suggest making `line_tax` check `is_tax_known` instead of doing its own check.
```
@property
def line_tax(self):
- if self.unit_tax:
+ if self.is_tax_known:
return self.quantity * self.unit_tax
```
See [ad1094c1e8:src/oscar/apps/basket/abstract_models.py:828](https://github.com/django-oscar/django-oscar/commit/ad1094c1e89c0314c0a883b0b0a4e618a443da22#diff-519bc8e8997795240c2a90d8d63b66baL828)
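To make the discrepancy concrete, here is a minimal standalone sketch; the `FakeLine` class and the quantity/tax values below are illustrative stand-ins, not Oscar's real models:

```python
from decimal import Decimal as D

# Illustrative stand-in for the relevant bits of AbstractLine; not Oscar's real model.
class FakeLine:
    def __init__(self, quantity, unit_tax):
        self.quantity = quantity
        self.unit_tax = unit_tax  # tax per unit: known, but zero in this scenario

    @property
    def is_tax_known(self):
        # mirrors "tax is known": a value is present, even if it is zero
        return self.unit_tax is not None

    @property
    def line_tax_current(self):
        # current behaviour: D('0.00') is falsy, so nothing is returned -> None
        if self.unit_tax:
            return self.quantity * self.unit_tax

    @property
    def line_tax_suggested(self):
        # suggested behaviour: gate on is_tax_known instead of truthiness
        if self.is_tax_known:
            return self.quantity * self.unit_tax


line = FakeLine(quantity=2, unit_tax=D('0.00'))
print(line.is_tax_known)        # True
print(line.line_tax_current)    # None (the reported problem)
print(line.line_tax_suggested)  # 0.00 (the former behaviour)
```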
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/oscar/apps/basket/abstract_models.py`
Content:
```
1 import zlib
2 from decimal import Decimal as D
3
4 from django.conf import settings
5 from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
6 from django.db import models
7 from django.db.models import Sum
8 from django.utils.encoding import python_2_unicode_compatible, smart_text
9 from django.utils.timezone import now
10 from django.utils.translation import ugettext_lazy as _
11
12 from oscar.apps.basket.managers import OpenBasketManager, SavedBasketManager
13 from oscar.apps.offer import results
14 from oscar.core.compat import AUTH_USER_MODEL
15 from oscar.core.loading import get_class
16 from oscar.core.utils import get_default_currency
17 from oscar.models.fields.slugfield import SlugField
18 from oscar.templatetags.currency_filters import currency
19
20 Unavailable = get_class('partner.availability', 'Unavailable')
21
22
23 @python_2_unicode_compatible
24 class AbstractBasket(models.Model):
25 """
26 Basket object
27 """
28 # Baskets can be anonymously owned - hence this field is nullable. When a
29 # anon user signs in, their two baskets are merged.
30 owner = models.ForeignKey(
31 AUTH_USER_MODEL,
32 null=True,
33 related_name='baskets',
34 on_delete=models.CASCADE,
35 verbose_name=_("Owner"))
36
37 # Basket statuses
38 # - Frozen is for when a basket is in the process of being submitted
39 # and we need to prevent any changes to it.
40 OPEN, MERGED, SAVED, FROZEN, SUBMITTED = (
41 "Open", "Merged", "Saved", "Frozen", "Submitted")
42 STATUS_CHOICES = (
43 (OPEN, _("Open - currently active")),
44 (MERGED, _("Merged - superceded by another basket")),
45 (SAVED, _("Saved - for items to be purchased later")),
46 (FROZEN, _("Frozen - the basket cannot be modified")),
47 (SUBMITTED, _("Submitted - has been ordered at the checkout")),
48 )
49 status = models.CharField(
50 _("Status"), max_length=128, default=OPEN, choices=STATUS_CHOICES)
51
52 # A basket can have many vouchers attached to it. However, it is common
53 # for sites to only allow one voucher per basket - this will need to be
54 # enforced in the project's codebase.
55 vouchers = models.ManyToManyField(
56 'voucher.Voucher', verbose_name=_("Vouchers"), blank=True)
57
58 date_created = models.DateTimeField(_("Date created"), auto_now_add=True)
59 date_merged = models.DateTimeField(_("Date merged"), null=True, blank=True)
60 date_submitted = models.DateTimeField(_("Date submitted"), null=True,
61 blank=True)
62
63 # Only if a basket is in one of these statuses can it be edited
64 editable_statuses = (OPEN, SAVED)
65
66 class Meta:
67 abstract = True
68 app_label = 'basket'
69 verbose_name = _('Basket')
70 verbose_name_plural = _('Baskets')
71
72 objects = models.Manager()
73 open = OpenBasketManager()
74 saved = SavedBasketManager()
75
76 def __init__(self, *args, **kwargs):
77 super(AbstractBasket, self).__init__(*args, **kwargs)
78
79 # We keep a cached copy of the basket lines as we refer to them often
80 # within the same request cycle. Also, applying offers will append
81 # discount data to the basket lines which isn't persisted to the DB and
82 # so we want to avoid reloading them as this would drop the discount
83 # information.
84 self._lines = None
85 self.offer_applications = results.OfferApplications()
86
87 def __str__(self):
88 return _(
89 u"%(status)s basket (owner: %(owner)s, lines: %(num_lines)d)") \
90 % {'status': self.status,
91 'owner': self.owner,
92 'num_lines': self.num_lines}
93
94 # ========
95 # Strategy
96 # ========
97
98 @property
99 def has_strategy(self):
100 return hasattr(self, '_strategy')
101
102 def _get_strategy(self):
103 if not self.has_strategy:
104 raise RuntimeError(
105 "No strategy class has been assigned to this basket. "
106 "This is normally assigned to the incoming request in "
107 "oscar.apps.basket.middleware.BasketMiddleware. "
108 "Since it is missing, you must be doing something different. "
109 "Ensure that a strategy instance is assigned to the basket!"
110 )
111 return self._strategy
112
113 def _set_strategy(self, strategy):
114 self._strategy = strategy
115
116 strategy = property(_get_strategy, _set_strategy)
117
118 def all_lines(self):
119 """
120 Return a cached set of basket lines.
121
122 This is important for offers as they alter the line models and you
123 don't want to reload them from the DB as that information would be
124 lost.
125 """
126 if self.id is None:
127 return self.lines.none()
128 if self._lines is None:
129 self._lines = (
130 self.lines
131 .select_related('product', 'stockrecord')
132 .prefetch_related(
133 'attributes', 'product__images')
134 .order_by(self._meta.pk.name))
135 return self._lines
136
137 def is_quantity_allowed(self, qty):
138 """
139 Test whether the passed quantity of items can be added to the basket
140 """
141 # We enforce a max threshold to prevent a DOS attack via the offers
142 # system.
143 basket_threshold = settings.OSCAR_MAX_BASKET_QUANTITY_THRESHOLD
144 if basket_threshold:
145 total_basket_quantity = self.num_items
146 max_allowed = basket_threshold - total_basket_quantity
147 if qty > max_allowed:
148 return False, _(
149 "Due to technical limitations we are not able "
150 "to ship more than %(threshold)d items in one order.") \
151 % {'threshold': basket_threshold}
152 return True, None
153
154 # ============
155 # Manipulation
156 # ============
157
158 def flush(self):
159 """
160 Remove all lines from basket.
161 """
162 if self.status == self.FROZEN:
163 raise PermissionDenied("A frozen basket cannot be flushed")
164 self.lines.all().delete()
165 self._lines = None
166
167 def add_product(self, product, quantity=1, options=None):
168 """
169 Add a product to the basket
170
171 'stock_info' is the price and availability data returned from
172 a partner strategy class.
173
174 The 'options' list should contains dicts with keys 'option' and 'value'
175 which link the relevant product.Option model and string value
176 respectively.
177
178 Returns (line, created).
179 line: the matching basket line
180 created: whether the line was created or updated
181
182 """
183 if options is None:
184 options = []
185 if not self.id:
186 self.save()
187
188 # Ensure that all lines are the same currency
189 price_currency = self.currency
190 stock_info = self.strategy.fetch_for_product(product)
191 if price_currency and stock_info.price.currency != price_currency:
192 raise ValueError((
193 "Basket lines must all have the same currency. Proposed "
194 "line has currency %s, while basket has currency %s")
195 % (stock_info.price.currency, price_currency))
196
197 if stock_info.stockrecord is None:
198 raise ValueError((
199 "Basket lines must all have stock records. Strategy hasn't "
200 "found any stock record for product %s") % product)
201
202 # Line reference is used to distinguish between variations of the same
203 # product (eg T-shirts with different personalisations)
204 line_ref = self._create_line_reference(
205 product, stock_info.stockrecord, options)
206
207 # Determine price to store (if one exists). It is only stored for
208 # audit and sometimes caching.
209 defaults = {
210 'quantity': quantity,
211 'price_excl_tax': stock_info.price.excl_tax,
212 'price_currency': stock_info.price.currency,
213 }
214 if stock_info.price.is_tax_known:
215 defaults['price_incl_tax'] = stock_info.price.incl_tax
216
217 line, created = self.lines.get_or_create(
218 line_reference=line_ref,
219 product=product,
220 stockrecord=stock_info.stockrecord,
221 defaults=defaults)
222 if created:
223 for option_dict in options:
224 line.attributes.create(option=option_dict['option'],
225 value=option_dict['value'])
226 else:
227 line.quantity = max(0, line.quantity + quantity)
228 line.save()
229 self.reset_offer_applications()
230
231 # Returning the line is useful when overriding this method.
232 return line, created
233 add_product.alters_data = True
234 add = add_product
235
236 def applied_offers(self):
237 """
238 Return a dict of offers successfully applied to the basket.
239
240 This is used to compare offers before and after a basket change to see
241 if there is a difference.
242 """
243 return self.offer_applications.offers
244
245 def reset_offer_applications(self):
246 """
247 Remove any discounts so they get recalculated
248 """
249 self.offer_applications = results.OfferApplications()
250 self._lines = None
251
252 def merge_line(self, line, add_quantities=True):
253 """
254 For transferring a line from another basket to this one.
255
256 This is used with the "Saved" basket functionality.
257 """
258 try:
259 existing_line = self.lines.get(line_reference=line.line_reference)
260 except ObjectDoesNotExist:
261 # Line does not already exist - reassign its basket
262 line.basket = self
263 line.save()
264 else:
265 # Line already exists - assume the max quantity is correct and
266 # delete the old
267 if add_quantities:
268 existing_line.quantity += line.quantity
269 else:
270 existing_line.quantity = max(existing_line.quantity,
271 line.quantity)
272 existing_line.save()
273 line.delete()
274 finally:
275 self._lines = None
276 merge_line.alters_data = True
277
278 def merge(self, basket, add_quantities=True):
279 """
280 Merges another basket with this one.
281
282 :basket: The basket to merge into this one.
283 :add_quantities: Whether to add line quantities when they are merged.
284 """
285 # Use basket.lines.all instead of all_lines as this function is called
286 # before a strategy has been assigned.
287 for line_to_merge in basket.lines.all():
288 self.merge_line(line_to_merge, add_quantities)
289 basket.status = self.MERGED
290 basket.date_merged = now()
291 basket._lines = None
292 basket.save()
293 # Ensure all vouchers are moved to the new basket
294 for voucher in basket.vouchers.all():
295 basket.vouchers.remove(voucher)
296 self.vouchers.add(voucher)
297 merge.alters_data = True
298
299 def freeze(self):
300 """
301 Freezes the basket so it cannot be modified.
302 """
303 self.status = self.FROZEN
304 self.save()
305 freeze.alters_data = True
306
307 def thaw(self):
308 """
309 Unfreezes a basket so it can be modified again
310 """
311 self.status = self.OPEN
312 self.save()
313 thaw.alters_data = True
314
315 def submit(self):
316 """
317 Mark this basket as submitted
318 """
319 self.status = self.SUBMITTED
320 self.date_submitted = now()
321 self.save()
322 submit.alters_data = True
323
324 # Kept for backwards compatibility
325 set_as_submitted = submit
326
327 def is_shipping_required(self):
328 """
329 Test whether the basket contains physical products that require
330 shipping.
331 """
332 for line in self.all_lines():
333 if line.product.is_shipping_required:
334 return True
335 return False
336
337 # =======
338 # Helpers
339 # =======
340
341 def _create_line_reference(self, product, stockrecord, options):
342 """
343 Returns a reference string for a line based on the item
344 and its options.
345 """
346 base = '%s_%s' % (product.id, stockrecord.id)
347 if not options:
348 return base
349 repr_options = [{'option': repr(option['option']),
350 'value': repr(option['value'])} for option in options]
351 return "%s_%s" % (base, zlib.crc32(repr(repr_options).encode('utf8')))
352
353 def _get_total(self, property):
354 """
355 For executing a named method on each line of the basket
356 and returning the total.
357 """
358 total = D('0.00')
359 for line in self.all_lines():
360 try:
361 total += getattr(line, property)
362 except ObjectDoesNotExist:
363 # Handle situation where the product may have been deleted
364 pass
365 except TypeError:
366 # Handle Unavailable products with no known price
367 info = self.strategy.fetch_for_product(line.product)
368 if info.availability.is_available_to_buy:
369 raise
370 pass
371 return total
372
373 # ==========
374 # Properties
375 # ==========
376
377 @property
378 def is_empty(self):
379 """
380 Test if this basket is empty
381 """
382 return self.id is None or self.num_lines == 0
383
384 @property
385 def is_tax_known(self):
386 """
387 Test if tax values are known for this basket
388 """
389 return all([line.is_tax_known for line in self.all_lines()])
390
391 @property
392 def total_excl_tax(self):
393 """
394 Return total line price excluding tax
395 """
396 return self._get_total('line_price_excl_tax_incl_discounts')
397
398 @property
399 def total_tax(self):
400 """Return total tax for a line"""
401 return self._get_total('line_tax')
402
403 @property
404 def total_incl_tax(self):
405 """
406 Return total price inclusive of tax and discounts
407 """
408 return self._get_total('line_price_incl_tax_incl_discounts')
409
410 @property
411 def total_incl_tax_excl_discounts(self):
412 """
413 Return total price inclusive of tax but exclusive discounts
414 """
415 return self._get_total('line_price_incl_tax')
416
417 @property
418 def total_discount(self):
419 return self._get_total('discount_value')
420
421 @property
422 def offer_discounts(self):
423 """
424 Return basket discounts from non-voucher sources. Does not include
425 shipping discounts.
426 """
427 return self.offer_applications.offer_discounts
428
429 @property
430 def voucher_discounts(self):
431 """
432 Return discounts from vouchers
433 """
434 return self.offer_applications.voucher_discounts
435
436 @property
437 def has_shipping_discounts(self):
438 return len(self.shipping_discounts) > 0
439
440 @property
441 def shipping_discounts(self):
442 """
443 Return discounts from vouchers
444 """
445 return self.offer_applications.shipping_discounts
446
447 @property
448 def post_order_actions(self):
449 """
450 Return discounts from vouchers
451 """
452 return self.offer_applications.post_order_actions
453
454 @property
455 def grouped_voucher_discounts(self):
456 """
457 Return discounts from vouchers but grouped so that a voucher which
458 links to multiple offers is aggregated into one object.
459 """
460 return self.offer_applications.grouped_voucher_discounts
461
462 @property
463 def total_excl_tax_excl_discounts(self):
464 """
465 Return total price excluding tax and discounts
466 """
467 return self._get_total('line_price_excl_tax')
468
469 @property
470 def num_lines(self):
471 """Return number of lines"""
472 return self.all_lines().count()
473
474 @property
475 def num_items(self):
476 """Return number of items"""
477 return sum(line.quantity for line in self.lines.all())
478
479 @property
480 def num_items_without_discount(self):
481 num = 0
482 for line in self.all_lines():
483 num += line.quantity_without_discount
484 return num
485
486 @property
487 def num_items_with_discount(self):
488 num = 0
489 for line in self.all_lines():
490 num += line.quantity_with_discount
491 return num
492
493 @property
494 def time_before_submit(self):
495 if not self.date_submitted:
496 return None
497 return self.date_submitted - self.date_created
498
499 @property
500 def time_since_creation(self, test_datetime=None):
501 if not test_datetime:
502 test_datetime = now()
503 return test_datetime - self.date_created
504
505 @property
506 def contains_a_voucher(self):
507 if not self.id:
508 return False
509 return self.vouchers.exists()
510
511 @property
512 def is_submitted(self):
513 return self.status == self.SUBMITTED
514
515 @property
516 def can_be_edited(self):
517 """
518 Test if a basket can be edited
519 """
520 return self.status in self.editable_statuses
521
522 @property
523 def currency(self):
524 # Since all lines should have the same currency, return the currency of
525 # the first one found.
526 for line in self.all_lines():
527 return line.price_currency
528
529 # =============
530 # Query methods
531 # =============
532
533 def contains_voucher(self, code):
534 """
535 Test whether the basket contains a voucher with a given code
536 """
537 if self.id is None:
538 return False
539 try:
540 self.vouchers.get(code=code)
541 except ObjectDoesNotExist:
542 return False
543 else:
544 return True
545
546 def product_quantity(self, product):
547 """
548 Return the quantity of a product in the basket
549
550 The basket can contain multiple lines with the same product, but
551 different options and stockrecords. Those quantities are summed up.
552 """
553 matching_lines = self.lines.filter(product=product)
554 quantity = matching_lines.aggregate(Sum('quantity'))['quantity__sum']
555 return quantity or 0
556
557 def line_quantity(self, product, stockrecord, options=None):
558 """
559 Return the current quantity of a specific product and options
560 """
561 ref = self._create_line_reference(product, stockrecord, options)
562 try:
563 return self.lines.get(line_reference=ref).quantity
564 except ObjectDoesNotExist:
565 return 0
566
567
568 @python_2_unicode_compatible
569 class AbstractLine(models.Model):
570 """A line of a basket (product and a quantity)
571
572 Common approaches on ordering basket lines:
573
574 a) First added at top. That's the history-like approach; new items are
575 added to the bottom of the list. Changing quantities doesn't impact
576 position.
577 Oscar does this by default. It just sorts by Line.pk, which is
578 guaranteed to increment after each creation.
579
580 b) Last modified at top. That means items move to the top when you add
581 another one, and new items are added to the top as well. Amazon
582 mostly does this, but doesn't change the position when you update
583 the quantity in the basket view.
584 To get this behaviour, add a date_updated field, change
585 Meta.ordering and optionally do something similar on wishlist lines.
586 Order lines should already be created in the order of the basket
587 lines, and are sorted by their primary key, so no changes should be
588 necessary there.
589
590 """
591 basket = models.ForeignKey(
592 'basket.Basket',
593 on_delete=models.CASCADE,
594 related_name='lines',
595 verbose_name=_("Basket"))
596
597 # This is to determine which products belong to the same line
598 # We can't just use product.id as you can have customised products
599 # which should be treated as separate lines. Set as a
600 # SlugField as it is included in the path for certain views.
601 line_reference = SlugField(
602 _("Line Reference"), max_length=128, db_index=True)
603
604 product = models.ForeignKey(
605 'catalogue.Product',
606 on_delete=models.CASCADE,
607 related_name='basket_lines',
608 verbose_name=_("Product"))
609
610 # We store the stockrecord that should be used to fulfil this line.
611 stockrecord = models.ForeignKey(
612 'partner.StockRecord',
613 on_delete=models.CASCADE,
614 related_name='basket_lines')
615
616 quantity = models.PositiveIntegerField(_('Quantity'), default=1)
617
618 # We store the unit price incl tax of the product when it is first added to
619 # the basket. This allows us to tell if a product has changed price since
620 # a person first added it to their basket.
621 price_currency = models.CharField(
622 _("Currency"), max_length=12, default=get_default_currency)
623 price_excl_tax = models.DecimalField(
624 _('Price excl. Tax'), decimal_places=2, max_digits=12,
625 null=True)
626 price_incl_tax = models.DecimalField(
627 _('Price incl. Tax'), decimal_places=2, max_digits=12, null=True)
628
629 # Track date of first addition
630 date_created = models.DateTimeField(_("Date Created"), auto_now_add=True)
631
632 def __init__(self, *args, **kwargs):
633 super(AbstractLine, self).__init__(*args, **kwargs)
634 # Instance variables used to persist discount information
635 self._discount_excl_tax = D('0.00')
636 self._discount_incl_tax = D('0.00')
637 self._affected_quantity = 0
638
639 class Meta:
640 abstract = True
641 app_label = 'basket'
642 # Enforce sorting by order of creation.
643 ordering = ['date_created', 'pk']
644 unique_together = ("basket", "line_reference")
645 verbose_name = _('Basket line')
646 verbose_name_plural = _('Basket lines')
647
648 def __str__(self):
649 return _(
650 u"Basket #%(basket_id)d, Product #%(product_id)d, quantity"
651 u" %(quantity)d") % {'basket_id': self.basket.pk,
652 'product_id': self.product.pk,
653 'quantity': self.quantity}
654
655 def save(self, *args, **kwargs):
656 if not self.basket.can_be_edited:
657 raise PermissionDenied(
658 _("You cannot modify a %s basket") % (
659 self.basket.status.lower(),))
660 return super(AbstractLine, self).save(*args, **kwargs)
661
662 # =============
663 # Offer methods
664 # =============
665
666 def clear_discount(self):
667 """
668 Remove any discounts from this line.
669 """
670 self._discount_excl_tax = D('0.00')
671 self._discount_incl_tax = D('0.00')
672 self._affected_quantity = 0
673
674 def discount(self, discount_value, affected_quantity, incl_tax=True):
675 """
676 Apply a discount to this line
677 """
678 if incl_tax:
679 if self._discount_excl_tax > 0:
680 raise RuntimeError(
681 "Attempting to discount the tax-inclusive price of a line "
682 "when tax-exclusive discounts are already applied")
683 self._discount_incl_tax += discount_value
684 else:
685 if self._discount_incl_tax > 0:
686 raise RuntimeError(
687 "Attempting to discount the tax-exclusive price of a line "
688 "when tax-inclusive discounts are already applied")
689 self._discount_excl_tax += discount_value
690 self._affected_quantity += int(affected_quantity)
691
692 def consume(self, quantity):
693 """
694 Mark all or part of the line as 'consumed'
695
696 Consumed items are no longer available to be used in offers.
697 """
698 if quantity > self.quantity - self._affected_quantity:
699 inc = self.quantity - self._affected_quantity
700 else:
701 inc = quantity
702 self._affected_quantity += int(inc)
703
704 def get_price_breakdown(self):
705 """
706 Return a breakdown of line prices after discounts have been applied.
707
708 Returns a list of (unit_price_incl_tax, unit_price_excl_tax, quantity)
709 tuples.
710 """
711 if not self.is_tax_known:
712 raise RuntimeError("A price breakdown can only be determined "
713 "when taxes are known")
714 prices = []
715 if not self.discount_value:
716 prices.append((self.unit_price_incl_tax, self.unit_price_excl_tax,
717 self.quantity))
718 else:
719 # Need to split the discount among the affected quantity
720 # of products.
721 item_incl_tax_discount = (
722 self.discount_value / int(self._affected_quantity))
723 item_excl_tax_discount = item_incl_tax_discount * self._tax_ratio
724 item_excl_tax_discount = item_excl_tax_discount.quantize(D('0.01'))
725 prices.append((self.unit_price_incl_tax - item_incl_tax_discount,
726 self.unit_price_excl_tax - item_excl_tax_discount,
727 self._affected_quantity))
728 if self.quantity_without_discount:
729 prices.append((self.unit_price_incl_tax,
730 self.unit_price_excl_tax,
731 self.quantity_without_discount))
732 return prices
733
734 # =======
735 # Helpers
736 # =======
737
738 @property
739 def _tax_ratio(self):
740 if not self.unit_price_incl_tax:
741 return 0
742 return self.unit_price_excl_tax / self.unit_price_incl_tax
743
744 # ==========
745 # Properties
746 # ==========
747
748 @property
749 def has_discount(self):
750 return self.quantity > self.quantity_without_discount
751
752 @property
753 def quantity_with_discount(self):
754 return self._affected_quantity
755
756 @property
757 def quantity_without_discount(self):
758 return int(self.quantity - self._affected_quantity)
759
760 @property
761 def is_available_for_discount(self):
762 return self.quantity_without_discount > 0
763
764 @property
765 def discount_value(self):
766 # Only one of the incl- and excl- discounts should be non-zero
767 return max(self._discount_incl_tax, self._discount_excl_tax)
768
769 @property
770 def purchase_info(self):
771 """
772 Return the stock/price info
773 """
774 if not hasattr(self, '_info'):
775 # Cache the PurchaseInfo instance.
776 self._info = self.basket.strategy.fetch_for_line(
777 self, self.stockrecord)
778 return self._info
779
780 @property
781 def is_tax_known(self):
782 return self.purchase_info.price.is_tax_known
783
784 @property
785 def unit_effective_price(self):
786 """
787 The price to use for offer calculations
788 """
789 return self.purchase_info.price.effective_price
790
791 @property
792 def unit_price_excl_tax(self):
793 return self.purchase_info.price.excl_tax
794
795 @property
796 def unit_price_incl_tax(self):
797 return self.purchase_info.price.incl_tax
798
799 @property
800 def unit_tax(self):
801 return self.purchase_info.price.tax
802
803 @property
804 def line_price_excl_tax(self):
805 if self.unit_price_excl_tax is not None:
806 return self.quantity * self.unit_price_excl_tax
807
808 @property
809 def line_price_excl_tax_incl_discounts(self):
810 if self._discount_excl_tax and self.line_price_excl_tax is not None:
811 return self.line_price_excl_tax - self._discount_excl_tax
812 if self._discount_incl_tax and self.line_price_incl_tax is not None:
813 # This is a tricky situation. We know the discount as calculated
814 # against tax inclusive prices but we need to guess how much of the
815 # discount applies to tax-exclusive prices. We do this by
816 # assuming a linear tax and scaling down the original discount.
817 return self.line_price_excl_tax \
818 - self._tax_ratio * self._discount_incl_tax
819 return self.line_price_excl_tax
820
821 @property
822 def line_price_incl_tax_incl_discounts(self):
823 # We use whichever discount value is set. If the discount value was
824 # calculated against the tax-exclusive prices, then the line price
825 # including tax
826 if self.line_price_incl_tax is not None:
827 return self.line_price_incl_tax - self.discount_value
828
829 @property
830 def line_tax(self):
831 if self.unit_tax:
832 return self.quantity * self.unit_tax
833
834 @property
835 def line_price_incl_tax(self):
836 if self.unit_price_incl_tax is not None:
837 return self.quantity * self.unit_price_incl_tax
838
839 @property
840 def description(self):
841 d = smart_text(self.product)
842 ops = []
843 for attribute in self.attributes.all():
844 ops.append("%s = '%s'" % (attribute.option.name, attribute.value))
845 if ops:
846 d = "%s (%s)" % (d, ", ".join(ops))
847 return d
848
849 def get_warning(self):
850 """
851 Return a warning message about this basket line if one is applicable
852
853 This could be things like the price has changed
854 """
855 if isinstance(self.purchase_info.availability, Unavailable):
856 msg = u"'%(product)s' is no longer available"
857 return _(msg) % {'product': self.product.get_title()}
858
859 if not self.price_incl_tax:
860 return
861 if not self.purchase_info.price.is_tax_known:
862 return
863
864 # Compare current price to price when added to basket
865 current_price_incl_tax = self.purchase_info.price.incl_tax
866 if current_price_incl_tax != self.price_incl_tax:
867 product_prices = {
868 'product': self.product.get_title(),
869 'old_price': currency(self.price_incl_tax),
870 'new_price': currency(current_price_incl_tax)
871 }
872 if current_price_incl_tax > self.price_incl_tax:
873 warning = _("The price of '%(product)s' has increased from"
874 " %(old_price)s to %(new_price)s since you added"
875 " it to your basket")
876 return warning % product_prices
877 else:
878 warning = _("The price of '%(product)s' has decreased from"
879 " %(old_price)s to %(new_price)s since you added"
880 " it to your basket")
881 return warning % product_prices
882
883
884 class AbstractLineAttribute(models.Model):
885 """
886 An attribute of a basket line
887 """
888 line = models.ForeignKey(
889 'basket.Line',
890 on_delete=models.CASCADE,
891 related_name='attributes',
892 verbose_name=_("Line"))
893 option = models.ForeignKey(
894 'catalogue.Option',
895 on_delete=models.CASCADE,
896 verbose_name=_("Option"))
897 value = models.CharField(_("Value"), max_length=255)
898
899 class Meta:
900 abstract = True
901 app_label = 'basket'
902 verbose_name = _('Line attribute')
903 verbose_name_plural = _('Line attributes')
904
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/oscar/apps/basket/abstract_models.py b/src/oscar/apps/basket/abstract_models.py
--- a/src/oscar/apps/basket/abstract_models.py
+++ b/src/oscar/apps/basket/abstract_models.py
@@ -828,7 +828,7 @@
@property
def line_tax(self):
- if self.unit_tax:
+ if self.is_tax_known:
return self.quantity * self.unit_tax
@property
| {"golden_diff": "diff --git a/src/oscar/apps/basket/abstract_models.py b/src/oscar/apps/basket/abstract_models.py\n--- a/src/oscar/apps/basket/abstract_models.py\n+++ b/src/oscar/apps/basket/abstract_models.py\n@@ -828,7 +828,7 @@\n \n @property\n def line_tax(self):\n- if self.unit_tax:\n+ if self.is_tax_known:\n return self.quantity * self.unit_tax\n \n @property\n", "issue": "Basket.line_tax returns None\nDue to the change in ad1094c1e8, if a line's tax is set to `Decimal('0.00')`, `Line.is_tax_known` will return `True`, but `Line.line_tax` will return `None`. Formerly in this case, `Line.line_tax` returned `Decimal('0.00')` instead of `None`. I'd suggest making `line_tax` check `is_tax_known` instead of doing it's own check.\r\n\r\n```\r\n @property\r\n def line_tax(self):\r\n- if self.unit_tax:\r\n+ if self.is_tax_known:\r\n return self.quantity * self.unit_tax\r\n``` \r\n\r\nSee [ad1094c1e8:src/oscar/apps/basket/abstract_models.py:828](https://github.com/django-oscar/django-oscar/commit/ad1094c1e89c0314c0a883b0b0a4e618a443da22#diff-519bc8e8997795240c2a90d8d63b66baL828)\n", "before_files": [{"content": "import zlib\nfrom decimal import Decimal as D\n\nfrom django.conf import settings\nfrom django.core.exceptions import ObjectDoesNotExist, PermissionDenied\nfrom django.db import models\nfrom django.db.models import Sum\nfrom django.utils.encoding import python_2_unicode_compatible, smart_text\nfrom django.utils.timezone import now\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom oscar.apps.basket.managers import OpenBasketManager, SavedBasketManager\nfrom oscar.apps.offer import results\nfrom oscar.core.compat import AUTH_USER_MODEL\nfrom oscar.core.loading import get_class\nfrom oscar.core.utils import get_default_currency\nfrom oscar.models.fields.slugfield import SlugField\nfrom oscar.templatetags.currency_filters import currency\n\nUnavailable = get_class('partner.availability', 'Unavailable')\n\n\n@python_2_unicode_compatible\nclass AbstractBasket(models.Model):\n \"\"\"\n Basket object\n \"\"\"\n # Baskets can be anonymously owned - hence this field is nullable. When a\n # anon user signs in, their two baskets are merged.\n owner = models.ForeignKey(\n AUTH_USER_MODEL,\n null=True,\n related_name='baskets',\n on_delete=models.CASCADE,\n verbose_name=_(\"Owner\"))\n\n # Basket statuses\n # - Frozen is for when a basket is in the process of being submitted\n # and we need to prevent any changes to it.\n OPEN, MERGED, SAVED, FROZEN, SUBMITTED = (\n \"Open\", \"Merged\", \"Saved\", \"Frozen\", \"Submitted\")\n STATUS_CHOICES = (\n (OPEN, _(\"Open - currently active\")),\n (MERGED, _(\"Merged - superceded by another basket\")),\n (SAVED, _(\"Saved - for items to be purchased later\")),\n (FROZEN, _(\"Frozen - the basket cannot be modified\")),\n (SUBMITTED, _(\"Submitted - has been ordered at the checkout\")),\n )\n status = models.CharField(\n _(\"Status\"), max_length=128, default=OPEN, choices=STATUS_CHOICES)\n\n # A basket can have many vouchers attached to it. 
However, it is common\n # for sites to only allow one voucher per basket - this will need to be\n # enforced in the project's codebase.\n vouchers = models.ManyToManyField(\n 'voucher.Voucher', verbose_name=_(\"Vouchers\"), blank=True)\n\n date_created = models.DateTimeField(_(\"Date created\"), auto_now_add=True)\n date_merged = models.DateTimeField(_(\"Date merged\"), null=True, blank=True)\n date_submitted = models.DateTimeField(_(\"Date submitted\"), null=True,\n blank=True)\n\n # Only if a basket is in one of these statuses can it be edited\n editable_statuses = (OPEN, SAVED)\n\n class Meta:\n abstract = True\n app_label = 'basket'\n verbose_name = _('Basket')\n verbose_name_plural = _('Baskets')\n\n objects = models.Manager()\n open = OpenBasketManager()\n saved = SavedBasketManager()\n\n def __init__(self, *args, **kwargs):\n super(AbstractBasket, self).__init__(*args, **kwargs)\n\n # We keep a cached copy of the basket lines as we refer to them often\n # within the same request cycle. Also, applying offers will append\n # discount data to the basket lines which isn't persisted to the DB and\n # so we want to avoid reloading them as this would drop the discount\n # information.\n self._lines = None\n self.offer_applications = results.OfferApplications()\n\n def __str__(self):\n return _(\n u\"%(status)s basket (owner: %(owner)s, lines: %(num_lines)d)\") \\\n % {'status': self.status,\n 'owner': self.owner,\n 'num_lines': self.num_lines}\n\n # ========\n # Strategy\n # ========\n\n @property\n def has_strategy(self):\n return hasattr(self, '_strategy')\n\n def _get_strategy(self):\n if not self.has_strategy:\n raise RuntimeError(\n \"No strategy class has been assigned to this basket. \"\n \"This is normally assigned to the incoming request in \"\n \"oscar.apps.basket.middleware.BasketMiddleware. \"\n \"Since it is missing, you must be doing something different. 
\"\n \"Ensure that a strategy instance is assigned to the basket!\"\n )\n return self._strategy\n\n def _set_strategy(self, strategy):\n self._strategy = strategy\n\n strategy = property(_get_strategy, _set_strategy)\n\n def all_lines(self):\n \"\"\"\n Return a cached set of basket lines.\n\n This is important for offers as they alter the line models and you\n don't want to reload them from the DB as that information would be\n lost.\n \"\"\"\n if self.id is None:\n return self.lines.none()\n if self._lines is None:\n self._lines = (\n self.lines\n .select_related('product', 'stockrecord')\n .prefetch_related(\n 'attributes', 'product__images')\n .order_by(self._meta.pk.name))\n return self._lines\n\n def is_quantity_allowed(self, qty):\n \"\"\"\n Test whether the passed quantity of items can be added to the basket\n \"\"\"\n # We enforce a max threshold to prevent a DOS attack via the offers\n # system.\n basket_threshold = settings.OSCAR_MAX_BASKET_QUANTITY_THRESHOLD\n if basket_threshold:\n total_basket_quantity = self.num_items\n max_allowed = basket_threshold - total_basket_quantity\n if qty > max_allowed:\n return False, _(\n \"Due to technical limitations we are not able \"\n \"to ship more than %(threshold)d items in one order.\") \\\n % {'threshold': basket_threshold}\n return True, None\n\n # ============\n # Manipulation\n # ============\n\n def flush(self):\n \"\"\"\n Remove all lines from basket.\n \"\"\"\n if self.status == self.FROZEN:\n raise PermissionDenied(\"A frozen basket cannot be flushed\")\n self.lines.all().delete()\n self._lines = None\n\n def add_product(self, product, quantity=1, options=None):\n \"\"\"\n Add a product to the basket\n\n 'stock_info' is the price and availability data returned from\n a partner strategy class.\n\n The 'options' list should contains dicts with keys 'option' and 'value'\n which link the relevant product.Option model and string value\n respectively.\n\n Returns (line, created).\n line: the matching basket line\n created: whether the line was created or updated\n\n \"\"\"\n if options is None:\n options = []\n if not self.id:\n self.save()\n\n # Ensure that all lines are the same currency\n price_currency = self.currency\n stock_info = self.strategy.fetch_for_product(product)\n if price_currency and stock_info.price.currency != price_currency:\n raise ValueError((\n \"Basket lines must all have the same currency. Proposed \"\n \"line has currency %s, while basket has currency %s\")\n % (stock_info.price.currency, price_currency))\n\n if stock_info.stockrecord is None:\n raise ValueError((\n \"Basket lines must all have stock records. Strategy hasn't \"\n \"found any stock record for product %s\") % product)\n\n # Line reference is used to distinguish between variations of the same\n # product (eg T-shirts with different personalisations)\n line_ref = self._create_line_reference(\n product, stock_info.stockrecord, options)\n\n # Determine price to store (if one exists). 
It is only stored for\n # audit and sometimes caching.\n defaults = {\n 'quantity': quantity,\n 'price_excl_tax': stock_info.price.excl_tax,\n 'price_currency': stock_info.price.currency,\n }\n if stock_info.price.is_tax_known:\n defaults['price_incl_tax'] = stock_info.price.incl_tax\n\n line, created = self.lines.get_or_create(\n line_reference=line_ref,\n product=product,\n stockrecord=stock_info.stockrecord,\n defaults=defaults)\n if created:\n for option_dict in options:\n line.attributes.create(option=option_dict['option'],\n value=option_dict['value'])\n else:\n line.quantity = max(0, line.quantity + quantity)\n line.save()\n self.reset_offer_applications()\n\n # Returning the line is useful when overriding this method.\n return line, created\n add_product.alters_data = True\n add = add_product\n\n def applied_offers(self):\n \"\"\"\n Return a dict of offers successfully applied to the basket.\n\n This is used to compare offers before and after a basket change to see\n if there is a difference.\n \"\"\"\n return self.offer_applications.offers\n\n def reset_offer_applications(self):\n \"\"\"\n Remove any discounts so they get recalculated\n \"\"\"\n self.offer_applications = results.OfferApplications()\n self._lines = None\n\n def merge_line(self, line, add_quantities=True):\n \"\"\"\n For transferring a line from another basket to this one.\n\n This is used with the \"Saved\" basket functionality.\n \"\"\"\n try:\n existing_line = self.lines.get(line_reference=line.line_reference)\n except ObjectDoesNotExist:\n # Line does not already exist - reassign its basket\n line.basket = self\n line.save()\n else:\n # Line already exists - assume the max quantity is correct and\n # delete the old\n if add_quantities:\n existing_line.quantity += line.quantity\n else:\n existing_line.quantity = max(existing_line.quantity,\n line.quantity)\n existing_line.save()\n line.delete()\n finally:\n self._lines = None\n merge_line.alters_data = True\n\n def merge(self, basket, add_quantities=True):\n \"\"\"\n Merges another basket with this one.\n\n :basket: The basket to merge into this one.\n :add_quantities: Whether to add line quantities when they are merged.\n \"\"\"\n # Use basket.lines.all instead of all_lines as this function is called\n # before a strategy has been assigned.\n for line_to_merge in basket.lines.all():\n self.merge_line(line_to_merge, add_quantities)\n basket.status = self.MERGED\n basket.date_merged = now()\n basket._lines = None\n basket.save()\n # Ensure all vouchers are moved to the new basket\n for voucher in basket.vouchers.all():\n basket.vouchers.remove(voucher)\n self.vouchers.add(voucher)\n merge.alters_data = True\n\n def freeze(self):\n \"\"\"\n Freezes the basket so it cannot be modified.\n \"\"\"\n self.status = self.FROZEN\n self.save()\n freeze.alters_data = True\n\n def thaw(self):\n \"\"\"\n Unfreezes a basket so it can be modified again\n \"\"\"\n self.status = self.OPEN\n self.save()\n thaw.alters_data = True\n\n def submit(self):\n \"\"\"\n Mark this basket as submitted\n \"\"\"\n self.status = self.SUBMITTED\n self.date_submitted = now()\n self.save()\n submit.alters_data = True\n\n # Kept for backwards compatibility\n set_as_submitted = submit\n\n def is_shipping_required(self):\n \"\"\"\n Test whether the basket contains physical products that require\n shipping.\n \"\"\"\n for line in self.all_lines():\n if line.product.is_shipping_required:\n return True\n return False\n\n # =======\n # Helpers\n # =======\n\n def _create_line_reference(self, product, 
stockrecord, options):\n \"\"\"\n Returns a reference string for a line based on the item\n and its options.\n \"\"\"\n base = '%s_%s' % (product.id, stockrecord.id)\n if not options:\n return base\n repr_options = [{'option': repr(option['option']),\n 'value': repr(option['value'])} for option in options]\n return \"%s_%s\" % (base, zlib.crc32(repr(repr_options).encode('utf8')))\n\n def _get_total(self, property):\n \"\"\"\n For executing a named method on each line of the basket\n and returning the total.\n \"\"\"\n total = D('0.00')\n for line in self.all_lines():\n try:\n total += getattr(line, property)\n except ObjectDoesNotExist:\n # Handle situation where the product may have been deleted\n pass\n except TypeError:\n # Handle Unavailable products with no known price\n info = self.strategy.fetch_for_product(line.product)\n if info.availability.is_available_to_buy:\n raise\n pass\n return total\n\n # ==========\n # Properties\n # ==========\n\n @property\n def is_empty(self):\n \"\"\"\n Test if this basket is empty\n \"\"\"\n return self.id is None or self.num_lines == 0\n\n @property\n def is_tax_known(self):\n \"\"\"\n Test if tax values are known for this basket\n \"\"\"\n return all([line.is_tax_known for line in self.all_lines()])\n\n @property\n def total_excl_tax(self):\n \"\"\"\n Return total line price excluding tax\n \"\"\"\n return self._get_total('line_price_excl_tax_incl_discounts')\n\n @property\n def total_tax(self):\n \"\"\"Return total tax for a line\"\"\"\n return self._get_total('line_tax')\n\n @property\n def total_incl_tax(self):\n \"\"\"\n Return total price inclusive of tax and discounts\n \"\"\"\n return self._get_total('line_price_incl_tax_incl_discounts')\n\n @property\n def total_incl_tax_excl_discounts(self):\n \"\"\"\n Return total price inclusive of tax but exclusive discounts\n \"\"\"\n return self._get_total('line_price_incl_tax')\n\n @property\n def total_discount(self):\n return self._get_total('discount_value')\n\n @property\n def offer_discounts(self):\n \"\"\"\n Return basket discounts from non-voucher sources. 
Does not include\n shipping discounts.\n \"\"\"\n return self.offer_applications.offer_discounts\n\n @property\n def voucher_discounts(self):\n \"\"\"\n Return discounts from vouchers\n \"\"\"\n return self.offer_applications.voucher_discounts\n\n @property\n def has_shipping_discounts(self):\n return len(self.shipping_discounts) > 0\n\n @property\n def shipping_discounts(self):\n \"\"\"\n Return discounts from vouchers\n \"\"\"\n return self.offer_applications.shipping_discounts\n\n @property\n def post_order_actions(self):\n \"\"\"\n Return discounts from vouchers\n \"\"\"\n return self.offer_applications.post_order_actions\n\n @property\n def grouped_voucher_discounts(self):\n \"\"\"\n Return discounts from vouchers but grouped so that a voucher which\n links to multiple offers is aggregated into one object.\n \"\"\"\n return self.offer_applications.grouped_voucher_discounts\n\n @property\n def total_excl_tax_excl_discounts(self):\n \"\"\"\n Return total price excluding tax and discounts\n \"\"\"\n return self._get_total('line_price_excl_tax')\n\n @property\n def num_lines(self):\n \"\"\"Return number of lines\"\"\"\n return self.all_lines().count()\n\n @property\n def num_items(self):\n \"\"\"Return number of items\"\"\"\n return sum(line.quantity for line in self.lines.all())\n\n @property\n def num_items_without_discount(self):\n num = 0\n for line in self.all_lines():\n num += line.quantity_without_discount\n return num\n\n @property\n def num_items_with_discount(self):\n num = 0\n for line in self.all_lines():\n num += line.quantity_with_discount\n return num\n\n @property\n def time_before_submit(self):\n if not self.date_submitted:\n return None\n return self.date_submitted - self.date_created\n\n @property\n def time_since_creation(self, test_datetime=None):\n if not test_datetime:\n test_datetime = now()\n return test_datetime - self.date_created\n\n @property\n def contains_a_voucher(self):\n if not self.id:\n return False\n return self.vouchers.exists()\n\n @property\n def is_submitted(self):\n return self.status == self.SUBMITTED\n\n @property\n def can_be_edited(self):\n \"\"\"\n Test if a basket can be edited\n \"\"\"\n return self.status in self.editable_statuses\n\n @property\n def currency(self):\n # Since all lines should have the same currency, return the currency of\n # the first one found.\n for line in self.all_lines():\n return line.price_currency\n\n # =============\n # Query methods\n # =============\n\n def contains_voucher(self, code):\n \"\"\"\n Test whether the basket contains a voucher with a given code\n \"\"\"\n if self.id is None:\n return False\n try:\n self.vouchers.get(code=code)\n except ObjectDoesNotExist:\n return False\n else:\n return True\n\n def product_quantity(self, product):\n \"\"\"\n Return the quantity of a product in the basket\n\n The basket can contain multiple lines with the same product, but\n different options and stockrecords. 
Those quantities are summed up.\n \"\"\"\n matching_lines = self.lines.filter(product=product)\n quantity = matching_lines.aggregate(Sum('quantity'))['quantity__sum']\n return quantity or 0\n\n def line_quantity(self, product, stockrecord, options=None):\n \"\"\"\n Return the current quantity of a specific product and options\n \"\"\"\n ref = self._create_line_reference(product, stockrecord, options)\n try:\n return self.lines.get(line_reference=ref).quantity\n except ObjectDoesNotExist:\n return 0\n\n\n@python_2_unicode_compatible\nclass AbstractLine(models.Model):\n \"\"\"A line of a basket (product and a quantity)\n\n Common approaches on ordering basket lines:\n\n a) First added at top. That's the history-like approach; new items are\n added to the bottom of the list. Changing quantities doesn't impact\n position.\n Oscar does this by default. It just sorts by Line.pk, which is\n guaranteed to increment after each creation.\n\n b) Last modified at top. That means items move to the top when you add\n another one, and new items are added to the top as well. Amazon\n mostly does this, but doesn't change the position when you update\n the quantity in the basket view.\n To get this behaviour, add a date_updated field, change\n Meta.ordering and optionally do something similar on wishlist lines.\n Order lines should already be created in the order of the basket\n lines, and are sorted by their primary key, so no changes should be\n necessary there.\n\n \"\"\"\n basket = models.ForeignKey(\n 'basket.Basket',\n on_delete=models.CASCADE,\n related_name='lines',\n verbose_name=_(\"Basket\"))\n\n # This is to determine which products belong to the same line\n # We can't just use product.id as you can have customised products\n # which should be treated as separate lines. Set as a\n # SlugField as it is included in the path for certain views.\n line_reference = SlugField(\n _(\"Line Reference\"), max_length=128, db_index=True)\n\n product = models.ForeignKey(\n 'catalogue.Product',\n on_delete=models.CASCADE,\n related_name='basket_lines',\n verbose_name=_(\"Product\"))\n\n # We store the stockrecord that should be used to fulfil this line.\n stockrecord = models.ForeignKey(\n 'partner.StockRecord',\n on_delete=models.CASCADE,\n related_name='basket_lines')\n\n quantity = models.PositiveIntegerField(_('Quantity'), default=1)\n\n # We store the unit price incl tax of the product when it is first added to\n # the basket. This allows us to tell if a product has changed price since\n # a person first added it to their basket.\n price_currency = models.CharField(\n _(\"Currency\"), max_length=12, default=get_default_currency)\n price_excl_tax = models.DecimalField(\n _('Price excl. Tax'), decimal_places=2, max_digits=12,\n null=True)\n price_incl_tax = models.DecimalField(\n _('Price incl. 
Tax'), decimal_places=2, max_digits=12, null=True)\n\n # Track date of first addition\n date_created = models.DateTimeField(_(\"Date Created\"), auto_now_add=True)\n\n def __init__(self, *args, **kwargs):\n super(AbstractLine, self).__init__(*args, **kwargs)\n # Instance variables used to persist discount information\n self._discount_excl_tax = D('0.00')\n self._discount_incl_tax = D('0.00')\n self._affected_quantity = 0\n\n class Meta:\n abstract = True\n app_label = 'basket'\n # Enforce sorting by order of creation.\n ordering = ['date_created', 'pk']\n unique_together = (\"basket\", \"line_reference\")\n verbose_name = _('Basket line')\n verbose_name_plural = _('Basket lines')\n\n def __str__(self):\n return _(\n u\"Basket #%(basket_id)d, Product #%(product_id)d, quantity\"\n u\" %(quantity)d\") % {'basket_id': self.basket.pk,\n 'product_id': self.product.pk,\n 'quantity': self.quantity}\n\n def save(self, *args, **kwargs):\n if not self.basket.can_be_edited:\n raise PermissionDenied(\n _(\"You cannot modify a %s basket\") % (\n self.basket.status.lower(),))\n return super(AbstractLine, self).save(*args, **kwargs)\n\n # =============\n # Offer methods\n # =============\n\n def clear_discount(self):\n \"\"\"\n Remove any discounts from this line.\n \"\"\"\n self._discount_excl_tax = D('0.00')\n self._discount_incl_tax = D('0.00')\n self._affected_quantity = 0\n\n def discount(self, discount_value, affected_quantity, incl_tax=True):\n \"\"\"\n Apply a discount to this line\n \"\"\"\n if incl_tax:\n if self._discount_excl_tax > 0:\n raise RuntimeError(\n \"Attempting to discount the tax-inclusive price of a line \"\n \"when tax-exclusive discounts are already applied\")\n self._discount_incl_tax += discount_value\n else:\n if self._discount_incl_tax > 0:\n raise RuntimeError(\n \"Attempting to discount the tax-exclusive price of a line \"\n \"when tax-inclusive discounts are already applied\")\n self._discount_excl_tax += discount_value\n self._affected_quantity += int(affected_quantity)\n\n def consume(self, quantity):\n \"\"\"\n Mark all or part of the line as 'consumed'\n\n Consumed items are no longer available to be used in offers.\n \"\"\"\n if quantity > self.quantity - self._affected_quantity:\n inc = self.quantity - self._affected_quantity\n else:\n inc = quantity\n self._affected_quantity += int(inc)\n\n def get_price_breakdown(self):\n \"\"\"\n Return a breakdown of line prices after discounts have been applied.\n\n Returns a list of (unit_price_incl_tax, unit_price_excl_tax, quantity)\n tuples.\n \"\"\"\n if not self.is_tax_known:\n raise RuntimeError(\"A price breakdown can only be determined \"\n \"when taxes are known\")\n prices = []\n if not self.discount_value:\n prices.append((self.unit_price_incl_tax, self.unit_price_excl_tax,\n self.quantity))\n else:\n # Need to split the discount among the affected quantity\n # of products.\n item_incl_tax_discount = (\n self.discount_value / int(self._affected_quantity))\n item_excl_tax_discount = item_incl_tax_discount * self._tax_ratio\n item_excl_tax_discount = item_excl_tax_discount.quantize(D('0.01'))\n prices.append((self.unit_price_incl_tax - item_incl_tax_discount,\n self.unit_price_excl_tax - item_excl_tax_discount,\n self._affected_quantity))\n if self.quantity_without_discount:\n prices.append((self.unit_price_incl_tax,\n self.unit_price_excl_tax,\n self.quantity_without_discount))\n return prices\n\n # =======\n # Helpers\n # =======\n\n @property\n def _tax_ratio(self):\n if not self.unit_price_incl_tax:\n return 0\n 
return self.unit_price_excl_tax / self.unit_price_incl_tax\n\n # ==========\n # Properties\n # ==========\n\n @property\n def has_discount(self):\n return self.quantity > self.quantity_without_discount\n\n @property\n def quantity_with_discount(self):\n return self._affected_quantity\n\n @property\n def quantity_without_discount(self):\n return int(self.quantity - self._affected_quantity)\n\n @property\n def is_available_for_discount(self):\n return self.quantity_without_discount > 0\n\n @property\n def discount_value(self):\n # Only one of the incl- and excl- discounts should be non-zero\n return max(self._discount_incl_tax, self._discount_excl_tax)\n\n @property\n def purchase_info(self):\n \"\"\"\n Return the stock/price info\n \"\"\"\n if not hasattr(self, '_info'):\n # Cache the PurchaseInfo instance.\n self._info = self.basket.strategy.fetch_for_line(\n self, self.stockrecord)\n return self._info\n\n @property\n def is_tax_known(self):\n return self.purchase_info.price.is_tax_known\n\n @property\n def unit_effective_price(self):\n \"\"\"\n The price to use for offer calculations\n \"\"\"\n return self.purchase_info.price.effective_price\n\n @property\n def unit_price_excl_tax(self):\n return self.purchase_info.price.excl_tax\n\n @property\n def unit_price_incl_tax(self):\n return self.purchase_info.price.incl_tax\n\n @property\n def unit_tax(self):\n return self.purchase_info.price.tax\n\n @property\n def line_price_excl_tax(self):\n if self.unit_price_excl_tax is not None:\n return self.quantity * self.unit_price_excl_tax\n\n @property\n def line_price_excl_tax_incl_discounts(self):\n if self._discount_excl_tax and self.line_price_excl_tax is not None:\n return self.line_price_excl_tax - self._discount_excl_tax\n if self._discount_incl_tax and self.line_price_incl_tax is not None:\n # This is a tricky situation. We know the discount as calculated\n # against tax inclusive prices but we need to guess how much of the\n # discount applies to tax-exclusive prices. We do this by\n # assuming a linear tax and scaling down the original discount.\n return self.line_price_excl_tax \\\n - self._tax_ratio * self._discount_incl_tax\n return self.line_price_excl_tax\n\n @property\n def line_price_incl_tax_incl_discounts(self):\n # We use whichever discount value is set. 
If the discount value was\n # calculated against the tax-exclusive prices, then the line price\n # including tax\n if self.line_price_incl_tax is not None:\n return self.line_price_incl_tax - self.discount_value\n\n @property\n def line_tax(self):\n if self.unit_tax:\n return self.quantity * self.unit_tax\n\n @property\n def line_price_incl_tax(self):\n if self.unit_price_incl_tax is not None:\n return self.quantity * self.unit_price_incl_tax\n\n @property\n def description(self):\n d = smart_text(self.product)\n ops = []\n for attribute in self.attributes.all():\n ops.append(\"%s = '%s'\" % (attribute.option.name, attribute.value))\n if ops:\n d = \"%s (%s)\" % (d, \", \".join(ops))\n return d\n\n def get_warning(self):\n \"\"\"\n Return a warning message about this basket line if one is applicable\n\n This could be things like the price has changed\n \"\"\"\n if isinstance(self.purchase_info.availability, Unavailable):\n msg = u\"'%(product)s' is no longer available\"\n return _(msg) % {'product': self.product.get_title()}\n\n if not self.price_incl_tax:\n return\n if not self.purchase_info.price.is_tax_known:\n return\n\n # Compare current price to price when added to basket\n current_price_incl_tax = self.purchase_info.price.incl_tax\n if current_price_incl_tax != self.price_incl_tax:\n product_prices = {\n 'product': self.product.get_title(),\n 'old_price': currency(self.price_incl_tax),\n 'new_price': currency(current_price_incl_tax)\n }\n if current_price_incl_tax > self.price_incl_tax:\n warning = _(\"The price of '%(product)s' has increased from\"\n \" %(old_price)s to %(new_price)s since you added\"\n \" it to your basket\")\n return warning % product_prices\n else:\n warning = _(\"The price of '%(product)s' has decreased from\"\n \" %(old_price)s to %(new_price)s since you added\"\n \" it to your basket\")\n return warning % product_prices\n\n\nclass AbstractLineAttribute(models.Model):\n \"\"\"\n An attribute of a basket line\n \"\"\"\n line = models.ForeignKey(\n 'basket.Line',\n on_delete=models.CASCADE,\n related_name='attributes',\n verbose_name=_(\"Line\"))\n option = models.ForeignKey(\n 'catalogue.Option',\n on_delete=models.CASCADE,\n verbose_name=_(\"Option\"))\n value = models.CharField(_(\"Value\"), max_length=255)\n\n class Meta:\n abstract = True\n app_label = 'basket'\n verbose_name = _('Line attribute')\n verbose_name_plural = _('Line attributes')\n", "path": "src/oscar/apps/basket/abstract_models.py"}], "after_files": [{"content": "import zlib\nfrom decimal import Decimal as D\n\nfrom django.conf import settings\nfrom django.core.exceptions import ObjectDoesNotExist, PermissionDenied\nfrom django.db import models\nfrom django.db.models import Sum\nfrom django.utils.encoding import python_2_unicode_compatible, smart_text\nfrom django.utils.timezone import now\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom oscar.apps.basket.managers import OpenBasketManager, SavedBasketManager\nfrom oscar.apps.offer import results\nfrom oscar.core.compat import AUTH_USER_MODEL\nfrom oscar.core.loading import get_class\nfrom oscar.core.utils import get_default_currency\nfrom oscar.models.fields.slugfield import SlugField\nfrom oscar.templatetags.currency_filters import currency\n\nUnavailable = get_class('partner.availability', 'Unavailable')\n\n\n@python_2_unicode_compatible\nclass AbstractBasket(models.Model):\n \"\"\"\n Basket object\n \"\"\"\n # Baskets can be anonymously owned - hence this field is nullable. 
When a\n # anon user signs in, their two baskets are merged.\n owner = models.ForeignKey(\n AUTH_USER_MODEL,\n null=True,\n related_name='baskets',\n on_delete=models.CASCADE,\n verbose_name=_(\"Owner\"))\n\n # Basket statuses\n # - Frozen is for when a basket is in the process of being submitted\n # and we need to prevent any changes to it.\n OPEN, MERGED, SAVED, FROZEN, SUBMITTED = (\n \"Open\", \"Merged\", \"Saved\", \"Frozen\", \"Submitted\")\n STATUS_CHOICES = (\n (OPEN, _(\"Open - currently active\")),\n (MERGED, _(\"Merged - superceded by another basket\")),\n (SAVED, _(\"Saved - for items to be purchased later\")),\n (FROZEN, _(\"Frozen - the basket cannot be modified\")),\n (SUBMITTED, _(\"Submitted - has been ordered at the checkout\")),\n )\n status = models.CharField(\n _(\"Status\"), max_length=128, default=OPEN, choices=STATUS_CHOICES)\n\n # A basket can have many vouchers attached to it. However, it is common\n # for sites to only allow one voucher per basket - this will need to be\n # enforced in the project's codebase.\n vouchers = models.ManyToManyField(\n 'voucher.Voucher', verbose_name=_(\"Vouchers\"), blank=True)\n\n date_created = models.DateTimeField(_(\"Date created\"), auto_now_add=True)\n date_merged = models.DateTimeField(_(\"Date merged\"), null=True, blank=True)\n date_submitted = models.DateTimeField(_(\"Date submitted\"), null=True,\n blank=True)\n\n # Only if a basket is in one of these statuses can it be edited\n editable_statuses = (OPEN, SAVED)\n\n class Meta:\n abstract = True\n app_label = 'basket'\n verbose_name = _('Basket')\n verbose_name_plural = _('Baskets')\n\n objects = models.Manager()\n open = OpenBasketManager()\n saved = SavedBasketManager()\n\n def __init__(self, *args, **kwargs):\n super(AbstractBasket, self).__init__(*args, **kwargs)\n\n # We keep a cached copy of the basket lines as we refer to them often\n # within the same request cycle. Also, applying offers will append\n # discount data to the basket lines which isn't persisted to the DB and\n # so we want to avoid reloading them as this would drop the discount\n # information.\n self._lines = None\n self.offer_applications = results.OfferApplications()\n\n def __str__(self):\n return _(\n u\"%(status)s basket (owner: %(owner)s, lines: %(num_lines)d)\") \\\n % {'status': self.status,\n 'owner': self.owner,\n 'num_lines': self.num_lines}\n\n # ========\n # Strategy\n # ========\n\n @property\n def has_strategy(self):\n return hasattr(self, '_strategy')\n\n def _get_strategy(self):\n if not self.has_strategy:\n raise RuntimeError(\n \"No strategy class has been assigned to this basket. \"\n \"This is normally assigned to the incoming request in \"\n \"oscar.apps.basket.middleware.BasketMiddleware. \"\n \"Since it is missing, you must be doing something different. 
\"\n \"Ensure that a strategy instance is assigned to the basket!\"\n )\n return self._strategy\n\n def _set_strategy(self, strategy):\n self._strategy = strategy\n\n strategy = property(_get_strategy, _set_strategy)\n\n def all_lines(self):\n \"\"\"\n Return a cached set of basket lines.\n\n This is important for offers as they alter the line models and you\n don't want to reload them from the DB as that information would be\n lost.\n \"\"\"\n if self.id is None:\n return self.lines.none()\n if self._lines is None:\n self._lines = (\n self.lines\n .select_related('product', 'stockrecord')\n .prefetch_related(\n 'attributes', 'product__images')\n .order_by(self._meta.pk.name))\n return self._lines\n\n def is_quantity_allowed(self, qty):\n \"\"\"\n Test whether the passed quantity of items can be added to the basket\n \"\"\"\n # We enforce a max threshold to prevent a DOS attack via the offers\n # system.\n basket_threshold = settings.OSCAR_MAX_BASKET_QUANTITY_THRESHOLD\n if basket_threshold:\n total_basket_quantity = self.num_items\n max_allowed = basket_threshold - total_basket_quantity\n if qty > max_allowed:\n return False, _(\n \"Due to technical limitations we are not able \"\n \"to ship more than %(threshold)d items in one order.\") \\\n % {'threshold': basket_threshold}\n return True, None\n\n # ============\n # Manipulation\n # ============\n\n def flush(self):\n \"\"\"\n Remove all lines from basket.\n \"\"\"\n if self.status == self.FROZEN:\n raise PermissionDenied(\"A frozen basket cannot be flushed\")\n self.lines.all().delete()\n self._lines = None\n\n def add_product(self, product, quantity=1, options=None):\n \"\"\"\n Add a product to the basket\n\n 'stock_info' is the price and availability data returned from\n a partner strategy class.\n\n The 'options' list should contains dicts with keys 'option' and 'value'\n which link the relevant product.Option model and string value\n respectively.\n\n Returns (line, created).\n line: the matching basket line\n created: whether the line was created or updated\n\n \"\"\"\n if options is None:\n options = []\n if not self.id:\n self.save()\n\n # Ensure that all lines are the same currency\n price_currency = self.currency\n stock_info = self.strategy.fetch_for_product(product)\n if price_currency and stock_info.price.currency != price_currency:\n raise ValueError((\n \"Basket lines must all have the same currency. Proposed \"\n \"line has currency %s, while basket has currency %s\")\n % (stock_info.price.currency, price_currency))\n\n if stock_info.stockrecord is None:\n raise ValueError((\n \"Basket lines must all have stock records. Strategy hasn't \"\n \"found any stock record for product %s\") % product)\n\n # Line reference is used to distinguish between variations of the same\n # product (eg T-shirts with different personalisations)\n line_ref = self._create_line_reference(\n product, stock_info.stockrecord, options)\n\n # Determine price to store (if one exists). 
It is only stored for\n # audit and sometimes caching.\n defaults = {\n 'quantity': quantity,\n 'price_excl_tax': stock_info.price.excl_tax,\n 'price_currency': stock_info.price.currency,\n }\n if stock_info.price.is_tax_known:\n defaults['price_incl_tax'] = stock_info.price.incl_tax\n\n line, created = self.lines.get_or_create(\n line_reference=line_ref,\n product=product,\n stockrecord=stock_info.stockrecord,\n defaults=defaults)\n if created:\n for option_dict in options:\n line.attributes.create(option=option_dict['option'],\n value=option_dict['value'])\n else:\n line.quantity = max(0, line.quantity + quantity)\n line.save()\n self.reset_offer_applications()\n\n # Returning the line is useful when overriding this method.\n return line, created\n add_product.alters_data = True\n add = add_product\n\n def applied_offers(self):\n \"\"\"\n Return a dict of offers successfully applied to the basket.\n\n This is used to compare offers before and after a basket change to see\n if there is a difference.\n \"\"\"\n return self.offer_applications.offers\n\n def reset_offer_applications(self):\n \"\"\"\n Remove any discounts so they get recalculated\n \"\"\"\n self.offer_applications = results.OfferApplications()\n self._lines = None\n\n def merge_line(self, line, add_quantities=True):\n \"\"\"\n For transferring a line from another basket to this one.\n\n This is used with the \"Saved\" basket functionality.\n \"\"\"\n try:\n existing_line = self.lines.get(line_reference=line.line_reference)\n except ObjectDoesNotExist:\n # Line does not already exist - reassign its basket\n line.basket = self\n line.save()\n else:\n # Line already exists - assume the max quantity is correct and\n # delete the old\n if add_quantities:\n existing_line.quantity += line.quantity\n else:\n existing_line.quantity = max(existing_line.quantity,\n line.quantity)\n existing_line.save()\n line.delete()\n finally:\n self._lines = None\n merge_line.alters_data = True\n\n def merge(self, basket, add_quantities=True):\n \"\"\"\n Merges another basket with this one.\n\n :basket: The basket to merge into this one.\n :add_quantities: Whether to add line quantities when they are merged.\n \"\"\"\n # Use basket.lines.all instead of all_lines as this function is called\n # before a strategy has been assigned.\n for line_to_merge in basket.lines.all():\n self.merge_line(line_to_merge, add_quantities)\n basket.status = self.MERGED\n basket.date_merged = now()\n basket._lines = None\n basket.save()\n # Ensure all vouchers are moved to the new basket\n for voucher in basket.vouchers.all():\n basket.vouchers.remove(voucher)\n self.vouchers.add(voucher)\n merge.alters_data = True\n\n def freeze(self):\n \"\"\"\n Freezes the basket so it cannot be modified.\n \"\"\"\n self.status = self.FROZEN\n self.save()\n freeze.alters_data = True\n\n def thaw(self):\n \"\"\"\n Unfreezes a basket so it can be modified again\n \"\"\"\n self.status = self.OPEN\n self.save()\n thaw.alters_data = True\n\n def submit(self):\n \"\"\"\n Mark this basket as submitted\n \"\"\"\n self.status = self.SUBMITTED\n self.date_submitted = now()\n self.save()\n submit.alters_data = True\n\n # Kept for backwards compatibility\n set_as_submitted = submit\n\n def is_shipping_required(self):\n \"\"\"\n Test whether the basket contains physical products that require\n shipping.\n \"\"\"\n for line in self.all_lines():\n if line.product.is_shipping_required:\n return True\n return False\n\n # =======\n # Helpers\n # =======\n\n def _create_line_reference(self, product, 
stockrecord, options):\n \"\"\"\n Returns a reference string for a line based on the item\n and its options.\n \"\"\"\n base = '%s_%s' % (product.id, stockrecord.id)\n if not options:\n return base\n repr_options = [{'option': repr(option['option']),\n 'value': repr(option['value'])} for option in options]\n return \"%s_%s\" % (base, zlib.crc32(repr(repr_options).encode('utf8')))\n\n def _get_total(self, property):\n \"\"\"\n For executing a named method on each line of the basket\n and returning the total.\n \"\"\"\n total = D('0.00')\n for line in self.all_lines():\n try:\n total += getattr(line, property)\n except ObjectDoesNotExist:\n # Handle situation where the product may have been deleted\n pass\n except TypeError:\n # Handle Unavailable products with no known price\n info = self.strategy.fetch_for_product(line.product)\n if info.availability.is_available_to_buy:\n raise\n pass\n return total\n\n # ==========\n # Properties\n # ==========\n\n @property\n def is_empty(self):\n \"\"\"\n Test if this basket is empty\n \"\"\"\n return self.id is None or self.num_lines == 0\n\n @property\n def is_tax_known(self):\n \"\"\"\n Test if tax values are known for this basket\n \"\"\"\n return all([line.is_tax_known for line in self.all_lines()])\n\n @property\n def total_excl_tax(self):\n \"\"\"\n Return total line price excluding tax\n \"\"\"\n return self._get_total('line_price_excl_tax_incl_discounts')\n\n @property\n def total_tax(self):\n \"\"\"Return total tax for a line\"\"\"\n return self._get_total('line_tax')\n\n @property\n def total_incl_tax(self):\n \"\"\"\n Return total price inclusive of tax and discounts\n \"\"\"\n return self._get_total('line_price_incl_tax_incl_discounts')\n\n @property\n def total_incl_tax_excl_discounts(self):\n \"\"\"\n Return total price inclusive of tax but exclusive discounts\n \"\"\"\n return self._get_total('line_price_incl_tax')\n\n @property\n def total_discount(self):\n return self._get_total('discount_value')\n\n @property\n def offer_discounts(self):\n \"\"\"\n Return basket discounts from non-voucher sources. 
Does not include\n shipping discounts.\n \"\"\"\n return self.offer_applications.offer_discounts\n\n @property\n def voucher_discounts(self):\n \"\"\"\n Return discounts from vouchers\n \"\"\"\n return self.offer_applications.voucher_discounts\n\n @property\n def has_shipping_discounts(self):\n return len(self.shipping_discounts) > 0\n\n @property\n def shipping_discounts(self):\n \"\"\"\n Return discounts from vouchers\n \"\"\"\n return self.offer_applications.shipping_discounts\n\n @property\n def post_order_actions(self):\n \"\"\"\n Return discounts from vouchers\n \"\"\"\n return self.offer_applications.post_order_actions\n\n @property\n def grouped_voucher_discounts(self):\n \"\"\"\n Return discounts from vouchers but grouped so that a voucher which\n links to multiple offers is aggregated into one object.\n \"\"\"\n return self.offer_applications.grouped_voucher_discounts\n\n @property\n def total_excl_tax_excl_discounts(self):\n \"\"\"\n Return total price excluding tax and discounts\n \"\"\"\n return self._get_total('line_price_excl_tax')\n\n @property\n def num_lines(self):\n \"\"\"Return number of lines\"\"\"\n return self.all_lines().count()\n\n @property\n def num_items(self):\n \"\"\"Return number of items\"\"\"\n return sum(line.quantity for line in self.lines.all())\n\n @property\n def num_items_without_discount(self):\n num = 0\n for line in self.all_lines():\n num += line.quantity_without_discount\n return num\n\n @property\n def num_items_with_discount(self):\n num = 0\n for line in self.all_lines():\n num += line.quantity_with_discount\n return num\n\n @property\n def time_before_submit(self):\n if not self.date_submitted:\n return None\n return self.date_submitted - self.date_created\n\n @property\n def time_since_creation(self, test_datetime=None):\n if not test_datetime:\n test_datetime = now()\n return test_datetime - self.date_created\n\n @property\n def contains_a_voucher(self):\n if not self.id:\n return False\n return self.vouchers.exists()\n\n @property\n def is_submitted(self):\n return self.status == self.SUBMITTED\n\n @property\n def can_be_edited(self):\n \"\"\"\n Test if a basket can be edited\n \"\"\"\n return self.status in self.editable_statuses\n\n @property\n def currency(self):\n # Since all lines should have the same currency, return the currency of\n # the first one found.\n for line in self.all_lines():\n return line.price_currency\n\n # =============\n # Query methods\n # =============\n\n def contains_voucher(self, code):\n \"\"\"\n Test whether the basket contains a voucher with a given code\n \"\"\"\n if self.id is None:\n return False\n try:\n self.vouchers.get(code=code)\n except ObjectDoesNotExist:\n return False\n else:\n return True\n\n def product_quantity(self, product):\n \"\"\"\n Return the quantity of a product in the basket\n\n The basket can contain multiple lines with the same product, but\n different options and stockrecords. 
Those quantities are summed up.\n \"\"\"\n matching_lines = self.lines.filter(product=product)\n quantity = matching_lines.aggregate(Sum('quantity'))['quantity__sum']\n return quantity or 0\n\n def line_quantity(self, product, stockrecord, options=None):\n \"\"\"\n Return the current quantity of a specific product and options\n \"\"\"\n ref = self._create_line_reference(product, stockrecord, options)\n try:\n return self.lines.get(line_reference=ref).quantity\n except ObjectDoesNotExist:\n return 0\n\n\n@python_2_unicode_compatible\nclass AbstractLine(models.Model):\n \"\"\"A line of a basket (product and a quantity)\n\n Common approaches on ordering basket lines:\n\n a) First added at top. That's the history-like approach; new items are\n added to the bottom of the list. Changing quantities doesn't impact\n position.\n Oscar does this by default. It just sorts by Line.pk, which is\n guaranteed to increment after each creation.\n\n b) Last modified at top. That means items move to the top when you add\n another one, and new items are added to the top as well. Amazon\n mostly does this, but doesn't change the position when you update\n the quantity in the basket view.\n To get this behaviour, add a date_updated field, change\n Meta.ordering and optionally do something similar on wishlist lines.\n Order lines should already be created in the order of the basket\n lines, and are sorted by their primary key, so no changes should be\n necessary there.\n\n \"\"\"\n basket = models.ForeignKey(\n 'basket.Basket',\n on_delete=models.CASCADE,\n related_name='lines',\n verbose_name=_(\"Basket\"))\n\n # This is to determine which products belong to the same line\n # We can't just use product.id as you can have customised products\n # which should be treated as separate lines. Set as a\n # SlugField as it is included in the path for certain views.\n line_reference = SlugField(\n _(\"Line Reference\"), max_length=128, db_index=True)\n\n product = models.ForeignKey(\n 'catalogue.Product',\n on_delete=models.CASCADE,\n related_name='basket_lines',\n verbose_name=_(\"Product\"))\n\n # We store the stockrecord that should be used to fulfil this line.\n stockrecord = models.ForeignKey(\n 'partner.StockRecord',\n on_delete=models.CASCADE,\n related_name='basket_lines')\n\n quantity = models.PositiveIntegerField(_('Quantity'), default=1)\n\n # We store the unit price incl tax of the product when it is first added to\n # the basket. This allows us to tell if a product has changed price since\n # a person first added it to their basket.\n price_currency = models.CharField(\n _(\"Currency\"), max_length=12, default=get_default_currency)\n price_excl_tax = models.DecimalField(\n _('Price excl. Tax'), decimal_places=2, max_digits=12,\n null=True)\n price_incl_tax = models.DecimalField(\n _('Price incl. 
Tax'), decimal_places=2, max_digits=12, null=True)\n\n # Track date of first addition\n date_created = models.DateTimeField(_(\"Date Created\"), auto_now_add=True)\n\n def __init__(self, *args, **kwargs):\n super(AbstractLine, self).__init__(*args, **kwargs)\n # Instance variables used to persist discount information\n self._discount_excl_tax = D('0.00')\n self._discount_incl_tax = D('0.00')\n self._affected_quantity = 0\n\n class Meta:\n abstract = True\n app_label = 'basket'\n # Enforce sorting by order of creation.\n ordering = ['date_created', 'pk']\n unique_together = (\"basket\", \"line_reference\")\n verbose_name = _('Basket line')\n verbose_name_plural = _('Basket lines')\n\n def __str__(self):\n return _(\n u\"Basket #%(basket_id)d, Product #%(product_id)d, quantity\"\n u\" %(quantity)d\") % {'basket_id': self.basket.pk,\n 'product_id': self.product.pk,\n 'quantity': self.quantity}\n\n def save(self, *args, **kwargs):\n if not self.basket.can_be_edited:\n raise PermissionDenied(\n _(\"You cannot modify a %s basket\") % (\n self.basket.status.lower(),))\n return super(AbstractLine, self).save(*args, **kwargs)\n\n # =============\n # Offer methods\n # =============\n\n def clear_discount(self):\n \"\"\"\n Remove any discounts from this line.\n \"\"\"\n self._discount_excl_tax = D('0.00')\n self._discount_incl_tax = D('0.00')\n self._affected_quantity = 0\n\n def discount(self, discount_value, affected_quantity, incl_tax=True):\n \"\"\"\n Apply a discount to this line\n \"\"\"\n if incl_tax:\n if self._discount_excl_tax > 0:\n raise RuntimeError(\n \"Attempting to discount the tax-inclusive price of a line \"\n \"when tax-exclusive discounts are already applied\")\n self._discount_incl_tax += discount_value\n else:\n if self._discount_incl_tax > 0:\n raise RuntimeError(\n \"Attempting to discount the tax-exclusive price of a line \"\n \"when tax-inclusive discounts are already applied\")\n self._discount_excl_tax += discount_value\n self._affected_quantity += int(affected_quantity)\n\n def consume(self, quantity):\n \"\"\"\n Mark all or part of the line as 'consumed'\n\n Consumed items are no longer available to be used in offers.\n \"\"\"\n if quantity > self.quantity - self._affected_quantity:\n inc = self.quantity - self._affected_quantity\n else:\n inc = quantity\n self._affected_quantity += int(inc)\n\n def get_price_breakdown(self):\n \"\"\"\n Return a breakdown of line prices after discounts have been applied.\n\n Returns a list of (unit_price_incl_tax, unit_price_excl_tax, quantity)\n tuples.\n \"\"\"\n if not self.is_tax_known:\n raise RuntimeError(\"A price breakdown can only be determined \"\n \"when taxes are known\")\n prices = []\n if not self.discount_value:\n prices.append((self.unit_price_incl_tax, self.unit_price_excl_tax,\n self.quantity))\n else:\n # Need to split the discount among the affected quantity\n # of products.\n item_incl_tax_discount = (\n self.discount_value / int(self._affected_quantity))\n item_excl_tax_discount = item_incl_tax_discount * self._tax_ratio\n item_excl_tax_discount = item_excl_tax_discount.quantize(D('0.01'))\n prices.append((self.unit_price_incl_tax - item_incl_tax_discount,\n self.unit_price_excl_tax - item_excl_tax_discount,\n self._affected_quantity))\n if self.quantity_without_discount:\n prices.append((self.unit_price_incl_tax,\n self.unit_price_excl_tax,\n self.quantity_without_discount))\n return prices\n\n # =======\n # Helpers\n # =======\n\n @property\n def _tax_ratio(self):\n if not self.unit_price_incl_tax:\n return 0\n 
return self.unit_price_excl_tax / self.unit_price_incl_tax\n\n # ==========\n # Properties\n # ==========\n\n @property\n def has_discount(self):\n return self.quantity > self.quantity_without_discount\n\n @property\n def quantity_with_discount(self):\n return self._affected_quantity\n\n @property\n def quantity_without_discount(self):\n return int(self.quantity - self._affected_quantity)\n\n @property\n def is_available_for_discount(self):\n return self.quantity_without_discount > 0\n\n @property\n def discount_value(self):\n # Only one of the incl- and excl- discounts should be non-zero\n return max(self._discount_incl_tax, self._discount_excl_tax)\n\n @property\n def purchase_info(self):\n \"\"\"\n Return the stock/price info\n \"\"\"\n if not hasattr(self, '_info'):\n # Cache the PurchaseInfo instance.\n self._info = self.basket.strategy.fetch_for_line(\n self, self.stockrecord)\n return self._info\n\n @property\n def is_tax_known(self):\n return self.purchase_info.price.is_tax_known\n\n @property\n def unit_effective_price(self):\n \"\"\"\n The price to use for offer calculations\n \"\"\"\n return self.purchase_info.price.effective_price\n\n @property\n def unit_price_excl_tax(self):\n return self.purchase_info.price.excl_tax\n\n @property\n def unit_price_incl_tax(self):\n return self.purchase_info.price.incl_tax\n\n @property\n def unit_tax(self):\n return self.purchase_info.price.tax\n\n @property\n def line_price_excl_tax(self):\n if self.unit_price_excl_tax is not None:\n return self.quantity * self.unit_price_excl_tax\n\n @property\n def line_price_excl_tax_incl_discounts(self):\n if self._discount_excl_tax and self.line_price_excl_tax is not None:\n return self.line_price_excl_tax - self._discount_excl_tax\n if self._discount_incl_tax and self.line_price_incl_tax is not None:\n # This is a tricky situation. We know the discount as calculated\n # against tax inclusive prices but we need to guess how much of the\n # discount applies to tax-exclusive prices. We do this by\n # assuming a linear tax and scaling down the original discount.\n return self.line_price_excl_tax \\\n - self._tax_ratio * self._discount_incl_tax\n return self.line_price_excl_tax\n\n @property\n def line_price_incl_tax_incl_discounts(self):\n # We use whichever discount value is set. 
If the discount value was\n # calculated against the tax-exclusive prices, then the line price\n # including tax\n if self.line_price_incl_tax is not None:\n return self.line_price_incl_tax - self.discount_value\n\n @property\n def line_tax(self):\n if self.is_tax_known:\n return self.quantity * self.unit_tax\n\n @property\n def line_price_incl_tax(self):\n if self.unit_price_incl_tax is not None:\n return self.quantity * self.unit_price_incl_tax\n\n @property\n def description(self):\n d = smart_text(self.product)\n ops = []\n for attribute in self.attributes.all():\n ops.append(\"%s = '%s'\" % (attribute.option.name, attribute.value))\n if ops:\n d = \"%s (%s)\" % (d, \", \".join(ops))\n return d\n\n def get_warning(self):\n \"\"\"\n Return a warning message about this basket line if one is applicable\n\n This could be things like the price has changed\n \"\"\"\n if isinstance(self.purchase_info.availability, Unavailable):\n msg = u\"'%(product)s' is no longer available\"\n return _(msg) % {'product': self.product.get_title()}\n\n if not self.price_incl_tax:\n return\n if not self.purchase_info.price.is_tax_known:\n return\n\n # Compare current price to price when added to basket\n current_price_incl_tax = self.purchase_info.price.incl_tax\n if current_price_incl_tax != self.price_incl_tax:\n product_prices = {\n 'product': self.product.get_title(),\n 'old_price': currency(self.price_incl_tax),\n 'new_price': currency(current_price_incl_tax)\n }\n if current_price_incl_tax > self.price_incl_tax:\n warning = _(\"The price of '%(product)s' has increased from\"\n \" %(old_price)s to %(new_price)s since you added\"\n \" it to your basket\")\n return warning % product_prices\n else:\n warning = _(\"The price of '%(product)s' has decreased from\"\n \" %(old_price)s to %(new_price)s since you added\"\n \" it to your basket\")\n return warning % product_prices\n\n\nclass AbstractLineAttribute(models.Model):\n \"\"\"\n An attribute of a basket line\n \"\"\"\n line = models.ForeignKey(\n 'basket.Line',\n on_delete=models.CASCADE,\n related_name='attributes',\n verbose_name=_(\"Line\"))\n option = models.ForeignKey(\n 'catalogue.Option',\n on_delete=models.CASCADE,\n verbose_name=_(\"Option\"))\n value = models.CharField(_(\"Value\"), max_length=255)\n\n class Meta:\n abstract = True\n app_label = 'basket'\n verbose_name = _('Line attribute')\n verbose_name_plural = _('Line attributes')\n", "path": "src/oscar/apps/basket/abstract_models.py"}]} |
gh_patches_debug_177 | rasdani/github-patches | git_diff | qutip__qutip-2305 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
QuTiP 4.7.4: incompatibility with the latest scipy (1.12.0)
https://github.com/qutip/qutip/blob/f5149616a4071a273e7e48a63d956836739c4569/qutip/parallel.py#L7
When the latest scipy version is used (1.12.0), QuTiP (4.7.4) cannot be imported since `from scipy import array` is no longer supported in scipy.
Code to reproduce the bug:
`import qutip`
Output:
```
---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
Cell In[1], line 1
----> 1 import qutip
File ~/anaconda3/envs/test/lib/python3.11/site-packages/qutip/__init__.py:133
131 # graphics
132 from qutip.bloch import *
--> 133 from qutip.visualization import *
134 from qutip.orbital import *
135 from qutip.bloch3d import *
File ~/anaconda3/envs/test/lib/python3.11/site-packages/qutip/visualization.py:24
22 from qutip.qobj import Qobj, isket
23 from qutip.states import ket2dm
---> 24 from qutip.wigner import wigner
25 from qutip.tensor import tensor
26 from qutip.matplotlib_utilities import complex_phase_cmap
File ~/anaconda3/envs/test/lib/python3.11/site-packages/qutip/wigner.py:19
17 import qutip
18 from qutip import Qobj, ket2dm, jmat
---> 19 from qutip.parallel import parfor
20 from qutip.cy.sparse_utils import _csr_get_diag
21 from qutip.sparse import eigh
File ~/anaconda3/envs/test/lib/python3.11/site-packages/qutip/parallel.py:7
1 """
2 This function provides functions for parallel execution of loops and function
3 mappings, using the builtin Python module multiprocessing.
4 """
5 __all__ = ['parfor', 'parallel_map', 'serial_map']
----> 7 from scipy import array
8 import multiprocessing
9 from functools import partial
ImportError: cannot import name 'array' from 'scipy' (/Users/konstantin/anaconda3/envs/test/lib/python3.11/site-packages/scipy/__init__.py)
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `qutip/parallel.py`
Content:
```
1 """
2 This function provides functions for parallel execution of loops and function
3 mappings, using the builtin Python module multiprocessing.
4 """
5 __all__ = ['parfor', 'parallel_map', 'serial_map']
6
7 from scipy import array
8 import multiprocessing
9 from functools import partial
10 import os
11 import sys
12 import signal
13 import qutip.settings as qset
14 from qutip.ui.progressbar import BaseProgressBar, TextProgressBar
15
16
17 if sys.platform == 'darwin':
18 Pool = multiprocessing.get_context('fork').Pool
19 else:
20 Pool = multiprocessing.Pool
21
22
23 def _task_wrapper(args):
24 try:
25 return args[0](*args[1])
26 except KeyboardInterrupt:
27 os.kill(args[2], signal.SIGINT)
28 sys.exit(1)
29
30
31 def _task_wrapper_with_args(args, user_args):
32 try:
33 return args[0](*args[1], **user_args)
34 except KeyboardInterrupt:
35 os.kill(args[2], signal.SIGINT)
36 sys.exit(1)
37
38
39 def parfor(func, *args, **kwargs):
40 """Executes a multi-variable function in parallel on the local machine.
41
42 Parallel execution of a for-loop over function `func` for multiple input
43 arguments and keyword arguments.
44
45 .. note::
46
47 From QuTiP 3.1, we recommend to use :func:`qutip.parallel.parallel_map`
48 instead of this function.
49
50 Parameters
51 ----------
52 func : function_type
53 A function to run in parallel on the local machine. The function 'func'
54 accepts a series of arguments that are passed to the function as
55 variables. In general, the function can have multiple input variables,
56 and these arguments must be passed in the same order as they are
57 defined in the function definition. In addition, the user can pass
58 multiple keyword arguments to the function.
59
60 The following keyword argument is reserved:
61
62 num_cpus : int
63 Number of CPU's to use. Default uses maximum number of CPU's.
64 Performance degrades if num_cpus is larger than the physical CPU
65 count of your machine.
66
67 Returns
68 -------
69 result : list
70 A ``list`` with length equal to number of input parameters
71 containing the output from `func`.
72
73 """
74 os.environ['QUTIP_IN_PARALLEL'] = 'TRUE'
75 kw = _default_kwargs()
76 if 'num_cpus' in kwargs.keys():
77 kw['num_cpus'] = kwargs['num_cpus']
78 del kwargs['num_cpus']
79 if len(kwargs) != 0:
80 task_func = partial(_task_wrapper_with_args, user_args=kwargs)
81 else:
82 task_func = _task_wrapper
83
84 if kw['num_cpus'] > qset.num_cpus:
85 print("Requested number of CPUs (%s) " % kw['num_cpus'] +
86 "is larger than physical number (%s)." % qset.num_cpus)
87 print("Reduce 'num_cpus' for greater performance.")
88
89 pool = Pool(processes=kw['num_cpus'])
90 args = [list(arg) for arg in args]
91 var = [[args[j][i] for j in range(len(args))]
92 for i in range(len(list(args[0])))]
93 try:
94 map_args = ((func, v, os.getpid()) for v in var)
95 par_return = list(pool.map(task_func, map_args))
96
97 pool.terminate()
98 pool.join()
99 os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'
100 if isinstance(par_return[0], tuple):
101 par_return = [elem for elem in par_return]
102 num_elems = len(par_return[0])
103 dt = [type(ii) for ii in par_return[0]]
104 return [array([elem[ii] for elem in par_return], dtype=dt[ii])
105 for ii in range(num_elems)]
106 else:
107 return par_return
108
109 except KeyboardInterrupt:
110 os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'
111 pool.terminate()
112
113
114 def serial_map(task, values, task_args=tuple(), task_kwargs={}, **kwargs):
115 """
116 Serial mapping function with the same call signature as parallel_map, for
117 easy switching between serial and parallel execution. This
118 is functionally equivalent to::
119
120 result = [task(value, *task_args, **task_kwargs) for value in values]
121
122 This function work as a drop-in replacement of
123 :func:`qutip.parallel.parallel_map`.
124
125 Parameters
126 ----------
127 task : a Python function
128 The function that is to be called for each value in ``task_vec``.
129 values : array / list
130 The list or array of values for which the ``task`` function is to be
131 evaluated.
132 task_args : list / dictionary
133 The optional additional argument to the ``task`` function.
134 task_kwargs : list / dictionary
135 The optional additional keyword argument to the ``task`` function.
136 progress_bar : ProgressBar
137 Progress bar class instance for showing progress.
138
139 Returns
140 --------
141 result : list
142 The result list contains the value of
143 ``task(value, *task_args, **task_kwargs)`` for each
144 value in ``values``.
145
146 """
147 try:
148 progress_bar = kwargs['progress_bar']
149 if progress_bar is True:
150 progress_bar = TextProgressBar()
151 except:
152 progress_bar = BaseProgressBar()
153
154 progress_bar.start(len(values))
155 results = []
156 for n, value in enumerate(values):
157 progress_bar.update(n)
158 result = task(value, *task_args, **task_kwargs)
159 results.append(result)
160 progress_bar.finished()
161
162 return results
163
164
165 def parallel_map(task, values, task_args=tuple(), task_kwargs={}, **kwargs):
166 """
167 Parallel execution of a mapping of `values` to the function `task`. This
168 is functionally equivalent to::
169
170 result = [task(value, *task_args, **task_kwargs) for value in values]
171
172 Parameters
173 ----------
174 task : a Python function
175 The function that is to be called for each value in ``task_vec``.
176 values : array / list
177 The list or array of values for which the ``task`` function is to be
178 evaluated.
179 task_args : list / dictionary
180 The optional additional argument to the ``task`` function.
181 task_kwargs : list / dictionary
182 The optional additional keyword argument to the ``task`` function.
183 progress_bar : ProgressBar
184 Progress bar class instance for showing progress.
185
186 Returns
187 --------
188 result : list
189 The result list contains the value of
190 ``task(value, *task_args, **task_kwargs)`` for
191 each value in ``values``.
192
193 """
194 os.environ['QUTIP_IN_PARALLEL'] = 'TRUE'
195 kw = _default_kwargs()
196 if 'num_cpus' in kwargs:
197 kw['num_cpus'] = kwargs['num_cpus']
198
199 try:
200 progress_bar = kwargs['progress_bar']
201 if progress_bar is True:
202 progress_bar = TextProgressBar()
203 except:
204 progress_bar = BaseProgressBar()
205
206 progress_bar.start(len(values))
207 nfinished = [0]
208
209 def _update_progress_bar(x):
210 nfinished[0] += 1
211 progress_bar.update(nfinished[0])
212
213 try:
214 pool = Pool(processes=kw['num_cpus'])
215
216 async_res = [pool.apply_async(task, (value,) + task_args, task_kwargs,
217 _update_progress_bar)
218 for value in values]
219
220 while not all([ar.ready() for ar in async_res]):
221 for ar in async_res:
222 ar.wait(timeout=0.1)
223
224 pool.terminate()
225 pool.join()
226
227 except KeyboardInterrupt as e:
228 os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'
229 pool.terminate()
230 pool.join()
231 raise e
232
233 progress_bar.finished()
234 os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'
235 return [ar.get() for ar in async_res]
236
237
238 def _default_kwargs():
239 settings = {'num_cpus': qset.num_cpus}
240 return settings
241
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/qutip/parallel.py b/qutip/parallel.py
--- a/qutip/parallel.py
+++ b/qutip/parallel.py
@@ -4,7 +4,7 @@
"""
__all__ = ['parfor', 'parallel_map', 'serial_map']
-from scipy import array
+from numpy import array
import multiprocessing
from functools import partial
import os
| {"golden_diff": "diff --git a/qutip/parallel.py b/qutip/parallel.py\n--- a/qutip/parallel.py\n+++ b/qutip/parallel.py\n@@ -4,7 +4,7 @@\n \"\"\"\n __all__ = ['parfor', 'parallel_map', 'serial_map']\n \n-from scipy import array\n+from numpy import array\n import multiprocessing\n from functools import partial\n import os\n", "issue": "QuTiP 4.7.4: incompatibility with the latest scipy (1.12.0)\nhttps://github.com/qutip/qutip/blob/f5149616a4071a273e7e48a63d956836739c4569/qutip/parallel.py#L7\r\n\r\nWhen the latest scipy version is used (1.12.0), QuTiP (4.7.4) cannot be imported since `from scipy import array` is no longer supported in scipy.\r\n\r\nCode to reproduce the bug: \r\n`import qutip`\r\n\r\nOutput:\r\n```\r\n---------------------------------------------------------------------------\r\nImportError Traceback (most recent call last)\r\nCell In[1], line 1\r\n----> 1 import qutip\r\n\r\nFile ~/anaconda3/envs/test/lib/python3.11/site-packages/qutip/__init__.py:133\r\n 131 # graphics\r\n 132 from qutip.bloch import *\r\n--> 133 from qutip.visualization import *\r\n 134 from qutip.orbital import *\r\n 135 from qutip.bloch3d import *\r\n\r\nFile ~/anaconda3/envs/test/lib/python3.11/site-packages/qutip/visualization.py:24\r\n 22 from qutip.qobj import Qobj, isket\r\n 23 from qutip.states import ket2dm\r\n---> 24 from qutip.wigner import wigner\r\n 25 from qutip.tensor import tensor\r\n 26 from qutip.matplotlib_utilities import complex_phase_cmap\r\n\r\nFile ~/anaconda3/envs/test/lib/python3.11/site-packages/qutip/wigner.py:19\r\n 17 import qutip\r\n 18 from qutip import Qobj, ket2dm, jmat\r\n---> 19 from qutip.parallel import parfor\r\n 20 from qutip.cy.sparse_utils import _csr_get_diag\r\n 21 from qutip.sparse import eigh\r\n\r\nFile ~/anaconda3/envs/test/lib/python3.11/site-packages/qutip/parallel.py:7\r\n 1 \"\"\"\r\n 2 This function provides functions for parallel execution of loops and function\r\n 3 mappings, using the builtin Python module multiprocessing.\r\n 4 \"\"\"\r\n 5 __all__ = ['parfor', 'parallel_map', 'serial_map']\r\n----> 7 from scipy import array\r\n 8 import multiprocessing\r\n 9 from functools import partial\r\n\r\nImportError: cannot import name 'array' from 'scipy' (/Users/konstantin/anaconda3/envs/test/lib/python3.11/site-packages/scipy/__init__.py)\r\n```\r\n\n", "before_files": [{"content": "\"\"\"\nThis function provides functions for parallel execution of loops and function\nmappings, using the builtin Python module multiprocessing.\n\"\"\"\n__all__ = ['parfor', 'parallel_map', 'serial_map']\n\nfrom scipy import array\nimport multiprocessing\nfrom functools import partial\nimport os\nimport sys\nimport signal\nimport qutip.settings as qset\nfrom qutip.ui.progressbar import BaseProgressBar, TextProgressBar\n\n\nif sys.platform == 'darwin':\n Pool = multiprocessing.get_context('fork').Pool\nelse:\n Pool = multiprocessing.Pool\n\n\ndef _task_wrapper(args):\n try:\n return args[0](*args[1])\n except KeyboardInterrupt:\n os.kill(args[2], signal.SIGINT)\n sys.exit(1)\n\n\ndef _task_wrapper_with_args(args, user_args):\n try:\n return args[0](*args[1], **user_args)\n except KeyboardInterrupt:\n os.kill(args[2], signal.SIGINT)\n sys.exit(1)\n\n\ndef parfor(func, *args, **kwargs):\n \"\"\"Executes a multi-variable function in parallel on the local machine.\n\n Parallel execution of a for-loop over function `func` for multiple input\n arguments and keyword arguments.\n\n .. 
note::\n\n From QuTiP 3.1, we recommend to use :func:`qutip.parallel.parallel_map`\n instead of this function.\n\n Parameters\n ----------\n func : function_type\n A function to run in parallel on the local machine. The function 'func'\n accepts a series of arguments that are passed to the function as\n variables. In general, the function can have multiple input variables,\n and these arguments must be passed in the same order as they are\n defined in the function definition. In addition, the user can pass\n multiple keyword arguments to the function.\n\n The following keyword argument is reserved:\n\n num_cpus : int\n Number of CPU's to use. Default uses maximum number of CPU's.\n Performance degrades if num_cpus is larger than the physical CPU\n count of your machine.\n\n Returns\n -------\n result : list\n A ``list`` with length equal to number of input parameters\n containing the output from `func`.\n\n \"\"\"\n os.environ['QUTIP_IN_PARALLEL'] = 'TRUE'\n kw = _default_kwargs()\n if 'num_cpus' in kwargs.keys():\n kw['num_cpus'] = kwargs['num_cpus']\n del kwargs['num_cpus']\n if len(kwargs) != 0:\n task_func = partial(_task_wrapper_with_args, user_args=kwargs)\n else:\n task_func = _task_wrapper\n\n if kw['num_cpus'] > qset.num_cpus:\n print(\"Requested number of CPUs (%s) \" % kw['num_cpus'] +\n \"is larger than physical number (%s).\" % qset.num_cpus)\n print(\"Reduce 'num_cpus' for greater performance.\")\n\n pool = Pool(processes=kw['num_cpus'])\n args = [list(arg) for arg in args]\n var = [[args[j][i] for j in range(len(args))]\n for i in range(len(list(args[0])))]\n try:\n map_args = ((func, v, os.getpid()) for v in var)\n par_return = list(pool.map(task_func, map_args))\n\n pool.terminate()\n pool.join()\n os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'\n if isinstance(par_return[0], tuple):\n par_return = [elem for elem in par_return]\n num_elems = len(par_return[0])\n dt = [type(ii) for ii in par_return[0]]\n return [array([elem[ii] for elem in par_return], dtype=dt[ii])\n for ii in range(num_elems)]\n else:\n return par_return\n\n except KeyboardInterrupt:\n os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'\n pool.terminate()\n\n\ndef serial_map(task, values, task_args=tuple(), task_kwargs={}, **kwargs):\n \"\"\"\n Serial mapping function with the same call signature as parallel_map, for\n easy switching between serial and parallel execution. 
This\n is functionally equivalent to::\n\n result = [task(value, *task_args, **task_kwargs) for value in values]\n\n This function work as a drop-in replacement of\n :func:`qutip.parallel.parallel_map`.\n\n Parameters\n ----------\n task : a Python function\n The function that is to be called for each value in ``task_vec``.\n values : array / list\n The list or array of values for which the ``task`` function is to be\n evaluated.\n task_args : list / dictionary\n The optional additional argument to the ``task`` function.\n task_kwargs : list / dictionary\n The optional additional keyword argument to the ``task`` function.\n progress_bar : ProgressBar\n Progress bar class instance for showing progress.\n\n Returns\n --------\n result : list\n The result list contains the value of\n ``task(value, *task_args, **task_kwargs)`` for each\n value in ``values``.\n\n \"\"\"\n try:\n progress_bar = kwargs['progress_bar']\n if progress_bar is True:\n progress_bar = TextProgressBar()\n except:\n progress_bar = BaseProgressBar()\n\n progress_bar.start(len(values))\n results = []\n for n, value in enumerate(values):\n progress_bar.update(n)\n result = task(value, *task_args, **task_kwargs)\n results.append(result)\n progress_bar.finished()\n\n return results\n\n\ndef parallel_map(task, values, task_args=tuple(), task_kwargs={}, **kwargs):\n \"\"\"\n Parallel execution of a mapping of `values` to the function `task`. This\n is functionally equivalent to::\n\n result = [task(value, *task_args, **task_kwargs) for value in values]\n\n Parameters\n ----------\n task : a Python function\n The function that is to be called for each value in ``task_vec``.\n values : array / list\n The list or array of values for which the ``task`` function is to be\n evaluated.\n task_args : list / dictionary\n The optional additional argument to the ``task`` function.\n task_kwargs : list / dictionary\n The optional additional keyword argument to the ``task`` function.\n progress_bar : ProgressBar\n Progress bar class instance for showing progress.\n\n Returns\n --------\n result : list\n The result list contains the value of\n ``task(value, *task_args, **task_kwargs)`` for\n each value in ``values``.\n\n \"\"\"\n os.environ['QUTIP_IN_PARALLEL'] = 'TRUE'\n kw = _default_kwargs()\n if 'num_cpus' in kwargs:\n kw['num_cpus'] = kwargs['num_cpus']\n\n try:\n progress_bar = kwargs['progress_bar']\n if progress_bar is True:\n progress_bar = TextProgressBar()\n except:\n progress_bar = BaseProgressBar()\n\n progress_bar.start(len(values))\n nfinished = [0]\n\n def _update_progress_bar(x):\n nfinished[0] += 1\n progress_bar.update(nfinished[0])\n\n try:\n pool = Pool(processes=kw['num_cpus'])\n\n async_res = [pool.apply_async(task, (value,) + task_args, task_kwargs,\n _update_progress_bar)\n for value in values]\n\n while not all([ar.ready() for ar in async_res]):\n for ar in async_res:\n ar.wait(timeout=0.1)\n\n pool.terminate()\n pool.join()\n\n except KeyboardInterrupt as e:\n os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'\n pool.terminate()\n pool.join()\n raise e\n\n progress_bar.finished()\n os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'\n return [ar.get() for ar in async_res]\n\n\ndef _default_kwargs():\n settings = {'num_cpus': qset.num_cpus}\n return settings\n", "path": "qutip/parallel.py"}], "after_files": [{"content": "\"\"\"\nThis function provides functions for parallel execution of loops and function\nmappings, using the builtin Python module multiprocessing.\n\"\"\"\n__all__ = ['parfor', 'parallel_map', 'serial_map']\n\nfrom 
numpy import array\nimport multiprocessing\nfrom functools import partial\nimport os\nimport sys\nimport signal\nimport qutip.settings as qset\nfrom qutip.ui.progressbar import BaseProgressBar, TextProgressBar\n\n\nif sys.platform == 'darwin':\n Pool = multiprocessing.get_context('fork').Pool\nelse:\n Pool = multiprocessing.Pool\n\n\ndef _task_wrapper(args):\n try:\n return args[0](*args[1])\n except KeyboardInterrupt:\n os.kill(args[2], signal.SIGINT)\n sys.exit(1)\n\n\ndef _task_wrapper_with_args(args, user_args):\n try:\n return args[0](*args[1], **user_args)\n except KeyboardInterrupt:\n os.kill(args[2], signal.SIGINT)\n sys.exit(1)\n\n\ndef parfor(func, *args, **kwargs):\n \"\"\"Executes a multi-variable function in parallel on the local machine.\n\n Parallel execution of a for-loop over function `func` for multiple input\n arguments and keyword arguments.\n\n .. note::\n\n From QuTiP 3.1, we recommend to use :func:`qutip.parallel.parallel_map`\n instead of this function.\n\n Parameters\n ----------\n func : function_type\n A function to run in parallel on the local machine. The function 'func'\n accepts a series of arguments that are passed to the function as\n variables. In general, the function can have multiple input variables,\n and these arguments must be passed in the same order as they are\n defined in the function definition. In addition, the user can pass\n multiple keyword arguments to the function.\n\n The following keyword argument is reserved:\n\n num_cpus : int\n Number of CPU's to use. Default uses maximum number of CPU's.\n Performance degrades if num_cpus is larger than the physical CPU\n count of your machine.\n\n Returns\n -------\n result : list\n A ``list`` with length equal to number of input parameters\n containing the output from `func`.\n\n \"\"\"\n os.environ['QUTIP_IN_PARALLEL'] = 'TRUE'\n kw = _default_kwargs()\n if 'num_cpus' in kwargs.keys():\n kw['num_cpus'] = kwargs['num_cpus']\n del kwargs['num_cpus']\n if len(kwargs) != 0:\n task_func = partial(_task_wrapper_with_args, user_args=kwargs)\n else:\n task_func = _task_wrapper\n\n if kw['num_cpus'] > qset.num_cpus:\n print(\"Requested number of CPUs (%s) \" % kw['num_cpus'] +\n \"is larger than physical number (%s).\" % qset.num_cpus)\n print(\"Reduce 'num_cpus' for greater performance.\")\n\n pool = Pool(processes=kw['num_cpus'])\n args = [list(arg) for arg in args]\n var = [[args[j][i] for j in range(len(args))]\n for i in range(len(list(args[0])))]\n try:\n map_args = ((func, v, os.getpid()) for v in var)\n par_return = list(pool.map(task_func, map_args))\n\n pool.terminate()\n pool.join()\n os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'\n if isinstance(par_return[0], tuple):\n par_return = [elem for elem in par_return]\n num_elems = len(par_return[0])\n dt = [type(ii) for ii in par_return[0]]\n return [array([elem[ii] for elem in par_return], dtype=dt[ii])\n for ii in range(num_elems)]\n else:\n return par_return\n\n except KeyboardInterrupt:\n os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'\n pool.terminate()\n\n\ndef serial_map(task, values, task_args=tuple(), task_kwargs={}, **kwargs):\n \"\"\"\n Serial mapping function with the same call signature as parallel_map, for\n easy switching between serial and parallel execution. 
This\n is functionally equivalent to::\n\n result = [task(value, *task_args, **task_kwargs) for value in values]\n\n This function work as a drop-in replacement of\n :func:`qutip.parallel.parallel_map`.\n\n Parameters\n ----------\n task : a Python function\n The function that is to be called for each value in ``task_vec``.\n values : array / list\n The list or array of values for which the ``task`` function is to be\n evaluated.\n task_args : list / dictionary\n The optional additional argument to the ``task`` function.\n task_kwargs : list / dictionary\n The optional additional keyword argument to the ``task`` function.\n progress_bar : ProgressBar\n Progress bar class instance for showing progress.\n\n Returns\n --------\n result : list\n The result list contains the value of\n ``task(value, *task_args, **task_kwargs)`` for each\n value in ``values``.\n\n \"\"\"\n try:\n progress_bar = kwargs['progress_bar']\n if progress_bar is True:\n progress_bar = TextProgressBar()\n except:\n progress_bar = BaseProgressBar()\n\n progress_bar.start(len(values))\n results = []\n for n, value in enumerate(values):\n progress_bar.update(n)\n result = task(value, *task_args, **task_kwargs)\n results.append(result)\n progress_bar.finished()\n\n return results\n\n\ndef parallel_map(task, values, task_args=tuple(), task_kwargs={}, **kwargs):\n \"\"\"\n Parallel execution of a mapping of `values` to the function `task`. This\n is functionally equivalent to::\n\n result = [task(value, *task_args, **task_kwargs) for value in values]\n\n Parameters\n ----------\n task : a Python function\n The function that is to be called for each value in ``task_vec``.\n values : array / list\n The list or array of values for which the ``task`` function is to be\n evaluated.\n task_args : list / dictionary\n The optional additional argument to the ``task`` function.\n task_kwargs : list / dictionary\n The optional additional keyword argument to the ``task`` function.\n progress_bar : ProgressBar\n Progress bar class instance for showing progress.\n\n Returns\n --------\n result : list\n The result list contains the value of\n ``task(value, *task_args, **task_kwargs)`` for\n each value in ``values``.\n\n \"\"\"\n os.environ['QUTIP_IN_PARALLEL'] = 'TRUE'\n kw = _default_kwargs()\n if 'num_cpus' in kwargs:\n kw['num_cpus'] = kwargs['num_cpus']\n\n try:\n progress_bar = kwargs['progress_bar']\n if progress_bar is True:\n progress_bar = TextProgressBar()\n except:\n progress_bar = BaseProgressBar()\n\n progress_bar.start(len(values))\n nfinished = [0]\n\n def _update_progress_bar(x):\n nfinished[0] += 1\n progress_bar.update(nfinished[0])\n\n try:\n pool = Pool(processes=kw['num_cpus'])\n\n async_res = [pool.apply_async(task, (value,) + task_args, task_kwargs,\n _update_progress_bar)\n for value in values]\n\n while not all([ar.ready() for ar in async_res]):\n for ar in async_res:\n ar.wait(timeout=0.1)\n\n pool.terminate()\n pool.join()\n\n except KeyboardInterrupt as e:\n os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'\n pool.terminate()\n pool.join()\n raise e\n\n progress_bar.finished()\n os.environ['QUTIP_IN_PARALLEL'] = 'FALSE'\n return [ar.get() for ar in async_res]\n\n\ndef _default_kwargs():\n settings = {'num_cpus': qset.num_cpus}\n return settings\n", "path": "qutip/parallel.py"}]} |
gh_patches_debug_178 | rasdani/github-patches | git_diff | kornia__kornia-2817 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Important typo in RandAugment default policy list
### Describe the bug
'translate_x' is in the default policy list twice, whereas 'translate_y' is missing.
### Reproduction steps
```bash
1. Open kornia/augmentation/auto/rand_augment/rand_augment.py
```
### Expected behavior
Current default_policy list:
```
default_policy: List[SUBPLOLICY_CONFIG] = [
[("auto_contrast", 0, 1)],
[("equalize", 0, 1)],
[("invert", 0, 1)],
[("rotate", -30.0, 30.0)],
[("posterize", 0.0, 4)],
[("solarize", 0.0, 1.0)],
[("solarize_add", 0.0, 0.43)],
[("color", 0.1, 1.9)],
[("contrast", 0.1, 1.9)],
[("brightness", 0.1, 1.9)],
[("sharpness", 0.1, 1.9)],
[("shear_x", -0.3, 0.3)],
[("shear_y", -0.3, 0.3)],
# (CutoutAbs, 0, 40),
[("translate_x", -0.1, 0.1)],
[("translate_x", -0.1, 0.1)],
]
```
Expected list:
```
default_policy: List[SUBPLOLICY_CONFIG] = [
[("auto_contrast", 0, 1)],
[("equalize", 0, 1)],
[("invert", 0, 1)],
[("rotate", -30.0, 30.0)],
[("posterize", 0.0, 4)],
[("solarize", 0.0, 1.0)],
[("solarize_add", 0.0, 0.43)],
[("color", 0.1, 1.9)],
[("contrast", 0.1, 1.9)],
[("brightness", 0.1, 1.9)],
[("sharpness", 0.1, 1.9)],
[("shear_x", -0.3, 0.3)],
[("shear_y", -0.3, 0.3)],
# (CutoutAbs, 0, 40),
[("translate_x", -0.1, 0.1)],
[("translate_y", -0.1, 0.1)],
]
```
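A quick way to surface duplicates like this in a candidate list is to count the operation names; the sketch below assumes `default_policy` is the list shown above:

```python
from collections import Counter

# Each subpolicy holds a single (name, low, high) tuple, so take the name.
op_names = [subpolicy[0][0] for subpolicy in default_policy]
duplicates = [name for name, count in Counter(op_names).items() if count > 1]
print(duplicates)  # the buggy list prints ['translate_x']; the fixed one prints []
```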
### Environment
```shell
Irrelevant
```
### Additional context
_No response_
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kornia/augmentation/auto/rand_augment/rand_augment.py`
Content:
```
1 from typing import Dict, Iterator, List, Optional, Tuple, Union, cast
2
3 import torch
4 from torch.distributions import Categorical
5
6 from kornia.augmentation.auto.base import SUBPLOLICY_CONFIG, PolicyAugmentBase
7 from kornia.augmentation.auto.operations import OperationBase
8 from kornia.augmentation.auto.operations.policy import PolicySequential
9 from kornia.augmentation.container.params import ParamItem
10 from kornia.core import Module, Tensor
11
12 from . import ops
13
14 default_policy: List[SUBPLOLICY_CONFIG] = [
15 [("auto_contrast", 0, 1)],
16 [("equalize", 0, 1)],
17 [("invert", 0, 1)],
18 [("rotate", -30.0, 30.0)],
19 [("posterize", 0.0, 4)],
20 [("solarize", 0.0, 1.0)],
21 [("solarize_add", 0.0, 0.43)],
22 [("color", 0.1, 1.9)],
23 [("contrast", 0.1, 1.9)],
24 [("brightness", 0.1, 1.9)],
25 [("sharpness", 0.1, 1.9)],
26 [("shear_x", -0.3, 0.3)],
27 [("shear_y", -0.3, 0.3)],
28 # (CutoutAbs, 0, 40),
29 [("translate_x", -0.1, 0.1)],
30 [("translate_x", -0.1, 0.1)],
31 ]
32
33
34 class RandAugment(PolicyAugmentBase):
35 """Apply RandAugment :cite:`cubuk2020randaugment` augmentation strategies.
36
37 Args:
38 n: the number of augmentations to apply sequentially.
39 m: magnitude for all the augmentations, ranged from [0, 30].
40 policy: candidate transformations. If None, a default candidate list will be used.
41 transformation_matrix_mode: computation mode for the chained transformation matrix, via `.transform_matrix`
42 attribute.
43 If `silent`, transformation matrix will be computed silently and the non-rigid
44 modules will be ignored as identity transformations.
45 If `rigid`, transformation matrix will be computed silently and the non-rigid
46 modules will trigger errors.
47 If `skip`, transformation matrix will be totally ignored.
48
49 Examples:
50 >>> import kornia.augmentation as K
51 >>> in_tensor = torch.rand(5, 3, 30, 30)
52 >>> aug = K.AugmentationSequential(RandAugment(n=2, m=10))
53 >>> aug(in_tensor).shape
54 torch.Size([5, 3, 30, 30])
55 """
56
57 def __init__(
58 self,
59 n: int,
60 m: int,
61 policy: Optional[List[SUBPLOLICY_CONFIG]] = None,
62 transformation_matrix_mode: str = "silent",
63 ) -> None:
64 if m <= 0 or m >= 30:
65 raise ValueError(f"Expect `m` in [0, 30]. Got {m}.")
66
67 if policy is None:
68 _policy = default_policy
69 else:
70 _policy = policy
71
72 super().__init__(_policy, transformation_matrix_mode=transformation_matrix_mode)
73 selection_weights = torch.tensor([1.0 / len(self)] * len(self))
74 self.rand_selector = Categorical(selection_weights)
75 self.n = n
76 self.m = m
77
78 def compose_subpolicy_sequential(self, subpolicy: SUBPLOLICY_CONFIG) -> PolicySequential:
79 if len(subpolicy) != 1:
80 raise RuntimeError(f"Each policy must have only one operation for RandAugment. Got {len(subpolicy)}.")
81 name, low, high = subpolicy[0]
82 return PolicySequential(*[getattr(ops, name)(low, high)])
83
84 def get_forward_sequence(self, params: Optional[List[ParamItem]] = None) -> Iterator[Tuple[str, Module]]:
85 if params is None:
86 idx = self.rand_selector.sample((self.n,))
87 return self.get_children_by_indices(idx)
88
89 return self.get_children_by_params(params)
90
91 def forward_parameters(self, batch_shape: torch.Size) -> List[ParamItem]:
92 named_modules: Iterator[Tuple[str, Module]] = self.get_forward_sequence()
93
94 params: List[ParamItem] = []
95 mod_param: Union[Dict[str, Tensor], List[ParamItem]]
96 m = torch.tensor([self.m / 30] * batch_shape[0])
97
98 for name, module in named_modules:
99 # The Input PolicySequential only got one child.
100 op = cast(PolicySequential, module)[0]
101 op = cast(OperationBase, op)
102 mag = None
103 if op.magnitude_range is not None:
104 minval, maxval = op.magnitude_range
105 mag = m * float(maxval - minval) + minval
106 mod_param = op.forward_parameters(batch_shape, mag=mag)
107 # Compose it
108 param = ParamItem(name, [ParamItem(next(iter(module.named_children()))[0], mod_param)])
109 params.append(param)
110
111 return params
112
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kornia/augmentation/auto/rand_augment/rand_augment.py b/kornia/augmentation/auto/rand_augment/rand_augment.py
--- a/kornia/augmentation/auto/rand_augment/rand_augment.py
+++ b/kornia/augmentation/auto/rand_augment/rand_augment.py
@@ -27,7 +27,7 @@
[("shear_y", -0.3, 0.3)],
# (CutoutAbs, 0, 40),
[("translate_x", -0.1, 0.1)],
- [("translate_x", -0.1, 0.1)],
+ [("translate_y", -0.1, 0.1)],
]
| {"golden_diff": "diff --git a/kornia/augmentation/auto/rand_augment/rand_augment.py b/kornia/augmentation/auto/rand_augment/rand_augment.py\n--- a/kornia/augmentation/auto/rand_augment/rand_augment.py\n+++ b/kornia/augmentation/auto/rand_augment/rand_augment.py\n@@ -27,7 +27,7 @@\n [(\"shear_y\", -0.3, 0.3)],\n # (CutoutAbs, 0, 40),\n [(\"translate_x\", -0.1, 0.1)],\n- [(\"translate_x\", -0.1, 0.1)],\n+ [(\"translate_y\", -0.1, 0.1)],\n ]\n", "issue": "Important typo in RandAugment default policy list\n### Describe the bug\n\n'translate_x' is in the default policy list twice, whereas 'translate_y' is missing.\n\n### Reproduction steps\n\n```bash\n1. Open kornia/augmentation/auto/rand_augment/rand_augment.py\n```\n\n\n### Expected behavior\n\nCurrent default_policy list:\r\n```\r\ndefault_policy: List[SUBPLOLICY_CONFIG] = [\r\n [(\"auto_contrast\", 0, 1)],\r\n [(\"equalize\", 0, 1)],\r\n [(\"invert\", 0, 1)],\r\n [(\"rotate\", -30.0, 30.0)],\r\n [(\"posterize\", 0.0, 4)],\r\n [(\"solarize\", 0.0, 1.0)],\r\n [(\"solarize_add\", 0.0, 0.43)],\r\n [(\"color\", 0.1, 1.9)],\r\n [(\"contrast\", 0.1, 1.9)],\r\n [(\"brightness\", 0.1, 1.9)],\r\n [(\"sharpness\", 0.1, 1.9)],\r\n [(\"shear_x\", -0.3, 0.3)],\r\n [(\"shear_y\", -0.3, 0.3)],\r\n # (CutoutAbs, 0, 40),\r\n [(\"translate_x\", -0.1, 0.1)],\r\n [(\"translate_x\", -0.1, 0.1)],\r\n]\r\n```\r\n\r\nExpected list:\r\n```\r\ndefault_policy: List[SUBPLOLICY_CONFIG] = [\r\n [(\"auto_contrast\", 0, 1)],\r\n [(\"equalize\", 0, 1)],\r\n [(\"invert\", 0, 1)],\r\n [(\"rotate\", -30.0, 30.0)],\r\n [(\"posterize\", 0.0, 4)],\r\n [(\"solarize\", 0.0, 1.0)],\r\n [(\"solarize_add\", 0.0, 0.43)],\r\n [(\"color\", 0.1, 1.9)],\r\n [(\"contrast\", 0.1, 1.9)],\r\n [(\"brightness\", 0.1, 1.9)],\r\n [(\"sharpness\", 0.1, 1.9)],\r\n [(\"shear_x\", -0.3, 0.3)],\r\n [(\"shear_y\", -0.3, 0.3)],\r\n # (CutoutAbs, 0, 40),\r\n [(\"translate_x\", -0.1, 0.1)],\r\n [(\"translate_y\", -0.1, 0.1)],\r\n]\r\n```\n\n### Environment\n\n```shell\nIrrelevant\n```\n\n\n### Additional context\n\n_No response_\n", "before_files": [{"content": "from typing import Dict, Iterator, List, Optional, Tuple, Union, cast\n\nimport torch\nfrom torch.distributions import Categorical\n\nfrom kornia.augmentation.auto.base import SUBPLOLICY_CONFIG, PolicyAugmentBase\nfrom kornia.augmentation.auto.operations import OperationBase\nfrom kornia.augmentation.auto.operations.policy import PolicySequential\nfrom kornia.augmentation.container.params import ParamItem\nfrom kornia.core import Module, Tensor\n\nfrom . import ops\n\ndefault_policy: List[SUBPLOLICY_CONFIG] = [\n [(\"auto_contrast\", 0, 1)],\n [(\"equalize\", 0, 1)],\n [(\"invert\", 0, 1)],\n [(\"rotate\", -30.0, 30.0)],\n [(\"posterize\", 0.0, 4)],\n [(\"solarize\", 0.0, 1.0)],\n [(\"solarize_add\", 0.0, 0.43)],\n [(\"color\", 0.1, 1.9)],\n [(\"contrast\", 0.1, 1.9)],\n [(\"brightness\", 0.1, 1.9)],\n [(\"sharpness\", 0.1, 1.9)],\n [(\"shear_x\", -0.3, 0.3)],\n [(\"shear_y\", -0.3, 0.3)],\n # (CutoutAbs, 0, 40),\n [(\"translate_x\", -0.1, 0.1)],\n [(\"translate_x\", -0.1, 0.1)],\n]\n\n\nclass RandAugment(PolicyAugmentBase):\n \"\"\"Apply RandAugment :cite:`cubuk2020randaugment` augmentation strategies.\n\n Args:\n n: the number of augmentations to apply sequentially.\n m: magnitude for all the augmentations, ranged from [0, 30].\n policy: candidate transformations. 
If None, a default candidate list will be used.\n transformation_matrix_mode: computation mode for the chained transformation matrix, via `.transform_matrix`\n attribute.\n If `silent`, transformation matrix will be computed silently and the non-rigid\n modules will be ignored as identity transformations.\n If `rigid`, transformation matrix will be computed silently and the non-rigid\n modules will trigger errors.\n If `skip`, transformation matrix will be totally ignored.\n\n Examples:\n >>> import kornia.augmentation as K\n >>> in_tensor = torch.rand(5, 3, 30, 30)\n >>> aug = K.AugmentationSequential(RandAugment(n=2, m=10))\n >>> aug(in_tensor).shape\n torch.Size([5, 3, 30, 30])\n \"\"\"\n\n def __init__(\n self,\n n: int,\n m: int,\n policy: Optional[List[SUBPLOLICY_CONFIG]] = None,\n transformation_matrix_mode: str = \"silent\",\n ) -> None:\n if m <= 0 or m >= 30:\n raise ValueError(f\"Expect `m` in [0, 30]. Got {m}.\")\n\n if policy is None:\n _policy = default_policy\n else:\n _policy = policy\n\n super().__init__(_policy, transformation_matrix_mode=transformation_matrix_mode)\n selection_weights = torch.tensor([1.0 / len(self)] * len(self))\n self.rand_selector = Categorical(selection_weights)\n self.n = n\n self.m = m\n\n def compose_subpolicy_sequential(self, subpolicy: SUBPLOLICY_CONFIG) -> PolicySequential:\n if len(subpolicy) != 1:\n raise RuntimeError(f\"Each policy must have only one operation for RandAugment. Got {len(subpolicy)}.\")\n name, low, high = subpolicy[0]\n return PolicySequential(*[getattr(ops, name)(low, high)])\n\n def get_forward_sequence(self, params: Optional[List[ParamItem]] = None) -> Iterator[Tuple[str, Module]]:\n if params is None:\n idx = self.rand_selector.sample((self.n,))\n return self.get_children_by_indices(idx)\n\n return self.get_children_by_params(params)\n\n def forward_parameters(self, batch_shape: torch.Size) -> List[ParamItem]:\n named_modules: Iterator[Tuple[str, Module]] = self.get_forward_sequence()\n\n params: List[ParamItem] = []\n mod_param: Union[Dict[str, Tensor], List[ParamItem]]\n m = torch.tensor([self.m / 30] * batch_shape[0])\n\n for name, module in named_modules:\n # The Input PolicySequential only got one child.\n op = cast(PolicySequential, module)[0]\n op = cast(OperationBase, op)\n mag = None\n if op.magnitude_range is not None:\n minval, maxval = op.magnitude_range\n mag = m * float(maxval - minval) + minval\n mod_param = op.forward_parameters(batch_shape, mag=mag)\n # Compose it\n param = ParamItem(name, [ParamItem(next(iter(module.named_children()))[0], mod_param)])\n params.append(param)\n\n return params\n", "path": "kornia/augmentation/auto/rand_augment/rand_augment.py"}], "after_files": [{"content": "from typing import Dict, Iterator, List, Optional, Tuple, Union, cast\n\nimport torch\nfrom torch.distributions import Categorical\n\nfrom kornia.augmentation.auto.base import SUBPLOLICY_CONFIG, PolicyAugmentBase\nfrom kornia.augmentation.auto.operations import OperationBase\nfrom kornia.augmentation.auto.operations.policy import PolicySequential\nfrom kornia.augmentation.container.params import ParamItem\nfrom kornia.core import Module, Tensor\n\nfrom . 
import ops\n\ndefault_policy: List[SUBPLOLICY_CONFIG] = [\n [(\"auto_contrast\", 0, 1)],\n [(\"equalize\", 0, 1)],\n [(\"invert\", 0, 1)],\n [(\"rotate\", -30.0, 30.0)],\n [(\"posterize\", 0.0, 4)],\n [(\"solarize\", 0.0, 1.0)],\n [(\"solarize_add\", 0.0, 0.43)],\n [(\"color\", 0.1, 1.9)],\n [(\"contrast\", 0.1, 1.9)],\n [(\"brightness\", 0.1, 1.9)],\n [(\"sharpness\", 0.1, 1.9)],\n [(\"shear_x\", -0.3, 0.3)],\n [(\"shear_y\", -0.3, 0.3)],\n # (CutoutAbs, 0, 40),\n [(\"translate_x\", -0.1, 0.1)],\n [(\"translate_y\", -0.1, 0.1)],\n]\n\n\nclass RandAugment(PolicyAugmentBase):\n \"\"\"Apply RandAugment :cite:`cubuk2020randaugment` augmentation strategies.\n\n Args:\n n: the number of augmentations to apply sequentially.\n m: magnitude for all the augmentations, ranged from [0, 30].\n policy: candidate transformations. If None, a default candidate list will be used.\n transformation_matrix_mode: computation mode for the chained transformation matrix, via `.transform_matrix`\n attribute.\n If `silent`, transformation matrix will be computed silently and the non-rigid\n modules will be ignored as identity transformations.\n If `rigid`, transformation matrix will be computed silently and the non-rigid\n modules will trigger errors.\n If `skip`, transformation matrix will be totally ignored.\n\n Examples:\n >>> import kornia.augmentation as K\n >>> in_tensor = torch.rand(5, 3, 30, 30)\n >>> aug = K.AugmentationSequential(RandAugment(n=2, m=10))\n >>> aug(in_tensor).shape\n torch.Size([5, 3, 30, 30])\n \"\"\"\n\n def __init__(\n self,\n n: int,\n m: int,\n policy: Optional[List[SUBPLOLICY_CONFIG]] = None,\n transformation_matrix_mode: str = \"silent\",\n ) -> None:\n if m <= 0 or m >= 30:\n raise ValueError(f\"Expect `m` in [0, 30]. Got {m}.\")\n\n if policy is None:\n _policy = default_policy\n else:\n _policy = policy\n\n super().__init__(_policy, transformation_matrix_mode=transformation_matrix_mode)\n selection_weights = torch.tensor([1.0 / len(self)] * len(self))\n self.rand_selector = Categorical(selection_weights)\n self.n = n\n self.m = m\n\n def compose_subpolicy_sequential(self, subpolicy: SUBPLOLICY_CONFIG) -> PolicySequential:\n if len(subpolicy) != 1:\n raise RuntimeError(f\"Each policy must have only one operation for RandAugment. Got {len(subpolicy)}.\")\n name, low, high = subpolicy[0]\n return PolicySequential(*[getattr(ops, name)(low, high)])\n\n def get_forward_sequence(self, params: Optional[List[ParamItem]] = None) -> Iterator[Tuple[str, Module]]:\n if params is None:\n idx = self.rand_selector.sample((self.n,))\n return self.get_children_by_indices(idx)\n\n return self.get_children_by_params(params)\n\n def forward_parameters(self, batch_shape: torch.Size) -> List[ParamItem]:\n named_modules: Iterator[Tuple[str, Module]] = self.get_forward_sequence()\n\n params: List[ParamItem] = []\n mod_param: Union[Dict[str, Tensor], List[ParamItem]]\n m = torch.tensor([self.m / 30] * batch_shape[0])\n\n for name, module in named_modules:\n # The Input PolicySequential only got one child.\n op = cast(PolicySequential, module)[0]\n op = cast(OperationBase, op)\n mag = None\n if op.magnitude_range is not None:\n minval, maxval = op.magnitude_range\n mag = m * float(maxval - minval) + minval\n mod_param = op.forward_parameters(batch_shape, mag=mag)\n # Compose it\n param = ParamItem(name, [ParamItem(next(iter(module.named_children()))[0], mod_param)])\n params.append(param)\n\n return params\n", "path": "kornia/augmentation/auto/rand_augment/rand_augment.py"}]} |
gh_patches_debug_179 | rasdani/github-patches | git_diff | vyperlang__vyper-1275 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
State leakage across test runs when using parallelization
### What is wrong.
The tests at `tests/examples/safe_remote_purchase/test_safe_remote_purchase.py` fail when run using `pytest-xdist` to parallelize test runs.
```
def test_abort(w3, assert_tx_failed, check_balance, get_contract, contract_code):
a0, a1, a2 = w3.eth.accounts[:3]
c = get_contract(contract_code, value=2)
# Only sender can trigger refund
assert_tx_failed(lambda: c.abort(transact={'from': a2}))
# Refund works correctly
c.abort(transact={'from': a0, 'gasPrice': 0})
> assert check_balance() == (INIT_BAL_a0 - w3.toWei(2, 'ether'), INIT_BAL_a1)
E assert (100000000000...0000000000000) == (9999980000000...0000000000000)
E At index 0 diff: 1000000000000000000000000 != 999998000000000000000000
E Use -v to get the full diff
tests/examples/safe_remote_purchase/test_safe_remote_purchase.py:62: AssertionError
```
replicate by installing `pytest-xdist` and running with
```
pytest tests/examples/safe_remote_purchase/test_safe_remote_purchase.py -n 2
```
It's likely this isn't deterministic and you may need to run the full suite.
### How can it be fixed.
Figure out where statefulness is leaking across test runs and fix it.
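A common first check is whether the chain fixture is built once and then shared between workers; giving every test a fresh in-memory chain rules that out. A rough sketch, assuming the suite drives eth-tester through web3 (the fixture and provider wiring here are illustrative, not the project's actual conftest):

```python
import pytest
from eth_tester import EthereumTester
from web3 import Web3
from web3.providers.eth_tester import EthereumTesterProvider

@pytest.fixture
def w3():
    # A brand-new in-memory chain per test, so balances cannot leak
    # between tests or between pytest-xdist workers.
    return Web3(EthereumTesterProvider(EthereumTester()))
```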
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 from setuptools import setup, find_packages
4
5
6 test_deps = [
7 'pytest',
8 'pytest-cov',
9 'py-evm==0.2.0a34',
10 'eth-tester==0.1.0b33',
11 'web3==4.8.2',
12 ]
13
14
15 extras = {
16 'test': test_deps
17 }
18
19
20 setup(
21 name='vyper',
22 # *IMPORTANT*: Don't manually change the version here. Use the 'bumpversion' utility.
23 version='0.1.0-beta.8',
24 description='Vyper Programming Language for Ethereum',
25 long_description_markdown_filename='README.md',
26 author='Vitalik Buterin',
27 author_email='',
28 url='https://github.com/ethereum/vyper',
29 license="MIT",
30 keywords='ethereum',
31 include_package_data=True,
32 packages=find_packages(exclude=('tests', 'docs')),
33 python_requires='>=3.6',
34 py_modules=['vyper'],
35 install_requires=[
36 'pycryptodome>=3.5.1,<4',
37 ],
38 setup_requires=[
39 'pytest-runner',
40 'setuptools-markdown'
41 ],
42 tests_require=test_deps,
43 extras_require=extras,
44 scripts=[
45 'bin/vyper',
46 'bin/vyper-serve',
47 'bin/vyper-lll'
48 ],
49 classifiers=[
50 'Intended Audience :: Developers',
51 'License :: OSI Approved :: MIT License',
52 'Programming Language :: Python :: 3.6',
53 ]
54 )
55
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -4,11 +4,12 @@
test_deps = [
- 'pytest',
- 'pytest-cov',
- 'py-evm==0.2.0a34',
- 'eth-tester==0.1.0b33',
- 'web3==4.8.2',
+ 'pytest>=3.6',
+ 'pytest-cov==2.4.0',
+ 'pytest-xdist==1.18.1',
+ 'py-evm==0.2.0a39',
+ 'eth-tester==0.1.0b37',
+ 'web3==5.0.0a6'
]
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -4,11 +4,12 @@\n \n \n test_deps = [\n- 'pytest',\n- 'pytest-cov',\n- 'py-evm==0.2.0a34',\n- 'eth-tester==0.1.0b33',\n- 'web3==4.8.2',\n+ 'pytest>=3.6',\n+ 'pytest-cov==2.4.0',\n+ 'pytest-xdist==1.18.1',\n+ 'py-evm==0.2.0a39',\n+ 'eth-tester==0.1.0b37',\n+ 'web3==5.0.0a6'\n ]\n", "issue": "State leakage across test runs when using parrellization\n### What is wrong.\r\n\r\nThe tests at `tests/examples/safe_remote_purchase/test_safe_remote_purchase.py` fail when run using `pytest-xdist` to parallelize test runs.\r\n\r\n```\r\n def test_abort(w3, assert_tx_failed, check_balance, get_contract, contract_code):\r\n a0, a1, a2 = w3.eth.accounts[:3]\r\n c = get_contract(contract_code, value=2)\r\n # Only sender can trigger refund\r\n assert_tx_failed(lambda: c.abort(transact={'from': a2}))\r\n # Refund works correctly\r\n c.abort(transact={'from': a0, 'gasPrice': 0})\r\n> assert check_balance() == (INIT_BAL_a0 - w3.toWei(2, 'ether'), INIT_BAL_a1)\r\nE assert (100000000000...0000000000000) == (9999980000000...0000000000000)\r\nE At index 0 diff: 1000000000000000000000000 != 999998000000000000000000\r\nE Use -v to get the full diff\r\n\r\ntests/examples/safe_remote_purchase/test_safe_remote_purchase.py:62: AssertionError\r\n```\r\n\r\nreplicate by installing `pytest-xdist` and running with\r\n\r\n```\r\npytest tests/examples/safe_remote_purchase/test_safe_remote_purchase.py -n 2\r\n```\r\n\r\nIt's likely this isn't deterministic and you may need to run the full suite.\r\n\r\n### How can it be fixed.\r\n\r\nFigure out where statefulness is leaking across test runs and fix it.\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom setuptools import setup, find_packages\n\n\ntest_deps = [\n 'pytest',\n 'pytest-cov',\n 'py-evm==0.2.0a34',\n 'eth-tester==0.1.0b33',\n 'web3==4.8.2',\n]\n\n\nextras = {\n 'test': test_deps\n}\n\n\nsetup(\n name='vyper',\n # *IMPORTANT*: Don't manually change the version here. Use the 'bumpversion' utility.\n version='0.1.0-beta.8',\n description='Vyper Programming Language for Ethereum',\n long_description_markdown_filename='README.md',\n author='Vitalik Buterin',\n author_email='',\n url='https://github.com/ethereum/vyper',\n license=\"MIT\",\n keywords='ethereum',\n include_package_data=True,\n packages=find_packages(exclude=('tests', 'docs')),\n python_requires='>=3.6',\n py_modules=['vyper'],\n install_requires=[\n 'pycryptodome>=3.5.1,<4',\n ],\n setup_requires=[\n 'pytest-runner',\n 'setuptools-markdown'\n ],\n tests_require=test_deps,\n extras_require=extras,\n scripts=[\n 'bin/vyper',\n 'bin/vyper-serve',\n 'bin/vyper-lll'\n ],\n classifiers=[\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6',\n ]\n)\n", "path": "setup.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\nfrom setuptools import setup, find_packages\n\n\ntest_deps = [\n 'pytest>=3.6',\n 'pytest-cov==2.4.0',\n 'pytest-xdist==1.18.1',\n 'py-evm==0.2.0a39',\n 'eth-tester==0.1.0b37',\n 'web3==5.0.0a6'\n]\n\n\nextras = {\n 'test': test_deps\n}\n\n\nsetup(\n name='vyper',\n # *IMPORTANT*: Don't manually change the version here. 
Use the 'bumpversion' utility.\n version='0.1.0-beta.8',\n description='Vyper Programming Language for Ethereum',\n long_description_markdown_filename='README.md',\n author='Vitalik Buterin',\n author_email='',\n url='https://github.com/ethereum/vyper',\n license=\"MIT\",\n keywords='ethereum',\n include_package_data=True,\n packages=find_packages(exclude=('tests', 'docs')),\n python_requires='>=3.6',\n py_modules=['vyper'],\n install_requires=[\n 'pycryptodome>=3.5.1,<4',\n ],\n setup_requires=[\n 'pytest-runner',\n 'setuptools-markdown'\n ],\n tests_require=test_deps,\n extras_require=extras,\n scripts=[\n 'bin/vyper',\n 'bin/vyper-serve',\n 'bin/vyper-lll'\n ],\n classifiers=[\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6',\n ]\n)\n", "path": "setup.py"}]} |
gh_patches_debug_180 | rasdani/github-patches | git_diff | Parsl__parsl-597 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make `GlobusScheme` inherit from `RepresentationMixin`
Otherwise, the config printed in the log is not copy-and-pasteable:
```
storage_access=[<parsl.data_provider.scheme.GlobusScheme object at 0x7f48d021fbe0>],
working_dir=None
```
Make `GlobusScheme` inherit from `RepresentationMixin`
Otherwise, the config printed in the log is not copy-and-pasteable:
```
storage_access=[<parsl.data_provider.scheme.GlobusScheme object at 0x7f48d021fbe0>],
working_dir=None
```
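For context, `RepresentationMixin` builds an object's `repr` from its constructor arguments, so after the change the scheme would be logged as a reconstructable call instead of an opaque object reference. A minimal sketch of the idea (not Parsl's actual implementation):

```python
class RepresentationMixin:
    # Simplified: render the instance as ClassName(attr=value, ...),
    # so the logged config can be pasted straight back into a script.
    def __repr__(self):
        args = ', '.join(f'{name}={value!r}' for name, value in vars(self).items())
        return f'{type(self).__name__}({args})'
```

With that in place, the log line above would read roughly `storage_access=[GlobusScheme(endpoint_uuid='...', endpoint_path=None, local_path=None)]`.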
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `parsl/data_provider/scheme.py`
Content:
```
1
2 class GlobusScheme(object):
3 """Specification for accessing data on a remote executor via Globus.
4
5 Parameters
6 ----------
7 endpoint_uuid : str
8 Universally unique identifier of the Globus endpoint at which the data can be accessed.
9 This can be found in the `Manage Endpoints <https://www.globus.org/app/endpoints>`_ page.
10 endpoint_path : str, optional
11 FIXME
12 local_path : str, optional
13 FIXME
14 """
15 def __init__(self, endpoint_uuid, endpoint_path=None, local_path=None):
16 self.endpoint_uuid = endpoint_uuid
17 self.endpoint_path = endpoint_path
18 self.local_path = local_path
19
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/parsl/data_provider/scheme.py b/parsl/data_provider/scheme.py
--- a/parsl/data_provider/scheme.py
+++ b/parsl/data_provider/scheme.py
@@ -1,5 +1,7 @@
+from parsl.utils import RepresentationMixin
-class GlobusScheme(object):
+
+class GlobusScheme(RepresentationMixin):
"""Specification for accessing data on a remote executor via Globus.
Parameters
| {"golden_diff": "diff --git a/parsl/data_provider/scheme.py b/parsl/data_provider/scheme.py\n--- a/parsl/data_provider/scheme.py\n+++ b/parsl/data_provider/scheme.py\n@@ -1,5 +1,7 @@\n+from parsl.utils import RepresentationMixin\n \n-class GlobusScheme(object):\n+\n+class GlobusScheme(RepresentationMixin):\n \"\"\"Specification for accessing data on a remote executor via Globus.\n \n Parameters\n", "issue": "Make `GlobusScheme` inherit from `RepresentationMixin`\nOtherwise, the config printed in the log is not copy-and-pasteable:\r\n\r\n```\r\n storage_access=[<parsl.data_provider.scheme.GlobusScheme object at 0x7f48d021fbe0>], \r\n working_dir=None\r\n```\r\n\r\n\nMake `GlobusScheme` inherit from `RepresentationMixin`\nOtherwise, the config printed in the log is not copy-and-pasteable:\r\n\r\n```\r\n storage_access=[<parsl.data_provider.scheme.GlobusScheme object at 0x7f48d021fbe0>], \r\n working_dir=None\r\n```\r\n\r\n\n", "before_files": [{"content": "\nclass GlobusScheme(object):\n \"\"\"Specification for accessing data on a remote executor via Globus.\n\n Parameters\n ----------\n endpoint_uuid : str\n Universally unique identifier of the Globus endpoint at which the data can be accessed.\n This can be found in the `Manage Endpoints <https://www.globus.org/app/endpoints>`_ page.\n endpoint_path : str, optional\n FIXME\n local_path : str, optional\n FIXME\n \"\"\"\n def __init__(self, endpoint_uuid, endpoint_path=None, local_path=None):\n self.endpoint_uuid = endpoint_uuid\n self.endpoint_path = endpoint_path\n self.local_path = local_path\n", "path": "parsl/data_provider/scheme.py"}], "after_files": [{"content": "from parsl.utils import RepresentationMixin\n\n\nclass GlobusScheme(RepresentationMixin):\n \"\"\"Specification for accessing data on a remote executor via Globus.\n\n Parameters\n ----------\n endpoint_uuid : str\n Universally unique identifier of the Globus endpoint at which the data can be accessed.\n This can be found in the `Manage Endpoints <https://www.globus.org/app/endpoints>`_ page.\n endpoint_path : str, optional\n FIXME\n local_path : str, optional\n FIXME\n \"\"\"\n def __init__(self, endpoint_uuid, endpoint_path=None, local_path=None):\n self.endpoint_uuid = endpoint_uuid\n self.endpoint_path = endpoint_path\n self.local_path = local_path\n", "path": "parsl/data_provider/scheme.py"}]} |
gh_patches_debug_181 | rasdani/github-patches | git_diff | PaddlePaddle__models-1201 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
will you support CPU training of Transformer?
https://github.com/PaddlePaddle/models/blob/ff63e48f5dd71143d7108198e71a61cdaa0895d9/fluid/neural_machine_translation/transformer/config.py#L2
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `fluid/neural_machine_translation/transformer/config.py`
Content:
```
1 class TrainTaskConfig(object):
2 # only support GPU currently
3 use_gpu = True
4 # the epoch number to train.
5 pass_num = 30
6 # the number of sequences contained in a mini-batch.
7 # deprecated, set batch_size in args.
8 batch_size = 32
9 # the hyper parameters for Adam optimizer.
10 # This static learning_rate will be multiplied to the LearningRateScheduler
11 # derived learning rate the to get the final learning rate.
12 learning_rate = 1
13 beta1 = 0.9
14 beta2 = 0.98
15 eps = 1e-9
16 # the parameters for learning rate scheduling.
17 warmup_steps = 4000
18 # the weight used to mix up the ground-truth distribution and the fixed
19 # uniform distribution in label smoothing when training.
20 # Set this as zero if label smoothing is not wanted.
21 label_smooth_eps = 0.1
22 # the directory for saving trained models.
23 model_dir = "trained_models"
24 # the directory for saving checkpoints.
25 ckpt_dir = "trained_ckpts"
26 # the directory for loading checkpoint.
27 # If provided, continue training from the checkpoint.
28 ckpt_path = None
29 # the parameter to initialize the learning rate scheduler.
30 # It should be provided if use checkpoints, since the checkpoint doesn't
31 # include the training step counter currently.
32 start_step = 0
33
34
35 class InferTaskConfig(object):
36 use_gpu = True
37 # the number of examples in one run for sequence generation.
38 batch_size = 10
39 # the parameters for beam search.
40 beam_size = 5
41 max_out_len = 256
42 # the number of decoded sentences to output.
43 n_best = 1
44 # the flags indicating whether to output the special tokens.
45 output_bos = False
46 output_eos = False
47 output_unk = True
48 # the directory for loading the trained model.
49 model_path = "trained_models/pass_1.infer.model"
50
51
52 class ModelHyperParams(object):
53 # These following five vocabularies related configurations will be set
54 # automatically according to the passed vocabulary path and special tokens.
55 # size of source word dictionary.
56 src_vocab_size = 10000
57 # size of target word dictionay
58 trg_vocab_size = 10000
59 # index for <bos> token
60 bos_idx = 0
61 # index for <eos> token
62 eos_idx = 1
63 # index for <unk> token
64 unk_idx = 2
65 # max length of sequences deciding the size of position encoding table.
66 # Start from 1 and count start and end tokens in.
67 max_length = 256
68 # the dimension for word embeddings, which is also the last dimension of
69 # the input and output of multi-head attention, position-wise feed-forward
70 # networks, encoder and decoder.
71 d_model = 512
72 # size of the hidden layer in position-wise feed-forward networks.
73 d_inner_hid = 2048
74 # the dimension that keys are projected to for dot-product attention.
75 d_key = 64
76 # the dimension that values are projected to for dot-product attention.
77 d_value = 64
78 # number of head used in multi-head attention.
79 n_head = 8
80 # number of sub-layers to be stacked in the encoder and decoder.
81 n_layer = 6
82 # dropout rate used by all dropout layers.
83 dropout = 0.1
84 # random seed used in dropout for CE.
85 dropout_seed = None
86 # the flag indicating whether to share embedding and softmax weights.
87 # vocabularies in source and target should be same for weight sharing.
88 weight_sharing = True
89
90
91 def merge_cfg_from_list(cfg_list, g_cfgs):
92 """
93 Set the above global configurations using the cfg_list.
94 """
95 assert len(cfg_list) % 2 == 0
96 for key, value in zip(cfg_list[0::2], cfg_list[1::2]):
97 for g_cfg in g_cfgs:
98 if hasattr(g_cfg, key):
99 try:
100 value = eval(value)
101 except Exception: # for file path
102 pass
103 setattr(g_cfg, key, value)
104 break
105
106
107 # The placeholder for batch_size in compile time. Must be -1 currently to be
108 # consistent with some ops' infer-shape output in compile time, such as the
109 # sequence_expand op used in beamsearch decoder.
110 batch_size = -1
111 # The placeholder for squence length in compile time.
112 seq_len = ModelHyperParams.max_length
113 # Here list the data shapes and data types of all inputs.
114 # The shapes here act as placeholder and are set to pass the infer-shape in
115 # compile time.
116 input_descs = {
117 # The actual data shape of src_word is:
118 # [batch_size, max_src_len_in_batch, 1]
119 "src_word": [(batch_size, seq_len, 1), "int64", 2],
120 # The actual data shape of src_pos is:
121 # [batch_size, max_src_len_in_batch, 1]
122 "src_pos": [(batch_size, seq_len, 1), "int64"],
123 # This input is used to remove attention weights on paddings in the
124 # encoder.
125 # The actual data shape of src_slf_attn_bias is:
126 # [batch_size, n_head, max_src_len_in_batch, max_src_len_in_batch]
127 "src_slf_attn_bias": [(batch_size, ModelHyperParams.n_head, seq_len,
128 seq_len), "float32"],
129 # The actual data shape of trg_word is:
130 # [batch_size, max_trg_len_in_batch, 1]
131 "trg_word": [(batch_size, seq_len, 1), "int64",
132 2], # lod_level is only used in fast decoder.
133 # The actual data shape of trg_pos is:
134 # [batch_size, max_trg_len_in_batch, 1]
135 "trg_pos": [(batch_size, seq_len, 1), "int64"],
136 # This input is used to remove attention weights on paddings and
137 # subsequent words in the decoder.
138 # The actual data shape of trg_slf_attn_bias is:
139 # [batch_size, n_head, max_trg_len_in_batch, max_trg_len_in_batch]
140 "trg_slf_attn_bias": [(batch_size, ModelHyperParams.n_head, seq_len,
141 seq_len), "float32"],
142 # This input is used to remove attention weights on paddings of the source
143 # input in the encoder-decoder attention.
144 # The actual data shape of trg_src_attn_bias is:
145 # [batch_size, n_head, max_trg_len_in_batch, max_src_len_in_batch]
146 "trg_src_attn_bias": [(batch_size, ModelHyperParams.n_head, seq_len,
147 seq_len), "float32"],
148 # This input is used in independent decoder program for inference.
149 # The actual data shape of enc_output is:
150 # [batch_size, max_src_len_in_batch, d_model]
151 "enc_output": [(batch_size, seq_len, ModelHyperParams.d_model), "float32"],
152 # The actual data shape of label_word is:
153 # [batch_size * max_trg_len_in_batch, 1]
154 "lbl_word": [(batch_size * seq_len, 1), "int64"],
155 # This input is used to mask out the loss of paddding tokens.
156 # The actual data shape of label_weight is:
157 # [batch_size * max_trg_len_in_batch, 1]
158 "lbl_weight": [(batch_size * seq_len, 1), "float32"],
159 # This input is used in beam-search decoder.
160 "init_score": [(batch_size, 1), "float32"],
161 }
162
163 # Names of word embedding table which might be reused for weight sharing.
164 word_emb_param_names = (
165 "src_word_emb_table",
166 "trg_word_emb_table", )
167 # Names of position encoding table which will be initialized externally.
168 pos_enc_param_names = (
169 "src_pos_enc_table",
170 "trg_pos_enc_table", )
171 # separated inputs for different usages.
172 encoder_data_input_fields = (
173 "src_word",
174 "src_pos",
175 "src_slf_attn_bias", )
176 decoder_data_input_fields = (
177 "trg_word",
178 "trg_pos",
179 "trg_slf_attn_bias",
180 "trg_src_attn_bias",
181 "enc_output", )
182 label_data_input_fields = (
183 "lbl_word",
184 "lbl_weight", )
185 # In fast decoder, trg_pos (only containing the current time step) is generated
186 # by ops and trg_slf_attn_bias is not needed.
187 fast_decoder_data_input_fields = (
188 "trg_word",
189 "init_score",
190 "trg_src_attn_bias", )
191
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/fluid/neural_machine_translation/transformer/config.py b/fluid/neural_machine_translation/transformer/config.py
--- a/fluid/neural_machine_translation/transformer/config.py
+++ b/fluid/neural_machine_translation/transformer/config.py
@@ -1,5 +1,5 @@
class TrainTaskConfig(object):
- # only support GPU currently
+ # support both CPU and GPU now.
use_gpu = True
# the epoch number to train.
pass_num = 30
| {"golden_diff": "diff --git a/fluid/neural_machine_translation/transformer/config.py b/fluid/neural_machine_translation/transformer/config.py\n--- a/fluid/neural_machine_translation/transformer/config.py\n+++ b/fluid/neural_machine_translation/transformer/config.py\n@@ -1,5 +1,5 @@\n class TrainTaskConfig(object):\n- # only support GPU currently\n+ # support both CPU and GPU now.\n use_gpu = True\n # the epoch number to train.\n pass_num = 30\n", "issue": "will you support CPU training of Transformer?\nhttps://github.com/PaddlePaddle/models/blob/ff63e48f5dd71143d7108198e71a61cdaa0895d9/fluid/neural_machine_translation/transformer/config.py#L2\n", "before_files": [{"content": "class TrainTaskConfig(object):\n # only support GPU currently\n use_gpu = True\n # the epoch number to train.\n pass_num = 30\n # the number of sequences contained in a mini-batch.\n # deprecated, set batch_size in args.\n batch_size = 32\n # the hyper parameters for Adam optimizer.\n # This static learning_rate will be multiplied to the LearningRateScheduler\n # derived learning rate the to get the final learning rate.\n learning_rate = 1\n beta1 = 0.9\n beta2 = 0.98\n eps = 1e-9\n # the parameters for learning rate scheduling.\n warmup_steps = 4000\n # the weight used to mix up the ground-truth distribution and the fixed\n # uniform distribution in label smoothing when training.\n # Set this as zero if label smoothing is not wanted.\n label_smooth_eps = 0.1\n # the directory for saving trained models.\n model_dir = \"trained_models\"\n # the directory for saving checkpoints.\n ckpt_dir = \"trained_ckpts\"\n # the directory for loading checkpoint.\n # If provided, continue training from the checkpoint.\n ckpt_path = None\n # the parameter to initialize the learning rate scheduler.\n # It should be provided if use checkpoints, since the checkpoint doesn't\n # include the training step counter currently.\n start_step = 0\n\n\nclass InferTaskConfig(object):\n use_gpu = True\n # the number of examples in one run for sequence generation.\n batch_size = 10\n # the parameters for beam search.\n beam_size = 5\n max_out_len = 256\n # the number of decoded sentences to output.\n n_best = 1\n # the flags indicating whether to output the special tokens.\n output_bos = False\n output_eos = False\n output_unk = True\n # the directory for loading the trained model.\n model_path = \"trained_models/pass_1.infer.model\"\n\n\nclass ModelHyperParams(object):\n # These following five vocabularies related configurations will be set\n # automatically according to the passed vocabulary path and special tokens.\n # size of source word dictionary.\n src_vocab_size = 10000\n # size of target word dictionay\n trg_vocab_size = 10000\n # index for <bos> token\n bos_idx = 0\n # index for <eos> token\n eos_idx = 1\n # index for <unk> token\n unk_idx = 2\n # max length of sequences deciding the size of position encoding table.\n # Start from 1 and count start and end tokens in.\n max_length = 256\n # the dimension for word embeddings, which is also the last dimension of\n # the input and output of multi-head attention, position-wise feed-forward\n # networks, encoder and decoder.\n d_model = 512\n # size of the hidden layer in position-wise feed-forward networks.\n d_inner_hid = 2048\n # the dimension that keys are projected to for dot-product attention.\n d_key = 64\n # the dimension that values are projected to for dot-product attention.\n d_value = 64\n # number of head used in multi-head attention.\n n_head = 8\n # number of sub-layers to be 
stacked in the encoder and decoder.\n n_layer = 6\n # dropout rate used by all dropout layers.\n dropout = 0.1\n # random seed used in dropout for CE.\n dropout_seed = None\n # the flag indicating whether to share embedding and softmax weights.\n # vocabularies in source and target should be same for weight sharing.\n weight_sharing = True\n\n\ndef merge_cfg_from_list(cfg_list, g_cfgs):\n \"\"\"\n Set the above global configurations using the cfg_list. \n \"\"\"\n assert len(cfg_list) % 2 == 0\n for key, value in zip(cfg_list[0::2], cfg_list[1::2]):\n for g_cfg in g_cfgs:\n if hasattr(g_cfg, key):\n try:\n value = eval(value)\n except Exception: # for file path\n pass\n setattr(g_cfg, key, value)\n break\n\n\n# The placeholder for batch_size in compile time. Must be -1 currently to be\n# consistent with some ops' infer-shape output in compile time, such as the\n# sequence_expand op used in beamsearch decoder.\nbatch_size = -1\n# The placeholder for squence length in compile time.\nseq_len = ModelHyperParams.max_length\n# Here list the data shapes and data types of all inputs.\n# The shapes here act as placeholder and are set to pass the infer-shape in\n# compile time.\ninput_descs = {\n # The actual data shape of src_word is:\n # [batch_size, max_src_len_in_batch, 1]\n \"src_word\": [(batch_size, seq_len, 1), \"int64\", 2],\n # The actual data shape of src_pos is:\n # [batch_size, max_src_len_in_batch, 1]\n \"src_pos\": [(batch_size, seq_len, 1), \"int64\"],\n # This input is used to remove attention weights on paddings in the\n # encoder.\n # The actual data shape of src_slf_attn_bias is:\n # [batch_size, n_head, max_src_len_in_batch, max_src_len_in_batch]\n \"src_slf_attn_bias\": [(batch_size, ModelHyperParams.n_head, seq_len,\n seq_len), \"float32\"],\n # The actual data shape of trg_word is:\n # [batch_size, max_trg_len_in_batch, 1]\n \"trg_word\": [(batch_size, seq_len, 1), \"int64\",\n 2], # lod_level is only used in fast decoder.\n # The actual data shape of trg_pos is:\n # [batch_size, max_trg_len_in_batch, 1]\n \"trg_pos\": [(batch_size, seq_len, 1), \"int64\"],\n # This input is used to remove attention weights on paddings and\n # subsequent words in the decoder.\n # The actual data shape of trg_slf_attn_bias is:\n # [batch_size, n_head, max_trg_len_in_batch, max_trg_len_in_batch]\n \"trg_slf_attn_bias\": [(batch_size, ModelHyperParams.n_head, seq_len,\n seq_len), \"float32\"],\n # This input is used to remove attention weights on paddings of the source\n # input in the encoder-decoder attention.\n # The actual data shape of trg_src_attn_bias is:\n # [batch_size, n_head, max_trg_len_in_batch, max_src_len_in_batch]\n \"trg_src_attn_bias\": [(batch_size, ModelHyperParams.n_head, seq_len,\n seq_len), \"float32\"],\n # This input is used in independent decoder program for inference.\n # The actual data shape of enc_output is:\n # [batch_size, max_src_len_in_batch, d_model]\n \"enc_output\": [(batch_size, seq_len, ModelHyperParams.d_model), \"float32\"],\n # The actual data shape of label_word is:\n # [batch_size * max_trg_len_in_batch, 1]\n \"lbl_word\": [(batch_size * seq_len, 1), \"int64\"],\n # This input is used to mask out the loss of paddding tokens.\n # The actual data shape of label_weight is:\n # [batch_size * max_trg_len_in_batch, 1]\n \"lbl_weight\": [(batch_size * seq_len, 1), \"float32\"],\n # This input is used in beam-search decoder.\n \"init_score\": [(batch_size, 1), \"float32\"],\n}\n\n# Names of word embedding table which might be reused for weight 
sharing.\nword_emb_param_names = (\n \"src_word_emb_table\",\n \"trg_word_emb_table\", )\n# Names of position encoding table which will be initialized externally.\npos_enc_param_names = (\n \"src_pos_enc_table\",\n \"trg_pos_enc_table\", )\n# separated inputs for different usages.\nencoder_data_input_fields = (\n \"src_word\",\n \"src_pos\",\n \"src_slf_attn_bias\", )\ndecoder_data_input_fields = (\n \"trg_word\",\n \"trg_pos\",\n \"trg_slf_attn_bias\",\n \"trg_src_attn_bias\",\n \"enc_output\", )\nlabel_data_input_fields = (\n \"lbl_word\",\n \"lbl_weight\", )\n# In fast decoder, trg_pos (only containing the current time step) is generated\n# by ops and trg_slf_attn_bias is not needed.\nfast_decoder_data_input_fields = (\n \"trg_word\",\n \"init_score\",\n \"trg_src_attn_bias\", )\n", "path": "fluid/neural_machine_translation/transformer/config.py"}], "after_files": [{"content": "class TrainTaskConfig(object):\n # support both CPU and GPU now.\n use_gpu = True\n # the epoch number to train.\n pass_num = 30\n # the number of sequences contained in a mini-batch.\n # deprecated, set batch_size in args.\n batch_size = 32\n # the hyper parameters for Adam optimizer.\n # This static learning_rate will be multiplied to the LearningRateScheduler\n # derived learning rate the to get the final learning rate.\n learning_rate = 1\n beta1 = 0.9\n beta2 = 0.98\n eps = 1e-9\n # the parameters for learning rate scheduling.\n warmup_steps = 4000\n # the weight used to mix up the ground-truth distribution and the fixed\n # uniform distribution in label smoothing when training.\n # Set this as zero if label smoothing is not wanted.\n label_smooth_eps = 0.1\n # the directory for saving trained models.\n model_dir = \"trained_models\"\n # the directory for saving checkpoints.\n ckpt_dir = \"trained_ckpts\"\n # the directory for loading checkpoint.\n # If provided, continue training from the checkpoint.\n ckpt_path = None\n # the parameter to initialize the learning rate scheduler.\n # It should be provided if use checkpoints, since the checkpoint doesn't\n # include the training step counter currently.\n start_step = 0\n\n\nclass InferTaskConfig(object):\n use_gpu = True\n # the number of examples in one run for sequence generation.\n batch_size = 10\n # the parameters for beam search.\n beam_size = 5\n max_out_len = 256\n # the number of decoded sentences to output.\n n_best = 1\n # the flags indicating whether to output the special tokens.\n output_bos = False\n output_eos = False\n output_unk = True\n # the directory for loading the trained model.\n model_path = \"trained_models/pass_1.infer.model\"\n\n\nclass ModelHyperParams(object):\n # These following five vocabularies related configurations will be set\n # automatically according to the passed vocabulary path and special tokens.\n # size of source word dictionary.\n src_vocab_size = 10000\n # size of target word dictionay\n trg_vocab_size = 10000\n # index for <bos> token\n bos_idx = 0\n # index for <eos> token\n eos_idx = 1\n # index for <unk> token\n unk_idx = 2\n # max length of sequences deciding the size of position encoding table.\n # Start from 1 and count start and end tokens in.\n max_length = 256\n # the dimension for word embeddings, which is also the last dimension of\n # the input and output of multi-head attention, position-wise feed-forward\n # networks, encoder and decoder.\n d_model = 512\n # size of the hidden layer in position-wise feed-forward networks.\n d_inner_hid = 2048\n # the dimension that keys are projected to for 
dot-product attention.\n d_key = 64\n # the dimension that values are projected to for dot-product attention.\n d_value = 64\n # number of head used in multi-head attention.\n n_head = 8\n # number of sub-layers to be stacked in the encoder and decoder.\n n_layer = 6\n # dropout rate used by all dropout layers.\n dropout = 0.1\n # random seed used in dropout for CE.\n dropout_seed = None\n # the flag indicating whether to share embedding and softmax weights.\n # vocabularies in source and target should be same for weight sharing.\n weight_sharing = True\n\n\ndef merge_cfg_from_list(cfg_list, g_cfgs):\n \"\"\"\n Set the above global configurations using the cfg_list. \n \"\"\"\n assert len(cfg_list) % 2 == 0\n for key, value in zip(cfg_list[0::2], cfg_list[1::2]):\n for g_cfg in g_cfgs:\n if hasattr(g_cfg, key):\n try:\n value = eval(value)\n except Exception: # for file path\n pass\n setattr(g_cfg, key, value)\n break\n\n\n# The placeholder for batch_size in compile time. Must be -1 currently to be\n# consistent with some ops' infer-shape output in compile time, such as the\n# sequence_expand op used in beamsearch decoder.\nbatch_size = -1\n# The placeholder for squence length in compile time.\nseq_len = ModelHyperParams.max_length\n# Here list the data shapes and data types of all inputs.\n# The shapes here act as placeholder and are set to pass the infer-shape in\n# compile time.\ninput_descs = {\n # The actual data shape of src_word is:\n # [batch_size, max_src_len_in_batch, 1]\n \"src_word\": [(batch_size, seq_len, 1), \"int64\", 2],\n # The actual data shape of src_pos is:\n # [batch_size, max_src_len_in_batch, 1]\n \"src_pos\": [(batch_size, seq_len, 1), \"int64\"],\n # This input is used to remove attention weights on paddings in the\n # encoder.\n # The actual data shape of src_slf_attn_bias is:\n # [batch_size, n_head, max_src_len_in_batch, max_src_len_in_batch]\n \"src_slf_attn_bias\": [(batch_size, ModelHyperParams.n_head, seq_len,\n seq_len), \"float32\"],\n # The actual data shape of trg_word is:\n # [batch_size, max_trg_len_in_batch, 1]\n \"trg_word\": [(batch_size, seq_len, 1), \"int64\",\n 2], # lod_level is only used in fast decoder.\n # The actual data shape of trg_pos is:\n # [batch_size, max_trg_len_in_batch, 1]\n \"trg_pos\": [(batch_size, seq_len, 1), \"int64\"],\n # This input is used to remove attention weights on paddings and\n # subsequent words in the decoder.\n # The actual data shape of trg_slf_attn_bias is:\n # [batch_size, n_head, max_trg_len_in_batch, max_trg_len_in_batch]\n \"trg_slf_attn_bias\": [(batch_size, ModelHyperParams.n_head, seq_len,\n seq_len), \"float32\"],\n # This input is used to remove attention weights on paddings of the source\n # input in the encoder-decoder attention.\n # The actual data shape of trg_src_attn_bias is:\n # [batch_size, n_head, max_trg_len_in_batch, max_src_len_in_batch]\n \"trg_src_attn_bias\": [(batch_size, ModelHyperParams.n_head, seq_len,\n seq_len), \"float32\"],\n # This input is used in independent decoder program for inference.\n # The actual data shape of enc_output is:\n # [batch_size, max_src_len_in_batch, d_model]\n \"enc_output\": [(batch_size, seq_len, ModelHyperParams.d_model), \"float32\"],\n # The actual data shape of label_word is:\n # [batch_size * max_trg_len_in_batch, 1]\n \"lbl_word\": [(batch_size * seq_len, 1), \"int64\"],\n # This input is used to mask out the loss of paddding tokens.\n # The actual data shape of label_weight is:\n # [batch_size * max_trg_len_in_batch, 1]\n \"lbl_weight\": 
[(batch_size * seq_len, 1), \"float32\"],\n # This input is used in beam-search decoder.\n \"init_score\": [(batch_size, 1), \"float32\"],\n}\n\n# Names of word embedding table which might be reused for weight sharing.\nword_emb_param_names = (\n \"src_word_emb_table\",\n \"trg_word_emb_table\", )\n# Names of position encoding table which will be initialized externally.\npos_enc_param_names = (\n \"src_pos_enc_table\",\n \"trg_pos_enc_table\", )\n# separated inputs for different usages.\nencoder_data_input_fields = (\n \"src_word\",\n \"src_pos\",\n \"src_slf_attn_bias\", )\ndecoder_data_input_fields = (\n \"trg_word\",\n \"trg_pos\",\n \"trg_slf_attn_bias\",\n \"trg_src_attn_bias\",\n \"enc_output\", )\nlabel_data_input_fields = (\n \"lbl_word\",\n \"lbl_weight\", )\n# In fast decoder, trg_pos (only containing the current time step) is generated\n# by ops and trg_slf_attn_bias is not needed.\nfast_decoder_data_input_fields = (\n \"trg_word\",\n \"init_score\",\n \"trg_src_attn_bias\", )\n", "path": "fluid/neural_machine_translation/transformer/config.py"}]} |
gh_patches_debug_182 | rasdani/github-patches | git_diff | mozmeao__snippets-service-892 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ASRSnippet list view search refers to target
ASRSnippet list view search requires an update to refer to `targets` after #875.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `snippets/base/admin/adminmodels.py`
Content:
```
1 import re
2
3 from django.contrib import admin
4 from django.db.models import TextField, Q
5 from django.template.loader import get_template
6 from django.utils.safestring import mark_safe
7
8 from reversion.admin import VersionAdmin
9 from django_ace import AceWidget
10 from django_statsd.clients import statsd
11 from jinja2.meta import find_undeclared_variables
12 from django_admin_listfilter_dropdown.filters import RelatedDropdownFilter
13
14 from snippets.base import forms, models
15 from snippets.base.models import JINJA_ENV
16 from snippets.base.admin import filters
17 from snippets.base.admin import actions
18
19
20 MATCH_LOCALE_REGEX = re.compile(r'(\w+(?:-\w+)*)')
21 RESERVED_VARIABLES = ('_', 'snippet_id')
22
23
24 class ClientMatchRuleAdmin(VersionAdmin, admin.ModelAdmin):
25 list_display = ('description', 'is_exclusion', 'startpage_version', 'name',
26 'version', 'locale', 'appbuildid', 'build_target',
27 'channel', 'os_version', 'distribution',
28 'distribution_version', 'modified')
29 list_filter = ('name', 'version', 'os_version', 'appbuildid',
30 'build_target', 'channel', 'distribution', 'locale')
31 save_on_top = True
32 search_fields = ('description',)
33
34
35 class LogEntryAdmin(admin.ModelAdmin):
36 list_display = ('user', 'content_type', 'object_id', 'object_repr', 'change_message')
37 list_filter = ('user', 'content_type')
38
39
40 class SnippetTemplateVariableInline(admin.TabularInline):
41 model = models.SnippetTemplateVariable
42 formset = forms.SnippetTemplateVariableInlineFormset
43 max_num = 0
44 can_delete = False
45 readonly_fields = ('name',)
46 fields = ('name', 'type', 'order', 'description')
47
48
49 class SnippetTemplateAdmin(VersionAdmin, admin.ModelAdmin):
50 save_on_top = True
51 list_display = ('name', 'priority', 'hidden')
52 list_filter = ('hidden', 'startpage')
53 inlines = (SnippetTemplateVariableInline,)
54 formfield_overrides = {
55 TextField: {'widget': AceWidget(mode='html', theme='github',
56 width='1200px', height='500px')},
57 }
58
59 class Media:
60 css = {
61 'all': ('css/admin.css',)
62 }
63
64 def save_related(self, request, form, formsets, change):
65 """
66 After saving the related objects, remove and add
67 SnippetTemplateVariables depending on how the template code changed.
68 """
69 super(SnippetTemplateAdmin, self).save_related(request, form, formsets,
70 change)
71
72 # Parse the template code and find any undefined variables.
73 ast = JINJA_ENV.env.parse(form.instance.code)
74 new_vars = find_undeclared_variables(ast)
75 var_manager = form.instance.variable_set
76
77 # Filter out reserved variable names.
78 new_vars = [x for x in new_vars if x not in RESERVED_VARIABLES]
79
80 # Delete variables not in the new set.
81 var_manager.filter(~Q(name__in=new_vars)).delete()
82
83 # Create variables that don't exist.
84 for i, variable in enumerate(new_vars, start=1):
85 obj, _ = models.SnippetTemplateVariable.objects.get_or_create(
86 template=form.instance, name=variable)
87 if obj.order == 0:
88 obj.order = i * 10
89 obj.save()
90
91
92 class UploadedFileAdmin(admin.ModelAdmin):
93 readonly_fields = ('url', 'preview', 'snippets')
94 list_display = ('name', 'url', 'preview', 'modified')
95 prepopulated_fields = {'name': ('file',)}
96 form = forms.UploadedFileAdminForm
97
98 def preview(self, obj):
99 template = get_template('base/uploadedfile_preview.jinja')
100 return mark_safe(template.render({'file': obj}))
101
102 def snippets(self, obj):
103 """Snippets using this file."""
104 template = get_template('base/uploadedfile_snippets.jinja')
105 return mark_safe(template.render({'snippets': obj.snippets}))
106
107
108 class AddonAdmin(admin.ModelAdmin):
109 list_display = ('name', 'guid')
110
111
112 class ASRSnippetAdmin(admin.ModelAdmin):
113 form = forms.ASRSnippetAdminForm
114
115 list_display_links = (
116 'id',
117 'name',
118 )
119 list_display = (
120 'id',
121 'name',
122 'status',
123 'modified',
124 )
125 list_filter = (
126 filters.ModifiedFilter,
127 'status',
128 filters.ChannelFilter,
129 ('template', RelatedDropdownFilter),
130 )
131 search_fields = (
132 'name',
133 'id',
134 'campaign__name',
135 'target__name',
136 )
137 autocomplete_fields = (
138 'campaign',
139 )
140 preserve_filters = True
141 readonly_fields = (
142 'id',
143 'created',
144 'modified',
145 'uuid',
146 'creator',
147 'preview_url',
148 'migrated_from_linked',
149 )
150 filter_horizontal = (
151 'targets',
152 'locales',
153 )
154 save_on_top = True
155 save_as = True
156 view_on_site = False
157 actions = (
158 actions.duplicate_snippets_action,
159 actions.publish_snippets_action,
160 )
161
162 fieldsets = (
163 ('ID', {
164 'fields': ('id', 'name', 'status', 'creator', 'preview_url', 'migrated_from_linked')
165 }),
166 ('Content', {
167 'description': (
168 '''
169 <strong>Available deep links:</strong><br/>
170 <ol>
171 <li><code>special:accounts</code> to open Firefox Accounts</li>
172 <li><code>special:appMenu</code> to open the hamburger menu</li>
173 </ol><br/>
174 <strong>Automatically add Snippet ID:</strong><br/>
175 You can use <code>[[snippet_id]]</code> in any field and it
176 will be automatically replaced by Snippet ID when served to users.
177 <br/>
178 Example: This is a <code><a href="https://example.com?utm_term=[[snippet_id]]">link</a></code> # noqa
179 <br/>
180 '''
181 ),
182 'fields': ('template', 'data'),
183 }),
184 ('Publishing Options', {
185 'fields': (
186 'campaign',
187 'targets',
188 ('publish_start', 'publish_end'),
189 'locales',
190 'weight',)
191 }),
192 ('Other Info', {
193 'fields': ('uuid', ('created', 'modified'), 'for_qa'),
194 'classes': ('collapse',)
195 }),
196 )
197
198 class Media:
199 css = {
200 'all': (
201 'css/admin/ASRSnippetAdmin.css',
202 'css/admin/IDFieldHighlight.css',
203 )
204 }
205 js = (
206 'js/admin/clipboard.min.js',
207 'js/admin/copy_preview.js',
208 )
209
210 def save_model(self, request, obj, form, change):
211 if not obj.creator_id:
212 obj.creator = request.user
213 statsd.incr('save.asrsnippet')
214 super().save_model(request, obj, form, change)
215
216 def preview_url(self, obj):
217 text = f'''
218 <span id="previewLinkUrl">{obj.get_preview_url()}</span>
219 <button id="copyPreviewLink" class="btn"
220 data-clipboard-target="#previewLinkUrl"
221 originalText="Copy to Clipboard" type="button">
222 Copy to Clipboard
223 </button>
224 '''
225 return mark_safe(text)
226
227 def migrated_from_linked(self, obj):
228 return mark_safe(
229 f'<a href={obj.migrated_from.get_admin_url(full=False)}>{obj.migrated_from.name}</a>')
230 migrated_from_linked.short_description = 'Migrated From'
231
232 def change_view(self, request, *args, **kwargs):
233 if request.method == 'POST' and '_saveasnew' in request.POST:
234 # Always saved cloned snippets as un-published and un-check ready for review.
235 post_data = request.POST.copy()
236 post_data['status'] = models.STATUS_CHOICES['Draft']
237 post_data.pop('migrated_from', None)
238 request.POST = post_data
239 return super().change_view(request, *args, **kwargs)
240
241 def get_readonly_fields(self, request, obj):
242 if not request.user.is_superuser:
243 return self.readonly_fields + ('for_qa',)
244 return self.readonly_fields
245
246 def get_queryset(self, request):
247 queryset = super().get_queryset(request)
248 if request.user.is_superuser:
249 return queryset
250 return queryset.filter(for_qa=False)
251
252
253 class CampaignAdmin(admin.ModelAdmin):
254 readonly_fields = ('created', 'modified', 'creator',)
255 prepopulated_fields = {'slug': ('name',)}
256
257 fieldsets = (
258 ('ID', {'fields': ('name', 'slug')}),
259 ('Other Info', {
260 'fields': ('creator', ('created', 'modified')),
261 }),
262 )
263 search_fields = (
264 'name',
265 )
266
267 def save_model(self, request, obj, form, change):
268 if not obj.creator_id:
269 obj.creator = request.user
270 statsd.incr('save.campaign')
271 super().save_model(request, obj, form, change)
272
273
274 class TargetAdmin(admin.ModelAdmin):
275 form = forms.TargetAdminForm
276 save_on_top = True
277 readonly_fields = ('created', 'modified', 'creator', 'jexl_expr')
278 filter_horizontal = (
279 'client_match_rules',
280 )
281 search_fields = (
282 'name',
283 )
284 fieldsets = (
285 ('ID', {'fields': ('name',)}),
286 ('Product channels', {
287 'description': 'What channels will this snippet be available in?',
288 'fields': (('on_release', 'on_beta', 'on_aurora', 'on_nightly', 'on_esr'),)
289 }),
290 ('Targeting', {
291 'fields': (
292 'filtr_is_default_browser',
293 'filtr_updates_enabled',
294 'filtr_updates_autodownload_enabled',
295 'filtr_profile_age_created',
296 'filtr_firefox_version',
297 'filtr_previous_session_end',
298 'filtr_uses_firefox_sync',
299 'filtr_country',
300 'filtr_is_developer',
301 'filtr_current_search_engine',
302 'filtr_browser_addon',
303 'filtr_total_bookmarks_count',
304 )
305 }),
306 ('Advanced Targeting', {
307 'fields': (
308 'client_match_rules',
309 )
310 }),
311 ('Other Info', {
312 'fields': ('creator', ('created', 'modified'), 'jexl_expr'),
313 }),
314 )
315
316 def save_model(self, request, obj, form, change):
317 if not obj.creator_id:
318 obj.creator = request.user
319 statsd.incr('save.target')
320 super().save_model(request, obj, form, change)
321
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/snippets/base/admin/adminmodels.py b/snippets/base/admin/adminmodels.py
--- a/snippets/base/admin/adminmodels.py
+++ b/snippets/base/admin/adminmodels.py
@@ -132,7 +132,7 @@
'name',
'id',
'campaign__name',
- 'target__name',
+ 'targets__name',
)
autocomplete_fields = (
'campaign',
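Why the one-word change matters: after #875 the admin works with the plural `targets` many-to-many relation (it already appears in `filter_horizontal` above), so the search lookup has to traverse `targets__name`; a stale `target__name` lookup no longer resolves to a field and breaks the admin search. Below is a minimal sketch of the corrected fragment, trimmed to the one option that changes (not the full `ASRSnippetAdmin`):
```python
from django.contrib import admin

# Illustrative fragment only; the real ASRSnippetAdmin defines many more options.
class ASRSnippetAdmin(admin.ModelAdmin):
    search_fields = (
        'name',
        'id',
        'campaign__name',
        'targets__name',  # was 'target__name'; the relation is plural after #875
    )
```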
| {"golden_diff": "diff --git a/snippets/base/admin/adminmodels.py b/snippets/base/admin/adminmodels.py\n--- a/snippets/base/admin/adminmodels.py\n+++ b/snippets/base/admin/adminmodels.py\n@@ -132,7 +132,7 @@\n 'name',\n 'id',\n 'campaign__name',\n- 'target__name',\n+ 'targets__name',\n )\n autocomplete_fields = (\n 'campaign',\n", "issue": "ASRSnippet list view search refers to target\nASRSnippet list view search requires update to refer to `targets` after #875 \n", "before_files": [{"content": "import re\n\nfrom django.contrib import admin\nfrom django.db.models import TextField, Q\nfrom django.template.loader import get_template\nfrom django.utils.safestring import mark_safe\n\nfrom reversion.admin import VersionAdmin\nfrom django_ace import AceWidget\nfrom django_statsd.clients import statsd\nfrom jinja2.meta import find_undeclared_variables\nfrom django_admin_listfilter_dropdown.filters import RelatedDropdownFilter\n\nfrom snippets.base import forms, models\nfrom snippets.base.models import JINJA_ENV\nfrom snippets.base.admin import filters\nfrom snippets.base.admin import actions\n\n\nMATCH_LOCALE_REGEX = re.compile(r'(\\w+(?:-\\w+)*)')\nRESERVED_VARIABLES = ('_', 'snippet_id')\n\n\nclass ClientMatchRuleAdmin(VersionAdmin, admin.ModelAdmin):\n list_display = ('description', 'is_exclusion', 'startpage_version', 'name',\n 'version', 'locale', 'appbuildid', 'build_target',\n 'channel', 'os_version', 'distribution',\n 'distribution_version', 'modified')\n list_filter = ('name', 'version', 'os_version', 'appbuildid',\n 'build_target', 'channel', 'distribution', 'locale')\n save_on_top = True\n search_fields = ('description',)\n\n\nclass LogEntryAdmin(admin.ModelAdmin):\n list_display = ('user', 'content_type', 'object_id', 'object_repr', 'change_message')\n list_filter = ('user', 'content_type')\n\n\nclass SnippetTemplateVariableInline(admin.TabularInline):\n model = models.SnippetTemplateVariable\n formset = forms.SnippetTemplateVariableInlineFormset\n max_num = 0\n can_delete = False\n readonly_fields = ('name',)\n fields = ('name', 'type', 'order', 'description')\n\n\nclass SnippetTemplateAdmin(VersionAdmin, admin.ModelAdmin):\n save_on_top = True\n list_display = ('name', 'priority', 'hidden')\n list_filter = ('hidden', 'startpage')\n inlines = (SnippetTemplateVariableInline,)\n formfield_overrides = {\n TextField: {'widget': AceWidget(mode='html', theme='github',\n width='1200px', height='500px')},\n }\n\n class Media:\n css = {\n 'all': ('css/admin.css',)\n }\n\n def save_related(self, request, form, formsets, change):\n \"\"\"\n After saving the related objects, remove and add\n SnippetTemplateVariables depending on how the template code changed.\n \"\"\"\n super(SnippetTemplateAdmin, self).save_related(request, form, formsets,\n change)\n\n # Parse the template code and find any undefined variables.\n ast = JINJA_ENV.env.parse(form.instance.code)\n new_vars = find_undeclared_variables(ast)\n var_manager = form.instance.variable_set\n\n # Filter out reserved variable names.\n new_vars = [x for x in new_vars if x not in RESERVED_VARIABLES]\n\n # Delete variables not in the new set.\n var_manager.filter(~Q(name__in=new_vars)).delete()\n\n # Create variables that don't exist.\n for i, variable in enumerate(new_vars, start=1):\n obj, _ = models.SnippetTemplateVariable.objects.get_or_create(\n template=form.instance, name=variable)\n if obj.order == 0:\n obj.order = i * 10\n obj.save()\n\n\nclass UploadedFileAdmin(admin.ModelAdmin):\n readonly_fields = ('url', 'preview', 'snippets')\n 
list_display = ('name', 'url', 'preview', 'modified')\n prepopulated_fields = {'name': ('file',)}\n form = forms.UploadedFileAdminForm\n\n def preview(self, obj):\n template = get_template('base/uploadedfile_preview.jinja')\n return mark_safe(template.render({'file': obj}))\n\n def snippets(self, obj):\n \"\"\"Snippets using this file.\"\"\"\n template = get_template('base/uploadedfile_snippets.jinja')\n return mark_safe(template.render({'snippets': obj.snippets}))\n\n\nclass AddonAdmin(admin.ModelAdmin):\n list_display = ('name', 'guid')\n\n\nclass ASRSnippetAdmin(admin.ModelAdmin):\n form = forms.ASRSnippetAdminForm\n\n list_display_links = (\n 'id',\n 'name',\n )\n list_display = (\n 'id',\n 'name',\n 'status',\n 'modified',\n )\n list_filter = (\n filters.ModifiedFilter,\n 'status',\n filters.ChannelFilter,\n ('template', RelatedDropdownFilter),\n )\n search_fields = (\n 'name',\n 'id',\n 'campaign__name',\n 'target__name',\n )\n autocomplete_fields = (\n 'campaign',\n )\n preserve_filters = True\n readonly_fields = (\n 'id',\n 'created',\n 'modified',\n 'uuid',\n 'creator',\n 'preview_url',\n 'migrated_from_linked',\n )\n filter_horizontal = (\n 'targets',\n 'locales',\n )\n save_on_top = True\n save_as = True\n view_on_site = False\n actions = (\n actions.duplicate_snippets_action,\n actions.publish_snippets_action,\n )\n\n fieldsets = (\n ('ID', {\n 'fields': ('id', 'name', 'status', 'creator', 'preview_url', 'migrated_from_linked')\n }),\n ('Content', {\n 'description': (\n '''\n <strong>Available deep links:</strong><br/>\n <ol>\n <li><code>special:accounts</code> to open Firefox Accounts</li>\n <li><code>special:appMenu</code> to open the hamburger menu</li>\n </ol><br/>\n <strong>Automatically add Snippet ID:</strong><br/>\n You can use <code>[[snippet_id]]</code> in any field and it\n will be automatically replaced by Snippet ID when served to users.\n <br/>\n Example: This is a <code><a href="https://example.com?utm_term=[[snippet_id]]">link</a></code> # noqa\n <br/>\n '''\n ),\n 'fields': ('template', 'data'),\n }),\n ('Publishing Options', {\n 'fields': (\n 'campaign',\n 'targets',\n ('publish_start', 'publish_end'),\n 'locales',\n 'weight',)\n }),\n ('Other Info', {\n 'fields': ('uuid', ('created', 'modified'), 'for_qa'),\n 'classes': ('collapse',)\n }),\n )\n\n class Media:\n css = {\n 'all': (\n 'css/admin/ASRSnippetAdmin.css',\n 'css/admin/IDFieldHighlight.css',\n )\n }\n js = (\n 'js/admin/clipboard.min.js',\n 'js/admin/copy_preview.js',\n )\n\n def save_model(self, request, obj, form, change):\n if not obj.creator_id:\n obj.creator = request.user\n statsd.incr('save.asrsnippet')\n super().save_model(request, obj, form, change)\n\n def preview_url(self, obj):\n text = f'''\n <span id=\"previewLinkUrl\">{obj.get_preview_url()}</span>\n <button id=\"copyPreviewLink\" class=\"btn\"\n data-clipboard-target=\"#previewLinkUrl\"\n originalText=\"Copy to Clipboard\" type=\"button\">\n Copy to Clipboard\n </button>\n '''\n return mark_safe(text)\n\n def migrated_from_linked(self, obj):\n return mark_safe(\n f'<a href={obj.migrated_from.get_admin_url(full=False)}>{obj.migrated_from.name}</a>')\n migrated_from_linked.short_description = 'Migrated From'\n\n def change_view(self, request, *args, **kwargs):\n if request.method == 'POST' and '_saveasnew' in request.POST:\n # Always saved cloned snippets as un-published and un-check ready for review.\n post_data = request.POST.copy()\n post_data['status'] = models.STATUS_CHOICES['Draft']\n post_data.pop('migrated_from', None)\n 
request.POST = post_data\n return super().change_view(request, *args, **kwargs)\n\n def get_readonly_fields(self, request, obj):\n if not request.user.is_superuser:\n return self.readonly_fields + ('for_qa',)\n return self.readonly_fields\n\n def get_queryset(self, request):\n queryset = super().get_queryset(request)\n if request.user.is_superuser:\n return queryset\n return queryset.filter(for_qa=False)\n\n\nclass CampaignAdmin(admin.ModelAdmin):\n readonly_fields = ('created', 'modified', 'creator',)\n prepopulated_fields = {'slug': ('name',)}\n\n fieldsets = (\n ('ID', {'fields': ('name', 'slug')}),\n ('Other Info', {\n 'fields': ('creator', ('created', 'modified')),\n }),\n )\n search_fields = (\n 'name',\n )\n\n def save_model(self, request, obj, form, change):\n if not obj.creator_id:\n obj.creator = request.user\n statsd.incr('save.campaign')\n super().save_model(request, obj, form, change)\n\n\nclass TargetAdmin(admin.ModelAdmin):\n form = forms.TargetAdminForm\n save_on_top = True\n readonly_fields = ('created', 'modified', 'creator', 'jexl_expr')\n filter_horizontal = (\n 'client_match_rules',\n )\n search_fields = (\n 'name',\n )\n fieldsets = (\n ('ID', {'fields': ('name',)}),\n ('Product channels', {\n 'description': 'What channels will this snippet be available in?',\n 'fields': (('on_release', 'on_beta', 'on_aurora', 'on_nightly', 'on_esr'),)\n }),\n ('Targeting', {\n 'fields': (\n 'filtr_is_default_browser',\n 'filtr_updates_enabled',\n 'filtr_updates_autodownload_enabled',\n 'filtr_profile_age_created',\n 'filtr_firefox_version',\n 'filtr_previous_session_end',\n 'filtr_uses_firefox_sync',\n 'filtr_country',\n 'filtr_is_developer',\n 'filtr_current_search_engine',\n 'filtr_browser_addon',\n 'filtr_total_bookmarks_count',\n )\n }),\n ('Advanced Targeting', {\n 'fields': (\n 'client_match_rules',\n )\n }),\n ('Other Info', {\n 'fields': ('creator', ('created', 'modified'), 'jexl_expr'),\n }),\n )\n\n def save_model(self, request, obj, form, change):\n if not obj.creator_id:\n obj.creator = request.user\n statsd.incr('save.target')\n super().save_model(request, obj, form, change)\n", "path": "snippets/base/admin/adminmodels.py"}], "after_files": [{"content": "import re\n\nfrom django.contrib import admin\nfrom django.db.models import TextField, Q\nfrom django.template.loader import get_template\nfrom django.utils.safestring import mark_safe\n\nfrom reversion.admin import VersionAdmin\nfrom django_ace import AceWidget\nfrom django_statsd.clients import statsd\nfrom jinja2.meta import find_undeclared_variables\nfrom django_admin_listfilter_dropdown.filters import RelatedDropdownFilter\n\nfrom snippets.base import forms, models\nfrom snippets.base.models import JINJA_ENV\nfrom snippets.base.admin import filters\nfrom snippets.base.admin import actions\n\n\nMATCH_LOCALE_REGEX = re.compile(r'(\\w+(?:-\\w+)*)')\nRESERVED_VARIABLES = ('_', 'snippet_id')\n\n\nclass ClientMatchRuleAdmin(VersionAdmin, admin.ModelAdmin):\n list_display = ('description', 'is_exclusion', 'startpage_version', 'name',\n 'version', 'locale', 'appbuildid', 'build_target',\n 'channel', 'os_version', 'distribution',\n 'distribution_version', 'modified')\n list_filter = ('name', 'version', 'os_version', 'appbuildid',\n 'build_target', 'channel', 'distribution', 'locale')\n save_on_top = True\n search_fields = ('description',)\n\n\nclass LogEntryAdmin(admin.ModelAdmin):\n list_display = ('user', 'content_type', 'object_id', 'object_repr', 'change_message')\n list_filter = ('user', 'content_type')\n\n\nclass 
SnippetTemplateVariableInline(admin.TabularInline):\n model = models.SnippetTemplateVariable\n formset = forms.SnippetTemplateVariableInlineFormset\n max_num = 0\n can_delete = False\n readonly_fields = ('name',)\n fields = ('name', 'type', 'order', 'description')\n\n\nclass SnippetTemplateAdmin(VersionAdmin, admin.ModelAdmin):\n save_on_top = True\n list_display = ('name', 'priority', 'hidden')\n list_filter = ('hidden', 'startpage')\n inlines = (SnippetTemplateVariableInline,)\n formfield_overrides = {\n TextField: {'widget': AceWidget(mode='html', theme='github',\n width='1200px', height='500px')},\n }\n\n class Media:\n css = {\n 'all': ('css/admin.css',)\n }\n\n def save_related(self, request, form, formsets, change):\n \"\"\"\n After saving the related objects, remove and add\n SnippetTemplateVariables depending on how the template code changed.\n \"\"\"\n super(SnippetTemplateAdmin, self).save_related(request, form, formsets,\n change)\n\n # Parse the template code and find any undefined variables.\n ast = JINJA_ENV.env.parse(form.instance.code)\n new_vars = find_undeclared_variables(ast)\n var_manager = form.instance.variable_set\n\n # Filter out reserved variable names.\n new_vars = [x for x in new_vars if x not in RESERVED_VARIABLES]\n\n # Delete variables not in the new set.\n var_manager.filter(~Q(name__in=new_vars)).delete()\n\n # Create variables that don't exist.\n for i, variable in enumerate(new_vars, start=1):\n obj, _ = models.SnippetTemplateVariable.objects.get_or_create(\n template=form.instance, name=variable)\n if obj.order == 0:\n obj.order = i * 10\n obj.save()\n\n\nclass UploadedFileAdmin(admin.ModelAdmin):\n readonly_fields = ('url', 'preview', 'snippets')\n list_display = ('name', 'url', 'preview', 'modified')\n prepopulated_fields = {'name': ('file',)}\n form = forms.UploadedFileAdminForm\n\n def preview(self, obj):\n template = get_template('base/uploadedfile_preview.jinja')\n return mark_safe(template.render({'file': obj}))\n\n def snippets(self, obj):\n \"\"\"Snippets using this file.\"\"\"\n template = get_template('base/uploadedfile_snippets.jinja')\n return mark_safe(template.render({'snippets': obj.snippets}))\n\n\nclass AddonAdmin(admin.ModelAdmin):\n list_display = ('name', 'guid')\n\n\nclass ASRSnippetAdmin(admin.ModelAdmin):\n form = forms.ASRSnippetAdminForm\n\n list_display_links = (\n 'id',\n 'name',\n )\n list_display = (\n 'id',\n 'name',\n 'status',\n 'modified',\n )\n list_filter = (\n filters.ModifiedFilter,\n 'status',\n filters.ChannelFilter,\n ('template', RelatedDropdownFilter),\n )\n search_fields = (\n 'name',\n 'id',\n 'campaign__name',\n 'targets__name',\n )\n autocomplete_fields = (\n 'campaign',\n )\n preserve_filters = True\n readonly_fields = (\n 'id',\n 'created',\n 'modified',\n 'uuid',\n 'creator',\n 'preview_url',\n 'migrated_from_linked',\n )\n filter_horizontal = (\n 'targets',\n 'locales',\n )\n save_on_top = True\n save_as = True\n view_on_site = False\n actions = (\n actions.duplicate_snippets_action,\n actions.publish_snippets_action,\n )\n\n fieldsets = (\n ('ID', {\n 'fields': ('id', 'name', 'status', 'creator', 'preview_url', 'migrated_from_linked')\n }),\n ('Content', {\n 'description': (\n '''\n <strong>Available deep links:</strong><br/>\n <ol>\n <li><code>special:accounts</code> to open Firefox Accounts</li>\n <li><code>special:appMenu</code> to open the hamburger menu</li>\n </ol><br/>\n <strong>Automatically add Snippet ID:</strong><br/>\n You can use <code>[[snippet_id]]</code> in any field and it\n will be 
automatically replaced by Snippet ID when served to users.\n <br/>\n Example: This is a <code><a href="https://example.com?utm_term=[[snippet_id]]">link</a></code> # noqa\n <br/>\n '''\n ),\n 'fields': ('template', 'data'),\n }),\n ('Publishing Options', {\n 'fields': (\n 'campaign',\n 'targets',\n ('publish_start', 'publish_end'),\n 'locales',\n 'weight',)\n }),\n ('Other Info', {\n 'fields': ('uuid', ('created', 'modified'), 'for_qa'),\n 'classes': ('collapse',)\n }),\n )\n\n class Media:\n css = {\n 'all': (\n 'css/admin/ASRSnippetAdmin.css',\n 'css/admin/IDFieldHighlight.css',\n )\n }\n js = (\n 'js/admin/clipboard.min.js',\n 'js/admin/copy_preview.js',\n )\n\n def save_model(self, request, obj, form, change):\n if not obj.creator_id:\n obj.creator = request.user\n statsd.incr('save.asrsnippet')\n super().save_model(request, obj, form, change)\n\n def preview_url(self, obj):\n text = f'''\n <span id=\"previewLinkUrl\">{obj.get_preview_url()}</span>\n <button id=\"copyPreviewLink\" class=\"btn\"\n data-clipboard-target=\"#previewLinkUrl\"\n originalText=\"Copy to Clipboard\" type=\"button\">\n Copy to Clipboard\n </button>\n '''\n return mark_safe(text)\n\n def migrated_from_linked(self, obj):\n return mark_safe(\n f'<a href={obj.migrated_from.get_admin_url(full=False)}>{obj.migrated_from.name}</a>')\n migrated_from_linked.short_description = 'Migrated From'\n\n def change_view(self, request, *args, **kwargs):\n if request.method == 'POST' and '_saveasnew' in request.POST:\n # Always saved cloned snippets as un-published and un-check ready for review.\n post_data = request.POST.copy()\n post_data['status'] = models.STATUS_CHOICES['Draft']\n post_data.pop('migrated_from', None)\n request.POST = post_data\n return super().change_view(request, *args, **kwargs)\n\n def get_readonly_fields(self, request, obj):\n if not request.user.is_superuser:\n return self.readonly_fields + ('for_qa',)\n return self.readonly_fields\n\n def get_queryset(self, request):\n queryset = super().get_queryset(request)\n if request.user.is_superuser:\n return queryset\n return queryset.filter(for_qa=False)\n\n\nclass CampaignAdmin(admin.ModelAdmin):\n readonly_fields = ('created', 'modified', 'creator',)\n prepopulated_fields = {'slug': ('name',)}\n\n fieldsets = (\n ('ID', {'fields': ('name', 'slug')}),\n ('Other Info', {\n 'fields': ('creator', ('created', 'modified')),\n }),\n )\n search_fields = (\n 'name',\n )\n\n def save_model(self, request, obj, form, change):\n if not obj.creator_id:\n obj.creator = request.user\n statsd.incr('save.campaign')\n super().save_model(request, obj, form, change)\n\n\nclass TargetAdmin(admin.ModelAdmin):\n form = forms.TargetAdminForm\n save_on_top = True\n readonly_fields = ('created', 'modified', 'creator', 'jexl_expr')\n filter_horizontal = (\n 'client_match_rules',\n )\n search_fields = (\n 'name',\n )\n fieldsets = (\n ('ID', {'fields': ('name',)}),\n ('Product channels', {\n 'description': 'What channels will this snippet be available in?',\n 'fields': (('on_release', 'on_beta', 'on_aurora', 'on_nightly', 'on_esr'),)\n }),\n ('Targeting', {\n 'fields': (\n 'filtr_is_default_browser',\n 'filtr_updates_enabled',\n 'filtr_updates_autodownload_enabled',\n 'filtr_profile_age_created',\n 'filtr_firefox_version',\n 'filtr_previous_session_end',\n 'filtr_uses_firefox_sync',\n 'filtr_country',\n 'filtr_is_developer',\n 'filtr_current_search_engine',\n 'filtr_browser_addon',\n 'filtr_total_bookmarks_count',\n )\n }),\n ('Advanced Targeting', {\n 'fields': (\n 
'client_match_rules',\n )\n }),\n ('Other Info', {\n 'fields': ('creator', ('created', 'modified'), 'jexl_expr'),\n }),\n )\n\n def save_model(self, request, obj, form, change):\n if not obj.creator_id:\n obj.creator = request.user\n statsd.incr('save.target')\n super().save_model(request, obj, form, change)\n", "path": "snippets/base/admin/adminmodels.py"}]} |
gh_patches_debug_183 | rasdani/github-patches | git_diff | e-valuation__EvaP-410 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Missing access to contributor page for delegate
Delegates for responsible users can't see the contributor page when they don't have contributions of their own.
Every delegate of any user with edit rights should have access to this page.
(Example: In the test data set, the user "delegate" can't see the page, although it should be possible.)
--- END ISSUE ---
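The contributor page view itself is not among the code segments below, but access presumably hinges on `UserProfile.is_editor_or_delegate` (defined in `evap/evaluation/models.py` below), which is simply `is_editor or is_delegate`. The sketch that follows is a hypothetical gate; the decorator name and structure are assumptions for illustration only, not EvaP's actual view code:
```python
# Hypothetical illustration; the real EvaP view/decorator is not shown in this record.
from django.core.exceptions import PermissionDenied

from evap.evaluation.models import UserProfile


def editor_or_delegate_required(view_func):
    def wrapper(request, *args, **kwargs):
        profile = UserProfile.get_for_user(request.user)
        # Access depends on is_editor_or_delegate (= is_editor or is_delegate),
        # so a wrong is_delegate check locks out delegates without own contributions.
        if not profile.is_editor_or_delegate:
            raise PermissionDenied
        return view_func(request, *args, **kwargs)
    return wrapper
```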
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `evap/evaluation/models.py`
Content:
```
1 from django.conf import settings
2 from django.core.exceptions import ValidationError
3 from django.core.mail import EmailMessage
4 from django.db import models
5 from django.db.models import Count
6 from django.db.models.signals import post_save
7 from django.dispatch import receiver
8 from django.utils.translation import ugettext_lazy as _
9 from django.template import Context, Template, TemplateSyntaxError, TemplateEncodingError
10 from django_fsm.db.fields import FSMField, transition
11
12 # see evaluation.meta for the use of Translate in this file
13 from evap.evaluation.meta import LocalizeModelBase, Translate
14
15 import datetime
16 import random
17
18 # for converting state into student_state
19 STUDENT_STATES_NAMES = {
20 'new': 'upcoming',
21 'prepared': 'upcoming',
22 'lecturerApproved': 'upcoming',
23 'approved': 'upcoming',
24 'inEvaluation': 'inEvaluation',
25 'evaluated': 'evaluationFinished',
26 'reviewed': 'evaluationFinished',
27 'published': 'published'
28 }
29
30
31 class Semester(models.Model):
32 """Represents a semester, e.g. the winter term of 2011/2012."""
33
34 __metaclass__ = LocalizeModelBase
35
36 name_de = models.CharField(max_length=1024, unique=True, verbose_name=_(u"name (german)"))
37 name_en = models.CharField(max_length=1024, unique=True, verbose_name=_(u"name (english)"))
38
39 name = Translate
40
41 created_at = models.DateField(verbose_name=_(u"created at"), auto_now_add=True)
42
43 class Meta:
44 ordering = ('-created_at', 'name_de')
45 verbose_name = _(u"semester")
46 verbose_name_plural = _(u"semesters")
47
48 def __unicode__(self):
49 return self.name
50
51 @property
52 def can_fsr_delete(self):
53 for course in self.course_set.all():
54 if not course.can_fsr_delete:
55 return False
56 return True
57
58 @classmethod
59 def get_all_with_published_courses(cls):
60 return cls.objects.filter(course__state="published").distinct()
61
62
63 class Questionnaire(models.Model):
64 """A named collection of questions."""
65
66 __metaclass__ = LocalizeModelBase
67
68 name_de = models.CharField(max_length=1024, unique=True, verbose_name=_(u"name (german)"))
69 name_en = models.CharField(max_length=1024, unique=True, verbose_name=_(u"name (english)"))
70 name = Translate
71
72 description_de = models.TextField(verbose_name=_(u"description (german)"), blank=True, null=True)
73 description_en = models.TextField(verbose_name=_(u"description (english)"), blank=True, null=True)
74 description = Translate
75
76 public_name_de = models.CharField(max_length=1024, verbose_name=_(u"display name (german)"))
77 public_name_en = models.CharField(max_length=1024, verbose_name=_(u"display name (english)"))
78 public_name = Translate
79
80 teaser_de = models.TextField(verbose_name=_(u"teaser (german)"), blank=True, null=True)
81 teaser_en = models.TextField(verbose_name=_(u"teaser (english)"), blank=True, null=True)
82 teaser = Translate
83
84 index = models.IntegerField(verbose_name=_(u"ordering index"))
85
86 is_for_contributors = models.BooleanField(verbose_name=_(u"is for contributors"), default=False)
87 obsolete = models.BooleanField(verbose_name=_(u"obsolete"), default=False)
88
89 class Meta:
90 ordering = ('obsolete', 'index', 'name_de')
91 verbose_name = _(u"questionnaire")
92 verbose_name_plural = _(u"questionnaires")
93
94 def __unicode__(self):
95 return self.name
96
97 @property
98 def can_fsr_delete(self):
99 return not self.contributions.exists()
100
101
102 class Course(models.Model):
103 """Models a single course, e.g. the Math 101 course of 2002."""
104
105 __metaclass__ = LocalizeModelBase
106
107 state = FSMField(default='new', protected=True)
108
109 semester = models.ForeignKey(Semester, verbose_name=_(u"semester"))
110
111 name_de = models.CharField(max_length=1024, verbose_name=_(u"name (german)"))
112 name_en = models.CharField(max_length=1024, verbose_name=_(u"name (english)"))
113 name = Translate
114
115 # type of course: lecture, seminar, project
116 kind = models.CharField(max_length=1024, verbose_name=_(u"type"))
117
118 # bachelor, master, d-school course
119 degree = models.CharField(max_length=1024, verbose_name=_(u"degree"))
120
121 # students that are allowed to vote
122 participants = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u"participants"), blank=True)
123 participant_count = models.IntegerField(verbose_name=_(u"participant count"), blank=True, null=True, default=None)
124
125 # students that already voted
126 voters = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u"voters"), blank=True, related_name='+')
127 voter_count = models.IntegerField(verbose_name=_(u"voter count"), blank=True, null=True, default=None)
128
129 # when the evaluation takes place
130 vote_start_date = models.DateField(null=True, verbose_name=_(u"first date to vote"))
131 vote_end_date = models.DateField(null=True, verbose_name=_(u"last date to vote"))
132
133 # who last modified this course, shell be noted
134 last_modified_time = models.DateTimeField(auto_now=True)
135 last_modified_user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="+", null=True, blank=True)
136
137 class Meta:
138 ordering = ('semester', 'degree', 'name_de')
139 unique_together = (
140 ('semester', 'degree', 'name_de'),
141 ('semester', 'degree', 'name_en'),
142 )
143 verbose_name = _(u"course")
144 verbose_name_plural = _(u"courses")
145
146 def __unicode__(self):
147 return self.name
148
149 def clean(self):
150 if self.vote_start_date and self.vote_end_date:
151 if self.vote_start_date >= self.vote_end_date:
152 raise ValidationError(_(u"The vote start date must be before the vote end date."))
153
154 def save(self, *args, **kw):
155 super(Course, self).save(*args, **kw)
156
157 # make sure there is a general contribution
158 if not self.general_contribution:
159 self.contributions.create(contributor=None)
160
161 def is_fully_checked(self):
162 """Shortcut for finding out whether all text answers to this course have been checked"""
163 return not self.open_textanswer_set.exists()
164
165 def can_user_vote(self, user):
166 """Returns whether the user is allowed to vote on this course."""
167 return (self.state == "inEvaluation"
168 and datetime.date.today() <= self.vote_end_date
169 and user in self.participants.all()
170 and user not in self.voters.all())
171
172 def can_fsr_edit(self):
173 return self.state in ['new', 'prepared', 'lecturerApproved', 'approved', 'inEvaluation']
174
175 def can_fsr_delete(self):
176 return self.can_fsr_edit() and not self.voters.exists()
177
178 def can_fsr_review(self):
179 return self.state in ['inEvaluation', 'evaluated'] and not self.is_fully_checked()
180
181 def can_fsr_approve(self):
182 return self.state in ['new', 'prepared', 'lecturerApproved']
183
184 def can_publish_grades(self):
185 return self.num_voters >= settings.MIN_ANSWER_COUNT and float(self.num_voters) / self.num_participants >= settings.MIN_ANSWER_PERCENTAGE
186
187 @transition(field=state, source=['new', 'lecturerApproved'], target='prepared')
188 def ready_for_contributors(self, send_mail=True):
189 if send_mail:
190 EmailTemplate.get_review_template().send_to_users_in_courses([self], ['editors'])
191
192 @transition(field=state, source='prepared', target='lecturerApproved')
193 def contributor_approve(self):
194 pass
195
196 @transition(field=state, source=['new', 'prepared', 'lecturerApproved'], target='approved')
197 def fsr_approve(self):
198 pass
199
200 @transition(field=state, source='prepared', target='new')
201 def revert_to_new(self):
202 pass
203
204 @transition(field=state, source='approved', target='inEvaluation')
205 def evaluation_begin(self):
206 pass
207
208 @transition(field=state, source='inEvaluation', target='evaluated')
209 def evaluation_end(self):
210 pass
211
212 @transition(field=state, source='evaluated', target='reviewed', conditions=[is_fully_checked])
213 def review_finished(self):
214 pass
215
216 @transition(field=state, source='reviewed', target='published')
217 def publish(self):
218 pass
219
220 @transition(field=state, source='published', target='reviewed')
221 def revoke(self):
222 pass
223
224 @property
225 def student_state(self):
226 return STUDENT_STATES_NAMES[self.state]
227
228 @property
229 def general_contribution(self):
230 try:
231 return self.contributions.get(contributor=None)
232 except Contribution.DoesNotExist:
233 return None
234
235 @property
236 def num_participants(self):
237 if self.participant_count:
238 return self.participant_count
239 return self.participants.count()
240
241 @property
242 def num_voters(self):
243 if self.voter_count:
244 return self.voter_count
245 return self.voters.count()
246
247 @property
248 def due_participants(self):
249 return self.participants.exclude(pk__in=self.voters.all())
250
251 @property
252 def responsible_contributor(self):
253 return self.contributions.get(responsible=True).contributor
254
255 @property
256 def responsible_contributors_name(self):
257 return self.responsible_contributor.userprofile.full_name
258
259 @property
260 def responsible_contributors_username(self):
261 return self.responsible_contributor.username
262
263 def has_enough_questionnaires(self):
264 return self.general_contribution and all(self.contributions.aggregate(Count('questionnaires')).values())
265
266 def is_user_editor_or_delegate(self, user):
267 if self.contributions.filter(can_edit=True, contributor=user).exists():
268 return True
269 else:
270 represented_userprofiles = user.represented_users.all()
271 represented_users = [profile.user for profile in represented_userprofiles]
272 if self.contributions.filter(can_edit=True, contributor__in=represented_users).exists():
273 return True
274
275 return False
276
277 def is_user_responsible_or_delegate(self, user):
278 if self.contributions.filter(responsible=True, contributor=user).exists():
279 return True
280 else:
281 represented_userprofiles = user.represented_users.all()
282 represented_users = [profile.user for profile in represented_userprofiles]
283 if self.contributions.filter(responsible=True, contributor__in=represented_users).exists():
284 return True
285
286 return False
287
288 def is_user_contributor(self, user):
289 return self.contributions.filter(contributor=user).exists()
290
291 def is_user_editor(self, user):
292 return self.contributions.filter(contributor=user, can_edit=True).exists()
293
294 def warnings(self):
295 result = []
296 if self.state == 'new' and not self.has_enough_questionnaires():
297 result.append(_(u"Not enough questionnaires assigned"))
298 if self.state in ['inEvaluation', 'evaluated', 'reviewed'] and not self.can_publish_grades():
299 result.append(_(u"Not enough participants to publish results"))
300 return result
301
302 @property
303 def textanswer_set(self):
304 """Pseudo relationship to all text answers for this course"""
305 return TextAnswer.objects.filter(contribution__in=self.contributions.all())
306
307 @property
308 def open_textanswer_set(self):
309 """Pseudo relationship to all text answers for this course"""
310 return TextAnswer.objects.filter(contribution__in=self.contributions.all(), checked=False)
311
312 @property
313 def checked_textanswer_set(self):
314 """Pseudo relationship to all text answers for this course"""
315 return TextAnswer.objects.filter(contribution__in=self.contributions.all(), checked=True)
316
317 @property
318 def likertanswer_set(self):
319 """Pseudo relationship to all Likert answers for this course"""
320 return LikertAnswer.objects.filter(contribution__in=self.contributions.all())
321
322 @property
323 def gradeanswer_set(self):
324 """Pseudo relationship to all grade answers for this course"""
325 return GradeAnswer.objects.filter(contribution__in=self.contributions.all())
326
327
328 class Contribution(models.Model):
329 """A contributor who is assigned to a course and his questionnaires."""
330
331 course = models.ForeignKey(Course, verbose_name=_(u"course"), related_name='contributions')
332 contributor = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_(u"contributor"), blank=True, null=True, related_name='contributions')
333 questionnaires = models.ManyToManyField(Questionnaire, verbose_name=_(u"questionnaires"), blank=True, related_name="contributions")
334 responsible = models.BooleanField(verbose_name=_(u"responsible"), default=False)
335 can_edit = models.BooleanField(verbose_name=_(u"can edit"), default=False)
336
337 class Meta:
338 unique_together = (
339 ('course', 'contributor'),
340 )
341
342 def clean(self):
343 # responsible contributors can always edit
344 if self.responsible:
345 self.can_edit = True
346
347
348 class Question(models.Model):
349 """A question including a type."""
350
351 __metaclass__ = LocalizeModelBase
352
353 QUESTION_KINDS = (
354 (u"T", _(u"Text Question")),
355 (u"L", _(u"Likert Question")),
356 (u"G", _(u"Grade Question")),
357 )
358
359 questionnaire = models.ForeignKey(Questionnaire)
360 text_de = models.TextField(verbose_name=_(u"question text (german)"))
361 text_en = models.TextField(verbose_name=_(u"question text (english)"))
362 kind = models.CharField(max_length=1, choices=QUESTION_KINDS,
363 verbose_name=_(u"kind of question"))
364
365 text = Translate
366
367 class Meta:
368 order_with_respect_to = 'questionnaire'
369 verbose_name = _(u"question")
370 verbose_name_plural = _(u"questions")
371
372 @property
373 def answer_class(self):
374 if self.kind == u"T":
375 return TextAnswer
376 elif self.kind == u"L":
377 return LikertAnswer
378 elif self.kind == u"G":
379 return GradeAnswer
380 else:
381 raise Exception("Unknown answer kind: %r" % self.kind)
382
383 def is_likert_question(self):
384 return self.answer_class == LikertAnswer
385
386 def is_text_question(self):
387 return self.answer_class == TextAnswer
388
389 def is_grade_question(self):
390 return self.answer_class == GradeAnswer
391
392
393 class Answer(models.Model):
394 """An abstract answer to a question. For anonymity purposes, the answering
395 user ist not stored in the object. Concrete subclasses are `LikertAnswer`,
396 `TextAnswer` and `GradeAnswer`."""
397
398 question = models.ForeignKey(Question)
399 contribution = models.ForeignKey(Contribution)
400
401 class Meta:
402 abstract = True
403 verbose_name = _(u"answer")
404 verbose_name_plural = _(u"answers")
405
406
407 class LikertAnswer(Answer):
408 """A Likert-scale answer to a question with `1` being *strongly agree* and `5`
409 being *strongly disagree*."""
410
411 answer = models.IntegerField(verbose_name=_(u"answer"))
412
413 class Meta:
414 verbose_name = _(u"Likert answer")
415 verbose_name_plural = _(u"Likert answers")
416
417
418 class GradeAnswer(Answer):
419 """A grade answer to a question with `1` being best and `5` being worst."""
420
421 answer = models.IntegerField(verbose_name=_(u"answer"))
422
423 class Meta:
424 verbose_name = _(u"grade answer")
425 verbose_name_plural = _(u"grade answers")
426
427
428 class TextAnswer(Answer):
429 """A free-form text answer to a question (usually a comment about a course
430 or a contributor)."""
431
432 elements_per_page = 5
433
434 reviewed_answer = models.TextField(verbose_name=_(u"reviewed answer"), blank=True, null=True)
435 original_answer = models.TextField(verbose_name=_(u"original answer"), blank=True)
436
437 checked = models.BooleanField(verbose_name=_(u"answer checked"), default=False)
438 hidden = models.BooleanField(verbose_name=_(u"hide answer"), default=False)
439
440 class Meta:
441 verbose_name = _(u"text answer")
442 verbose_name_plural = _(u"text answers")
443
444 def _answer_get(self):
445 return self.reviewed_answer or self.original_answer
446
447 def _answer_set(self, value):
448 self.original_answer = value
449 self.reviewed_answer = None
450
451 answer = property(_answer_get, _answer_set)
452
453
454 class FaqSection(models.Model):
455 """Section in the frequently asked questions"""
456
457 __metaclass__ = LocalizeModelBase
458
459 order = models.IntegerField(verbose_name=_("section order"))
460
461 title_de = models.TextField(verbose_name=_(u"section title (german)"))
462 title_en = models.TextField(verbose_name=_(u"section title (english)"))
463 title = Translate
464
465 class Meta:
466 ordering = ['order', ]
467 verbose_name = _(u"section")
468 verbose_name_plural = _(u"sections")
469
470
471 class FaqQuestion(models.Model):
472 """Question and answer in the frequently asked questions"""
473
474 __metaclass__ = LocalizeModelBase
475
476 section = models.ForeignKey(FaqSection, related_name="questions")
477
478 order = models.IntegerField(verbose_name=_("question order"))
479
480 question_de = models.TextField(verbose_name=_("question (german)"))
481 question_en = models.TextField(verbose_name=_("question (english)"))
482 question = Translate
483
484 answer_de = models.TextField(verbose_name=_("answer (german)"))
485 answer_en = models.TextField(verbose_name=_("answer (german)"))
486 answer = Translate
487
488 class Meta:
489 ordering = ['order', ]
490 verbose_name = _(u"question")
491 verbose_name_plural = _(u"questions")
492
493
494 class UserProfile(models.Model):
495 user = models.OneToOneField(settings.AUTH_USER_MODEL)
496
497 # extending first_name and last_name from the user
498 title = models.CharField(verbose_name=_(u"Title"), max_length=1024, blank=True, null=True)
499
500 # picture of the user
501 picture = models.ImageField(verbose_name=_(u"Picture"), upload_to="pictures", blank=True, null=True)
502
503 # delegates of the user, which can also manage their courses
504 delegates = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u"Delegates"), related_name="represented_users", blank=True)
505
506 # users to which all emails should be sent in cc without giving them delegate rights
507 cc_users = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u"CC Users"), related_name="cc_users", blank=True)
508
509 # key for url based login of this user
510 MAX_LOGIN_KEY = 2**31-1
511
512 login_key = models.IntegerField(verbose_name=_(u"Login Key"), blank=True, null=True)
513 login_key_valid_until = models.DateField(verbose_name=_(u"Login Key Validity"), null=True)
514
515 class Meta:
516 verbose_name = _('user')
517 verbose_name_plural = _('users')
518
519 def __unicode__(self):
520 return unicode(self.user)
521
522 @property
523 def full_name(self):
524 if self.user.last_name:
525 name = self.user.last_name
526 if self.user.first_name:
527 name = self.user.first_name + " " + name
528 if self.title:
529 name = self.title + " " + name
530 return name
531 else:
532 return self.user.username
533
534 @property
535 def can_fsr_delete(self):
536 return not self.is_contributor
537
538 @property
539 def enrolled_in_courses(self):
540 return self.user.course_set.exists()
541
542 @property
543 def is_contributor(self):
544 return self.user.contributions.exists()
545
546 @property
547 def is_editor(self):
548 return self.user.contributions.filter(can_edit=True).exists()
549
550 @property
551 def is_responsible(self):
552 # in the user list, self.user.contributions is prefetched, therefore use it directly and don't filter it
553 return any(contribution.responsible for contribution in self.user.contributions.all())
554
555 @property
556 def is_delegate(self):
557 return self.delegates.exists()
558
559 @property
560 def is_editor_or_delegate(self):
561 return self.is_editor or self.is_delegate
562
563 @classmethod
564 def email_needs_login_key(cls, email):
565 return not any([email.endswith("@" + domain) for domain in settings.INSTITUTION_EMAIL_DOMAINS])
566
567 @property
568 def needs_login_key(self):
569 return UserProfile.email_needs_login_key(self.user.email)
570
571 @classmethod
572 def get_for_user(cls, user):
573 obj, _ = cls.objects.get_or_create(user=user)
574 return obj
575
576 def generate_login_key(self):
577 while True:
578 key = random.randrange(0, UserProfile.MAX_LOGIN_KEY)
579 if not UserProfile.objects.filter(login_key=key).exists():
580 # key not yet used
581 self.login_key = key
582 break
583
584 self.refresh_login_key()
585
586 def refresh_login_key(self):
587 self.login_key_valid_until = datetime.date.today() + datetime.timedelta(settings.LOGIN_KEY_VALIDITY)
588
589 @staticmethod
590 @receiver(post_save, sender=settings.AUTH_USER_MODEL)
591 def create_user_profile(sender, instance, created, raw, **kwargs):
592 """Creates a UserProfile object whenever a User is created."""
593 if created and not raw:
594 UserProfile.objects.create(user=instance)
595
596
597 def validate_template(value):
598 """Field validator which ensures that the value can be compiled into a
599 Django Template."""
600 try:
601 Template(value)
602 except (TemplateSyntaxError, TemplateEncodingError) as e:
603 raise ValidationError(str(e))
604
605
606 class EmailTemplate(models.Model):
607 name = models.CharField(max_length=1024, unique=True, verbose_name=_("Name"))
608
609 subject = models.CharField(max_length=1024, verbose_name=_(u"Subject"), validators=[validate_template])
610 body = models.TextField(verbose_name=_("Body"), validators=[validate_template])
611
612 @classmethod
613 def get_review_template(cls):
614 return cls.objects.get(name="Lecturer Review Notice")
615
616 @classmethod
617 def get_reminder_template(cls):
618 return cls.objects.get(name="Student Reminder")
619
620 @classmethod
621 def get_publish_template(cls):
622 return cls.objects.get(name="Publishing Notice")
623
624 @classmethod
625 def get_login_key_template(cls):
626 return cls.objects.get(name="Login Key Created")
627
628 @classmethod
629 def recipient_list_for_course(cls, course, recipient_groups):
630 recipients = []
631
632 if "responsible" in recipient_groups:
633 recipients += [course.responsible_contributor]
634
635 if "contributors" in recipient_groups:
636 recipients += [c.contributor for c in course.contributions.exclude(contributor=None)]
637 elif "editors" in recipient_groups:
638 recipients += [c.contributor for c in course.contributions.exclude(contributor=None).filter(can_edit=True)]
639
640 if "all_participants" in recipient_groups:
641 recipients += course.participants.all()
642 elif "due_participants" in recipient_groups:
643 recipients += course.due_participants
644
645 return recipients
646
647 @classmethod
648 def render_string(cls, text, dictionary):
649 return Template(text).render(Context(dictionary, autoescape=False))
650
651 def send_to_users_in_courses(self, courses, recipient_groups):
652 user_course_map = {}
653 for course in courses:
654 responsible = UserProfile.get_for_user(course.responsible_contributor)
655 for user in self.recipient_list_for_course(course, recipient_groups):
656 if user.email and user not in responsible.cc_users.all() and user not in responsible.delegates.all():
657 user_course_map.setdefault(user, []).append(course)
658
659 for user, courses in user_course_map.iteritems():
660 cc_users = []
661 if ("responsible" in recipient_groups or "editors" in recipient_groups) and any(course.is_user_editor(user) for course in courses):
662 cc_users += UserProfile.get_for_user(user).delegates.all()
663 cc_users += UserProfile.get_for_user(user).cc_users.all()
664 cc_addresses = [p.email for p in cc_users if p.email]
665
666 mail = EmailMessage(
667 subject = self.render_string(self.subject, {'user': user, 'courses': courses}),
668 body = self.render_string(self.body, {'user': user, 'courses': courses}),
669 to = [user.email],
670 cc = cc_addresses,
671 bcc = [a[1] for a in settings.MANAGERS],
672 headers = {'Reply-To': settings.REPLY_TO_EMAIL})
673 mail.send(False)
674
675 def send_to_user(self, user):
676 if not user.email:
677 return
678
679 mail = EmailMessage(
680 subject = self.render_string(self.subject, {'user': user}),
681 body = self.render_string(self.body, {'user': user}),
682 to = [user.email],
683 bcc = [a[1] for a in settings.MANAGERS],
684 headers = {'Reply-To': settings.REPLY_TO_EMAIL})
685 mail.send(False)
686
687
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/evap/evaluation/models.py b/evap/evaluation/models.py
--- a/evap/evaluation/models.py
+++ b/evap/evaluation/models.py
@@ -554,7 +554,7 @@
@property
def is_delegate(self):
- return self.delegates.exists()
+ return self.user.represented_users.exists()
@property
def is_editor_or_delegate(self):
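The fix works because `UserProfile.delegates` holds the users who represent this user, so `self.delegates.exists()` answers "does this user have delegates?" rather than "is this user a delegate?". The latter is the reverse relation: profiles that list the user in their `delegates` field are reachable through the `related_name` accessor `represented_users`, the same accessor `Course.is_user_editor_or_delegate` already uses. A minimal sketch, trimmed to the two members that matter:
```python
from django.conf import settings
from django.db import models


class UserProfile(models.Model):
    user = models.OneToOneField(settings.AUTH_USER_MODEL)

    # Users who may act on behalf of this user. On a User instance the reverse
    # accessor is `represented_users`: the profiles listing that user as a delegate.
    delegates = models.ManyToManyField(settings.AUTH_USER_MODEL,
                                       related_name="represented_users", blank=True)

    @property
    def is_delegate(self):
        # True when at least one other profile names this user as a delegate.
        # The old check, self.delegates.exists(), instead told us whether this
        # user has delegates of their own.
        return self.user.represented_users.exists()
```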
| {"golden_diff": "diff --git a/evap/evaluation/models.py b/evap/evaluation/models.py\n--- a/evap/evaluation/models.py\n+++ b/evap/evaluation/models.py\n@@ -554,7 +554,7 @@\n \n @property\n def is_delegate(self):\n- return self.delegates.exists()\n+ return self.user.represented_users.exists()\n \n @property\n def is_editor_or_delegate(self):\n", "issue": "Missing access to contributor page for delegate\nDelegates for responsible users can't see the contributor page when they don't have own contributions.\nEvery delegate of any user with edit rights should have access to this page.\n\n(Example: In the test data set, the user \"delegate\" can't see the page, although it should be possible.)\n\n", "before_files": [{"content": "from django.conf import settings\nfrom django.core.exceptions import ValidationError\nfrom django.core.mail import EmailMessage\nfrom django.db import models\nfrom django.db.models import Count\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.template import Context, Template, TemplateSyntaxError, TemplateEncodingError\nfrom django_fsm.db.fields import FSMField, transition\n\n# see evaluation.meta for the use of Translate in this file\nfrom evap.evaluation.meta import LocalizeModelBase, Translate\n\nimport datetime\nimport random\n\n# for converting state into student_state\nSTUDENT_STATES_NAMES = {\n 'new': 'upcoming',\n 'prepared': 'upcoming',\n 'lecturerApproved': 'upcoming',\n 'approved': 'upcoming',\n 'inEvaluation': 'inEvaluation',\n 'evaluated': 'evaluationFinished',\n 'reviewed': 'evaluationFinished',\n 'published': 'published'\n}\n\n\nclass Semester(models.Model):\n \"\"\"Represents a semester, e.g. the winter term of 2011/2012.\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n name_de = models.CharField(max_length=1024, unique=True, verbose_name=_(u\"name (german)\"))\n name_en = models.CharField(max_length=1024, unique=True, verbose_name=_(u\"name (english)\"))\n\n name = Translate\n\n created_at = models.DateField(verbose_name=_(u\"created at\"), auto_now_add=True)\n\n class Meta:\n ordering = ('-created_at', 'name_de')\n verbose_name = _(u\"semester\")\n verbose_name_plural = _(u\"semesters\")\n\n def __unicode__(self):\n return self.name\n\n @property\n def can_fsr_delete(self):\n for course in self.course_set.all():\n if not course.can_fsr_delete:\n return False\n return True\n\n @classmethod\n def get_all_with_published_courses(cls):\n return cls.objects.filter(course__state=\"published\").distinct()\n\n\nclass Questionnaire(models.Model):\n \"\"\"A named collection of questions.\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n name_de = models.CharField(max_length=1024, unique=True, verbose_name=_(u\"name (german)\"))\n name_en = models.CharField(max_length=1024, unique=True, verbose_name=_(u\"name (english)\"))\n name = Translate\n\n description_de = models.TextField(verbose_name=_(u\"description (german)\"), blank=True, null=True)\n description_en = models.TextField(verbose_name=_(u\"description (english)\"), blank=True, null=True)\n description = Translate\n\n public_name_de = models.CharField(max_length=1024, verbose_name=_(u\"display name (german)\"))\n public_name_en = models.CharField(max_length=1024, verbose_name=_(u\"display name (english)\"))\n public_name = Translate\n\n teaser_de = models.TextField(verbose_name=_(u\"teaser (german)\"), blank=True, null=True)\n teaser_en = models.TextField(verbose_name=_(u\"teaser (english)\"), blank=True, 
null=True)\n teaser = Translate\n\n index = models.IntegerField(verbose_name=_(u\"ordering index\"))\n\n is_for_contributors = models.BooleanField(verbose_name=_(u\"is for contributors\"), default=False)\n obsolete = models.BooleanField(verbose_name=_(u\"obsolete\"), default=False)\n\n class Meta:\n ordering = ('obsolete', 'index', 'name_de')\n verbose_name = _(u\"questionnaire\")\n verbose_name_plural = _(u\"questionnaires\")\n\n def __unicode__(self):\n return self.name\n\n @property\n def can_fsr_delete(self):\n return not self.contributions.exists()\n\n\nclass Course(models.Model):\n \"\"\"Models a single course, e.g. the Math 101 course of 2002.\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n state = FSMField(default='new', protected=True)\n\n semester = models.ForeignKey(Semester, verbose_name=_(u\"semester\"))\n\n name_de = models.CharField(max_length=1024, verbose_name=_(u\"name (german)\"))\n name_en = models.CharField(max_length=1024, verbose_name=_(u\"name (english)\"))\n name = Translate\n\n # type of course: lecture, seminar, project\n kind = models.CharField(max_length=1024, verbose_name=_(u\"type\"))\n\n # bachelor, master, d-school course\n degree = models.CharField(max_length=1024, verbose_name=_(u\"degree\"))\n\n # students that are allowed to vote\n participants = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u\"participants\"), blank=True)\n participant_count = models.IntegerField(verbose_name=_(u\"participant count\"), blank=True, null=True, default=None)\n\n # students that already voted\n voters = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u\"voters\"), blank=True, related_name='+')\n voter_count = models.IntegerField(verbose_name=_(u\"voter count\"), blank=True, null=True, default=None)\n\n # when the evaluation takes place\n vote_start_date = models.DateField(null=True, verbose_name=_(u\"first date to vote\"))\n vote_end_date = models.DateField(null=True, verbose_name=_(u\"last date to vote\"))\n\n # who last modified this course, shell be noted\n last_modified_time = models.DateTimeField(auto_now=True)\n last_modified_user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name=\"+\", null=True, blank=True)\n\n class Meta:\n ordering = ('semester', 'degree', 'name_de')\n unique_together = (\n ('semester', 'degree', 'name_de'),\n ('semester', 'degree', 'name_en'),\n )\n verbose_name = _(u\"course\")\n verbose_name_plural = _(u\"courses\")\n\n def __unicode__(self):\n return self.name\n\n def clean(self):\n if self.vote_start_date and self.vote_end_date:\n if self.vote_start_date >= self.vote_end_date:\n raise ValidationError(_(u\"The vote start date must be before the vote end date.\"))\n\n def save(self, *args, **kw):\n super(Course, self).save(*args, **kw)\n\n # make sure there is a general contribution\n if not self.general_contribution:\n self.contributions.create(contributor=None)\n\n def is_fully_checked(self):\n \"\"\"Shortcut for finding out whether all text answers to this course have been checked\"\"\"\n return not self.open_textanswer_set.exists()\n\n def can_user_vote(self, user):\n \"\"\"Returns whether the user is allowed to vote on this course.\"\"\"\n return (self.state == \"inEvaluation\"\n and datetime.date.today() <= self.vote_end_date\n and user in self.participants.all()\n and user not in self.voters.all())\n\n def can_fsr_edit(self):\n return self.state in ['new', 'prepared', 'lecturerApproved', 'approved', 'inEvaluation']\n\n def can_fsr_delete(self):\n return self.can_fsr_edit() and not 
self.voters.exists()\n\n def can_fsr_review(self):\n return self.state in ['inEvaluation', 'evaluated'] and not self.is_fully_checked()\n\n def can_fsr_approve(self):\n return self.state in ['new', 'prepared', 'lecturerApproved']\n\n def can_publish_grades(self):\n return self.num_voters >= settings.MIN_ANSWER_COUNT and float(self.num_voters) / self.num_participants >= settings.MIN_ANSWER_PERCENTAGE\n\n @transition(field=state, source=['new', 'lecturerApproved'], target='prepared')\n def ready_for_contributors(self, send_mail=True):\n if send_mail:\n EmailTemplate.get_review_template().send_to_users_in_courses([self], ['editors'])\n\n @transition(field=state, source='prepared', target='lecturerApproved')\n def contributor_approve(self):\n pass\n\n @transition(field=state, source=['new', 'prepared', 'lecturerApproved'], target='approved')\n def fsr_approve(self):\n pass\n\n @transition(field=state, source='prepared', target='new')\n def revert_to_new(self):\n pass\n\n @transition(field=state, source='approved', target='inEvaluation')\n def evaluation_begin(self):\n pass\n\n @transition(field=state, source='inEvaluation', target='evaluated')\n def evaluation_end(self):\n pass\n\n @transition(field=state, source='evaluated', target='reviewed', conditions=[is_fully_checked])\n def review_finished(self):\n pass\n\n @transition(field=state, source='reviewed', target='published')\n def publish(self):\n pass\n\n @transition(field=state, source='published', target='reviewed')\n def revoke(self):\n pass\n\n @property\n def student_state(self):\n return STUDENT_STATES_NAMES[self.state]\n\n @property\n def general_contribution(self):\n try:\n return self.contributions.get(contributor=None)\n except Contribution.DoesNotExist:\n return None\n\n @property\n def num_participants(self):\n if self.participant_count:\n return self.participant_count\n return self.participants.count()\n\n @property\n def num_voters(self):\n if self.voter_count:\n return self.voter_count\n return self.voters.count()\n\n @property\n def due_participants(self):\n return self.participants.exclude(pk__in=self.voters.all())\n\n @property\n def responsible_contributor(self):\n return self.contributions.get(responsible=True).contributor\n\n @property\n def responsible_contributors_name(self):\n return self.responsible_contributor.userprofile.full_name\n\n @property\n def responsible_contributors_username(self):\n return self.responsible_contributor.username\n\n def has_enough_questionnaires(self):\n return self.general_contribution and all(self.contributions.aggregate(Count('questionnaires')).values())\n\n def is_user_editor_or_delegate(self, user):\n if self.contributions.filter(can_edit=True, contributor=user).exists():\n return True\n else:\n represented_userprofiles = user.represented_users.all()\n represented_users = [profile.user for profile in represented_userprofiles]\n if self.contributions.filter(can_edit=True, contributor__in=represented_users).exists():\n return True\n\n return False\n\n def is_user_responsible_or_delegate(self, user):\n if self.contributions.filter(responsible=True, contributor=user).exists():\n return True\n else:\n represented_userprofiles = user.represented_users.all()\n represented_users = [profile.user for profile in represented_userprofiles]\n if self.contributions.filter(responsible=True, contributor__in=represented_users).exists():\n return True\n\n return False\n\n def is_user_contributor(self, user):\n return self.contributions.filter(contributor=user).exists()\n\n def is_user_editor(self, 
user):\n return self.contributions.filter(contributor=user, can_edit=True).exists()\n\n def warnings(self):\n result = []\n if self.state == 'new' and not self.has_enough_questionnaires():\n result.append(_(u\"Not enough questionnaires assigned\"))\n if self.state in ['inEvaluation', 'evaluated', 'reviewed'] and not self.can_publish_grades():\n result.append(_(u\"Not enough participants to publish results\"))\n return result\n\n @property\n def textanswer_set(self):\n \"\"\"Pseudo relationship to all text answers for this course\"\"\"\n return TextAnswer.objects.filter(contribution__in=self.contributions.all())\n\n @property\n def open_textanswer_set(self):\n \"\"\"Pseudo relationship to all text answers for this course\"\"\"\n return TextAnswer.objects.filter(contribution__in=self.contributions.all(), checked=False)\n\n @property\n def checked_textanswer_set(self):\n \"\"\"Pseudo relationship to all text answers for this course\"\"\"\n return TextAnswer.objects.filter(contribution__in=self.contributions.all(), checked=True)\n\n @property\n def likertanswer_set(self):\n \"\"\"Pseudo relationship to all Likert answers for this course\"\"\"\n return LikertAnswer.objects.filter(contribution__in=self.contributions.all())\n\n @property\n def gradeanswer_set(self):\n \"\"\"Pseudo relationship to all grade answers for this course\"\"\"\n return GradeAnswer.objects.filter(contribution__in=self.contributions.all())\n\n\nclass Contribution(models.Model):\n \"\"\"A contributor who is assigned to a course and his questionnaires.\"\"\"\n\n course = models.ForeignKey(Course, verbose_name=_(u\"course\"), related_name='contributions')\n contributor = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_(u\"contributor\"), blank=True, null=True, related_name='contributions')\n questionnaires = models.ManyToManyField(Questionnaire, verbose_name=_(u\"questionnaires\"), blank=True, related_name=\"contributions\")\n responsible = models.BooleanField(verbose_name=_(u\"responsible\"), default=False)\n can_edit = models.BooleanField(verbose_name=_(u\"can edit\"), default=False)\n\n class Meta:\n unique_together = (\n ('course', 'contributor'),\n )\n\n def clean(self):\n # responsible contributors can always edit\n if self.responsible:\n self.can_edit = True\n\n\nclass Question(models.Model):\n \"\"\"A question including a type.\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n QUESTION_KINDS = (\n (u\"T\", _(u\"Text Question\")),\n (u\"L\", _(u\"Likert Question\")),\n (u\"G\", _(u\"Grade Question\")),\n )\n\n questionnaire = models.ForeignKey(Questionnaire)\n text_de = models.TextField(verbose_name=_(u\"question text (german)\"))\n text_en = models.TextField(verbose_name=_(u\"question text (english)\"))\n kind = models.CharField(max_length=1, choices=QUESTION_KINDS,\n verbose_name=_(u\"kind of question\"))\n\n text = Translate\n\n class Meta:\n order_with_respect_to = 'questionnaire'\n verbose_name = _(u\"question\")\n verbose_name_plural = _(u\"questions\")\n\n @property\n def answer_class(self):\n if self.kind == u\"T\":\n return TextAnswer\n elif self.kind == u\"L\":\n return LikertAnswer\n elif self.kind == u\"G\":\n return GradeAnswer\n else:\n raise Exception(\"Unknown answer kind: %r\" % self.kind)\n\n def is_likert_question(self):\n return self.answer_class == LikertAnswer\n\n def is_text_question(self):\n return self.answer_class == TextAnswer\n\n def is_grade_question(self):\n return self.answer_class == GradeAnswer\n\n\nclass Answer(models.Model):\n \"\"\"An abstract answer to a question. 
For anonymity purposes, the answering\n user ist not stored in the object. Concrete subclasses are `LikertAnswer`,\n `TextAnswer` and `GradeAnswer`.\"\"\"\n\n question = models.ForeignKey(Question)\n contribution = models.ForeignKey(Contribution)\n\n class Meta:\n abstract = True\n verbose_name = _(u\"answer\")\n verbose_name_plural = _(u\"answers\")\n\n\nclass LikertAnswer(Answer):\n \"\"\"A Likert-scale answer to a question with `1` being *strongly agree* and `5`\n being *strongly disagree*.\"\"\"\n\n answer = models.IntegerField(verbose_name=_(u\"answer\"))\n\n class Meta:\n verbose_name = _(u\"Likert answer\")\n verbose_name_plural = _(u\"Likert answers\")\n\n\nclass GradeAnswer(Answer):\n \"\"\"A grade answer to a question with `1` being best and `5` being worst.\"\"\"\n\n answer = models.IntegerField(verbose_name=_(u\"answer\"))\n\n class Meta:\n verbose_name = _(u\"grade answer\")\n verbose_name_plural = _(u\"grade answers\")\n\n\nclass TextAnswer(Answer):\n \"\"\"A free-form text answer to a question (usually a comment about a course\n or a contributor).\"\"\"\n\n elements_per_page = 5\n\n reviewed_answer = models.TextField(verbose_name=_(u\"reviewed answer\"), blank=True, null=True)\n original_answer = models.TextField(verbose_name=_(u\"original answer\"), blank=True)\n\n checked = models.BooleanField(verbose_name=_(u\"answer checked\"), default=False)\n hidden = models.BooleanField(verbose_name=_(u\"hide answer\"), default=False)\n\n class Meta:\n verbose_name = _(u\"text answer\")\n verbose_name_plural = _(u\"text answers\")\n\n def _answer_get(self):\n return self.reviewed_answer or self.original_answer\n\n def _answer_set(self, value):\n self.original_answer = value\n self.reviewed_answer = None\n\n answer = property(_answer_get, _answer_set)\n\n\nclass FaqSection(models.Model):\n \"\"\"Section in the frequently asked questions\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n order = models.IntegerField(verbose_name=_(\"section order\"))\n\n title_de = models.TextField(verbose_name=_(u\"section title (german)\"))\n title_en = models.TextField(verbose_name=_(u\"section title (english)\"))\n title = Translate\n\n class Meta:\n ordering = ['order', ]\n verbose_name = _(u\"section\")\n verbose_name_plural = _(u\"sections\")\n\n\nclass FaqQuestion(models.Model):\n \"\"\"Question and answer in the frequently asked questions\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n section = models.ForeignKey(FaqSection, related_name=\"questions\")\n\n order = models.IntegerField(verbose_name=_(\"question order\"))\n\n question_de = models.TextField(verbose_name=_(\"question (german)\"))\n question_en = models.TextField(verbose_name=_(\"question (english)\"))\n question = Translate\n\n answer_de = models.TextField(verbose_name=_(\"answer (german)\"))\n answer_en = models.TextField(verbose_name=_(\"answer (german)\"))\n answer = Translate\n\n class Meta:\n ordering = ['order', ]\n verbose_name = _(u\"question\")\n verbose_name_plural = _(u\"questions\")\n\n\nclass UserProfile(models.Model):\n user = models.OneToOneField(settings.AUTH_USER_MODEL)\n\n # extending first_name and last_name from the user\n title = models.CharField(verbose_name=_(u\"Title\"), max_length=1024, blank=True, null=True)\n\n # picture of the user\n picture = models.ImageField(verbose_name=_(u\"Picture\"), upload_to=\"pictures\", blank=True, null=True)\n\n # delegates of the user, which can also manage their courses\n delegates = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u\"Delegates\"), 
related_name=\"represented_users\", blank=True)\n\n # users to which all emails should be sent in cc without giving them delegate rights\n cc_users = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u\"CC Users\"), related_name=\"cc_users\", blank=True)\n\n # key for url based login of this user\n MAX_LOGIN_KEY = 2**31-1\n\n login_key = models.IntegerField(verbose_name=_(u\"Login Key\"), blank=True, null=True)\n login_key_valid_until = models.DateField(verbose_name=_(u\"Login Key Validity\"), null=True)\n\n class Meta:\n verbose_name = _('user')\n verbose_name_plural = _('users')\n\n def __unicode__(self):\n return unicode(self.user)\n\n @property\n def full_name(self):\n if self.user.last_name:\n name = self.user.last_name\n if self.user.first_name:\n name = self.user.first_name + \" \" + name\n if self.title:\n name = self.title + \" \" + name\n return name\n else:\n return self.user.username\n\n @property\n def can_fsr_delete(self):\n return not self.is_contributor\n\n @property\n def enrolled_in_courses(self):\n return self.user.course_set.exists()\n\n @property\n def is_contributor(self):\n return self.user.contributions.exists()\n\n @property\n def is_editor(self):\n return self.user.contributions.filter(can_edit=True).exists()\n\n @property\n def is_responsible(self):\n # in the user list, self.user.contributions is prefetched, therefore use it directly and don't filter it\n return any(contribution.responsible for contribution in self.user.contributions.all())\n\n @property\n def is_delegate(self):\n return self.delegates.exists()\n\n @property\n def is_editor_or_delegate(self):\n return self.is_editor or self.is_delegate\n\n @classmethod\n def email_needs_login_key(cls, email):\n return not any([email.endswith(\"@\" + domain) for domain in settings.INSTITUTION_EMAIL_DOMAINS])\n\n @property\n def needs_login_key(self):\n return UserProfile.email_needs_login_key(self.user.email)\n\n @classmethod\n def get_for_user(cls, user):\n obj, _ = cls.objects.get_or_create(user=user)\n return obj\n\n def generate_login_key(self):\n while True:\n key = random.randrange(0, UserProfile.MAX_LOGIN_KEY)\n if not UserProfile.objects.filter(login_key=key).exists():\n # key not yet used\n self.login_key = key\n break\n\n self.refresh_login_key()\n\n def refresh_login_key(self):\n self.login_key_valid_until = datetime.date.today() + datetime.timedelta(settings.LOGIN_KEY_VALIDITY)\n\n @staticmethod\n @receiver(post_save, sender=settings.AUTH_USER_MODEL)\n def create_user_profile(sender, instance, created, raw, **kwargs):\n \"\"\"Creates a UserProfile object whenever a User is created.\"\"\"\n if created and not raw:\n UserProfile.objects.create(user=instance)\n\n\ndef validate_template(value):\n \"\"\"Field validator which ensures that the value can be compiled into a\n Django Template.\"\"\"\n try:\n Template(value)\n except (TemplateSyntaxError, TemplateEncodingError) as e:\n raise ValidationError(str(e))\n\n\nclass EmailTemplate(models.Model):\n name = models.CharField(max_length=1024, unique=True, verbose_name=_(\"Name\"))\n\n subject = models.CharField(max_length=1024, verbose_name=_(u\"Subject\"), validators=[validate_template])\n body = models.TextField(verbose_name=_(\"Body\"), validators=[validate_template])\n\n @classmethod\n def get_review_template(cls):\n return cls.objects.get(name=\"Lecturer Review Notice\")\n\n @classmethod\n def get_reminder_template(cls):\n return cls.objects.get(name=\"Student Reminder\")\n\n @classmethod\n def get_publish_template(cls):\n return 
cls.objects.get(name=\"Publishing Notice\")\n\n @classmethod\n def get_login_key_template(cls):\n return cls.objects.get(name=\"Login Key Created\")\n\n @classmethod\n def recipient_list_for_course(cls, course, recipient_groups):\n recipients = []\n\n if \"responsible\" in recipient_groups:\n recipients += [course.responsible_contributor]\n\n if \"contributors\" in recipient_groups:\n recipients += [c.contributor for c in course.contributions.exclude(contributor=None)]\n elif \"editors\" in recipient_groups:\n recipients += [c.contributor for c in course.contributions.exclude(contributor=None).filter(can_edit=True)]\n\n if \"all_participants\" in recipient_groups:\n recipients += course.participants.all()\n elif \"due_participants\" in recipient_groups:\n recipients += course.due_participants\n\n return recipients\n\n @classmethod\n def render_string(cls, text, dictionary):\n return Template(text).render(Context(dictionary, autoescape=False))\n\n def send_to_users_in_courses(self, courses, recipient_groups):\n user_course_map = {}\n for course in courses:\n responsible = UserProfile.get_for_user(course.responsible_contributor)\n for user in self.recipient_list_for_course(course, recipient_groups):\n if user.email and user not in responsible.cc_users.all() and user not in responsible.delegates.all():\n user_course_map.setdefault(user, []).append(course)\n\n for user, courses in user_course_map.iteritems():\n cc_users = []\n if (\"responsible\" in recipient_groups or \"editors\" in recipient_groups) and any(course.is_user_editor(user) for course in courses):\n cc_users += UserProfile.get_for_user(user).delegates.all()\n cc_users += UserProfile.get_for_user(user).cc_users.all()\n cc_addresses = [p.email for p in cc_users if p.email]\n\n mail = EmailMessage(\n subject = self.render_string(self.subject, {'user': user, 'courses': courses}),\n body = self.render_string(self.body, {'user': user, 'courses': courses}),\n to = [user.email],\n cc = cc_addresses,\n bcc = [a[1] for a in settings.MANAGERS],\n headers = {'Reply-To': settings.REPLY_TO_EMAIL})\n mail.send(False)\n\n def send_to_user(self, user):\n if not user.email:\n return\n\n mail = EmailMessage(\n subject = self.render_string(self.subject, {'user': user}),\n body = self.render_string(self.body, {'user': user}),\n to = [user.email],\n bcc = [a[1] for a in settings.MANAGERS],\n headers = {'Reply-To': settings.REPLY_TO_EMAIL})\n mail.send(False)\n\n", "path": "evap/evaluation/models.py"}], "after_files": [{"content": "from django.conf import settings\nfrom django.core.exceptions import ValidationError\nfrom django.core.mail import EmailMessage\nfrom django.db import models\nfrom django.db.models import Count\nfrom django.db.models.signals import post_save\nfrom django.dispatch import receiver\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.template import Context, Template, TemplateSyntaxError, TemplateEncodingError\nfrom django_fsm.db.fields import FSMField, transition\n\n# see evaluation.meta for the use of Translate in this file\nfrom evap.evaluation.meta import LocalizeModelBase, Translate\n\nimport datetime\nimport random\n\n# for converting state into student_state\nSTUDENT_STATES_NAMES = {\n 'new': 'upcoming',\n 'prepared': 'upcoming',\n 'lecturerApproved': 'upcoming',\n 'approved': 'upcoming',\n 'inEvaluation': 'inEvaluation',\n 'evaluated': 'evaluationFinished',\n 'reviewed': 'evaluationFinished',\n 'published': 'published'\n}\n\n\nclass Semester(models.Model):\n \"\"\"Represents a semester, e.g. 
the winter term of 2011/2012.\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n name_de = models.CharField(max_length=1024, unique=True, verbose_name=_(u\"name (german)\"))\n name_en = models.CharField(max_length=1024, unique=True, verbose_name=_(u\"name (english)\"))\n\n name = Translate\n\n created_at = models.DateField(verbose_name=_(u\"created at\"), auto_now_add=True)\n\n class Meta:\n ordering = ('-created_at', 'name_de')\n verbose_name = _(u\"semester\")\n verbose_name_plural = _(u\"semesters\")\n\n def __unicode__(self):\n return self.name\n\n @property\n def can_fsr_delete(self):\n for course in self.course_set.all():\n if not course.can_fsr_delete:\n return False\n return True\n\n @classmethod\n def get_all_with_published_courses(cls):\n return cls.objects.filter(course__state=\"published\").distinct()\n\n\nclass Questionnaire(models.Model):\n \"\"\"A named collection of questions.\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n name_de = models.CharField(max_length=1024, unique=True, verbose_name=_(u\"name (german)\"))\n name_en = models.CharField(max_length=1024, unique=True, verbose_name=_(u\"name (english)\"))\n name = Translate\n\n description_de = models.TextField(verbose_name=_(u\"description (german)\"), blank=True, null=True)\n description_en = models.TextField(verbose_name=_(u\"description (english)\"), blank=True, null=True)\n description = Translate\n\n public_name_de = models.CharField(max_length=1024, verbose_name=_(u\"display name (german)\"))\n public_name_en = models.CharField(max_length=1024, verbose_name=_(u\"display name (english)\"))\n public_name = Translate\n\n teaser_de = models.TextField(verbose_name=_(u\"teaser (german)\"), blank=True, null=True)\n teaser_en = models.TextField(verbose_name=_(u\"teaser (english)\"), blank=True, null=True)\n teaser = Translate\n\n index = models.IntegerField(verbose_name=_(u\"ordering index\"))\n\n is_for_contributors = models.BooleanField(verbose_name=_(u\"is for contributors\"), default=False)\n obsolete = models.BooleanField(verbose_name=_(u\"obsolete\"), default=False)\n\n class Meta:\n ordering = ('obsolete', 'index', 'name_de')\n verbose_name = _(u\"questionnaire\")\n verbose_name_plural = _(u\"questionnaires\")\n\n def __unicode__(self):\n return self.name\n\n @property\n def can_fsr_delete(self):\n return not self.contributions.exists()\n\n\nclass Course(models.Model):\n \"\"\"Models a single course, e.g. 
the Math 101 course of 2002.\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n state = FSMField(default='new', protected=True)\n\n semester = models.ForeignKey(Semester, verbose_name=_(u\"semester\"))\n\n name_de = models.CharField(max_length=1024, verbose_name=_(u\"name (german)\"))\n name_en = models.CharField(max_length=1024, verbose_name=_(u\"name (english)\"))\n name = Translate\n\n # type of course: lecture, seminar, project\n kind = models.CharField(max_length=1024, verbose_name=_(u\"type\"))\n\n # bachelor, master, d-school course\n degree = models.CharField(max_length=1024, verbose_name=_(u\"degree\"))\n\n # students that are allowed to vote\n participants = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u\"participants\"), blank=True)\n participant_count = models.IntegerField(verbose_name=_(u\"participant count\"), blank=True, null=True, default=None)\n\n # students that already voted\n voters = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u\"voters\"), blank=True, related_name='+')\n voter_count = models.IntegerField(verbose_name=_(u\"voter count\"), blank=True, null=True, default=None)\n\n # when the evaluation takes place\n vote_start_date = models.DateField(null=True, verbose_name=_(u\"first date to vote\"))\n vote_end_date = models.DateField(null=True, verbose_name=_(u\"last date to vote\"))\n\n # who last modified this course, shell be noted\n last_modified_time = models.DateTimeField(auto_now=True)\n last_modified_user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name=\"+\", null=True, blank=True)\n\n class Meta:\n ordering = ('semester', 'degree', 'name_de')\n unique_together = (\n ('semester', 'degree', 'name_de'),\n ('semester', 'degree', 'name_en'),\n )\n verbose_name = _(u\"course\")\n verbose_name_plural = _(u\"courses\")\n\n def __unicode__(self):\n return self.name\n\n def clean(self):\n if self.vote_start_date and self.vote_end_date:\n if self.vote_start_date >= self.vote_end_date:\n raise ValidationError(_(u\"The vote start date must be before the vote end date.\"))\n\n def save(self, *args, **kw):\n super(Course, self).save(*args, **kw)\n\n # make sure there is a general contribution\n if not self.general_contribution:\n self.contributions.create(contributor=None)\n\n def is_fully_checked(self):\n \"\"\"Shortcut for finding out whether all text answers to this course have been checked\"\"\"\n return not self.open_textanswer_set.exists()\n\n def can_user_vote(self, user):\n \"\"\"Returns whether the user is allowed to vote on this course.\"\"\"\n return (self.state == \"inEvaluation\"\n and datetime.date.today() <= self.vote_end_date\n and user in self.participants.all()\n and user not in self.voters.all())\n\n def can_fsr_edit(self):\n return self.state in ['new', 'prepared', 'lecturerApproved', 'approved', 'inEvaluation']\n\n def can_fsr_delete(self):\n return self.can_fsr_edit() and not self.voters.exists()\n\n def can_fsr_review(self):\n return self.state in ['inEvaluation', 'evaluated'] and not self.is_fully_checked()\n\n def can_fsr_approve(self):\n return self.state in ['new', 'prepared', 'lecturerApproved']\n\n def can_publish_grades(self):\n return self.num_voters >= settings.MIN_ANSWER_COUNT and float(self.num_voters) / self.num_participants >= settings.MIN_ANSWER_PERCENTAGE\n\n @transition(field=state, source=['new', 'lecturerApproved'], target='prepared')\n def ready_for_contributors(self, send_mail=True):\n if send_mail:\n EmailTemplate.get_review_template().send_to_users_in_courses([self], ['editors'])\n\n 
@transition(field=state, source='prepared', target='lecturerApproved')\n def contributor_approve(self):\n pass\n\n @transition(field=state, source=['new', 'prepared', 'lecturerApproved'], target='approved')\n def fsr_approve(self):\n pass\n\n @transition(field=state, source='prepared', target='new')\n def revert_to_new(self):\n pass\n\n @transition(field=state, source='approved', target='inEvaluation')\n def evaluation_begin(self):\n pass\n\n @transition(field=state, source='inEvaluation', target='evaluated')\n def evaluation_end(self):\n pass\n\n @transition(field=state, source='evaluated', target='reviewed', conditions=[is_fully_checked])\n def review_finished(self):\n pass\n\n @transition(field=state, source='reviewed', target='published')\n def publish(self):\n pass\n\n @transition(field=state, source='published', target='reviewed')\n def revoke(self):\n pass\n\n @property\n def student_state(self):\n return STUDENT_STATES_NAMES[self.state]\n\n @property\n def general_contribution(self):\n try:\n return self.contributions.get(contributor=None)\n except Contribution.DoesNotExist:\n return None\n\n @property\n def num_participants(self):\n if self.participant_count:\n return self.participant_count\n return self.participants.count()\n\n @property\n def num_voters(self):\n if self.voter_count:\n return self.voter_count\n return self.voters.count()\n\n @property\n def due_participants(self):\n return self.participants.exclude(pk__in=self.voters.all())\n\n @property\n def responsible_contributor(self):\n return self.contributions.get(responsible=True).contributor\n\n @property\n def responsible_contributors_name(self):\n return self.responsible_contributor.userprofile.full_name\n\n @property\n def responsible_contributors_username(self):\n return self.responsible_contributor.username\n\n def has_enough_questionnaires(self):\n return self.general_contribution and all(self.contributions.aggregate(Count('questionnaires')).values())\n\n def is_user_editor_or_delegate(self, user):\n if self.contributions.filter(can_edit=True, contributor=user).exists():\n return True\n else:\n represented_userprofiles = user.represented_users.all()\n represented_users = [profile.user for profile in represented_userprofiles]\n if self.contributions.filter(can_edit=True, contributor__in=represented_users).exists():\n return True\n\n return False\n\n def is_user_responsible_or_delegate(self, user):\n if self.contributions.filter(responsible=True, contributor=user).exists():\n return True\n else:\n represented_userprofiles = user.represented_users.all()\n represented_users = [profile.user for profile in represented_userprofiles]\n if self.contributions.filter(responsible=True, contributor__in=represented_users).exists():\n return True\n\n return False\n\n def is_user_contributor(self, user):\n return self.contributions.filter(contributor=user).exists()\n\n def is_user_editor(self, user):\n return self.contributions.filter(contributor=user, can_edit=True).exists()\n\n def warnings(self):\n result = []\n if self.state == 'new' and not self.has_enough_questionnaires():\n result.append(_(u\"Not enough questionnaires assigned\"))\n if self.state in ['inEvaluation', 'evaluated', 'reviewed'] and not self.can_publish_grades():\n result.append(_(u\"Not enough participants to publish results\"))\n return result\n\n @property\n def textanswer_set(self):\n \"\"\"Pseudo relationship to all text answers for this course\"\"\"\n return TextAnswer.objects.filter(contribution__in=self.contributions.all())\n\n @property\n def 
open_textanswer_set(self):\n \"\"\"Pseudo relationship to all text answers for this course\"\"\"\n return TextAnswer.objects.filter(contribution__in=self.contributions.all(), checked=False)\n\n @property\n def checked_textanswer_set(self):\n \"\"\"Pseudo relationship to all text answers for this course\"\"\"\n return TextAnswer.objects.filter(contribution__in=self.contributions.all(), checked=True)\n\n @property\n def likertanswer_set(self):\n \"\"\"Pseudo relationship to all Likert answers for this course\"\"\"\n return LikertAnswer.objects.filter(contribution__in=self.contributions.all())\n\n @property\n def gradeanswer_set(self):\n \"\"\"Pseudo relationship to all grade answers for this course\"\"\"\n return GradeAnswer.objects.filter(contribution__in=self.contributions.all())\n\n\nclass Contribution(models.Model):\n \"\"\"A contributor who is assigned to a course and his questionnaires.\"\"\"\n\n course = models.ForeignKey(Course, verbose_name=_(u\"course\"), related_name='contributions')\n contributor = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_(u\"contributor\"), blank=True, null=True, related_name='contributions')\n questionnaires = models.ManyToManyField(Questionnaire, verbose_name=_(u\"questionnaires\"), blank=True, related_name=\"contributions\")\n responsible = models.BooleanField(verbose_name=_(u\"responsible\"), default=False)\n can_edit = models.BooleanField(verbose_name=_(u\"can edit\"), default=False)\n\n class Meta:\n unique_together = (\n ('course', 'contributor'),\n )\n\n def clean(self):\n # responsible contributors can always edit\n if self.responsible:\n self.can_edit = True\n\n\nclass Question(models.Model):\n \"\"\"A question including a type.\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n QUESTION_KINDS = (\n (u\"T\", _(u\"Text Question\")),\n (u\"L\", _(u\"Likert Question\")),\n (u\"G\", _(u\"Grade Question\")),\n )\n\n questionnaire = models.ForeignKey(Questionnaire)\n text_de = models.TextField(verbose_name=_(u\"question text (german)\"))\n text_en = models.TextField(verbose_name=_(u\"question text (english)\"))\n kind = models.CharField(max_length=1, choices=QUESTION_KINDS,\n verbose_name=_(u\"kind of question\"))\n\n text = Translate\n\n class Meta:\n order_with_respect_to = 'questionnaire'\n verbose_name = _(u\"question\")\n verbose_name_plural = _(u\"questions\")\n\n @property\n def answer_class(self):\n if self.kind == u\"T\":\n return TextAnswer\n elif self.kind == u\"L\":\n return LikertAnswer\n elif self.kind == u\"G\":\n return GradeAnswer\n else:\n raise Exception(\"Unknown answer kind: %r\" % self.kind)\n\n def is_likert_question(self):\n return self.answer_class == LikertAnswer\n\n def is_text_question(self):\n return self.answer_class == TextAnswer\n\n def is_grade_question(self):\n return self.answer_class == GradeAnswer\n\n\nclass Answer(models.Model):\n \"\"\"An abstract answer to a question. For anonymity purposes, the answering\n user ist not stored in the object. 
Concrete subclasses are `LikertAnswer`,\n `TextAnswer` and `GradeAnswer`.\"\"\"\n\n question = models.ForeignKey(Question)\n contribution = models.ForeignKey(Contribution)\n\n class Meta:\n abstract = True\n verbose_name = _(u\"answer\")\n verbose_name_plural = _(u\"answers\")\n\n\nclass LikertAnswer(Answer):\n \"\"\"A Likert-scale answer to a question with `1` being *strongly agree* and `5`\n being *strongly disagree*.\"\"\"\n\n answer = models.IntegerField(verbose_name=_(u\"answer\"))\n\n class Meta:\n verbose_name = _(u\"Likert answer\")\n verbose_name_plural = _(u\"Likert answers\")\n\n\nclass GradeAnswer(Answer):\n \"\"\"A grade answer to a question with `1` being best and `5` being worst.\"\"\"\n\n answer = models.IntegerField(verbose_name=_(u\"answer\"))\n\n class Meta:\n verbose_name = _(u\"grade answer\")\n verbose_name_plural = _(u\"grade answers\")\n\n\nclass TextAnswer(Answer):\n \"\"\"A free-form text answer to a question (usually a comment about a course\n or a contributor).\"\"\"\n\n elements_per_page = 5\n\n reviewed_answer = models.TextField(verbose_name=_(u\"reviewed answer\"), blank=True, null=True)\n original_answer = models.TextField(verbose_name=_(u\"original answer\"), blank=True)\n\n checked = models.BooleanField(verbose_name=_(u\"answer checked\"), default=False)\n hidden = models.BooleanField(verbose_name=_(u\"hide answer\"), default=False)\n\n class Meta:\n verbose_name = _(u\"text answer\")\n verbose_name_plural = _(u\"text answers\")\n\n def _answer_get(self):\n return self.reviewed_answer or self.original_answer\n\n def _answer_set(self, value):\n self.original_answer = value\n self.reviewed_answer = None\n\n answer = property(_answer_get, _answer_set)\n\n\nclass FaqSection(models.Model):\n \"\"\"Section in the frequently asked questions\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n order = models.IntegerField(verbose_name=_(\"section order\"))\n\n title_de = models.TextField(verbose_name=_(u\"section title (german)\"))\n title_en = models.TextField(verbose_name=_(u\"section title (english)\"))\n title = Translate\n\n class Meta:\n ordering = ['order', ]\n verbose_name = _(u\"section\")\n verbose_name_plural = _(u\"sections\")\n\n\nclass FaqQuestion(models.Model):\n \"\"\"Question and answer in the frequently asked questions\"\"\"\n\n __metaclass__ = LocalizeModelBase\n\n section = models.ForeignKey(FaqSection, related_name=\"questions\")\n\n order = models.IntegerField(verbose_name=_(\"question order\"))\n\n question_de = models.TextField(verbose_name=_(\"question (german)\"))\n question_en = models.TextField(verbose_name=_(\"question (english)\"))\n question = Translate\n\n answer_de = models.TextField(verbose_name=_(\"answer (german)\"))\n answer_en = models.TextField(verbose_name=_(\"answer (german)\"))\n answer = Translate\n\n class Meta:\n ordering = ['order', ]\n verbose_name = _(u\"question\")\n verbose_name_plural = _(u\"questions\")\n\n\nclass UserProfile(models.Model):\n user = models.OneToOneField(settings.AUTH_USER_MODEL)\n\n # extending first_name and last_name from the user\n title = models.CharField(verbose_name=_(u\"Title\"), max_length=1024, blank=True, null=True)\n\n # picture of the user\n picture = models.ImageField(verbose_name=_(u\"Picture\"), upload_to=\"pictures\", blank=True, null=True)\n\n # delegates of the user, which can also manage their courses\n delegates = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u\"Delegates\"), related_name=\"represented_users\", blank=True)\n\n # users to which all emails should be 
sent in cc without giving them delegate rights\n cc_users = models.ManyToManyField(settings.AUTH_USER_MODEL, verbose_name=_(u\"CC Users\"), related_name=\"cc_users\", blank=True)\n\n # key for url based login of this user\n MAX_LOGIN_KEY = 2**31-1\n\n login_key = models.IntegerField(verbose_name=_(u\"Login Key\"), blank=True, null=True)\n login_key_valid_until = models.DateField(verbose_name=_(u\"Login Key Validity\"), null=True)\n\n class Meta:\n verbose_name = _('user')\n verbose_name_plural = _('users')\n\n def __unicode__(self):\n return unicode(self.user)\n\n @property\n def full_name(self):\n if self.user.last_name:\n name = self.user.last_name\n if self.user.first_name:\n name = self.user.first_name + \" \" + name\n if self.title:\n name = self.title + \" \" + name\n return name\n else:\n return self.user.username\n\n @property\n def can_fsr_delete(self):\n return not self.is_contributor\n\n @property\n def enrolled_in_courses(self):\n return self.user.course_set.exists()\n\n @property\n def is_contributor(self):\n return self.user.contributions.exists()\n\n @property\n def is_editor(self):\n return self.user.contributions.filter(can_edit=True).exists()\n\n @property\n def is_responsible(self):\n # in the user list, self.user.contributions is prefetched, therefore use it directly and don't filter it\n return any(contribution.responsible for contribution in self.user.contributions.all())\n\n @property\n def is_delegate(self):\n return self.user.represented_users.exists()\n\n @property\n def is_editor_or_delegate(self):\n return self.is_editor or self.is_delegate\n\n @classmethod\n def email_needs_login_key(cls, email):\n return not any([email.endswith(\"@\" + domain) for domain in settings.INSTITUTION_EMAIL_DOMAINS])\n\n @property\n def needs_login_key(self):\n return UserProfile.email_needs_login_key(self.user.email)\n\n @classmethod\n def get_for_user(cls, user):\n obj, _ = cls.objects.get_or_create(user=user)\n return obj\n\n def generate_login_key(self):\n while True:\n key = random.randrange(0, UserProfile.MAX_LOGIN_KEY)\n if not UserProfile.objects.filter(login_key=key).exists():\n # key not yet used\n self.login_key = key\n break\n\n self.refresh_login_key()\n\n def refresh_login_key(self):\n self.login_key_valid_until = datetime.date.today() + datetime.timedelta(settings.LOGIN_KEY_VALIDITY)\n\n @staticmethod\n @receiver(post_save, sender=settings.AUTH_USER_MODEL)\n def create_user_profile(sender, instance, created, raw, **kwargs):\n \"\"\"Creates a UserProfile object whenever a User is created.\"\"\"\n if created and not raw:\n UserProfile.objects.create(user=instance)\n\n\ndef validate_template(value):\n \"\"\"Field validator which ensures that the value can be compiled into a\n Django Template.\"\"\"\n try:\n Template(value)\n except (TemplateSyntaxError, TemplateEncodingError) as e:\n raise ValidationError(str(e))\n\n\nclass EmailTemplate(models.Model):\n name = models.CharField(max_length=1024, unique=True, verbose_name=_(\"Name\"))\n\n subject = models.CharField(max_length=1024, verbose_name=_(u\"Subject\"), validators=[validate_template])\n body = models.TextField(verbose_name=_(\"Body\"), validators=[validate_template])\n\n @classmethod\n def get_review_template(cls):\n return cls.objects.get(name=\"Lecturer Review Notice\")\n\n @classmethod\n def get_reminder_template(cls):\n return cls.objects.get(name=\"Student Reminder\")\n\n @classmethod\n def get_publish_template(cls):\n return cls.objects.get(name=\"Publishing Notice\")\n\n @classmethod\n def 
get_login_key_template(cls):\n return cls.objects.get(name=\"Login Key Created\")\n\n @classmethod\n def recipient_list_for_course(cls, course, recipient_groups):\n recipients = []\n\n if \"responsible\" in recipient_groups:\n recipients += [course.responsible_contributor]\n\n if \"contributors\" in recipient_groups:\n recipients += [c.contributor for c in course.contributions.exclude(contributor=None)]\n elif \"editors\" in recipient_groups:\n recipients += [c.contributor for c in course.contributions.exclude(contributor=None).filter(can_edit=True)]\n\n if \"all_participants\" in recipient_groups:\n recipients += course.participants.all()\n elif \"due_participants\" in recipient_groups:\n recipients += course.due_participants\n\n return recipients\n\n @classmethod\n def render_string(cls, text, dictionary):\n return Template(text).render(Context(dictionary, autoescape=False))\n\n def send_to_users_in_courses(self, courses, recipient_groups):\n user_course_map = {}\n for course in courses:\n responsible = UserProfile.get_for_user(course.responsible_contributor)\n for user in self.recipient_list_for_course(course, recipient_groups):\n if user.email and user not in responsible.cc_users.all() and user not in responsible.delegates.all():\n user_course_map.setdefault(user, []).append(course)\n\n for user, courses in user_course_map.iteritems():\n cc_users = []\n if (\"responsible\" in recipient_groups or \"editors\" in recipient_groups) and any(course.is_user_editor(user) for course in courses):\n cc_users += UserProfile.get_for_user(user).delegates.all()\n cc_users += UserProfile.get_for_user(user).cc_users.all()\n cc_addresses = [p.email for p in cc_users if p.email]\n\n mail = EmailMessage(\n subject = self.render_string(self.subject, {'user': user, 'courses': courses}),\n body = self.render_string(self.body, {'user': user, 'courses': courses}),\n to = [user.email],\n cc = cc_addresses,\n bcc = [a[1] for a in settings.MANAGERS],\n headers = {'Reply-To': settings.REPLY_TO_EMAIL})\n mail.send(False)\n\n def send_to_user(self, user):\n if not user.email:\n return\n\n mail = EmailMessage(\n subject = self.render_string(self.subject, {'user': user}),\n body = self.render_string(self.body, {'user': user}),\n to = [user.email],\n bcc = [a[1] for a in settings.MANAGERS],\n headers = {'Reply-To': settings.REPLY_TO_EMAIL})\n mail.send(False)\n\n", "path": "evap/evaluation/models.py"}]} |
gh_patches_debug_184 | rasdani/github-patches | git_diff | googleapis__google-api-python-client-273 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error importing discovery from the client API
Hello, I'm getting an ImportError ("cannot import name util") when running a simple script. I've tried running the script in two ways:
```
from apiclient import discovery
import json
from pprint import pprint
from httplib2 import Http
from oauth2client.service_account import ServiceAccountCredentials
scopes = ['https://www.googleapis.com/auth/fusiontables']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
'service-account.json', scopes)
ft = discovery.build('fusiontables', 'v2', credentials=credentials)
pprint(ft)
```
Which gives:
> Traceback (most recent call last):
> File "apiTest.py", line 1, in <module>
> from apiclient import discovery
> File "build/bdist.linux-x86_64/egg/apiclient/**init**.py", line 18, in <module>
> File "build/bdist.linux-x86_64/egg/googleapiclient/channel.py", line 64, in <module>
> ImportError: cannot import name util
And:
```
from apiclient.discovery import build
import json
from pprint import pprint
from httplib2 import Http
from oauth2client.service_account import ServiceAccountCredentials
scopes = ['https://www.googleapis.com/auth/fusiontables']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
'service-account.json', scopes)
ft = discovery.build('fusiontables', 'v2', credentials=credentials)
pprint(ft)
#print response
```
Which yields the same exception:
> python apiTest.py
> Traceback (most recent call last):
> File "apiTest.py", line 1, in <module>
> from apiclient.discovery import build
> File "build/bdist.linux-x86_64/egg/apiclient/**init**.py", line 18, in <module>
> File "build/bdist.linux-x86_64/egg/googleapiclient/channel.py", line 64, in <module>
> ImportError: cannot import name util
I'm not sure if I'm missing a prereq or what, but I'm not able to find a way around this.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `googleapiclient/channel.py`
Content:
```
1 """Channel notifications support.
2
3 Classes and functions to support channel subscriptions and notifications
4 on those channels.
5
6 Notes:
7 - This code is based on experimental APIs and is subject to change.
8 - Notification does not do deduplication of notification ids, that's up to
9 the receiver.
10 - Storing the Channel between calls is up to the caller.
11
12
13 Example setting up a channel:
14
15 # Create a new channel that gets notifications via webhook.
16 channel = new_webhook_channel("https://example.com/my_web_hook")
17
18 # Store the channel, keyed by 'channel.id'. Store it before calling the
19 # watch method because notifications may start arriving before the watch
20 # method returns.
21 ...
22
23 resp = service.objects().watchAll(
24 bucket="some_bucket_id", body=channel.body()).execute()
25 channel.update(resp)
26
27 # Store the channel, keyed by 'channel.id'. Store it after being updated
28 # since the resource_id value will now be correct, and that's needed to
29 # stop a subscription.
30 ...
31
32
33 An example Webhook implementation using webapp2. Note that webapp2 puts
34 headers in a case insensitive dictionary, as headers aren't guaranteed to
35 always be upper case.
36
37 id = self.request.headers[X_GOOG_CHANNEL_ID]
38
39 # Retrieve the channel by id.
40 channel = ...
41
42 # Parse notification from the headers, including validating the id.
43 n = notification_from_headers(channel, self.request.headers)
44
45 # Do app specific stuff with the notification here.
46 if n.resource_state == 'sync':
47 # Code to handle sync state.
48 elif n.resource_state == 'exists':
49 # Code to handle the exists state.
50 elif n.resource_state == 'not_exists':
51 # Code to handle the not exists state.
52
53
54 Example of unsubscribing.
55
56 service.channels().stop(channel.body())
57 """
58 from __future__ import absolute_import
59
60 import datetime
61 import uuid
62
63 from googleapiclient import errors
64 from oauth2client import util
65 import six
66
67 # Oauth2client < 3 has the positional helper in 'util', >= 3 has it
68 # in '_helpers'.
69 try:
70 from oauth2client import util
71 except ImportError:
72 from oauth2client import _helpers as util
73
74
75 # The unix time epoch starts at midnight 1970.
76 EPOCH = datetime.datetime.utcfromtimestamp(0)
77
78 # Map the names of the parameters in the JSON channel description to
79 # the parameter names we use in the Channel class.
80 CHANNEL_PARAMS = {
81 'address': 'address',
82 'id': 'id',
83 'expiration': 'expiration',
84 'params': 'params',
85 'resourceId': 'resource_id',
86 'resourceUri': 'resource_uri',
87 'type': 'type',
88 'token': 'token',
89 }
90
91 X_GOOG_CHANNEL_ID = 'X-GOOG-CHANNEL-ID'
92 X_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'
93 X_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'
94 X_GOOG_RESOURCE_URI = 'X-GOOG-RESOURCE-URI'
95 X_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'
96
97
98 def _upper_header_keys(headers):
99 new_headers = {}
100 for k, v in six.iteritems(headers):
101 new_headers[k.upper()] = v
102 return new_headers
103
104
105 class Notification(object):
106 """A Notification from a Channel.
107
108 Notifications are not usually constructed directly, but are returned
109 from functions like notification_from_headers().
110
111 Attributes:
112 message_number: int, The unique id number of this notification.
113 state: str, The state of the resource being monitored.
114 uri: str, The address of the resource being monitored.
115 resource_id: str, The unique identifier of the version of the resource at
116 this event.
117 """
118 @util.positional(5)
119 def __init__(self, message_number, state, resource_uri, resource_id):
120 """Notification constructor.
121
122 Args:
123 message_number: int, The unique id number of this notification.
124 state: str, The state of the resource being monitored. Can be one
125 of "exists", "not_exists", or "sync".
126 resource_uri: str, The address of the resource being monitored.
127 resource_id: str, The identifier of the watched resource.
128 """
129 self.message_number = message_number
130 self.state = state
131 self.resource_uri = resource_uri
132 self.resource_id = resource_id
133
134
135 class Channel(object):
136 """A Channel for notifications.
137
138 Usually not constructed directly, instead it is returned from helper
139 functions like new_webhook_channel().
140
141 Attributes:
142 type: str, The type of delivery mechanism used by this channel. For
143 example, 'web_hook'.
144 id: str, A UUID for the channel.
145 token: str, An arbitrary string associated with the channel that
146 is delivered to the target address with each event delivered
147 over this channel.
148 address: str, The address of the receiving entity where events are
149 delivered. Specific to the channel type.
150 expiration: int, The time, in milliseconds from the epoch, when this
151 channel will expire.
152 params: dict, A dictionary of string to string, with additional parameters
153 controlling delivery channel behavior.
154 resource_id: str, An opaque id that identifies the resource that is
155 being watched. Stable across different API versions.
156 resource_uri: str, The canonicalized ID of the watched resource.
157 """
158
159 @util.positional(5)
160 def __init__(self, type, id, token, address, expiration=None,
161 params=None, resource_id="", resource_uri=""):
162 """Create a new Channel.
163
164 In user code, this Channel constructor will not typically be called
165 manually since there are functions for creating channels for each specific
166 type with a more customized set of arguments to pass.
167
168 Args:
169 type: str, The type of delivery mechanism used by this channel. For
170 example, 'web_hook'.
171 id: str, A UUID for the channel.
172 token: str, An arbitrary string associated with the channel that
173 is delivered to the target address with each event delivered
174 over this channel.
175 address: str, The address of the receiving entity where events are
176 delivered. Specific to the channel type.
177 expiration: int, The time, in milliseconds from the epoch, when this
178 channel will expire.
179 params: dict, A dictionary of string to string, with additional parameters
180 controlling delivery channel behavior.
181 resource_id: str, An opaque id that identifies the resource that is
182 being watched. Stable across different API versions.
183 resource_uri: str, The canonicalized ID of the watched resource.
184 """
185 self.type = type
186 self.id = id
187 self.token = token
188 self.address = address
189 self.expiration = expiration
190 self.params = params
191 self.resource_id = resource_id
192 self.resource_uri = resource_uri
193
194 def body(self):
195 """Build a body from the Channel.
196
197 Constructs a dictionary that's appropriate for passing into watch()
198 methods as the value of body argument.
199
200 Returns:
201 A dictionary representation of the channel.
202 """
203 result = {
204 'id': self.id,
205 'token': self.token,
206 'type': self.type,
207 'address': self.address
208 }
209 if self.params:
210 result['params'] = self.params
211 if self.resource_id:
212 result['resourceId'] = self.resource_id
213 if self.resource_uri:
214 result['resourceUri'] = self.resource_uri
215 if self.expiration:
216 result['expiration'] = self.expiration
217
218 return result
219
220 def update(self, resp):
221 """Update a channel with information from the response of watch().
222
223 When a request is sent to watch() a resource, the response returned
224 from the watch() request is a dictionary with updated channel information,
225 such as the resource_id, which is needed when stopping a subscription.
226
227 Args:
228 resp: dict, The response from a watch() method.
229 """
230 for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
231 value = resp.get(json_name)
232 if value is not None:
233 setattr(self, param_name, value)
234
235
236 def notification_from_headers(channel, headers):
237 """Parse a notification from the webhook request headers, validate
238 the notification, and return a Notification object.
239
240 Args:
241 channel: Channel, The channel that the notification is associated with.
242 headers: dict, A dictionary like object that contains the request headers
243 from the webhook HTTP request.
244
245 Returns:
246 A Notification object.
247
248 Raises:
249 errors.InvalidNotificationError if the notification is invalid.
250 ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
251 """
252 headers = _upper_header_keys(headers)
253 channel_id = headers[X_GOOG_CHANNEL_ID]
254 if channel.id != channel_id:
255 raise errors.InvalidNotificationError(
256 'Channel id mismatch: %s != %s' % (channel.id, channel_id))
257 else:
258 message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
259 state = headers[X_GOOG_RESOURCE_STATE]
260 resource_uri = headers[X_GOOG_RESOURCE_URI]
261 resource_id = headers[X_GOOG_RESOURCE_ID]
262 return Notification(message_number, state, resource_uri, resource_id)
263
264
265 @util.positional(2)
266 def new_webhook_channel(url, token=None, expiration=None, params=None):
267 """Create a new webhook Channel.
268
269 Args:
270 url: str, URL to post notifications to.
271 token: str, An arbitrary string associated with the channel that
272 is delivered to the target address with each notification delivered
273 over this channel.
274 expiration: datetime.datetime, A time in the future when the channel
275 should expire. Can also be None if the subscription should use the
276 default expiration. Note that different services may have different
277 limits on how long a subscription lasts. Check the response from the
278 watch() method to see the value the service has set for an expiration
279 time.
280 params: dict, Extra parameters to pass on channel creation. Currently
281 not used for webhook channels.
282 """
283 expiration_ms = 0
284 if expiration:
285 delta = expiration - EPOCH
286 expiration_ms = delta.microseconds/1000 + (
287 delta.seconds + delta.days*24*3600)*1000
288 if expiration_ms < 0:
289 expiration_ms = 0
290
291 return Channel('web_hook', str(uuid.uuid4()),
292 token, url, expiration=expiration_ms,
293 params=params)
294
295
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
 if __name__ == "__main__":
-    asyncio.run(run_async_server("."), debug=True)
+    asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
 if __name__ == "__main__":
-    server = run_sync_server(".")
+    server = run_sync_server()
     server.shutdown()
```
| diff --git a/googleapiclient/channel.py b/googleapiclient/channel.py
--- a/googleapiclient/channel.py
+++ b/googleapiclient/channel.py
@@ -61,7 +61,6 @@
 import uuid
 
 from googleapiclient import errors
-from oauth2client import util
 import six
 
 # Oauth2client < 3 has the positional helper in 'util', >= 3 has it
| {"golden_diff": "diff --git a/googleapiclient/channel.py b/googleapiclient/channel.py\n--- a/googleapiclient/channel.py\n+++ b/googleapiclient/channel.py\n@@ -61,7 +61,6 @@\n import uuid\n \n from googleapiclient import errors\n-from oauth2client import util\n import six\n \n # Oauth2client < 3 has the positional helper in 'util', >= 3 has it\n", "issue": "Error importing discovery from the client API\nHello, I'm getting a name exception when running a simple script. I've tried running the script in two ways:\n\n```\nfrom apiclient import discovery\nimport json\n\nfrom pprint import pprint\nfrom httplib2 import Http\n\nfrom oauth2client.service_account import ServiceAccountCredentials\n\nscopes = ['https://www.googleapis.com/auth/fusiontables']\n\ncredentials = ServiceAccountCredentials.from_json_keyfile_name(\n 'service-account.json', scopes)\n\nft = discovery.build('fusiontables', 'v2', credentials=credentials)\npprint(ft)\n```\n\nWhich gives:\n\n> Traceback (most recent call last):\n> File \"apiTest.py\", line 1, in <module>\n> from apiclient import discovery\n> File \"build/bdist.linux-x86_64/egg/apiclient/**init**.py\", line 18, in <module>\n> File \"build/bdist.linux-x86_64/egg/googleapiclient/channel.py\", line 64, in <module>\n> ImportError: cannot import name util\n\nAnd:\n\n```\nfrom apiclient.discovery import build\nimport json\n\nfrom pprint import pprint\nfrom httplib2 import Http\n\nfrom oauth2client.service_account import ServiceAccountCredentials\n\nscopes = ['https://www.googleapis.com/auth/fusiontables']\n\ncredentials = ServiceAccountCredentials.from_json_keyfile_name(\n 'service-account.json', scopes)\n\nft = discovery.build('fusiontables', 'v2', credentials=credentials)\npprint(ft)\n#print response\n```\n\nWhich yields the same exception:\n\n> python apiTest.py \n> Traceback (most recent call last):\n> File \"apiTest.py\", line 1, in <module>\n> from apiclient.discovery import build\n> File \"build/bdist.linux-x86_64/egg/apiclient/**init**.py\", line 18, in <module>\n> File \"build/bdist.linux-x86_64/egg/googleapiclient/channel.py\", line 64, in <module>\n> ImportError: cannot import name util\n\nI'm not sure if I'm missing a prereq or what, but I'm not able to find a way around this.\n\n", "before_files": [{"content": "\"\"\"Channel notifications support.\n\nClasses and functions to support channel subscriptions and notifications\non those channels.\n\nNotes:\n - This code is based on experimental APIs and is subject to change.\n - Notification does not do deduplication of notification ids, that's up to\n the receiver.\n - Storing the Channel between calls is up to the caller.\n\n\nExample setting up a channel:\n\n # Create a new channel that gets notifications via webhook.\n channel = new_webhook_channel(\"https://example.com/my_web_hook\")\n\n # Store the channel, keyed by 'channel.id'. Store it before calling the\n # watch method because notifications may start arriving before the watch\n # method returns.\n ...\n\n resp = service.objects().watchAll(\n bucket=\"some_bucket_id\", body=channel.body()).execute()\n channel.update(resp)\n\n # Store the channel, keyed by 'channel.id'. Store it after being updated\n # since the resource_id value will now be correct, and that's needed to\n # stop a subscription.\n ...\n\n\nAn example Webhook implementation using webapp2. 
Note that webapp2 puts\nheaders in a case insensitive dictionary, as headers aren't guaranteed to\nalways be upper case.\n\n id = self.request.headers[X_GOOG_CHANNEL_ID]\n\n # Retrieve the channel by id.\n channel = ...\n\n # Parse notification from the headers, including validating the id.\n n = notification_from_headers(channel, self.request.headers)\n\n # Do app specific stuff with the notification here.\n if n.resource_state == 'sync':\n # Code to handle sync state.\n elif n.resource_state == 'exists':\n # Code to handle the exists state.\n elif n.resource_state == 'not_exists':\n # Code to handle the not exists state.\n\n\nExample of unsubscribing.\n\n service.channels().stop(channel.body())\n\"\"\"\nfrom __future__ import absolute_import\n\nimport datetime\nimport uuid\n\nfrom googleapiclient import errors\nfrom oauth2client import util\nimport six\n\n# Oauth2client < 3 has the positional helper in 'util', >= 3 has it\n# in '_helpers'.\ntry:\n from oauth2client import util\nexcept ImportError:\n from oauth2client import _helpers as util\n\n\n# The unix time epoch starts at midnight 1970.\nEPOCH = datetime.datetime.utcfromtimestamp(0)\n\n# Map the names of the parameters in the JSON channel description to\n# the parameter names we use in the Channel class.\nCHANNEL_PARAMS = {\n 'address': 'address',\n 'id': 'id',\n 'expiration': 'expiration',\n 'params': 'params',\n 'resourceId': 'resource_id',\n 'resourceUri': 'resource_uri',\n 'type': 'type',\n 'token': 'token',\n }\n\nX_GOOG_CHANNEL_ID = 'X-GOOG-CHANNEL-ID'\nX_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'\nX_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'\nX_GOOG_RESOURCE_URI = 'X-GOOG-RESOURCE-URI'\nX_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'\n\n\ndef _upper_header_keys(headers):\n new_headers = {}\n for k, v in six.iteritems(headers):\n new_headers[k.upper()] = v\n return new_headers\n\n\nclass Notification(object):\n \"\"\"A Notification from a Channel.\n\n Notifications are not usually constructed directly, but are returned\n from functions like notification_from_headers().\n\n Attributes:\n message_number: int, The unique id number of this notification.\n state: str, The state of the resource being monitored.\n uri: str, The address of the resource being monitored.\n resource_id: str, The unique identifier of the version of the resource at\n this event.\n \"\"\"\n @util.positional(5)\n def __init__(self, message_number, state, resource_uri, resource_id):\n \"\"\"Notification constructor.\n\n Args:\n message_number: int, The unique id number of this notification.\n state: str, The state of the resource being monitored. Can be one\n of \"exists\", \"not_exists\", or \"sync\".\n resource_uri: str, The address of the resource being monitored.\n resource_id: str, The identifier of the watched resource.\n \"\"\"\n self.message_number = message_number\n self.state = state\n self.resource_uri = resource_uri\n self.resource_id = resource_id\n\n\nclass Channel(object):\n \"\"\"A Channel for notifications.\n\n Usually not constructed directly, instead it is returned from helper\n functions like new_webhook_channel().\n\n Attributes:\n type: str, The type of delivery mechanism used by this channel. For\n example, 'web_hook'.\n id: str, A UUID for the channel.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each event delivered\n over this channel.\n address: str, The address of the receiving entity where events are\n delivered. 
Specific to the channel type.\n expiration: int, The time, in milliseconds from the epoch, when this\n channel will expire.\n params: dict, A dictionary of string to string, with additional parameters\n controlling delivery channel behavior.\n resource_id: str, An opaque id that identifies the resource that is\n being watched. Stable across different API versions.\n resource_uri: str, The canonicalized ID of the watched resource.\n \"\"\"\n\n @util.positional(5)\n def __init__(self, type, id, token, address, expiration=None,\n params=None, resource_id=\"\", resource_uri=\"\"):\n \"\"\"Create a new Channel.\n\n In user code, this Channel constructor will not typically be called\n manually since there are functions for creating channels for each specific\n type with a more customized set of arguments to pass.\n\n Args:\n type: str, The type of delivery mechanism used by this channel. For\n example, 'web_hook'.\n id: str, A UUID for the channel.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each event delivered\n over this channel.\n address: str, The address of the receiving entity where events are\n delivered. Specific to the channel type.\n expiration: int, The time, in milliseconds from the epoch, when this\n channel will expire.\n params: dict, A dictionary of string to string, with additional parameters\n controlling delivery channel behavior.\n resource_id: str, An opaque id that identifies the resource that is\n being watched. Stable across different API versions.\n resource_uri: str, The canonicalized ID of the watched resource.\n \"\"\"\n self.type = type\n self.id = id\n self.token = token\n self.address = address\n self.expiration = expiration\n self.params = params\n self.resource_id = resource_id\n self.resource_uri = resource_uri\n\n def body(self):\n \"\"\"Build a body from the Channel.\n\n Constructs a dictionary that's appropriate for passing into watch()\n methods as the value of body argument.\n\n Returns:\n A dictionary representation of the channel.\n \"\"\"\n result = {\n 'id': self.id,\n 'token': self.token,\n 'type': self.type,\n 'address': self.address\n }\n if self.params:\n result['params'] = self.params\n if self.resource_id:\n result['resourceId'] = self.resource_id\n if self.resource_uri:\n result['resourceUri'] = self.resource_uri\n if self.expiration:\n result['expiration'] = self.expiration\n\n return result\n\n def update(self, resp):\n \"\"\"Update a channel with information from the response of watch().\n\n When a request is sent to watch() a resource, the response returned\n from the watch() request is a dictionary with updated channel information,\n such as the resource_id, which is needed when stopping a subscription.\n\n Args:\n resp: dict, The response from a watch() method.\n \"\"\"\n for json_name, param_name in six.iteritems(CHANNEL_PARAMS):\n value = resp.get(json_name)\n if value is not None:\n setattr(self, param_name, value)\n\n\ndef notification_from_headers(channel, headers):\n \"\"\"Parse a notification from the webhook request headers, validate\n the notification, and return a Notification object.\n\n Args:\n channel: Channel, The channel that the notification is associated with.\n headers: dict, A dictionary like object that contains the request headers\n from the webhook HTTP request.\n\n Returns:\n A Notification object.\n\n Raises:\n errors.InvalidNotificationError if the notification is invalid.\n ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.\n 
\"\"\"\n headers = _upper_header_keys(headers)\n channel_id = headers[X_GOOG_CHANNEL_ID]\n if channel.id != channel_id:\n raise errors.InvalidNotificationError(\n 'Channel id mismatch: %s != %s' % (channel.id, channel_id))\n else:\n message_number = int(headers[X_GOOG_MESSAGE_NUMBER])\n state = headers[X_GOOG_RESOURCE_STATE]\n resource_uri = headers[X_GOOG_RESOURCE_URI]\n resource_id = headers[X_GOOG_RESOURCE_ID]\n return Notification(message_number, state, resource_uri, resource_id)\n\n\[email protected](2)\ndef new_webhook_channel(url, token=None, expiration=None, params=None):\n \"\"\"Create a new webhook Channel.\n\n Args:\n url: str, URL to post notifications to.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each notification delivered\n over this channel.\n expiration: datetime.datetime, A time in the future when the channel\n should expire. Can also be None if the subscription should use the\n default expiration. Note that different services may have different\n limits on how long a subscription lasts. Check the response from the\n watch() method to see the value the service has set for an expiration\n time.\n params: dict, Extra parameters to pass on channel creation. Currently\n not used for webhook channels.\n \"\"\"\n expiration_ms = 0\n if expiration:\n delta = expiration - EPOCH\n expiration_ms = delta.microseconds/1000 + (\n delta.seconds + delta.days*24*3600)*1000\n if expiration_ms < 0:\n expiration_ms = 0\n\n return Channel('web_hook', str(uuid.uuid4()),\n token, url, expiration=expiration_ms,\n params=params)\n\n", "path": "googleapiclient/channel.py"}], "after_files": [{"content": "\"\"\"Channel notifications support.\n\nClasses and functions to support channel subscriptions and notifications\non those channels.\n\nNotes:\n - This code is based on experimental APIs and is subject to change.\n - Notification does not do deduplication of notification ids, that's up to\n the receiver.\n - Storing the Channel between calls is up to the caller.\n\n\nExample setting up a channel:\n\n # Create a new channel that gets notifications via webhook.\n channel = new_webhook_channel(\"https://example.com/my_web_hook\")\n\n # Store the channel, keyed by 'channel.id'. Store it before calling the\n # watch method because notifications may start arriving before the watch\n # method returns.\n ...\n\n resp = service.objects().watchAll(\n bucket=\"some_bucket_id\", body=channel.body()).execute()\n channel.update(resp)\n\n # Store the channel, keyed by 'channel.id'. Store it after being updated\n # since the resource_id value will now be correct, and that's needed to\n # stop a subscription.\n ...\n\n\nAn example Webhook implementation using webapp2. 
Note that webapp2 puts\nheaders in a case insensitive dictionary, as headers aren't guaranteed to\nalways be upper case.\n\n id = self.request.headers[X_GOOG_CHANNEL_ID]\n\n # Retrieve the channel by id.\n channel = ...\n\n # Parse notification from the headers, including validating the id.\n n = notification_from_headers(channel, self.request.headers)\n\n # Do app specific stuff with the notification here.\n if n.resource_state == 'sync':\n # Code to handle sync state.\n elif n.resource_state == 'exists':\n # Code to handle the exists state.\n elif n.resource_state == 'not_exists':\n # Code to handle the not exists state.\n\n\nExample of unsubscribing.\n\n service.channels().stop(channel.body())\n\"\"\"\nfrom __future__ import absolute_import\n\nimport datetime\nimport uuid\n\nfrom googleapiclient import errors\nimport six\n\n# Oauth2client < 3 has the positional helper in 'util', >= 3 has it\n# in '_helpers'.\ntry:\n from oauth2client import util\nexcept ImportError:\n from oauth2client import _helpers as util\n\n\n# The unix time epoch starts at midnight 1970.\nEPOCH = datetime.datetime.utcfromtimestamp(0)\n\n# Map the names of the parameters in the JSON channel description to\n# the parameter names we use in the Channel class.\nCHANNEL_PARAMS = {\n 'address': 'address',\n 'id': 'id',\n 'expiration': 'expiration',\n 'params': 'params',\n 'resourceId': 'resource_id',\n 'resourceUri': 'resource_uri',\n 'type': 'type',\n 'token': 'token',\n }\n\nX_GOOG_CHANNEL_ID = 'X-GOOG-CHANNEL-ID'\nX_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'\nX_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'\nX_GOOG_RESOURCE_URI = 'X-GOOG-RESOURCE-URI'\nX_GOOG_RESOURCE_ID = 'X-GOOG-RESOURCE-ID'\n\n\ndef _upper_header_keys(headers):\n new_headers = {}\n for k, v in six.iteritems(headers):\n new_headers[k.upper()] = v\n return new_headers\n\n\nclass Notification(object):\n \"\"\"A Notification from a Channel.\n\n Notifications are not usually constructed directly, but are returned\n from functions like notification_from_headers().\n\n Attributes:\n message_number: int, The unique id number of this notification.\n state: str, The state of the resource being monitored.\n uri: str, The address of the resource being monitored.\n resource_id: str, The unique identifier of the version of the resource at\n this event.\n \"\"\"\n @util.positional(5)\n def __init__(self, message_number, state, resource_uri, resource_id):\n \"\"\"Notification constructor.\n\n Args:\n message_number: int, The unique id number of this notification.\n state: str, The state of the resource being monitored. Can be one\n of \"exists\", \"not_exists\", or \"sync\".\n resource_uri: str, The address of the resource being monitored.\n resource_id: str, The identifier of the watched resource.\n \"\"\"\n self.message_number = message_number\n self.state = state\n self.resource_uri = resource_uri\n self.resource_id = resource_id\n\n\nclass Channel(object):\n \"\"\"A Channel for notifications.\n\n Usually not constructed directly, instead it is returned from helper\n functions like new_webhook_channel().\n\n Attributes:\n type: str, The type of delivery mechanism used by this channel. For\n example, 'web_hook'.\n id: str, A UUID for the channel.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each event delivered\n over this channel.\n address: str, The address of the receiving entity where events are\n delivered. 
Specific to the channel type.\n expiration: int, The time, in milliseconds from the epoch, when this\n channel will expire.\n params: dict, A dictionary of string to string, with additional parameters\n controlling delivery channel behavior.\n resource_id: str, An opaque id that identifies the resource that is\n being watched. Stable across different API versions.\n resource_uri: str, The canonicalized ID of the watched resource.\n \"\"\"\n\n @util.positional(5)\n def __init__(self, type, id, token, address, expiration=None,\n params=None, resource_id=\"\", resource_uri=\"\"):\n \"\"\"Create a new Channel.\n\n In user code, this Channel constructor will not typically be called\n manually since there are functions for creating channels for each specific\n type with a more customized set of arguments to pass.\n\n Args:\n type: str, The type of delivery mechanism used by this channel. For\n example, 'web_hook'.\n id: str, A UUID for the channel.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each event delivered\n over this channel.\n address: str, The address of the receiving entity where events are\n delivered. Specific to the channel type.\n expiration: int, The time, in milliseconds from the epoch, when this\n channel will expire.\n params: dict, A dictionary of string to string, with additional parameters\n controlling delivery channel behavior.\n resource_id: str, An opaque id that identifies the resource that is\n being watched. Stable across different API versions.\n resource_uri: str, The canonicalized ID of the watched resource.\n \"\"\"\n self.type = type\n self.id = id\n self.token = token\n self.address = address\n self.expiration = expiration\n self.params = params\n self.resource_id = resource_id\n self.resource_uri = resource_uri\n\n def body(self):\n \"\"\"Build a body from the Channel.\n\n Constructs a dictionary that's appropriate for passing into watch()\n methods as the value of body argument.\n\n Returns:\n A dictionary representation of the channel.\n \"\"\"\n result = {\n 'id': self.id,\n 'token': self.token,\n 'type': self.type,\n 'address': self.address\n }\n if self.params:\n result['params'] = self.params\n if self.resource_id:\n result['resourceId'] = self.resource_id\n if self.resource_uri:\n result['resourceUri'] = self.resource_uri\n if self.expiration:\n result['expiration'] = self.expiration\n\n return result\n\n def update(self, resp):\n \"\"\"Update a channel with information from the response of watch().\n\n When a request is sent to watch() a resource, the response returned\n from the watch() request is a dictionary with updated channel information,\n such as the resource_id, which is needed when stopping a subscription.\n\n Args:\n resp: dict, The response from a watch() method.\n \"\"\"\n for json_name, param_name in six.iteritems(CHANNEL_PARAMS):\n value = resp.get(json_name)\n if value is not None:\n setattr(self, param_name, value)\n\n\ndef notification_from_headers(channel, headers):\n \"\"\"Parse a notification from the webhook request headers, validate\n the notification, and return a Notification object.\n\n Args:\n channel: Channel, The channel that the notification is associated with.\n headers: dict, A dictionary like object that contains the request headers\n from the webhook HTTP request.\n\n Returns:\n A Notification object.\n\n Raises:\n errors.InvalidNotificationError if the notification is invalid.\n ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.\n 
\"\"\"\n headers = _upper_header_keys(headers)\n channel_id = headers[X_GOOG_CHANNEL_ID]\n if channel.id != channel_id:\n raise errors.InvalidNotificationError(\n 'Channel id mismatch: %s != %s' % (channel.id, channel_id))\n else:\n message_number = int(headers[X_GOOG_MESSAGE_NUMBER])\n state = headers[X_GOOG_RESOURCE_STATE]\n resource_uri = headers[X_GOOG_RESOURCE_URI]\n resource_id = headers[X_GOOG_RESOURCE_ID]\n return Notification(message_number, state, resource_uri, resource_id)\n\n\[email protected](2)\ndef new_webhook_channel(url, token=None, expiration=None, params=None):\n \"\"\"Create a new webhook Channel.\n\n Args:\n url: str, URL to post notifications to.\n token: str, An arbitrary string associated with the channel that\n is delivered to the target address with each notification delivered\n over this channel.\n expiration: datetime.datetime, A time in the future when the channel\n should expire. Can also be None if the subscription should use the\n default expiration. Note that different services may have different\n limits on how long a subscription lasts. Check the response from the\n watch() method to see the value the service has set for an expiration\n time.\n params: dict, Extra parameters to pass on channel creation. Currently\n not used for webhook channels.\n \"\"\"\n expiration_ms = 0\n if expiration:\n delta = expiration - EPOCH\n expiration_ms = delta.microseconds/1000 + (\n delta.seconds + delta.days*24*3600)*1000\n if expiration_ms < 0:\n expiration_ms = 0\n\n return Channel('web_hook', str(uuid.uuid4()),\n token, url, expiration=expiration_ms,\n params=params)\n\n", "path": "googleapiclient/channel.py"}]} |
gh_patches_debug_185 | rasdani/github-patches | git_diff | secdev__scapy-1330 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
getattr() in volatile.py throws an AttributeError when str() or hexdump() is used on a fuzz packet
Hello,
I have recently installed Scapy to use it to fuzz a CoAP server. I found a few threads related to this by @bsmelo and tried to recreate the results by building the fuzz packets the exact same way. But when I try to convert the resulting packet into a string using str(), I get an AttributeError when the getattr() function in volatile.py is called.
I have installed the latest version of scapy from the secdev/scapy repository.
I have not modified any of the files in scapy (coap.py, utils.py, volatile.py, etc.). Please help me identify the source of this error.
$ sudo python
Python 2.7.12 (default, Dec 4 2017, 14:50:18)
[GCC 5.4.0 20160609] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> from scapy.all import *
>>> from scapy.contrib.coap import *
>>>
>>>
>>>
>>> conf.L3socket = L3RawSocket
>>>
>>>
>>> fuzz_pattern = fuzz(CoAP(ver=1L, type=RandNum(0, 1), code=RandNum(0, 4), token=RandBin(RandNum(0, 8)), options=[(11L, 'core')], paymark='\xff'+str(RandBin())))
>>>
>>> s = str(fuzz_pattern)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/scapy/packet.py", line 350, in __str__
return str(self.build())
File "/usr/local/lib/python2.7/dist-packages/scapy/packet.py", line 460, in build
p = self.do_build()
File "/usr/local/lib/python2.7/dist-packages/scapy/packet.py", line 441, in do_build
self = next(iter(self))
File "/usr/local/lib/python2.7/dist-packages/scapy/packet.py", line 828, in loop
for x in loop(todo[:], done):
File "/usr/local/lib/python2.7/dist-packages/scapy/packet.py", line 820, in loop
elt = self.getfieldval(eltname)
File "/usr/local/lib/python2.7/dist-packages/scapy/contrib/coap.py", line 240, in getfieldval
if v:
File "/usr/local/lib/python2.7/dist-packages/scapy/volatile.py", line 127, in __nonzero__
return bool(self.value)
File "/usr/local/lib/python2.7/dist-packages/scapy/volatile.py", line 89, in __getattr__
return getattr(self._fix(), attr)
AttributeError: 'int' object has no attribute 'value'
>>>
--- END ISSUE ---
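
The failure chain in the traceback above can be reproduced outside scapy. The sketch below uses a made-up `MiniRand` class (an illustration only, not scapy code) that mirrors the two methods named in the traceback: `VolatileValue.__getattr__`, which delegates unknown attributes to the fixed value, and `RandNum.__nonzero__`, which reads a `value` attribute that is never defined, so the lookup falls through to `__getattr__` and lands on a plain `int`.

```python
# Minimal stand-in for the scapy classes named in the traceback (not scapy code).
class MiniRand(object):
    def _fix(self):
        return 4                        # RandNum._fix() returns a plain int

    def __getattr__(self, attr):
        # VolatileValue.__getattr__: delegate unknown attributes to the fixed value
        return getattr(self._fix(), attr)

    def __nonzero__(self):              # Python 2 truth testing
        return bool(self.value)         # 'value' does not exist -> __getattr__('value')
    __bool__ = __nonzero__              # same behaviour on Python 3

try:
    bool(MiniRand())                    # what str(fuzz_pattern) ends up triggering
except AttributeError as e:
    print(e)                            # 'int' object has no attribute 'value'
```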
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scapy/volatile.py`
Content:
```
1 ## This file is part of Scapy
2 ## See http://www.secdev.org/projects/scapy for more informations
3 ## Copyright (C) Philippe Biondi <[email protected]>
4 ## This program is published under a GPLv2 license
5
6 """
7 Fields that hold random numbers.
8 """
9
10 from __future__ import absolute_import
11 import random, time, math
12 from scapy.base_classes import Net
13 from scapy.compat import *
14 from scapy.utils import corrupt_bits, corrupt_bytes
15 from scapy.modules.six.moves import range
16
17 ####################
18 ## Random numbers ##
19 ####################
20
21
22 class RandomEnumeration:
23 """iterate through a sequence in random order.
24 When all the values have been drawn, if forever=1, the drawing is done again.
25 If renewkeys=0, the draw will be in the same order, guaranteeing that the same
26 number will be drawn in not less than the number of integers of the sequence"""
27
28 def __init__(self, inf, sup, seed=None, forever=1, renewkeys=0):
29 self.forever = forever
30 self.renewkeys = renewkeys
31 self.inf = inf
32 self.rnd = random.Random(seed)
33 self.sbox_size = 256
34
35 self.top = sup-inf+1
36
37 n=0
38 while (1<<n) < self.top:
39 n += 1
40 self.n =n
41
42 self.fs = min(3, (n+1)//2)
43 self.fsmask = 2**self.fs-1
44 self.rounds = max(self.n, 3)
45 self.turns = 0
46 self.i = 0
47
48 def __iter__(self):
49 return self
50
51 def next(self):
52 while True:
53 if self.turns == 0 or (self.i == 0 and self.renewkeys):
54 self.cnt_key = self.rnd.randint(0, 2**self.n-1)
55 self.sbox = [self.rnd.randint(0, self.fsmask)
56 for _ in range(self.sbox_size)]
57 self.turns += 1
58 while self.i < 2**self.n:
59 ct = self.i^self.cnt_key
60 self.i += 1
61 for _ in range(self.rounds): # Unbalanced Feistel Network
62 lsb = ct & self.fsmask
63 ct >>= self.fs
64 lsb ^= self.sbox[ct%self.sbox_size]
65 ct |= lsb << (self.n-self.fs)
66
67 if ct < self.top:
68 return self.inf+ct
69 self.i = 0
70 if not self.forever:
71 raise StopIteration
72 __next__ = next
73
74
75 class VolatileValue(object):
76 def __repr__(self):
77 return "<%s>" % self.__class__.__name__
78
79 def __eq__(self, other):
80 x = self._fix()
81 y = other._fix() if isinstance(other, VolatileValue) else other
82 if not isinstance(x, type(y)):
83 return False
84 return x == y
85
86 def __getattr__(self, attr):
87 if attr in ["__setstate__", "__getstate__"]:
88 raise AttributeError(attr)
89 return getattr(self._fix(), attr)
90
91 def __str__(self):
92 return str(self._fix())
93
94 def __bytes__(self):
95 return raw(self._fix())
96
97 def __len__(self):
98 return len(self._fix())
99
100 def _fix(self):
101 return None
102
103
104 class RandField(VolatileValue):
105 pass
106
107
108 class RandNum(RandField):
109 """Instances evaluate to random integers in selected range"""
110 min = 0
111 max = 0
112
113 def __init__(self, min, max):
114 self.min = min
115 self.max = max
116
117 def _fix(self):
118 return random.randrange(self.min, self.max+1)
119
120 def __int__(self):
121 return int(self._fix())
122
123 def __index__(self):
124 return int(self)
125
126 def __nonzero__(self):
127 return bool(self.value)
128 __bool__ = __nonzero__
129
130 def __add__(self, other):
131 return self._fix() + other
132
133 def __radd__(self, other):
134 return other + self._fix()
135
136 def __sub__(self, other):
137 return self._fix() - other
138
139 def __rsub__(self, other):
140 return other - self._fix()
141
142 def __mul__(self, other):
143 return self._fix() * other
144
145 def __rmul__(self, other):
146 return other * self._fix()
147
148 def __floordiv__(self, other):
149 return self._fix() / other
150 __div__ = __floordiv__
151
152 def __lt__(self, other):
153 return self._fix() < other
154
155 def __le__(self, other):
156 return self._fix() <= other
157
158 def __eq__(self, other):
159 return self._fix() == other
160
161 def __ne__(self, other):
162 return self._fix() != other
163
164 def __ge__(self, other):
165 return self._fix() >= other
166
167 def __gt__(self, other):
168 return self._fix() > other
169
170 def __lshift__(self, other):
171 return self._fix() << other
172
173 def __rshift__(self, other):
174 return self._fix() >> other
175
176 def __and__(self, other):
177 return self._fix() & other
178
179 def __rand__(self, other):
180 return other & self._fix()
181
182 def __or__(self, other):
183 return self._fix() | other
184
185 def __ror__(self, other):
186 return other | self._fix()
187
188
189 class RandNumGamma(RandNum):
190 def __init__(self, alpha, beta):
191 self.alpha = alpha
192 self.beta = beta
193
194 def _fix(self):
195 return int(round(random.gammavariate(self.alpha, self.beta)))
196
197
198 class RandNumGauss(RandNum):
199 def __init__(self, mu, sigma):
200 self.mu = mu
201 self.sigma = sigma
202
203 def _fix(self):
204 return int(round(random.gauss(self.mu, self.sigma)))
205
206
207 class RandNumExpo(RandNum):
208 def __init__(self, lambd, base=0):
209 self.lambd = lambd
210 self.base = base
211
212 def _fix(self):
213 return self.base+int(round(random.expovariate(self.lambd)))
214
215
216 class RandEnum(RandNum):
217 """Instances evaluate to integer sampling without replacement from the given interval"""
218
219 def __init__(self, min, max, seed=None):
220 self.seq = RandomEnumeration(min, max, seed)
221
222 def _fix(self):
223 return next(self.seq)
224
225
226 class RandByte(RandNum):
227 def __init__(self):
228 RandNum.__init__(self, 0, 2**8-1)
229
230
231 class RandSByte(RandNum):
232 def __init__(self):
233 RandNum.__init__(self, -2**7, 2**7-1)
234
235
236 class RandShort(RandNum):
237 def __init__(self):
238 RandNum.__init__(self, 0, 2**16-1)
239
240
241 class RandSShort(RandNum):
242 def __init__(self):
243 RandNum.__init__(self, -2**15, 2**15-1)
244
245
246 class RandInt(RandNum):
247 def __init__(self):
248 RandNum.__init__(self, 0, 2**32-1)
249
250
251 class RandSInt(RandNum):
252 def __init__(self):
253 RandNum.__init__(self, -2**31, 2**31-1)
254
255
256 class RandLong(RandNum):
257 def __init__(self):
258 RandNum.__init__(self, 0, 2**64-1)
259
260
261 class RandSLong(RandNum):
262 def __init__(self):
263 RandNum.__init__(self, -2**63, 2**63-1)
264
265
266 class RandEnumByte(RandEnum):
267 def __init__(self):
268 RandEnum.__init__(self, 0, 2**8-1)
269
270
271 class RandEnumSByte(RandEnum):
272 def __init__(self):
273 RandEnum.__init__(self, -2**7, 2**7-1)
274
275
276 class RandEnumShort(RandEnum):
277 def __init__(self):
278 RandEnum.__init__(self, 0, 2**16-1)
279
280
281 class RandEnumSShort(RandEnum):
282 def __init__(self):
283 RandEnum.__init__(self, -2**15, 2**15-1)
284
285
286 class RandEnumInt(RandEnum):
287 def __init__(self):
288 RandEnum.__init__(self, 0, 2**32-1)
289
290
291 class RandEnumSInt(RandEnum):
292 def __init__(self):
293 RandEnum.__init__(self, -2**31, 2**31-1)
294
295
296 class RandEnumLong(RandEnum):
297 def __init__(self):
298 RandEnum.__init__(self, 0, 2**64-1)
299
300
301 class RandEnumSLong(RandEnum):
302 def __init__(self):
303 RandEnum.__init__(self, -2**63, 2**63-1)
304
305
306 class RandEnumKeys(RandEnum):
307 """Picks a random value from dict keys list. """
308
309 def __init__(self, enum, seed=None):
310 self.enum = list(enum)
311 self.seq = RandomEnumeration(0, len(self.enum) - 1, seed)
312
313 def _fix(self):
314 return self.enum[next(self.seq)]
315
316
317 class RandChoice(RandField):
318 def __init__(self, *args):
319 if not args:
320 raise TypeError("RandChoice needs at least one choice")
321 self._choice = args
322
323 def _fix(self):
324 return random.choice(self._choice)
325
326
327 class RandString(RandField):
328 def __init__(self, size=None, chars=b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"):
329 if size is None:
330 size = RandNumExpo(0.01)
331 self.size = size
332 self.chars = chars
333
334 def _fix(self):
335 s = b""
336 for _ in range(self.size):
337 s += chb(random.choice(self.chars))
338 return s
339
340 def __str__(self):
341 return plain_str(self._fix())
342
343 def __bytes__(self):
344 return raw(self._fix())
345
346 def __mul__(self, n):
347 return self._fix()*n
348
349
350 class RandBin(RandString):
351 def __init__(self, size=None):
352 super(RandBin, self).__init__(size=size, chars=b"".join(chb(c) for c in range(256)))
353
354
355 class RandTermString(RandBin):
356 def __init__(self, size, term):
357 self.term = raw(term)
358 super(RandTermString, self).__init__(size=size)
359
360 def _fix(self):
361 return RandBin._fix(self)+self.term
362
363
364 class RandIP(RandString):
365 def __init__(self, iptemplate="0.0.0.0/0"):
366 self.ip = Net(iptemplate)
367
368 def _fix(self):
369 return self.ip.choice()
370
371
372 class RandMAC(RandString):
373 def __init__(self, template="*"):
374 template += ":*:*:*:*:*"
375 template = template.split(":")
376 self.mac = ()
377 for i in range(6):
378 if template[i] == "*":
379 v = RandByte()
380 elif "-" in template[i]:
381 x, y = template[i].split("-")
382 v = RandNum(int(x, 16), int(y, 16))
383 else:
384 v = int(template[i], 16)
385 self.mac += (v,)
386
387 def _fix(self):
388 return "%02x:%02x:%02x:%02x:%02x:%02x" % self.mac
389
390
391 class RandIP6(RandString):
392 def __init__(self, ip6template="**"):
393 self.tmpl = ip6template
394 self.sp = self.tmpl.split(":")
395 for i, v in enumerate(self.sp):
396 if not v or v == "**":
397 continue
398 if "-" in v:
399 a, b = v.split("-")
400 elif v == "*":
401 a=b=""
402 else:
403 a=b=v
404
405 if not a:
406 a = "0"
407 if not b:
408 b = "ffff"
409 if a==b:
410 self.sp[i] = int(a, 16)
411 else:
412 self.sp[i] = RandNum(int(a, 16), int(b, 16))
413 self.variable = "" in self.sp
414 self.multi = self.sp.count("**")
415
416 def _fix(self):
417 done = 0
418 nbm = self.multi
419 ip = []
420 for i, n in enumerate(self.sp):
421 if n == "**":
422 nbm -= 1
423 remain = 8-(len(self.sp)-i-1)-len(ip)+nbm
424 if "" in self.sp:
425 remain += 1
426 if nbm or self.variable:
427 remain = random.randint(0, remain)
428 for j in range(remain):
429 ip.append("%04x" % random.randint(0, 65535))
430 elif isinstance(n, RandNum):
431 ip.append("%04x" % n)
432 elif n == 0:
433 ip.append("0")
434 elif not n:
435 ip.append("")
436 else:
437 ip.append("%04x" % n)
438 if len(ip) == 9:
439 ip.remove("")
440 if ip[-1] == "":
441 ip[-1] = "0"
442 return ":".join(ip)
443
444
445 class RandOID(RandString):
446 def __init__(self, fmt=None, depth=RandNumExpo(0.1), idnum=RandNumExpo(0.01)):
447 self.ori_fmt = fmt
448 if fmt is not None:
449 fmt = fmt.split(".")
450 for i in range(len(fmt)):
451 if "-" in fmt[i]:
452 fmt[i] = tuple(map(int, fmt[i].split("-")))
453 self.fmt = fmt
454 self.depth = depth
455 self.idnum = idnum
456
457 def __repr__(self):
458 if self.ori_fmt is None:
459 return "<%s>" % self.__class__.__name__
460 else:
461 return "<%s [%s]>" % (self.__class__.__name__, self.ori_fmt)
462
463 def _fix(self):
464 if self.fmt is None:
465 return ".".join(str(self.idnum) for _ in range(1 + self.depth))
466 else:
467 oid = []
468 for i in self.fmt:
469 if i == "*":
470 oid.append(str(self.idnum))
471 elif i == "**":
472 oid += [str(self.idnum) for i in range(1 + self.depth)]
473 elif isinstance(i, tuple):
474 oid.append(str(random.randrange(*i)))
475 else:
476 oid.append(i)
477 return ".".join(oid)
478
479
480 class RandRegExp(RandField):
481 def __init__(self, regexp, lambda_=0.3,):
482 self._regexp = regexp
483 self._lambda = lambda_
484
485 @staticmethod
486 def choice_expand(s): #XXX does not support special sets like (ex ':alnum:')
487 m = ""
488 invert = s and s[0] == "^"
489 while True:
490 p = s.find("-")
491 if p < 0:
492 break
493 if p == 0 or p == len(s)-1:
494 m = "-"
495 if p:
496 s = s[:-1]
497 else:
498 s = s[1:]
499 else:
500 c1 = s[p-1]
501 c2 = s[p+1]
502 rng = "".join(map(chr, range(ord(c1), ord(c2)+1)))
503 s = s[:p-1]+rng+s[p+1:]
504 res = m+s
505 if invert:
506 res = "".join(chr(x) for x in range(256) if chr(x) not in res)
507 return res
508
509 @staticmethod
510 def stack_fix(lst, index):
511 r = ""
512 mul = 1
513 for e in lst:
514 if isinstance(e, list):
515 if mul != 1:
516 mul = mul-1
517 r += RandRegExp.stack_fix(e[1:]*mul, index)
518 # only the last iteration should be kept for back reference
519 f = RandRegExp.stack_fix(e[1:], index)
520 for i, idx in enumerate(index):
521 if e is idx:
522 index[i] = f
523 r += f
524 mul = 1
525 elif isinstance(e, tuple):
526 kind, val = e
527 if kind == "cite":
528 r += index[val-1]
529 elif kind == "repeat":
530 mul = val
531
532 elif kind == "choice":
533 if mul == 1:
534 c = random.choice(val)
535 r += RandRegExp.stack_fix(c[1:], index)
536 else:
537 r += RandRegExp.stack_fix([e]*mul, index)
538 mul = 1
539 else:
540 if mul != 1:
541 r += RandRegExp.stack_fix([e]*mul, index)
542 mul = 1
543 else:
544 r += str(e)
545 return r
546
547 def _fix(self):
548 stack = [None]
549 index = []
550 current = stack
551 i = 0
552 ln = len(self._regexp)
553 interp = True
554 while i < ln:
555 c = self._regexp[i]
556 i+=1
557
558 if c == '(':
559 current = [current]
560 current[0].append(current)
561 elif c == '|':
562 p = current[0]
563 ch = p[-1]
564 if not isinstance(ch, tuple):
565 ch = ("choice", [current])
566 p[-1] = ch
567 else:
568 ch[1].append(current)
569 current = [p]
570 elif c == ')':
571 ch = current[0][-1]
572 if isinstance(ch, tuple):
573 ch[1].append(current)
574 index.append(current)
575 current = current[0]
576 elif c == '[' or c == '{':
577 current = [current]
578 current[0].append(current)
579 interp = False
580 elif c == ']':
581 current = current[0]
582 choice = RandRegExp.choice_expand("".join(current.pop()[1:]))
583 current.append(RandChoice(*list(choice)))
584 interp = True
585 elif c == '}':
586 current = current[0]
587 num = "".join(current.pop()[1:])
588 e = current.pop()
589 if "," not in num:
590 n = int(num)
591 current.append([current]+[e]*n)
592 else:
593 num_min, num_max = num.split(",")
594 if not num_min:
595 num_min = "0"
596 if num_max:
597 n = RandNum(int(num_min), int(num_max))
598 else:
599 n = RandNumExpo(self._lambda, base=int(num_min))
600 current.append(("repeat", n))
601 current.append(e)
602 interp = True
603 elif c == '\\':
604 c = self._regexp[i]
605 if c == "s":
606 c = RandChoice(" ", "\t")
607 elif c in "0123456789":
608 c = ("cite", ord(c)-0x30)
609 current.append(c)
610 i += 1
611 elif not interp:
612 current.append(c)
613 elif c == '+':
614 e = current.pop()
615 current.append([current]+[e]*(int(random.expovariate(self._lambda))+1))
616 elif c == '*':
617 e = current.pop()
618 current.append([current]+[e]*int(random.expovariate(self._lambda)))
619 elif c == '?':
620 if random.randint(0, 1):
621 current.pop()
622 elif c == '.':
623 current.append(RandChoice(*[chr(x) for x in range(256)]))
624 elif c == '$' or c == '^':
625 pass
626 else:
627 current.append(c)
628
629 return RandRegExp.stack_fix(stack[1:], index)
630
631 def __repr__(self):
632 return "<%s [%r]>" % (self.__class__.__name__, self._regexp)
633
634
635 class RandSingularity(RandChoice):
636 pass
637
638
639 class RandSingNum(RandSingularity):
640 @staticmethod
641 def make_power_of_two(end):
642 sign = 1
643 if end == 0:
644 end = 1
645 if end < 0:
646 end = -end
647 sign = -1
648 end_n = int(math.log(end)/math.log(2))+1
649 return {sign*2**i for i in range(end_n)}
650
651 def __init__(self, mn, mx):
652 sing = {0, mn, mx, int((mn+mx)/2)}
653 sing |= self.make_power_of_two(mn)
654 sing |= self.make_power_of_two(mx)
655 for i in sing.copy():
656 sing.add(i+1)
657 sing.add(i-1)
658 for i in sing.copy():
659 if not mn <= i <= mx:
660 sing.remove(i)
661 self._choice = list(sing)
662 self._choice.sort()
663
664
665 class RandSingByte(RandSingNum):
666 def __init__(self):
667 RandSingNum.__init__(self, 0, 2**8-1)
668
669
670 class RandSingSByte(RandSingNum):
671 def __init__(self):
672 RandSingNum.__init__(self, -2**7, 2**7-1)
673
674
675 class RandSingShort(RandSingNum):
676 def __init__(self):
677 RandSingNum.__init__(self, 0, 2**16-1)
678
679
680 class RandSingSShort(RandSingNum):
681 def __init__(self):
682 RandSingNum.__init__(self, -2**15, 2**15-1)
683
684
685 class RandSingInt(RandSingNum):
686 def __init__(self):
687 RandSingNum.__init__(self, 0, 2**32-1)
688
689
690 class RandSingSInt(RandSingNum):
691 def __init__(self):
692 RandSingNum.__init__(self, -2**31, 2**31-1)
693
694
695 class RandSingLong(RandSingNum):
696 def __init__(self):
697 RandSingNum.__init__(self, 0, 2**64-1)
698
699
700 class RandSingSLong(RandSingNum):
701 def __init__(self):
702 RandSingNum.__init__(self, -2**63, 2**63-1)
703
704
705 class RandSingString(RandSingularity):
706 def __init__(self):
707 self._choice = ["",
708 "%x",
709 "%%",
710 "%s",
711 "%i",
712 "%n",
713 "%x%x%x%x%x%x%x%x%x",
714 "%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s",
715 "%",
716 "%%%",
717 "A"*4096,
718 b"\x00"*4096,
719 b"\xff"*4096,
720 b"\x7f"*4096,
721 b"\x80"*4096,
722 " "*4096,
723 "\\"*4096,
724 "("*4096,
725 "../"*1024,
726 "/"*1024,
727 "${HOME}"*512,
728 " or 1=1 --",
729 "' or 1=1 --",
730 '" or 1=1 --',
731 " or 1=1; #",
732 "' or 1=1; #",
733 '" or 1=1; #',
734 ";reboot;",
735 "$(reboot)",
736 "`reboot`",
737 "index.php%00",
738 b"\x00",
739 "%00",
740 "\\",
741 "../../../../../../../../../../../../../../../../../etc/passwd",
742 "%2e%2e%2f" * 20 + "etc/passwd",
743 "%252e%252e%252f" * 20 + "boot.ini",
744 "..%c0%af" * 20 + "etc/passwd",
745 "..%c0%af" * 20 + "boot.ini",
746 "//etc/passwd",
747 r"..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\..\boot.ini",
748 "AUX:",
749 "CLOCK$",
750 "COM:",
751 "CON:",
752 "LPT:",
753 "LST:",
754 "NUL:",
755 "CON:",
756 r"C:\CON\CON",
757 r"C:\boot.ini",
758 r"\\myserver\share",
759 "foo.exe:",
760 "foo.exe\\", ]
761
762 def __str__(self):
763 return str(self._fix())
764
765 def __bytes__(self):
766 return raw(self._fix())
767
768
769 class RandPool(RandField):
770 def __init__(self, *args):
771 """Each parameter is a volatile object or a couple (volatile object, weight)"""
772 pool = []
773 for p in args:
774 w = 1
775 if isinstance(p, tuple):
776 p, w = p
777 pool += [p]*w
778 self._pool = pool
779
780 def _fix(self):
781 r = random.choice(self._pool)
782 return r._fix()
783
784 # Automatic timestamp
785
786
787 class AutoTime(VolatileValue):
788 def __init__(self, base=None):
789 if base == None:
790 self.diff = 0
791 else:
792 self.diff = time.time()-base
793
794 def _fix(self):
795 return time.time()-self.diff
796
797
798 class IntAutoTime(AutoTime):
799 def _fix(self):
800 return int(time.time()-self.diff)
801
802
803 class ZuluTime(AutoTime):
804 def __init__(self, diff=0):
805 self.diff = diff
806
807 def _fix(self):
808 return time.strftime("%y%m%d%H%M%SZ",
809 time.gmtime(time.time() + self.diff))
810
811
812 class GeneralizedTime(AutoTime):
813 def __init__(self, diff=0):
814 self.diff = diff
815
816 def _fix(self):
817 return time.strftime("%Y%m%d%H%M%SZ",
818 time.gmtime(time.time() + self.diff))
819
820
821 class DelayedEval(VolatileValue):
822 """ Example of usage: DelayedEval("time.time()") """
823
824 def __init__(self, expr):
825 self.expr = expr
826
827 def _fix(self):
828 return eval(self.expr)
829
830
831 class IncrementalValue(VolatileValue):
832 def __init__(self, start=0, step=1, restart=-1):
833 self.start = self.val = start
834 self.step = step
835 self.restart = restart
836
837 def _fix(self):
838 v = self.val
839 if self.val == self.restart:
840 self.val = self.start
841 else:
842 self.val += self.step
843 return v
844
845
846 class CorruptedBytes(VolatileValue):
847 def __init__(self, s, p=0.01, n=None):
848 self.s = s
849 self.p = p
850 self.n = n
851
852 def _fix(self):
853 return corrupt_bytes(self.s, self.p, self.n)
854
855
856 class CorruptedBits(CorruptedBytes):
857 def _fix(self):
858 return corrupt_bits(self.s, self.p, self.n)
859
860
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scapy/volatile.py b/scapy/volatile.py
--- a/scapy/volatile.py
+++ b/scapy/volatile.py
@@ -124,7 +124,7 @@
return int(self)
def __nonzero__(self):
- return bool(self.value)
+ return bool(self._fix())
__bool__ = __nonzero__
def __add__(self, other):
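
The one-line change replaces the lookup of a `value` attribute that `VolatileValue` never defines with a call to `_fix()`, so truth-testing a volatile number first draws a concrete integer and then tests that. A small usage sketch, assuming a scapy installation that already carries this patch:

```python
# Assumes scapy with the patched RandNum.__nonzero__/__bool__ installed.
from scapy.volatile import RandNum

n = RandNum(0, 4)
if n:                     # now evaluates bool(n._fix()) instead of bool(n.value)
    print("drew a non-zero value")
else:
    print("drew zero")
```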
| {"golden_diff": "diff --git a/scapy/volatile.py b/scapy/volatile.py\n--- a/scapy/volatile.py\n+++ b/scapy/volatile.py\n@@ -124,7 +124,7 @@\n return int(self)\n \n def __nonzero__(self):\n- return bool(self.value)\n+ return bool(self._fix())\n __bool__ = __nonzero__\n \n def __add__(self, other):\n", "issue": "getattr() in volatile.py throws an AttributeError when str() or hexdump() is used on fuzz packet\nHello, \r\nI have recently installed Scapy to use it to fuzz a CoAP server. I found a few threads related to this by @bsmelo and tried to recreate the results by building the fuzz packets the exact same way. But when I try to convert it into a string using str(), I get an AttributeError when the getattr() function in volatile.py is called. \r\nI have installed the latest version of scapy from the secdev/scapy repository.\r\nI have not modified any of the files in scapy (coap.py , utils.py , volatile.py etc) Please help me identify the source of this error.\r\n\r\n$ sudo python\r\nPython 2.7.12 (default, Dec 4 2017, 14:50:18) \r\n[GCC 5.4.0 20160609] on linux2\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> from scapy.all import *\r\n>>> from scapy.contrib.coap import *\r\n>>> \r\n>>> \r\n>>> \r\n>>> conf.L3socket = L3RawSocket\r\n>>> \r\n>>> \r\n>>> fuzz_pattern = fuzz(CoAP(ver=1L, type=RandNum(0, 1), code=RandNum(0, 4), token=RandBin(RandNum(0, 8)), options=[(11L, 'core')], paymark='\\xff'+str(RandBin())))\r\n>>> \r\n>>> s = str(fuzz_pattern)\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"/usr/local/lib/python2.7/dist-packages/scapy/packet.py\", line 350, in __str__\r\n return str(self.build())\r\n File \"/usr/local/lib/python2.7/dist-packages/scapy/packet.py\", line 460, in build\r\n p = self.do_build()\r\n File \"/usr/local/lib/python2.7/dist-packages/scapy/packet.py\", line 441, in do_build\r\n self = next(iter(self))\r\n File \"/usr/local/lib/python2.7/dist-packages/scapy/packet.py\", line 828, in loop\r\n for x in loop(todo[:], done):\r\n File \"/usr/local/lib/python2.7/dist-packages/scapy/packet.py\", line 820, in loop\r\n elt = self.getfieldval(eltname)\r\n File \"/usr/local/lib/python2.7/dist-packages/scapy/contrib/coap.py\", line 240, in getfieldval\r\n if v:\r\n File \"/usr/local/lib/python2.7/dist-packages/scapy/volatile.py\", line 127, in __nonzero__\r\n return bool(self.value)\r\n File \"/usr/local/lib/python2.7/dist-packages/scapy/volatile.py\", line 89, in __getattr__\r\n return getattr(self._fix(), attr)\r\nAttributeError: 'int' object has no attribute 'value'\r\n>>> \r\n\n", "before_files": [{"content": "## This file is part of Scapy\n## See http://www.secdev.org/projects/scapy for more informations\n## Copyright (C) Philippe Biondi <[email protected]>\n## This program is published under a GPLv2 license\n\n\"\"\"\nFields that hold random numbers.\n\"\"\"\n\nfrom __future__ import absolute_import\nimport random, time, math\nfrom scapy.base_classes import Net\nfrom scapy.compat import *\nfrom scapy.utils import corrupt_bits, corrupt_bytes\nfrom scapy.modules.six.moves import range\n\n####################\n## Random numbers ##\n####################\n\n\nclass RandomEnumeration:\n \"\"\"iterate through a sequence in random order.\n When all the values have been drawn, if forever=1, the drawing is done again.\n If renewkeys=0, the draw will be in the same order, guaranteeing that the same\n number will be drawn in not less than the number of integers of the sequence\"\"\"\n\n def 
__init__(self, inf, sup, seed=None, forever=1, renewkeys=0):\n self.forever = forever\n self.renewkeys = renewkeys\n self.inf = inf\n self.rnd = random.Random(seed)\n self.sbox_size = 256\n\n self.top = sup-inf+1\n\n n=0\n while (1<<n) < self.top:\n n += 1\n self.n =n\n\n self.fs = min(3, (n+1)//2)\n self.fsmask = 2**self.fs-1\n self.rounds = max(self.n, 3)\n self.turns = 0\n self.i = 0\n\n def __iter__(self):\n return self\n\n def next(self):\n while True:\n if self.turns == 0 or (self.i == 0 and self.renewkeys):\n self.cnt_key = self.rnd.randint(0, 2**self.n-1)\n self.sbox = [self.rnd.randint(0, self.fsmask)\n for _ in range(self.sbox_size)]\n self.turns += 1\n while self.i < 2**self.n:\n ct = self.i^self.cnt_key\n self.i += 1\n for _ in range(self.rounds): # Unbalanced Feistel Network\n lsb = ct & self.fsmask\n ct >>= self.fs\n lsb ^= self.sbox[ct%self.sbox_size]\n ct |= lsb << (self.n-self.fs)\n\n if ct < self.top:\n return self.inf+ct\n self.i = 0\n if not self.forever:\n raise StopIteration\n __next__ = next\n\n\nclass VolatileValue(object):\n def __repr__(self):\n return \"<%s>\" % self.__class__.__name__\n\n def __eq__(self, other):\n x = self._fix()\n y = other._fix() if isinstance(other, VolatileValue) else other\n if not isinstance(x, type(y)):\n return False\n return x == y\n\n def __getattr__(self, attr):\n if attr in [\"__setstate__\", \"__getstate__\"]:\n raise AttributeError(attr)\n return getattr(self._fix(), attr)\n\n def __str__(self):\n return str(self._fix())\n\n def __bytes__(self):\n return raw(self._fix())\n\n def __len__(self):\n return len(self._fix())\n\n def _fix(self):\n return None\n\n\nclass RandField(VolatileValue):\n pass\n\n\nclass RandNum(RandField):\n \"\"\"Instances evaluate to random integers in selected range\"\"\"\n min = 0\n max = 0\n\n def __init__(self, min, max):\n self.min = min\n self.max = max\n\n def _fix(self):\n return random.randrange(self.min, self.max+1)\n\n def __int__(self):\n return int(self._fix())\n\n def __index__(self):\n return int(self)\n\n def __nonzero__(self):\n return bool(self.value)\n __bool__ = __nonzero__\n\n def __add__(self, other):\n return self._fix() + other\n\n def __radd__(self, other):\n return other + self._fix()\n\n def __sub__(self, other):\n return self._fix() - other\n\n def __rsub__(self, other):\n return other - self._fix()\n\n def __mul__(self, other):\n return self._fix() * other\n\n def __rmul__(self, other):\n return other * self._fix()\n\n def __floordiv__(self, other):\n return self._fix() / other\n __div__ = __floordiv__\n\n def __lt__(self, other):\n return self._fix() < other\n\n def __le__(self, other):\n return self._fix() <= other\n\n def __eq__(self, other):\n return self._fix() == other\n\n def __ne__(self, other):\n return self._fix() != other\n\n def __ge__(self, other):\n return self._fix() >= other\n\n def __gt__(self, other):\n return self._fix() > other\n\n def __lshift__(self, other):\n return self._fix() << other\n\n def __rshift__(self, other):\n return self._fix() >> other\n\n def __and__(self, other):\n return self._fix() & other\n\n def __rand__(self, other):\n return other & self._fix()\n\n def __or__(self, other):\n return self._fix() | other\n\n def __ror__(self, other):\n return other | self._fix()\n\n\nclass RandNumGamma(RandNum):\n def __init__(self, alpha, beta):\n self.alpha = alpha\n self.beta = beta\n\n def _fix(self):\n return int(round(random.gammavariate(self.alpha, self.beta)))\n\n\nclass RandNumGauss(RandNum):\n def __init__(self, mu, sigma):\n self.mu = mu\n 
self.sigma = sigma\n\n def _fix(self):\n return int(round(random.gauss(self.mu, self.sigma)))\n\n\nclass RandNumExpo(RandNum):\n def __init__(self, lambd, base=0):\n self.lambd = lambd\n self.base = base\n\n def _fix(self):\n return self.base+int(round(random.expovariate(self.lambd)))\n\n\nclass RandEnum(RandNum):\n \"\"\"Instances evaluate to integer sampling without replacement from the given interval\"\"\"\n\n def __init__(self, min, max, seed=None):\n self.seq = RandomEnumeration(min, max, seed)\n\n def _fix(self):\n return next(self.seq)\n\n\nclass RandByte(RandNum):\n def __init__(self):\n RandNum.__init__(self, 0, 2**8-1)\n\n\nclass RandSByte(RandNum):\n def __init__(self):\n RandNum.__init__(self, -2**7, 2**7-1)\n\n\nclass RandShort(RandNum):\n def __init__(self):\n RandNum.__init__(self, 0, 2**16-1)\n\n\nclass RandSShort(RandNum):\n def __init__(self):\n RandNum.__init__(self, -2**15, 2**15-1)\n\n\nclass RandInt(RandNum):\n def __init__(self):\n RandNum.__init__(self, 0, 2**32-1)\n\n\nclass RandSInt(RandNum):\n def __init__(self):\n RandNum.__init__(self, -2**31, 2**31-1)\n\n\nclass RandLong(RandNum):\n def __init__(self):\n RandNum.__init__(self, 0, 2**64-1)\n\n\nclass RandSLong(RandNum):\n def __init__(self):\n RandNum.__init__(self, -2**63, 2**63-1)\n\n\nclass RandEnumByte(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, 0, 2**8-1)\n\n\nclass RandEnumSByte(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, -2**7, 2**7-1)\n\n\nclass RandEnumShort(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, 0, 2**16-1)\n\n\nclass RandEnumSShort(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, -2**15, 2**15-1)\n\n\nclass RandEnumInt(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, 0, 2**32-1)\n\n\nclass RandEnumSInt(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, -2**31, 2**31-1)\n\n\nclass RandEnumLong(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, 0, 2**64-1)\n\n\nclass RandEnumSLong(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, -2**63, 2**63-1)\n\n\nclass RandEnumKeys(RandEnum):\n \"\"\"Picks a random value from dict keys list. 
\"\"\"\n\n def __init__(self, enum, seed=None):\n self.enum = list(enum)\n self.seq = RandomEnumeration(0, len(self.enum) - 1, seed)\n\n def _fix(self):\n return self.enum[next(self.seq)]\n\n\nclass RandChoice(RandField):\n def __init__(self, *args):\n if not args:\n raise TypeError(\"RandChoice needs at least one choice\")\n self._choice = args\n\n def _fix(self):\n return random.choice(self._choice)\n\n\nclass RandString(RandField):\n def __init__(self, size=None, chars=b\"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789\"):\n if size is None:\n size = RandNumExpo(0.01)\n self.size = size\n self.chars = chars\n\n def _fix(self):\n s = b\"\"\n for _ in range(self.size):\n s += chb(random.choice(self.chars))\n return s\n\n def __str__(self):\n return plain_str(self._fix())\n\n def __bytes__(self):\n return raw(self._fix())\n\n def __mul__(self, n):\n return self._fix()*n\n\n\nclass RandBin(RandString):\n def __init__(self, size=None):\n super(RandBin, self).__init__(size=size, chars=b\"\".join(chb(c) for c in range(256)))\n\n\nclass RandTermString(RandBin):\n def __init__(self, size, term):\n self.term = raw(term)\n super(RandTermString, self).__init__(size=size)\n\n def _fix(self):\n return RandBin._fix(self)+self.term\n\n\nclass RandIP(RandString):\n def __init__(self, iptemplate=\"0.0.0.0/0\"):\n self.ip = Net(iptemplate)\n\n def _fix(self):\n return self.ip.choice()\n\n\nclass RandMAC(RandString):\n def __init__(self, template=\"*\"):\n template += \":*:*:*:*:*\"\n template = template.split(\":\")\n self.mac = ()\n for i in range(6):\n if template[i] == \"*\":\n v = RandByte()\n elif \"-\" in template[i]:\n x, y = template[i].split(\"-\")\n v = RandNum(int(x, 16), int(y, 16))\n else:\n v = int(template[i], 16)\n self.mac += (v,)\n\n def _fix(self):\n return \"%02x:%02x:%02x:%02x:%02x:%02x\" % self.mac\n\n\nclass RandIP6(RandString):\n def __init__(self, ip6template=\"**\"):\n self.tmpl = ip6template\n self.sp = self.tmpl.split(\":\")\n for i, v in enumerate(self.sp):\n if not v or v == \"**\":\n continue\n if \"-\" in v:\n a, b = v.split(\"-\")\n elif v == \"*\":\n a=b=\"\"\n else:\n a=b=v\n\n if not a:\n a = \"0\"\n if not b:\n b = \"ffff\"\n if a==b:\n self.sp[i] = int(a, 16)\n else:\n self.sp[i] = RandNum(int(a, 16), int(b, 16))\n self.variable = \"\" in self.sp\n self.multi = self.sp.count(\"**\")\n\n def _fix(self):\n done = 0\n nbm = self.multi\n ip = []\n for i, n in enumerate(self.sp):\n if n == \"**\":\n nbm -= 1\n remain = 8-(len(self.sp)-i-1)-len(ip)+nbm\n if \"\" in self.sp:\n remain += 1\n if nbm or self.variable:\n remain = random.randint(0, remain)\n for j in range(remain):\n ip.append(\"%04x\" % random.randint(0, 65535))\n elif isinstance(n, RandNum):\n ip.append(\"%04x\" % n)\n elif n == 0:\n ip.append(\"0\")\n elif not n:\n ip.append(\"\")\n else:\n ip.append(\"%04x\" % n)\n if len(ip) == 9:\n ip.remove(\"\")\n if ip[-1] == \"\":\n ip[-1] = \"0\"\n return \":\".join(ip)\n\n\nclass RandOID(RandString):\n def __init__(self, fmt=None, depth=RandNumExpo(0.1), idnum=RandNumExpo(0.01)):\n self.ori_fmt = fmt\n if fmt is not None:\n fmt = fmt.split(\".\")\n for i in range(len(fmt)):\n if \"-\" in fmt[i]:\n fmt[i] = tuple(map(int, fmt[i].split(\"-\")))\n self.fmt = fmt\n self.depth = depth\n self.idnum = idnum\n\n def __repr__(self):\n if self.ori_fmt is None:\n return \"<%s>\" % self.__class__.__name__\n else:\n return \"<%s [%s]>\" % (self.__class__.__name__, self.ori_fmt)\n\n def _fix(self):\n if self.fmt is None:\n return \".\".join(str(self.idnum) for _ in 
range(1 + self.depth))\n else:\n oid = []\n for i in self.fmt:\n if i == \"*\":\n oid.append(str(self.idnum))\n elif i == \"**\":\n oid += [str(self.idnum) for i in range(1 + self.depth)]\n elif isinstance(i, tuple):\n oid.append(str(random.randrange(*i)))\n else:\n oid.append(i)\n return \".\".join(oid)\n\n\nclass RandRegExp(RandField):\n def __init__(self, regexp, lambda_=0.3,):\n self._regexp = regexp\n self._lambda = lambda_\n\n @staticmethod\n def choice_expand(s): #XXX does not support special sets like (ex ':alnum:')\n m = \"\"\n invert = s and s[0] == \"^\"\n while True:\n p = s.find(\"-\")\n if p < 0:\n break\n if p == 0 or p == len(s)-1:\n m = \"-\"\n if p:\n s = s[:-1]\n else:\n s = s[1:]\n else:\n c1 = s[p-1]\n c2 = s[p+1]\n rng = \"\".join(map(chr, range(ord(c1), ord(c2)+1)))\n s = s[:p-1]+rng+s[p+1:]\n res = m+s\n if invert:\n res = \"\".join(chr(x) for x in range(256) if chr(x) not in res)\n return res\n\n @staticmethod\n def stack_fix(lst, index):\n r = \"\"\n mul = 1\n for e in lst:\n if isinstance(e, list):\n if mul != 1:\n mul = mul-1\n r += RandRegExp.stack_fix(e[1:]*mul, index)\n # only the last iteration should be kept for back reference\n f = RandRegExp.stack_fix(e[1:], index)\n for i, idx in enumerate(index):\n if e is idx:\n index[i] = f\n r += f\n mul = 1\n elif isinstance(e, tuple):\n kind, val = e\n if kind == \"cite\":\n r += index[val-1]\n elif kind == \"repeat\":\n mul = val\n\n elif kind == \"choice\":\n if mul == 1:\n c = random.choice(val)\n r += RandRegExp.stack_fix(c[1:], index)\n else:\n r += RandRegExp.stack_fix([e]*mul, index)\n mul = 1\n else:\n if mul != 1:\n r += RandRegExp.stack_fix([e]*mul, index)\n mul = 1\n else:\n r += str(e)\n return r\n\n def _fix(self):\n stack = [None]\n index = []\n current = stack\n i = 0\n ln = len(self._regexp)\n interp = True\n while i < ln:\n c = self._regexp[i]\n i+=1\n\n if c == '(':\n current = [current]\n current[0].append(current)\n elif c == '|':\n p = current[0]\n ch = p[-1]\n if not isinstance(ch, tuple):\n ch = (\"choice\", [current])\n p[-1] = ch\n else:\n ch[1].append(current)\n current = [p]\n elif c == ')':\n ch = current[0][-1]\n if isinstance(ch, tuple):\n ch[1].append(current)\n index.append(current)\n current = current[0]\n elif c == '[' or c == '{':\n current = [current]\n current[0].append(current)\n interp = False\n elif c == ']':\n current = current[0]\n choice = RandRegExp.choice_expand(\"\".join(current.pop()[1:]))\n current.append(RandChoice(*list(choice)))\n interp = True\n elif c == '}':\n current = current[0]\n num = \"\".join(current.pop()[1:])\n e = current.pop()\n if \",\" not in num:\n n = int(num)\n current.append([current]+[e]*n)\n else:\n num_min, num_max = num.split(\",\")\n if not num_min:\n num_min = \"0\"\n if num_max:\n n = RandNum(int(num_min), int(num_max))\n else:\n n = RandNumExpo(self._lambda, base=int(num_min))\n current.append((\"repeat\", n))\n current.append(e)\n interp = True\n elif c == '\\\\':\n c = self._regexp[i]\n if c == \"s\":\n c = RandChoice(\" \", \"\\t\")\n elif c in \"0123456789\":\n c = (\"cite\", ord(c)-0x30)\n current.append(c)\n i += 1\n elif not interp:\n current.append(c)\n elif c == '+':\n e = current.pop()\n current.append([current]+[e]*(int(random.expovariate(self._lambda))+1))\n elif c == '*':\n e = current.pop()\n current.append([current]+[e]*int(random.expovariate(self._lambda)))\n elif c == '?':\n if random.randint(0, 1):\n current.pop()\n elif c == '.':\n current.append(RandChoice(*[chr(x) for x in range(256)]))\n elif c == '$' or c == '^':\n 
pass\n else:\n current.append(c)\n\n return RandRegExp.stack_fix(stack[1:], index)\n\n def __repr__(self):\n return \"<%s [%r]>\" % (self.__class__.__name__, self._regexp)\n\n\nclass RandSingularity(RandChoice):\n pass\n\n\nclass RandSingNum(RandSingularity):\n @staticmethod\n def make_power_of_two(end):\n sign = 1\n if end == 0:\n end = 1\n if end < 0:\n end = -end\n sign = -1\n end_n = int(math.log(end)/math.log(2))+1\n return {sign*2**i for i in range(end_n)}\n\n def __init__(self, mn, mx):\n sing = {0, mn, mx, int((mn+mx)/2)}\n sing |= self.make_power_of_two(mn)\n sing |= self.make_power_of_two(mx)\n for i in sing.copy():\n sing.add(i+1)\n sing.add(i-1)\n for i in sing.copy():\n if not mn <= i <= mx:\n sing.remove(i)\n self._choice = list(sing)\n self._choice.sort()\n\n\nclass RandSingByte(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, 0, 2**8-1)\n\n\nclass RandSingSByte(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, -2**7, 2**7-1)\n\n\nclass RandSingShort(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, 0, 2**16-1)\n\n\nclass RandSingSShort(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, -2**15, 2**15-1)\n\n\nclass RandSingInt(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, 0, 2**32-1)\n\n\nclass RandSingSInt(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, -2**31, 2**31-1)\n\n\nclass RandSingLong(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, 0, 2**64-1)\n\n\nclass RandSingSLong(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, -2**63, 2**63-1)\n\n\nclass RandSingString(RandSingularity):\n def __init__(self):\n self._choice = [\"\",\n \"%x\",\n \"%%\",\n \"%s\",\n \"%i\",\n \"%n\",\n \"%x%x%x%x%x%x%x%x%x\",\n \"%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s\",\n \"%\",\n \"%%%\",\n \"A\"*4096,\n b\"\\x00\"*4096,\n b\"\\xff\"*4096,\n b\"\\x7f\"*4096,\n b\"\\x80\"*4096,\n \" \"*4096,\n \"\\\\\"*4096,\n \"(\"*4096,\n \"../\"*1024,\n \"/\"*1024,\n \"${HOME}\"*512,\n \" or 1=1 --\",\n \"' or 1=1 --\",\n '\" or 1=1 --',\n \" or 1=1; #\",\n \"' or 1=1; #\",\n '\" or 1=1; #',\n \";reboot;\",\n \"$(reboot)\",\n \"`reboot`\",\n \"index.php%00\",\n b\"\\x00\",\n \"%00\",\n \"\\\\\",\n \"../../../../../../../../../../../../../../../../../etc/passwd\",\n \"%2e%2e%2f\" * 20 + \"etc/passwd\",\n \"%252e%252e%252f\" * 20 + \"boot.ini\",\n \"..%c0%af\" * 20 + \"etc/passwd\",\n \"..%c0%af\" * 20 + \"boot.ini\",\n \"//etc/passwd\",\n r\"..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\boot.ini\",\n \"AUX:\",\n \"CLOCK$\",\n \"COM:\",\n \"CON:\",\n \"LPT:\",\n \"LST:\",\n \"NUL:\",\n \"CON:\",\n r\"C:\\CON\\CON\",\n r\"C:\\boot.ini\",\n r\"\\\\myserver\\share\",\n \"foo.exe:\",\n \"foo.exe\\\\\", ]\n\n def __str__(self):\n return str(self._fix())\n\n def __bytes__(self):\n return raw(self._fix())\n\n\nclass RandPool(RandField):\n def __init__(self, *args):\n \"\"\"Each parameter is a volatile object or a couple (volatile object, weight)\"\"\"\n pool = []\n for p in args:\n w = 1\n if isinstance(p, tuple):\n p, w = p\n pool += [p]*w\n self._pool = pool\n\n def _fix(self):\n r = random.choice(self._pool)\n return r._fix()\n\n# Automatic timestamp\n\n\nclass AutoTime(VolatileValue):\n def __init__(self, base=None):\n if base == None:\n self.diff = 0\n else:\n self.diff = time.time()-base\n\n def _fix(self):\n return time.time()-self.diff\n\n\nclass IntAutoTime(AutoTime):\n def _fix(self):\n return int(time.time()-self.diff)\n\n\nclass ZuluTime(AutoTime):\n def 
__init__(self, diff=0):\n self.diff = diff\n\n def _fix(self):\n return time.strftime(\"%y%m%d%H%M%SZ\",\n time.gmtime(time.time() + self.diff))\n\n\nclass GeneralizedTime(AutoTime):\n def __init__(self, diff=0):\n self.diff = diff\n\n def _fix(self):\n return time.strftime(\"%Y%m%d%H%M%SZ\",\n time.gmtime(time.time() + self.diff))\n\n\nclass DelayedEval(VolatileValue):\n \"\"\" Example of usage: DelayedEval(\"time.time()\") \"\"\"\n\n def __init__(self, expr):\n self.expr = expr\n\n def _fix(self):\n return eval(self.expr)\n\n\nclass IncrementalValue(VolatileValue):\n def __init__(self, start=0, step=1, restart=-1):\n self.start = self.val = start\n self.step = step\n self.restart = restart\n\n def _fix(self):\n v = self.val\n if self.val == self.restart:\n self.val = self.start\n else:\n self.val += self.step\n return v\n\n\nclass CorruptedBytes(VolatileValue):\n def __init__(self, s, p=0.01, n=None):\n self.s = s\n self.p = p\n self.n = n\n\n def _fix(self):\n return corrupt_bytes(self.s, self.p, self.n)\n\n\nclass CorruptedBits(CorruptedBytes):\n def _fix(self):\n return corrupt_bits(self.s, self.p, self.n)\n\n", "path": "scapy/volatile.py"}], "after_files": [{"content": "## This file is part of Scapy\n## See http://www.secdev.org/projects/scapy for more informations\n## Copyright (C) Philippe Biondi <[email protected]>\n## This program is published under a GPLv2 license\n\n\"\"\"\nFields that hold random numbers.\n\"\"\"\n\nfrom __future__ import absolute_import\nimport random, time, math\nfrom scapy.base_classes import Net\nfrom scapy.compat import *\nfrom scapy.utils import corrupt_bits, corrupt_bytes\nfrom scapy.modules.six.moves import range\n\n####################\n## Random numbers ##\n####################\n\n\nclass RandomEnumeration:\n \"\"\"iterate through a sequence in random order.\n When all the values have been drawn, if forever=1, the drawing is done again.\n If renewkeys=0, the draw will be in the same order, guaranteeing that the same\n number will be drawn in not less than the number of integers of the sequence\"\"\"\n\n def __init__(self, inf, sup, seed=None, forever=1, renewkeys=0):\n self.forever = forever\n self.renewkeys = renewkeys\n self.inf = inf\n self.rnd = random.Random(seed)\n self.sbox_size = 256\n\n self.top = sup-inf+1\n\n n=0\n while (1<<n) < self.top:\n n += 1\n self.n =n\n\n self.fs = min(3, (n+1)//2)\n self.fsmask = 2**self.fs-1\n self.rounds = max(self.n, 3)\n self.turns = 0\n self.i = 0\n\n def __iter__(self):\n return self\n\n def next(self):\n while True:\n if self.turns == 0 or (self.i == 0 and self.renewkeys):\n self.cnt_key = self.rnd.randint(0, 2**self.n-1)\n self.sbox = [self.rnd.randint(0, self.fsmask)\n for _ in range(self.sbox_size)]\n self.turns += 1\n while self.i < 2**self.n:\n ct = self.i^self.cnt_key\n self.i += 1\n for _ in range(self.rounds): # Unbalanced Feistel Network\n lsb = ct & self.fsmask\n ct >>= self.fs\n lsb ^= self.sbox[ct%self.sbox_size]\n ct |= lsb << (self.n-self.fs)\n\n if ct < self.top:\n return self.inf+ct\n self.i = 0\n if not self.forever:\n raise StopIteration\n __next__ = next\n\n\nclass VolatileValue(object):\n def __repr__(self):\n return \"<%s>\" % self.__class__.__name__\n\n def __eq__(self, other):\n x = self._fix()\n y = other._fix() if isinstance(other, VolatileValue) else other\n if not isinstance(x, type(y)):\n return False\n return x == y\n\n def __getattr__(self, attr):\n if attr in [\"__setstate__\", \"__getstate__\"]:\n raise AttributeError(attr)\n return getattr(self._fix(), attr)\n\n def 
__str__(self):\n return str(self._fix())\n\n def __bytes__(self):\n return raw(self._fix())\n\n def __len__(self):\n return len(self._fix())\n\n def _fix(self):\n return None\n\n\nclass RandField(VolatileValue):\n pass\n\n\nclass RandNum(RandField):\n \"\"\"Instances evaluate to random integers in selected range\"\"\"\n min = 0\n max = 0\n\n def __init__(self, min, max):\n self.min = min\n self.max = max\n\n def _fix(self):\n return random.randrange(self.min, self.max+1)\n\n def __int__(self):\n return int(self._fix())\n\n def __index__(self):\n return int(self)\n\n def __nonzero__(self):\n return bool(self._fix())\n __bool__ = __nonzero__\n\n def __add__(self, other):\n return self._fix() + other\n\n def __radd__(self, other):\n return other + self._fix()\n\n def __sub__(self, other):\n return self._fix() - other\n\n def __rsub__(self, other):\n return other - self._fix()\n\n def __mul__(self, other):\n return self._fix() * other\n\n def __rmul__(self, other):\n return other * self._fix()\n\n def __floordiv__(self, other):\n return self._fix() / other\n __div__ = __floordiv__\n\n def __lt__(self, other):\n return self._fix() < other\n\n def __le__(self, other):\n return self._fix() <= other\n\n def __eq__(self, other):\n return self._fix() == other\n\n def __ne__(self, other):\n return self._fix() != other\n\n def __ge__(self, other):\n return self._fix() >= other\n\n def __gt__(self, other):\n return self._fix() > other\n\n def __lshift__(self, other):\n return self._fix() << other\n\n def __rshift__(self, other):\n return self._fix() >> other\n\n def __and__(self, other):\n return self._fix() & other\n\n def __rand__(self, other):\n return other & self._fix()\n\n def __or__(self, other):\n return self._fix() | other\n\n def __ror__(self, other):\n return other | self._fix()\n\n\nclass RandNumGamma(RandNum):\n def __init__(self, alpha, beta):\n self.alpha = alpha\n self.beta = beta\n\n def _fix(self):\n return int(round(random.gammavariate(self.alpha, self.beta)))\n\n\nclass RandNumGauss(RandNum):\n def __init__(self, mu, sigma):\n self.mu = mu\n self.sigma = sigma\n\n def _fix(self):\n return int(round(random.gauss(self.mu, self.sigma)))\n\n\nclass RandNumExpo(RandNum):\n def __init__(self, lambd, base=0):\n self.lambd = lambd\n self.base = base\n\n def _fix(self):\n return self.base+int(round(random.expovariate(self.lambd)))\n\n\nclass RandEnum(RandNum):\n \"\"\"Instances evaluate to integer sampling without replacement from the given interval\"\"\"\n\n def __init__(self, min, max, seed=None):\n self.seq = RandomEnumeration(min, max, seed)\n\n def _fix(self):\n return next(self.seq)\n\n\nclass RandByte(RandNum):\n def __init__(self):\n RandNum.__init__(self, 0, 2**8-1)\n\n\nclass RandSByte(RandNum):\n def __init__(self):\n RandNum.__init__(self, -2**7, 2**7-1)\n\n\nclass RandShort(RandNum):\n def __init__(self):\n RandNum.__init__(self, 0, 2**16-1)\n\n\nclass RandSShort(RandNum):\n def __init__(self):\n RandNum.__init__(self, -2**15, 2**15-1)\n\n\nclass RandInt(RandNum):\n def __init__(self):\n RandNum.__init__(self, 0, 2**32-1)\n\n\nclass RandSInt(RandNum):\n def __init__(self):\n RandNum.__init__(self, -2**31, 2**31-1)\n\n\nclass RandLong(RandNum):\n def __init__(self):\n RandNum.__init__(self, 0, 2**64-1)\n\n\nclass RandSLong(RandNum):\n def __init__(self):\n RandNum.__init__(self, -2**63, 2**63-1)\n\n\nclass RandEnumByte(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, 0, 2**8-1)\n\n\nclass RandEnumSByte(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, -2**7, 
2**7-1)\n\n\nclass RandEnumShort(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, 0, 2**16-1)\n\n\nclass RandEnumSShort(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, -2**15, 2**15-1)\n\n\nclass RandEnumInt(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, 0, 2**32-1)\n\n\nclass RandEnumSInt(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, -2**31, 2**31-1)\n\n\nclass RandEnumLong(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, 0, 2**64-1)\n\n\nclass RandEnumSLong(RandEnum):\n def __init__(self):\n RandEnum.__init__(self, -2**63, 2**63-1)\n\n\nclass RandEnumKeys(RandEnum):\n \"\"\"Picks a random value from dict keys list. \"\"\"\n\n def __init__(self, enum, seed=None):\n self.enum = list(enum)\n self.seq = RandomEnumeration(0, len(self.enum) - 1, seed)\n\n def _fix(self):\n return self.enum[next(self.seq)]\n\n\nclass RandChoice(RandField):\n def __init__(self, *args):\n if not args:\n raise TypeError(\"RandChoice needs at least one choice\")\n self._choice = args\n\n def _fix(self):\n return random.choice(self._choice)\n\n\nclass RandString(RandField):\n def __init__(self, size=None, chars=b\"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789\"):\n if size is None:\n size = RandNumExpo(0.01)\n self.size = size\n self.chars = chars\n\n def _fix(self):\n s = b\"\"\n for _ in range(self.size):\n s += chb(random.choice(self.chars))\n return s\n\n def __str__(self):\n return plain_str(self._fix())\n\n def __bytes__(self):\n return raw(self._fix())\n\n def __mul__(self, n):\n return self._fix()*n\n\n\nclass RandBin(RandString):\n def __init__(self, size=None):\n super(RandBin, self).__init__(size=size, chars=b\"\".join(chb(c) for c in range(256)))\n\n\nclass RandTermString(RandBin):\n def __init__(self, size, term):\n self.term = raw(term)\n super(RandTermString, self).__init__(size=size)\n\n def _fix(self):\n return RandBin._fix(self)+self.term\n\n\nclass RandIP(RandString):\n def __init__(self, iptemplate=\"0.0.0.0/0\"):\n self.ip = Net(iptemplate)\n\n def _fix(self):\n return self.ip.choice()\n\n\nclass RandMAC(RandString):\n def __init__(self, template=\"*\"):\n template += \":*:*:*:*:*\"\n template = template.split(\":\")\n self.mac = ()\n for i in range(6):\n if template[i] == \"*\":\n v = RandByte()\n elif \"-\" in template[i]:\n x, y = template[i].split(\"-\")\n v = RandNum(int(x, 16), int(y, 16))\n else:\n v = int(template[i], 16)\n self.mac += (v,)\n\n def _fix(self):\n return \"%02x:%02x:%02x:%02x:%02x:%02x\" % self.mac\n\n\nclass RandIP6(RandString):\n def __init__(self, ip6template=\"**\"):\n self.tmpl = ip6template\n self.sp = self.tmpl.split(\":\")\n for i, v in enumerate(self.sp):\n if not v or v == \"**\":\n continue\n if \"-\" in v:\n a, b = v.split(\"-\")\n elif v == \"*\":\n a=b=\"\"\n else:\n a=b=v\n\n if not a:\n a = \"0\"\n if not b:\n b = \"ffff\"\n if a==b:\n self.sp[i] = int(a, 16)\n else:\n self.sp[i] = RandNum(int(a, 16), int(b, 16))\n self.variable = \"\" in self.sp\n self.multi = self.sp.count(\"**\")\n\n def _fix(self):\n done = 0\n nbm = self.multi\n ip = []\n for i, n in enumerate(self.sp):\n if n == \"**\":\n nbm -= 1\n remain = 8-(len(self.sp)-i-1)-len(ip)+nbm\n if \"\" in self.sp:\n remain += 1\n if nbm or self.variable:\n remain = random.randint(0, remain)\n for j in range(remain):\n ip.append(\"%04x\" % random.randint(0, 65535))\n elif isinstance(n, RandNum):\n ip.append(\"%04x\" % n)\n elif n == 0:\n ip.append(\"0\")\n elif not n:\n ip.append(\"\")\n else:\n ip.append(\"%04x\" % n)\n if len(ip) 
== 9:\n ip.remove(\"\")\n if ip[-1] == \"\":\n ip[-1] = \"0\"\n return \":\".join(ip)\n\n\nclass RandOID(RandString):\n def __init__(self, fmt=None, depth=RandNumExpo(0.1), idnum=RandNumExpo(0.01)):\n self.ori_fmt = fmt\n if fmt is not None:\n fmt = fmt.split(\".\")\n for i in range(len(fmt)):\n if \"-\" in fmt[i]:\n fmt[i] = tuple(map(int, fmt[i].split(\"-\")))\n self.fmt = fmt\n self.depth = depth\n self.idnum = idnum\n\n def __repr__(self):\n if self.ori_fmt is None:\n return \"<%s>\" % self.__class__.__name__\n else:\n return \"<%s [%s]>\" % (self.__class__.__name__, self.ori_fmt)\n\n def _fix(self):\n if self.fmt is None:\n return \".\".join(str(self.idnum) for _ in range(1 + self.depth))\n else:\n oid = []\n for i in self.fmt:\n if i == \"*\":\n oid.append(str(self.idnum))\n elif i == \"**\":\n oid += [str(self.idnum) for i in range(1 + self.depth)]\n elif isinstance(i, tuple):\n oid.append(str(random.randrange(*i)))\n else:\n oid.append(i)\n return \".\".join(oid)\n\n\nclass RandRegExp(RandField):\n def __init__(self, regexp, lambda_=0.3,):\n self._regexp = regexp\n self._lambda = lambda_\n\n @staticmethod\n def choice_expand(s): #XXX does not support special sets like (ex ':alnum:')\n m = \"\"\n invert = s and s[0] == \"^\"\n while True:\n p = s.find(\"-\")\n if p < 0:\n break\n if p == 0 or p == len(s)-1:\n m = \"-\"\n if p:\n s = s[:-1]\n else:\n s = s[1:]\n else:\n c1 = s[p-1]\n c2 = s[p+1]\n rng = \"\".join(map(chr, range(ord(c1), ord(c2)+1)))\n s = s[:p-1]+rng+s[p+1:]\n res = m+s\n if invert:\n res = \"\".join(chr(x) for x in range(256) if chr(x) not in res)\n return res\n\n @staticmethod\n def stack_fix(lst, index):\n r = \"\"\n mul = 1\n for e in lst:\n if isinstance(e, list):\n if mul != 1:\n mul = mul-1\n r += RandRegExp.stack_fix(e[1:]*mul, index)\n # only the last iteration should be kept for back reference\n f = RandRegExp.stack_fix(e[1:], index)\n for i, idx in enumerate(index):\n if e is idx:\n index[i] = f\n r += f\n mul = 1\n elif isinstance(e, tuple):\n kind, val = e\n if kind == \"cite\":\n r += index[val-1]\n elif kind == \"repeat\":\n mul = val\n\n elif kind == \"choice\":\n if mul == 1:\n c = random.choice(val)\n r += RandRegExp.stack_fix(c[1:], index)\n else:\n r += RandRegExp.stack_fix([e]*mul, index)\n mul = 1\n else:\n if mul != 1:\n r += RandRegExp.stack_fix([e]*mul, index)\n mul = 1\n else:\n r += str(e)\n return r\n\n def _fix(self):\n stack = [None]\n index = []\n current = stack\n i = 0\n ln = len(self._regexp)\n interp = True\n while i < ln:\n c = self._regexp[i]\n i+=1\n\n if c == '(':\n current = [current]\n current[0].append(current)\n elif c == '|':\n p = current[0]\n ch = p[-1]\n if not isinstance(ch, tuple):\n ch = (\"choice\", [current])\n p[-1] = ch\n else:\n ch[1].append(current)\n current = [p]\n elif c == ')':\n ch = current[0][-1]\n if isinstance(ch, tuple):\n ch[1].append(current)\n index.append(current)\n current = current[0]\n elif c == '[' or c == '{':\n current = [current]\n current[0].append(current)\n interp = False\n elif c == ']':\n current = current[0]\n choice = RandRegExp.choice_expand(\"\".join(current.pop()[1:]))\n current.append(RandChoice(*list(choice)))\n interp = True\n elif c == '}':\n current = current[0]\n num = \"\".join(current.pop()[1:])\n e = current.pop()\n if \",\" not in num:\n n = int(num)\n current.append([current]+[e]*n)\n else:\n num_min, num_max = num.split(\",\")\n if not num_min:\n num_min = \"0\"\n if num_max:\n n = RandNum(int(num_min), int(num_max))\n else:\n n = RandNumExpo(self._lambda, 
base=int(num_min))\n current.append((\"repeat\", n))\n current.append(e)\n interp = True\n elif c == '\\\\':\n c = self._regexp[i]\n if c == \"s\":\n c = RandChoice(\" \", \"\\t\")\n elif c in \"0123456789\":\n c = (\"cite\", ord(c)-0x30)\n current.append(c)\n i += 1\n elif not interp:\n current.append(c)\n elif c == '+':\n e = current.pop()\n current.append([current]+[e]*(int(random.expovariate(self._lambda))+1))\n elif c == '*':\n e = current.pop()\n current.append([current]+[e]*int(random.expovariate(self._lambda)))\n elif c == '?':\n if random.randint(0, 1):\n current.pop()\n elif c == '.':\n current.append(RandChoice(*[chr(x) for x in range(256)]))\n elif c == '$' or c == '^':\n pass\n else:\n current.append(c)\n\n return RandRegExp.stack_fix(stack[1:], index)\n\n def __repr__(self):\n return \"<%s [%r]>\" % (self.__class__.__name__, self._regexp)\n\n\nclass RandSingularity(RandChoice):\n pass\n\n\nclass RandSingNum(RandSingularity):\n @staticmethod\n def make_power_of_two(end):\n sign = 1\n if end == 0:\n end = 1\n if end < 0:\n end = -end\n sign = -1\n end_n = int(math.log(end)/math.log(2))+1\n return {sign*2**i for i in range(end_n)}\n\n def __init__(self, mn, mx):\n sing = {0, mn, mx, int((mn+mx)/2)}\n sing |= self.make_power_of_two(mn)\n sing |= self.make_power_of_two(mx)\n for i in sing.copy():\n sing.add(i+1)\n sing.add(i-1)\n for i in sing.copy():\n if not mn <= i <= mx:\n sing.remove(i)\n self._choice = list(sing)\n self._choice.sort()\n\n\nclass RandSingByte(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, 0, 2**8-1)\n\n\nclass RandSingSByte(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, -2**7, 2**7-1)\n\n\nclass RandSingShort(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, 0, 2**16-1)\n\n\nclass RandSingSShort(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, -2**15, 2**15-1)\n\n\nclass RandSingInt(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, 0, 2**32-1)\n\n\nclass RandSingSInt(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, -2**31, 2**31-1)\n\n\nclass RandSingLong(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, 0, 2**64-1)\n\n\nclass RandSingSLong(RandSingNum):\n def __init__(self):\n RandSingNum.__init__(self, -2**63, 2**63-1)\n\n\nclass RandSingString(RandSingularity):\n def __init__(self):\n self._choice = [\"\",\n \"%x\",\n \"%%\",\n \"%s\",\n \"%i\",\n \"%n\",\n \"%x%x%x%x%x%x%x%x%x\",\n \"%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s%s\",\n \"%\",\n \"%%%\",\n \"A\"*4096,\n b\"\\x00\"*4096,\n b\"\\xff\"*4096,\n b\"\\x7f\"*4096,\n b\"\\x80\"*4096,\n \" \"*4096,\n \"\\\\\"*4096,\n \"(\"*4096,\n \"../\"*1024,\n \"/\"*1024,\n \"${HOME}\"*512,\n \" or 1=1 --\",\n \"' or 1=1 --\",\n '\" or 1=1 --',\n \" or 1=1; #\",\n \"' or 1=1; #\",\n '\" or 1=1; #',\n \";reboot;\",\n \"$(reboot)\",\n \"`reboot`\",\n \"index.php%00\",\n b\"\\x00\",\n \"%00\",\n \"\\\\\",\n \"../../../../../../../../../../../../../../../../../etc/passwd\",\n \"%2e%2e%2f\" * 20 + \"etc/passwd\",\n \"%252e%252e%252f\" * 20 + \"boot.ini\",\n \"..%c0%af\" * 20 + \"etc/passwd\",\n \"..%c0%af\" * 20 + \"boot.ini\",\n \"//etc/passwd\",\n r\"..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\..\\boot.ini\",\n \"AUX:\",\n \"CLOCK$\",\n \"COM:\",\n \"CON:\",\n \"LPT:\",\n \"LST:\",\n \"NUL:\",\n \"CON:\",\n r\"C:\\CON\\CON\",\n r\"C:\\boot.ini\",\n r\"\\\\myserver\\share\",\n \"foo.exe:\",\n \"foo.exe\\\\\", ]\n\n def __str__(self):\n return str(self._fix())\n\n def __bytes__(self):\n 
return raw(self._fix())\n\n\nclass RandPool(RandField):\n def __init__(self, *args):\n \"\"\"Each parameter is a volatile object or a couple (volatile object, weight)\"\"\"\n pool = []\n for p in args:\n w = 1\n if isinstance(p, tuple):\n p, w = p\n pool += [p]*w\n self._pool = pool\n\n def _fix(self):\n r = random.choice(self._pool)\n return r._fix()\n\n# Automatic timestamp\n\n\nclass AutoTime(VolatileValue):\n def __init__(self, base=None):\n if base == None:\n self.diff = 0\n else:\n self.diff = time.time()-base\n\n def _fix(self):\n return time.time()-self.diff\n\n\nclass IntAutoTime(AutoTime):\n def _fix(self):\n return int(time.time()-self.diff)\n\n\nclass ZuluTime(AutoTime):\n def __init__(self, diff=0):\n self.diff = diff\n\n def _fix(self):\n return time.strftime(\"%y%m%d%H%M%SZ\",\n time.gmtime(time.time() + self.diff))\n\n\nclass GeneralizedTime(AutoTime):\n def __init__(self, diff=0):\n self.diff = diff\n\n def _fix(self):\n return time.strftime(\"%Y%m%d%H%M%SZ\",\n time.gmtime(time.time() + self.diff))\n\n\nclass DelayedEval(VolatileValue):\n \"\"\" Example of usage: DelayedEval(\"time.time()\") \"\"\"\n\n def __init__(self, expr):\n self.expr = expr\n\n def _fix(self):\n return eval(self.expr)\n\n\nclass IncrementalValue(VolatileValue):\n def __init__(self, start=0, step=1, restart=-1):\n self.start = self.val = start\n self.step = step\n self.restart = restart\n\n def _fix(self):\n v = self.val\n if self.val == self.restart:\n self.val = self.start\n else:\n self.val += self.step\n return v\n\n\nclass CorruptedBytes(VolatileValue):\n def __init__(self, s, p=0.01, n=None):\n self.s = s\n self.p = p\n self.n = n\n\n def _fix(self):\n return corrupt_bytes(self.s, self.p, self.n)\n\n\nclass CorruptedBits(CorruptedBytes):\n def _fix(self):\n return corrupt_bits(self.s, self.p, self.n)\n\n", "path": "scapy/volatile.py"}]} |
gh_patches_debug_186 | rasdani/github-patches | git_diff | apache__airflow-26806 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pdb no longer works with airflow test command since 2.3.3
Converted back to issue as I've reproduced it and traced the issue back to https://github.com/apache/airflow/pull/24362
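A hedged note on the mechanism, inferred from the golden diff shown later in this row rather than stated in the report: the `RedactedIO` wrapper that task runs apparently install around stdout does not expose a usable `fileno`, so the `input()` call in pdb's command loop fails. A sketch of the fix direction (the actual patch is the golden diff below):

```python
import sys
from typing import TextIO


# Sketch only, inferred from the one-line fix in the golden diff for this row.
class RedactedIO(TextIO):
    def __init__(self):
        self.target = sys.stdout
        self.fileno = sys.stdout.fileno  # expose the real stdout fd so readline/pdb can use it
```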
### Discussed in https://github.com/apache/airflow/discussions/26352
Originally posted by **GuruComposer** on September 12, 2022.
### Apache Airflow version
2.3.4
### What happened
I used to be able to use ipdb to debug DAGs by running `airflow tasks test <dag_name> <dag_id>`, and hitting an ipdb breakpoint (ipdb.set_trace()).
This no longer works. I get a strange type error:
```
 File "/usr/local/lib/python3.9/bdb.py", line 88, in trace_dispatch
return self.dispatch_line(frame)
File "/usr/local/lib/python3.9/bdb.py", line 112, in dispatch_line
self.user_line(frame)
File "/usr/local/lib/python3.9/pdb.py", line 262, in user_line
self.interaction(frame, None)
File "/home/astro/.local/lib/python3.9/site-packages/IPython/core/debugger.py", line 336, in interaction
OldPdb.interaction(self, frame, traceback)
File "/usr/local/lib/python3.9/pdb.py", line 357, in interaction
self._cmdloop()
File "/usr/local/lib/python3.9/pdb.py", line 322, in _cmdloop
self.cmdloop()
File "/usr/local/lib/python3.9/cmd.py", line 126, in cmdloop
line = input(self.prompt)
TypeError: an integer is required (got type NoneType)
```
### What you think should happen instead
I should get the ipdb shell.
### How to reproduce
1. Add an ipdb breakpoint anywhere in an Airflow task:
import ipdb; ipdb.set_trace()
2. Run that task:
Run `airflow tasks test <dag_name> <dag_id>` and hit the breakpoint; the error above appears instead of the ipdb shell.
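For illustration only, here is a minimal sketch of such a task. The DAG id, task id, and file name are hypothetical (not taken from the report), and `schedule_interval=None` is used because the report is against Airflow 2.3.x:

```python
# dags/debug_demo.py -- hypothetical minimal DAG used only to reproduce the report
import datetime

import ipdb
from airflow import DAG
from airflow.operators.python import PythonOperator


def _break_here():
    # Expected: drop into the ipdb shell when run via `airflow tasks test`
    ipdb.set_trace()


with DAG(
    dag_id="debug_demo",
    start_date=datetime.datetime(2022, 1, 1),
    schedule_interval=None,
) as dag:
    PythonOperator(task_id="break_here", python_callable=_break_here)
```

With that file in place, `airflow tasks test debug_demo break_here 2022-01-01` should open the ipdb prompt; on the affected versions it raises the `TypeError` shown above instead.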
### Operating System
Debian GNU/Linux
### Versions of Apache Airflow Providers
2.3.4
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `airflow/utils/log/secrets_masker.py`
Content:
```
1 # Licensed to the Apache Software Foundation (ASF) under one
2 # or more contributor license agreements. See the NOTICE file
3 # distributed with this work for additional information
4 # regarding copyright ownership. The ASF licenses this file
5 # to you under the Apache License, Version 2.0 (the
6 # "License"); you may not use this file except in compliance
7 # with the License. You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing,
12 # software distributed under the License is distributed on an
13 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 # KIND, either express or implied. See the License for the
15 # specific language governing permissions and limitations
16 # under the License.
17 """Mask sensitive information from logs"""
18 from __future__ import annotations
19
20 import collections
21 import logging
22 import re
23 import sys
24 from typing import Any, Dict, Iterable, List, TextIO, Tuple, TypeVar, Union
25
26 from airflow import settings
27 from airflow.compat.functools import cache, cached_property
28
29 Redactable = TypeVar("Redactable", str, Dict[Any, Any], Tuple[Any, ...], List[Any])
30 Redacted = Union[Redactable, str]
31
32 log = logging.getLogger(__name__)
33
34 DEFAULT_SENSITIVE_FIELDS = frozenset(
35 {
36 'access_token',
37 'api_key',
38 'apikey',
39 'authorization',
40 'passphrase',
41 'passwd',
42 'password',
43 'private_key',
44 'secret',
45 'token',
46 'keyfile_dict',
47 'service_account',
48 }
49 )
50 """Names of fields (Connection extra, Variable key name etc.) that are deemed sensitive"""
51
52 SECRETS_TO_SKIP_MASKING_FOR_TESTS = {'airflow'}
53
54
55 @cache
56 def get_sensitive_variables_fields():
57 """Get comma-separated sensitive Variable Fields from airflow.cfg."""
58 from airflow.configuration import conf
59
60 sensitive_fields = DEFAULT_SENSITIVE_FIELDS.copy()
61 sensitive_variable_fields = conf.get('core', 'sensitive_var_conn_names')
62 if sensitive_variable_fields:
63 sensitive_fields |= frozenset({field.strip() for field in sensitive_variable_fields.split(',')})
64 return sensitive_fields
65
66
67 def should_hide_value_for_key(name):
68 """Should the value for this given name (Variable name, or key in conn.extra_dejson) be hidden"""
69 from airflow import settings
70
71 if isinstance(name, str) and settings.HIDE_SENSITIVE_VAR_CONN_FIELDS:
72 name = name.strip().lower()
73 return any(s in name for s in get_sensitive_variables_fields())
74 return False
75
76
77 def mask_secret(secret: str | dict | Iterable, name: str | None = None) -> None:
78 """
79 Mask a secret from appearing in the task logs.
80
81 If ``name`` is provided, then it will only be masked if the name matches
82 one of the configured "sensitive" names.
83
84 If ``secret`` is a dict or a iterable (excluding str) then it will be
85 recursively walked and keys with sensitive names will be hidden.
86 """
87 # Filtering all log messages is not a free process, so we only do it when
88 # running tasks
89 if not secret:
90 return
91
92 _secrets_masker().add_mask(secret, name)
93
94
95 def redact(value: Redactable, name: str | None = None) -> Redacted:
96 """Redact any secrets found in ``value``."""
97 return _secrets_masker().redact(value, name)
98
99
100 @cache
101 def _secrets_masker() -> SecretsMasker:
102 for flt in logging.getLogger('airflow.task').filters:
103 if isinstance(flt, SecretsMasker):
104 return flt
105 raise RuntimeError(
106 "Logging Configuration Error! No SecretsMasker found! If you have custom logging, please make "
107 "sure you configure it taking airflow configuration as a base as explained at "
108 "https://airflow.apache.org/docs/apache-airflow/stable/logging-monitoring/logging-tasks.html"
109 "#advanced-configuration"
110 )
111
112
113 class SecretsMasker(logging.Filter):
114 """Redact secrets from logs"""
115
116 replacer: re.Pattern | None = None
117 patterns: set[str]
118
119 ALREADY_FILTERED_FLAG = "__SecretsMasker_filtered"
120 MAX_RECURSION_DEPTH = 5
121
122 def __init__(self):
123 super().__init__()
124 self.patterns = set()
125
126 @cached_property
127 def _record_attrs_to_ignore(self) -> Iterable[str]:
128 # Doing log.info(..., extra={'foo': 2}) sets extra properties on
129 # record, i.e. record.foo. And we need to filter those too. Fun
130 #
131 # Create a record, and look at what attributes are on it, and ignore
132 # all the default ones!
133
134 record = logging.getLogRecordFactory()(
135 # name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None,
136 "x",
137 logging.INFO,
138 __file__,
139 1,
140 "",
141 tuple(),
142 exc_info=None,
143 func="funcname",
144 )
145 return frozenset(record.__dict__).difference({'msg', 'args'})
146
147 def _redact_exception_with_context(self, exception):
148 # Exception class may not be modifiable (e.g. declared by an
149 # extension module such as JDBC).
150 try:
151 exception.args = (self.redact(v) for v in exception.args)
152 except AttributeError:
153 pass
154 if exception.__context__:
155 self._redact_exception_with_context(exception.__context__)
156 if exception.__cause__ and exception.__cause__ is not exception.__context__:
157 self._redact_exception_with_context(exception.__cause__)
158
159 def filter(self, record) -> bool:
160 if settings.MASK_SECRETS_IN_LOGS is not True:
161 return True
162
163 if self.ALREADY_FILTERED_FLAG in record.__dict__:
164 # Filters are attached to multiple handlers and logs, keep a
165 # "private" flag that stops us needing to process it more than once
166 return True
167
168 if self.replacer:
169 for k, v in record.__dict__.items():
170 if k in self._record_attrs_to_ignore:
171 continue
172 record.__dict__[k] = self.redact(v)
173 if record.exc_info and record.exc_info[1] is not None:
174 exc = record.exc_info[1]
175 self._redact_exception_with_context(exc)
176 record.__dict__[self.ALREADY_FILTERED_FLAG] = True
177
178 return True
179
180 def _redact_all(self, item: Redactable, depth: int) -> Redacted:
181 if depth > self.MAX_RECURSION_DEPTH or isinstance(item, str):
182 return '***'
183 if isinstance(item, dict):
184 return {dict_key: self._redact_all(subval, depth + 1) for dict_key, subval in item.items()}
185 elif isinstance(item, (tuple, set)):
186 # Turn set in to tuple!
187 return tuple(self._redact_all(subval, depth + 1) for subval in item)
188 elif isinstance(item, list):
189 return list(self._redact_all(subval, depth + 1) for subval in item)
190 else:
191 return item
192
193 def _redact(self, item: Redactable, name: str | None, depth: int) -> Redacted:
194 # Avoid spending too much effort on redacting on deeply nested
195 # structures. This also avoid infinite recursion if a structure has
196 # reference to self.
197 if depth > self.MAX_RECURSION_DEPTH:
198 return item
199 try:
200 if name and should_hide_value_for_key(name):
201 return self._redact_all(item, depth)
202 if isinstance(item, dict):
203 return {
204 dict_key: self._redact(subval, name=dict_key, depth=(depth + 1))
205 for dict_key, subval in item.items()
206 }
207 elif isinstance(item, str):
208 if self.replacer:
209 # We can't replace specific values, but the key-based redacting
210 # can still happen, so we can't short-circuit, we need to walk
211 # the structure.
212 return self.replacer.sub('***', item)
213 return item
214 elif isinstance(item, (tuple, set)):
215 # Turn set in to tuple!
216 return tuple(self._redact(subval, name=None, depth=(depth + 1)) for subval in item)
217 elif isinstance(item, list):
218 return [self._redact(subval, name=None, depth=(depth + 1)) for subval in item]
219 else:
220 return item
221 # I think this should never happen, but it does not hurt to leave it just in case
222 # Well. It happened (see https://github.com/apache/airflow/issues/19816#issuecomment-983311373)
223 # but it caused infinite recursion, so we need to cast it to str first.
224 except Exception as e:
225 log.warning(
226 "Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>. "
227 "Error was: %s: %s",
228 repr(item),
229 type(e).__name__,
230 str(e),
231 )
232 return item
233
234 def redact(self, item: Redactable, name: str | None = None) -> Redacted:
235 """Redact an any secrets found in ``item``, if it is a string.
236
237 If ``name`` is given, and it's a "sensitive" name (see
238 :func:`should_hide_value_for_key`) then all string values in the item
239 is redacted.
240 """
241 return self._redact(item, name, depth=0)
242
243 def add_mask(self, secret: str | dict | Iterable, name: str | None = None):
244 """Add a new secret to be masked to this filter instance."""
245 from airflow.configuration import conf
246
247 test_mode: bool = conf.getboolean('core', 'unit_test_mode')
248 if isinstance(secret, dict):
249 for k, v in secret.items():
250 self.add_mask(v, k)
251 elif isinstance(secret, str):
252 if not secret or (test_mode and secret in SECRETS_TO_SKIP_MASKING_FOR_TESTS):
253 return
254 pattern = re.escape(secret)
255 if pattern not in self.patterns and (not name or should_hide_value_for_key(name)):
256 self.patterns.add(pattern)
257 self.replacer = re.compile('|'.join(self.patterns))
258 elif isinstance(secret, collections.abc.Iterable):
259 for v in secret:
260 self.add_mask(v, name)
261
262
263 class RedactedIO(TextIO):
264 """IO class that redacts values going into stdout.
265
266 Expected usage::
267
268 with contextlib.redirect_stdout(RedactedIO()):
269 ... # Writes to stdout will be redacted.
270 """
271
272 def __init__(self):
273 self.target = sys.stdout
274
275 def write(self, s: str) -> int:
276 s = redact(s)
277 return self.target.write(s)
278
279 def flush(self) -> None:
280 return self.target.flush()
281
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py
--- a/airflow/utils/log/secrets_masker.py
+++ b/airflow/utils/log/secrets_masker.py
@@ -271,6 +271,7 @@
def __init__(self):
self.target = sys.stdout
+ self.fileno = sys.stdout.fileno
def write(self, s: str) -> int:
s = redact(s)
| {"golden_diff": "diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py\n--- a/airflow/utils/log/secrets_masker.py\n+++ b/airflow/utils/log/secrets_masker.py\n@@ -271,6 +271,7 @@\n \n def __init__(self):\n self.target = sys.stdout\n+ self.fileno = sys.stdout.fileno\n \n def write(self, s: str) -> int:\n s = redact(s)\n", "issue": "pdb no longer works with airflow test command since 2.3.3\nConverted back to issue as I've reproduced it and traced the issue back to https://github.com/apache/airflow/pull/24362\r\n\r\n### Discussed in https://github.com/apache/airflow/discussions/26352\r\n\r\n<div type='discussions-op-text'>\r\n\r\n<sup>Originally posted by **GuruComposer** September 12, 2022</sup>\r\n### Apache Airflow version\r\n\r\n2.3.4\r\n\r\n### What happened\r\n\r\nI used to be able to use ipdb to debug DAGs by running `airflow tasks test <dag_name> <dag_id>`, and hitting an ipdb breakpoint (ipdb.set_trace()).\r\n\r\nThis no longer works. I get a strange type error:\r\n\r\n``` File \"/usr/local/lib/python3.9/bdb.py\", line 88, in trace_dispatch\r\n return self.dispatch_line(frame)\r\n File \"/usr/local/lib/python3.9/bdb.py\", line 112, in dispatch_line\r\n self.user_line(frame)\r\n File \"/usr/local/lib/python3.9/pdb.py\", line 262, in user_line\r\n self.interaction(frame, None)\r\n File \"/home/astro/.local/lib/python3.9/site-packages/IPython/core/debugger.py\", line 336, in interaction\r\n OldPdb.interaction(self, frame, traceback)\r\n File \"/usr/local/lib/python3.9/pdb.py\", line 357, in interaction\r\n self._cmdloop()\r\n File \"/usr/local/lib/python3.9/pdb.py\", line 322, in _cmdloop\r\n self.cmdloop()\r\n File \"/usr/local/lib/python3.9/cmd.py\", line 126, in cmdloop\r\n line = input(self.prompt)\r\nTypeError: an integer is required (got type NoneType)```\r\n\r\n\r\n\r\n### What you think should happen instead\r\n\r\nI should get the ipdb shell.\r\n\r\n### How to reproduce\r\n\r\n1. Add ipdb breakpoint anywhere in airflow task.\r\nimport ipdb; ipdb.set_trace()\r\n\r\n2. Run that task:\r\nRun `airflow tasks test <dag_name> <dag_id>`, and \r\n\r\n### Operating System\r\n\r\nDebian GNU/Linux\r\n\r\n### Versions of Apache Airflow Providers\r\n\r\n2.3.4\n", "before_files": [{"content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. 
See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\"\"\"Mask sensitive information from logs\"\"\"\nfrom __future__ import annotations\n\nimport collections\nimport logging\nimport re\nimport sys\nfrom typing import Any, Dict, Iterable, List, TextIO, Tuple, TypeVar, Union\n\nfrom airflow import settings\nfrom airflow.compat.functools import cache, cached_property\n\nRedactable = TypeVar(\"Redactable\", str, Dict[Any, Any], Tuple[Any, ...], List[Any])\nRedacted = Union[Redactable, str]\n\nlog = logging.getLogger(__name__)\n\nDEFAULT_SENSITIVE_FIELDS = frozenset(\n {\n 'access_token',\n 'api_key',\n 'apikey',\n 'authorization',\n 'passphrase',\n 'passwd',\n 'password',\n 'private_key',\n 'secret',\n 'token',\n 'keyfile_dict',\n 'service_account',\n }\n)\n\"\"\"Names of fields (Connection extra, Variable key name etc.) that are deemed sensitive\"\"\"\n\nSECRETS_TO_SKIP_MASKING_FOR_TESTS = {'airflow'}\n\n\n@cache\ndef get_sensitive_variables_fields():\n \"\"\"Get comma-separated sensitive Variable Fields from airflow.cfg.\"\"\"\n from airflow.configuration import conf\n\n sensitive_fields = DEFAULT_SENSITIVE_FIELDS.copy()\n sensitive_variable_fields = conf.get('core', 'sensitive_var_conn_names')\n if sensitive_variable_fields:\n sensitive_fields |= frozenset({field.strip() for field in sensitive_variable_fields.split(',')})\n return sensitive_fields\n\n\ndef should_hide_value_for_key(name):\n \"\"\"Should the value for this given name (Variable name, or key in conn.extra_dejson) be hidden\"\"\"\n from airflow import settings\n\n if isinstance(name, str) and settings.HIDE_SENSITIVE_VAR_CONN_FIELDS:\n name = name.strip().lower()\n return any(s in name for s in get_sensitive_variables_fields())\n return False\n\n\ndef mask_secret(secret: str | dict | Iterable, name: str | None = None) -> None:\n \"\"\"\n Mask a secret from appearing in the task logs.\n\n If ``name`` is provided, then it will only be masked if the name matches\n one of the configured \"sensitive\" names.\n\n If ``secret`` is a dict or a iterable (excluding str) then it will be\n recursively walked and keys with sensitive names will be hidden.\n \"\"\"\n # Filtering all log messages is not a free process, so we only do it when\n # running tasks\n if not secret:\n return\n\n _secrets_masker().add_mask(secret, name)\n\n\ndef redact(value: Redactable, name: str | None = None) -> Redacted:\n \"\"\"Redact any secrets found in ``value``.\"\"\"\n return _secrets_masker().redact(value, name)\n\n\n@cache\ndef _secrets_masker() -> SecretsMasker:\n for flt in logging.getLogger('airflow.task').filters:\n if isinstance(flt, SecretsMasker):\n return flt\n raise RuntimeError(\n \"Logging Configuration Error! No SecretsMasker found! If you have custom logging, please make \"\n \"sure you configure it taking airflow configuration as a base as explained at \"\n \"https://airflow.apache.org/docs/apache-airflow/stable/logging-monitoring/logging-tasks.html\"\n \"#advanced-configuration\"\n )\n\n\nclass SecretsMasker(logging.Filter):\n \"\"\"Redact secrets from logs\"\"\"\n\n replacer: re.Pattern | None = None\n patterns: set[str]\n\n ALREADY_FILTERED_FLAG = \"__SecretsMasker_filtered\"\n MAX_RECURSION_DEPTH = 5\n\n def __init__(self):\n super().__init__()\n self.patterns = set()\n\n @cached_property\n def _record_attrs_to_ignore(self) -> Iterable[str]:\n # Doing log.info(..., extra={'foo': 2}) sets extra properties on\n # record, i.e. record.foo. And we need to filter those too. 
Fun\n #\n # Create a record, and look at what attributes are on it, and ignore\n # all the default ones!\n\n record = logging.getLogRecordFactory()(\n # name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None,\n \"x\",\n logging.INFO,\n __file__,\n 1,\n \"\",\n tuple(),\n exc_info=None,\n func=\"funcname\",\n )\n return frozenset(record.__dict__).difference({'msg', 'args'})\n\n def _redact_exception_with_context(self, exception):\n # Exception class may not be modifiable (e.g. declared by an\n # extension module such as JDBC).\n try:\n exception.args = (self.redact(v) for v in exception.args)\n except AttributeError:\n pass\n if exception.__context__:\n self._redact_exception_with_context(exception.__context__)\n if exception.__cause__ and exception.__cause__ is not exception.__context__:\n self._redact_exception_with_context(exception.__cause__)\n\n def filter(self, record) -> bool:\n if settings.MASK_SECRETS_IN_LOGS is not True:\n return True\n\n if self.ALREADY_FILTERED_FLAG in record.__dict__:\n # Filters are attached to multiple handlers and logs, keep a\n # \"private\" flag that stops us needing to process it more than once\n return True\n\n if self.replacer:\n for k, v in record.__dict__.items():\n if k in self._record_attrs_to_ignore:\n continue\n record.__dict__[k] = self.redact(v)\n if record.exc_info and record.exc_info[1] is not None:\n exc = record.exc_info[1]\n self._redact_exception_with_context(exc)\n record.__dict__[self.ALREADY_FILTERED_FLAG] = True\n\n return True\n\n def _redact_all(self, item: Redactable, depth: int) -> Redacted:\n if depth > self.MAX_RECURSION_DEPTH or isinstance(item, str):\n return '***'\n if isinstance(item, dict):\n return {dict_key: self._redact_all(subval, depth + 1) for dict_key, subval in item.items()}\n elif isinstance(item, (tuple, set)):\n # Turn set in to tuple!\n return tuple(self._redact_all(subval, depth + 1) for subval in item)\n elif isinstance(item, list):\n return list(self._redact_all(subval, depth + 1) for subval in item)\n else:\n return item\n\n def _redact(self, item: Redactable, name: str | None, depth: int) -> Redacted:\n # Avoid spending too much effort on redacting on deeply nested\n # structures. This also avoid infinite recursion if a structure has\n # reference to self.\n if depth > self.MAX_RECURSION_DEPTH:\n return item\n try:\n if name and should_hide_value_for_key(name):\n return self._redact_all(item, depth)\n if isinstance(item, dict):\n return {\n dict_key: self._redact(subval, name=dict_key, depth=(depth + 1))\n for dict_key, subval in item.items()\n }\n elif isinstance(item, str):\n if self.replacer:\n # We can't replace specific values, but the key-based redacting\n # can still happen, so we can't short-circuit, we need to walk\n # the structure.\n return self.replacer.sub('***', item)\n return item\n elif isinstance(item, (tuple, set)):\n # Turn set in to tuple!\n return tuple(self._redact(subval, name=None, depth=(depth + 1)) for subval in item)\n elif isinstance(item, list):\n return [self._redact(subval, name=None, depth=(depth + 1)) for subval in item]\n else:\n return item\n # I think this should never happen, but it does not hurt to leave it just in case\n # Well. It happened (see https://github.com/apache/airflow/issues/19816#issuecomment-983311373)\n # but it caused infinite recursion, so we need to cast it to str first.\n except Exception as e:\n log.warning(\n \"Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>. 
\"\n \"Error was: %s: %s\",\n repr(item),\n type(e).__name__,\n str(e),\n )\n return item\n\n def redact(self, item: Redactable, name: str | None = None) -> Redacted:\n \"\"\"Redact an any secrets found in ``item``, if it is a string.\n\n If ``name`` is given, and it's a \"sensitive\" name (see\n :func:`should_hide_value_for_key`) then all string values in the item\n is redacted.\n \"\"\"\n return self._redact(item, name, depth=0)\n\n def add_mask(self, secret: str | dict | Iterable, name: str | None = None):\n \"\"\"Add a new secret to be masked to this filter instance.\"\"\"\n from airflow.configuration import conf\n\n test_mode: bool = conf.getboolean('core', 'unit_test_mode')\n if isinstance(secret, dict):\n for k, v in secret.items():\n self.add_mask(v, k)\n elif isinstance(secret, str):\n if not secret or (test_mode and secret in SECRETS_TO_SKIP_MASKING_FOR_TESTS):\n return\n pattern = re.escape(secret)\n if pattern not in self.patterns and (not name or should_hide_value_for_key(name)):\n self.patterns.add(pattern)\n self.replacer = re.compile('|'.join(self.patterns))\n elif isinstance(secret, collections.abc.Iterable):\n for v in secret:\n self.add_mask(v, name)\n\n\nclass RedactedIO(TextIO):\n \"\"\"IO class that redacts values going into stdout.\n\n Expected usage::\n\n with contextlib.redirect_stdout(RedactedIO()):\n ... # Writes to stdout will be redacted.\n \"\"\"\n\n def __init__(self):\n self.target = sys.stdout\n\n def write(self, s: str) -> int:\n s = redact(s)\n return self.target.write(s)\n\n def flush(self) -> None:\n return self.target.flush()\n", "path": "airflow/utils/log/secrets_masker.py"}], "after_files": [{"content": "# Licensed to the Apache Software Foundation (ASF) under one\n# or more contributor license agreements. See the NOTICE file\n# distributed with this work for additional information\n# regarding copyright ownership. The ASF licenses this file\n# to you under the Apache License, Version 2.0 (the\n# \"License\"); you may not use this file except in compliance\n# with the License. You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing,\n# software distributed under the License is distributed on an\n# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n# KIND, either express or implied. See the License for the\n# specific language governing permissions and limitations\n# under the License.\n\"\"\"Mask sensitive information from logs\"\"\"\nfrom __future__ import annotations\n\nimport collections\nimport logging\nimport re\nimport sys\nfrom typing import Any, Dict, Iterable, List, TextIO, Tuple, TypeVar, Union\n\nfrom airflow import settings\nfrom airflow.compat.functools import cache, cached_property\n\nRedactable = TypeVar(\"Redactable\", str, Dict[Any, Any], Tuple[Any, ...], List[Any])\nRedacted = Union[Redactable, str]\n\nlog = logging.getLogger(__name__)\n\nDEFAULT_SENSITIVE_FIELDS = frozenset(\n {\n 'access_token',\n 'api_key',\n 'apikey',\n 'authorization',\n 'passphrase',\n 'passwd',\n 'password',\n 'private_key',\n 'secret',\n 'token',\n 'keyfile_dict',\n 'service_account',\n }\n)\n\"\"\"Names of fields (Connection extra, Variable key name etc.) 
that are deemed sensitive\"\"\"\n\nSECRETS_TO_SKIP_MASKING_FOR_TESTS = {'airflow'}\n\n\n@cache\ndef get_sensitive_variables_fields():\n \"\"\"Get comma-separated sensitive Variable Fields from airflow.cfg.\"\"\"\n from airflow.configuration import conf\n\n sensitive_fields = DEFAULT_SENSITIVE_FIELDS.copy()\n sensitive_variable_fields = conf.get('core', 'sensitive_var_conn_names')\n if sensitive_variable_fields:\n sensitive_fields |= frozenset({field.strip() for field in sensitive_variable_fields.split(',')})\n return sensitive_fields\n\n\ndef should_hide_value_for_key(name):\n \"\"\"Should the value for this given name (Variable name, or key in conn.extra_dejson) be hidden\"\"\"\n from airflow import settings\n\n if isinstance(name, str) and settings.HIDE_SENSITIVE_VAR_CONN_FIELDS:\n name = name.strip().lower()\n return any(s in name for s in get_sensitive_variables_fields())\n return False\n\n\ndef mask_secret(secret: str | dict | Iterable, name: str | None = None) -> None:\n \"\"\"\n Mask a secret from appearing in the task logs.\n\n If ``name`` is provided, then it will only be masked if the name matches\n one of the configured \"sensitive\" names.\n\n If ``secret`` is a dict or a iterable (excluding str) then it will be\n recursively walked and keys with sensitive names will be hidden.\n \"\"\"\n # Filtering all log messages is not a free process, so we only do it when\n # running tasks\n if not secret:\n return\n\n _secrets_masker().add_mask(secret, name)\n\n\ndef redact(value: Redactable, name: str | None = None) -> Redacted:\n \"\"\"Redact any secrets found in ``value``.\"\"\"\n return _secrets_masker().redact(value, name)\n\n\n@cache\ndef _secrets_masker() -> SecretsMasker:\n for flt in logging.getLogger('airflow.task').filters:\n if isinstance(flt, SecretsMasker):\n return flt\n raise RuntimeError(\n \"Logging Configuration Error! No SecretsMasker found! If you have custom logging, please make \"\n \"sure you configure it taking airflow configuration as a base as explained at \"\n \"https://airflow.apache.org/docs/apache-airflow/stable/logging-monitoring/logging-tasks.html\"\n \"#advanced-configuration\"\n )\n\n\nclass SecretsMasker(logging.Filter):\n \"\"\"Redact secrets from logs\"\"\"\n\n replacer: re.Pattern | None = None\n patterns: set[str]\n\n ALREADY_FILTERED_FLAG = \"__SecretsMasker_filtered\"\n MAX_RECURSION_DEPTH = 5\n\n def __init__(self):\n super().__init__()\n self.patterns = set()\n\n @cached_property\n def _record_attrs_to_ignore(self) -> Iterable[str]:\n # Doing log.info(..., extra={'foo': 2}) sets extra properties on\n # record, i.e. record.foo. And we need to filter those too. Fun\n #\n # Create a record, and look at what attributes are on it, and ignore\n # all the default ones!\n\n record = logging.getLogRecordFactory()(\n # name, level, pathname, lineno, msg, args, exc_info, func=None, sinfo=None,\n \"x\",\n logging.INFO,\n __file__,\n 1,\n \"\",\n tuple(),\n exc_info=None,\n func=\"funcname\",\n )\n return frozenset(record.__dict__).difference({'msg', 'args'})\n\n def _redact_exception_with_context(self, exception):\n # Exception class may not be modifiable (e.g. 
declared by an\n # extension module such as JDBC).\n try:\n exception.args = (self.redact(v) for v in exception.args)\n except AttributeError:\n pass\n if exception.__context__:\n self._redact_exception_with_context(exception.__context__)\n if exception.__cause__ and exception.__cause__ is not exception.__context__:\n self._redact_exception_with_context(exception.__cause__)\n\n def filter(self, record) -> bool:\n if settings.MASK_SECRETS_IN_LOGS is not True:\n return True\n\n if self.ALREADY_FILTERED_FLAG in record.__dict__:\n # Filters are attached to multiple handlers and logs, keep a\n # \"private\" flag that stops us needing to process it more than once\n return True\n\n if self.replacer:\n for k, v in record.__dict__.items():\n if k in self._record_attrs_to_ignore:\n continue\n record.__dict__[k] = self.redact(v)\n if record.exc_info and record.exc_info[1] is not None:\n exc = record.exc_info[1]\n self._redact_exception_with_context(exc)\n record.__dict__[self.ALREADY_FILTERED_FLAG] = True\n\n return True\n\n def _redact_all(self, item: Redactable, depth: int) -> Redacted:\n if depth > self.MAX_RECURSION_DEPTH or isinstance(item, str):\n return '***'\n if isinstance(item, dict):\n return {dict_key: self._redact_all(subval, depth + 1) for dict_key, subval in item.items()}\n elif isinstance(item, (tuple, set)):\n # Turn set in to tuple!\n return tuple(self._redact_all(subval, depth + 1) for subval in item)\n elif isinstance(item, list):\n return list(self._redact_all(subval, depth + 1) for subval in item)\n else:\n return item\n\n def _redact(self, item: Redactable, name: str | None, depth: int) -> Redacted:\n # Avoid spending too much effort on redacting on deeply nested\n # structures. This also avoid infinite recursion if a structure has\n # reference to self.\n if depth > self.MAX_RECURSION_DEPTH:\n return item\n try:\n if name and should_hide_value_for_key(name):\n return self._redact_all(item, depth)\n if isinstance(item, dict):\n return {\n dict_key: self._redact(subval, name=dict_key, depth=(depth + 1))\n for dict_key, subval in item.items()\n }\n elif isinstance(item, str):\n if self.replacer:\n # We can't replace specific values, but the key-based redacting\n # can still happen, so we can't short-circuit, we need to walk\n # the structure.\n return self.replacer.sub('***', item)\n return item\n elif isinstance(item, (tuple, set)):\n # Turn set in to tuple!\n return tuple(self._redact(subval, name=None, depth=(depth + 1)) for subval in item)\n elif isinstance(item, list):\n return [self._redact(subval, name=None, depth=(depth + 1)) for subval in item]\n else:\n return item\n # I think this should never happen, but it does not hurt to leave it just in case\n # Well. It happened (see https://github.com/apache/airflow/issues/19816#issuecomment-983311373)\n # but it caused infinite recursion, so we need to cast it to str first.\n except Exception as e:\n log.warning(\n \"Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>. 
\"\n \"Error was: %s: %s\",\n repr(item),\n type(e).__name__,\n str(e),\n )\n return item\n\n def redact(self, item: Redactable, name: str | None = None) -> Redacted:\n \"\"\"Redact an any secrets found in ``item``, if it is a string.\n\n If ``name`` is given, and it's a \"sensitive\" name (see\n :func:`should_hide_value_for_key`) then all string values in the item\n is redacted.\n \"\"\"\n return self._redact(item, name, depth=0)\n\n def add_mask(self, secret: str | dict | Iterable, name: str | None = None):\n \"\"\"Add a new secret to be masked to this filter instance.\"\"\"\n from airflow.configuration import conf\n\n test_mode: bool = conf.getboolean('core', 'unit_test_mode')\n if isinstance(secret, dict):\n for k, v in secret.items():\n self.add_mask(v, k)\n elif isinstance(secret, str):\n if not secret or (test_mode and secret in SECRETS_TO_SKIP_MASKING_FOR_TESTS):\n return\n pattern = re.escape(secret)\n if pattern not in self.patterns and (not name or should_hide_value_for_key(name)):\n self.patterns.add(pattern)\n self.replacer = re.compile('|'.join(self.patterns))\n elif isinstance(secret, collections.abc.Iterable):\n for v in secret:\n self.add_mask(v, name)\n\n\nclass RedactedIO(TextIO):\n \"\"\"IO class that redacts values going into stdout.\n\n Expected usage::\n\n with contextlib.redirect_stdout(RedactedIO()):\n ... # Writes to stdout will be redacted.\n \"\"\"\n\n def __init__(self):\n self.target = sys.stdout\n self.fileno = sys.stdout.fileno\n\n def write(self, s: str) -> int:\n s = redact(s)\n return self.target.write(s)\n\n def flush(self) -> None:\n return self.target.flush()\n", "path": "airflow/utils/log/secrets_masker.py"}]} |
gh_patches_debug_187 | rasdani/github-patches | git_diff | secdev__scapy-2046 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: wrong base class usage in radius.
the following file should be corrected:
scapy/layers/radius.py
`
class _RadiusAttrIPv4AddrVal(RadiusAttribute):
.....
`
should be:
`
class _RadiusAttrIPv4AddrVal(_SpecificRadiusAttr):
.....
`
without this change, the 'type' IE will always be 4.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `scapy/layers/radius.py`
Content:
```
1 # This file is part of Scapy
2 # See http://www.secdev.org/projects/scapy for more information
3 # Copyright (C) Philippe Biondi <[email protected]>
4 # Vincent Mauge <[email protected]>
5 # This program is published under a GPLv2 license
6
7 """
8 RADIUS (Remote Authentication Dial In User Service)
9 """
10
11 import struct
12 import hashlib
13 import hmac
14 from scapy.compat import orb, raw
15 from scapy.packet import Packet, Padding, bind_layers
16 from scapy.fields import ByteField, ByteEnumField, IntField, StrLenField,\
17 XStrLenField, XStrFixedLenField, FieldLenField, PacketField,\
18 PacketListField, IPField, MultiEnumField
19 from scapy.layers.inet import UDP
20 from scapy.layers.eap import EAP
21 from scapy.utils import issubtype
22 from scapy.config import conf
23 from scapy.error import Scapy_Exception
24
25
26 # https://www.iana.org/assignments/radius-types/radius-types.xhtml
27 _radius_attribute_types = {
28 1: "User-Name",
29 2: "User-Password",
30 3: "CHAP-Password",
31 4: "NAS-IP-Address",
32 5: "NAS-Port",
33 6: "Service-Type",
34 7: "Framed-Protocol",
35 8: "Framed-IP-Address",
36 9: "Framed-IP-Netmask",
37 10: "Framed-Routing",
38 11: "Filter-Id",
39 12: "Framed-MTU",
40 13: "Framed-Compression",
41 14: "Login-IP-Host",
42 15: "Login-Service",
43 16: "Login-TCP-Port",
44 17: "Unassigned",
45 18: "Reply-Message",
46 19: "Callback-Number",
47 20: "Callback-Id",
48 21: "Unassigned",
49 22: "Framed-Route",
50 23: "Framed-IPX-Network",
51 24: "State",
52 25: "Class",
53 26: "Vendor-Specific",
54 27: "Session-Timeout",
55 28: "Idle-Timeout",
56 29: "Termination-Action",
57 30: "Called-Station-Id",
58 31: "Calling-Station-Id",
59 32: "NAS-Identifier",
60 33: "Proxy-State",
61 34: "Login-LAT-Service",
62 35: "Login-LAT-Node",
63 36: "Login-LAT-Group",
64 37: "Framed-AppleTalk-Link",
65 38: "Framed-AppleTalk-Network",
66 39: "Framed-AppleTalk-Zone",
67 40: "Acct-Status-Type",
68 41: "Acct-Delay-Time",
69 42: "Acct-Input-Octets",
70 43: "Acct-Output-Octets",
71 44: "Acct-Session-Id",
72 45: "Acct-Authentic",
73 46: "Acct-Session-Time",
74 47: "Acct-Input-Packets",
75 48: "Acct-Output-Packets",
76 49: "Acct-Terminate-Cause",
77 50: "Acct-Multi-Session-Id",
78 51: "Acct-Link-Count",
79 52: "Acct-Input-Gigawords",
80 53: "Acct-Output-Gigawords",
81 54: "Unassigned",
82 55: "Event-Timestamp",
83 56: "Egress-VLANID",
84 57: "Ingress-Filters",
85 58: "Egress-VLAN-Name",
86 59: "User-Priority-Table",
87 60: "CHAP-Challenge",
88 61: "NAS-Port-Type",
89 62: "Port-Limit",
90 63: "Login-LAT-Port",
91 64: "Tunnel-Type",
92 65: "Tunnel-Medium-Type",
93 66: "Tunnel-Client-Endpoint",
94 67: "Tunnel-Server-Endpoint",
95 68: "Acct-Tunnel-Connection",
96 69: "Tunnel-Password",
97 70: "ARAP-Password",
98 71: "ARAP-Features",
99 72: "ARAP-Zone-Access",
100 73: "ARAP-Security",
101 74: "ARAP-Security-Data",
102 75: "Password-Retry",
103 76: "Prompt",
104 77: "Connect-Info",
105 78: "Configuration-Token",
106 79: "EAP-Message",
107 80: "Message-Authenticator",
108 81: "Tunnel-Private-Group-ID",
109 82: "Tunnel-Assignment-ID",
110 83: "Tunnel-Preference",
111 84: "ARAP-Challenge-Response",
112 85: "Acct-Interim-Interval",
113 86: "Acct-Tunnel-Packets-Lost",
114 87: "NAS-Port-Id",
115 88: "Framed-Pool",
116 89: "CUI",
117 90: "Tunnel-Client-Auth-ID",
118 91: "Tunnel-Server-Auth-ID",
119 92: "NAS-Filter-Rule",
120 93: "Unassigned",
121 94: "Originating-Line-Info",
122 95: "NAS-IPv6-Address",
123 96: "Framed-Interface-Id",
124 97: "Framed-IPv6-Prefix",
125 98: "Login-IPv6-Host",
126 99: "Framed-IPv6-Route",
127 100: "Framed-IPv6-Pool",
128 101: "Error-Cause",
129 102: "EAP-Key-Name",
130 103: "Digest-Response",
131 104: "Digest-Realm",
132 105: "Digest-Nonce",
133 106: "Digest-Response-Auth",
134 107: "Digest-Nextnonce",
135 108: "Digest-Method",
136 109: "Digest-URI",
137 110: "Digest-Qop",
138 111: "Digest-Algorithm",
139 112: "Digest-Entity-Body-Hash",
140 113: "Digest-CNonce",
141 114: "Digest-Nonce-Count",
142 115: "Digest-Username",
143 116: "Digest-Opaque",
144 117: "Digest-Auth-Param",
145 118: "Digest-AKA-Auts",
146 119: "Digest-Domain",
147 120: "Digest-Stale",
148 121: "Digest-HA1",
149 122: "SIP-AOR",
150 123: "Delegated-IPv6-Prefix",
151 124: "MIP6-Feature-Vector",
152 125: "MIP6-Home-Link-Prefix",
153 126: "Operator-Name",
154 127: "Location-Information",
155 128: "Location-Data",
156 129: "Basic-Location-Policy-Rules",
157 130: "Extended-Location-Policy-Rules",
158 131: "Location-Capable",
159 132: "Requested-Location-Info",
160 133: "Framed-Management-Protocol",
161 134: "Management-Transport-Protection",
162 135: "Management-Policy-Id",
163 136: "Management-Privilege-Level",
164 137: "PKM-SS-Cert",
165 138: "PKM-CA-Cert",
166 139: "PKM-Config-Settings",
167 140: "PKM-Cryptosuite-List",
168 141: "PKM-SAID",
169 142: "PKM-SA-Descriptor",
170 143: "PKM-Auth-Key",
171 144: "DS-Lite-Tunnel-Name",
172 145: "Mobile-Node-Identifier",
173 146: "Service-Selection",
174 147: "PMIP6-Home-LMA-IPv6-Address",
175 148: "PMIP6-Visited-LMA-IPv6-Address",
176 149: "PMIP6-Home-LMA-IPv4-Address",
177 150: "PMIP6-Visited-LMA-IPv4-Address",
178 151: "PMIP6-Home-HN-Prefix",
179 152: "PMIP6-Visited-HN-Prefix",
180 153: "PMIP6-Home-Interface-ID",
181 154: "PMIP6-Visited-Interface-ID",
182 155: "PMIP6-Home-IPv4-HoA",
183 156: "PMIP6-Visited-IPv4-HoA",
184 157: "PMIP6-Home-DHCP4-Server-Address",
185 158: "PMIP6-Visited-DHCP4-Server-Address",
186 159: "PMIP6-Home-DHCP6-Server-Address",
187 160: "PMIP6-Visited-DHCP6-Server-Address",
188 161: "PMIP6-Home-IPv4-Gateway",
189 162: "PMIP6-Visited-IPv4-Gateway",
190 163: "EAP-Lower-Layer",
191 164: "GSS-Acceptor-Service-Name",
192 165: "GSS-Acceptor-Host-Name",
193 166: "GSS-Acceptor-Service-Specifics",
194 167: "GSS-Acceptor-Realm-Name",
195 168: "Framed-IPv6-Address",
196 169: "DNS-Server-IPv6-Address",
197 170: "Route-IPv6-Information",
198 171: "Delegated-IPv6-Prefix-Pool",
199 172: "Stateful-IPv6-Address-Pool",
200 173: "IPv6-6rd-Configuration",
201 174: "Allowed-Called-Station-Id",
202 175: "EAP-Peer-Id",
203 176: "EAP-Server-Id",
204 177: "Mobility-Domain-Id",
205 178: "Preauth-Timeout",
206 179: "Network-Id-Name",
207 180: "EAPoL-Announcement",
208 181: "WLAN-HESSID",
209 182: "WLAN-Venue-Info",
210 183: "WLAN-Venue-Language",
211 184: "WLAN-Venue-Name",
212 185: "WLAN-Reason-Code",
213 186: "WLAN-Pairwise-Cipher",
214 187: "WLAN-Group-Cipher",
215 188: "WLAN-AKM-Suite",
216 189: "WLAN-Group-Mgmt-Cipher",
217 190: "WLAN-RF-Band",
218 191: "Unassigned",
219 }
220
221
222 class RadiusAttribute(Packet):
223 """
224 Implements a RADIUS attribute (RFC 2865). Every specific RADIUS attribute
225 class should inherit from this one.
226 """
227
228 name = "Radius Attribute"
229 fields_desc = [
230 ByteEnumField("type", 1, _radius_attribute_types),
231 FieldLenField("len", None, "value", "B",
232 adjust=lambda pkt, x: len(pkt.value) + 2),
233 StrLenField("value", "", length_from=lambda pkt: pkt.len - 2)
234 ]
235
236 registered_attributes = {}
237
238 @classmethod
239 def register_variant(cls):
240 """
241 Registers the RADIUS attributes defined in this module.
242 """
243
244 if hasattr(cls, "val"):
245 cls.registered_attributes[cls.val] = cls
246 else:
247 cls.registered_attributes[cls.type.default] = cls
248
249 @classmethod
250 def dispatch_hook(cls, _pkt=None, *args, **kargs):
251 """
252 Returns the right RadiusAttribute class for the given data.
253 """
254
255 if _pkt:
256 attr_type = orb(_pkt[0])
257 return cls.registered_attributes.get(attr_type, cls)
258 return cls
259
260 def haslayer(self, cls):
261 if cls == "RadiusAttribute":
262 if isinstance(self, RadiusAttribute):
263 return True
264 elif issubtype(cls, RadiusAttribute):
265 if isinstance(self, cls):
266 return True
267 return super(RadiusAttribute, self).haslayer(cls)
268
269 def getlayer(self, cls, nb=1, _track=None, _subclass=True, **flt):
270 return super(RadiusAttribute, self).getlayer(cls, nb=nb, _track=_track,
271 _subclass=True, **flt)
272
273 def post_build(self, p, pay):
274 length = self.len
275 if length is None:
276 length = len(p)
277 p = p[:1] + struct.pack("!B", length) + p[2:]
278 return p
279
280 def guess_payload_class(self, _):
281 return Padding
282
283
284 class _SpecificRadiusAttr(RadiusAttribute):
285 """
286 Class from which every "specific" RADIUS attribute defined in this module
287 inherits.
288 """
289
290 __slots__ = ["val"]
291
292 def __init__(self, _pkt="", post_transform=None, _internal=0, _underlayer=None, **fields): # noqa: E501
293 super(_SpecificRadiusAttr, self).__init__(
294 _pkt,
295 post_transform,
296 _internal,
297 _underlayer
298 )
299 self.fields["type"] = self.val
300 name_parts = self.__class__.__name__.split('RadiusAttr_')
301 if len(name_parts) < 2:
302 raise Scapy_Exception(
303 "Invalid class name: {}".format(self.__class__.__name__)
304 )
305 self.name = name_parts[1].replace('_', '-')
306
307
308 #
309 # RADIUS attributes which values are 4 bytes integers
310 #
311
312 class _RadiusAttrIntValue(_SpecificRadiusAttr):
313 """
314 Implements a RADIUS attribute which value field is 4 bytes long integer.
315 """
316
317 fields_desc = [
318 ByteEnumField("type", 5, _radius_attribute_types),
319 ByteField("len", 6),
320 IntField("value", 0)
321 ]
322
323
324 class RadiusAttr_NAS_Port(_RadiusAttrIntValue):
325 """RFC 2865"""
326 val = 5
327
328
329 class RadiusAttr_Framed_MTU(_RadiusAttrIntValue):
330 """RFC 2865"""
331 val = 12
332
333
334 class RadiusAttr_Login_TCP_Port(_RadiusAttrIntValue):
335 """RFC 2865"""
336 val = 16
337
338
339 class RadiusAttr_Session_Timeout(_RadiusAttrIntValue):
340 """RFC 2865"""
341 val = 27
342
343
344 class RadiusAttr_Idle_Timeout(_RadiusAttrIntValue):
345 """RFC 2865"""
346 val = 28
347
348
349 class RadiusAttr_Framed_AppleTalk_Link(_RadiusAttrIntValue):
350 """RFC 2865"""
351 val = 37
352
353
354 class RadiusAttr_Framed_AppleTalk_Network(_RadiusAttrIntValue):
355 """RFC 2865"""
356 val = 38
357
358
359 class RadiusAttr_Acct_Delay_Time(_RadiusAttrIntValue):
360 """RFC 2866"""
361 val = 41
362
363
364 class RadiusAttr_Acct_Input_Octets(_RadiusAttrIntValue):
365 """RFC 2866"""
366 val = 42
367
368
369 class RadiusAttr_Acct_Output_Octets(_RadiusAttrIntValue):
370 """RFC 2866"""
371 val = 43
372
373
374 class RadiusAttr_Acct_Session_Time(_RadiusAttrIntValue):
375 """RFC 2866"""
376 val = 46
377
378
379 class RadiusAttr_Acct_Input_Packets(_RadiusAttrIntValue):
380 """RFC 2866"""
381 val = 47
382
383
384 class RadiusAttr_Acct_Output_Packets(_RadiusAttrIntValue):
385 """RFC 2866"""
386 val = 48
387
388
389 class RadiusAttr_Acct_Link_Count(_RadiusAttrIntValue):
390 """RFC 2866"""
391 val = 51
392
393
394 class RadiusAttr_Acct_Input_Gigawords(_RadiusAttrIntValue):
395 """RFC 2869"""
396 val = 52
397
398
399 class RadiusAttr_Acct_Output_Gigawords(_RadiusAttrIntValue):
400 """RFC 2869"""
401 val = 53
402
403
404 class RadiusAttr_Egress_VLANID(_RadiusAttrIntValue):
405 """RFC 4675"""
406 val = 56
407
408
409 class RadiusAttr_Port_Limit(_RadiusAttrIntValue):
410 """RFC 2865"""
411 val = 62
412
413
414 class RadiusAttr_ARAP_Security(_RadiusAttrIntValue):
415 """RFC 2869"""
416 val = 73
417
418
419 class RadiusAttr_Password_Retry(_RadiusAttrIntValue):
420 """RFC 2869"""
421 val = 75
422
423
424 class RadiusAttr_Tunnel_Preference(_RadiusAttrIntValue):
425 """RFC 2868"""
426 val = 83
427
428
429 class RadiusAttr_Acct_Interim_Interval(_RadiusAttrIntValue):
430 """RFC 2869"""
431 val = 85
432
433
434 class RadiusAttr_Acct_Tunnel_Packets_Lost(_RadiusAttrIntValue):
435 """RFC 2867"""
436 val = 86
437
438
439 class RadiusAttr_Management_Privilege_Level(_RadiusAttrIntValue):
440 """RFC 5607"""
441 val = 136
442
443
444 class RadiusAttr_Mobility_Domain_Id(_RadiusAttrIntValue):
445 """RFC 7268"""
446 val = 177
447
448
449 class RadiusAttr_Preauth_Timeout(_RadiusAttrIntValue):
450 """RFC 7268"""
451 val = 178
452
453
454 class RadiusAttr_WLAN_Venue_Info(_RadiusAttrIntValue):
455 """RFC 7268"""
456 val = 182
457
458
459 class RadiusAttr_WLAN_Reason_Code(_RadiusAttrIntValue):
460 """RFC 7268"""
461 val = 185
462
463
464 class RadiusAttr_WLAN_Pairwise_Cipher(_RadiusAttrIntValue):
465 """RFC 7268"""
466 val = 186
467
468
469 class RadiusAttr_WLAN_Group_Cipher(_RadiusAttrIntValue):
470 """RFC 7268"""
471 val = 187
472
473
474 class RadiusAttr_WLAN_AKM_Suite(_RadiusAttrIntValue):
475 """RFC 7268"""
476 val = 188
477
478
479 class RadiusAttr_WLAN_Group_Mgmt_Cipher(_RadiusAttrIntValue):
480 """RFC 7268"""
481 val = 189
482
483
484 class RadiusAttr_WLAN_RF_Band(_RadiusAttrIntValue):
485 """RFC 7268"""
486 val = 190
487
488
489 #
490 # RADIUS attributes which values are string (displayed as hex)
491 #
492
493 class _RadiusAttrHexStringVal(_SpecificRadiusAttr):
494 """
495 Implements a RADIUS attribute which value field is a string that will be
496 as a hex string.
497 """
498
499 __slots__ = ["val"]
500
501 def __init__(self, _pkt="", post_transform=None, _internal=0, _underlayer=None, **fields): # noqa: E501
502 super(_RadiusAttrHexStringVal, self).__init__(
503 _pkt,
504 post_transform,
505 _internal,
506 _underlayer
507 )
508 self.fields["type"] = self.val
509 name_parts = self.__class__.__name__.split('RadiusAttr_')
510 if len(name_parts) < 2:
511 raise Scapy_Exception(
512 "Invalid class name: {}".format(self.__class__.__name__)
513 )
514 self.name = name_parts[1].replace('_', '-')
515
516 fields_desc = [
517 ByteEnumField("type", 24, _radius_attribute_types),
518 FieldLenField(
519 "len",
520 None,
521 "value",
522 "B",
523 adjust=lambda p, x: len(p.value) + 2
524 ),
525 XStrLenField("value", "", length_from=lambda p: p.len - 2 if p.len else 0) # noqa: E501
526 ]
527
528
529 class RadiusAttr_State(_RadiusAttrHexStringVal):
530 """RFC 2865"""
531 val = 24
532
533
534 def prepare_packed_data(radius_packet, packed_req_authenticator):
535 """
536 Pack RADIUS data prior computing the authentication MAC
537 """
538
539 packed_hdr = struct.pack("!B", radius_packet.code)
540 packed_hdr += struct.pack("!B", radius_packet.id)
541 packed_hdr += struct.pack("!H", radius_packet.len)
542
543 packed_attrs = b''
544 for attr in radius_packet.attributes:
545 packed_attrs += raw(attr)
546
547 return packed_hdr + packed_req_authenticator + packed_attrs
548
549
550 class RadiusAttr_Message_Authenticator(_RadiusAttrHexStringVal):
551 """RFC 2869"""
552 val = 80
553
554 fields_desc = [
555 ByteEnumField("type", 24, _radius_attribute_types),
556 FieldLenField(
557 "len",
558 18,
559 "value",
560 "B",
561 ),
562 XStrFixedLenField("value", "\x00" * 16, length=16)
563 ]
564
565 @staticmethod
566 def compute_message_authenticator(radius_packet, packed_req_authenticator,
567 shared_secret):
568 """
569 Computes the "Message-Authenticator" of a given RADIUS packet.
570 """
571
572 data = prepare_packed_data(radius_packet, packed_req_authenticator)
573 radius_hmac = hmac.new(shared_secret, data, hashlib.md5)
574
575 return radius_hmac.digest()
576
577 #
578 # RADIUS attributes which values are IPv4 prefixes
579 #
580
581
582 class _RadiusAttrIPv4AddrVal(RadiusAttribute):
583 """
584 Implements a RADIUS attribute which value field is an IPv4 address.
585 """
586
587 __slots__ = ["val"]
588
589 fields_desc = [
590 ByteEnumField("type", 4, _radius_attribute_types),
591 ByteField("len", 6),
592 IPField("value", "0.0.0.0")
593 ]
594
595
596 class RadiusAttr_NAS_IP_Address(_RadiusAttrIPv4AddrVal):
597 """RFC 2865"""
598 val = 4
599
600
601 class RadiusAttr_Framed_IP_Address(_RadiusAttrIPv4AddrVal):
602 """RFC 2865"""
603 val = 8
604
605
606 class RadiusAttr_Framed_IP_Netmask(_RadiusAttrIPv4AddrVal):
607 """RFC 2865"""
608 val = 9
609
610
611 class RadiusAttr_Login_IP_Host(_RadiusAttrIPv4AddrVal):
612 """RFC 2865"""
613 val = 14
614
615
616 class RadiusAttr_Framed_IPX_Network(_RadiusAttrIPv4AddrVal):
617 """RFC 2865"""
618 val = 23
619
620
621 class RadiusAttr_PMIP6_Home_LMA_IPv4_Address(_RadiusAttrIPv4AddrVal):
622 """RFC 6572"""
623 val = 149
624
625
626 class RadiusAttr_PMIP6_Visited_LMA_IPv4_Address(_RadiusAttrIPv4AddrVal):
627 """RFC 6572"""
628 val = 150
629
630
631 class RadiusAttr_PMIP6_Home_DHCP4_Server_Address(_RadiusAttrIPv4AddrVal):
632 """RFC 6572"""
633 val = 157
634
635
636 class RadiusAttr_PMIP6_Visited_DHCP4_Server_Address(_RadiusAttrIPv4AddrVal):
637 """RFC 6572"""
638 val = 158
639
640
641 class RadiusAttr_PMIP6_Home_IPv4_Gateway(_RadiusAttrIPv4AddrVal):
642 """RFC 6572"""
643 val = 161
644
645
646 class RadiusAttr_PMIP6_Visited_IPv4_Gateway(_RadiusAttrIPv4AddrVal):
647 """RFC 6572"""
648 val = 162
649
650
651 # See IANA registry "RADIUS Types"
652 _radius_attrs_values = {
653 # Service-Type
654 6:
655 {
656 1: "Login",
657 2: "Framed",
658 3: "Callback Login",
659 4: "Callback Framed",
660 5: "Outbound",
661 6: "Administrative",
662 7: "NAS Prompt",
663 8: "Authenticate Only",
664 9: "Callback NAS Prompt",
665 10: "Call Check",
666 11: "Callback Administrative",
667 12: "Voice",
668 13: "Fax",
669 14: "Modem Relay",
670 15: "IAPP-Register",
671 16: "IAPP-AP-Check",
672 17: "Authorize Only",
673 18: "Framed-Management",
674 19: "Additional-Authorization"
675 },
676
677 # Framed-Protocol
678 7:
679 {
680 1: "PPP",
681 2: "SLIP",
682 3: "AppleTalk Remote Access Protocol (ARAP)",
683 4: "Gandalf proprietary SingleLink/MultiLink protocol",
684 5: "Xylogics proprietary IPX/SLIP",
685 6: "X.75 Synchronous",
686 7: "GPRS PDP Context"
687 },
688
689 # Framed-Routing
690 10:
691 {
692 0: "None",
693 1: "Send routing packets",
694 2: "Listen for routing packets",
695 3: "Send and Listen"
696 },
697
698 # Framed-Compression
699 13:
700 {
701 0: "None",
702 1: "VJ TCP/IP header compression",
703 2: "IPX header compression",
704 3: "Stac-LZS compression"
705 },
706
707 # Login-Service
708 15:
709 {
710 0: "Telnet",
711 1: "Rlogin",
712 2: "TCP Clear",
713 3: "PortMaster (proprietary)",
714 4: "LAT",
715 5: "X25-PAD",
716 6: "X25-T3POS",
717 7: "Unassigned",
718 8: "TCP Clear Quiet (suppresses any NAS-generated connect string)"
719 },
720
721 # Termination-Action
722 29:
723 {
724 0: "Default",
725 1: "RADIUS-Request"
726 },
727
728 # Acct-Status-Type
729 40:
730 {
731 1: "Start",
732 2: "Stop",
733 3: "Interim-Update",
734 4: "Unassigned",
735 5: "Unassigned",
736 6: "Unassigned",
737 7: "Accounting-On",
738 8: "Accounting-Off",
739 9: "Tunnel-Start",
740 10: "Tunnel-Stop",
741 11: "Tunnel-Reject",
742 12: "Tunnel-Link-Start",
743 13: "Tunnel-Link-Stop",
744 14: "Tunnel-Link-Reject",
745 15: "Failed"
746 },
747
748 # Acct-Authentic
749 45:
750 {
751 1: "RADIUS",
752 2: "Local",
753 3: "Remote",
754 4: "Diameter"
755 },
756
757 # Acct-Terminate-Cause
758 49:
759 {
760 1: "User Request",
761 2: "Lost Carrier",
762 3: "Lost Service",
763 4: "Idle Timeout",
764 5: "Session Timeout",
765 6: "Admin Reset",
766 7: "Admin Reboot",
767 8: "Port Error",
768 9: "NAS Error",
769 10: "NAS Request",
770 11: "NAS Reboot",
771 12: "Port Unneeded",
772 13: "Port Preempted",
773 14: "Port Suspended",
774 15: "Service Unavailable",
775 16: "Callback",
776 17: "User Error",
777 18: "Host Request",
778 19: "Supplicant Restart",
779 20: "Reauthentication Failure",
780 21: "Port Reinitialized",
781 22: "Port Administratively Disabled",
782 23: "Lost Power",
783 },
784
785 # NAS-Port-Type
786 61:
787 {
788 0: "Async",
789 1: "Sync",
790 2: "ISDN Sync",
791 3: "ISDN Async V.120",
792 4: "ISDN Async V.110",
793 5: "Virtual",
794 6: "PIAFS",
795 7: "HDLC Clear Channel",
796 8: "X.25",
797 9: "X.75",
798 10: "G.3 Fax",
799 11: "SDSL - Symmetric DSL",
800 12: "ADSL-CAP - Asymmetric DSL, Carrierless Amplitude Phase Modulation", # noqa: E501
801 13: "ADSL-DMT - Asymmetric DSL, Discrete Multi-Tone",
802 14: "IDSL - ISDN Digital Subscriber Line",
803 15: "Ethernet",
804 16: "xDSL - Digital Subscriber Line of unknown type",
805 17: "Cable",
806 18: "Wireles - Other",
807 19: "Wireless - IEEE 802.11",
808 20: "Token-Ring",
809 21: "FDDI",
810 22: "Wireless - CDMA2000",
811 23: "Wireless - UMTS",
812 24: "Wireless - 1X-EV",
813 25: "IAPP",
814 26: "FTTP - Fiber to the Premises",
815 27: "Wireless - IEEE 802.16",
816 28: "Wireless - IEEE 802.20",
817 29: "Wireless - IEEE 802.22",
818 30: "PPPoA - PPP over ATM",
819 31: "PPPoEoA - PPP over Ethernet over ATM",
820 32: "PPPoEoE - PPP over Ethernet over Ethernet",
821 33: "PPPoEoVLAN - PPP over Ethernet over VLAN",
822 34: "PPPoEoQinQ - PPP over Ethernet over IEEE 802.1QinQ",
823 35: "xPON - Passive Optical Network",
824 36: "Wireless - XGP",
825 37: "WiMAX Pre-Release 8 IWK Function",
826 38: "WIMAX-WIFI-IWK: WiMAX WIFI Interworking",
827 39: "WIMAX-SFF: Signaling Forwarding Function for LTE/3GPP2",
828 40: "WIMAX-HA-LMA: WiMAX HA and or LMA function",
829 41: "WIMAX-DHCP: WIMAX DHCP service",
830 42: "WIMAX-LBS: WiMAX location based service",
831 43: "WIMAX-WVS: WiMAX voice service"
832 },
833
834 # Tunnel-Type
835 64:
836 {
837 1: "Point-to-Point Tunneling Protocol (PPTP)",
838 2: "Layer Two Forwarding (L2F)",
839 3: "Layer Two Tunneling Protocol (L2TP)",
840 4: "Ascend Tunnel Management Protocol (ATMP)",
841 5: "Virtual Tunneling Protocol (VTP)",
842 6: "IP Authentication Header in the Tunnel-mode (AH)",
843 7: "IP-in-IP Encapsulation (IP-IP)",
844 8: "Minimal IP-in-IP Encapsulation (MIN-IP-IP)",
845 9: "IP Encapsulating Security Payload in the Tunnel-mode (ESP)",
846 10: "Generic Route Encapsulation (GRE)",
847 11: "Bay Dial Virtual Services (DVS)",
848 12: "IP-in-IP Tunneling",
849 13: "Virtual LANs (VLAN)"
850 },
851
852 # Tunnel-Medium-Type
853 65:
854 {
855 1: "IPv4 (IP version 4)",
856 2: "IPv6 (IP version 6)",
857 3: "NSAP",
858 4: "HDLC (8-bit multidrop)",
859 5: "BBN 1822",
860 6: "802",
861 7: "E.163 (POTS)",
862 8: "E.164 (SMDS, Frame Relay, ATM)",
863 9: "F.69 (Telex)",
864 10: "X.121 (X.25, Frame Relay)",
865 11: "IPX",
866 12: "Appletalk",
867 13: "Decnet IV",
868 14: "Banyan Vine",
869 15: "E.164 with NSAP format subaddress"
870 },
871
872 # ARAP-Zone-Access
873 72:
874 {
875 1: "Only allow access to default zone",
876 2: "Use zone filter inclusively",
877 3: "Not used",
878 4: "Use zone filter exclusively"
879 },
880
881 # Prompt
882 76:
883 {
884 0: "No Echo",
885 1: "Echo"
886 },
887
888 # Error-Cause Attribute
889 101:
890 {
891 201: "Residual Session Context Removed",
892 202: "Invalid EAP Packet (Ignored)",
893 401: "Unsupported Attribute",
894 402: "Missing Attribute",
895 403: "NAS Identification Mismatch",
896 404: "Invalid Request",
897 405: "Unsupported Service",
898 406: "Unsupported Extension",
899 407: "Invalid Attribute Value",
900 501: "Administratively Prohibited",
901 502: "Request Not Routable (Proxy)",
902 503: "Session Context Not Found",
903 504: "Session Context Not Removable",
904 505: "Other Proxy Processing Error",
905 506: "Resources Unavailable",
906 507: "Request Initiated",
907 508: "Multiple Session Selection Unsupported",
908 509: "Location-Info-Required",
909 601: "Response Too Big"
910 },
911
912 # Operator Namespace Identifier - Attribute 126
913 126:
914 {
915 0x30: "TADIG",
916 0x31: "REALM",
917 0x32: "E212",
918 0x33: "ICC",
919 0xFF: "Reserved"
920 },
921
922 # Basic-Location-Policy-Rules
923 129:
924 {
925 0: "Retransmission allowed",
926 },
927
928 # Location-Capable
929 131:
930 {
931 1: "CIVIC_LOCATION",
932 2: "GEO_LOCATION",
933 4: "USERS_LOCATION",
934 8: "NAS_LOCATION"
935 },
936
937 # Framed-Management-Protocol
938 133:
939 {
940 1: "SNMP",
941 2: "Web-based",
942 3: "NETCONF",
943 4: "FTP",
944 5: "TFTP",
945 6: "SFTP",
946 7: "RCP",
947 8: "SCP"
948 },
949
950 # Management-Transport-Protection
951 134:
952 {
953 1: "No-Protection",
954 2: "Integrity-Protection",
955 3: "Integrity-Confidentiality-Protection",
956 },
957 }
958
959
960 class _RadiusAttrIntEnumVal(_SpecificRadiusAttr):
961 """
962 Implements a RADIUS attribute which value field is 4 bytes long integer.
963 """
964
965 __slots__ = ["val"]
966
967 fields_desc = [
968 ByteEnumField("type", 6, _radius_attribute_types),
969 ByteField("len", 6),
970 MultiEnumField(
971 "value",
972 0,
973 _radius_attrs_values,
974 depends_on=lambda p: p.type,
975 fmt="I"
976 )
977 ]
978
979
980 class RadiusAttr_Service_Type(_RadiusAttrIntEnumVal):
981 """RFC 2865"""
982 val = 6
983
984
985 class RadiusAttr_Framed_Protocol(_RadiusAttrIntEnumVal):
986 """RFC 2865"""
987 val = 7
988
989
990 class RadiusAttr_NAS_Port_Type(_RadiusAttrIntEnumVal):
991 """RFC 2865"""
992 val = 61
993
994
995 class _EAPPacketField(PacketField):
996
997 """
998 Handles EAP-Message attribute value (the actual EAP packet).
999 """
1000
1001 def m2i(self, pkt, m):
1002 ret = None
1003 eap_packet_len = struct.unpack("!H", m[2:4])[0]
1004 if eap_packet_len < 254:
1005 # If the EAP packet has not been fragmented, build a Scapy EAP
1006 # packet from the data.
1007 ret = EAP(m)
1008 else:
1009 ret = conf.raw_layer(m)
1010 return ret
1011
1012
1013 class RadiusAttr_EAP_Message(RadiusAttribute):
1014 """
1015 Implements the "EAP-Message" attribute (RFC 3579).
1016 """
1017
1018 name = "EAP-Message"
1019 fields_desc = [
1020 ByteEnumField("type", 79, _radius_attribute_types),
1021 FieldLenField(
1022 "len",
1023 None,
1024 "value",
1025 "B",
1026 adjust=lambda pkt, x: len(pkt.value) + 2
1027 ),
1028 _EAPPacketField("value", "", EAP)
1029 ]
1030
1031
1032 class RadiusAttr_Vendor_Specific(RadiusAttribute):
1033 """
1034 Implements the "Vendor-Specific" attribute, as described in RFC 2865.
1035 """
1036
1037 name = "Vendor-Specific"
1038 fields_desc = [
1039 ByteEnumField("type", 26, _radius_attribute_types),
1040 FieldLenField(
1041 "len",
1042 None,
1043 "value",
1044 "B",
1045 adjust=lambda pkt, x: len(pkt.value) + 8
1046 ),
1047 IntField("vendor_id", 0),
1048 ByteField("vendor_type", 0),
1049 FieldLenField(
1050 "vendor_len",
1051 None,
1052 "value",
1053 "B",
1054 adjust=lambda p, x: len(p.value) + 2
1055 ),
1056 StrLenField("value", "", length_from=lambda p: p.vendor_len - 2)
1057 ]
1058
1059
1060 # See IANA RADIUS Packet Type Codes registry
1061 _packet_codes = {
1062 1: "Access-Request",
1063 2: "Access-Accept",
1064 3: "Access-Reject",
1065 4: "Accounting-Request",
1066 5: "Accounting-Response",
1067 6: "Accounting-Status (now Interim Accounting)",
1068 7: "Password-Request",
1069 8: "Password-Ack",
1070 9: "Password-Reject",
1071 10: "Accounting-Message",
1072 11: "Access-Challenge",
1073 12: "Status-Server (experimental)",
1074 13: "Status-Client (experimental)",
1075 21: "Resource-Free-Request",
1076 22: "Resource-Free-Response",
1077 23: "Resource-Query-Request",
1078 24: "Resource-Query-Response",
1079 25: "Alternate-Resource-Reclaim-Request",
1080 26: "NAS-Reboot-Request",
1081 27: "NAS-Reboot-Response",
1082 28: "Reserved",
1083 29: "Next-Passcode",
1084 30: "New-Pin",
1085 31: "Terminate-Session",
1086 32: "Password-Expired",
1087 33: "Event-Request",
1088 34: "Event-Response",
1089 40: "Disconnect-Request",
1090 41: "Disconnect-ACK",
1091 42: "Disconnect-NAK",
1092 43: "CoA-Request",
1093 44: "CoA-ACK",
1094 45: "CoA-NAK",
1095 50: "IP-Address-Allocate",
1096 51: "IP-Address-Release",
1097 52: "Protocol-Error",
1098 250: "Experimental Use",
1099 251: "Experimental Use",
1100 252: "Experimental Use",
1101 253: "Experimental Use",
1102 254: "Reserved",
1103 255: "Reserved"
1104 }
1105
1106
1107 class Radius(Packet):
1108 """
1109 Implements a RADIUS packet (RFC 2865).
1110 """
1111
1112 name = "RADIUS"
1113 fields_desc = [
1114 ByteEnumField("code", 1, _packet_codes),
1115 ByteField("id", 0),
1116 FieldLenField(
1117 "len",
1118 None,
1119 "attributes",
1120 "H",
1121 adjust=lambda pkt, x: len(pkt.attributes) + 20
1122 ),
1123 XStrFixedLenField("authenticator", "", 16),
1124 PacketListField(
1125 "attributes",
1126 [],
1127 RadiusAttribute,
1128 length_from=lambda pkt: pkt.len - 20
1129 )
1130 ]
1131
1132 def compute_authenticator(self, packed_request_auth, shared_secret):
1133 """
1134 Computes the authenticator field (RFC 2865 - Section 3)
1135 """
1136
1137 data = prepare_packed_data(self, packed_request_auth)
1138 radius_mac = hashlib.md5(data + shared_secret)
1139 return radius_mac.digest()
1140
1141 def post_build(self, p, pay):
1142 p += pay
1143 length = self.len
1144 if length is None:
1145 length = len(p)
1146 p = p[:2] + struct.pack("!H", length) + p[4:]
1147 return p
1148
1149
1150 bind_layers(UDP, Radius, sport=1812)
1151 bind_layers(UDP, Radius, dport=1812)
1152 bind_layers(UDP, Radius, sport=1813)
1153 bind_layers(UDP, Radius, dport=1813)
1154
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/scapy/layers/radius.py b/scapy/layers/radius.py
--- a/scapy/layers/radius.py
+++ b/scapy/layers/radius.py
@@ -579,7 +579,7 @@
#
-class _RadiusAttrIPv4AddrVal(RadiusAttribute):
+class _RadiusAttrIPv4AddrVal(_SpecificRadiusAttr):
"""
Implements a RADIUS attribute which value field is an IPv4 address.
"""
| {"golden_diff": "diff --git a/scapy/layers/radius.py b/scapy/layers/radius.py\n--- a/scapy/layers/radius.py\n+++ b/scapy/layers/radius.py\n@@ -579,7 +579,7 @@\n #\n \n \n-class _RadiusAttrIPv4AddrVal(RadiusAttribute):\n+class _RadiusAttrIPv4AddrVal(_SpecificRadiusAttr):\n \"\"\"\n Implements a RADIUS attribute which value field is an IPv4 address.\n \"\"\"\n", "issue": "Bug: wrong base class usage in radius.\nthe following file should be corrected:\r\nscapy/layers/radius.py\r\n\r\n`\r\nclass _RadiusAttrIPv4AddrVal(RadiusAttribute):\r\n.....\r\n`\r\n\r\nshould be:\r\n`\r\nclass _RadiusAttrIPv4AddrVal(_SpecificRadiusAttr):\r\n.....\r\n`\r\n\r\nwithout this change, the 'type' IE will be always 4.\r\n\r\n\n", "before_files": [{"content": "# This file is part of Scapy\n# See http://www.secdev.org/projects/scapy for more information\n# Copyright (C) Philippe Biondi <[email protected]>\n# Vincent Mauge <[email protected]>\n# This program is published under a GPLv2 license\n\n\"\"\"\nRADIUS (Remote Authentication Dial In User Service)\n\"\"\"\n\nimport struct\nimport hashlib\nimport hmac\nfrom scapy.compat import orb, raw\nfrom scapy.packet import Packet, Padding, bind_layers\nfrom scapy.fields import ByteField, ByteEnumField, IntField, StrLenField,\\\n XStrLenField, XStrFixedLenField, FieldLenField, PacketField,\\\n PacketListField, IPField, MultiEnumField\nfrom scapy.layers.inet import UDP\nfrom scapy.layers.eap import EAP\nfrom scapy.utils import issubtype\nfrom scapy.config import conf\nfrom scapy.error import Scapy_Exception\n\n\n# https://www.iana.org/assignments/radius-types/radius-types.xhtml\n_radius_attribute_types = {\n 1: \"User-Name\",\n 2: \"User-Password\",\n 3: \"CHAP-Password\",\n 4: \"NAS-IP-Address\",\n 5: \"NAS-Port\",\n 6: \"Service-Type\",\n 7: \"Framed-Protocol\",\n 8: \"Framed-IP-Address\",\n 9: \"Framed-IP-Netmask\",\n 10: \"Framed-Routing\",\n 11: \"Filter-Id\",\n 12: \"Framed-MTU\",\n 13: \"Framed-Compression\",\n 14: \"Login-IP-Host\",\n 15: \"Login-Service\",\n 16: \"Login-TCP-Port\",\n 17: \"Unassigned\",\n 18: \"Reply-Message\",\n 19: \"Callback-Number\",\n 20: \"Callback-Id\",\n 21: \"Unassigned\",\n 22: \"Framed-Route\",\n 23: \"Framed-IPX-Network\",\n 24: \"State\",\n 25: \"Class\",\n 26: \"Vendor-Specific\",\n 27: \"Session-Timeout\",\n 28: \"Idle-Timeout\",\n 29: \"Termination-Action\",\n 30: \"Called-Station-Id\",\n 31: \"Calling-Station-Id\",\n 32: \"NAS-Identifier\",\n 33: \"Proxy-State\",\n 34: \"Login-LAT-Service\",\n 35: \"Login-LAT-Node\",\n 36: \"Login-LAT-Group\",\n 37: \"Framed-AppleTalk-Link\",\n 38: \"Framed-AppleTalk-Network\",\n 39: \"Framed-AppleTalk-Zone\",\n 40: \"Acct-Status-Type\",\n 41: \"Acct-Delay-Time\",\n 42: \"Acct-Input-Octets\",\n 43: \"Acct-Output-Octets\",\n 44: \"Acct-Session-Id\",\n 45: \"Acct-Authentic\",\n 46: \"Acct-Session-Time\",\n 47: \"Acct-Input-Packets\",\n 48: \"Acct-Output-Packets\",\n 49: \"Acct-Terminate-Cause\",\n 50: \"Acct-Multi-Session-Id\",\n 51: \"Acct-Link-Count\",\n 52: \"Acct-Input-Gigawords\",\n 53: \"Acct-Output-Gigawords\",\n 54: \"Unassigned\",\n 55: \"Event-Timestamp\",\n 56: \"Egress-VLANID\",\n 57: \"Ingress-Filters\",\n 58: \"Egress-VLAN-Name\",\n 59: \"User-Priority-Table\",\n 60: \"CHAP-Challenge\",\n 61: \"NAS-Port-Type\",\n 62: \"Port-Limit\",\n 63: \"Login-LAT-Port\",\n 64: \"Tunnel-Type\",\n 65: \"Tunnel-Medium-Type\",\n 66: \"Tunnel-Client-Endpoint\",\n 67: \"Tunnel-Server-Endpoint\",\n 68: \"Acct-Tunnel-Connection\",\n 69: \"Tunnel-Password\",\n 70: \"ARAP-Password\",\n 71: 
\"ARAP-Features\",\n 72: \"ARAP-Zone-Access\",\n 73: \"ARAP-Security\",\n 74: \"ARAP-Security-Data\",\n 75: \"Password-Retry\",\n 76: \"Prompt\",\n 77: \"Connect-Info\",\n 78: \"Configuration-Token\",\n 79: \"EAP-Message\",\n 80: \"Message-Authenticator\",\n 81: \"Tunnel-Private-Group-ID\",\n 82: \"Tunnel-Assignment-ID\",\n 83: \"Tunnel-Preference\",\n 84: \"ARAP-Challenge-Response\",\n 85: \"Acct-Interim-Interval\",\n 86: \"Acct-Tunnel-Packets-Lost\",\n 87: \"NAS-Port-Id\",\n 88: \"Framed-Pool\",\n 89: \"CUI\",\n 90: \"Tunnel-Client-Auth-ID\",\n 91: \"Tunnel-Server-Auth-ID\",\n 92: \"NAS-Filter-Rule\",\n 93: \"Unassigned\",\n 94: \"Originating-Line-Info\",\n 95: \"NAS-IPv6-Address\",\n 96: \"Framed-Interface-Id\",\n 97: \"Framed-IPv6-Prefix\",\n 98: \"Login-IPv6-Host\",\n 99: \"Framed-IPv6-Route\",\n 100: \"Framed-IPv6-Pool\",\n 101: \"Error-Cause\",\n 102: \"EAP-Key-Name\",\n 103: \"Digest-Response\",\n 104: \"Digest-Realm\",\n 105: \"Digest-Nonce\",\n 106: \"Digest-Response-Auth\",\n 107: \"Digest-Nextnonce\",\n 108: \"Digest-Method\",\n 109: \"Digest-URI\",\n 110: \"Digest-Qop\",\n 111: \"Digest-Algorithm\",\n 112: \"Digest-Entity-Body-Hash\",\n 113: \"Digest-CNonce\",\n 114: \"Digest-Nonce-Count\",\n 115: \"Digest-Username\",\n 116: \"Digest-Opaque\",\n 117: \"Digest-Auth-Param\",\n 118: \"Digest-AKA-Auts\",\n 119: \"Digest-Domain\",\n 120: \"Digest-Stale\",\n 121: \"Digest-HA1\",\n 122: \"SIP-AOR\",\n 123: \"Delegated-IPv6-Prefix\",\n 124: \"MIP6-Feature-Vector\",\n 125: \"MIP6-Home-Link-Prefix\",\n 126: \"Operator-Name\",\n 127: \"Location-Information\",\n 128: \"Location-Data\",\n 129: \"Basic-Location-Policy-Rules\",\n 130: \"Extended-Location-Policy-Rules\",\n 131: \"Location-Capable\",\n 132: \"Requested-Location-Info\",\n 133: \"Framed-Management-Protocol\",\n 134: \"Management-Transport-Protection\",\n 135: \"Management-Policy-Id\",\n 136: \"Management-Privilege-Level\",\n 137: \"PKM-SS-Cert\",\n 138: \"PKM-CA-Cert\",\n 139: \"PKM-Config-Settings\",\n 140: \"PKM-Cryptosuite-List\",\n 141: \"PKM-SAID\",\n 142: \"PKM-SA-Descriptor\",\n 143: \"PKM-Auth-Key\",\n 144: \"DS-Lite-Tunnel-Name\",\n 145: \"Mobile-Node-Identifier\",\n 146: \"Service-Selection\",\n 147: \"PMIP6-Home-LMA-IPv6-Address\",\n 148: \"PMIP6-Visited-LMA-IPv6-Address\",\n 149: \"PMIP6-Home-LMA-IPv4-Address\",\n 150: \"PMIP6-Visited-LMA-IPv4-Address\",\n 151: \"PMIP6-Home-HN-Prefix\",\n 152: \"PMIP6-Visited-HN-Prefix\",\n 153: \"PMIP6-Home-Interface-ID\",\n 154: \"PMIP6-Visited-Interface-ID\",\n 155: \"PMIP6-Home-IPv4-HoA\",\n 156: \"PMIP6-Visited-IPv4-HoA\",\n 157: \"PMIP6-Home-DHCP4-Server-Address\",\n 158: \"PMIP6-Visited-DHCP4-Server-Address\",\n 159: \"PMIP6-Home-DHCP6-Server-Address\",\n 160: \"PMIP6-Visited-DHCP6-Server-Address\",\n 161: \"PMIP6-Home-IPv4-Gateway\",\n 162: \"PMIP6-Visited-IPv4-Gateway\",\n 163: \"EAP-Lower-Layer\",\n 164: \"GSS-Acceptor-Service-Name\",\n 165: \"GSS-Acceptor-Host-Name\",\n 166: \"GSS-Acceptor-Service-Specifics\",\n 167: \"GSS-Acceptor-Realm-Name\",\n 168: \"Framed-IPv6-Address\",\n 169: \"DNS-Server-IPv6-Address\",\n 170: \"Route-IPv6-Information\",\n 171: \"Delegated-IPv6-Prefix-Pool\",\n 172: \"Stateful-IPv6-Address-Pool\",\n 173: \"IPv6-6rd-Configuration\",\n 174: \"Allowed-Called-Station-Id\",\n 175: \"EAP-Peer-Id\",\n 176: \"EAP-Server-Id\",\n 177: \"Mobility-Domain-Id\",\n 178: \"Preauth-Timeout\",\n 179: \"Network-Id-Name\",\n 180: \"EAPoL-Announcement\",\n 181: \"WLAN-HESSID\",\n 182: \"WLAN-Venue-Info\",\n 183: \"WLAN-Venue-Language\",\n 184: \"WLAN-Venue-Name\",\n 
185: \"WLAN-Reason-Code\",\n 186: \"WLAN-Pairwise-Cipher\",\n 187: \"WLAN-Group-Cipher\",\n 188: \"WLAN-AKM-Suite\",\n 189: \"WLAN-Group-Mgmt-Cipher\",\n 190: \"WLAN-RF-Band\",\n 191: \"Unassigned\",\n}\n\n\nclass RadiusAttribute(Packet):\n \"\"\"\n Implements a RADIUS attribute (RFC 2865). Every specific RADIUS attribute\n class should inherit from this one.\n \"\"\"\n\n name = \"Radius Attribute\"\n fields_desc = [\n ByteEnumField(\"type\", 1, _radius_attribute_types),\n FieldLenField(\"len\", None, \"value\", \"B\",\n adjust=lambda pkt, x: len(pkt.value) + 2),\n StrLenField(\"value\", \"\", length_from=lambda pkt: pkt.len - 2)\n ]\n\n registered_attributes = {}\n\n @classmethod\n def register_variant(cls):\n \"\"\"\n Registers the RADIUS attributes defined in this module.\n \"\"\"\n\n if hasattr(cls, \"val\"):\n cls.registered_attributes[cls.val] = cls\n else:\n cls.registered_attributes[cls.type.default] = cls\n\n @classmethod\n def dispatch_hook(cls, _pkt=None, *args, **kargs):\n \"\"\"\n Returns the right RadiusAttribute class for the given data.\n \"\"\"\n\n if _pkt:\n attr_type = orb(_pkt[0])\n return cls.registered_attributes.get(attr_type, cls)\n return cls\n\n def haslayer(self, cls):\n if cls == \"RadiusAttribute\":\n if isinstance(self, RadiusAttribute):\n return True\n elif issubtype(cls, RadiusAttribute):\n if isinstance(self, cls):\n return True\n return super(RadiusAttribute, self).haslayer(cls)\n\n def getlayer(self, cls, nb=1, _track=None, _subclass=True, **flt):\n return super(RadiusAttribute, self).getlayer(cls, nb=nb, _track=_track,\n _subclass=True, **flt)\n\n def post_build(self, p, pay):\n length = self.len\n if length is None:\n length = len(p)\n p = p[:1] + struct.pack(\"!B\", length) + p[2:]\n return p\n\n def guess_payload_class(self, _):\n return Padding\n\n\nclass _SpecificRadiusAttr(RadiusAttribute):\n \"\"\"\n Class from which every \"specific\" RADIUS attribute defined in this module\n inherits.\n \"\"\"\n\n __slots__ = [\"val\"]\n\n def __init__(self, _pkt=\"\", post_transform=None, _internal=0, _underlayer=None, **fields): # noqa: E501\n super(_SpecificRadiusAttr, self).__init__(\n _pkt,\n post_transform,\n _internal,\n _underlayer\n )\n self.fields[\"type\"] = self.val\n name_parts = self.__class__.__name__.split('RadiusAttr_')\n if len(name_parts) < 2:\n raise Scapy_Exception(\n \"Invalid class name: {}\".format(self.__class__.__name__)\n )\n self.name = name_parts[1].replace('_', '-')\n\n\n#\n# RADIUS attributes which values are 4 bytes integers\n#\n\nclass _RadiusAttrIntValue(_SpecificRadiusAttr):\n \"\"\"\n Implements a RADIUS attribute which value field is 4 bytes long integer.\n \"\"\"\n\n fields_desc = [\n ByteEnumField(\"type\", 5, _radius_attribute_types),\n ByteField(\"len\", 6),\n IntField(\"value\", 0)\n ]\n\n\nclass RadiusAttr_NAS_Port(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 5\n\n\nclass RadiusAttr_Framed_MTU(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 12\n\n\nclass RadiusAttr_Login_TCP_Port(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 16\n\n\nclass RadiusAttr_Session_Timeout(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 27\n\n\nclass RadiusAttr_Idle_Timeout(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 28\n\n\nclass RadiusAttr_Framed_AppleTalk_Link(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 37\n\n\nclass RadiusAttr_Framed_AppleTalk_Network(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 38\n\n\nclass RadiusAttr_Acct_Delay_Time(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 
41\n\n\nclass RadiusAttr_Acct_Input_Octets(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 42\n\n\nclass RadiusAttr_Acct_Output_Octets(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 43\n\n\nclass RadiusAttr_Acct_Session_Time(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 46\n\n\nclass RadiusAttr_Acct_Input_Packets(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 47\n\n\nclass RadiusAttr_Acct_Output_Packets(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 48\n\n\nclass RadiusAttr_Acct_Link_Count(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 51\n\n\nclass RadiusAttr_Acct_Input_Gigawords(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 52\n\n\nclass RadiusAttr_Acct_Output_Gigawords(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 53\n\n\nclass RadiusAttr_Egress_VLANID(_RadiusAttrIntValue):\n \"\"\"RFC 4675\"\"\"\n val = 56\n\n\nclass RadiusAttr_Port_Limit(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 62\n\n\nclass RadiusAttr_ARAP_Security(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 73\n\n\nclass RadiusAttr_Password_Retry(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 75\n\n\nclass RadiusAttr_Tunnel_Preference(_RadiusAttrIntValue):\n \"\"\"RFC 2868\"\"\"\n val = 83\n\n\nclass RadiusAttr_Acct_Interim_Interval(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 85\n\n\nclass RadiusAttr_Acct_Tunnel_Packets_Lost(_RadiusAttrIntValue):\n \"\"\"RFC 2867\"\"\"\n val = 86\n\n\nclass RadiusAttr_Management_Privilege_Level(_RadiusAttrIntValue):\n \"\"\"RFC 5607\"\"\"\n val = 136\n\n\nclass RadiusAttr_Mobility_Domain_Id(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 177\n\n\nclass RadiusAttr_Preauth_Timeout(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 178\n\n\nclass RadiusAttr_WLAN_Venue_Info(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 182\n\n\nclass RadiusAttr_WLAN_Reason_Code(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 185\n\n\nclass RadiusAttr_WLAN_Pairwise_Cipher(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 186\n\n\nclass RadiusAttr_WLAN_Group_Cipher(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 187\n\n\nclass RadiusAttr_WLAN_AKM_Suite(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 188\n\n\nclass RadiusAttr_WLAN_Group_Mgmt_Cipher(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 189\n\n\nclass RadiusAttr_WLAN_RF_Band(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 190\n\n\n#\n# RADIUS attributes which values are string (displayed as hex)\n#\n\nclass _RadiusAttrHexStringVal(_SpecificRadiusAttr):\n \"\"\"\n Implements a RADIUS attribute which value field is a string that will be\n as a hex string.\n \"\"\"\n\n __slots__ = [\"val\"]\n\n def __init__(self, _pkt=\"\", post_transform=None, _internal=0, _underlayer=None, **fields): # noqa: E501\n super(_RadiusAttrHexStringVal, self).__init__(\n _pkt,\n post_transform,\n _internal,\n _underlayer\n )\n self.fields[\"type\"] = self.val\n name_parts = self.__class__.__name__.split('RadiusAttr_')\n if len(name_parts) < 2:\n raise Scapy_Exception(\n \"Invalid class name: {}\".format(self.__class__.__name__)\n )\n self.name = name_parts[1].replace('_', '-')\n\n fields_desc = [\n ByteEnumField(\"type\", 24, _radius_attribute_types),\n FieldLenField(\n \"len\",\n None,\n \"value\",\n \"B\",\n adjust=lambda p, x: len(p.value) + 2\n ),\n XStrLenField(\"value\", \"\", length_from=lambda p: p.len - 2 if p.len else 0) # noqa: E501\n ]\n\n\nclass RadiusAttr_State(_RadiusAttrHexStringVal):\n \"\"\"RFC 2865\"\"\"\n val = 24\n\n\ndef 
prepare_packed_data(radius_packet, packed_req_authenticator):\n \"\"\"\n Pack RADIUS data prior computing the authentication MAC\n \"\"\"\n\n packed_hdr = struct.pack(\"!B\", radius_packet.code)\n packed_hdr += struct.pack(\"!B\", radius_packet.id)\n packed_hdr += struct.pack(\"!H\", radius_packet.len)\n\n packed_attrs = b''\n for attr in radius_packet.attributes:\n packed_attrs += raw(attr)\n\n return packed_hdr + packed_req_authenticator + packed_attrs\n\n\nclass RadiusAttr_Message_Authenticator(_RadiusAttrHexStringVal):\n \"\"\"RFC 2869\"\"\"\n val = 80\n\n fields_desc = [\n ByteEnumField(\"type\", 24, _radius_attribute_types),\n FieldLenField(\n \"len\",\n 18,\n \"value\",\n \"B\",\n ),\n XStrFixedLenField(\"value\", \"\\x00\" * 16, length=16)\n ]\n\n @staticmethod\n def compute_message_authenticator(radius_packet, packed_req_authenticator,\n shared_secret):\n \"\"\"\n Computes the \"Message-Authenticator\" of a given RADIUS packet.\n \"\"\"\n\n data = prepare_packed_data(radius_packet, packed_req_authenticator)\n radius_hmac = hmac.new(shared_secret, data, hashlib.md5)\n\n return radius_hmac.digest()\n\n#\n# RADIUS attributes which values are IPv4 prefixes\n#\n\n\nclass _RadiusAttrIPv4AddrVal(RadiusAttribute):\n \"\"\"\n Implements a RADIUS attribute which value field is an IPv4 address.\n \"\"\"\n\n __slots__ = [\"val\"]\n\n fields_desc = [\n ByteEnumField(\"type\", 4, _radius_attribute_types),\n ByteField(\"len\", 6),\n IPField(\"value\", \"0.0.0.0\")\n ]\n\n\nclass RadiusAttr_NAS_IP_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 4\n\n\nclass RadiusAttr_Framed_IP_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 8\n\n\nclass RadiusAttr_Framed_IP_Netmask(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 9\n\n\nclass RadiusAttr_Login_IP_Host(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 14\n\n\nclass RadiusAttr_Framed_IPX_Network(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 23\n\n\nclass RadiusAttr_PMIP6_Home_LMA_IPv4_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 149\n\n\nclass RadiusAttr_PMIP6_Visited_LMA_IPv4_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 150\n\n\nclass RadiusAttr_PMIP6_Home_DHCP4_Server_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 157\n\n\nclass RadiusAttr_PMIP6_Visited_DHCP4_Server_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 158\n\n\nclass RadiusAttr_PMIP6_Home_IPv4_Gateway(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 161\n\n\nclass RadiusAttr_PMIP6_Visited_IPv4_Gateway(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 162\n\n\n# See IANA registry \"RADIUS Types\"\n_radius_attrs_values = {\n # Service-Type\n 6:\n {\n 1: \"Login\",\n 2: \"Framed\",\n 3: \"Callback Login\",\n 4: \"Callback Framed\",\n 5: \"Outbound\",\n 6: \"Administrative\",\n 7: \"NAS Prompt\",\n 8: \"Authenticate Only\",\n 9: \"Callback NAS Prompt\",\n 10: \"Call Check\",\n 11: \"Callback Administrative\",\n 12: \"Voice\",\n 13: \"Fax\",\n 14: \"Modem Relay\",\n 15: \"IAPP-Register\",\n 16: \"IAPP-AP-Check\",\n 17: \"Authorize Only\",\n 18: \"Framed-Management\",\n 19: \"Additional-Authorization\"\n },\n\n # Framed-Protocol\n 7:\n {\n 1: \"PPP\",\n 2: \"SLIP\",\n 3: \"AppleTalk Remote Access Protocol (ARAP)\",\n 4: \"Gandalf proprietary SingleLink/MultiLink protocol\",\n 5: \"Xylogics proprietary IPX/SLIP\",\n 6: \"X.75 Synchronous\",\n 7: \"GPRS PDP Context\"\n },\n\n # Framed-Routing\n 10:\n {\n 0: \"None\",\n 1: \"Send routing packets\",\n 2: 
\"Listen for routing packets\",\n 3: \"Send and Listen\"\n },\n\n # Framed-Compression\n 13:\n {\n 0: \"None\",\n 1: \"VJ TCP/IP header compression\",\n 2: \"IPX header compression\",\n 3: \"Stac-LZS compression\"\n },\n\n # Login-Service\n 15:\n {\n 0: \"Telnet\",\n 1: \"Rlogin\",\n 2: \"TCP Clear\",\n 3: \"PortMaster (proprietary)\",\n 4: \"LAT\",\n 5: \"X25-PAD\",\n 6: \"X25-T3POS\",\n 7: \"Unassigned\",\n 8: \"TCP Clear Quiet (suppresses any NAS-generated connect string)\"\n },\n\n # Termination-Action\n 29:\n {\n 0: \"Default\",\n 1: \"RADIUS-Request\"\n },\n\n # Acct-Status-Type\n 40:\n {\n 1: \"Start\",\n 2: \"Stop\",\n 3: \"Interim-Update\",\n 4: \"Unassigned\",\n 5: \"Unassigned\",\n 6: \"Unassigned\",\n 7: \"Accounting-On\",\n 8: \"Accounting-Off\",\n 9: \"Tunnel-Start\",\n 10: \"Tunnel-Stop\",\n 11: \"Tunnel-Reject\",\n 12: \"Tunnel-Link-Start\",\n 13: \"Tunnel-Link-Stop\",\n 14: \"Tunnel-Link-Reject\",\n 15: \"Failed\"\n },\n\n # Acct-Authentic\n 45:\n {\n 1: \"RADIUS\",\n 2: \"Local\",\n 3: \"Remote\",\n 4: \"Diameter\"\n },\n\n # Acct-Terminate-Cause\n 49:\n {\n 1: \"User Request\",\n 2: \"Lost Carrier\",\n 3: \"Lost Service\",\n 4: \"Idle Timeout\",\n 5: \"Session Timeout\",\n 6: \"Admin Reset\",\n 7: \"Admin Reboot\",\n 8: \"Port Error\",\n 9: \"NAS Error\",\n 10: \"NAS Request\",\n 11: \"NAS Reboot\",\n 12: \"Port Unneeded\",\n 13: \"Port Preempted\",\n 14: \"Port Suspended\",\n 15: \"Service Unavailable\",\n 16: \"Callback\",\n 17: \"User Error\",\n 18: \"Host Request\",\n 19: \"Supplicant Restart\",\n 20: \"Reauthentication Failure\",\n 21: \"Port Reinitialized\",\n 22: \"Port Administratively Disabled\",\n 23: \"Lost Power\",\n },\n\n # NAS-Port-Type\n 61:\n {\n 0: \"Async\",\n 1: \"Sync\",\n 2: \"ISDN Sync\",\n 3: \"ISDN Async V.120\",\n 4: \"ISDN Async V.110\",\n 5: \"Virtual\",\n 6: \"PIAFS\",\n 7: \"HDLC Clear Channel\",\n 8: \"X.25\",\n 9: \"X.75\",\n 10: \"G.3 Fax\",\n 11: \"SDSL - Symmetric DSL\",\n 12: \"ADSL-CAP - Asymmetric DSL, Carrierless Amplitude Phase Modulation\", # noqa: E501\n 13: \"ADSL-DMT - Asymmetric DSL, Discrete Multi-Tone\",\n 14: \"IDSL - ISDN Digital Subscriber Line\",\n 15: \"Ethernet\",\n 16: \"xDSL - Digital Subscriber Line of unknown type\",\n 17: \"Cable\",\n 18: \"Wireles - Other\",\n 19: \"Wireless - IEEE 802.11\",\n 20: \"Token-Ring\",\n 21: \"FDDI\",\n 22: \"Wireless - CDMA2000\",\n 23: \"Wireless - UMTS\",\n 24: \"Wireless - 1X-EV\",\n 25: \"IAPP\",\n 26: \"FTTP - Fiber to the Premises\",\n 27: \"Wireless - IEEE 802.16\",\n 28: \"Wireless - IEEE 802.20\",\n 29: \"Wireless - IEEE 802.22\",\n 30: \"PPPoA - PPP over ATM\",\n 31: \"PPPoEoA - PPP over Ethernet over ATM\",\n 32: \"PPPoEoE - PPP over Ethernet over Ethernet\",\n 33: \"PPPoEoVLAN - PPP over Ethernet over VLAN\",\n 34: \"PPPoEoQinQ - PPP over Ethernet over IEEE 802.1QinQ\",\n 35: \"xPON - Passive Optical Network\",\n 36: \"Wireless - XGP\",\n 37: \"WiMAX Pre-Release 8 IWK Function\",\n 38: \"WIMAX-WIFI-IWK: WiMAX WIFI Interworking\",\n 39: \"WIMAX-SFF: Signaling Forwarding Function for LTE/3GPP2\",\n 40: \"WIMAX-HA-LMA: WiMAX HA and or LMA function\",\n 41: \"WIMAX-DHCP: WIMAX DHCP service\",\n 42: \"WIMAX-LBS: WiMAX location based service\",\n 43: \"WIMAX-WVS: WiMAX voice service\"\n },\n\n # Tunnel-Type\n 64:\n {\n 1: \"Point-to-Point Tunneling Protocol (PPTP)\",\n 2: \"Layer Two Forwarding (L2F)\",\n 3: \"Layer Two Tunneling Protocol (L2TP)\",\n 4: \"Ascend Tunnel Management Protocol (ATMP)\",\n 5: \"Virtual Tunneling Protocol (VTP)\",\n 6: \"IP Authentication Header in the 
Tunnel-mode (AH)\",\n 7: \"IP-in-IP Encapsulation (IP-IP)\",\n 8: \"Minimal IP-in-IP Encapsulation (MIN-IP-IP)\",\n 9: \"IP Encapsulating Security Payload in the Tunnel-mode (ESP)\",\n 10: \"Generic Route Encapsulation (GRE)\",\n 11: \"Bay Dial Virtual Services (DVS)\",\n 12: \"IP-in-IP Tunneling\",\n 13: \"Virtual LANs (VLAN)\"\n },\n\n # Tunnel-Medium-Type\n 65:\n {\n 1: \"IPv4 (IP version 4)\",\n 2: \"IPv6 (IP version 6)\",\n 3: \"NSAP\",\n 4: \"HDLC (8-bit multidrop)\",\n 5: \"BBN 1822\",\n 6: \"802\",\n 7: \"E.163 (POTS)\",\n 8: \"E.164 (SMDS, Frame Relay, ATM)\",\n 9: \"F.69 (Telex)\",\n 10: \"X.121 (X.25, Frame Relay)\",\n 11: \"IPX\",\n 12: \"Appletalk\",\n 13: \"Decnet IV\",\n 14: \"Banyan Vine\",\n 15: \"E.164 with NSAP format subaddress\"\n },\n\n # ARAP-Zone-Access\n 72:\n {\n 1: \"Only allow access to default zone\",\n 2: \"Use zone filter inclusively\",\n 3: \"Not used\",\n 4: \"Use zone filter exclusively\"\n },\n\n # Prompt\n 76:\n {\n 0: \"No Echo\",\n 1: \"Echo\"\n },\n\n # Error-Cause Attribute\n 101:\n {\n 201: \"Residual Session Context Removed\",\n 202: \"Invalid EAP Packet (Ignored)\",\n 401: \"Unsupported Attribute\",\n 402: \"Missing Attribute\",\n 403: \"NAS Identification Mismatch\",\n 404: \"Invalid Request\",\n 405: \"Unsupported Service\",\n 406: \"Unsupported Extension\",\n 407: \"Invalid Attribute Value\",\n 501: \"Administratively Prohibited\",\n 502: \"Request Not Routable (Proxy)\",\n 503: \"Session Context Not Found\",\n 504: \"Session Context Not Removable\",\n 505: \"Other Proxy Processing Error\",\n 506: \"Resources Unavailable\",\n 507: \"Request Initiated\",\n 508: \"Multiple Session Selection Unsupported\",\n 509: \"Location-Info-Required\",\n 601: \"Response Too Big\"\n },\n\n # Operator Namespace Identifier - Attribute 126\n 126:\n {\n 0x30: \"TADIG\",\n 0x31: \"REALM\",\n 0x32: \"E212\",\n 0x33: \"ICC\",\n 0xFF: \"Reserved\"\n },\n\n # Basic-Location-Policy-Rules\n 129:\n {\n 0: \"Retransmission allowed\",\n },\n\n # Location-Capable\n 131:\n {\n 1: \"CIVIC_LOCATION\",\n 2: \"GEO_LOCATION\",\n 4: \"USERS_LOCATION\",\n 8: \"NAS_LOCATION\"\n },\n\n # Framed-Management-Protocol\n 133:\n {\n 1: \"SNMP\",\n 2: \"Web-based\",\n 3: \"NETCONF\",\n 4: \"FTP\",\n 5: \"TFTP\",\n 6: \"SFTP\",\n 7: \"RCP\",\n 8: \"SCP\"\n },\n\n # Management-Transport-Protection\n 134:\n {\n 1: \"No-Protection\",\n 2: \"Integrity-Protection\",\n 3: \"Integrity-Confidentiality-Protection\",\n },\n}\n\n\nclass _RadiusAttrIntEnumVal(_SpecificRadiusAttr):\n \"\"\"\n Implements a RADIUS attribute which value field is 4 bytes long integer.\n \"\"\"\n\n __slots__ = [\"val\"]\n\n fields_desc = [\n ByteEnumField(\"type\", 6, _radius_attribute_types),\n ByteField(\"len\", 6),\n MultiEnumField(\n \"value\",\n 0,\n _radius_attrs_values,\n depends_on=lambda p: p.type,\n fmt=\"I\"\n )\n ]\n\n\nclass RadiusAttr_Service_Type(_RadiusAttrIntEnumVal):\n \"\"\"RFC 2865\"\"\"\n val = 6\n\n\nclass RadiusAttr_Framed_Protocol(_RadiusAttrIntEnumVal):\n \"\"\"RFC 2865\"\"\"\n val = 7\n\n\nclass RadiusAttr_NAS_Port_Type(_RadiusAttrIntEnumVal):\n \"\"\"RFC 2865\"\"\"\n val = 61\n\n\nclass _EAPPacketField(PacketField):\n\n \"\"\"\n Handles EAP-Message attribute value (the actual EAP packet).\n \"\"\"\n\n def m2i(self, pkt, m):\n ret = None\n eap_packet_len = struct.unpack(\"!H\", m[2:4])[0]\n if eap_packet_len < 254:\n # If the EAP packet has not been fragmented, build a Scapy EAP\n # packet from the data.\n ret = EAP(m)\n else:\n ret = conf.raw_layer(m)\n return ret\n\n\nclass 
RadiusAttr_EAP_Message(RadiusAttribute):\n \"\"\"\n Implements the \"EAP-Message\" attribute (RFC 3579).\n \"\"\"\n\n name = \"EAP-Message\"\n fields_desc = [\n ByteEnumField(\"type\", 79, _radius_attribute_types),\n FieldLenField(\n \"len\",\n None,\n \"value\",\n \"B\",\n adjust=lambda pkt, x: len(pkt.value) + 2\n ),\n _EAPPacketField(\"value\", \"\", EAP)\n ]\n\n\nclass RadiusAttr_Vendor_Specific(RadiusAttribute):\n \"\"\"\n Implements the \"Vendor-Specific\" attribute, as described in RFC 2865.\n \"\"\"\n\n name = \"Vendor-Specific\"\n fields_desc = [\n ByteEnumField(\"type\", 26, _radius_attribute_types),\n FieldLenField(\n \"len\",\n None,\n \"value\",\n \"B\",\n adjust=lambda pkt, x: len(pkt.value) + 8\n ),\n IntField(\"vendor_id\", 0),\n ByteField(\"vendor_type\", 0),\n FieldLenField(\n \"vendor_len\",\n None,\n \"value\",\n \"B\",\n adjust=lambda p, x: len(p.value) + 2\n ),\n StrLenField(\"value\", \"\", length_from=lambda p: p.vendor_len - 2)\n ]\n\n\n# See IANA RADIUS Packet Type Codes registry\n_packet_codes = {\n 1: \"Access-Request\",\n 2: \"Access-Accept\",\n 3: \"Access-Reject\",\n 4: \"Accounting-Request\",\n 5: \"Accounting-Response\",\n 6: \"Accounting-Status (now Interim Accounting)\",\n 7: \"Password-Request\",\n 8: \"Password-Ack\",\n 9: \"Password-Reject\",\n 10: \"Accounting-Message\",\n 11: \"Access-Challenge\",\n 12: \"Status-Server (experimental)\",\n 13: \"Status-Client (experimental)\",\n 21: \"Resource-Free-Request\",\n 22: \"Resource-Free-Response\",\n 23: \"Resource-Query-Request\",\n 24: \"Resource-Query-Response\",\n 25: \"Alternate-Resource-Reclaim-Request\",\n 26: \"NAS-Reboot-Request\",\n 27: \"NAS-Reboot-Response\",\n 28: \"Reserved\",\n 29: \"Next-Passcode\",\n 30: \"New-Pin\",\n 31: \"Terminate-Session\",\n 32: \"Password-Expired\",\n 33: \"Event-Request\",\n 34: \"Event-Response\",\n 40: \"Disconnect-Request\",\n 41: \"Disconnect-ACK\",\n 42: \"Disconnect-NAK\",\n 43: \"CoA-Request\",\n 44: \"CoA-ACK\",\n 45: \"CoA-NAK\",\n 50: \"IP-Address-Allocate\",\n 51: \"IP-Address-Release\",\n 52: \"Protocol-Error\",\n 250: \"Experimental Use\",\n 251: \"Experimental Use\",\n 252: \"Experimental Use\",\n 253: \"Experimental Use\",\n 254: \"Reserved\",\n 255: \"Reserved\"\n}\n\n\nclass Radius(Packet):\n \"\"\"\n Implements a RADIUS packet (RFC 2865).\n \"\"\"\n\n name = \"RADIUS\"\n fields_desc = [\n ByteEnumField(\"code\", 1, _packet_codes),\n ByteField(\"id\", 0),\n FieldLenField(\n \"len\",\n None,\n \"attributes\",\n \"H\",\n adjust=lambda pkt, x: len(pkt.attributes) + 20\n ),\n XStrFixedLenField(\"authenticator\", \"\", 16),\n PacketListField(\n \"attributes\",\n [],\n RadiusAttribute,\n length_from=lambda pkt: pkt.len - 20\n )\n ]\n\n def compute_authenticator(self, packed_request_auth, shared_secret):\n \"\"\"\n Computes the authenticator field (RFC 2865 - Section 3)\n \"\"\"\n\n data = prepare_packed_data(self, packed_request_auth)\n radius_mac = hashlib.md5(data + shared_secret)\n return radius_mac.digest()\n\n def post_build(self, p, pay):\n p += pay\n length = self.len\n if length is None:\n length = len(p)\n p = p[:2] + struct.pack(\"!H\", length) + p[4:]\n return p\n\n\nbind_layers(UDP, Radius, sport=1812)\nbind_layers(UDP, Radius, dport=1812)\nbind_layers(UDP, Radius, sport=1813)\nbind_layers(UDP, Radius, dport=1813)\n", "path": "scapy/layers/radius.py"}], "after_files": [{"content": "# This file is part of Scapy\n# See http://www.secdev.org/projects/scapy for more information\n# Copyright (C) Philippe Biondi <[email protected]>\n# Vincent Mauge 
<[email protected]>\n# This program is published under a GPLv2 license\n\n\"\"\"\nRADIUS (Remote Authentication Dial In User Service)\n\"\"\"\n\nimport struct\nimport hashlib\nimport hmac\nfrom scapy.compat import orb, raw\nfrom scapy.packet import Packet, Padding, bind_layers\nfrom scapy.fields import ByteField, ByteEnumField, IntField, StrLenField,\\\n XStrLenField, XStrFixedLenField, FieldLenField, PacketField,\\\n PacketListField, IPField, MultiEnumField\nfrom scapy.layers.inet import UDP\nfrom scapy.layers.eap import EAP\nfrom scapy.utils import issubtype\nfrom scapy.config import conf\nfrom scapy.error import Scapy_Exception\n\n\n# https://www.iana.org/assignments/radius-types/radius-types.xhtml\n_radius_attribute_types = {\n 1: \"User-Name\",\n 2: \"User-Password\",\n 3: \"CHAP-Password\",\n 4: \"NAS-IP-Address\",\n 5: \"NAS-Port\",\n 6: \"Service-Type\",\n 7: \"Framed-Protocol\",\n 8: \"Framed-IP-Address\",\n 9: \"Framed-IP-Netmask\",\n 10: \"Framed-Routing\",\n 11: \"Filter-Id\",\n 12: \"Framed-MTU\",\n 13: \"Framed-Compression\",\n 14: \"Login-IP-Host\",\n 15: \"Login-Service\",\n 16: \"Login-TCP-Port\",\n 17: \"Unassigned\",\n 18: \"Reply-Message\",\n 19: \"Callback-Number\",\n 20: \"Callback-Id\",\n 21: \"Unassigned\",\n 22: \"Framed-Route\",\n 23: \"Framed-IPX-Network\",\n 24: \"State\",\n 25: \"Class\",\n 26: \"Vendor-Specific\",\n 27: \"Session-Timeout\",\n 28: \"Idle-Timeout\",\n 29: \"Termination-Action\",\n 30: \"Called-Station-Id\",\n 31: \"Calling-Station-Id\",\n 32: \"NAS-Identifier\",\n 33: \"Proxy-State\",\n 34: \"Login-LAT-Service\",\n 35: \"Login-LAT-Node\",\n 36: \"Login-LAT-Group\",\n 37: \"Framed-AppleTalk-Link\",\n 38: \"Framed-AppleTalk-Network\",\n 39: \"Framed-AppleTalk-Zone\",\n 40: \"Acct-Status-Type\",\n 41: \"Acct-Delay-Time\",\n 42: \"Acct-Input-Octets\",\n 43: \"Acct-Output-Octets\",\n 44: \"Acct-Session-Id\",\n 45: \"Acct-Authentic\",\n 46: \"Acct-Session-Time\",\n 47: \"Acct-Input-Packets\",\n 48: \"Acct-Output-Packets\",\n 49: \"Acct-Terminate-Cause\",\n 50: \"Acct-Multi-Session-Id\",\n 51: \"Acct-Link-Count\",\n 52: \"Acct-Input-Gigawords\",\n 53: \"Acct-Output-Gigawords\",\n 54: \"Unassigned\",\n 55: \"Event-Timestamp\",\n 56: \"Egress-VLANID\",\n 57: \"Ingress-Filters\",\n 58: \"Egress-VLAN-Name\",\n 59: \"User-Priority-Table\",\n 60: \"CHAP-Challenge\",\n 61: \"NAS-Port-Type\",\n 62: \"Port-Limit\",\n 63: \"Login-LAT-Port\",\n 64: \"Tunnel-Type\",\n 65: \"Tunnel-Medium-Type\",\n 66: \"Tunnel-Client-Endpoint\",\n 67: \"Tunnel-Server-Endpoint\",\n 68: \"Acct-Tunnel-Connection\",\n 69: \"Tunnel-Password\",\n 70: \"ARAP-Password\",\n 71: \"ARAP-Features\",\n 72: \"ARAP-Zone-Access\",\n 73: \"ARAP-Security\",\n 74: \"ARAP-Security-Data\",\n 75: \"Password-Retry\",\n 76: \"Prompt\",\n 77: \"Connect-Info\",\n 78: \"Configuration-Token\",\n 79: \"EAP-Message\",\n 80: \"Message-Authenticator\",\n 81: \"Tunnel-Private-Group-ID\",\n 82: \"Tunnel-Assignment-ID\",\n 83: \"Tunnel-Preference\",\n 84: \"ARAP-Challenge-Response\",\n 85: \"Acct-Interim-Interval\",\n 86: \"Acct-Tunnel-Packets-Lost\",\n 87: \"NAS-Port-Id\",\n 88: \"Framed-Pool\",\n 89: \"CUI\",\n 90: \"Tunnel-Client-Auth-ID\",\n 91: \"Tunnel-Server-Auth-ID\",\n 92: \"NAS-Filter-Rule\",\n 93: \"Unassigned\",\n 94: \"Originating-Line-Info\",\n 95: \"NAS-IPv6-Address\",\n 96: \"Framed-Interface-Id\",\n 97: \"Framed-IPv6-Prefix\",\n 98: \"Login-IPv6-Host\",\n 99: \"Framed-IPv6-Route\",\n 100: \"Framed-IPv6-Pool\",\n 101: \"Error-Cause\",\n 102: \"EAP-Key-Name\",\n 103: \"Digest-Response\",\n 104: 
\"Digest-Realm\",\n 105: \"Digest-Nonce\",\n 106: \"Digest-Response-Auth\",\n 107: \"Digest-Nextnonce\",\n 108: \"Digest-Method\",\n 109: \"Digest-URI\",\n 110: \"Digest-Qop\",\n 111: \"Digest-Algorithm\",\n 112: \"Digest-Entity-Body-Hash\",\n 113: \"Digest-CNonce\",\n 114: \"Digest-Nonce-Count\",\n 115: \"Digest-Username\",\n 116: \"Digest-Opaque\",\n 117: \"Digest-Auth-Param\",\n 118: \"Digest-AKA-Auts\",\n 119: \"Digest-Domain\",\n 120: \"Digest-Stale\",\n 121: \"Digest-HA1\",\n 122: \"SIP-AOR\",\n 123: \"Delegated-IPv6-Prefix\",\n 124: \"MIP6-Feature-Vector\",\n 125: \"MIP6-Home-Link-Prefix\",\n 126: \"Operator-Name\",\n 127: \"Location-Information\",\n 128: \"Location-Data\",\n 129: \"Basic-Location-Policy-Rules\",\n 130: \"Extended-Location-Policy-Rules\",\n 131: \"Location-Capable\",\n 132: \"Requested-Location-Info\",\n 133: \"Framed-Management-Protocol\",\n 134: \"Management-Transport-Protection\",\n 135: \"Management-Policy-Id\",\n 136: \"Management-Privilege-Level\",\n 137: \"PKM-SS-Cert\",\n 138: \"PKM-CA-Cert\",\n 139: \"PKM-Config-Settings\",\n 140: \"PKM-Cryptosuite-List\",\n 141: \"PKM-SAID\",\n 142: \"PKM-SA-Descriptor\",\n 143: \"PKM-Auth-Key\",\n 144: \"DS-Lite-Tunnel-Name\",\n 145: \"Mobile-Node-Identifier\",\n 146: \"Service-Selection\",\n 147: \"PMIP6-Home-LMA-IPv6-Address\",\n 148: \"PMIP6-Visited-LMA-IPv6-Address\",\n 149: \"PMIP6-Home-LMA-IPv4-Address\",\n 150: \"PMIP6-Visited-LMA-IPv4-Address\",\n 151: \"PMIP6-Home-HN-Prefix\",\n 152: \"PMIP6-Visited-HN-Prefix\",\n 153: \"PMIP6-Home-Interface-ID\",\n 154: \"PMIP6-Visited-Interface-ID\",\n 155: \"PMIP6-Home-IPv4-HoA\",\n 156: \"PMIP6-Visited-IPv4-HoA\",\n 157: \"PMIP6-Home-DHCP4-Server-Address\",\n 158: \"PMIP6-Visited-DHCP4-Server-Address\",\n 159: \"PMIP6-Home-DHCP6-Server-Address\",\n 160: \"PMIP6-Visited-DHCP6-Server-Address\",\n 161: \"PMIP6-Home-IPv4-Gateway\",\n 162: \"PMIP6-Visited-IPv4-Gateway\",\n 163: \"EAP-Lower-Layer\",\n 164: \"GSS-Acceptor-Service-Name\",\n 165: \"GSS-Acceptor-Host-Name\",\n 166: \"GSS-Acceptor-Service-Specifics\",\n 167: \"GSS-Acceptor-Realm-Name\",\n 168: \"Framed-IPv6-Address\",\n 169: \"DNS-Server-IPv6-Address\",\n 170: \"Route-IPv6-Information\",\n 171: \"Delegated-IPv6-Prefix-Pool\",\n 172: \"Stateful-IPv6-Address-Pool\",\n 173: \"IPv6-6rd-Configuration\",\n 174: \"Allowed-Called-Station-Id\",\n 175: \"EAP-Peer-Id\",\n 176: \"EAP-Server-Id\",\n 177: \"Mobility-Domain-Id\",\n 178: \"Preauth-Timeout\",\n 179: \"Network-Id-Name\",\n 180: \"EAPoL-Announcement\",\n 181: \"WLAN-HESSID\",\n 182: \"WLAN-Venue-Info\",\n 183: \"WLAN-Venue-Language\",\n 184: \"WLAN-Venue-Name\",\n 185: \"WLAN-Reason-Code\",\n 186: \"WLAN-Pairwise-Cipher\",\n 187: \"WLAN-Group-Cipher\",\n 188: \"WLAN-AKM-Suite\",\n 189: \"WLAN-Group-Mgmt-Cipher\",\n 190: \"WLAN-RF-Band\",\n 191: \"Unassigned\",\n}\n\n\nclass RadiusAttribute(Packet):\n \"\"\"\n Implements a RADIUS attribute (RFC 2865). 
Every specific RADIUS attribute\n class should inherit from this one.\n \"\"\"\n\n name = \"Radius Attribute\"\n fields_desc = [\n ByteEnumField(\"type\", 1, _radius_attribute_types),\n FieldLenField(\"len\", None, \"value\", \"B\",\n adjust=lambda pkt, x: len(pkt.value) + 2),\n StrLenField(\"value\", \"\", length_from=lambda pkt: pkt.len - 2)\n ]\n\n registered_attributes = {}\n\n @classmethod\n def register_variant(cls):\n \"\"\"\n Registers the RADIUS attributes defined in this module.\n \"\"\"\n\n if hasattr(cls, \"val\"):\n cls.registered_attributes[cls.val] = cls\n else:\n cls.registered_attributes[cls.type.default] = cls\n\n @classmethod\n def dispatch_hook(cls, _pkt=None, *args, **kargs):\n \"\"\"\n Returns the right RadiusAttribute class for the given data.\n \"\"\"\n\n if _pkt:\n attr_type = orb(_pkt[0])\n return cls.registered_attributes.get(attr_type, cls)\n return cls\n\n def haslayer(self, cls):\n if cls == \"RadiusAttribute\":\n if isinstance(self, RadiusAttribute):\n return True\n elif issubtype(cls, RadiusAttribute):\n if isinstance(self, cls):\n return True\n return super(RadiusAttribute, self).haslayer(cls)\n\n def getlayer(self, cls, nb=1, _track=None, _subclass=True, **flt):\n return super(RadiusAttribute, self).getlayer(cls, nb=nb, _track=_track,\n _subclass=True, **flt)\n\n def post_build(self, p, pay):\n length = self.len\n if length is None:\n length = len(p)\n p = p[:1] + struct.pack(\"!B\", length) + p[2:]\n return p\n\n def guess_payload_class(self, _):\n return Padding\n\n\nclass _SpecificRadiusAttr(RadiusAttribute):\n \"\"\"\n Class from which every \"specific\" RADIUS attribute defined in this module\n inherits.\n \"\"\"\n\n __slots__ = [\"val\"]\n\n def __init__(self, _pkt=\"\", post_transform=None, _internal=0, _underlayer=None, **fields): # noqa: E501\n super(_SpecificRadiusAttr, self).__init__(\n _pkt,\n post_transform,\n _internal,\n _underlayer\n )\n self.fields[\"type\"] = self.val\n name_parts = self.__class__.__name__.split('RadiusAttr_')\n if len(name_parts) < 2:\n raise Scapy_Exception(\n \"Invalid class name: {}\".format(self.__class__.__name__)\n )\n self.name = name_parts[1].replace('_', '-')\n\n\n#\n# RADIUS attributes which values are 4 bytes integers\n#\n\nclass _RadiusAttrIntValue(_SpecificRadiusAttr):\n \"\"\"\n Implements a RADIUS attribute which value field is 4 bytes long integer.\n \"\"\"\n\n fields_desc = [\n ByteEnumField(\"type\", 5, _radius_attribute_types),\n ByteField(\"len\", 6),\n IntField(\"value\", 0)\n ]\n\n\nclass RadiusAttr_NAS_Port(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 5\n\n\nclass RadiusAttr_Framed_MTU(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 12\n\n\nclass RadiusAttr_Login_TCP_Port(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 16\n\n\nclass RadiusAttr_Session_Timeout(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 27\n\n\nclass RadiusAttr_Idle_Timeout(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 28\n\n\nclass RadiusAttr_Framed_AppleTalk_Link(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 37\n\n\nclass RadiusAttr_Framed_AppleTalk_Network(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 38\n\n\nclass RadiusAttr_Acct_Delay_Time(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 41\n\n\nclass RadiusAttr_Acct_Input_Octets(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 42\n\n\nclass RadiusAttr_Acct_Output_Octets(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 43\n\n\nclass RadiusAttr_Acct_Session_Time(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 
46\n\n\nclass RadiusAttr_Acct_Input_Packets(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 47\n\n\nclass RadiusAttr_Acct_Output_Packets(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 48\n\n\nclass RadiusAttr_Acct_Link_Count(_RadiusAttrIntValue):\n \"\"\"RFC 2866\"\"\"\n val = 51\n\n\nclass RadiusAttr_Acct_Input_Gigawords(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 52\n\n\nclass RadiusAttr_Acct_Output_Gigawords(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 53\n\n\nclass RadiusAttr_Egress_VLANID(_RadiusAttrIntValue):\n \"\"\"RFC 4675\"\"\"\n val = 56\n\n\nclass RadiusAttr_Port_Limit(_RadiusAttrIntValue):\n \"\"\"RFC 2865\"\"\"\n val = 62\n\n\nclass RadiusAttr_ARAP_Security(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 73\n\n\nclass RadiusAttr_Password_Retry(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 75\n\n\nclass RadiusAttr_Tunnel_Preference(_RadiusAttrIntValue):\n \"\"\"RFC 2868\"\"\"\n val = 83\n\n\nclass RadiusAttr_Acct_Interim_Interval(_RadiusAttrIntValue):\n \"\"\"RFC 2869\"\"\"\n val = 85\n\n\nclass RadiusAttr_Acct_Tunnel_Packets_Lost(_RadiusAttrIntValue):\n \"\"\"RFC 2867\"\"\"\n val = 86\n\n\nclass RadiusAttr_Management_Privilege_Level(_RadiusAttrIntValue):\n \"\"\"RFC 5607\"\"\"\n val = 136\n\n\nclass RadiusAttr_Mobility_Domain_Id(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 177\n\n\nclass RadiusAttr_Preauth_Timeout(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 178\n\n\nclass RadiusAttr_WLAN_Venue_Info(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 182\n\n\nclass RadiusAttr_WLAN_Reason_Code(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 185\n\n\nclass RadiusAttr_WLAN_Pairwise_Cipher(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 186\n\n\nclass RadiusAttr_WLAN_Group_Cipher(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 187\n\n\nclass RadiusAttr_WLAN_AKM_Suite(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 188\n\n\nclass RadiusAttr_WLAN_Group_Mgmt_Cipher(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 189\n\n\nclass RadiusAttr_WLAN_RF_Band(_RadiusAttrIntValue):\n \"\"\"RFC 7268\"\"\"\n val = 190\n\n\n#\n# RADIUS attributes which values are string (displayed as hex)\n#\n\nclass _RadiusAttrHexStringVal(_SpecificRadiusAttr):\n \"\"\"\n Implements a RADIUS attribute which value field is a string that will be\n as a hex string.\n \"\"\"\n\n __slots__ = [\"val\"]\n\n def __init__(self, _pkt=\"\", post_transform=None, _internal=0, _underlayer=None, **fields): # noqa: E501\n super(_RadiusAttrHexStringVal, self).__init__(\n _pkt,\n post_transform,\n _internal,\n _underlayer\n )\n self.fields[\"type\"] = self.val\n name_parts = self.__class__.__name__.split('RadiusAttr_')\n if len(name_parts) < 2:\n raise Scapy_Exception(\n \"Invalid class name: {}\".format(self.__class__.__name__)\n )\n self.name = name_parts[1].replace('_', '-')\n\n fields_desc = [\n ByteEnumField(\"type\", 24, _radius_attribute_types),\n FieldLenField(\n \"len\",\n None,\n \"value\",\n \"B\",\n adjust=lambda p, x: len(p.value) + 2\n ),\n XStrLenField(\"value\", \"\", length_from=lambda p: p.len - 2 if p.len else 0) # noqa: E501\n ]\n\n\nclass RadiusAttr_State(_RadiusAttrHexStringVal):\n \"\"\"RFC 2865\"\"\"\n val = 24\n\n\ndef prepare_packed_data(radius_packet, packed_req_authenticator):\n \"\"\"\n Pack RADIUS data prior computing the authentication MAC\n \"\"\"\n\n packed_hdr = struct.pack(\"!B\", radius_packet.code)\n packed_hdr += struct.pack(\"!B\", radius_packet.id)\n packed_hdr += struct.pack(\"!H\", radius_packet.len)\n\n 
packed_attrs = b''\n for attr in radius_packet.attributes:\n packed_attrs += raw(attr)\n\n return packed_hdr + packed_req_authenticator + packed_attrs\n\n\nclass RadiusAttr_Message_Authenticator(_RadiusAttrHexStringVal):\n \"\"\"RFC 2869\"\"\"\n val = 80\n\n fields_desc = [\n ByteEnumField(\"type\", 24, _radius_attribute_types),\n FieldLenField(\n \"len\",\n 18,\n \"value\",\n \"B\",\n ),\n XStrFixedLenField(\"value\", \"\\x00\" * 16, length=16)\n ]\n\n @staticmethod\n def compute_message_authenticator(radius_packet, packed_req_authenticator,\n shared_secret):\n \"\"\"\n Computes the \"Message-Authenticator\" of a given RADIUS packet.\n \"\"\"\n\n data = prepare_packed_data(radius_packet, packed_req_authenticator)\n radius_hmac = hmac.new(shared_secret, data, hashlib.md5)\n\n return radius_hmac.digest()\n\n#\n# RADIUS attributes which values are IPv4 prefixes\n#\n\n\nclass _RadiusAttrIPv4AddrVal(_SpecificRadiusAttr):\n \"\"\"\n Implements a RADIUS attribute which value field is an IPv4 address.\n \"\"\"\n\n __slots__ = [\"val\"]\n\n fields_desc = [\n ByteEnumField(\"type\", 4, _radius_attribute_types),\n ByteField(\"len\", 6),\n IPField(\"value\", \"0.0.0.0\")\n ]\n\n\nclass RadiusAttr_NAS_IP_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 4\n\n\nclass RadiusAttr_Framed_IP_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 8\n\n\nclass RadiusAttr_Framed_IP_Netmask(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 9\n\n\nclass RadiusAttr_Login_IP_Host(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 14\n\n\nclass RadiusAttr_Framed_IPX_Network(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 2865\"\"\"\n val = 23\n\n\nclass RadiusAttr_PMIP6_Home_LMA_IPv4_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 149\n\n\nclass RadiusAttr_PMIP6_Visited_LMA_IPv4_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 150\n\n\nclass RadiusAttr_PMIP6_Home_DHCP4_Server_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 157\n\n\nclass RadiusAttr_PMIP6_Visited_DHCP4_Server_Address(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 158\n\n\nclass RadiusAttr_PMIP6_Home_IPv4_Gateway(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 161\n\n\nclass RadiusAttr_PMIP6_Visited_IPv4_Gateway(_RadiusAttrIPv4AddrVal):\n \"\"\"RFC 6572\"\"\"\n val = 162\n\n\n# See IANA registry \"RADIUS Types\"\n_radius_attrs_values = {\n # Service-Type\n 6:\n {\n 1: \"Login\",\n 2: \"Framed\",\n 3: \"Callback Login\",\n 4: \"Callback Framed\",\n 5: \"Outbound\",\n 6: \"Administrative\",\n 7: \"NAS Prompt\",\n 8: \"Authenticate Only\",\n 9: \"Callback NAS Prompt\",\n 10: \"Call Check\",\n 11: \"Callback Administrative\",\n 12: \"Voice\",\n 13: \"Fax\",\n 14: \"Modem Relay\",\n 15: \"IAPP-Register\",\n 16: \"IAPP-AP-Check\",\n 17: \"Authorize Only\",\n 18: \"Framed-Management\",\n 19: \"Additional-Authorization\"\n },\n\n # Framed-Protocol\n 7:\n {\n 1: \"PPP\",\n 2: \"SLIP\",\n 3: \"AppleTalk Remote Access Protocol (ARAP)\",\n 4: \"Gandalf proprietary SingleLink/MultiLink protocol\",\n 5: \"Xylogics proprietary IPX/SLIP\",\n 6: \"X.75 Synchronous\",\n 7: \"GPRS PDP Context\"\n },\n\n # Framed-Routing\n 10:\n {\n 0: \"None\",\n 1: \"Send routing packets\",\n 2: \"Listen for routing packets\",\n 3: \"Send and Listen\"\n },\n\n # Framed-Compression\n 13:\n {\n 0: \"None\",\n 1: \"VJ TCP/IP header compression\",\n 2: \"IPX header compression\",\n 3: \"Stac-LZS compression\"\n },\n\n # Login-Service\n 15:\n {\n 0: \"Telnet\",\n 1: \"Rlogin\",\n 2: \"TCP Clear\",\n 3: 
\"PortMaster (proprietary)\",\n 4: \"LAT\",\n 5: \"X25-PAD\",\n 6: \"X25-T3POS\",\n 7: \"Unassigned\",\n 8: \"TCP Clear Quiet (suppresses any NAS-generated connect string)\"\n },\n\n # Termination-Action\n 29:\n {\n 0: \"Default\",\n 1: \"RADIUS-Request\"\n },\n\n # Acct-Status-Type\n 40:\n {\n 1: \"Start\",\n 2: \"Stop\",\n 3: \"Interim-Update\",\n 4: \"Unassigned\",\n 5: \"Unassigned\",\n 6: \"Unassigned\",\n 7: \"Accounting-On\",\n 8: \"Accounting-Off\",\n 9: \"Tunnel-Start\",\n 10: \"Tunnel-Stop\",\n 11: \"Tunnel-Reject\",\n 12: \"Tunnel-Link-Start\",\n 13: \"Tunnel-Link-Stop\",\n 14: \"Tunnel-Link-Reject\",\n 15: \"Failed\"\n },\n\n # Acct-Authentic\n 45:\n {\n 1: \"RADIUS\",\n 2: \"Local\",\n 3: \"Remote\",\n 4: \"Diameter\"\n },\n\n # Acct-Terminate-Cause\n 49:\n {\n 1: \"User Request\",\n 2: \"Lost Carrier\",\n 3: \"Lost Service\",\n 4: \"Idle Timeout\",\n 5: \"Session Timeout\",\n 6: \"Admin Reset\",\n 7: \"Admin Reboot\",\n 8: \"Port Error\",\n 9: \"NAS Error\",\n 10: \"NAS Request\",\n 11: \"NAS Reboot\",\n 12: \"Port Unneeded\",\n 13: \"Port Preempted\",\n 14: \"Port Suspended\",\n 15: \"Service Unavailable\",\n 16: \"Callback\",\n 17: \"User Error\",\n 18: \"Host Request\",\n 19: \"Supplicant Restart\",\n 20: \"Reauthentication Failure\",\n 21: \"Port Reinitialized\",\n 22: \"Port Administratively Disabled\",\n 23: \"Lost Power\",\n },\n\n # NAS-Port-Type\n 61:\n {\n 0: \"Async\",\n 1: \"Sync\",\n 2: \"ISDN Sync\",\n 3: \"ISDN Async V.120\",\n 4: \"ISDN Async V.110\",\n 5: \"Virtual\",\n 6: \"PIAFS\",\n 7: \"HDLC Clear Channel\",\n 8: \"X.25\",\n 9: \"X.75\",\n 10: \"G.3 Fax\",\n 11: \"SDSL - Symmetric DSL\",\n 12: \"ADSL-CAP - Asymmetric DSL, Carrierless Amplitude Phase Modulation\", # noqa: E501\n 13: \"ADSL-DMT - Asymmetric DSL, Discrete Multi-Tone\",\n 14: \"IDSL - ISDN Digital Subscriber Line\",\n 15: \"Ethernet\",\n 16: \"xDSL - Digital Subscriber Line of unknown type\",\n 17: \"Cable\",\n 18: \"Wireles - Other\",\n 19: \"Wireless - IEEE 802.11\",\n 20: \"Token-Ring\",\n 21: \"FDDI\",\n 22: \"Wireless - CDMA2000\",\n 23: \"Wireless - UMTS\",\n 24: \"Wireless - 1X-EV\",\n 25: \"IAPP\",\n 26: \"FTTP - Fiber to the Premises\",\n 27: \"Wireless - IEEE 802.16\",\n 28: \"Wireless - IEEE 802.20\",\n 29: \"Wireless - IEEE 802.22\",\n 30: \"PPPoA - PPP over ATM\",\n 31: \"PPPoEoA - PPP over Ethernet over ATM\",\n 32: \"PPPoEoE - PPP over Ethernet over Ethernet\",\n 33: \"PPPoEoVLAN - PPP over Ethernet over VLAN\",\n 34: \"PPPoEoQinQ - PPP over Ethernet over IEEE 802.1QinQ\",\n 35: \"xPON - Passive Optical Network\",\n 36: \"Wireless - XGP\",\n 37: \"WiMAX Pre-Release 8 IWK Function\",\n 38: \"WIMAX-WIFI-IWK: WiMAX WIFI Interworking\",\n 39: \"WIMAX-SFF: Signaling Forwarding Function for LTE/3GPP2\",\n 40: \"WIMAX-HA-LMA: WiMAX HA and or LMA function\",\n 41: \"WIMAX-DHCP: WIMAX DHCP service\",\n 42: \"WIMAX-LBS: WiMAX location based service\",\n 43: \"WIMAX-WVS: WiMAX voice service\"\n },\n\n # Tunnel-Type\n 64:\n {\n 1: \"Point-to-Point Tunneling Protocol (PPTP)\",\n 2: \"Layer Two Forwarding (L2F)\",\n 3: \"Layer Two Tunneling Protocol (L2TP)\",\n 4: \"Ascend Tunnel Management Protocol (ATMP)\",\n 5: \"Virtual Tunneling Protocol (VTP)\",\n 6: \"IP Authentication Header in the Tunnel-mode (AH)\",\n 7: \"IP-in-IP Encapsulation (IP-IP)\",\n 8: \"Minimal IP-in-IP Encapsulation (MIN-IP-IP)\",\n 9: \"IP Encapsulating Security Payload in the Tunnel-mode (ESP)\",\n 10: \"Generic Route Encapsulation (GRE)\",\n 11: \"Bay Dial Virtual Services (DVS)\",\n 12: \"IP-in-IP Tunneling\",\n 13: 
\"Virtual LANs (VLAN)\"\n },\n\n # Tunnel-Medium-Type\n 65:\n {\n 1: \"IPv4 (IP version 4)\",\n 2: \"IPv6 (IP version 6)\",\n 3: \"NSAP\",\n 4: \"HDLC (8-bit multidrop)\",\n 5: \"BBN 1822\",\n 6: \"802\",\n 7: \"E.163 (POTS)\",\n 8: \"E.164 (SMDS, Frame Relay, ATM)\",\n 9: \"F.69 (Telex)\",\n 10: \"X.121 (X.25, Frame Relay)\",\n 11: \"IPX\",\n 12: \"Appletalk\",\n 13: \"Decnet IV\",\n 14: \"Banyan Vine\",\n 15: \"E.164 with NSAP format subaddress\"\n },\n\n # ARAP-Zone-Access\n 72:\n {\n 1: \"Only allow access to default zone\",\n 2: \"Use zone filter inclusively\",\n 3: \"Not used\",\n 4: \"Use zone filter exclusively\"\n },\n\n # Prompt\n 76:\n {\n 0: \"No Echo\",\n 1: \"Echo\"\n },\n\n # Error-Cause Attribute\n 101:\n {\n 201: \"Residual Session Context Removed\",\n 202: \"Invalid EAP Packet (Ignored)\",\n 401: \"Unsupported Attribute\",\n 402: \"Missing Attribute\",\n 403: \"NAS Identification Mismatch\",\n 404: \"Invalid Request\",\n 405: \"Unsupported Service\",\n 406: \"Unsupported Extension\",\n 407: \"Invalid Attribute Value\",\n 501: \"Administratively Prohibited\",\n 502: \"Request Not Routable (Proxy)\",\n 503: \"Session Context Not Found\",\n 504: \"Session Context Not Removable\",\n 505: \"Other Proxy Processing Error\",\n 506: \"Resources Unavailable\",\n 507: \"Request Initiated\",\n 508: \"Multiple Session Selection Unsupported\",\n 509: \"Location-Info-Required\",\n 601: \"Response Too Big\"\n },\n\n # Operator Namespace Identifier - Attribute 126\n 126:\n {\n 0x30: \"TADIG\",\n 0x31: \"REALM\",\n 0x32: \"E212\",\n 0x33: \"ICC\",\n 0xFF: \"Reserved\"\n },\n\n # Basic-Location-Policy-Rules\n 129:\n {\n 0: \"Retransmission allowed\",\n },\n\n # Location-Capable\n 131:\n {\n 1: \"CIVIC_LOCATION\",\n 2: \"GEO_LOCATION\",\n 4: \"USERS_LOCATION\",\n 8: \"NAS_LOCATION\"\n },\n\n # Framed-Management-Protocol\n 133:\n {\n 1: \"SNMP\",\n 2: \"Web-based\",\n 3: \"NETCONF\",\n 4: \"FTP\",\n 5: \"TFTP\",\n 6: \"SFTP\",\n 7: \"RCP\",\n 8: \"SCP\"\n },\n\n # Management-Transport-Protection\n 134:\n {\n 1: \"No-Protection\",\n 2: \"Integrity-Protection\",\n 3: \"Integrity-Confidentiality-Protection\",\n },\n}\n\n\nclass _RadiusAttrIntEnumVal(_SpecificRadiusAttr):\n \"\"\"\n Implements a RADIUS attribute which value field is 4 bytes long integer.\n \"\"\"\n\n __slots__ = [\"val\"]\n\n fields_desc = [\n ByteEnumField(\"type\", 6, _radius_attribute_types),\n ByteField(\"len\", 6),\n MultiEnumField(\n \"value\",\n 0,\n _radius_attrs_values,\n depends_on=lambda p: p.type,\n fmt=\"I\"\n )\n ]\n\n\nclass RadiusAttr_Service_Type(_RadiusAttrIntEnumVal):\n \"\"\"RFC 2865\"\"\"\n val = 6\n\n\nclass RadiusAttr_Framed_Protocol(_RadiusAttrIntEnumVal):\n \"\"\"RFC 2865\"\"\"\n val = 7\n\n\nclass RadiusAttr_NAS_Port_Type(_RadiusAttrIntEnumVal):\n \"\"\"RFC 2865\"\"\"\n val = 61\n\n\nclass _EAPPacketField(PacketField):\n\n \"\"\"\n Handles EAP-Message attribute value (the actual EAP packet).\n \"\"\"\n\n def m2i(self, pkt, m):\n ret = None\n eap_packet_len = struct.unpack(\"!H\", m[2:4])[0]\n if eap_packet_len < 254:\n # If the EAP packet has not been fragmented, build a Scapy EAP\n # packet from the data.\n ret = EAP(m)\n else:\n ret = conf.raw_layer(m)\n return ret\n\n\nclass RadiusAttr_EAP_Message(RadiusAttribute):\n \"\"\"\n Implements the \"EAP-Message\" attribute (RFC 3579).\n \"\"\"\n\n name = \"EAP-Message\"\n fields_desc = [\n ByteEnumField(\"type\", 79, _radius_attribute_types),\n FieldLenField(\n \"len\",\n None,\n \"value\",\n \"B\",\n adjust=lambda pkt, x: len(pkt.value) + 2\n ),\n 
_EAPPacketField(\"value\", \"\", EAP)\n ]\n\n\nclass RadiusAttr_Vendor_Specific(RadiusAttribute):\n \"\"\"\n Implements the \"Vendor-Specific\" attribute, as described in RFC 2865.\n \"\"\"\n\n name = \"Vendor-Specific\"\n fields_desc = [\n ByteEnumField(\"type\", 26, _radius_attribute_types),\n FieldLenField(\n \"len\",\n None,\n \"value\",\n \"B\",\n adjust=lambda pkt, x: len(pkt.value) + 8\n ),\n IntField(\"vendor_id\", 0),\n ByteField(\"vendor_type\", 0),\n FieldLenField(\n \"vendor_len\",\n None,\n \"value\",\n \"B\",\n adjust=lambda p, x: len(p.value) + 2\n ),\n StrLenField(\"value\", \"\", length_from=lambda p: p.vendor_len - 2)\n ]\n\n\n# See IANA RADIUS Packet Type Codes registry\n_packet_codes = {\n 1: \"Access-Request\",\n 2: \"Access-Accept\",\n 3: \"Access-Reject\",\n 4: \"Accounting-Request\",\n 5: \"Accounting-Response\",\n 6: \"Accounting-Status (now Interim Accounting)\",\n 7: \"Password-Request\",\n 8: \"Password-Ack\",\n 9: \"Password-Reject\",\n 10: \"Accounting-Message\",\n 11: \"Access-Challenge\",\n 12: \"Status-Server (experimental)\",\n 13: \"Status-Client (experimental)\",\n 21: \"Resource-Free-Request\",\n 22: \"Resource-Free-Response\",\n 23: \"Resource-Query-Request\",\n 24: \"Resource-Query-Response\",\n 25: \"Alternate-Resource-Reclaim-Request\",\n 26: \"NAS-Reboot-Request\",\n 27: \"NAS-Reboot-Response\",\n 28: \"Reserved\",\n 29: \"Next-Passcode\",\n 30: \"New-Pin\",\n 31: \"Terminate-Session\",\n 32: \"Password-Expired\",\n 33: \"Event-Request\",\n 34: \"Event-Response\",\n 40: \"Disconnect-Request\",\n 41: \"Disconnect-ACK\",\n 42: \"Disconnect-NAK\",\n 43: \"CoA-Request\",\n 44: \"CoA-ACK\",\n 45: \"CoA-NAK\",\n 50: \"IP-Address-Allocate\",\n 51: \"IP-Address-Release\",\n 52: \"Protocol-Error\",\n 250: \"Experimental Use\",\n 251: \"Experimental Use\",\n 252: \"Experimental Use\",\n 253: \"Experimental Use\",\n 254: \"Reserved\",\n 255: \"Reserved\"\n}\n\n\nclass Radius(Packet):\n \"\"\"\n Implements a RADIUS packet (RFC 2865).\n \"\"\"\n\n name = \"RADIUS\"\n fields_desc = [\n ByteEnumField(\"code\", 1, _packet_codes),\n ByteField(\"id\", 0),\n FieldLenField(\n \"len\",\n None,\n \"attributes\",\n \"H\",\n adjust=lambda pkt, x: len(pkt.attributes) + 20\n ),\n XStrFixedLenField(\"authenticator\", \"\", 16),\n PacketListField(\n \"attributes\",\n [],\n RadiusAttribute,\n length_from=lambda pkt: pkt.len - 20\n )\n ]\n\n def compute_authenticator(self, packed_request_auth, shared_secret):\n \"\"\"\n Computes the authenticator field (RFC 2865 - Section 3)\n \"\"\"\n\n data = prepare_packed_data(self, packed_request_auth)\n radius_mac = hashlib.md5(data + shared_secret)\n return radius_mac.digest()\n\n def post_build(self, p, pay):\n p += pay\n length = self.len\n if length is None:\n length = len(p)\n p = p[:2] + struct.pack(\"!H\", length) + p[4:]\n return p\n\n\nbind_layers(UDP, Radius, sport=1812)\nbind_layers(UDP, Radius, dport=1812)\nbind_layers(UDP, Radius, sport=1813)\nbind_layers(UDP, Radius, dport=1813)\n", "path": "scapy/layers/radius.py"}]} |
gh_patches_debug_188 | rasdani/github-patches | git_diff | joke2k__faker-1710 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Lower bound for typing_extensions?
* Faker version: 14.1.0
* OS: OpenSUSE Leap 15.3
For old Python versions (before Python 3.8), *faker* requires the *typing_extensions* package with at least version 3.10.0.2. Due to some other dependencies inside my project, I am required to use version 3.7.4.3.
### Steps to reproduce
1. `pip install faker`
### Expected behavior
*faker* works with `typing_extensions==3.7.4.3` as well, to allow better integration with larger systems which are stuck at such older versions for various reasons.
### Actual behavior
*faker* declares `typing_extensions>=3.10.0.2` (#1557), although I could see no issue when using it with `typing_extensions==3.7.4.3`.
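For illustration, a minimal smoke test one could run to back this up (the exact pin command and the specific provider calls are assumptions for this sketch, not part of the report):

```python
# Sketch: verify faker still imports and generates data with the older
# typing_extensions pinned, e.g. after `pip install "typing_extensions==3.7.4.3"`.
import pkg_resources
from faker import Faker

# pkg_resources is used here because importlib.metadata is not available on Python 3.7
print(pkg_resources.get_distribution("typing_extensions").version)  # expect 3.7.4.3 in this scenario

fake = Faker()
print(fake.name())       # a few basic provider calls as a quick sanity check
print(fake.pyint())
print(fake.date_time())
```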
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 #!/usr/bin/env python
2
3 from pathlib import Path
4
5 from setuptools import find_packages, setup
6
7 here = Path(__file__).resolve().parent
8 README = (here / "README.rst").read_text(encoding="utf-8")
9 VERSION = (here / "VERSION").read_text(encoding="utf-8").strip()
10
11 excluded_packages = ["docs", "tests", "tests.*"]
12
13
14 # this module can be zip-safe if the zipimporter implements iter_modules or if
15 # pkgutil.iter_importer_modules has registered a dispatch for the zipimporter.
16 try:
17 import pkgutil
18 import zipimport
19
20 zip_safe = (
21 hasattr(zipimport.zipimporter, "iter_modules")
22 or zipimport.zipimporter in pkgutil.iter_importer_modules.registry.keys()
23 )
24 except AttributeError:
25 zip_safe = False
26
27 setup(
28 name="Faker",
29 version=VERSION,
30 description="Faker is a Python package that generates fake data for you.",
31 long_description=README,
32 entry_points={
33 "console_scripts": ["faker=faker.cli:execute_from_command_line"],
34 "pytest11": ["faker = faker.contrib.pytest.plugin"],
35 },
36 classifiers=[
37 # See https://pypi.org/pypi?%3Aaction=list_classifiers
38 "Development Status :: 5 - Production/Stable",
39 "Environment :: Console",
40 "Intended Audience :: Developers",
41 "Programming Language :: Python",
42 "Programming Language :: Python :: 3",
43 "Programming Language :: Python :: 3 :: Only",
44 "Programming Language :: Python :: 3.6",
45 "Programming Language :: Python :: 3.7",
46 "Programming Language :: Python :: 3.8",
47 "Programming Language :: Python :: 3.9",
48 "Programming Language :: Python :: Implementation :: CPython",
49 "Programming Language :: Python :: Implementation :: PyPy",
50 "Topic :: Software Development :: Libraries :: Python Modules",
51 "Topic :: Software Development :: Testing",
52 "Topic :: Utilities",
53 "License :: OSI Approved :: MIT License",
54 ],
55 keywords="faker fixtures data test mock generator",
56 author="joke2k",
57 author_email="[email protected]",
58 url="https://github.com/joke2k/faker",
59 project_urls={
60 "Bug Tracker": "https://github.com/joke2k/faker/issues",
61 "Changes": "https://github.com/joke2k/faker/blob/master/CHANGELOG.md",
62 "Documentation": "http://faker.rtfd.org/",
63 "Source Code": "https://github.com/joke2k/faker",
64 },
65 license="MIT License",
66 packages=find_packages(exclude=excluded_packages),
67 package_data={
68 "faker": ["py.typed"],
69 },
70 platforms=["any"],
71 zip_safe=zip_safe,
72 python_requires=">=3.6",
73 install_requires=[
74 "python-dateutil>=2.4",
75 "typing-extensions>=3.10.0.2;python_version<'3.8'",
76 ],
77 )
78
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -72,6 +72,6 @@
python_requires=">=3.6",
install_requires=[
"python-dateutil>=2.4",
- "typing-extensions>=3.10.0.2;python_version<'3.8'",
+ "typing-extensions>=3.7.4.3;python_version<'3.8'",
],
)
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -72,6 +72,6 @@\n python_requires=\">=3.6\",\n install_requires=[\n \"python-dateutil>=2.4\",\n- \"typing-extensions>=3.10.0.2;python_version<'3.8'\",\n+ \"typing-extensions>=3.7.4.3;python_version<'3.8'\",\n ],\n )\n", "issue": "Lower bound for typing_extensions?\n* Faker version: 14.1.0\r\n* OS: OpenSUSE Leap 15.3\r\n\r\nFor old Python versions (before Python 3.8), *faker* requires the *typing_extensions* with at least version 3.10.0.2. Due to some other dependencies inside my project, I am required to use version 3.7.4.3.\r\n\r\n### Steps to reproduce\r\n\r\n1. `pip install faker`\r\n\r\n### Expected behavior\r\n\r\n*faker* works with `typing_extensions==3.7.4.3` as well, to allow better integration with larger systems which are stuck at such older versions for some reasons.\r\n\r\n### Actual behavior\r\n\r\n*faker* declares `typing_extensions>=3.10.0.2` (#1557), although I could see no issue when using it with `typing_extensions==3.7.4.3`.\n", "before_files": [{"content": "#!/usr/bin/env python\n\nfrom pathlib import Path\n\nfrom setuptools import find_packages, setup\n\nhere = Path(__file__).resolve().parent\nREADME = (here / \"README.rst\").read_text(encoding=\"utf-8\")\nVERSION = (here / \"VERSION\").read_text(encoding=\"utf-8\").strip()\n\nexcluded_packages = [\"docs\", \"tests\", \"tests.*\"]\n\n\n# this module can be zip-safe if the zipimporter implements iter_modules or if\n# pkgutil.iter_importer_modules has registered a dispatch for the zipimporter.\ntry:\n import pkgutil\n import zipimport\n\n zip_safe = (\n hasattr(zipimport.zipimporter, \"iter_modules\")\n or zipimport.zipimporter in pkgutil.iter_importer_modules.registry.keys()\n )\nexcept AttributeError:\n zip_safe = False\n\nsetup(\n name=\"Faker\",\n version=VERSION,\n description=\"Faker is a Python package that generates fake data for you.\",\n long_description=README,\n entry_points={\n \"console_scripts\": [\"faker=faker.cli:execute_from_command_line\"],\n \"pytest11\": [\"faker = faker.contrib.pytest.plugin\"],\n },\n classifiers=[\n # See https://pypi.org/pypi?%3Aaction=list_classifiers\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n \"Topic :: Software Development :: Testing\",\n \"Topic :: Utilities\",\n \"License :: OSI Approved :: MIT License\",\n ],\n keywords=\"faker fixtures data test mock generator\",\n author=\"joke2k\",\n author_email=\"[email protected]\",\n url=\"https://github.com/joke2k/faker\",\n project_urls={\n \"Bug Tracker\": \"https://github.com/joke2k/faker/issues\",\n \"Changes\": \"https://github.com/joke2k/faker/blob/master/CHANGELOG.md\",\n \"Documentation\": \"http://faker.rtfd.org/\",\n \"Source Code\": \"https://github.com/joke2k/faker\",\n },\n license=\"MIT License\",\n packages=find_packages(exclude=excluded_packages),\n package_data={\n \"faker\": [\"py.typed\"],\n },\n platforms=[\"any\"],\n zip_safe=zip_safe,\n 
python_requires=\">=3.6\",\n install_requires=[\n \"python-dateutil>=2.4\",\n \"typing-extensions>=3.10.0.2;python_version<'3.8'\",\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\nfrom pathlib import Path\n\nfrom setuptools import find_packages, setup\n\nhere = Path(__file__).resolve().parent\nREADME = (here / \"README.rst\").read_text(encoding=\"utf-8\")\nVERSION = (here / \"VERSION\").read_text(encoding=\"utf-8\").strip()\n\nexcluded_packages = [\"docs\", \"tests\", \"tests.*\"]\n\n\n# this module can be zip-safe if the zipimporter implements iter_modules or if\n# pkgutil.iter_importer_modules has registered a dispatch for the zipimporter.\ntry:\n import pkgutil\n import zipimport\n\n zip_safe = (\n hasattr(zipimport.zipimporter, \"iter_modules\")\n or zipimport.zipimporter in pkgutil.iter_importer_modules.registry.keys()\n )\nexcept AttributeError:\n zip_safe = False\n\nsetup(\n name=\"Faker\",\n version=VERSION,\n description=\"Faker is a Python package that generates fake data for you.\",\n long_description=README,\n entry_points={\n \"console_scripts\": [\"faker=faker.cli:execute_from_command_line\"],\n \"pytest11\": [\"faker = faker.contrib.pytest.plugin\"],\n },\n classifiers=[\n # See https://pypi.org/pypi?%3Aaction=list_classifiers\n \"Development Status :: 5 - Production/Stable\",\n \"Environment :: Console\",\n \"Intended Audience :: Developers\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Programming Language :: Python :: Implementation :: CPython\",\n \"Programming Language :: Python :: Implementation :: PyPy\",\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n \"Topic :: Software Development :: Testing\",\n \"Topic :: Utilities\",\n \"License :: OSI Approved :: MIT License\",\n ],\n keywords=\"faker fixtures data test mock generator\",\n author=\"joke2k\",\n author_email=\"[email protected]\",\n url=\"https://github.com/joke2k/faker\",\n project_urls={\n \"Bug Tracker\": \"https://github.com/joke2k/faker/issues\",\n \"Changes\": \"https://github.com/joke2k/faker/blob/master/CHANGELOG.md\",\n \"Documentation\": \"http://faker.rtfd.org/\",\n \"Source Code\": \"https://github.com/joke2k/faker\",\n },\n license=\"MIT License\",\n packages=find_packages(exclude=excluded_packages),\n package_data={\n \"faker\": [\"py.typed\"],\n },\n platforms=[\"any\"],\n zip_safe=zip_safe,\n python_requires=\">=3.6\",\n install_requires=[\n \"python-dateutil>=2.4\",\n \"typing-extensions>=3.7.4.3;python_version<'3.8'\",\n ],\n)\n", "path": "setup.py"}]} |
gh_patches_debug_189 | rasdani/github-patches | git_diff | yt-project__yt-3238 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
missing declaration of used attribute?
### Bug report
While trying to load Enzo data into yt (cf. the Python code below), we run into an issue: the (lazy?) loading in `yt.data_objects.DatasetSeries` is triggered when accessing `__getitem__` of the resulting EnzoSimulation object and fails with an AttributeError (cf. the stack trace below) because it uses `self.kwargs`. As far as I know, this attribute is only declared in `DatasetSeries.__init__`, which itself is not invoked by the subclasses involved here (`yt.data_objects.SimulationTimeSeries` -> `yt.frontends.enzo.simulation_handling.EnzoSimulation`). Hence the loading fails.
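The pattern can be reproduced without yt at all; the class names below are chosen only to mirror the description above (a hypothetical sketch, not yt code):

```python
# Base declares self.kwargs only in its own __init__; the subclass overrides
# __init__ without calling super(), so instances never get the attribute.
class Base:
    def __init__(self, outputs, **kwargs):
        self._pre_outputs = outputs
        self.kwargs = kwargs            # only place the attribute is created

    def __getitem__(self, key):
        return self._load(self._pre_outputs[key], **self.kwargs)

    def _load(self, fn, **kwargs):
        return fn


class Sub(Base):
    def __init__(self, parameter_filename):
        # note: no super().__init__(...), so self.kwargs is never set
        self._pre_outputs = [parameter_filename]


try:
    Sub("parameter_file.txt")[-1]
except AttributeError as exc:
    print(exc)  # 'Sub' object has no attribute 'kwargs'
```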
**Code for reproduction**
```python
import yt
from yt.extensions.astro_analysis.halo_analysis import HaloCatalog
yt.enable_parallelism()
es = yt.load_simulation("/mnt/lustre/carla/Box100/RandomSeedStudy/Wunder-L4/parameter_file.txt", "Enzo", find_outputs=True)
hc = HaloCatalog(data_ds=es, finder_method='rockstar', finder_kwargs={"num_readers": 2, "num_writers": 4})
hc.create()
```
**Actual outcome**
```
File "/users/carla/scripts/run_rockstar-galaxies.py", line 9, in <module>
hc.create()
File "/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_catalog/halo_catalog.py", line 333, in create
self._run(save_halos, save_output,
File "/opt/apps/pkgs/yt/4.0.dev0/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt/utilities/parallel_tools/parallel_analysis_interface.py", line 310, in barrierize
retval = func(*args, **kwargs)
File "/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_catalog/halo_catalog.py", line 224, in _run
self.finder_method(self)
File "/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_catalog/halo_finding_methods.py", line 39, in __call__
return self.function(hc, *self.args, **self.kwargs)
File "/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_catalog/halo_finding_methods.py", line 87, in _rockstar_method
rh = RockstarHaloFinder(ds, **finder_kwargs)
File "/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_finding/rockstar/rockstar.py", line 234, in __init__
tds = ts[-1] # Cache a reference
File "/opt/apps/pkgs/yt/4.0.dev0/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt/data_objects/time_series.py", line 214, in __getitem__
o = self._load(o, **self.kwargs)
P000 yt : [ERROR ] 2021-04-14 15:27:09,625 AttributeError: 'EnzoSimulation' object has no attribute 'kwargs'
```
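A quick, hypothetical way to confirm from an interactive session that the subclass never runs the base `__init__` (and therefore never sets `kwargs`), assuming yt is importable, without going through the whole halo pipeline:

```python
import inspect
from yt.data_objects.time_series import DatasetSeries, SimulationTimeSeries

src = inspect.getsource(SimulationTimeSeries.__init__)
# In the file content quoted below, neither check finds anything:
print("kwargs" in src)                                                # False
print("super().__init__" in src or "DatasetSeries.__init__" in src)   # False
# ...while the failing method does rely on the attribute:
print("kwargs" in inspect.getsource(DatasetSeries.__getitem__))       # True
```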
**Expected outcome**
well, I expect no AttributeError ;)
**Version Information**
* Operating System: CentOS 7
* Python Version: 3.8.9 (cpython)
* yt version: 4.0.dev0
* Other Libraries (if applicable): yt_astro_analysis (latest commit on master i.e. 313cf31)
Python itself as well as yt and yt_astro_analysis are compiled/installed from source. Supporting packages/dependencies (e.g. unyt) are installed with pip.
Thanks again for any help you can provide to clarify this issue.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `yt/data_objects/time_series.py`
Content:
```
1 import functools
2 import glob
3 import inspect
4 import os
5 import weakref
6 from functools import wraps
7
8 import numpy as np
9 from more_itertools import always_iterable
10
11 from yt._maintenance.deprecation import issue_deprecation_warning
12 from yt.config import ytcfg
13 from yt.data_objects.analyzer_objects import AnalysisTask, create_quantity_proxy
14 from yt.data_objects.particle_trajectories import ParticleTrajectories
15 from yt.funcs import is_sequence, mylog
16 from yt.units.yt_array import YTArray, YTQuantity
17 from yt.utilities.exceptions import YTException
18 from yt.utilities.object_registries import (
19 analysis_task_registry,
20 data_object_registry,
21 derived_quantity_registry,
22 simulation_time_series_registry,
23 )
24 from yt.utilities.parallel_tools.parallel_analysis_interface import (
25 communication_system,
26 parallel_objects,
27 parallel_root_only,
28 )
29
30
31 class AnalysisTaskProxy:
32 def __init__(self, time_series):
33 self.time_series = time_series
34
35 def __getitem__(self, key):
36 task_cls = analysis_task_registry[key]
37
38 @wraps(task_cls.__init__)
39 def func(*args, **kwargs):
40 task = task_cls(*args, **kwargs)
41 return self.time_series.eval(task)
42
43 return func
44
45 def keys(self):
46 return analysis_task_registry.keys()
47
48 def __contains__(self, key):
49 return key in analysis_task_registry
50
51
52 def get_ds_prop(propname):
53 def _eval(params, ds):
54 return getattr(ds, propname)
55
56 cls = type(propname, (AnalysisTask,), dict(eval=_eval, _params=tuple()))
57 return cls
58
59
60 attrs = (
61 "refine_by",
62 "dimensionality",
63 "current_time",
64 "domain_dimensions",
65 "domain_left_edge",
66 "domain_right_edge",
67 "unique_identifier",
68 "current_redshift",
69 "cosmological_simulation",
70 "omega_matter",
71 "omega_lambda",
72 "omega_radiation",
73 "hubble_constant",
74 )
75
76
77 class TimeSeriesParametersContainer:
78 def __init__(self, data_object):
79 self.data_object = data_object
80
81 def __getattr__(self, attr):
82 if attr in attrs:
83 return self.data_object.eval(get_ds_prop(attr)())
84 raise AttributeError(attr)
85
86
87 class DatasetSeries:
88 r"""The DatasetSeries object is a container of multiple datasets,
89 allowing easy iteration and computation on them.
90
91 DatasetSeries objects are designed to provide easy ways to access,
92 analyze, parallelize and visualize multiple datasets sequentially. This is
93 primarily expressed through iteration, but can also be constructed via
94 analysis tasks (see :ref:`time-series-analysis`).
95
96 Note that contained datasets are lazily loaded and weakly referenced. This means
97 that in order to perform follow-up operations on data it's best to define handles on
98 these datasets during iteration.
99
100 Parameters
101 ----------
102 outputs : list of filenames, or pattern
103 A list of filenames, for instance ["DD0001/DD0001", "DD0002/DD0002"],
104 or a glob pattern (i.e. containing wildcards '[]?!*') such as "DD*/DD*.index".
105 In the latter case, results are sorted automatically.
106 Filenames and patterns can be of type str, os.Pathlike or bytes.
107 parallel : True, False or int
108 This parameter governs the behavior when .piter() is called on the
109 resultant DatasetSeries object. If this is set to False, the time
110 series will not iterate in parallel when .piter() is called. If
111 this is set to either True, one processor will be allocated for
112 each iteration of the loop. If this is set to an integer, the loop
113 will be parallelized over this many workgroups. It the integer
114 value is less than the total number of available processors,
115 more than one processor will be allocated to a given loop iteration,
116 causing the functionality within the loop to be run in parallel.
117 setup_function : callable, accepts a ds
118 This function will be called whenever a dataset is loaded.
119 mixed_dataset_types : True or False, default False
120 Set to True if the DatasetSeries will load different dataset types, set
121 to False if loading dataset of a single type as this will result in a
122 considerable speed up from not having to figure out the dataset type.
123
124 Examples
125 --------
126
127 >>> ts = DatasetSeries(
128 "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0")
129 >>> for ds in ts:
130 ... SlicePlot(ds, "x", ("gas", "density")).save()
131 ...
132 >>> def print_time(ds):
133 ... print(ds.current_time)
134 ...
135 >>> ts = DatasetSeries(
136 ... "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0",
137 ... setup_function = print_time)
138 ...
139 >>> for ds in ts:
140 ... SlicePlot(ds, "x", ("gas", "density")).save()
141
142 """
143
144 def __init_subclass__(cls, *args, **kwargs):
145 super().__init_subclass__(*args, **kwargs)
146 code_name = cls.__name__[: cls.__name__.find("Simulation")]
147 if code_name:
148 simulation_time_series_registry[code_name] = cls
149 mylog.debug("Registering simulation: %s as %s", code_name, cls)
150
151 def __new__(cls, outputs, *args, **kwargs):
152 try:
153 outputs = cls._get_filenames_from_glob_pattern(outputs)
154 except TypeError:
155 pass
156 ret = super().__new__(cls)
157 ret._pre_outputs = outputs[:]
158 return ret
159
160 def __init__(
161 self,
162 outputs,
163 parallel=True,
164 setup_function=None,
165 mixed_dataset_types=False,
166 **kwargs,
167 ):
168 # This is needed to properly set _pre_outputs for Simulation subclasses.
169 self._mixed_dataset_types = mixed_dataset_types
170 if is_sequence(outputs) and not isinstance(outputs, str):
171 self._pre_outputs = outputs[:]
172 self.tasks = AnalysisTaskProxy(self)
173 self.params = TimeSeriesParametersContainer(self)
174 if setup_function is None:
175
176 def _null(x):
177 return None
178
179 setup_function = _null
180 self._setup_function = setup_function
181 for type_name in data_object_registry:
182 setattr(
183 self, type_name, functools.partial(DatasetSeriesObject, self, type_name)
184 )
185 self.parallel = parallel
186 self.kwargs = kwargs
187
188 @staticmethod
189 def _get_filenames_from_glob_pattern(outputs):
190 """
191 Helper function to DatasetSeries.__new__
192 handle a special case where "outputs" is assumed to be really a pattern string
193 """
194 pattern = outputs
195 epattern = os.path.expanduser(pattern)
196 data_dir = ytcfg.get("yt", "test_data_dir")
197 # if no match if found from the current work dir,
198 # we try to match the pattern from the test data dir
199 file_list = glob.glob(epattern) or glob.glob(os.path.join(data_dir, epattern))
200 if not file_list:
201 raise FileNotFoundError(f"No match found for pattern : {pattern}")
202 return sorted(file_list)
203
204 def __getitem__(self, key):
205 if isinstance(key, slice):
206 if isinstance(key.start, float):
207 return self.get_range(key.start, key.stop)
208 # This will return a sliced up object!
209 return DatasetSeries(
210 self._pre_outputs[key], parallel=self.parallel, **self.kwargs
211 )
212 o = self._pre_outputs[key]
213 if isinstance(o, (str, os.PathLike)):
214 o = self._load(o, **self.kwargs)
215 self._setup_function(o)
216 return o
217
218 def __len__(self):
219 return len(self._pre_outputs)
220
221 @property
222 def outputs(self):
223 return self._pre_outputs
224
225 def piter(self, storage=None, dynamic=False):
226 r"""Iterate over time series components in parallel.
227
228 This allows you to iterate over a time series while dispatching
229 individual components of that time series to different processors or
230 processor groups. If the parallelism strategy was set to be
231 multi-processor (by "parallel = N" where N is an integer when the
232 DatasetSeries was created) this will issue each dataset to an
233 N-processor group. For instance, this would allow you to start a 1024
234 processor job, loading up 100 datasets in a time series and creating 8
235 processor groups of 128 processors each, each of which would be
236 assigned a different dataset. This could be accomplished as shown in
237 the examples below. The *storage* option is as seen in
238 :func:`~yt.utilities.parallel_tools.parallel_analysis_interface.parallel_objects`
239 which is a mechanism for storing results of analysis on an individual
240 dataset and then combining the results at the end, so that the entire
241 set of processors have access to those results.
242
243 Note that supplying a *store* changes the iteration mechanism; see
244 below.
245
246 Parameters
247 ----------
248 storage : dict
249 This is a dictionary, which will be filled with results during the
250 course of the iteration. The keys will be the dataset
251 indices and the values will be whatever is assigned to the *result*
252 attribute on the storage during iteration.
253 dynamic : boolean
254 This governs whether or not dynamic load balancing will be
255 enabled. This requires one dedicated processor; if this
256 is enabled with a set of 128 processors available, only
257 127 will be available to iterate over objects as one will
258 be load balancing the rest.
259
260
261 Examples
262 --------
263 Here is an example of iteration when the results do not need to be
264 stored. One processor will be assigned to each dataset.
265
266 >>> ts = DatasetSeries("DD*/DD*.index")
267 >>> for ds in ts.piter():
268 ... SlicePlot(ds, "x", ("gas", "density")).save()
269 ...
270
271 This demonstrates how one might store results:
272
273 >>> def print_time(ds):
274 ... print(ds.current_time)
275 ...
276 >>> ts = DatasetSeries("DD*/DD*.index",
277 ... setup_function = print_time )
278 ...
279 >>> my_storage = {}
280 >>> for sto, ds in ts.piter(storage=my_storage):
281 ... v, c = ds.find_max(("gas", "density"))
282 ... sto.result = (v, c)
283 ...
284 >>> for i, (v, c) in sorted(my_storage.items()):
285 ... print("% 4i %0.3e" % (i, v))
286 ...
287
288 This shows how to dispatch 4 processors to each dataset:
289
290 >>> ts = DatasetSeries("DD*/DD*.index",
291 ... parallel = 4)
292 >>> for ds in ts.piter():
293 ... ProjectionPlot(ds, "x", ("gas", "density")).save()
294 ...
295
296 """
297 if not self.parallel:
298 njobs = 1
299 elif not dynamic:
300 if self.parallel:
301 njobs = -1
302 else:
303 njobs = self.parallel
304 else:
305 my_communicator = communication_system.communicators[-1]
306 nsize = my_communicator.size
307 if nsize == 1:
308 self.parallel = False
309 dynamic = False
310 njobs = 1
311 else:
312 njobs = nsize - 1
313
314 for output in parallel_objects(
315 self._pre_outputs, njobs=njobs, storage=storage, dynamic=dynamic
316 ):
317 if storage is not None:
318 sto, output = output
319
320 if isinstance(output, str):
321 ds = self._load(output, **self.kwargs)
322 self._setup_function(ds)
323 else:
324 ds = output
325
326 if storage is not None:
327 next_ret = (sto, ds)
328 else:
329 next_ret = ds
330
331 yield next_ret
332
333 def eval(self, tasks, obj=None):
334 return_values = {}
335 for store, ds in self.piter(return_values):
336 store.result = []
337 for task in always_iterable(tasks):
338 try:
339 style = inspect.getargspec(task.eval)[0][1]
340 if style == "ds":
341 arg = ds
342 elif style == "data_object":
343 if obj is None:
344 obj = DatasetSeriesObject(self, "all_data")
345 arg = obj.get(ds)
346 rv = task.eval(arg)
347 # We catch and store YT-originating exceptions
348 # This fixes the standard problem of having a sphere that's too
349 # small.
350 except YTException:
351 pass
352 store.result.append(rv)
353 return [v for k, v in sorted(return_values.items())]
354
355 @classmethod
356 def from_filenames(cls, filenames, parallel=True, setup_function=None, **kwargs):
357 r"""Create a time series from either a filename pattern or a list of
358 filenames.
359
360 This method provides an easy way to create a
361 :class:`~yt.data_objects.time_series.DatasetSeries`, given a set of
362 filenames or a pattern that matches them. Additionally, it can set the
363 parallelism strategy.
364
365 Parameters
366 ----------
367 filenames : list or pattern
368 This can either be a list of filenames (such as ["DD0001/DD0001",
369 "DD0002/DD0002"]) or a pattern to match, such as
370 "DD*/DD*.index"). If it's the former, they will be loaded in
371 order. The latter will be identified with the glob module and then
372 sorted.
373 parallel : True, False or int
374 This parameter governs the behavior when .piter() is called on the
375 resultant DatasetSeries object. If this is set to False, the time
376 series will not iterate in parallel when .piter() is called. If
377 this is set to either True or an integer, it will be iterated with
378 1 or that integer number of processors assigned to each parameter
379 file provided to the loop.
380 setup_function : callable, accepts a ds
381 This function will be called whenever a dataset is loaded.
382
383 Examples
384 --------
385
386 >>> def print_time(ds):
387 ... print(ds.current_time)
388 ...
389 >>> ts = DatasetSeries.from_filenames(
390 ... "GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0",
391 ... setup_function = print_time)
392 ...
393 >>> for ds in ts:
394 ... SlicePlot(ds, "x", ("gas", "density")).save()
395
396 """
397 issue_deprecation_warning(
398 "DatasetSeries.from_filenames() is deprecated and will be removed "
399 "in a future version of yt. Use DatasetSeries() directly.",
400 since="4.0.0",
401 removal="4.1.0",
402 )
403 obj = cls(filenames, parallel=parallel, setup_function=setup_function, **kwargs)
404 return obj
405
406 @classmethod
407 def from_output_log(cls, output_log, line_prefix="DATASET WRITTEN", parallel=True):
408 filenames = []
409 for line in open(output_log):
410 if not line.startswith(line_prefix):
411 continue
412 cut_line = line[len(line_prefix) :].strip()
413 fn = cut_line.split()[0]
414 filenames.append(fn)
415 obj = cls(filenames, parallel=parallel)
416 return obj
417
418 _dataset_cls = None
419
420 def _load(self, output_fn, **kwargs):
421 from yt.loaders import load
422
423 if self._dataset_cls is not None:
424 return self._dataset_cls(output_fn, **kwargs)
425 elif self._mixed_dataset_types:
426 return load(output_fn, **kwargs)
427 ds = load(output_fn, **kwargs)
428 self._dataset_cls = ds.__class__
429 return ds
430
431 def particle_trajectories(
432 self, indices, fields=None, suppress_logging=False, ptype=None
433 ):
434 r"""Create a collection of particle trajectories in time over a series of
435 datasets.
436
437 Parameters
438 ----------
439 indices : array_like
440 An integer array of particle indices whose trajectories we
441 want to track. If they are not sorted they will be sorted.
442 fields : list of strings, optional
443 A set of fields that is retrieved when the trajectory
444 collection is instantiated. Default: None (will default
445 to the fields 'particle_position_x', 'particle_position_y',
446 'particle_position_z')
447 suppress_logging : boolean
448 Suppress yt's logging when iterating over the simulation time
449 series. Default: False
450 ptype : str, optional
451             Only use this particle type. Default: None, which uses all particle types.
452
453 Examples
454 --------
455 >>> my_fns = glob.glob("orbit_hdf5_chk_00[0-9][0-9]")
456 >>> my_fns.sort()
457 >>> fields = [("all", "particle_position_x"), ("all", "particle_position_y"),
458         ...     ("all", "particle_position_z"), ("all", "particle_velocity_x"),
459         ...     ("all", "particle_velocity_y"), ("all", "particle_velocity_z")]
460 >>> ds = load(my_fns[0])
461 >>> init_sphere = ds.sphere(ds.domain_center, (.5, "unitary"))
462 >>> indices = init_sphere[("all", "particle_index")].astype("int")
463 >>> ts = DatasetSeries(my_fns)
464 >>> trajs = ts.particle_trajectories(indices, fields=fields)
465         >>> for t in trajs:
466         ...     print(t[("all", "particle_velocity_x")].max(), t[("all", "particle_velocity_x")].min())
467
468 Notes
469 -----
470 This function will fail if there are duplicate particle ids or if some of the
471         particles disappear.
472 """
473 return ParticleTrajectories(
474 self, indices, fields=fields, suppress_logging=suppress_logging, ptype=ptype
475 )
476
477
478 class TimeSeriesQuantitiesContainer:
479 def __init__(self, data_object, quantities):
480 self.data_object = data_object
481 self.quantities = quantities
482
483 def __getitem__(self, key):
484 if key not in self.quantities:
485 raise KeyError(key)
486 q = self.quantities[key]
487
488 def run_quantity_wrapper(quantity, quantity_name):
489 @wraps(derived_quantity_registry[quantity_name][1])
490 def run_quantity(*args, **kwargs):
491 to_run = quantity(*args, **kwargs)
492 return self.data_object.eval(to_run)
493
494 return run_quantity
495
496 return run_quantity_wrapper(q, key)
497
498
499 class DatasetSeriesObject:
500 def __init__(self, time_series, data_object_name, *args, **kwargs):
501 self.time_series = weakref.proxy(time_series)
502 self.data_object_name = data_object_name
503 self._args = args
504 self._kwargs = kwargs
505 qs = {
506 qn: create_quantity_proxy(qv)
507 for qn, qv in derived_quantity_registry.items()
508 }
509 self.quantities = TimeSeriesQuantitiesContainer(self, qs)
510
511 def eval(self, tasks):
512 return self.time_series.eval(tasks, self)
513
514 def get(self, ds):
515 # We get the type name, which corresponds to an attribute of the
516 # index
517 cls = getattr(ds, self.data_object_name)
518 return cls(*self._args, **self._kwargs)
519
520
521 class SimulationTimeSeries(DatasetSeries):
522 def __init__(self, parameter_filename, find_outputs=False):
523 """
524 Base class for generating simulation time series types.
525 Principally consists of a *parameter_filename*.
526 """
527
528 if not os.path.exists(parameter_filename):
529 raise FileNotFoundError(parameter_filename)
530 self.parameter_filename = parameter_filename
531 self.basename = os.path.basename(parameter_filename)
532 self.directory = os.path.dirname(parameter_filename)
533 self.parameters = {}
534 self.key_parameters = []
535
536 # Set some parameter defaults.
537 self._set_parameter_defaults()
538 # Read the simulation dataset.
539 self._parse_parameter_file()
540 # Set units
541 self._set_units()
542 # Figure out the starting and stopping times and redshift.
543 self._calculate_simulation_bounds()
544 # Get all possible datasets.
545 self._get_all_outputs(find_outputs=find_outputs)
546
547 self.print_key_parameters()
548
549 def _set_parameter_defaults(self):
550 pass
551
552 def _parse_parameter_file(self):
553 pass
554
555 def _set_units(self):
556 pass
557
558 def _calculate_simulation_bounds(self):
559 pass
560
561     def _get_all_outputs(self, **kwargs):
562 pass
563
564 def __repr__(self):
565 return self.parameter_filename
566
567 _arr = None
568
569 @property
570 def arr(self):
571 if self._arr is not None:
572 return self._arr
573 self._arr = functools.partial(YTArray, registry=self.unit_registry)
574 return self._arr
575
576 _quan = None
577
578 @property
579 def quan(self):
580 if self._quan is not None:
581 return self._quan
582 self._quan = functools.partial(YTQuantity, registry=self.unit_registry)
583 return self._quan
584
585 @parallel_root_only
586 def print_key_parameters(self):
587 """
588 Print out some key parameters for the simulation.
589 """
590 if self.simulation_type == "grid":
591 for a in ["domain_dimensions", "domain_left_edge", "domain_right_edge"]:
592 self._print_attr(a)
593 for a in ["initial_time", "final_time", "cosmological_simulation"]:
594 self._print_attr(a)
595 if getattr(self, "cosmological_simulation", False):
596 for a in [
597 "box_size",
598 "omega_matter",
599 "omega_lambda",
600 "omega_radiation",
601 "hubble_constant",
602 "initial_redshift",
603 "final_redshift",
604 ]:
605 self._print_attr(a)
606 for a in self.key_parameters:
607 self._print_attr(a)
608 mylog.info("Total datasets: %d.", len(self.all_outputs))
609
610 def _print_attr(self, a):
611 """
612 Print the attribute or warn about it missing.
613 """
614 if not hasattr(self, a):
615 mylog.error("Missing %s in dataset definition!", a)
616 return
617 v = getattr(self, a)
618 mylog.info("Parameters: %-25s = %s", a, v)
619
620 def _get_outputs_by_key(self, key, values, tolerance=None, outputs=None):
621 r"""
622 Get datasets at or near to given values.
623
624 Parameters
625 ----------
626 key : str
627 The key by which to retrieve outputs, usually 'time' or
628 'redshift'.
629 values : array_like
630 A list of values, given as floats.
631 tolerance : float
632 If not None, do not return a dataset unless the value is
633 within the tolerance value. If None, simply return the
634 nearest dataset.
635 Default: None.
636 outputs : list
637 The list of outputs from which to choose. If None,
638 self.all_outputs is used.
639 Default: None.
640
641 Examples
642 --------
643 >>> datasets = es.get_outputs_by_key('redshift', [0, 1, 2], tolerance=0.1)
644
645 """
646
647 if not isinstance(values, YTArray):
648 if isinstance(values, tuple) and len(values) == 2:
649 values = self.arr(*values)
650 else:
651 values = self.arr(values)
652 values = values.in_base()
653
654 if outputs is None:
655 outputs = self.all_outputs
656 my_outputs = []
657 if not outputs:
658 return my_outputs
659 for value in values:
660 outputs.sort(key=lambda obj: np.abs(value - obj[key]))
661 if (
662 tolerance is None or np.abs(value - outputs[0][key]) <= tolerance
663 ) and outputs[0] not in my_outputs:
664 my_outputs.append(outputs[0])
665 else:
666 mylog.error("No dataset added for %s = %f.", key, value)
667
668 outputs.sort(key=lambda obj: obj["time"])
669 return my_outputs
670
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/yt/data_objects/time_series.py b/yt/data_objects/time_series.py
--- a/yt/data_objects/time_series.py
+++ b/yt/data_objects/time_series.py
@@ -155,6 +155,7 @@
pass
ret = super().__new__(cls)
ret._pre_outputs = outputs[:]
+ ret.kwargs = {}
return ret
def __init__(
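A short, hedged note on why this one-line patch resolves the reported `AttributeError`: `SimulationTimeSeries` subclasses such as `EnzoSimulation` define their own `__init__` and never chain up to `DatasetSeries.__init__`, so `self.kwargs` was only ever set on instances built through the plain `DatasetSeries` path. Seeding a default in `__new__` covers every construction path. The sketch below uses simplified stand-in classes, not yt's real ones:

```python
# Minimal sketch (stand-in names, not yt code): a subclass that skips the base
# __init__ still gets a usable `kwargs` attribute once __new__ sets a default.
class Base:
    def __new__(cls, outputs, **kwargs):
        ret = super().__new__(cls)
        ret._pre_outputs = list(outputs)
        ret.kwargs = {}  # the default added by the patch above
        return ret

    def __init__(self, outputs, **kwargs):
        self.kwargs = kwargs  # only reached when __init__ is actually called

    def __getitem__(self, i):
        # this lookup is what previously raised AttributeError in __getitem__
        return self._pre_outputs[i], self.kwargs


class SimulationLike(Base):
    def __init__(self, outputs):  # mirrors EnzoSimulation: no super().__init__()
        pass


print(SimulationLike(["DD0001/DD0001"])[0])  # -> ('DD0001/DD0001', {})
```

Instances that do go through `__init__` still receive their real keyword arguments, since the assignment there overwrites the empty default.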
| {"golden_diff": "diff --git a/yt/data_objects/time_series.py b/yt/data_objects/time_series.py\n--- a/yt/data_objects/time_series.py\n+++ b/yt/data_objects/time_series.py\n@@ -155,6 +155,7 @@\n pass\n ret = super().__new__(cls)\n ret._pre_outputs = outputs[:]\n+ ret.kwargs = {}\n return ret\n \n def __init__(\n", "issue": "missing declaration of used attribute?\n### Bug report\r\n\r\nwhile trying to load enzo data into yt (cf. python code below), we run into an issue that the (lazy?) loading in `yt.data_objects.DatasetSeries` is triggered when accessing `__get_item__ `of the resulting EnzoSimulation objects which fails with an AttributeError (cf. stacktrace below) for using `self.kwargs`. Afaik this attribute is only declared in `DatasetSeries.__init__` which itself is not invoked by its subclasses involved here (`yt.data_objects.SimulationTimeSeries` -> `yt.frontends.enzo.simulation_handling.EnzoSimulation`). Hence the loading failed.\r\n\r\n**Code for reproduction**\r\n\r\n```python\r\nimport yt\r\nfrom yt.extensions.astro_analysis.halo_analysis import HaloCatalog\r\nyt.enable_parallelism()\r\n \r\nes = yt.load_simulation(\"/mnt/lustre/carla/Box100/RandomSeedStudy/Wunder-L4/parameter_file.txt\", \"Enzo\", find_outputs=True)\r\n \r\nhc = HaloCatalog(data_ds=es, finder_method='rockstar', finder_kwargs={\"num_readers\": 2, \"num_writers\": 4})\r\nhc.create()\r\n```\r\n\r\n**Actual outcome**\r\n\r\n```\r\nFile \"/users/carla/scripts/run_rockstar-galaxies.py\", line 9, in <module>\r\n hc.create()\r\n File \"/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_catalog/halo_catalog.py\", line 333, in create\r\n self._run(save_halos, save_output,\r\n File \"/opt/apps/pkgs/yt/4.0.dev0/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt/utilities/parallel_tools/parallel_analysis_interface.py\", line 310, in barrierize\r\n retval = func(*args, **kwargs)\r\n File \"/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_catalog/halo_catalog.py\", line 224, in _run\r\n self.finder_method(self)\r\n File \"/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_catalog/halo_finding_methods.py\", line 39, in __call__\r\n return self.function(hc, *self.args, **self.kwargs)\r\n File \"/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_catalog/halo_finding_methods.py\", line 87, in _rockstar_method\r\n rh = RockstarHaloFinder(ds, **finder_kwargs)\r\n File \"/opt/apps/pkgs/yt_astro_analysis/313cf31/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt_astro_analysis/halo_analysis/halo_finding/rockstar/rockstar.py\", line 234, in __init__\r\n tds = ts[-1] # Cache a reference\r\n File \"/opt/apps/pkgs/yt/4.0.dev0/intel64/gnu_9.1.0/cpython_3.8.9/lib/python3.8/site-packages/yt/data_objects/time_series.py\", line 214, in __getitem__\r\n o = self._load(o, **self.kwargs)\r\nP000 yt : [ERROR ] 2021-04-14 15:27:09,625 AttributeError: 'EnzoSimulation' object has no attribute 'kwargs'\r\n```\r\n\r\n**Expected outcome**\r\n\r\nwell, I expect no AttributeError ;)\r\n\r\n**Version Information**\r\n * Operating System: CentOS 7\r\n * Python Version: 3.8.9 (cpython)\r\n * yt version: 4.0.dev0 \r\n * Other Libraries (if applicable): yt_astro_analysis (latest commit on 
master i.e. 313cf31)\r\n\r\n<!--Please tell us how you installed yt and python e.g., from source,\r\npip, conda. If you installed from conda, please specify which channel you used\r\nif not the default-->\r\n\r\npython itself as well as yt and yt_astro_analysis are compiled/installed from source. Supporting packages/dependencies (e.g. unyt) installed with pip. \r\n\r\nThanks again for any help you can provide to clarify this issue.\n", "before_files": [{"content": "import functools\nimport glob\nimport inspect\nimport os\nimport weakref\nfrom functools import wraps\n\nimport numpy as np\nfrom more_itertools import always_iterable\n\nfrom yt._maintenance.deprecation import issue_deprecation_warning\nfrom yt.config import ytcfg\nfrom yt.data_objects.analyzer_objects import AnalysisTask, create_quantity_proxy\nfrom yt.data_objects.particle_trajectories import ParticleTrajectories\nfrom yt.funcs import is_sequence, mylog\nfrom yt.units.yt_array import YTArray, YTQuantity\nfrom yt.utilities.exceptions import YTException\nfrom yt.utilities.object_registries import (\n analysis_task_registry,\n data_object_registry,\n derived_quantity_registry,\n simulation_time_series_registry,\n)\nfrom yt.utilities.parallel_tools.parallel_analysis_interface import (\n communication_system,\n parallel_objects,\n parallel_root_only,\n)\n\n\nclass AnalysisTaskProxy:\n def __init__(self, time_series):\n self.time_series = time_series\n\n def __getitem__(self, key):\n task_cls = analysis_task_registry[key]\n\n @wraps(task_cls.__init__)\n def func(*args, **kwargs):\n task = task_cls(*args, **kwargs)\n return self.time_series.eval(task)\n\n return func\n\n def keys(self):\n return analysis_task_registry.keys()\n\n def __contains__(self, key):\n return key in analysis_task_registry\n\n\ndef get_ds_prop(propname):\n def _eval(params, ds):\n return getattr(ds, propname)\n\n cls = type(propname, (AnalysisTask,), dict(eval=_eval, _params=tuple()))\n return cls\n\n\nattrs = (\n \"refine_by\",\n \"dimensionality\",\n \"current_time\",\n \"domain_dimensions\",\n \"domain_left_edge\",\n \"domain_right_edge\",\n \"unique_identifier\",\n \"current_redshift\",\n \"cosmological_simulation\",\n \"omega_matter\",\n \"omega_lambda\",\n \"omega_radiation\",\n \"hubble_constant\",\n)\n\n\nclass TimeSeriesParametersContainer:\n def __init__(self, data_object):\n self.data_object = data_object\n\n def __getattr__(self, attr):\n if attr in attrs:\n return self.data_object.eval(get_ds_prop(attr)())\n raise AttributeError(attr)\n\n\nclass DatasetSeries:\n r\"\"\"The DatasetSeries object is a container of multiple datasets,\n allowing easy iteration and computation on them.\n\n DatasetSeries objects are designed to provide easy ways to access,\n analyze, parallelize and visualize multiple datasets sequentially. This is\n primarily expressed through iteration, but can also be constructed via\n analysis tasks (see :ref:`time-series-analysis`).\n\n Note that contained datasets are lazily loaded and weakly referenced. This means\n that in order to perform follow-up operations on data it's best to define handles on\n these datasets during iteration.\n\n Parameters\n ----------\n outputs : list of filenames, or pattern\n A list of filenames, for instance [\"DD0001/DD0001\", \"DD0002/DD0002\"],\n or a glob pattern (i.e. 
containing wildcards '[]?!*') such as \"DD*/DD*.index\".\n In the latter case, results are sorted automatically.\n Filenames and patterns can be of type str, os.Pathlike or bytes.\n parallel : True, False or int\n This parameter governs the behavior when .piter() is called on the\n resultant DatasetSeries object. If this is set to False, the time\n series will not iterate in parallel when .piter() is called. If\n this is set to either True, one processor will be allocated for\n each iteration of the loop. If this is set to an integer, the loop\n will be parallelized over this many workgroups. It the integer\n value is less than the total number of available processors,\n more than one processor will be allocated to a given loop iteration,\n causing the functionality within the loop to be run in parallel.\n setup_function : callable, accepts a ds\n This function will be called whenever a dataset is loaded.\n mixed_dataset_types : True or False, default False\n Set to True if the DatasetSeries will load different dataset types, set\n to False if loading dataset of a single type as this will result in a\n considerable speed up from not having to figure out the dataset type.\n\n Examples\n --------\n\n >>> ts = DatasetSeries(\n \"GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0\")\n >>> for ds in ts:\n ... SlicePlot(ds, \"x\", (\"gas\", \"density\")).save()\n ...\n >>> def print_time(ds):\n ... print(ds.current_time)\n ...\n >>> ts = DatasetSeries(\n ... \"GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0\",\n ... setup_function = print_time)\n ...\n >>> for ds in ts:\n ... SlicePlot(ds, \"x\", (\"gas\", \"density\")).save()\n\n \"\"\"\n\n def __init_subclass__(cls, *args, **kwargs):\n super().__init_subclass__(*args, **kwargs)\n code_name = cls.__name__[: cls.__name__.find(\"Simulation\")]\n if code_name:\n simulation_time_series_registry[code_name] = cls\n mylog.debug(\"Registering simulation: %s as %s\", code_name, cls)\n\n def __new__(cls, outputs, *args, **kwargs):\n try:\n outputs = cls._get_filenames_from_glob_pattern(outputs)\n except TypeError:\n pass\n ret = super().__new__(cls)\n ret._pre_outputs = outputs[:]\n return ret\n\n def __init__(\n self,\n outputs,\n parallel=True,\n setup_function=None,\n mixed_dataset_types=False,\n **kwargs,\n ):\n # This is needed to properly set _pre_outputs for Simulation subclasses.\n self._mixed_dataset_types = mixed_dataset_types\n if is_sequence(outputs) and not isinstance(outputs, str):\n self._pre_outputs = outputs[:]\n self.tasks = AnalysisTaskProxy(self)\n self.params = TimeSeriesParametersContainer(self)\n if setup_function is None:\n\n def _null(x):\n return None\n\n setup_function = _null\n self._setup_function = setup_function\n for type_name in data_object_registry:\n setattr(\n self, type_name, functools.partial(DatasetSeriesObject, self, type_name)\n )\n self.parallel = parallel\n self.kwargs = kwargs\n\n @staticmethod\n def _get_filenames_from_glob_pattern(outputs):\n \"\"\"\n Helper function to DatasetSeries.__new__\n handle a special case where \"outputs\" is assumed to be really a pattern string\n \"\"\"\n pattern = outputs\n epattern = os.path.expanduser(pattern)\n data_dir = ytcfg.get(\"yt\", \"test_data_dir\")\n # if no match if found from the current work dir,\n # we try to match the pattern from the test data dir\n file_list = glob.glob(epattern) or glob.glob(os.path.join(data_dir, epattern))\n if not file_list:\n raise FileNotFoundError(f\"No match found for pattern : {pattern}\")\n return 
sorted(file_list)\n\n def __getitem__(self, key):\n if isinstance(key, slice):\n if isinstance(key.start, float):\n return self.get_range(key.start, key.stop)\n # This will return a sliced up object!\n return DatasetSeries(\n self._pre_outputs[key], parallel=self.parallel, **self.kwargs\n )\n o = self._pre_outputs[key]\n if isinstance(o, (str, os.PathLike)):\n o = self._load(o, **self.kwargs)\n self._setup_function(o)\n return o\n\n def __len__(self):\n return len(self._pre_outputs)\n\n @property\n def outputs(self):\n return self._pre_outputs\n\n def piter(self, storage=None, dynamic=False):\n r\"\"\"Iterate over time series components in parallel.\n\n This allows you to iterate over a time series while dispatching\n individual components of that time series to different processors or\n processor groups. If the parallelism strategy was set to be\n multi-processor (by \"parallel = N\" where N is an integer when the\n DatasetSeries was created) this will issue each dataset to an\n N-processor group. For instance, this would allow you to start a 1024\n processor job, loading up 100 datasets in a time series and creating 8\n processor groups of 128 processors each, each of which would be\n assigned a different dataset. This could be accomplished as shown in\n the examples below. The *storage* option is as seen in\n :func:`~yt.utilities.parallel_tools.parallel_analysis_interface.parallel_objects`\n which is a mechanism for storing results of analysis on an individual\n dataset and then combining the results at the end, so that the entire\n set of processors have access to those results.\n\n Note that supplying a *store* changes the iteration mechanism; see\n below.\n\n Parameters\n ----------\n storage : dict\n This is a dictionary, which will be filled with results during the\n course of the iteration. The keys will be the dataset\n indices and the values will be whatever is assigned to the *result*\n attribute on the storage during iteration.\n dynamic : boolean\n This governs whether or not dynamic load balancing will be\n enabled. This requires one dedicated processor; if this\n is enabled with a set of 128 processors available, only\n 127 will be available to iterate over objects as one will\n be load balancing the rest.\n\n\n Examples\n --------\n Here is an example of iteration when the results do not need to be\n stored. One processor will be assigned to each dataset.\n\n >>> ts = DatasetSeries(\"DD*/DD*.index\")\n >>> for ds in ts.piter():\n ... SlicePlot(ds, \"x\", (\"gas\", \"density\")).save()\n ...\n\n This demonstrates how one might store results:\n\n >>> def print_time(ds):\n ... print(ds.current_time)\n ...\n >>> ts = DatasetSeries(\"DD*/DD*.index\",\n ... setup_function = print_time )\n ...\n >>> my_storage = {}\n >>> for sto, ds in ts.piter(storage=my_storage):\n ... v, c = ds.find_max((\"gas\", \"density\"))\n ... sto.result = (v, c)\n ...\n >>> for i, (v, c) in sorted(my_storage.items()):\n ... print(\"% 4i %0.3e\" % (i, v))\n ...\n\n This shows how to dispatch 4 processors to each dataset:\n\n >>> ts = DatasetSeries(\"DD*/DD*.index\",\n ... parallel = 4)\n >>> for ds in ts.piter():\n ... 
ProjectionPlot(ds, \"x\", (\"gas\", \"density\")).save()\n ...\n\n \"\"\"\n if not self.parallel:\n njobs = 1\n elif not dynamic:\n if self.parallel:\n njobs = -1\n else:\n njobs = self.parallel\n else:\n my_communicator = communication_system.communicators[-1]\n nsize = my_communicator.size\n if nsize == 1:\n self.parallel = False\n dynamic = False\n njobs = 1\n else:\n njobs = nsize - 1\n\n for output in parallel_objects(\n self._pre_outputs, njobs=njobs, storage=storage, dynamic=dynamic\n ):\n if storage is not None:\n sto, output = output\n\n if isinstance(output, str):\n ds = self._load(output, **self.kwargs)\n self._setup_function(ds)\n else:\n ds = output\n\n if storage is not None:\n next_ret = (sto, ds)\n else:\n next_ret = ds\n\n yield next_ret\n\n def eval(self, tasks, obj=None):\n return_values = {}\n for store, ds in self.piter(return_values):\n store.result = []\n for task in always_iterable(tasks):\n try:\n style = inspect.getargspec(task.eval)[0][1]\n if style == \"ds\":\n arg = ds\n elif style == \"data_object\":\n if obj is None:\n obj = DatasetSeriesObject(self, \"all_data\")\n arg = obj.get(ds)\n rv = task.eval(arg)\n # We catch and store YT-originating exceptions\n # This fixes the standard problem of having a sphere that's too\n # small.\n except YTException:\n pass\n store.result.append(rv)\n return [v for k, v in sorted(return_values.items())]\n\n @classmethod\n def from_filenames(cls, filenames, parallel=True, setup_function=None, **kwargs):\n r\"\"\"Create a time series from either a filename pattern or a list of\n filenames.\n\n This method provides an easy way to create a\n :class:`~yt.data_objects.time_series.DatasetSeries`, given a set of\n filenames or a pattern that matches them. Additionally, it can set the\n parallelism strategy.\n\n Parameters\n ----------\n filenames : list or pattern\n This can either be a list of filenames (such as [\"DD0001/DD0001\",\n \"DD0002/DD0002\"]) or a pattern to match, such as\n \"DD*/DD*.index\"). If it's the former, they will be loaded in\n order. The latter will be identified with the glob module and then\n sorted.\n parallel : True, False or int\n This parameter governs the behavior when .piter() is called on the\n resultant DatasetSeries object. If this is set to False, the time\n series will not iterate in parallel when .piter() is called. If\n this is set to either True or an integer, it will be iterated with\n 1 or that integer number of processors assigned to each parameter\n file provided to the loop.\n setup_function : callable, accepts a ds\n This function will be called whenever a dataset is loaded.\n\n Examples\n --------\n\n >>> def print_time(ds):\n ... print(ds.current_time)\n ...\n >>> ts = DatasetSeries.from_filenames(\n ... \"GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0\",\n ... setup_function = print_time)\n ...\n >>> for ds in ts:\n ... SlicePlot(ds, \"x\", (\"gas\", \"density\")).save()\n\n \"\"\"\n issue_deprecation_warning(\n \"DatasetSeries.from_filenames() is deprecated and will be removed \"\n \"in a future version of yt. 
Use DatasetSeries() directly.\",\n since=\"4.0.0\",\n removal=\"4.1.0\",\n )\n obj = cls(filenames, parallel=parallel, setup_function=setup_function, **kwargs)\n return obj\n\n @classmethod\n def from_output_log(cls, output_log, line_prefix=\"DATASET WRITTEN\", parallel=True):\n filenames = []\n for line in open(output_log):\n if not line.startswith(line_prefix):\n continue\n cut_line = line[len(line_prefix) :].strip()\n fn = cut_line.split()[0]\n filenames.append(fn)\n obj = cls(filenames, parallel=parallel)\n return obj\n\n _dataset_cls = None\n\n def _load(self, output_fn, **kwargs):\n from yt.loaders import load\n\n if self._dataset_cls is not None:\n return self._dataset_cls(output_fn, **kwargs)\n elif self._mixed_dataset_types:\n return load(output_fn, **kwargs)\n ds = load(output_fn, **kwargs)\n self._dataset_cls = ds.__class__\n return ds\n\n def particle_trajectories(\n self, indices, fields=None, suppress_logging=False, ptype=None\n ):\n r\"\"\"Create a collection of particle trajectories in time over a series of\n datasets.\n\n Parameters\n ----------\n indices : array_like\n An integer array of particle indices whose trajectories we\n want to track. If they are not sorted they will be sorted.\n fields : list of strings, optional\n A set of fields that is retrieved when the trajectory\n collection is instantiated. Default: None (will default\n to the fields 'particle_position_x', 'particle_position_y',\n 'particle_position_z')\n suppress_logging : boolean\n Suppress yt's logging when iterating over the simulation time\n series. Default: False\n ptype : str, optional\n Only use this particle type. Default: None, which uses all particle type.\n\n Examples\n --------\n >>> my_fns = glob.glob(\"orbit_hdf5_chk_00[0-9][0-9]\")\n >>> my_fns.sort()\n >>> fields = [(\"all\", \"particle_position_x\"), (\"all\", \"particle_position_y\"),\n >>> (\"all\", \"particle_position_z\"), (\"all\", \"particle_velocity_x\"),\n >>> (\"all\", \"particle_velocity_y\"), (\"all\", \"particle_velocity_z\")]\n >>> ds = load(my_fns[0])\n >>> init_sphere = ds.sphere(ds.domain_center, (.5, \"unitary\"))\n >>> indices = init_sphere[(\"all\", \"particle_index\")].astype(\"int\")\n >>> ts = DatasetSeries(my_fns)\n >>> trajs = ts.particle_trajectories(indices, fields=fields)\n >>> for t in trajs :\n >>> print(t[(\"all\", \"particle_velocity_x\")].max(), t[(\"all\", \"particle_velocity_x\")].min())\n\n Notes\n -----\n This function will fail if there are duplicate particle ids or if some of the\n particle disappear.\n \"\"\"\n return ParticleTrajectories(\n self, indices, fields=fields, suppress_logging=suppress_logging, ptype=ptype\n )\n\n\nclass TimeSeriesQuantitiesContainer:\n def __init__(self, data_object, quantities):\n self.data_object = data_object\n self.quantities = quantities\n\n def __getitem__(self, key):\n if key not in self.quantities:\n raise KeyError(key)\n q = self.quantities[key]\n\n def run_quantity_wrapper(quantity, quantity_name):\n @wraps(derived_quantity_registry[quantity_name][1])\n def run_quantity(*args, **kwargs):\n to_run = quantity(*args, **kwargs)\n return self.data_object.eval(to_run)\n\n return run_quantity\n\n return run_quantity_wrapper(q, key)\n\n\nclass DatasetSeriesObject:\n def __init__(self, time_series, data_object_name, *args, **kwargs):\n self.time_series = weakref.proxy(time_series)\n self.data_object_name = data_object_name\n self._args = args\n self._kwargs = kwargs\n qs = {\n qn: create_quantity_proxy(qv)\n for qn, qv in derived_quantity_registry.items()\n }\n 
self.quantities = TimeSeriesQuantitiesContainer(self, qs)\n\n def eval(self, tasks):\n return self.time_series.eval(tasks, self)\n\n def get(self, ds):\n # We get the type name, which corresponds to an attribute of the\n # index\n cls = getattr(ds, self.data_object_name)\n return cls(*self._args, **self._kwargs)\n\n\nclass SimulationTimeSeries(DatasetSeries):\n def __init__(self, parameter_filename, find_outputs=False):\n \"\"\"\n Base class for generating simulation time series types.\n Principally consists of a *parameter_filename*.\n \"\"\"\n\n if not os.path.exists(parameter_filename):\n raise FileNotFoundError(parameter_filename)\n self.parameter_filename = parameter_filename\n self.basename = os.path.basename(parameter_filename)\n self.directory = os.path.dirname(parameter_filename)\n self.parameters = {}\n self.key_parameters = []\n\n # Set some parameter defaults.\n self._set_parameter_defaults()\n # Read the simulation dataset.\n self._parse_parameter_file()\n # Set units\n self._set_units()\n # Figure out the starting and stopping times and redshift.\n self._calculate_simulation_bounds()\n # Get all possible datasets.\n self._get_all_outputs(find_outputs=find_outputs)\n\n self.print_key_parameters()\n\n def _set_parameter_defaults(self):\n pass\n\n def _parse_parameter_file(self):\n pass\n\n def _set_units(self):\n pass\n\n def _calculate_simulation_bounds(self):\n pass\n\n def _get_all_outputs(**kwargs):\n pass\n\n def __repr__(self):\n return self.parameter_filename\n\n _arr = None\n\n @property\n def arr(self):\n if self._arr is not None:\n return self._arr\n self._arr = functools.partial(YTArray, registry=self.unit_registry)\n return self._arr\n\n _quan = None\n\n @property\n def quan(self):\n if self._quan is not None:\n return self._quan\n self._quan = functools.partial(YTQuantity, registry=self.unit_registry)\n return self._quan\n\n @parallel_root_only\n def print_key_parameters(self):\n \"\"\"\n Print out some key parameters for the simulation.\n \"\"\"\n if self.simulation_type == \"grid\":\n for a in [\"domain_dimensions\", \"domain_left_edge\", \"domain_right_edge\"]:\n self._print_attr(a)\n for a in [\"initial_time\", \"final_time\", \"cosmological_simulation\"]:\n self._print_attr(a)\n if getattr(self, \"cosmological_simulation\", False):\n for a in [\n \"box_size\",\n \"omega_matter\",\n \"omega_lambda\",\n \"omega_radiation\",\n \"hubble_constant\",\n \"initial_redshift\",\n \"final_redshift\",\n ]:\n self._print_attr(a)\n for a in self.key_parameters:\n self._print_attr(a)\n mylog.info(\"Total datasets: %d.\", len(self.all_outputs))\n\n def _print_attr(self, a):\n \"\"\"\n Print the attribute or warn about it missing.\n \"\"\"\n if not hasattr(self, a):\n mylog.error(\"Missing %s in dataset definition!\", a)\n return\n v = getattr(self, a)\n mylog.info(\"Parameters: %-25s = %s\", a, v)\n\n def _get_outputs_by_key(self, key, values, tolerance=None, outputs=None):\n r\"\"\"\n Get datasets at or near to given values.\n\n Parameters\n ----------\n key : str\n The key by which to retrieve outputs, usually 'time' or\n 'redshift'.\n values : array_like\n A list of values, given as floats.\n tolerance : float\n If not None, do not return a dataset unless the value is\n within the tolerance value. If None, simply return the\n nearest dataset.\n Default: None.\n outputs : list\n The list of outputs from which to choose. 
If None,\n self.all_outputs is used.\n Default: None.\n\n Examples\n --------\n >>> datasets = es.get_outputs_by_key('redshift', [0, 1, 2], tolerance=0.1)\n\n \"\"\"\n\n if not isinstance(values, YTArray):\n if isinstance(values, tuple) and len(values) == 2:\n values = self.arr(*values)\n else:\n values = self.arr(values)\n values = values.in_base()\n\n if outputs is None:\n outputs = self.all_outputs\n my_outputs = []\n if not outputs:\n return my_outputs\n for value in values:\n outputs.sort(key=lambda obj: np.abs(value - obj[key]))\n if (\n tolerance is None or np.abs(value - outputs[0][key]) <= tolerance\n ) and outputs[0] not in my_outputs:\n my_outputs.append(outputs[0])\n else:\n mylog.error(\"No dataset added for %s = %f.\", key, value)\n\n outputs.sort(key=lambda obj: obj[\"time\"])\n return my_outputs\n", "path": "yt/data_objects/time_series.py"}], "after_files": [{"content": "import functools\nimport glob\nimport inspect\nimport os\nimport weakref\nfrom functools import wraps\n\nimport numpy as np\nfrom more_itertools import always_iterable\n\nfrom yt._maintenance.deprecation import issue_deprecation_warning\nfrom yt.config import ytcfg\nfrom yt.data_objects.analyzer_objects import AnalysisTask, create_quantity_proxy\nfrom yt.data_objects.particle_trajectories import ParticleTrajectories\nfrom yt.funcs import is_sequence, mylog\nfrom yt.units.yt_array import YTArray, YTQuantity\nfrom yt.utilities.exceptions import YTException\nfrom yt.utilities.object_registries import (\n analysis_task_registry,\n data_object_registry,\n derived_quantity_registry,\n simulation_time_series_registry,\n)\nfrom yt.utilities.parallel_tools.parallel_analysis_interface import (\n communication_system,\n parallel_objects,\n parallel_root_only,\n)\n\n\nclass AnalysisTaskProxy:\n def __init__(self, time_series):\n self.time_series = time_series\n\n def __getitem__(self, key):\n task_cls = analysis_task_registry[key]\n\n @wraps(task_cls.__init__)\n def func(*args, **kwargs):\n task = task_cls(*args, **kwargs)\n return self.time_series.eval(task)\n\n return func\n\n def keys(self):\n return analysis_task_registry.keys()\n\n def __contains__(self, key):\n return key in analysis_task_registry\n\n\ndef get_ds_prop(propname):\n def _eval(params, ds):\n return getattr(ds, propname)\n\n cls = type(propname, (AnalysisTask,), dict(eval=_eval, _params=tuple()))\n return cls\n\n\nattrs = (\n \"refine_by\",\n \"dimensionality\",\n \"current_time\",\n \"domain_dimensions\",\n \"domain_left_edge\",\n \"domain_right_edge\",\n \"unique_identifier\",\n \"current_redshift\",\n \"cosmological_simulation\",\n \"omega_matter\",\n \"omega_lambda\",\n \"omega_radiation\",\n \"hubble_constant\",\n)\n\n\nclass TimeSeriesParametersContainer:\n def __init__(self, data_object):\n self.data_object = data_object\n\n def __getattr__(self, attr):\n if attr in attrs:\n return self.data_object.eval(get_ds_prop(attr)())\n raise AttributeError(attr)\n\n\nclass DatasetSeries:\n r\"\"\"The DatasetSeries object is a container of multiple datasets,\n allowing easy iteration and computation on them.\n\n DatasetSeries objects are designed to provide easy ways to access,\n analyze, parallelize and visualize multiple datasets sequentially. This is\n primarily expressed through iteration, but can also be constructed via\n analysis tasks (see :ref:`time-series-analysis`).\n\n Note that contained datasets are lazily loaded and weakly referenced. 
This means\n that in order to perform follow-up operations on data it's best to define handles on\n these datasets during iteration.\n\n Parameters\n ----------\n outputs : list of filenames, or pattern\n A list of filenames, for instance [\"DD0001/DD0001\", \"DD0002/DD0002\"],\n or a glob pattern (i.e. containing wildcards '[]?!*') such as \"DD*/DD*.index\".\n In the latter case, results are sorted automatically.\n Filenames and patterns can be of type str, os.Pathlike or bytes.\n parallel : True, False or int\n This parameter governs the behavior when .piter() is called on the\n resultant DatasetSeries object. If this is set to False, the time\n series will not iterate in parallel when .piter() is called. If\n this is set to either True, one processor will be allocated for\n each iteration of the loop. If this is set to an integer, the loop\n will be parallelized over this many workgroups. It the integer\n value is less than the total number of available processors,\n more than one processor will be allocated to a given loop iteration,\n causing the functionality within the loop to be run in parallel.\n setup_function : callable, accepts a ds\n This function will be called whenever a dataset is loaded.\n mixed_dataset_types : True or False, default False\n Set to True if the DatasetSeries will load different dataset types, set\n to False if loading dataset of a single type as this will result in a\n considerable speed up from not having to figure out the dataset type.\n\n Examples\n --------\n\n >>> ts = DatasetSeries(\n \"GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0\")\n >>> for ds in ts:\n ... SlicePlot(ds, \"x\", (\"gas\", \"density\")).save()\n ...\n >>> def print_time(ds):\n ... print(ds.current_time)\n ...\n >>> ts = DatasetSeries(\n ... \"GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0\",\n ... setup_function = print_time)\n ...\n >>> for ds in ts:\n ... 
SlicePlot(ds, \"x\", (\"gas\", \"density\")).save()\n\n \"\"\"\n\n def __init_subclass__(cls, *args, **kwargs):\n super().__init_subclass__(*args, **kwargs)\n code_name = cls.__name__[: cls.__name__.find(\"Simulation\")]\n if code_name:\n simulation_time_series_registry[code_name] = cls\n mylog.debug(\"Registering simulation: %s as %s\", code_name, cls)\n\n def __new__(cls, outputs, *args, **kwargs):\n try:\n outputs = cls._get_filenames_from_glob_pattern(outputs)\n except TypeError:\n pass\n ret = super().__new__(cls)\n ret._pre_outputs = outputs[:]\n ret.kwargs = {}\n return ret\n\n def __init__(\n self,\n outputs,\n parallel=True,\n setup_function=None,\n mixed_dataset_types=False,\n **kwargs,\n ):\n # This is needed to properly set _pre_outputs for Simulation subclasses.\n self._mixed_dataset_types = mixed_dataset_types\n if is_sequence(outputs) and not isinstance(outputs, str):\n self._pre_outputs = outputs[:]\n self.tasks = AnalysisTaskProxy(self)\n self.params = TimeSeriesParametersContainer(self)\n if setup_function is None:\n\n def _null(x):\n return None\n\n setup_function = _null\n self._setup_function = setup_function\n for type_name in data_object_registry:\n setattr(\n self, type_name, functools.partial(DatasetSeriesObject, self, type_name)\n )\n self.parallel = parallel\n self.kwargs = kwargs\n\n @staticmethod\n def _get_filenames_from_glob_pattern(outputs):\n \"\"\"\n Helper function to DatasetSeries.__new__\n handle a special case where \"outputs\" is assumed to be really a pattern string\n \"\"\"\n pattern = outputs\n epattern = os.path.expanduser(pattern)\n data_dir = ytcfg.get(\"yt\", \"test_data_dir\")\n # if no match if found from the current work dir,\n # we try to match the pattern from the test data dir\n file_list = glob.glob(epattern) or glob.glob(os.path.join(data_dir, epattern))\n if not file_list:\n raise FileNotFoundError(f\"No match found for pattern : {pattern}\")\n return sorted(file_list)\n\n def __getitem__(self, key):\n if isinstance(key, slice):\n if isinstance(key.start, float):\n return self.get_range(key.start, key.stop)\n # This will return a sliced up object!\n return DatasetSeries(\n self._pre_outputs[key], parallel=self.parallel, **self.kwargs\n )\n o = self._pre_outputs[key]\n if isinstance(o, (str, os.PathLike)):\n o = self._load(o, **self.kwargs)\n self._setup_function(o)\n return o\n\n def __len__(self):\n return len(self._pre_outputs)\n\n @property\n def outputs(self):\n return self._pre_outputs\n\n def piter(self, storage=None, dynamic=False):\n r\"\"\"Iterate over time series components in parallel.\n\n This allows you to iterate over a time series while dispatching\n individual components of that time series to different processors or\n processor groups. If the parallelism strategy was set to be\n multi-processor (by \"parallel = N\" where N is an integer when the\n DatasetSeries was created) this will issue each dataset to an\n N-processor group. For instance, this would allow you to start a 1024\n processor job, loading up 100 datasets in a time series and creating 8\n processor groups of 128 processors each, each of which would be\n assigned a different dataset. This could be accomplished as shown in\n the examples below. 
The *storage* option is as seen in\n :func:`~yt.utilities.parallel_tools.parallel_analysis_interface.parallel_objects`\n which is a mechanism for storing results of analysis on an individual\n dataset and then combining the results at the end, so that the entire\n set of processors have access to those results.\n\n Note that supplying a *store* changes the iteration mechanism; see\n below.\n\n Parameters\n ----------\n storage : dict\n This is a dictionary, which will be filled with results during the\n course of the iteration. The keys will be the dataset\n indices and the values will be whatever is assigned to the *result*\n attribute on the storage during iteration.\n dynamic : boolean\n This governs whether or not dynamic load balancing will be\n enabled. This requires one dedicated processor; if this\n is enabled with a set of 128 processors available, only\n 127 will be available to iterate over objects as one will\n be load balancing the rest.\n\n\n Examples\n --------\n Here is an example of iteration when the results do not need to be\n stored. One processor will be assigned to each dataset.\n\n >>> ts = DatasetSeries(\"DD*/DD*.index\")\n >>> for ds in ts.piter():\n ... SlicePlot(ds, \"x\", (\"gas\", \"density\")).save()\n ...\n\n This demonstrates how one might store results:\n\n >>> def print_time(ds):\n ... print(ds.current_time)\n ...\n >>> ts = DatasetSeries(\"DD*/DD*.index\",\n ... setup_function = print_time )\n ...\n >>> my_storage = {}\n >>> for sto, ds in ts.piter(storage=my_storage):\n ... v, c = ds.find_max((\"gas\", \"density\"))\n ... sto.result = (v, c)\n ...\n >>> for i, (v, c) in sorted(my_storage.items()):\n ... print(\"% 4i %0.3e\" % (i, v))\n ...\n\n This shows how to dispatch 4 processors to each dataset:\n\n >>> ts = DatasetSeries(\"DD*/DD*.index\",\n ... parallel = 4)\n >>> for ds in ts.piter():\n ... 
ProjectionPlot(ds, \"x\", (\"gas\", \"density\")).save()\n ...\n\n \"\"\"\n if not self.parallel:\n njobs = 1\n elif not dynamic:\n if self.parallel:\n njobs = -1\n else:\n njobs = self.parallel\n else:\n my_communicator = communication_system.communicators[-1]\n nsize = my_communicator.size\n if nsize == 1:\n self.parallel = False\n dynamic = False\n njobs = 1\n else:\n njobs = nsize - 1\n\n for output in parallel_objects(\n self._pre_outputs, njobs=njobs, storage=storage, dynamic=dynamic\n ):\n if storage is not None:\n sto, output = output\n\n if isinstance(output, str):\n ds = self._load(output, **self.kwargs)\n self._setup_function(ds)\n else:\n ds = output\n\n if storage is not None:\n next_ret = (sto, ds)\n else:\n next_ret = ds\n\n yield next_ret\n\n def eval(self, tasks, obj=None):\n return_values = {}\n for store, ds in self.piter(return_values):\n store.result = []\n for task in always_iterable(tasks):\n try:\n style = inspect.getargspec(task.eval)[0][1]\n if style == \"ds\":\n arg = ds\n elif style == \"data_object\":\n if obj is None:\n obj = DatasetSeriesObject(self, \"all_data\")\n arg = obj.get(ds)\n rv = task.eval(arg)\n # We catch and store YT-originating exceptions\n # This fixes the standard problem of having a sphere that's too\n # small.\n except YTException:\n pass\n store.result.append(rv)\n return [v for k, v in sorted(return_values.items())]\n\n @classmethod\n def from_filenames(cls, filenames, parallel=True, setup_function=None, **kwargs):\n r\"\"\"Create a time series from either a filename pattern or a list of\n filenames.\n\n This method provides an easy way to create a\n :class:`~yt.data_objects.time_series.DatasetSeries`, given a set of\n filenames or a pattern that matches them. Additionally, it can set the\n parallelism strategy.\n\n Parameters\n ----------\n filenames : list or pattern\n This can either be a list of filenames (such as [\"DD0001/DD0001\",\n \"DD0002/DD0002\"]) or a pattern to match, such as\n \"DD*/DD*.index\"). If it's the former, they will be loaded in\n order. The latter will be identified with the glob module and then\n sorted.\n parallel : True, False or int\n This parameter governs the behavior when .piter() is called on the\n resultant DatasetSeries object. If this is set to False, the time\n series will not iterate in parallel when .piter() is called. If\n this is set to either True or an integer, it will be iterated with\n 1 or that integer number of processors assigned to each parameter\n file provided to the loop.\n setup_function : callable, accepts a ds\n This function will be called whenever a dataset is loaded.\n\n Examples\n --------\n\n >>> def print_time(ds):\n ... print(ds.current_time)\n ...\n >>> ts = DatasetSeries.from_filenames(\n ... \"GasSloshingLowRes/sloshing_low_res_hdf5_plt_cnt_0[0-6][0-9]0\",\n ... setup_function = print_time)\n ...\n >>> for ds in ts:\n ... SlicePlot(ds, \"x\", (\"gas\", \"density\")).save()\n\n \"\"\"\n issue_deprecation_warning(\n \"DatasetSeries.from_filenames() is deprecated and will be removed \"\n \"in a future version of yt. 
Use DatasetSeries() directly.\",\n since=\"4.0.0\",\n removal=\"4.1.0\",\n )\n obj = cls(filenames, parallel=parallel, setup_function=setup_function, **kwargs)\n return obj\n\n @classmethod\n def from_output_log(cls, output_log, line_prefix=\"DATASET WRITTEN\", parallel=True):\n filenames = []\n for line in open(output_log):\n if not line.startswith(line_prefix):\n continue\n cut_line = line[len(line_prefix) :].strip()\n fn = cut_line.split()[0]\n filenames.append(fn)\n obj = cls(filenames, parallel=parallel)\n return obj\n\n _dataset_cls = None\n\n def _load(self, output_fn, **kwargs):\n from yt.loaders import load\n\n if self._dataset_cls is not None:\n return self._dataset_cls(output_fn, **kwargs)\n elif self._mixed_dataset_types:\n return load(output_fn, **kwargs)\n ds = load(output_fn, **kwargs)\n self._dataset_cls = ds.__class__\n return ds\n\n def particle_trajectories(\n self, indices, fields=None, suppress_logging=False, ptype=None\n ):\n r\"\"\"Create a collection of particle trajectories in time over a series of\n datasets.\n\n Parameters\n ----------\n indices : array_like\n An integer array of particle indices whose trajectories we\n want to track. If they are not sorted they will be sorted.\n fields : list of strings, optional\n A set of fields that is retrieved when the trajectory\n collection is instantiated. Default: None (will default\n to the fields 'particle_position_x', 'particle_position_y',\n 'particle_position_z')\n suppress_logging : boolean\n Suppress yt's logging when iterating over the simulation time\n series. Default: False\n ptype : str, optional\n Only use this particle type. Default: None, which uses all particle type.\n\n Examples\n --------\n >>> my_fns = glob.glob(\"orbit_hdf5_chk_00[0-9][0-9]\")\n >>> my_fns.sort()\n >>> fields = [(\"all\", \"particle_position_x\"), (\"all\", \"particle_position_y\"),\n >>> (\"all\", \"particle_position_z\"), (\"all\", \"particle_velocity_x\"),\n >>> (\"all\", \"particle_velocity_y\"), (\"all\", \"particle_velocity_z\")]\n >>> ds = load(my_fns[0])\n >>> init_sphere = ds.sphere(ds.domain_center, (.5, \"unitary\"))\n >>> indices = init_sphere[(\"all\", \"particle_index\")].astype(\"int\")\n >>> ts = DatasetSeries(my_fns)\n >>> trajs = ts.particle_trajectories(indices, fields=fields)\n >>> for t in trajs :\n >>> print(t[(\"all\", \"particle_velocity_x\")].max(), t[(\"all\", \"particle_velocity_x\")].min())\n\n Notes\n -----\n This function will fail if there are duplicate particle ids or if some of the\n particle disappear.\n \"\"\"\n return ParticleTrajectories(\n self, indices, fields=fields, suppress_logging=suppress_logging, ptype=ptype\n )\n\n\nclass TimeSeriesQuantitiesContainer:\n def __init__(self, data_object, quantities):\n self.data_object = data_object\n self.quantities = quantities\n\n def __getitem__(self, key):\n if key not in self.quantities:\n raise KeyError(key)\n q = self.quantities[key]\n\n def run_quantity_wrapper(quantity, quantity_name):\n @wraps(derived_quantity_registry[quantity_name][1])\n def run_quantity(*args, **kwargs):\n to_run = quantity(*args, **kwargs)\n return self.data_object.eval(to_run)\n\n return run_quantity\n\n return run_quantity_wrapper(q, key)\n\n\nclass DatasetSeriesObject:\n def __init__(self, time_series, data_object_name, *args, **kwargs):\n self.time_series = weakref.proxy(time_series)\n self.data_object_name = data_object_name\n self._args = args\n self._kwargs = kwargs\n qs = {\n qn: create_quantity_proxy(qv)\n for qn, qv in derived_quantity_registry.items()\n }\n 
self.quantities = TimeSeriesQuantitiesContainer(self, qs)\n\n def eval(self, tasks):\n return self.time_series.eval(tasks, self)\n\n def get(self, ds):\n # We get the type name, which corresponds to an attribute of the\n # index\n cls = getattr(ds, self.data_object_name)\n return cls(*self._args, **self._kwargs)\n\n\nclass SimulationTimeSeries(DatasetSeries):\n def __init__(self, parameter_filename, find_outputs=False):\n \"\"\"\n Base class for generating simulation time series types.\n Principally consists of a *parameter_filename*.\n \"\"\"\n\n if not os.path.exists(parameter_filename):\n raise FileNotFoundError(parameter_filename)\n self.parameter_filename = parameter_filename\n self.basename = os.path.basename(parameter_filename)\n self.directory = os.path.dirname(parameter_filename)\n self.parameters = {}\n self.key_parameters = []\n\n # Set some parameter defaults.\n self._set_parameter_defaults()\n # Read the simulation dataset.\n self._parse_parameter_file()\n # Set units\n self._set_units()\n # Figure out the starting and stopping times and redshift.\n self._calculate_simulation_bounds()\n # Get all possible datasets.\n self._get_all_outputs(find_outputs=find_outputs)\n\n self.print_key_parameters()\n\n def _set_parameter_defaults(self):\n pass\n\n def _parse_parameter_file(self):\n pass\n\n def _set_units(self):\n pass\n\n def _calculate_simulation_bounds(self):\n pass\n\n def _get_all_outputs(**kwargs):\n pass\n\n def __repr__(self):\n return self.parameter_filename\n\n _arr = None\n\n @property\n def arr(self):\n if self._arr is not None:\n return self._arr\n self._arr = functools.partial(YTArray, registry=self.unit_registry)\n return self._arr\n\n _quan = None\n\n @property\n def quan(self):\n if self._quan is not None:\n return self._quan\n self._quan = functools.partial(YTQuantity, registry=self.unit_registry)\n return self._quan\n\n @parallel_root_only\n def print_key_parameters(self):\n \"\"\"\n Print out some key parameters for the simulation.\n \"\"\"\n if self.simulation_type == \"grid\":\n for a in [\"domain_dimensions\", \"domain_left_edge\", \"domain_right_edge\"]:\n self._print_attr(a)\n for a in [\"initial_time\", \"final_time\", \"cosmological_simulation\"]:\n self._print_attr(a)\n if getattr(self, \"cosmological_simulation\", False):\n for a in [\n \"box_size\",\n \"omega_matter\",\n \"omega_lambda\",\n \"omega_radiation\",\n \"hubble_constant\",\n \"initial_redshift\",\n \"final_redshift\",\n ]:\n self._print_attr(a)\n for a in self.key_parameters:\n self._print_attr(a)\n mylog.info(\"Total datasets: %d.\", len(self.all_outputs))\n\n def _print_attr(self, a):\n \"\"\"\n Print the attribute or warn about it missing.\n \"\"\"\n if not hasattr(self, a):\n mylog.error(\"Missing %s in dataset definition!\", a)\n return\n v = getattr(self, a)\n mylog.info(\"Parameters: %-25s = %s\", a, v)\n\n def _get_outputs_by_key(self, key, values, tolerance=None, outputs=None):\n r\"\"\"\n Get datasets at or near to given values.\n\n Parameters\n ----------\n key : str\n The key by which to retrieve outputs, usually 'time' or\n 'redshift'.\n values : array_like\n A list of values, given as floats.\n tolerance : float\n If not None, do not return a dataset unless the value is\n within the tolerance value. If None, simply return the\n nearest dataset.\n Default: None.\n outputs : list\n The list of outputs from which to choose. 
If None,\n self.all_outputs is used.\n Default: None.\n\n Examples\n --------\n >>> datasets = es.get_outputs_by_key('redshift', [0, 1, 2], tolerance=0.1)\n\n \"\"\"\n\n if not isinstance(values, YTArray):\n if isinstance(values, tuple) and len(values) == 2:\n values = self.arr(*values)\n else:\n values = self.arr(values)\n values = values.in_base()\n\n if outputs is None:\n outputs = self.all_outputs\n my_outputs = []\n if not outputs:\n return my_outputs\n for value in values:\n outputs.sort(key=lambda obj: np.abs(value - obj[key]))\n if (\n tolerance is None or np.abs(value - outputs[0][key]) <= tolerance\n ) and outputs[0] not in my_outputs:\n my_outputs.append(outputs[0])\n else:\n mylog.error(\"No dataset added for %s = %f.\", key, value)\n\n outputs.sort(key=lambda obj: obj[\"time\"])\n return my_outputs\n", "path": "yt/data_objects/time_series.py"}]} |
gh_patches_debug_190 | rasdani/github-patches | git_diff | rlworkgroup__garage-1759 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Incompatible dependencies in pipenv install
Hello,
When I try to install garage in a fresh pipenv as per the documentation, I get the following error:
```
[pipenv.exceptions.ResolutionFailure]: Warning: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.
First try clearing your dependency cache with $ pipenv lock --clear, then try the original command again.
Alternatively, you can use $ pipenv install --skip-lock to bypass this mechanism, then run $ pipenv graph to inspect the situation.
Hint: try $ pipenv lock --pre if it is a pre-release dependency.
ERROR: Could not find a version that matches cloudpickle==1.3,~=1.2.0 (from garage==2020.6.0->-r /tmp/pipenvprlocesvrequirements/pipenv-7gor6s43-constraints.txt (line 2))
Tried: 0.1.0, 0.1.0, 0.1.1, 0.1.1, 0.2.1, 0.2.1, 0.2.2, 0.2.2, 0.3.0, 0.3.0, 0.3.1, 0.3.1, 0.4.0, 0.4.0, 0.4.1, 0.4.1, 0.4.2, 0.4.2, 0.4.3, 0.4.3, 0.4.4, 0.4.4, 0.5.0, 0.5.0, 0.5.1, 0.5.1, 0.5.2, 0.5.2, 0.5.3, 0.5.3, 0.5.4, 0.5.4, 0.5.5, 0.5.5, 0.5.6, 0.5.6, 0.6.0, 0.6.0, 0.6.1, 0.6.1, 0.7.0, 0.7.0, 0.8.0, 0.8.0, 0.8.1, 0.8.1, 1.0.0, 1.0.0, 1.1.1, 1.1.1, 1.2.0, 1.2.0, 1.2.1, 1.2.1, 1.2.2, 1.2.2, 1.3.0, 1.3.0, 1.4.0, 1.4.0, 1.4.1, 1.4.1, 1.5.0, 1.5.0
There are incompatible versions in the resolved dependencies:
cloudpickle (from garage==2020.6.0->-r /tmp/pipenvprlocesvrequirements/pipenv-7gor6s43-constraints.txt (line 2))
cloudpickle==1.3 (from tensorflow-probability==0.10.1->garage==2020.6.0->-r /tmp/pipenvprlocesvrequirements/pipenv-7gor6s43-constraints.txt (line 2))
cloudpickle~=1.2.0 (from gym[atari,box2d,classic_control]==0.15.4->garage==2020.6.0->-r /tmp/pipenvprlocesvrequirements/pipenv-7gor6s43-constraints.txt (line 2))
```
This can be shortcutted by appending `--skip-lock` to the `pipenv install`, but that's obviously not ideal. Thanks!
--- END ISSUE ---
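To make the resolver failure concrete before reading the files: the quoted output shows three simultaneous pins on cloudpickle, and two of them cannot both hold. The snippet below is only an illustrative check of that intersection; it assumes the `packaging` library is available and is not part of garage or pipenv:

```python
# Illustrative only: the three cloudpickle pins quoted in the pipenv output above,
# tested against one candidate release with the `packaging` library.
from packaging.specifiers import SpecifierSet

pins = {
    "garage (setup.py)": SpecifierSet("<1.5"),
    "tensorflow-probability 0.10.1": SpecifierSet("==1.3"),
    "gym[...] 0.15.4": SpecifierSet("~=1.2.0"),  # i.e. >=1.2.0,<1.3
}
candidate = "1.3.0"
print({source: candidate in spec for source, spec in pins.items()})
# gym rejects 1.3.0 while tensorflow-probability demands exactly 1.3, so no single
# cloudpickle release satisfies all three pins and the lock step fails.
```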
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 """setuptools based setup module."""
2 import os
3
4 from setuptools import find_packages, setup
5
6 GARAGE_GH_TOKEN = os.environ.get('GARAGE_GH_TOKEN') or 'git'
7 GYM_VERSION = '0.15.4'
8
9 # Required dependencies
10 REQUIRED = [
11 # Please keep alphabetized
12 'akro',
13 'click>=2.0',
14 'cloudpickle<1.5',
15 'cma==2.7.0',
16 'dowel==0.0.3',
17 f'gym[atari,box2d,classic_control]=={GYM_VERSION}',
18 'numpy>=1.14.5',
19 'psutil',
20 # Pyglet 1.4.0 introduces some api change which breaks some
21 # gym environments
22 # See: https://github.com/openai/gym/issues/1588
23 'pyglet<1.4.0,>=1.3.0',
24 'python-dateutil',
25 'ray',
26 'scikit-image',
27 'scipy',
28 'setproctitle>=1.0',
29 'tensorflow>=1.14',
30 'tensorflow-probability',
31 'torch>=1.0.0,!=1.5.0',
32 'torchvision>=0.2.1',
33 ]
34
35 # Dependencies for optional features
36 EXTRAS = {}
37
38 EXTRAS['mujoco'] = [
39 'mujoco-py<2.1,>=2.0',
40 f'gym[all]=={GYM_VERSION}',
41 ]
42
43 EXTRAS['dm_control'] = [
44 # dm_control throws an error during install about not being able to
45 # find a build dependency (absl-py). Later pip executes the `install`
46 # command again and the install succeeds because absl-py has been
47 # installed. This is stupid, but harmless.
48 'dm_control==0.0.300771433',
49 ]
50
51 EXTRAS['bullet'] = ['mpi4py', 'pybullet']
52
53 EXTRAS['all'] = list(set(sum(EXTRAS.values(), [])))
54
55 # Development dependencies (*not* included in 'all')
56 EXTRAS['dev'] = [
57 # Please keep alphabetized
58 'flake8',
59 'flake8-docstrings>=1.5.0',
60 'flake8-import-order',
61 f'metaworld @ https://{GARAGE_GH_TOKEN}@api.github.com/repos/rlworkgroup/metaworld/tarball/861ae8d8c4bef80a7ed86f47f47acaa494d4ab77', # noqa: E501
62 'isort>=4.3.21,<5.0.0',
63 'pep8-naming==0.7.0',
64 'pre-commit',
65 'pycodestyle>=2.5.0',
66 'pydocstyle>=4.0.0',
67 'pylint>=2.5.3',
68 'pytest>=4.5.0', # Required for strict-markers
69 'pytest-cov',
70 'pytest-timeout',
71 'pytest-xdist',
72 'recommonmark',
73 'sphinx',
74 'sphinx-autoapi>=1.4.0',
75 'sphinx_rtd_theme',
76 'yapf==0.30.0',
77 ] # yapf: disable
78
79 with open('README.md') as f:
80 README = f.read()
81
82 # Get the package version dynamically
83 with open('VERSION') as v:
84 VERSION = v.read().strip()
85
86 setup(
87 name='garage',
88 version=VERSION,
89 author='Reinforcement Learning Working Group',
90 description='A toolkit for reproducible reinforcement learning research',
91 url='https://github.com/rlworkgroup/garage',
92 packages=find_packages(where='src'),
93 package_dir={'': 'src'},
94 scripts=['scripts/garage'],
95 python_requires='>=3.6',
96 install_requires=REQUIRED,
97 extras_require=EXTRAS,
98 license='MIT',
99 long_description=README,
100 long_description_content_type='text/markdown',
101 classifiers=[
102 'Development Status :: 4 - Beta',
103 'Intended Audience :: Developers',
104 'Intended Audience :: Education',
105 'Intended Audience :: Science/Research',
106 'License :: OSI Approved :: MIT License',
107 'Programming Language :: Python :: 3.6',
108 'Programming Language :: Python :: 3.7',
109 'Programming Language :: Python :: 3 :: Only',
110 'Topic :: Scientific/Engineering :: Artificial Intelligence',
111 'Topic :: Scientific/Engineering :: Mathematics',
112 'Topic :: Software Development :: Libraries',
113 ],
114 )
115
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -27,7 +27,7 @@
'scipy',
'setproctitle>=1.0',
'tensorflow>=1.14',
- 'tensorflow-probability',
+ 'tensorflow-probability<=0.10.0',
'torch>=1.0.0,!=1.5.0',
'torchvision>=0.2.1',
]
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -27,7 +27,7 @@\n 'scipy',\n 'setproctitle>=1.0',\n 'tensorflow>=1.14',\n- 'tensorflow-probability',\n+ 'tensorflow-probability<=0.10.0',\n 'torch>=1.0.0,!=1.5.0',\n 'torchvision>=0.2.1',\n ]\n", "issue": "Incompatible dependencies in pipenv install\nHello,\r\n\r\nWhen I try to install garage in a fresh pipenv as per the documentation, I get the following error:\r\n\r\n```\r\n[pipenv.exceptions.ResolutionFailure]: Warning: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\r\n First try clearing your dependency cache with $ pipenv lock --clear, then try the original command again.\r\n Alternatively, you can use $ pipenv install --skip-lock to bypass this mechanism, then run $ pipenv graph to inspect the situation.\r\n Hint: try $ pipenv lock --pre if it is a pre-release dependency.\r\nERROR: Could not find a version that matches cloudpickle==1.3,~=1.2.0 (from garage==2020.6.0->-r /tmp/pipenvprlocesvrequirements/pipenv-7gor6s43-constraints.txt (line 2))\r\nTried: 0.1.0, 0.1.0, 0.1.1, 0.1.1, 0.2.1, 0.2.1, 0.2.2, 0.2.2, 0.3.0, 0.3.0, 0.3.1, 0.3.1, 0.4.0, 0.4.0, 0.4.1, 0.4.1, 0.4.2, 0.4.2, 0.4.3, 0.4.3, 0.4.4, 0.4.4, 0.5.0, 0.5.0, 0.5.1, 0.5.1, 0.5.2, 0.5.2, 0.5.3, 0.5.3, 0.5.4, 0.5.4, 0.5.5, 0.5.5, 0.5.6, 0.5.6, 0.6.0, 0.6.0, 0.6.1, 0.6.1, 0.7.0, 0.7.0, 0.8.0, 0.8.0, 0.8.1, 0.8.1, 1.0.0, 1.0.0, 1.1.1, 1.1.1, 1.2.0, 1.2.0, 1.2.1, 1.2.1, 1.2.2, 1.2.2, 1.3.0, 1.3.0, 1.4.0, 1.4.0, 1.4.1, 1.4.1, 1.5.0, 1.5.0\r\nThere are incompatible versions in the resolved dependencies:\r\n cloudpickle (from garage==2020.6.0->-r /tmp/pipenvprlocesvrequirements/pipenv-7gor6s43-constraints.txt (line 2))\r\n cloudpickle==1.3 (from tensorflow-probability==0.10.1->garage==2020.6.0->-r /tmp/pipenvprlocesvrequirements/pipenv-7gor6s43-constraints.txt (line 2))\r\n cloudpickle~=1.2.0 (from gym[atari,box2d,classic_control]==0.15.4->garage==2020.6.0->-r /tmp/pipenvprlocesvrequirements/pipenv-7gor6s43-constraints.txt (line 2))\r\n```\r\nThis can be shortcutted by appending `--skip-lock` to the `pipenv install`, but that's obviously not ideal. Thanks! \n", "before_files": [{"content": "\"\"\"setuptools based setup module.\"\"\"\nimport os\n\nfrom setuptools import find_packages, setup\n\nGARAGE_GH_TOKEN = os.environ.get('GARAGE_GH_TOKEN') or 'git'\nGYM_VERSION = '0.15.4'\n\n# Required dependencies\nREQUIRED = [\n # Please keep alphabetized\n 'akro',\n 'click>=2.0',\n 'cloudpickle<1.5',\n 'cma==2.7.0',\n 'dowel==0.0.3',\n f'gym[atari,box2d,classic_control]=={GYM_VERSION}',\n 'numpy>=1.14.5',\n 'psutil',\n # Pyglet 1.4.0 introduces some api change which breaks some\n # gym environments\n # See: https://github.com/openai/gym/issues/1588\n 'pyglet<1.4.0,>=1.3.0',\n 'python-dateutil',\n 'ray',\n 'scikit-image',\n 'scipy',\n 'setproctitle>=1.0',\n 'tensorflow>=1.14',\n 'tensorflow-probability',\n 'torch>=1.0.0,!=1.5.0',\n 'torchvision>=0.2.1',\n]\n\n# Dependencies for optional features\nEXTRAS = {}\n\nEXTRAS['mujoco'] = [\n 'mujoco-py<2.1,>=2.0',\n f'gym[all]=={GYM_VERSION}',\n]\n\nEXTRAS['dm_control'] = [\n # dm_control throws an error during install about not being able to\n # find a build dependency (absl-py). Later pip executes the `install`\n # command again and the install succeeds because absl-py has been\n # installed. 
This is stupid, but harmless.\n 'dm_control==0.0.300771433',\n]\n\nEXTRAS['bullet'] = ['mpi4py', 'pybullet']\n\nEXTRAS['all'] = list(set(sum(EXTRAS.values(), [])))\n\n# Development dependencies (*not* included in 'all')\nEXTRAS['dev'] = [\n # Please keep alphabetized\n 'flake8',\n 'flake8-docstrings>=1.5.0',\n 'flake8-import-order',\n f'metaworld @ https://{GARAGE_GH_TOKEN}@api.github.com/repos/rlworkgroup/metaworld/tarball/861ae8d8c4bef80a7ed86f47f47acaa494d4ab77', # noqa: E501\n 'isort>=4.3.21,<5.0.0',\n 'pep8-naming==0.7.0',\n 'pre-commit',\n 'pycodestyle>=2.5.0',\n 'pydocstyle>=4.0.0',\n 'pylint>=2.5.3',\n 'pytest>=4.5.0', # Required for strict-markers\n 'pytest-cov',\n 'pytest-timeout',\n 'pytest-xdist',\n 'recommonmark',\n 'sphinx',\n 'sphinx-autoapi>=1.4.0',\n 'sphinx_rtd_theme',\n 'yapf==0.30.0',\n] # yapf: disable\n\nwith open('README.md') as f:\n README = f.read()\n\n# Get the package version dynamically\nwith open('VERSION') as v:\n VERSION = v.read().strip()\n\nsetup(\n name='garage',\n version=VERSION,\n author='Reinforcement Learning Working Group',\n description='A toolkit for reproducible reinforcement learning research',\n url='https://github.com/rlworkgroup/garage',\n packages=find_packages(where='src'),\n package_dir={'': 'src'},\n scripts=['scripts/garage'],\n python_requires='>=3.6',\n install_requires=REQUIRED,\n extras_require=EXTRAS,\n license='MIT',\n long_description=README,\n long_description_content_type='text/markdown',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Software Development :: Libraries',\n ],\n)\n", "path": "setup.py"}], "after_files": [{"content": "\"\"\"setuptools based setup module.\"\"\"\nimport os\n\nfrom setuptools import find_packages, setup\n\nGARAGE_GH_TOKEN = os.environ.get('GARAGE_GH_TOKEN') or 'git'\nGYM_VERSION = '0.15.4'\n\n# Required dependencies\nREQUIRED = [\n # Please keep alphabetized\n 'akro',\n 'click>=2.0',\n 'cloudpickle<1.5',\n 'cma==2.7.0',\n 'dowel==0.0.3',\n f'gym[atari,box2d,classic_control]=={GYM_VERSION}',\n 'numpy>=1.14.5',\n 'psutil',\n # Pyglet 1.4.0 introduces some api change which breaks some\n # gym environments\n # See: https://github.com/openai/gym/issues/1588\n 'pyglet<1.4.0,>=1.3.0',\n 'python-dateutil',\n 'ray',\n 'scikit-image',\n 'scipy',\n 'setproctitle>=1.0',\n 'tensorflow>=1.14',\n 'tensorflow-probability<=0.10.0',\n 'torch>=1.0.0,!=1.5.0',\n 'torchvision>=0.2.1',\n]\n\n# Dependencies for optional features\nEXTRAS = {}\n\nEXTRAS['mujoco'] = [\n 'mujoco-py<2.1,>=2.0',\n f'gym[all]=={GYM_VERSION}',\n]\n\nEXTRAS['dm_control'] = [\n # dm_control throws an error during install about not being able to\n # find a build dependency (absl-py). Later pip executes the `install`\n # command again and the install succeeds because absl-py has been\n # installed. 
This is stupid, but harmless.\n 'dm_control==0.0.300771433',\n]\n\nEXTRAS['bullet'] = ['mpi4py', 'pybullet']\n\nEXTRAS['all'] = list(set(sum(EXTRAS.values(), [])))\n\n# Development dependencies (*not* included in 'all')\nEXTRAS['dev'] = [\n # Please keep alphabetized\n 'flake8',\n 'flake8-docstrings>=1.5.0',\n 'flake8-import-order',\n f'metaworld @ https://{GARAGE_GH_TOKEN}@api.github.com/repos/rlworkgroup/metaworld/tarball/861ae8d8c4bef80a7ed86f47f47acaa494d4ab77', # noqa: E501\n 'isort>=4.3.21,<5.0.0',\n 'pep8-naming==0.7.0',\n 'pre-commit',\n 'pycodestyle>=2.5.0',\n 'pydocstyle>=4.0.0',\n 'pylint>=2.5.3',\n 'pytest>=4.5.0', # Required for strict-markers\n 'pytest-cov',\n 'pytest-timeout',\n 'pytest-xdist',\n 'recommonmark',\n 'sphinx',\n 'sphinx-autoapi>=1.4.0',\n 'sphinx_rtd_theme',\n 'yapf==0.30.0',\n] # yapf: disable\n\nwith open('README.md') as f:\n README = f.read()\n\n# Get the package version dynamically\nwith open('VERSION') as v:\n VERSION = v.read().strip()\n\nsetup(\n name='garage',\n version=VERSION,\n author='Reinforcement Learning Working Group',\n description='A toolkit for reproducible reinforcement learning research',\n url='https://github.com/rlworkgroup/garage',\n packages=find_packages(where='src'),\n package_dir={'': 'src'},\n scripts=['scripts/garage'],\n python_requires='>=3.6',\n install_requires=REQUIRED,\n extras_require=EXTRAS,\n license='MIT',\n long_description=README,\n long_description_content_type='text/markdown',\n classifiers=[\n 'Development Status :: 4 - Beta',\n 'Intended Audience :: Developers',\n 'Intended Audience :: Education',\n 'Intended Audience :: Science/Research',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3.6',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: 3 :: Only',\n 'Topic :: Scientific/Engineering :: Artificial Intelligence',\n 'Topic :: Scientific/Engineering :: Mathematics',\n 'Topic :: Software Development :: Libraries',\n ],\n)\n", "path": "setup.py"}]} |
gh_patches_debug_191 | rasdani/github-patches | git_diff | netket__netket-111 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Python bindings for Jastrow machines randomly failing
I realized in #91 that once in a while the Python tests for the `Jastrow` machines fail. This issue seems related to some memory problem, but I still don't understand whether it is on the C++ side or the Python side.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `setup.py`
Content:
```
1 import os
2 import re
3 import sys
4 import platform
5 import subprocess
6
7 from setuptools import setup, Extension
8 from setuptools.command.build_ext import build_ext
9 from distutils.version import LooseVersion
10
11
12 class CMakeExtension(Extension):
13 def __init__(self, name, sourcedir=''):
14 Extension.__init__(self, name, sources=[])
15 self.sourcedir = os.path.abspath(sourcedir)
16
17
18 class CMakeBuild(build_ext):
19 def run(self):
20 try:
21 out = subprocess.check_output(['cmake', '--version'])
22 except OSError:
23 raise RuntimeError("CMake must be installed to build the following extensions: " +
24 ", ".join(e.name for e in self.extensions))
25
26 if platform.system() == "Windows":
27 cmake_version = LooseVersion(re.search(r'version\s*([\d.]+)', out.decode()).group(1))
28 if cmake_version < '3.1.0':
29 raise RuntimeError("CMake >= 3.1.0 is required on Windows")
30
31 for ext in self.extensions:
32 self.build_extension(ext)
33
34 def build_extension(self, ext):
35 extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
36 cmake_args = ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,
37 '-DPYTHON_EXECUTABLE=' + sys.executable]
38
39 cfg = 'Debug' if self.debug else 'Release'
40 build_args = ['--config', cfg]
41
42 if platform.system() == "Windows":
43 cmake_args += ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(cfg.upper(), extdir)]
44 if sys.maxsize > 2**32:
45 cmake_args += ['-A', 'x64']
46 build_args += ['--', '/m']
47 else:
48 cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]
49 build_args += ['--', '-j2']
50
51 env = os.environ.copy()
52 env['CXXFLAGS'] = '{} -DVERSION_INFO=\\"{}\\"'.format(env.get('CXXFLAGS', ''),
53 self.distribution.get_version())
54 if not os.path.exists(self.build_temp):
55 os.makedirs(self.build_temp)
56 subprocess.check_call(['cmake', ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env)
57 subprocess.check_call(['cmake', '--build', '.'] + build_args, cwd=self.build_temp)
58
59 setup(
60 name='netket',
61 version='0.1',
62 author='Giuseppe Carleo et al.',
63 description='NetKet',
64 url='http://github.com/netket/netket',
65 author_email='[email protected]',
66 license='Apache',
67 ext_modules=[CMakeExtension('netket')],
68 cmdclass=dict(build_ext=CMakeBuild),
69 zip_safe=False,
70 )
71
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/setup.py b/setup.py
--- a/setup.py
+++ b/setup.py
@@ -58,7 +58,7 @@
setup(
name='netket',
- version='0.1',
+ version='2.0',
author='Giuseppe Carleo et al.',
description='NetKet',
url='http://github.com/netket/netket',
| {"golden_diff": "diff --git a/setup.py b/setup.py\n--- a/setup.py\n+++ b/setup.py\n@@ -58,7 +58,7 @@\n \n setup(\n name='netket',\n- version='0.1',\n+ version='2.0',\n author='Giuseppe Carleo et al.',\n description='NetKet',\n url='http://github.com/netket/netket',\n", "issue": "Python bindings for Jastrow machines randomly failing\nI realized in #91 that once in a while the python tests for the `Jastrow` machines fail. This issue seems related to some memory problem, but I still don't understand if it is on the c++ side or python \n", "before_files": [{"content": "import os\nimport re\nimport sys\nimport platform\nimport subprocess\n\nfrom setuptools import setup, Extension\nfrom setuptools.command.build_ext import build_ext\nfrom distutils.version import LooseVersion\n\n\nclass CMakeExtension(Extension):\n def __init__(self, name, sourcedir=''):\n Extension.__init__(self, name, sources=[])\n self.sourcedir = os.path.abspath(sourcedir)\n\n\nclass CMakeBuild(build_ext):\n def run(self):\n try:\n out = subprocess.check_output(['cmake', '--version'])\n except OSError:\n raise RuntimeError(\"CMake must be installed to build the following extensions: \" +\n \", \".join(e.name for e in self.extensions))\n\n if platform.system() == \"Windows\":\n cmake_version = LooseVersion(re.search(r'version\\s*([\\d.]+)', out.decode()).group(1))\n if cmake_version < '3.1.0':\n raise RuntimeError(\"CMake >= 3.1.0 is required on Windows\")\n\n for ext in self.extensions:\n self.build_extension(ext)\n\n def build_extension(self, ext):\n extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))\n cmake_args = ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,\n '-DPYTHON_EXECUTABLE=' + sys.executable]\n\n cfg = 'Debug' if self.debug else 'Release'\n build_args = ['--config', cfg]\n\n if platform.system() == \"Windows\":\n cmake_args += ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(cfg.upper(), extdir)]\n if sys.maxsize > 2**32:\n cmake_args += ['-A', 'x64']\n build_args += ['--', '/m']\n else:\n cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]\n build_args += ['--', '-j2']\n\n env = os.environ.copy()\n env['CXXFLAGS'] = '{} -DVERSION_INFO=\\\\\"{}\\\\\"'.format(env.get('CXXFLAGS', ''),\n self.distribution.get_version())\n if not os.path.exists(self.build_temp):\n os.makedirs(self.build_temp)\n subprocess.check_call(['cmake', ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env)\n subprocess.check_call(['cmake', '--build', '.'] + build_args, cwd=self.build_temp)\n\nsetup(\n name='netket',\n version='0.1',\n author='Giuseppe Carleo et al.',\n description='NetKet',\n url='http://github.com/netket/netket',\n author_email='[email protected]',\n license='Apache',\n ext_modules=[CMakeExtension('netket')],\n cmdclass=dict(build_ext=CMakeBuild),\n zip_safe=False,\n)\n", "path": "setup.py"}], "after_files": [{"content": "import os\nimport re\nimport sys\nimport platform\nimport subprocess\n\nfrom setuptools import setup, Extension\nfrom setuptools.command.build_ext import build_ext\nfrom distutils.version import LooseVersion\n\n\nclass CMakeExtension(Extension):\n def __init__(self, name, sourcedir=''):\n Extension.__init__(self, name, sources=[])\n self.sourcedir = os.path.abspath(sourcedir)\n\n\nclass CMakeBuild(build_ext):\n def run(self):\n try:\n out = subprocess.check_output(['cmake', '--version'])\n except OSError:\n raise RuntimeError(\"CMake must be installed to build the following extensions: \" +\n \", \".join(e.name for e in self.extensions))\n\n if platform.system() == \"Windows\":\n 
cmake_version = LooseVersion(re.search(r'version\\s*([\\d.]+)', out.decode()).group(1))\n if cmake_version < '3.1.0':\n raise RuntimeError(\"CMake >= 3.1.0 is required on Windows\")\n\n for ext in self.extensions:\n self.build_extension(ext)\n\n def build_extension(self, ext):\n extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))\n cmake_args = ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,\n '-DPYTHON_EXECUTABLE=' + sys.executable]\n\n cfg = 'Debug' if self.debug else 'Release'\n build_args = ['--config', cfg]\n\n if platform.system() == \"Windows\":\n cmake_args += ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(cfg.upper(), extdir)]\n if sys.maxsize > 2**32:\n cmake_args += ['-A', 'x64']\n build_args += ['--', '/m']\n else:\n cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]\n build_args += ['--', '-j2']\n\n env = os.environ.copy()\n env['CXXFLAGS'] = '{} -DVERSION_INFO=\\\\\"{}\\\\\"'.format(env.get('CXXFLAGS', ''),\n self.distribution.get_version())\n if not os.path.exists(self.build_temp):\n os.makedirs(self.build_temp)\n subprocess.check_call(['cmake', ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env)\n subprocess.check_call(['cmake', '--build', '.'] + build_args, cwd=self.build_temp)\n\nsetup(\n name='netket',\n version='2.0',\n author='Giuseppe Carleo et al.',\n description='NetKet',\n url='http://github.com/netket/netket',\n author_email='[email protected]',\n license='Apache',\n ext_modules=[CMakeExtension('netket')],\n cmdclass=dict(build_ext=CMakeBuild),\n zip_safe=False,\n)\n", "path": "setup.py"}]} |
gh_patches_debug_192 | rasdani/github-patches | git_diff | jupyterhub__jupyterhub-1323 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
PrefixRedirectUrl redirect `/hub` to `/hub/hub`
It might be an edge case, which is not really important,
but I expected `/hub` -> `/hub/` and not `/hub/hub`. This is due to `uri.startswith(self.base_url)`, and `base_url` is guaranteed to end with a `/`. Now of course we can't just strip the trailing slash from `base_url`, or things like `/hubot` will not be redirected to `/hub/hubot`, and doing nothing may be the right answer.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `jupyterhub/handlers/pages.py`
Content:
```
1 """Basic html-rendering handlers."""
2
3 # Copyright (c) Jupyter Development Team.
4 # Distributed under the terms of the Modified BSD License.
5
6 from http.client import responses
7
8 from jinja2 import TemplateNotFound
9 from tornado import web, gen
10 from tornado.httputil import url_concat
11
12 from .. import orm
13 from ..utils import admin_only, url_path_join
14 from .base import BaseHandler
15
16
17 class RootHandler(BaseHandler):
18 """Render the Hub root page.
19
20 If next argument is passed by single-user server,
21 redirect to base_url + single-user page.
22
23 If logged in, redirects to:
24
25 - single-user server if running
26 - hub home, otherwise
27
28 Otherwise, renders login page.
29 """
30 def get(self):
31 next_url = self.get_argument('next', '')
32 if next_url and not next_url.startswith('/'):
33 self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
34 next_url = ''
35 if next_url and next_url.startswith(url_path_join(self.base_url, 'user/')):
36 # add /hub/ prefix, to ensure we redirect to the right user's server.
37 # The next request will be handled by UserSpawnHandler,
38 # ultimately redirecting to the logged-in user's server.
39 without_prefix = next_url[len(self.base_url):]
40 next_url = url_path_join(self.hub.base_url, without_prefix)
41 self.log.warning("Redirecting %s to %s. For sharing public links, use /user-redirect/",
42 self.request.uri, next_url,
43 )
44 self.redirect(next_url)
45 return
46 user = self.get_current_user()
47 if user:
48 if user.running:
49 url = user.url
50 self.log.debug("User is running: %s", url)
51 self.set_login_cookie(user) # set cookie
52 else:
53 url = url_path_join(self.hub.base_url, 'home')
54 self.log.debug("User is not running: %s", url)
55 else:
56 url = self.settings['login_url']
57 self.redirect(url)
58
59
60 class HomeHandler(BaseHandler):
61 """Render the user's home page."""
62
63 @web.authenticated
64 @gen.coroutine
65 def get(self):
66 user = self.get_current_user()
67 if user.running:
68 # trigger poll_and_notify event in case of a server that died
69 yield user.spawner.poll_and_notify()
70 html = self.render_template('home.html',
71 user=user,
72 url=user.url,
73 )
74 self.finish(html)
75
76
77 class SpawnHandler(BaseHandler):
78 """Handle spawning of single-user servers via form.
79
80 GET renders the form, POST handles form submission.
81
82 Only enabled when Spawner.options_form is defined.
83 """
84 def _render_form(self, message=''):
85 user = self.get_current_user()
86 return self.render_template('spawn.html',
87 user=user,
88 spawner_options_form=user.spawner.options_form,
89 error_message=message,
90 url=self.request.uri,
91 )
92
93 @web.authenticated
94 def get(self):
95 """GET renders form for spawning with user-specified options"""
96 user = self.get_current_user()
97 if not self.allow_named_servers and user.running:
98 url = user.url
99 self.log.debug("User is running: %s", url)
100 self.redirect(url)
101 return
102 if user.spawner.options_form:
103 self.finish(self._render_form())
104 else:
105 # not running, no form. Trigger spawn.
106 self.redirect(user.url)
107
108 @web.authenticated
109 @gen.coroutine
110 def post(self):
111 """POST spawns with user-specified options"""
112 user = self.get_current_user()
113 if not self.allow_named_servers and user.running:
114 url = user.url
115 self.log.warning("User is already running: %s", url)
116 self.redirect(url)
117 return
118 form_options = {}
119 for key, byte_list in self.request.body_arguments.items():
120 form_options[key] = [ bs.decode('utf8') for bs in byte_list ]
121 for key, byte_list in self.request.files.items():
122 form_options["%s_file"%key] = byte_list
123 try:
124 options = user.spawner.options_from_form(form_options)
125 yield self.spawn_single_user(user, options=options)
126 except Exception as e:
127 self.log.error("Failed to spawn single-user server with form", exc_info=True)
128 self.finish(self._render_form(str(e)))
129 return
130 self.set_login_cookie(user)
131 url = user.url
132
133 next_url = self.get_argument('next', '')
134 if next_url and not next_url.startswith('/'):
135 self.log.warning("Disallowing redirect outside JupyterHub: %r", next_url)
136 elif next_url:
137 url = next_url
138
139 self.redirect(url)
140
141 class AdminHandler(BaseHandler):
142 """Render the admin page."""
143
144 @admin_only
145 def get(self):
146 available = {'name', 'admin', 'running', 'last_activity'}
147 default_sort = ['admin', 'name']
148 mapping = {
149 'running': '_server_id'
150 }
151 default_order = {
152 'name': 'asc',
153 'last_activity': 'desc',
154 'admin': 'desc',
155 'running': 'desc',
156 }
157 sorts = self.get_arguments('sort') or default_sort
158 orders = self.get_arguments('order')
159
160 for bad in set(sorts).difference(available):
161 self.log.warning("ignoring invalid sort: %r", bad)
162 sorts.remove(bad)
163 for bad in set(orders).difference({'asc', 'desc'}):
164 self.log.warning("ignoring invalid order: %r", bad)
165 orders.remove(bad)
166
167 # add default sort as secondary
168 for s in default_sort:
169 if s not in sorts:
170 sorts.append(s)
171 if len(orders) < len(sorts):
172 for col in sorts[len(orders):]:
173 orders.append(default_order[col])
174 else:
175 orders = orders[:len(sorts)]
176
177 # this could be one incomprehensible nested list comprehension
178 # get User columns
179 cols = [ getattr(orm.User, mapping.get(c, c)) for c in sorts ]
180 # get User.col.desc() order objects
181 ordered = [ getattr(c, o)() for c, o in zip(cols, orders) ]
182
183 users = self.db.query(orm.User).order_by(*ordered)
184 users = [ self._user_from_orm(u) for u in users ]
185 running = [ u for u in users if u.running ]
186
187 html = self.render_template('admin.html',
188 user=self.get_current_user(),
189 admin_access=self.settings.get('admin_access', False),
190 users=users,
191 running=running,
192 sort={s:o for s,o in zip(sorts, orders)},
193 )
194 self.finish(html)
195
196
197 class TokenPageHandler(BaseHandler):
198 """Handler for page requesting new API tokens"""
199
200 @web.authenticated
201 def get(self):
202 html = self.render_template('token.html')
203 self.finish(html)
204
205
206 class ProxyErrorHandler(BaseHandler):
207 """Handler for rendering proxy error pages"""
208
209 def get(self, status_code_s):
210 status_code = int(status_code_s)
211 status_message = responses.get(status_code, 'Unknown HTTP Error')
212 # build template namespace
213
214 hub_home = url_path_join(self.hub.base_url, 'home')
215 message_html = ''
216 if status_code == 503:
217 message_html = ' '.join([
218 "Your server appears to be down.",
219 "Try restarting it <a href='%s'>from the hub</a>" % hub_home
220 ])
221 ns = dict(
222 status_code=status_code,
223 status_message=status_message,
224 message_html=message_html,
225 logo_url=hub_home,
226 )
227
228 self.set_header('Content-Type', 'text/html')
229 # render the template
230 try:
231 html = self.render_template('%s.html' % status_code, **ns)
232 except TemplateNotFound:
233 self.log.debug("No template for %d", status_code)
234 html = self.render_template('error.html', **ns)
235
236 self.write(html)
237
238
239 default_handlers = [
240 (r'/', RootHandler),
241 (r'/home', HomeHandler),
242 (r'/admin', AdminHandler),
243 (r'/spawn', SpawnHandler),
244 (r'/token', TokenPageHandler),
245 (r'/error/(\d+)', ProxyErrorHandler),
246 ]
247
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/jupyterhub/handlers/pages.py b/jupyterhub/handlers/pages.py
--- a/jupyterhub/handlers/pages.py
+++ b/jupyterhub/handlers/pages.py
@@ -237,7 +237,7 @@
default_handlers = [
- (r'/', RootHandler),
+ (r'/?', RootHandler),
(r'/home', HomeHandler),
(r'/admin', AdminHandler),
(r'/spawn', SpawnHandler),
| {"golden_diff": "diff --git a/jupyterhub/handlers/pages.py b/jupyterhub/handlers/pages.py\n--- a/jupyterhub/handlers/pages.py\n+++ b/jupyterhub/handlers/pages.py\n@@ -237,7 +237,7 @@\n \n \n default_handlers = [\n- (r'/', RootHandler),\n+ (r'/?', RootHandler),\n (r'/home', HomeHandler),\n (r'/admin', AdminHandler),\n (r'/spawn', SpawnHandler),\n", "issue": "PrefixRedirectUrl redirect `/hub` to `/hub/hub`\nIt might be an edge case, which is not really important, \r\nbut I expected `/hub` -> `/hub/` and not `/hub/hub`. This is to to `uri.startswith(self.base_url)`, and `base_url` is guarantied to end with a `/`. Now of course we can't just strip the trailing slash from `base_url` or things like `/hubot` will not be redirected to `/hub/hubot`, and doing nothing may be the right answer. \n", "before_files": [{"content": "\"\"\"Basic html-rendering handlers.\"\"\"\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nfrom http.client import responses\n\nfrom jinja2 import TemplateNotFound\nfrom tornado import web, gen\nfrom tornado.httputil import url_concat\n\nfrom .. import orm\nfrom ..utils import admin_only, url_path_join\nfrom .base import BaseHandler\n\n\nclass RootHandler(BaseHandler):\n \"\"\"Render the Hub root page.\n\n If next argument is passed by single-user server,\n redirect to base_url + single-user page.\n\n If logged in, redirects to:\n\n - single-user server if running\n - hub home, otherwise\n\n Otherwise, renders login page.\n \"\"\"\n def get(self):\n next_url = self.get_argument('next', '')\n if next_url and not next_url.startswith('/'):\n self.log.warning(\"Disallowing redirect outside JupyterHub: %r\", next_url)\n next_url = ''\n if next_url and next_url.startswith(url_path_join(self.base_url, 'user/')):\n # add /hub/ prefix, to ensure we redirect to the right user's server.\n # The next request will be handled by UserSpawnHandler,\n # ultimately redirecting to the logged-in user's server.\n without_prefix = next_url[len(self.base_url):]\n next_url = url_path_join(self.hub.base_url, without_prefix)\n self.log.warning(\"Redirecting %s to %s. 
For sharing public links, use /user-redirect/\",\n self.request.uri, next_url,\n )\n self.redirect(next_url)\n return\n user = self.get_current_user()\n if user:\n if user.running:\n url = user.url\n self.log.debug(\"User is running: %s\", url)\n self.set_login_cookie(user) # set cookie\n else:\n url = url_path_join(self.hub.base_url, 'home')\n self.log.debug(\"User is not running: %s\", url)\n else:\n url = self.settings['login_url']\n self.redirect(url)\n\n\nclass HomeHandler(BaseHandler):\n \"\"\"Render the user's home page.\"\"\"\n\n @web.authenticated\n @gen.coroutine\n def get(self):\n user = self.get_current_user()\n if user.running:\n # trigger poll_and_notify event in case of a server that died\n yield user.spawner.poll_and_notify()\n html = self.render_template('home.html',\n user=user,\n url=user.url,\n )\n self.finish(html)\n\n\nclass SpawnHandler(BaseHandler):\n \"\"\"Handle spawning of single-user servers via form.\n\n GET renders the form, POST handles form submission.\n\n Only enabled when Spawner.options_form is defined.\n \"\"\"\n def _render_form(self, message=''):\n user = self.get_current_user()\n return self.render_template('spawn.html',\n user=user,\n spawner_options_form=user.spawner.options_form,\n error_message=message,\n url=self.request.uri,\n )\n\n @web.authenticated\n def get(self):\n \"\"\"GET renders form for spawning with user-specified options\"\"\"\n user = self.get_current_user()\n if not self.allow_named_servers and user.running:\n url = user.url\n self.log.debug(\"User is running: %s\", url)\n self.redirect(url)\n return\n if user.spawner.options_form:\n self.finish(self._render_form())\n else:\n # not running, no form. Trigger spawn.\n self.redirect(user.url)\n\n @web.authenticated\n @gen.coroutine\n def post(self):\n \"\"\"POST spawns with user-specified options\"\"\"\n user = self.get_current_user()\n if not self.allow_named_servers and user.running:\n url = user.url\n self.log.warning(\"User is already running: %s\", url)\n self.redirect(url)\n return\n form_options = {}\n for key, byte_list in self.request.body_arguments.items():\n form_options[key] = [ bs.decode('utf8') for bs in byte_list ]\n for key, byte_list in self.request.files.items():\n form_options[\"%s_file\"%key] = byte_list\n try:\n options = user.spawner.options_from_form(form_options)\n yield self.spawn_single_user(user, options=options)\n except Exception as e:\n self.log.error(\"Failed to spawn single-user server with form\", exc_info=True)\n self.finish(self._render_form(str(e)))\n return\n self.set_login_cookie(user)\n url = user.url\n\n next_url = self.get_argument('next', '')\n if next_url and not next_url.startswith('/'):\n self.log.warning(\"Disallowing redirect outside JupyterHub: %r\", next_url)\n elif next_url:\n url = next_url\n\n self.redirect(url)\n\nclass AdminHandler(BaseHandler):\n \"\"\"Render the admin page.\"\"\"\n\n @admin_only\n def get(self):\n available = {'name', 'admin', 'running', 'last_activity'}\n default_sort = ['admin', 'name']\n mapping = {\n 'running': '_server_id'\n }\n default_order = {\n 'name': 'asc',\n 'last_activity': 'desc',\n 'admin': 'desc',\n 'running': 'desc',\n }\n sorts = self.get_arguments('sort') or default_sort\n orders = self.get_arguments('order')\n\n for bad in set(sorts).difference(available):\n self.log.warning(\"ignoring invalid sort: %r\", bad)\n sorts.remove(bad)\n for bad in set(orders).difference({'asc', 'desc'}):\n self.log.warning(\"ignoring invalid order: %r\", bad)\n orders.remove(bad)\n\n # add default sort as 
secondary\n for s in default_sort:\n if s not in sorts:\n sorts.append(s)\n if len(orders) < len(sorts):\n for col in sorts[len(orders):]:\n orders.append(default_order[col])\n else:\n orders = orders[:len(sorts)]\n\n # this could be one incomprehensible nested list comprehension\n # get User columns\n cols = [ getattr(orm.User, mapping.get(c, c)) for c in sorts ]\n # get User.col.desc() order objects\n ordered = [ getattr(c, o)() for c, o in zip(cols, orders) ]\n\n users = self.db.query(orm.User).order_by(*ordered)\n users = [ self._user_from_orm(u) for u in users ]\n running = [ u for u in users if u.running ]\n\n html = self.render_template('admin.html',\n user=self.get_current_user(),\n admin_access=self.settings.get('admin_access', False),\n users=users,\n running=running,\n sort={s:o for s,o in zip(sorts, orders)},\n )\n self.finish(html)\n\n\nclass TokenPageHandler(BaseHandler):\n \"\"\"Handler for page requesting new API tokens\"\"\"\n\n @web.authenticated\n def get(self):\n html = self.render_template('token.html')\n self.finish(html)\n\n\nclass ProxyErrorHandler(BaseHandler):\n \"\"\"Handler for rendering proxy error pages\"\"\"\n \n def get(self, status_code_s):\n status_code = int(status_code_s)\n status_message = responses.get(status_code, 'Unknown HTTP Error')\n # build template namespace\n \n hub_home = url_path_join(self.hub.base_url, 'home')\n message_html = ''\n if status_code == 503:\n message_html = ' '.join([\n \"Your server appears to be down.\",\n \"Try restarting it <a href='%s'>from the hub</a>\" % hub_home\n ])\n ns = dict(\n status_code=status_code,\n status_message=status_message,\n message_html=message_html,\n logo_url=hub_home,\n )\n\n self.set_header('Content-Type', 'text/html')\n # render the template\n try:\n html = self.render_template('%s.html' % status_code, **ns)\n except TemplateNotFound:\n self.log.debug(\"No template for %d\", status_code)\n html = self.render_template('error.html', **ns)\n\n self.write(html)\n\n\ndefault_handlers = [\n (r'/', RootHandler),\n (r'/home', HomeHandler),\n (r'/admin', AdminHandler),\n (r'/spawn', SpawnHandler),\n (r'/token', TokenPageHandler),\n (r'/error/(\\d+)', ProxyErrorHandler),\n]\n", "path": "jupyterhub/handlers/pages.py"}], "after_files": [{"content": "\"\"\"Basic html-rendering handlers.\"\"\"\n\n# Copyright (c) Jupyter Development Team.\n# Distributed under the terms of the Modified BSD License.\n\nfrom http.client import responses\n\nfrom jinja2 import TemplateNotFound\nfrom tornado import web, gen\nfrom tornado.httputil import url_concat\n\nfrom .. 
import orm\nfrom ..utils import admin_only, url_path_join\nfrom .base import BaseHandler\n\n\nclass RootHandler(BaseHandler):\n \"\"\"Render the Hub root page.\n\n If next argument is passed by single-user server,\n redirect to base_url + single-user page.\n\n If logged in, redirects to:\n\n - single-user server if running\n - hub home, otherwise\n\n Otherwise, renders login page.\n \"\"\"\n def get(self):\n next_url = self.get_argument('next', '')\n if next_url and not next_url.startswith('/'):\n self.log.warning(\"Disallowing redirect outside JupyterHub: %r\", next_url)\n next_url = ''\n if next_url and next_url.startswith(url_path_join(self.base_url, 'user/')):\n # add /hub/ prefix, to ensure we redirect to the right user's server.\n # The next request will be handled by UserSpawnHandler,\n # ultimately redirecting to the logged-in user's server.\n without_prefix = next_url[len(self.base_url):]\n next_url = url_path_join(self.hub.base_url, without_prefix)\n self.log.warning(\"Redirecting %s to %s. For sharing public links, use /user-redirect/\",\n self.request.uri, next_url,\n )\n self.redirect(next_url)\n return\n user = self.get_current_user()\n if user:\n if user.running:\n url = user.url\n self.log.debug(\"User is running: %s\", url)\n self.set_login_cookie(user) # set cookie\n else:\n url = url_path_join(self.hub.base_url, 'home')\n self.log.debug(\"User is not running: %s\", url)\n else:\n url = self.settings['login_url']\n self.redirect(url)\n\n\nclass HomeHandler(BaseHandler):\n \"\"\"Render the user's home page.\"\"\"\n\n @web.authenticated\n @gen.coroutine\n def get(self):\n user = self.get_current_user()\n if user.running:\n # trigger poll_and_notify event in case of a server that died\n yield user.spawner.poll_and_notify()\n html = self.render_template('home.html',\n user=user,\n url=user.url,\n )\n self.finish(html)\n\n\nclass SpawnHandler(BaseHandler):\n \"\"\"Handle spawning of single-user servers via form.\n\n GET renders the form, POST handles form submission.\n\n Only enabled when Spawner.options_form is defined.\n \"\"\"\n def _render_form(self, message=''):\n user = self.get_current_user()\n return self.render_template('spawn.html',\n user=user,\n spawner_options_form=user.spawner.options_form,\n error_message=message,\n url=self.request.uri,\n )\n\n @web.authenticated\n def get(self):\n \"\"\"GET renders form for spawning with user-specified options\"\"\"\n user = self.get_current_user()\n if not self.allow_named_servers and user.running:\n url = user.url\n self.log.debug(\"User is running: %s\", url)\n self.redirect(url)\n return\n if user.spawner.options_form:\n self.finish(self._render_form())\n else:\n # not running, no form. 
Trigger spawn.\n self.redirect(user.url)\n\n @web.authenticated\n @gen.coroutine\n def post(self):\n \"\"\"POST spawns with user-specified options\"\"\"\n user = self.get_current_user()\n if not self.allow_named_servers and user.running:\n url = user.url\n self.log.warning(\"User is already running: %s\", url)\n self.redirect(url)\n return\n form_options = {}\n for key, byte_list in self.request.body_arguments.items():\n form_options[key] = [ bs.decode('utf8') for bs in byte_list ]\n for key, byte_list in self.request.files.items():\n form_options[\"%s_file\"%key] = byte_list\n try:\n options = user.spawner.options_from_form(form_options)\n yield self.spawn_single_user(user, options=options)\n except Exception as e:\n self.log.error(\"Failed to spawn single-user server with form\", exc_info=True)\n self.finish(self._render_form(str(e)))\n return\n self.set_login_cookie(user)\n url = user.url\n\n next_url = self.get_argument('next', '')\n if next_url and not next_url.startswith('/'):\n self.log.warning(\"Disallowing redirect outside JupyterHub: %r\", next_url)\n elif next_url:\n url = next_url\n\n self.redirect(url)\n\nclass AdminHandler(BaseHandler):\n \"\"\"Render the admin page.\"\"\"\n\n @admin_only\n def get(self):\n available = {'name', 'admin', 'running', 'last_activity'}\n default_sort = ['admin', 'name']\n mapping = {\n 'running': '_server_id'\n }\n default_order = {\n 'name': 'asc',\n 'last_activity': 'desc',\n 'admin': 'desc',\n 'running': 'desc',\n }\n sorts = self.get_arguments('sort') or default_sort\n orders = self.get_arguments('order')\n\n for bad in set(sorts).difference(available):\n self.log.warning(\"ignoring invalid sort: %r\", bad)\n sorts.remove(bad)\n for bad in set(orders).difference({'asc', 'desc'}):\n self.log.warning(\"ignoring invalid order: %r\", bad)\n orders.remove(bad)\n\n # add default sort as secondary\n for s in default_sort:\n if s not in sorts:\n sorts.append(s)\n if len(orders) < len(sorts):\n for col in sorts[len(orders):]:\n orders.append(default_order[col])\n else:\n orders = orders[:len(sorts)]\n\n # this could be one incomprehensible nested list comprehension\n # get User columns\n cols = [ getattr(orm.User, mapping.get(c, c)) for c in sorts ]\n # get User.col.desc() order objects\n ordered = [ getattr(c, o)() for c, o in zip(cols, orders) ]\n\n users = self.db.query(orm.User).order_by(*ordered)\n users = [ self._user_from_orm(u) for u in users ]\n running = [ u for u in users if u.running ]\n\n html = self.render_template('admin.html',\n user=self.get_current_user(),\n admin_access=self.settings.get('admin_access', False),\n users=users,\n running=running,\n sort={s:o for s,o in zip(sorts, orders)},\n )\n self.finish(html)\n\n\nclass TokenPageHandler(BaseHandler):\n \"\"\"Handler for page requesting new API tokens\"\"\"\n\n @web.authenticated\n def get(self):\n html = self.render_template('token.html')\n self.finish(html)\n\n\nclass ProxyErrorHandler(BaseHandler):\n \"\"\"Handler for rendering proxy error pages\"\"\"\n \n def get(self, status_code_s):\n status_code = int(status_code_s)\n status_message = responses.get(status_code, 'Unknown HTTP Error')\n # build template namespace\n \n hub_home = url_path_join(self.hub.base_url, 'home')\n message_html = ''\n if status_code == 503:\n message_html = ' '.join([\n \"Your server appears to be down.\",\n \"Try restarting it <a href='%s'>from the hub</a>\" % hub_home\n ])\n ns = dict(\n status_code=status_code,\n status_message=status_message,\n message_html=message_html,\n logo_url=hub_home,\n )\n\n 
self.set_header('Content-Type', 'text/html')\n # render the template\n try:\n html = self.render_template('%s.html' % status_code, **ns)\n except TemplateNotFound:\n self.log.debug(\"No template for %d\", status_code)\n html = self.render_template('error.html', **ns)\n\n self.write(html)\n\n\ndefault_handlers = [\n (r'/?', RootHandler),\n (r'/home', HomeHandler),\n (r'/admin', AdminHandler),\n (r'/spawn', SpawnHandler),\n (r'/token', TokenPageHandler),\n (r'/error/(\\d+)', ProxyErrorHandler),\n]\n", "path": "jupyterhub/handlers/pages.py"}]} |
gh_patches_debug_193 | rasdani/github-patches | git_diff | cupy__cupy-1239 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
sum without upcast
`cupy.sum` and `cupy.prod` upcast ints (or bools) to `int64` or `uint64` to align with numpy. This upcast could be avoided with `x.sum(dtype=x.dtype)`, but that is not supported in cupy.
```
>>> x = cupy.arange(3).astype(cupy.int16)
>>> x.sum(dtype=x.dtype)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "cupy/core/core.pyx", line 1139, in cupy.core.core.ndarray.sum
File "cupy/core/core.pyx", line 1147, in cupy.core.core.ndarray.sum
File "cupy/core/reduction.pxi", line 222, in cupy.core.core.simple_reduction_function.__call__
File "cupy/core/elementwise.pxi", line 698, in cupy.core.core._guess_routine
TypeError: Wrong type (<class 'numpy.int16'>) of arguments for cupy_sum
>>> cupy.__version__
'4.0.0'
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cupy/core/fusion.py`
Content:
```
1 import functools
2 import six
3 from six.moves import builtins
4 import string
5 import threading
6 import warnings
7
8 import numpy
9
10 from cupy.core import core
11 from cupy import creation
12 from cupy import logic
13 from cupy import math
14 from cupy import sorting
15 from cupy import statistics
16
17
18 _thread_local = threading.local()
19
20
21 class FusionOp(object):
22
23 def __init__(self, name, operation, param_names,
24 nin, nout, in_vars, out_vars, types, num):
25 self.name = name
26 self.operation = operation
27 self.param_names = param_names
28 self.nin = nin
29 self.nout = nout
30 self.in_vars = in_vars
31 self.out_vars = out_vars
32 self.types = types
33 self.num = num
34
35 def __repr__(self):
36 return "<FusionOp, name={}, types=[{}]>".format(
37 self.name, ', '.join(_.name for _ in self.types))
38
39
40 class _FusionVar(object):
41
42 def __init__(self, num, ty, const=None):
43 self.num = num
44 self.ty = ty
45 self.const = const
46
47 def __repr__(self):
48 return "<_FusionVar, num={}, ty={}, const={}>".format(
49 self.num, self.ty, self.const)
50
51
52 class _FusionMem(object):
53
54 def __init__(self, var_list):
55 self.op_list = []
56 self.var_list = var_list[:]
57
58 def __repr__(self):
59 return "<_FusionMem, op_list={}, var_list={}>".format(
60 self.op_list,
61 self.var_list)
62
63 def get_fresh(self, ty, **kwargs):
64 n = len(self.var_list)
65 ret = _FusionVar(n, ty, **kwargs)
66 self.var_list.append(ret)
67 return ret
68
69 def set_op(self, name, operation, param_names,
70 nin, nout, in_vars, out_vars, types):
71 num = len(self.op_list)
72 op = FusionOp(name, operation, param_names,
73 nin, nout, in_vars, out_vars, types, num)
74 self.op_list.append(op)
75
76
77 class _FusionRef(object):
78
79 def __init__(self, var, mem):
80 self._var = var
81 self.dtype = var.ty
82 self._mem = mem
83
84 def __repr__(self):
85 return "<_FusionRef, dtype=%s>" % self.dtype
86
87 def __neg__(self):
88 return negative(self)
89
90 def __add__(self, other):
91 return add(self, other)
92
93 def __iadd__(self, other):
94 return add(self, other, self)
95
96 def __radd__(self, other):
97 return add(other, self)
98
99 def __sub__(self, other):
100 return subtract(self, other)
101
102 def __isub__(self, other):
103 return subtract(self, other, self)
104
105 def __rsub__(self, other):
106 return subtract(other, self)
107
108 def __mul__(self, other):
109 return multiply(self, other)
110
111 def __imul__(self, other):
112 return multiply(self, other, self)
113
114 def __rmul__(self, other):
115 return multiply(other, self)
116
117 def __div__(self, other):
118 return divide(self, other)
119
120 def __idiv__(self, other):
121 return divide(self, other, self)
122
123 def __rdiv__(self, other):
124 return divide(other, self)
125
126 def __truediv__(self, other):
127 return true_divide(self, other)
128
129 def __itruediv__(self, other):
130 return true_divide(self, other, self)
131
132 def __rtruediv__(self, other):
133 return true_divide(other, self)
134
135 def __floordiv__(self, other):
136 return floor_divide(self, other)
137
138 def __ifloordiv__(self, other):
139 return floor_divide(self, other, self)
140
141 def __rfloordiv__(self, other):
142 return floor_divide(other, self)
143
144 def __mod__(self, other):
145 return remainder(self, other)
146
147 def __imod__(self, other):
148 return remainder(self, other, self)
149
150 def __rmod__(self, other):
151 return remainder(other, self)
152
153 def __pow__(x, y):
154 return power(x, y)
155
156 def __ipow__(self, other):
157 return power(self, other, self)
158
159 def __lshift__(self, other):
160 return left_shift(self, other)
161
162 def __ilshift__(self, other):
163 return left_shift(self, other, self)
164
165 def __rlshift__(self, other):
166 return left_shift(other, self)
167
168 def __rshift__(self, other):
169 return right_shift(self, other)
170
171 def __irshift__(self, other):
172 return right_shift(self, other, self)
173
174 def __rrshift__(self, other):
175 return right_shift(other, self)
176
177 def __and__(self, other):
178 return bitwise_and(self, other)
179
180 def __iand__(self, other):
181 return bitwise_and(self, other, self)
182
183 def __rand__(self, other):
184 return bitwise_and(other, self)
185
186 def __or__(self, other):
187 return bitwise_or(self, other)
188
189 def __ior__(self, other):
190 return bitwise_or(self, other, self)
191
192 def __ror__(self, other):
193 return bitwise_or(other, self)
194
195 def __xor__(self, other):
196 return bitwise_xor(self, other)
197
198 def __ixor__(self, other):
199 return bitwise_xor(self, other, self)
200
201 def __rxor__(self, other):
202 return bitwise_xor(other, self)
203
204 def __invert__(self):
205 return invert(self)
206
207 def __lt__(self, other):
208 return less(self, other)
209
210 def __le__(self, other):
211 return less_equal(self, other)
212
213 def __eq__(self, other):
214 return equal(self, other)
215
216 def __ne__(self, other):
217 return not_equal(self, other)
218
219 def __gt__(self, other):
220 return greater(self, other)
221
222 def __ge__(self, other):
223 return greater_equal(self, other)
224
225 def __nonzero__(self):
226 raise Exception("Can't cast to bool")
227
228 def __bool__(self):
229 raise Exception("Can't cast to bool")
230
231 def __setitem__(self, slices, value):
232 if slices is Ellipsis or (isinstance(slices, slice) and
233 slices == slice(None)):
234 copy(value, self)
235 else:
236 raise ValueError('The fusion supports `[...]` or `[:]`.')
237
238 def copy(self):
239 return copy(self)
240
241
242 _kind_score = {
243 'b': 0,
244 'u': 1,
245 'i': 1,
246 'f': 2,
247 'c': 3,
248 }
249
250 _dtype_to_ctype = {
251 numpy.dtype('float64'): 'double',
252 numpy.dtype('float32'): 'float',
253 numpy.dtype('float16'): 'float16',
254 numpy.dtype('complex128'): 'complex<double>',
255 numpy.dtype('complex64'): 'complex<float>',
256 numpy.dtype('int64'): 'long long',
257 numpy.dtype('int32'): 'int',
258 numpy.dtype('int16'): 'short',
259 numpy.dtype('int8'): 'signed char',
260 numpy.dtype('uint64'): 'unsigned long long',
261 numpy.dtype('uint32'): 'unsigned int',
262 numpy.dtype('uint16'): 'unsigned short',
263 numpy.dtype('uint8'): 'unsigned char',
264 numpy.dtype('bool'): 'bool',
265 }
266
267 _dtype_list = [numpy.dtype(_) for _ in '?bhilqBHILQefdFD']
268
269
270 def _normalize_arg(arg, mem):
271 arg_type = type(arg)
272 if arg_type is _FusionRef:
273 return arg._var
274 is_scalar = arg_type in six.integer_types + (float, bool, complex)
275 is_ndarray = hasattr(arg, 'dtype') and arg.dtype in _dtype_list
276 if is_scalar or is_ndarray:
277 return mem.get_fresh(numpy.dtype(arg_type), const=arg)
278 raise Exception('Unsupported type %s' % arg_type)
279
280
281 def _convert(f):
282 if type(f) is core.ufunc:
283 return _convert_from_ufunc(f)
284 if type(f) is core.ElementwiseKernel:
285 return _convert_from_elementwise(f)
286 raise Exception("Can't convert from %s to FusionOp" % type(f))
287
288
289 def _should_use_min_scalar(in_args):
290 max_array_kind = -2
291 max_scalar_kind = -1
292 for i in in_args:
293 kind = _kind_score[i.ty.kind]
294 if i.const is None:
295 max_array_kind = max(max_array_kind, kind)
296 else:
297 max_scalar_kind = max(max_scalar_kind, kind)
298 return (max_scalar_kind != -1 and
299 max_array_kind >= max_scalar_kind)
300
301
302 def _convert_from_ufunc(ufunc):
303 nin = ufunc.nin
304 nout = ufunc.nout
305
306 def get_mem(args):
307 for i in args:
308 if type(i) == _FusionRef:
309 return i._mem
310 raise Exception('number of ndarray arguments must be more than 0')
311
312 def can_cast1(args, ty_ins):
313 for i in six.moves.range(nin):
314 if args[i].const is None:
315 if not numpy.can_cast(args[i].ty, ty_ins[i]):
316 return False
317 else:
318 if not numpy.can_cast(args[i].const, ty_ins[i]):
319 return False
320 return True
321
322 def can_cast2(args, ty_ins):
323 for i in six.moves.range(nin):
324 if not numpy.can_cast(args[i].ty, ty_ins[i]):
325 return False
326 return True
327
328 def res(*args, **kwargs):
329 mem = get_mem(args)
330 var_list = [_normalize_arg(_, mem) for _ in args]
331 if 'out' in kwargs:
332 var_list.append(_normalize_arg(kwargs.pop('out'), mem))
333 if kwargs:
334 raise TypeError('Wrong arguments %s' % kwargs)
335 assert nin <= len(var_list) <= nin + nout
336 in_vars = var_list[:nin]
337 out_vars = var_list[nin:]
338 can_cast = can_cast1 if _should_use_min_scalar(in_vars) else can_cast2
339 for ty_ins, ty_outs, op in ufunc._ops:
340 ty_ins = [numpy.dtype(_) for _ in ty_ins]
341 ty_outs = [numpy.dtype(_) for _ in ty_outs]
342 if can_cast(in_vars, ty_ins):
343 param_names = (['in%d' % i for i in six.moves.range(nin)] +
344 ['out%d' % i for i in six.moves.range(nout)])
345 ret = []
346 for i in six.moves.range(nout):
347 if i >= len(out_vars):
348 v = mem.get_fresh(ty_outs[i])
349 out_vars.append(v)
350 ret.append(_FusionRef(v, mem))
351 elif numpy.can_cast(ty_outs[i], out_vars[i].ty,
352 "same_kind"):
353 v = out_vars[i]
354 ret.append(_FusionRef(v, mem))
355 else:
356 raise TypeError(
357 'output (typecode \'{}\') could not be coerced '
358 'to provided output parameter (typecode \'{}\') '
359 'according to the casting rule '
360 '"same_kind"'.format(
361 ty_outs[i].char, out_vars[i].ty.char))
362 mem.set_op(ufunc.name, op, param_names, nin, nout,
363 in_vars, out_vars, ty_ins + ty_outs)
364 return ret[0] if len(ret) == 1 else tuple(ret)
365 raise TypeError('Invalid type cast in \'{}\': {} -> {}'.format(
366 ufunc.name,
367 [_.ty for _ in in_vars],
368 [_.ty for _ in out_vars]))
369 return res
370
371
372 def _convert_from_elementwise(elem):
373 raise Exception('Not Impletmented')
374
375
376 def _gather_submodules(ops):
377 return {(op.name, tuple(op.types)): op for op in ops}
378
379
380 def _get_params(var_list):
381 return ['%s v%d' % (var.ty, var.num) for var in var_list]
382
383
384 def _get_out_params(var_list):
385 return ['%s ret%d' % (var.ty, i) for i, var in enumerate(var_list)]
386
387
388 def _get_declaration_from_var(var):
389 if var.const is None:
390 return '%s v%d;\n' % (_dtype_to_ctype[var.ty], var.num)
391
392 c = var.const
393 val = numpy.asscalar(c) if hasattr(c, 'dtype') else c
394
395 if isinstance(val, bool):
396 init = '= %s' % str(c).lower()
397 elif isinstance(val, complex):
398 init = '(%s, %s)' % (c.real, c.imag)
399 elif isinstance(val, six.integer_types + (float,)):
400 init = '= %s' % str(c)
401 else:
402 raise TypeError('Invalid constant type: {}'.format(type(c)))
403 return 'const %s v%d %s;\n' % (_dtype_to_ctype[var.ty], var.num, init)
404
405
406 def _get_declaration_from_op(op):
407 return ''.join('%s v%d_%d;\n' % (_dtype_to_ctype[t], op.num, j)
408 for j, t in enumerate(op.types))
409
410
411 def _get_operation_code(op):
412 code = ''.join('v%d_%d = v%d;\n' % (op.num, i, v.num)
413 for i, v in enumerate(op.in_vars))
414 params = ['v%d_%d' % (op.num, i)
415 for i in six.moves.range(op.nin + op.nout)]
416 code += op.name + '(' + ', '.join(params) + ');\n'
417 code += ''.join('v%d = v%d_%d;\n' %
418 (v.num, op.num, i + op.nin)
419 for i, v in enumerate(op.out_vars))
420 return code
421
422
423 def _get_submodule_code(op):
424 parameters = ', '.join('%s &%s' % (_dtype_to_ctype[t], name)
425 for i, (name, t)
426 in enumerate(zip(op.param_names, op.types)))
427 typedecl = ''.join(('typedef %s in%d_type;\n' % (_dtype_to_ctype[t], i))
428 for i, t in enumerate(op.types[:op.nin]))
429 typedecl += ''.join(('typedef %s out%d_type;\n' % (_dtype_to_ctype[t], i))
430 for i, t in enumerate(op.types[op.nin:]))
431 module_code = string.Template('''
432 __device__ void ${name}(${parameters}) {
433 ${typedecl}
434 ${operation};
435 }
436 ''').substitute(
437 name=op.name,
438 parameters=parameters,
439 operation=op.operation,
440 typedecl=typedecl)
441 return module_code + '\n'
442
443
444 def _get_pre_code(in_vars, out_vars, operation):
445 in_params = ', '.join('%s v%s' % (_dtype_to_ctype[v.ty], v.num)
446 for v in in_vars)
447 out_params = ''.join('%s v%s;\n' % (_dtype_to_ctype[v.ty], v.num)
448 for v in out_vars)
449 module_code = string.Template('''
450 __device__ ${return_type} _pre_map(${in_params}) {
451 ${out_params}
452 ${operation};
453 return ${return_var};
454 }
455 ''').substitute(
456 return_type=_dtype_to_ctype[out_vars[0].ty],
457 in_params=in_params,
458 out_params=out_params,
459 operation=operation,
460 return_var='v%d' % out_vars[0].num)
461 return module_code
462
463
464 def _get_reduce_op(ops, dtype):
465 for i in ops._ops:
466 if numpy.can_cast(dtype.type, i[0][0]):
467 return i
468 raise TypeError("Type is mismatched. %s(...), %s" % (ops.name, dtype.type))
469
470
471 def _get_post_code(post_vars, operation, post_out):
472 module_code = string.Template('''
473 __device__ ${return_type} _post_map(${arg_type} v0) {
474 ${operation};
475 return v${return_var};
476 }
477 ''').substitute(
478 arg_type=_dtype_to_ctype[post_vars[0].ty],
479 return_type=_dtype_to_ctype[post_vars[post_out.num].ty],
480 operation=operation,
481 return_var=post_out.num)
482 return module_code
483
484
485 def _get_fix_code(data_type, fixed_type, operation):
486 module_code = string.Template('''
487 __device__ ${fixed_type} _post_fix(${data_type} a) {
488 ${fixed_type} out0;
489 ${operation};
490 return out0;
491 }
492 ''').substitute(
493 data_type=data_type,
494 fixed_type=_dtype_to_ctype[fixed_type],
495 operation=operation)
496 return module_code
497
498
499 def _get_fusion(func, nin, reduce, post_map, identity, input_types, name):
500 in_vars = [_FusionVar(i, t) for i, t in enumerate(input_types)]
501 mem = _FusionMem(in_vars)
502 in_refs = [_FusionRef(_, mem) for _ in in_vars]
503 out_refs = func(*in_refs)
504 out_refs = list(out_refs) if type(out_refs) == tuple else [out_refs]
505 out_refs = [_ for _ in out_refs if _ is not None]
506 out_refs = [_FusionRef(_normalize_arg(_, mem), mem) for _ in out_refs]
507 out_vars = [_normalize_arg(copy(_), mem) for _ in out_refs]
508 nout = len(out_vars)
509 op_list = mem.op_list
510 tmpvars = mem.var_list[len(in_vars):]
511 if nout > 0:
512 tmpvars = tmpvars[:-nout]
513
514 in_params = ', '.join(_get_params(in_vars[:nin]))
515 out_params = ', '.join(_get_params(out_vars))
516 operation = ''.join(_get_declaration_from_var(_) for _ in tmpvars)
517 operation += ''.join(_get_declaration_from_op(_) for _ in op_list)
518 operation += '\n'.join(_get_operation_code(_) for _ in op_list)
519
520 if reduce is None:
521 if not out_params:
522 in_params = ', '.join(_get_params(in_vars[:-1]))
523 out_params = ', '.join(_get_params([in_vars[-1]]))
524 submodules = _gather_submodules(op_list)
525 submodule_code = ''.join(_get_submodule_code(_)
526 for _ in submodules.values())
527 return core.ElementwiseKernel(in_params, out_params,
528 operation, preamble=submodule_code,
529 name=name)
530 else:
531 if nout != 1:
532 raise Exception("Wrong number of arguments")
533 # pre-map
534 pre_type = out_vars[0].ty
535 pre_code = _get_pre_code(in_vars, out_vars, operation)
536
537 # reduce
538 reduce_op = _get_reduce_op(reduce._raw, pre_type)
539 reduce_code = reduce_op[2][1]
540 reduce_type = numpy.dtype(reduce_op[1][0])
541 rtype = reduce_op[2][3]
542 post_type = "type_in0_raw" if rtype is None else rtype
543 pre_code += "typedef %s type_in0_raw;\n" % _dtype_to_ctype[reduce_type]
544
545 # post-map
546 post_in = [_FusionVar(0, reduce_type)]
547 mem = _FusionMem(post_in)
548 post_in_ref = [_FusionRef(_, mem) for _ in post_in]
549 post_out = _normalize_arg(post_map(*post_in_ref), mem)
550 if type(post_out) == tuple:
551 raise Exception("Can't reduce a tuple")
552 post_vars = mem.var_list
553 post_ops = mem.op_list
554 post_code = ''.join(_get_declaration_from_var(_)
555 for _ in post_vars[1:])
556 post_code += ''.join(_get_declaration_from_op(_) for _ in post_ops)
557 post_code += '\n'.join(_get_operation_code(_) for _ in post_ops)
558 post_code = _get_post_code(post_vars, post_code, post_out)
559 post_code += (
560 "typedef %s type_out0_raw;\n" % _dtype_to_ctype[reduce_type])
561 post_code += _get_fix_code(post_type, reduce_type, reduce_op[2][2])
562
563 submodules = _gather_submodules(op_list + post_ops)
564 submodule_code = ''.join(_get_submodule_code(v)
565 for v in submodules.values())
566 submodule_code += reduce._raw._preamble + pre_code + post_code
567 operation_args = ['v' + str(i) for i in six.moves.range(nin)]
568 operation = '_pre_map(' + ', '.join(operation_args) + ')'
569 out_params = '%s res' % post_out.ty
570 return core.ReductionKernel(in_params, out_params, operation,
571 reduce_code,
572 'res = _post_map(_post_fix(a))',
573 identity,
574 name=name,
575 reduce_type=post_type,
576 preamble=submodule_code)
577
578
579 class Fusion(object):
580
581 """Function class.
582
583 This class can be obtained by using the `fuse` function and
584 works like `ElementwiseKernel` or `ReductionKernel`.
585
586 Attributes:
587 func (function): The function before fusing.
588 name (str): The name of the function.
589 reduce (ufunc): Reduction ufunc.
590 post_map (function): Mapping function for reduced values.
591 """
592
593 def __init__(self, func, input_num, reduce, post_map, name=None):
594 self.func = func
595 self.name = name or func.__name__
596 self.input_num = input_num
597 self.reduce = reduce
598 self.post_map = post_map
599 self.identity = None if reduce is None else self.reduce._raw.identity
600 self._memo = {}
601
602 def __repr__(self):
603 return "<Fusion '%s'>" % self.name
604
605 def __call__(self, *args, **kwargs):
606 _thread_local.in_fusion = True
607 try:
608 return self._call(*args, **kwargs)
609 finally:
610 _thread_local.in_fusion = False
611
612 def _call(self, *args, **kwargs):
613 axis = kwargs['axis'] if 'axis' in kwargs else None
614 if len(args) == 0:
615 raise Exception('number of arguments must be more than 0')
616 if builtins.any(
617 not isinstance(_, (core.ndarray, numpy.ndarray, numpy.generic))
618 for _ in args):
619 raise TypeError('Invalid argument type for \'{}\': ({})'.format(
620 self.name,
621 ', '.join(repr(type(_)) for _ in args)))
622
623 def is_cupy_data(a):
624 return isinstance(a, (core.ndarray, numpy.generic))
625 if builtins.all(is_cupy_data(_) for _ in args):
626 types = [_.dtype for _ in args]
627 key = tuple(types)
628 if key not in self._memo:
629 if self.input_num is not None:
630 nin = self.input_num
631 else:
632 nin = len(args)
633 f = _get_fusion(self.func, nin, self.reduce,
634 self.post_map, self.identity, types, self.name)
635 self._memo[key] = f
636 f = self._memo[key]
637 if self.reduce is None:
638 return f(*args)
639 else:
640 return f(*args, axis=axis)
641 else:
642 if builtins.any(type(_) is core.ndarray for _ in args):
643 types = '.'.join(repr(type(_)) for _ in args)
644 message = "Can't fuse \n %s(%s)" % (self.name, types)
645 warnings.warn(message)
646 if self.reduce is None:
647 return self.func(*args)
648 elif axis is None:
649 return self.post_map(self.reduce(self.func(*args)))
650 else:
651 return self.post_map(self.reduce(self.func(*args), axis=axis))
652
653
654 def fuse(*args, **kwargs):
655 """Function fusing decorator.
656
657 This decorator can be used to define an elementwise or reduction kernel
658 more easily than `ElementwiseKernel` class or `ReductionKernel` class.
659
660 This decorator makes `Fusion` class from the given function.
661
662 Args:
663 input_num (int): Number of input arguments of the given function.
664 reduce (function): The reduce function which is applied after
665 pre-mapping step. If not assigned, reduction step is skipped.
666 post_map (function): Mapping function for reduced values.
667 If not assigned, post_map step is skipped.
668 kernel_name (str): Name of the fused kernel function.
669 If omitted, the name of the decorated function is used.
670
671 .. note::
672 This API is currently experimental and the interface may be changed in
673 the future version.
674
675 """
676
677 def wrapper(
678 f, input_num=None, reduce=None, post_map=lambda x: x,
679 kernel_name=None):
680 return Fusion(f, input_num, reduce, post_map, kernel_name)
681
682 if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
683 return functools.update_wrapper(wrapper(args[0]), args[0])
684 else:
685 return lambda f: functools.update_wrapper(
686 wrapper(f, *args, **kwargs), f)
687
688
689 class ufunc(core.ufunc):
690
691 def __init__(self, fusion_op, cupy_op, numpy_op):
692 self.name = fusion_op.name
693 self.nin = fusion_op.nin
694 self.nout = fusion_op.nout
695 self.nargs = fusion_op.nargs
696 self._ops = fusion_op._ops
697 self._preamble = fusion_op._preamble
698 self.__doc__ = cupy_op.__doc__
699 self._params = fusion_op._params
700 self._routine_cache = fusion_op._routine_cache
701
702 self._fusion_op = fusion_op
703 self._cupy_op = cupy_op
704 self._numpy_op = numpy_op
705
706 def __repr__(self):
707 return repr(self._cupy_op)
708
709 def __call__(self, *args, **kwargs):
710 in_fusion = getattr(_thread_local, 'in_fusion', False)
711 if in_fusion:
712 if builtins.any(isinstance(_, _FusionRef) for _ in args):
713 return _convert(self._fusion_op)(*args, **kwargs)
714 elif builtins.any(isinstance(_, numpy.ndarray) for _ in args):
715 return self._numpy_op(*args, **kwargs)
716
717 return self._cupy_op(*args, **kwargs)
718
719 __doc__ = core.ufunc.__doc__
720 __call__.__doc__ = core.ufunc.__call__.__doc__
721
722
723 def _create_ufunc(cupy_ufunc, numpy_ufunc):
724 return ufunc(cupy_ufunc, cupy_ufunc, numpy_ufunc)
725
726
727 where = ufunc(sorting.search._where_ufunc,
728 sorting.search.where, numpy.where)
729
730 clip = ufunc(core._clip, math.misc.clip, numpy.clip)
731
732 copy = ufunc(core.elementwise_copy,
733 creation.from_data.copy, numpy.copy)
734
735 bitwise_and = _create_ufunc(core.bitwise_and, numpy.bitwise_and)
736 bitwise_or = _create_ufunc(core.bitwise_or, numpy.bitwise_or)
737 bitwise_xor = _create_ufunc(core.bitwise_xor, numpy.bitwise_xor)
738 invert = _create_ufunc(core.invert, numpy.invert)
739 left_shift = _create_ufunc(core.left_shift, numpy.left_shift)
740 right_shift = _create_ufunc(core.right_shift, numpy.right_shift)
741
742 greater = _create_ufunc(core.greater, numpy.greater)
743 greater_equal = _create_ufunc(core.greater_equal, numpy.greater_equal)
744 less = _create_ufunc(core.less, numpy.less)
745 less_equal = _create_ufunc(core.less_equal, numpy.less_equal)
746 equal = _create_ufunc(core.equal, numpy.equal)
747 not_equal = _create_ufunc(core.not_equal, numpy.not_equal)
748
749 isfinite = _create_ufunc(logic.content.isfinite, numpy.isfinite)
750 isinf = _create_ufunc(logic.content.isinf, numpy.isinf)
751 isnan = _create_ufunc(logic.content.isnan, numpy.isnan)
752
753 logical_and = _create_ufunc(logic.ops.logical_and, numpy.logical_and)
754 logical_or = _create_ufunc(logic.ops.logical_or, numpy.logical_or)
755 logical_not = _create_ufunc(logic.ops.logical_not, numpy.logical_not)
756 logical_xor = _create_ufunc(logic.ops.logical_xor, numpy.logical_xor)
757
758 sin = _create_ufunc(math.trigonometric.sin, numpy.sin)
759 cos = _create_ufunc(math.trigonometric.cos, numpy.cos)
760 tan = _create_ufunc(math.trigonometric.tan, numpy.tan)
761 arcsin = _create_ufunc(math.trigonometric.arcsin, numpy.arcsin)
762 arccos = _create_ufunc(math.trigonometric.arccos, numpy.arccos)
763 arctan = _create_ufunc(math.trigonometric.arctan, numpy.arctan)
764 arctan2 = _create_ufunc(math.trigonometric.arctan2, numpy.arctan2)
765 hypot = _create_ufunc(math.trigonometric.hypot, numpy.hypot)
766 deg2rad = _create_ufunc(math.trigonometric.deg2rad, numpy.deg2rad)
767 rad2deg = _create_ufunc(math.trigonometric.rad2deg, numpy.rad2deg)
768 degrees = _create_ufunc(math.trigonometric.degrees, numpy.degrees)
769 radians = _create_ufunc(math.trigonometric.radians, numpy.radians)
770
771 sinh = _create_ufunc(math.hyperbolic.sinh, numpy.sinh)
772 cosh = _create_ufunc(math.hyperbolic.cosh, numpy.cosh)
773 tanh = _create_ufunc(math.hyperbolic.tanh, numpy.tanh)
774 arcsinh = _create_ufunc(math.hyperbolic.arcsinh, numpy.arcsinh)
775 arccosh = _create_ufunc(math.hyperbolic.arccosh, numpy.arccosh)
776 arctanh = _create_ufunc(math.hyperbolic.arctanh, numpy.arctanh)
777
778 rint = _create_ufunc(math.rounding.rint, numpy.rint)
779 floor = _create_ufunc(math.rounding.floor, numpy.floor)
780 ceil = _create_ufunc(math.rounding.ceil, numpy.ceil)
781 trunc = _create_ufunc(math.rounding.trunc, numpy.trunc)
782 fix = _create_ufunc(math.rounding.fix, numpy.fix)
783
784 exp = _create_ufunc(math.explog.exp, numpy.exp)
785 expm1 = _create_ufunc(math.explog.expm1, numpy.expm1)
786 exp2 = _create_ufunc(math.explog.exp2, numpy.exp2)
787 log = _create_ufunc(math.explog.log, numpy.log)
788 log10 = _create_ufunc(math.explog.log10, numpy.log10)
789 log2 = _create_ufunc(math.explog.log2, numpy.log2)
790 log1p = _create_ufunc(math.explog.log1p, numpy.log1p)
791 logaddexp = _create_ufunc(math.explog.logaddexp, numpy.logaddexp)
792 logaddexp2 = _create_ufunc(math.explog.logaddexp2, numpy.logaddexp2)
793
794 signbit = _create_ufunc(math.floating.signbit, numpy.signbit)
795 copysign = _create_ufunc(math.floating.copysign, numpy.copysign)
796 ldexp = _create_ufunc(math.floating.ldexp, numpy.ldexp)
797 frexp = _create_ufunc(math.floating.frexp, numpy.frexp)
798 nextafter = _create_ufunc(math.floating.nextafter, numpy.nextafter)
799
800 add = _create_ufunc(math.arithmetic.add, numpy.add)
801 reciprocal = _create_ufunc(math.arithmetic.reciprocal, numpy.reciprocal)
802 negative = _create_ufunc(math.arithmetic.negative, numpy.negative)
803 angle = _create_ufunc(math.arithmetic.angle, numpy.angle)
804 conj = _create_ufunc(math.arithmetic.conj, numpy.conj)
805 real = _create_ufunc(math.arithmetic.real, numpy.real)
806 imag = _create_ufunc(math.arithmetic.imag, numpy.imag)
807 multiply = _create_ufunc(math.arithmetic.multiply, numpy.multiply)
808 divide = _create_ufunc(math.arithmetic.divide, numpy.divide)
809 power = _create_ufunc(math.arithmetic.power, numpy.power)
810 subtract = _create_ufunc(math.arithmetic.subtract, numpy.subtract)
811 true_divide = _create_ufunc(math.arithmetic.true_divide, numpy.true_divide)
812 floor_divide = _create_ufunc(math.arithmetic.floor_divide, numpy.floor_divide)
813 fmod = _create_ufunc(math.arithmetic.fmod, numpy.fmod)
814 mod = _create_ufunc(math.arithmetic.remainder, numpy.mod)
815 modf = _create_ufunc(math.arithmetic.modf, numpy.modf)
816 remainder = _create_ufunc(math.arithmetic.remainder, numpy.remainder)
817
818 sqrt = _create_ufunc(math.misc.sqrt, numpy.sqrt)
819 sqrt_fixed = _create_ufunc(math.misc.sqrt_fixed, numpy.sqrt)
820 square = _create_ufunc(math.misc.square, numpy.square)
821 absolute = _create_ufunc(math.misc.absolute, numpy.absolute)
822 abs = _create_ufunc(math.misc.absolute, numpy.abs)
823 sign = _create_ufunc(math.misc.sign, numpy.sign)
824 maximum = _create_ufunc(math.misc.maximum, numpy.maximum)
825 minimum = _create_ufunc(math.misc.minimum, numpy.minimum)
826 fmax = _create_ufunc(math.misc.fmax, numpy.fmax)
827 fmin = _create_ufunc(math.misc.fmin, numpy.fmin)
828
829
830 class reduction(object):
831
832 def __init__(self, cupy_op, numpy_op):
833 self._cupy_op = cupy_op
834 self._numpy_op = numpy_op
835 self.__doc__ = cupy_op.__doc__
836
837 def __call__(self, *args, **kwargs):
838 if builtins.any(type(_) == numpy.ndarray for _ in args):
839 return self._numpy_op(*args, **kwargs)
840 else:
841 return self._cupy_op(*args, **kwargs)
842
843
844 all = reduction(logic.truth.all, numpy.all)
845 any = reduction(logic.truth.any, numpy.any)
846 sum = reduction(math.sumprod.sum, numpy.sum)
847 prod = reduction(math.sumprod.prod, numpy.prod)
848 amax = reduction(statistics.order.amax, numpy.amax)
849 amin = reduction(statistics.order.amin, numpy.amin)
850
851
852 all._raw = core._all
853 any._raw = core._any
854 sum._raw = core._sum
855 prod._raw = core._prod
856 amax._raw = core._amax
857 amin._raw = core._amin
858
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch in the `git diff` format, fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
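For reference when localizing the behavior: the reduction objects defined at the bottom of `cupy/core/fusion.py` (`sum`, `prod`, `amax`, ...) each carry a `_raw` attribute, and `_get_fusion` passes that raw kernel to `_get_reduce_op`, so whatever kernel is bound there decides which input dtypes a fused reduction accepts and what dtype it produces. The snippet below is an illustrative sketch only — it is not part of the repository files, it uses the names defined in the file above, and it assumes a CUDA device with this version of CuPy installed:

```python
# Illustrative sketch (not part of the repository); assumes a CUDA device.
import cupy
from cupy.core import fusion

# fuse() wraps the function in a Fusion object; reduce=fusion.sum makes the
# generated kernel a ReductionKernel whose dtype handling comes from sum._raw.
@fusion.fuse(reduce=fusion.sum)
def fused_sum_of_squares(x):
    return x * x

a = cupy.arange(3, dtype=cupy.int16)
# Fusion._call -> _get_fusion -> _get_reduce_op(reduce._raw, pre_type) walks the
# ops of the kernel bound to sum._raw, which is exactly the binding the issue
# is about.
out = fused_sum_of_squares(a)
print(out, out.dtype)
```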
| diff --git a/cupy/core/fusion.py b/cupy/core/fusion.py
--- a/cupy/core/fusion.py
+++ b/cupy/core/fusion.py
@@ -851,7 +851,7 @@
all._raw = core._all
any._raw = core._any
-sum._raw = core._sum
-prod._raw = core._prod
+sum._raw = core._sum_auto_dtype
+prod._raw = core._prod_auto_dtype
amax._raw = core._amax
amin._raw = core._amin
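The patch above only changes which raw reduction kernels the fusion-level `sum` and `prod` delegate to; the names `_sum_auto_dtype` and `_prod_auto_dtype` come from the diff itself and are assumed here to be the dtype-aware kernels in `cupy.core.core`. The intended semantics follow NumPy, where passing an explicit `dtype=` suppresses the default integer upcast. A NumPy-only illustration (no GPU required):

```python
import numpy

x = numpy.arange(3, dtype=numpy.int16)
print(x.sum().dtype)               # int64 on a typical 64-bit platform (default upcast)
print(x.sum(dtype=x.dtype).dtype)  # int16 -- the behavior the issue asks CuPy to honor
```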
| {"golden_diff": "diff --git a/cupy/core/fusion.py b/cupy/core/fusion.py\n--- a/cupy/core/fusion.py\n+++ b/cupy/core/fusion.py\n@@ -851,7 +851,7 @@\n \n all._raw = core._all\n any._raw = core._any\n-sum._raw = core._sum\n-prod._raw = core._prod\n+sum._raw = core._sum_auto_dtype\n+prod._raw = core._prod_auto_dtype\n amax._raw = core._amax\n amin._raw = core._amin\n", "issue": "sum without upcast\n`cupy.sum` and `cupy.prod` upcasts ints (or bool) to `int64` or `uint64`, to align with numpy. This feature would be disabled with `x.sum(dtype=x.dtype)` but not supported in cupy.\r\n```\r\n>>> x = cupy.arange(3).astype(cupy.int16)\r\n>>> x.sum(dtype=x.dtype)\r\nTraceback (most recent call last):\r\n File \"<stdin>\", line 1, in <module>\r\n File \"cupy/core/core.pyx\", line 1139, in cupy.core.core.ndarray.sum\r\n File \"cupy/core/core.pyx\", line 1147, in cupy.core.core.ndarray.sum\r\n File \"cupy/core/reduction.pxi\", line 222, in cupy.core.core.simple_reduction_function.__call__\r\n File \"cupy/core/elementwise.pxi\", line 698, in cupy.core.core._guess_routine\r\nTypeError: Wrong type (<class 'numpy.int16'>) of arguments for cupy_sum\r\n>>> cupy.__version__\r\n'4.0.0'\r\n```\n", "before_files": [{"content": "import functools\nimport six\nfrom six.moves import builtins\nimport string\nimport threading\nimport warnings\n\nimport numpy\n\nfrom cupy.core import core\nfrom cupy import creation\nfrom cupy import logic\nfrom cupy import math\nfrom cupy import sorting\nfrom cupy import statistics\n\n\n_thread_local = threading.local()\n\n\nclass FusionOp(object):\n\n def __init__(self, name, operation, param_names,\n nin, nout, in_vars, out_vars, types, num):\n self.name = name\n self.operation = operation\n self.param_names = param_names\n self.nin = nin\n self.nout = nout\n self.in_vars = in_vars\n self.out_vars = out_vars\n self.types = types\n self.num = num\n\n def __repr__(self):\n return \"<FusionOp, name={}, types=[{}]>\".format(\n self.name, ', '.join(_.name for _ in self.types))\n\n\nclass _FusionVar(object):\n\n def __init__(self, num, ty, const=None):\n self.num = num\n self.ty = ty\n self.const = const\n\n def __repr__(self):\n return \"<_FusionVar, num={}, ty={}, const={}>\".format(\n self.num, self.ty, self.const)\n\n\nclass _FusionMem(object):\n\n def __init__(self, var_list):\n self.op_list = []\n self.var_list = var_list[:]\n\n def __repr__(self):\n return \"<_FusionMem, op_list={}, var_list={}>\".format(\n self.op_list,\n self.var_list)\n\n def get_fresh(self, ty, **kwargs):\n n = len(self.var_list)\n ret = _FusionVar(n, ty, **kwargs)\n self.var_list.append(ret)\n return ret\n\n def set_op(self, name, operation, param_names,\n nin, nout, in_vars, out_vars, types):\n num = len(self.op_list)\n op = FusionOp(name, operation, param_names,\n nin, nout, in_vars, out_vars, types, num)\n self.op_list.append(op)\n\n\nclass _FusionRef(object):\n\n def __init__(self, var, mem):\n self._var = var\n self.dtype = var.ty\n self._mem = mem\n\n def __repr__(self):\n return \"<_FusionRef, dtype=%s>\" % self.dtype\n\n def __neg__(self):\n return negative(self)\n\n def __add__(self, other):\n return add(self, other)\n\n def __iadd__(self, other):\n return add(self, other, self)\n\n def __radd__(self, other):\n return add(other, self)\n\n def __sub__(self, other):\n return subtract(self, other)\n\n def __isub__(self, other):\n return subtract(self, other, self)\n\n def __rsub__(self, other):\n return subtract(other, self)\n\n def __mul__(self, other):\n return multiply(self, other)\n\n def 
__imul__(self, other):\n return multiply(self, other, self)\n\n def __rmul__(self, other):\n return multiply(other, self)\n\n def __div__(self, other):\n return divide(self, other)\n\n def __idiv__(self, other):\n return divide(self, other, self)\n\n def __rdiv__(self, other):\n return divide(other, self)\n\n def __truediv__(self, other):\n return true_divide(self, other)\n\n def __itruediv__(self, other):\n return true_divide(self, other, self)\n\n def __rtruediv__(self, other):\n return true_divide(other, self)\n\n def __floordiv__(self, other):\n return floor_divide(self, other)\n\n def __ifloordiv__(self, other):\n return floor_divide(self, other, self)\n\n def __rfloordiv__(self, other):\n return floor_divide(other, self)\n\n def __mod__(self, other):\n return remainder(self, other)\n\n def __imod__(self, other):\n return remainder(self, other, self)\n\n def __rmod__(self, other):\n return remainder(other, self)\n\n def __pow__(x, y):\n return power(x, y)\n\n def __ipow__(self, other):\n return power(self, other, self)\n\n def __lshift__(self, other):\n return left_shift(self, other)\n\n def __ilshift__(self, other):\n return left_shift(self, other, self)\n\n def __rlshift__(self, other):\n return left_shift(other, self)\n\n def __rshift__(self, other):\n return right_shift(self, other)\n\n def __irshift__(self, other):\n return right_shift(self, other, self)\n\n def __rrshift__(self, other):\n return right_shift(other, self)\n\n def __and__(self, other):\n return bitwise_and(self, other)\n\n def __iand__(self, other):\n return bitwise_and(self, other, self)\n\n def __rand__(self, other):\n return bitwise_and(other, self)\n\n def __or__(self, other):\n return bitwise_or(self, other)\n\n def __ior__(self, other):\n return bitwise_or(self, other, self)\n\n def __ror__(self, other):\n return bitwise_or(other, self)\n\n def __xor__(self, other):\n return bitwise_xor(self, other)\n\n def __ixor__(self, other):\n return bitwise_xor(self, other, self)\n\n def __rxor__(self, other):\n return bitwise_xor(other, self)\n\n def __invert__(self):\n return invert(self)\n\n def __lt__(self, other):\n return less(self, other)\n\n def __le__(self, other):\n return less_equal(self, other)\n\n def __eq__(self, other):\n return equal(self, other)\n\n def __ne__(self, other):\n return not_equal(self, other)\n\n def __gt__(self, other):\n return greater(self, other)\n\n def __ge__(self, other):\n return greater_equal(self, other)\n\n def __nonzero__(self):\n raise Exception(\"Can't cast to bool\")\n\n def __bool__(self):\n raise Exception(\"Can't cast to bool\")\n\n def __setitem__(self, slices, value):\n if slices is Ellipsis or (isinstance(slices, slice) and\n slices == slice(None)):\n copy(value, self)\n else:\n raise ValueError('The fusion supports `[...]` or `[:]`.')\n\n def copy(self):\n return copy(self)\n\n\n_kind_score = {\n 'b': 0,\n 'u': 1,\n 'i': 1,\n 'f': 2,\n 'c': 3,\n}\n\n_dtype_to_ctype = {\n numpy.dtype('float64'): 'double',\n numpy.dtype('float32'): 'float',\n numpy.dtype('float16'): 'float16',\n numpy.dtype('complex128'): 'complex<double>',\n numpy.dtype('complex64'): 'complex<float>',\n numpy.dtype('int64'): 'long long',\n numpy.dtype('int32'): 'int',\n numpy.dtype('int16'): 'short',\n numpy.dtype('int8'): 'signed char',\n numpy.dtype('uint64'): 'unsigned long long',\n numpy.dtype('uint32'): 'unsigned int',\n numpy.dtype('uint16'): 'unsigned short',\n numpy.dtype('uint8'): 'unsigned char',\n numpy.dtype('bool'): 'bool',\n}\n\n_dtype_list = [numpy.dtype(_) for _ in 
'?bhilqBHILQefdFD']\n\n\ndef _normalize_arg(arg, mem):\n arg_type = type(arg)\n if arg_type is _FusionRef:\n return arg._var\n is_scalar = arg_type in six.integer_types + (float, bool, complex)\n is_ndarray = hasattr(arg, 'dtype') and arg.dtype in _dtype_list\n if is_scalar or is_ndarray:\n return mem.get_fresh(numpy.dtype(arg_type), const=arg)\n raise Exception('Unsupported type %s' % arg_type)\n\n\ndef _convert(f):\n if type(f) is core.ufunc:\n return _convert_from_ufunc(f)\n if type(f) is core.ElementwiseKernel:\n return _convert_from_elementwise(f)\n raise Exception(\"Can't convert from %s to FusionOp\" % type(f))\n\n\ndef _should_use_min_scalar(in_args):\n max_array_kind = -2\n max_scalar_kind = -1\n for i in in_args:\n kind = _kind_score[i.ty.kind]\n if i.const is None:\n max_array_kind = max(max_array_kind, kind)\n else:\n max_scalar_kind = max(max_scalar_kind, kind)\n return (max_scalar_kind != -1 and\n max_array_kind >= max_scalar_kind)\n\n\ndef _convert_from_ufunc(ufunc):\n nin = ufunc.nin\n nout = ufunc.nout\n\n def get_mem(args):\n for i in args:\n if type(i) == _FusionRef:\n return i._mem\n raise Exception('number of ndarray arguments must be more than 0')\n\n def can_cast1(args, ty_ins):\n for i in six.moves.range(nin):\n if args[i].const is None:\n if not numpy.can_cast(args[i].ty, ty_ins[i]):\n return False\n else:\n if not numpy.can_cast(args[i].const, ty_ins[i]):\n return False\n return True\n\n def can_cast2(args, ty_ins):\n for i in six.moves.range(nin):\n if not numpy.can_cast(args[i].ty, ty_ins[i]):\n return False\n return True\n\n def res(*args, **kwargs):\n mem = get_mem(args)\n var_list = [_normalize_arg(_, mem) for _ in args]\n if 'out' in kwargs:\n var_list.append(_normalize_arg(kwargs.pop('out'), mem))\n if kwargs:\n raise TypeError('Wrong arguments %s' % kwargs)\n assert nin <= len(var_list) <= nin + nout\n in_vars = var_list[:nin]\n out_vars = var_list[nin:]\n can_cast = can_cast1 if _should_use_min_scalar(in_vars) else can_cast2\n for ty_ins, ty_outs, op in ufunc._ops:\n ty_ins = [numpy.dtype(_) for _ in ty_ins]\n ty_outs = [numpy.dtype(_) for _ in ty_outs]\n if can_cast(in_vars, ty_ins):\n param_names = (['in%d' % i for i in six.moves.range(nin)] +\n ['out%d' % i for i in six.moves.range(nout)])\n ret = []\n for i in six.moves.range(nout):\n if i >= len(out_vars):\n v = mem.get_fresh(ty_outs[i])\n out_vars.append(v)\n ret.append(_FusionRef(v, mem))\n elif numpy.can_cast(ty_outs[i], out_vars[i].ty,\n \"same_kind\"):\n v = out_vars[i]\n ret.append(_FusionRef(v, mem))\n else:\n raise TypeError(\n 'output (typecode \\'{}\\') could not be coerced '\n 'to provided output parameter (typecode \\'{}\\') '\n 'according to the casting rule '\n '\"same_kind\"'.format(\n ty_outs[i].char, out_vars[i].ty.char))\n mem.set_op(ufunc.name, op, param_names, nin, nout,\n in_vars, out_vars, ty_ins + ty_outs)\n return ret[0] if len(ret) == 1 else tuple(ret)\n raise TypeError('Invalid type cast in \\'{}\\': {} -> {}'.format(\n ufunc.name,\n [_.ty for _ in in_vars],\n [_.ty for _ in out_vars]))\n return res\n\n\ndef _convert_from_elementwise(elem):\n raise Exception('Not Impletmented')\n\n\ndef _gather_submodules(ops):\n return {(op.name, tuple(op.types)): op for op in ops}\n\n\ndef _get_params(var_list):\n return ['%s v%d' % (var.ty, var.num) for var in var_list]\n\n\ndef _get_out_params(var_list):\n return ['%s ret%d' % (var.ty, i) for i, var in enumerate(var_list)]\n\n\ndef _get_declaration_from_var(var):\n if var.const is None:\n return '%s v%d;\\n' % (_dtype_to_ctype[var.ty], 
var.num)\n\n c = var.const\n val = numpy.asscalar(c) if hasattr(c, 'dtype') else c\n\n if isinstance(val, bool):\n init = '= %s' % str(c).lower()\n elif isinstance(val, complex):\n init = '(%s, %s)' % (c.real, c.imag)\n elif isinstance(val, six.integer_types + (float,)):\n init = '= %s' % str(c)\n else:\n raise TypeError('Invalid constant type: {}'.format(type(c)))\n return 'const %s v%d %s;\\n' % (_dtype_to_ctype[var.ty], var.num, init)\n\n\ndef _get_declaration_from_op(op):\n return ''.join('%s v%d_%d;\\n' % (_dtype_to_ctype[t], op.num, j)\n for j, t in enumerate(op.types))\n\n\ndef _get_operation_code(op):\n code = ''.join('v%d_%d = v%d;\\n' % (op.num, i, v.num)\n for i, v in enumerate(op.in_vars))\n params = ['v%d_%d' % (op.num, i)\n for i in six.moves.range(op.nin + op.nout)]\n code += op.name + '(' + ', '.join(params) + ');\\n'\n code += ''.join('v%d = v%d_%d;\\n' %\n (v.num, op.num, i + op.nin)\n for i, v in enumerate(op.out_vars))\n return code\n\n\ndef _get_submodule_code(op):\n parameters = ', '.join('%s &%s' % (_dtype_to_ctype[t], name)\n for i, (name, t)\n in enumerate(zip(op.param_names, op.types)))\n typedecl = ''.join(('typedef %s in%d_type;\\n' % (_dtype_to_ctype[t], i))\n for i, t in enumerate(op.types[:op.nin]))\n typedecl += ''.join(('typedef %s out%d_type;\\n' % (_dtype_to_ctype[t], i))\n for i, t in enumerate(op.types[op.nin:]))\n module_code = string.Template('''\n __device__ void ${name}(${parameters}) {\n ${typedecl}\n ${operation};\n }\n ''').substitute(\n name=op.name,\n parameters=parameters,\n operation=op.operation,\n typedecl=typedecl)\n return module_code + '\\n'\n\n\ndef _get_pre_code(in_vars, out_vars, operation):\n in_params = ', '.join('%s v%s' % (_dtype_to_ctype[v.ty], v.num)\n for v in in_vars)\n out_params = ''.join('%s v%s;\\n' % (_dtype_to_ctype[v.ty], v.num)\n for v in out_vars)\n module_code = string.Template('''\n __device__ ${return_type} _pre_map(${in_params}) {\n ${out_params}\n ${operation};\n return ${return_var};\n }\n ''').substitute(\n return_type=_dtype_to_ctype[out_vars[0].ty],\n in_params=in_params,\n out_params=out_params,\n operation=operation,\n return_var='v%d' % out_vars[0].num)\n return module_code\n\n\ndef _get_reduce_op(ops, dtype):\n for i in ops._ops:\n if numpy.can_cast(dtype.type, i[0][0]):\n return i\n raise TypeError(\"Type is mismatched. 
%s(...), %s\" % (ops.name, dtype.type))\n\n\ndef _get_post_code(post_vars, operation, post_out):\n module_code = string.Template('''\n __device__ ${return_type} _post_map(${arg_type} v0) {\n ${operation};\n return v${return_var};\n }\n ''').substitute(\n arg_type=_dtype_to_ctype[post_vars[0].ty],\n return_type=_dtype_to_ctype[post_vars[post_out.num].ty],\n operation=operation,\n return_var=post_out.num)\n return module_code\n\n\ndef _get_fix_code(data_type, fixed_type, operation):\n module_code = string.Template('''\n __device__ ${fixed_type} _post_fix(${data_type} a) {\n ${fixed_type} out0;\n ${operation};\n return out0;\n }\n ''').substitute(\n data_type=data_type,\n fixed_type=_dtype_to_ctype[fixed_type],\n operation=operation)\n return module_code\n\n\ndef _get_fusion(func, nin, reduce, post_map, identity, input_types, name):\n in_vars = [_FusionVar(i, t) for i, t in enumerate(input_types)]\n mem = _FusionMem(in_vars)\n in_refs = [_FusionRef(_, mem) for _ in in_vars]\n out_refs = func(*in_refs)\n out_refs = list(out_refs) if type(out_refs) == tuple else [out_refs]\n out_refs = [_ for _ in out_refs if _ is not None]\n out_refs = [_FusionRef(_normalize_arg(_, mem), mem) for _ in out_refs]\n out_vars = [_normalize_arg(copy(_), mem) for _ in out_refs]\n nout = len(out_vars)\n op_list = mem.op_list\n tmpvars = mem.var_list[len(in_vars):]\n if nout > 0:\n tmpvars = tmpvars[:-nout]\n\n in_params = ', '.join(_get_params(in_vars[:nin]))\n out_params = ', '.join(_get_params(out_vars))\n operation = ''.join(_get_declaration_from_var(_) for _ in tmpvars)\n operation += ''.join(_get_declaration_from_op(_) for _ in op_list)\n operation += '\\n'.join(_get_operation_code(_) for _ in op_list)\n\n if reduce is None:\n if not out_params:\n in_params = ', '.join(_get_params(in_vars[:-1]))\n out_params = ', '.join(_get_params([in_vars[-1]]))\n submodules = _gather_submodules(op_list)\n submodule_code = ''.join(_get_submodule_code(_)\n for _ in submodules.values())\n return core.ElementwiseKernel(in_params, out_params,\n operation, preamble=submodule_code,\n name=name)\n else:\n if nout != 1:\n raise Exception(\"Wrong number of number of arguments\")\n # pre-map\n pre_type = out_vars[0].ty\n pre_code = _get_pre_code(in_vars, out_vars, operation)\n\n # reduce\n reduce_op = _get_reduce_op(reduce._raw, pre_type)\n reduce_code = reduce_op[2][1]\n reduce_type = numpy.dtype(reduce_op[1][0])\n rtype = reduce_op[2][3]\n post_type = \"type_in0_raw\" if rtype is None else rtype\n pre_code += \"typedef %s type_in0_raw;\\n\" % _dtype_to_ctype[reduce_type]\n\n # post-map\n post_in = [_FusionVar(0, reduce_type)]\n mem = _FusionMem(post_in)\n post_in_ref = [_FusionRef(_, mem) for _ in post_in]\n post_out = _normalize_arg(post_map(*post_in_ref), mem)\n if type(post_out) == tuple:\n raise Exception(\"Can't reduce a tuple\")\n post_vars = mem.var_list\n post_ops = mem.op_list\n post_code = ''.join(_get_declaration_from_var(_)\n for _ in post_vars[1:])\n post_code += ''.join(_get_declaration_from_op(_) for _ in post_ops)\n post_code += '\\n'.join(_get_operation_code(_) for _ in post_ops)\n post_code = _get_post_code(post_vars, post_code, post_out)\n post_code += (\n \"typedef %s type_out0_raw;\\n\" % _dtype_to_ctype[reduce_type])\n post_code += _get_fix_code(post_type, reduce_type, reduce_op[2][2])\n\n submodules = _gather_submodules(op_list + post_ops)\n submodule_code = ''.join(_get_submodule_code(v)\n for v in submodules.values())\n submodule_code += reduce._raw._preamble + pre_code + post_code\n operation_args = ['v' + 
str(i) for i in six.moves.range(nin)]\n operation = '_pre_map(' + ', '.join(operation_args) + ')'\n out_params = '%s res' % post_out.ty\n return core.ReductionKernel(in_params, out_params, operation,\n reduce_code,\n 'res = _post_map(_post_fix(a))',\n identity,\n name=name,\n reduce_type=post_type,\n preamble=submodule_code)\n\n\nclass Fusion(object):\n\n \"\"\"Function class.\n\n This class can be get by using `fuse` function and\n works like `ElementwiseKernel` or `ReductionKernel`.\n\n Attributes:\n func (function): The function before fusing.\n name (str): The name of the function.\n reduce (ufunc): Reduction ufunc.\n post_map (function): Mapping function for reduced values.\n \"\"\"\n\n def __init__(self, func, input_num, reduce, post_map, name=None):\n self.func = func\n self.name = name or func.__name__\n self.input_num = input_num\n self.reduce = reduce\n self.post_map = post_map\n self.identity = None if reduce is None else self.reduce._raw.identity\n self._memo = {}\n\n def __repr__(self):\n return \"<Fusion '%s'>\" % self.name\n\n def __call__(self, *args, **kwargs):\n _thread_local.in_fusion = True\n try:\n return self._call(*args, **kwargs)\n finally:\n _thread_local.in_fusion = False\n\n def _call(self, *args, **kwargs):\n axis = kwargs['axis'] if 'axis' in kwargs else None\n if len(args) == 0:\n raise Exception('number of arguments must be more than 0')\n if builtins.any(\n not isinstance(_, (core.ndarray, numpy.ndarray, numpy.generic))\n for _ in args):\n raise TypeError('Invalid argument type for \\'{}\\': ({})'.format(\n self.name,\n ', '.join(repr(type(_)) for _ in args)))\n\n def is_cupy_data(a):\n return isinstance(a, (core.ndarray, numpy.generic))\n if builtins.all(is_cupy_data(_) for _ in args):\n types = [_.dtype for _ in args]\n key = tuple(types)\n if key not in self._memo:\n if self.input_num is not None:\n nin = self.input_num\n else:\n nin = len(args)\n f = _get_fusion(self.func, nin, self.reduce,\n self.post_map, self.identity, types, self.name)\n self._memo[key] = f\n f = self._memo[key]\n if self.reduce is None:\n return f(*args)\n else:\n return f(*args, axis=axis)\n else:\n if builtins.any(type(_) is core.ndarray for _ in args):\n types = '.'.join(repr(type(_)) for _ in args)\n message = \"Can't fuse \\n %s(%s)\" % (self.name, types)\n warnings.warn(message)\n if self.reduce is None:\n return self.func(*args)\n elif axis is None:\n return self.post_map(self.reduce(self.func(*args)))\n else:\n return self.post_map(self.reduce(self.func(*args), axis=axis))\n\n\ndef fuse(*args, **kwargs):\n \"\"\"Function fusing decorator.\n\n This decorator can be used to define an elementwise or reduction kernel\n more easily than `ElementwiseKernel` class or `ReductionKernel` class.\n\n This decorator makes `Fusion` class from the given function.\n\n Args:\n input_num (int): Number of input arguments of the given function.\n reduce (function): The reduce function which is applied after\n pre-mapping step. If not assigned, reduction step is skipped.\n post_map (function): Mapping function for reduced values.\n If not assigned, post_map step is skipped.\n kernel_name (str): Name of the fused kernel function.\n If omitted, the name of the decorated function is used.\n\n .. 
note::\n This API is currently experimental and the interface may be changed in\n the future version.\n\n \"\"\"\n\n def wrapper(\n f, input_num=None, reduce=None, post_map=lambda x: x,\n kernel_name=None):\n return Fusion(f, input_num, reduce, post_map, kernel_name)\n\n if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):\n return functools.update_wrapper(wrapper(args[0]), args[0])\n else:\n return lambda f: functools.update_wrapper(\n wrapper(f, *args, **kwargs), f)\n\n\nclass ufunc(core.ufunc):\n\n def __init__(self, fusion_op, cupy_op, numpy_op):\n self.name = fusion_op.name\n self.nin = fusion_op.nin\n self.nout = fusion_op.nout\n self.nargs = fusion_op.nargs\n self._ops = fusion_op._ops\n self._preamble = fusion_op._preamble\n self.__doc__ = cupy_op.__doc__\n self._params = fusion_op._params\n self._routine_cache = fusion_op._routine_cache\n\n self._fusion_op = fusion_op\n self._cupy_op = cupy_op\n self._numpy_op = numpy_op\n\n def __repr__(self):\n return repr(self._cupy_op)\n\n def __call__(self, *args, **kwargs):\n in_fusion = getattr(_thread_local, 'in_fusion', False)\n if in_fusion:\n if builtins.any(isinstance(_, _FusionRef) for _ in args):\n return _convert(self._fusion_op)(*args, **kwargs)\n elif builtins.any(isinstance(_, numpy.ndarray) for _ in args):\n return self._numpy_op(*args, **kwargs)\n\n return self._cupy_op(*args, **kwargs)\n\n __doc__ = core.ufunc.__doc__\n __call__.__doc__ = core.ufunc.__call__.__doc__\n\n\ndef _create_ufunc(cupy_ufunc, numpy_ufunc):\n return ufunc(cupy_ufunc, cupy_ufunc, numpy_ufunc)\n\n\nwhere = ufunc(sorting.search._where_ufunc,\n sorting.search.where, numpy.where)\n\nclip = ufunc(core._clip, math.misc.clip, numpy.clip)\n\ncopy = ufunc(core.elementwise_copy,\n creation.from_data.copy, numpy.copy)\n\nbitwise_and = _create_ufunc(core.bitwise_and, numpy.bitwise_and)\nbitwise_or = _create_ufunc(core.bitwise_or, numpy.bitwise_or)\nbitwise_xor = _create_ufunc(core.bitwise_xor, numpy.bitwise_xor)\ninvert = _create_ufunc(core.invert, numpy.invert)\nleft_shift = _create_ufunc(core.left_shift, numpy.left_shift)\nright_shift = _create_ufunc(core.right_shift, numpy.right_shift)\n\ngreater = _create_ufunc(core.greater, numpy.greater)\ngreater_equal = _create_ufunc(core.greater_equal, numpy.greater_equal)\nless = _create_ufunc(core.less, numpy.less)\nless_equal = _create_ufunc(core.less_equal, numpy.less_equal)\nequal = _create_ufunc(core.equal, numpy.equal)\nnot_equal = _create_ufunc(core.not_equal, numpy.not_equal)\n\nisfinite = _create_ufunc(logic.content.isfinite, numpy.isfinite)\nisinf = _create_ufunc(logic.content.isinf, numpy.isinf)\nisnan = _create_ufunc(logic.content.isnan, numpy.isnan)\n\nlogical_and = _create_ufunc(logic.ops.logical_and, numpy.logical_and)\nlogical_or = _create_ufunc(logic.ops.logical_or, numpy.logical_or)\nlogical_not = _create_ufunc(logic.ops.logical_not, numpy.logical_not)\nlogical_xor = _create_ufunc(logic.ops.logical_xor, numpy.logical_xor)\n\nsin = _create_ufunc(math.trigonometric.sin, numpy.sin)\ncos = _create_ufunc(math.trigonometric.cos, numpy.cos)\ntan = _create_ufunc(math.trigonometric.tan, numpy.tan)\narcsin = _create_ufunc(math.trigonometric.arcsin, numpy.arcsin)\narccos = _create_ufunc(math.trigonometric.arccos, numpy.arccos)\narctan = _create_ufunc(math.trigonometric.arctan, numpy.arctan)\narctan2 = _create_ufunc(math.trigonometric.arctan2, numpy.arctan2)\nhypot = _create_ufunc(math.trigonometric.hypot, numpy.hypot)\ndeg2rad = _create_ufunc(math.trigonometric.deg2rad, numpy.deg2rad)\nrad2deg = 
_create_ufunc(math.trigonometric.rad2deg, numpy.rad2deg)\ndegrees = _create_ufunc(math.trigonometric.degrees, numpy.degrees)\nradians = _create_ufunc(math.trigonometric.radians, numpy.radians)\n\nsinh = _create_ufunc(math.hyperbolic.sinh, numpy.sinh)\ncosh = _create_ufunc(math.hyperbolic.cosh, numpy.cosh)\ntanh = _create_ufunc(math.hyperbolic.tanh, numpy.tanh)\narcsinh = _create_ufunc(math.hyperbolic.arcsinh, numpy.arcsinh)\narccosh = _create_ufunc(math.hyperbolic.arccosh, numpy.arccosh)\narctanh = _create_ufunc(math.hyperbolic.arctanh, numpy.arctanh)\n\nrint = _create_ufunc(math.rounding.rint, numpy.rint)\nfloor = _create_ufunc(math.rounding.floor, numpy.floor)\nceil = _create_ufunc(math.rounding.ceil, numpy.ceil)\ntrunc = _create_ufunc(math.rounding.trunc, numpy.trunc)\nfix = _create_ufunc(math.rounding.fix, numpy.fix)\n\nexp = _create_ufunc(math.explog.exp, numpy.exp)\nexpm1 = _create_ufunc(math.explog.expm1, numpy.expm1)\nexp2 = _create_ufunc(math.explog.exp2, numpy.exp2)\nlog = _create_ufunc(math.explog.log, numpy.log)\nlog10 = _create_ufunc(math.explog.log10, numpy.log10)\nlog2 = _create_ufunc(math.explog.log2, numpy.log2)\nlog1p = _create_ufunc(math.explog.log1p, numpy.log1p)\nlogaddexp = _create_ufunc(math.explog.logaddexp, numpy.logaddexp)\nlogaddexp2 = _create_ufunc(math.explog.logaddexp2, numpy.logaddexp2)\n\nsignbit = _create_ufunc(math.floating.signbit, numpy.signbit)\ncopysign = _create_ufunc(math.floating.copysign, numpy.copysign)\nldexp = _create_ufunc(math.floating.ldexp, numpy.ldexp)\nfrexp = _create_ufunc(math.floating.frexp, numpy.frexp)\nnextafter = _create_ufunc(math.floating.nextafter, numpy.nextafter)\n\nadd = _create_ufunc(math.arithmetic.add, numpy.add)\nreciprocal = _create_ufunc(math.arithmetic.reciprocal, numpy.reciprocal)\nnegative = _create_ufunc(math.arithmetic.negative, numpy.negative)\nangle = _create_ufunc(math.arithmetic.angle, numpy.angle)\nconj = _create_ufunc(math.arithmetic.conj, numpy.conj)\nreal = _create_ufunc(math.arithmetic.real, numpy.real)\nimag = _create_ufunc(math.arithmetic.imag, numpy.imag)\nmultiply = _create_ufunc(math.arithmetic.multiply, numpy.multiply)\ndivide = _create_ufunc(math.arithmetic.divide, numpy.divide)\npower = _create_ufunc(math.arithmetic.power, numpy.power)\nsubtract = _create_ufunc(math.arithmetic.subtract, numpy.subtract)\ntrue_divide = _create_ufunc(math.arithmetic.true_divide, numpy.true_divide)\nfloor_divide = _create_ufunc(math.arithmetic.floor_divide, numpy.floor_divide)\nfmod = _create_ufunc(math.arithmetic.fmod, numpy.fmod)\nmod = _create_ufunc(math.arithmetic.remainder, numpy.mod)\nmodf = _create_ufunc(math.arithmetic.modf, numpy.modf)\nremainder = _create_ufunc(math.arithmetic.remainder, numpy.remainder)\n\nsqrt = _create_ufunc(math.misc.sqrt, numpy.sqrt)\nsqrt_fixed = _create_ufunc(math.misc.sqrt_fixed, numpy.sqrt)\nsquare = _create_ufunc(math.misc.square, numpy.square)\nabsolute = _create_ufunc(math.misc.absolute, numpy.absolute)\nabs = _create_ufunc(math.misc.absolute, numpy.abs)\nsign = _create_ufunc(math.misc.sign, numpy.sign)\nmaximum = _create_ufunc(math.misc.maximum, numpy.maximum)\nminimum = _create_ufunc(math.misc.minimum, numpy.minimum)\nfmax = _create_ufunc(math.misc.fmax, numpy.fmax)\nfmin = _create_ufunc(math.misc.fmin, numpy.fmin)\n\n\nclass reduction(object):\n\n def __init__(self, cupy_op, numpy_op):\n self._cupy_op = cupy_op\n self._numpy_op = numpy_op\n self.__doc__ = cupy_op.__doc__\n\n def __call__(self, *args, **kwargs):\n if builtins.any(type(_) == numpy.ndarray for _ in args):\n return 
self._numpy_op(*args, **kwargs)\n else:\n return self._cupy_op(*args, **kwargs)\n\n\nall = reduction(logic.truth.all, numpy.all)\nany = reduction(logic.truth.any, numpy.any)\nsum = reduction(math.sumprod.sum, numpy.sum)\nprod = reduction(math.sumprod.prod, numpy.prod)\namax = reduction(statistics.order.amax, numpy.amax)\namin = reduction(statistics.order.amin, numpy.amin)\n\n\nall._raw = core._all\nany._raw = core._any\nsum._raw = core._sum\nprod._raw = core._prod\namax._raw = core._amax\namin._raw = core._amin\n", "path": "cupy/core/fusion.py"}], "after_files": [{"content": "import functools\nimport six\nfrom six.moves import builtins\nimport string\nimport threading\nimport warnings\n\nimport numpy\n\nfrom cupy.core import core\nfrom cupy import creation\nfrom cupy import logic\nfrom cupy import math\nfrom cupy import sorting\nfrom cupy import statistics\n\n\n_thread_local = threading.local()\n\n\nclass FusionOp(object):\n\n def __init__(self, name, operation, param_names,\n nin, nout, in_vars, out_vars, types, num):\n self.name = name\n self.operation = operation\n self.param_names = param_names\n self.nin = nin\n self.nout = nout\n self.in_vars = in_vars\n self.out_vars = out_vars\n self.types = types\n self.num = num\n\n def __repr__(self):\n return \"<FusionOp, name={}, types=[{}]>\".format(\n self.name, ', '.join(_.name for _ in self.types))\n\n\nclass _FusionVar(object):\n\n def __init__(self, num, ty, const=None):\n self.num = num\n self.ty = ty\n self.const = const\n\n def __repr__(self):\n return \"<_FusionVar, num={}, ty={}, const={}>\".format(\n self.num, self.ty, self.const)\n\n\nclass _FusionMem(object):\n\n def __init__(self, var_list):\n self.op_list = []\n self.var_list = var_list[:]\n\n def __repr__(self):\n return \"<_FusionMem, op_list={}, var_list={}>\".format(\n self.op_list,\n self.var_list)\n\n def get_fresh(self, ty, **kwargs):\n n = len(self.var_list)\n ret = _FusionVar(n, ty, **kwargs)\n self.var_list.append(ret)\n return ret\n\n def set_op(self, name, operation, param_names,\n nin, nout, in_vars, out_vars, types):\n num = len(self.op_list)\n op = FusionOp(name, operation, param_names,\n nin, nout, in_vars, out_vars, types, num)\n self.op_list.append(op)\n\n\nclass _FusionRef(object):\n\n def __init__(self, var, mem):\n self._var = var\n self.dtype = var.ty\n self._mem = mem\n\n def __repr__(self):\n return \"<_FusionRef, dtype=%s>\" % self.dtype\n\n def __neg__(self):\n return negative(self)\n\n def __add__(self, other):\n return add(self, other)\n\n def __iadd__(self, other):\n return add(self, other, self)\n\n def __radd__(self, other):\n return add(other, self)\n\n def __sub__(self, other):\n return subtract(self, other)\n\n def __isub__(self, other):\n return subtract(self, other, self)\n\n def __rsub__(self, other):\n return subtract(other, self)\n\n def __mul__(self, other):\n return multiply(self, other)\n\n def __imul__(self, other):\n return multiply(self, other, self)\n\n def __rmul__(self, other):\n return multiply(other, self)\n\n def __div__(self, other):\n return divide(self, other)\n\n def __idiv__(self, other):\n return divide(self, other, self)\n\n def __rdiv__(self, other):\n return divide(other, self)\n\n def __truediv__(self, other):\n return true_divide(self, other)\n\n def __itruediv__(self, other):\n return true_divide(self, other, self)\n\n def __rtruediv__(self, other):\n return true_divide(other, self)\n\n def __floordiv__(self, other):\n return floor_divide(self, other)\n\n def __ifloordiv__(self, other):\n return floor_divide(self, 
other, self)\n\n def __rfloordiv__(self, other):\n return floor_divide(other, self)\n\n def __mod__(self, other):\n return remainder(self, other)\n\n def __imod__(self, other):\n return remainder(self, other, self)\n\n def __rmod__(self, other):\n return remainder(other, self)\n\n def __pow__(x, y):\n return power(x, y)\n\n def __ipow__(self, other):\n return power(self, other, self)\n\n def __lshift__(self, other):\n return left_shift(self, other)\n\n def __ilshift__(self, other):\n return left_shift(self, other, self)\n\n def __rlshift__(self, other):\n return left_shift(other, self)\n\n def __rshift__(self, other):\n return right_shift(self, other)\n\n def __irshift__(self, other):\n return right_shift(self, other, self)\n\n def __rrshift__(self, other):\n return right_shift(other, self)\n\n def __and__(self, other):\n return bitwise_and(self, other)\n\n def __iand__(self, other):\n return bitwise_and(self, other, self)\n\n def __rand__(self, other):\n return bitwise_and(other, self)\n\n def __or__(self, other):\n return bitwise_or(self, other)\n\n def __ior__(self, other):\n return bitwise_or(self, other, self)\n\n def __ror__(self, other):\n return bitwise_or(other, self)\n\n def __xor__(self, other):\n return bitwise_xor(self, other)\n\n def __ixor__(self, other):\n return bitwise_xor(self, other, self)\n\n def __rxor__(self, other):\n return bitwise_xor(other, self)\n\n def __invert__(self):\n return invert(self)\n\n def __lt__(self, other):\n return less(self, other)\n\n def __le__(self, other):\n return less_equal(self, other)\n\n def __eq__(self, other):\n return equal(self, other)\n\n def __ne__(self, other):\n return not_equal(self, other)\n\n def __gt__(self, other):\n return greater(self, other)\n\n def __ge__(self, other):\n return greater_equal(self, other)\n\n def __nonzero__(self):\n raise Exception(\"Can't cast to bool\")\n\n def __bool__(self):\n raise Exception(\"Can't cast to bool\")\n\n def __setitem__(self, slices, value):\n if slices is Ellipsis or (isinstance(slices, slice) and\n slices == slice(None)):\n copy(value, self)\n else:\n raise ValueError('The fusion supports `[...]` or `[:]`.')\n\n def copy(self):\n return copy(self)\n\n\n_kind_score = {\n 'b': 0,\n 'u': 1,\n 'i': 1,\n 'f': 2,\n 'c': 3,\n}\n\n_dtype_to_ctype = {\n numpy.dtype('float64'): 'double',\n numpy.dtype('float32'): 'float',\n numpy.dtype('float16'): 'float16',\n numpy.dtype('complex128'): 'complex<double>',\n numpy.dtype('complex64'): 'complex<float>',\n numpy.dtype('int64'): 'long long',\n numpy.dtype('int32'): 'int',\n numpy.dtype('int16'): 'short',\n numpy.dtype('int8'): 'signed char',\n numpy.dtype('uint64'): 'unsigned long long',\n numpy.dtype('uint32'): 'unsigned int',\n numpy.dtype('uint16'): 'unsigned short',\n numpy.dtype('uint8'): 'unsigned char',\n numpy.dtype('bool'): 'bool',\n}\n\n_dtype_list = [numpy.dtype(_) for _ in '?bhilqBHILQefdFD']\n\n\ndef _normalize_arg(arg, mem):\n arg_type = type(arg)\n if arg_type is _FusionRef:\n return arg._var\n is_scalar = arg_type in six.integer_types + (float, bool, complex)\n is_ndarray = hasattr(arg, 'dtype') and arg.dtype in _dtype_list\n if is_scalar or is_ndarray:\n return mem.get_fresh(numpy.dtype(arg_type), const=arg)\n raise Exception('Unsupported type %s' % arg_type)\n\n\ndef _convert(f):\n if type(f) is core.ufunc:\n return _convert_from_ufunc(f)\n if type(f) is core.ElementwiseKernel:\n return _convert_from_elementwise(f)\n raise Exception(\"Can't convert from %s to FusionOp\" % type(f))\n\n\ndef _should_use_min_scalar(in_args):\n 
max_array_kind = -2\n max_scalar_kind = -1\n for i in in_args:\n kind = _kind_score[i.ty.kind]\n if i.const is None:\n max_array_kind = max(max_array_kind, kind)\n else:\n max_scalar_kind = max(max_scalar_kind, kind)\n return (max_scalar_kind != -1 and\n max_array_kind >= max_scalar_kind)\n\n\ndef _convert_from_ufunc(ufunc):\n nin = ufunc.nin\n nout = ufunc.nout\n\n def get_mem(args):\n for i in args:\n if type(i) == _FusionRef:\n return i._mem\n raise Exception('number of ndarray arguments must be more than 0')\n\n def can_cast1(args, ty_ins):\n for i in six.moves.range(nin):\n if args[i].const is None:\n if not numpy.can_cast(args[i].ty, ty_ins[i]):\n return False\n else:\n if not numpy.can_cast(args[i].const, ty_ins[i]):\n return False\n return True\n\n def can_cast2(args, ty_ins):\n for i in six.moves.range(nin):\n if not numpy.can_cast(args[i].ty, ty_ins[i]):\n return False\n return True\n\n def res(*args, **kwargs):\n mem = get_mem(args)\n var_list = [_normalize_arg(_, mem) for _ in args]\n if 'out' in kwargs:\n var_list.append(_normalize_arg(kwargs.pop('out'), mem))\n if kwargs:\n raise TypeError('Wrong arguments %s' % kwargs)\n assert nin <= len(var_list) <= nin + nout\n in_vars = var_list[:nin]\n out_vars = var_list[nin:]\n can_cast = can_cast1 if _should_use_min_scalar(in_vars) else can_cast2\n for ty_ins, ty_outs, op in ufunc._ops:\n ty_ins = [numpy.dtype(_) for _ in ty_ins]\n ty_outs = [numpy.dtype(_) for _ in ty_outs]\n if can_cast(in_vars, ty_ins):\n param_names = (['in%d' % i for i in six.moves.range(nin)] +\n ['out%d' % i for i in six.moves.range(nout)])\n ret = []\n for i in six.moves.range(nout):\n if i >= len(out_vars):\n v = mem.get_fresh(ty_outs[i])\n out_vars.append(v)\n ret.append(_FusionRef(v, mem))\n elif numpy.can_cast(ty_outs[i], out_vars[i].ty,\n \"same_kind\"):\n v = out_vars[i]\n ret.append(_FusionRef(v, mem))\n else:\n raise TypeError(\n 'output (typecode \\'{}\\') could not be coerced '\n 'to provided output parameter (typecode \\'{}\\') '\n 'according to the casting rule '\n '\"same_kind\"'.format(\n ty_outs[i].char, out_vars[i].ty.char))\n mem.set_op(ufunc.name, op, param_names, nin, nout,\n in_vars, out_vars, ty_ins + ty_outs)\n return ret[0] if len(ret) == 1 else tuple(ret)\n raise TypeError('Invalid type cast in \\'{}\\': {} -> {}'.format(\n ufunc.name,\n [_.ty for _ in in_vars],\n [_.ty for _ in out_vars]))\n return res\n\n\ndef _convert_from_elementwise(elem):\n raise Exception('Not Impletmented')\n\n\ndef _gather_submodules(ops):\n return {(op.name, tuple(op.types)): op for op in ops}\n\n\ndef _get_params(var_list):\n return ['%s v%d' % (var.ty, var.num) for var in var_list]\n\n\ndef _get_out_params(var_list):\n return ['%s ret%d' % (var.ty, i) for i, var in enumerate(var_list)]\n\n\ndef _get_declaration_from_var(var):\n if var.const is None:\n return '%s v%d;\\n' % (_dtype_to_ctype[var.ty], var.num)\n\n c = var.const\n val = numpy.asscalar(c) if hasattr(c, 'dtype') else c\n\n if isinstance(val, bool):\n init = '= %s' % str(c).lower()\n elif isinstance(val, complex):\n init = '(%s, %s)' % (c.real, c.imag)\n elif isinstance(val, six.integer_types + (float,)):\n init = '= %s' % str(c)\n else:\n raise TypeError('Invalid constant type: {}'.format(type(c)))\n return 'const %s v%d %s;\\n' % (_dtype_to_ctype[var.ty], var.num, init)\n\n\ndef _get_declaration_from_op(op):\n return ''.join('%s v%d_%d;\\n' % (_dtype_to_ctype[t], op.num, j)\n for j, t in enumerate(op.types))\n\n\ndef _get_operation_code(op):\n code = ''.join('v%d_%d = v%d;\\n' % (op.num, i, 
v.num)\n for i, v in enumerate(op.in_vars))\n params = ['v%d_%d' % (op.num, i)\n for i in six.moves.range(op.nin + op.nout)]\n code += op.name + '(' + ', '.join(params) + ');\\n'\n code += ''.join('v%d = v%d_%d;\\n' %\n (v.num, op.num, i + op.nin)\n for i, v in enumerate(op.out_vars))\n return code\n\n\ndef _get_submodule_code(op):\n parameters = ', '.join('%s &%s' % (_dtype_to_ctype[t], name)\n for i, (name, t)\n in enumerate(zip(op.param_names, op.types)))\n typedecl = ''.join(('typedef %s in%d_type;\\n' % (_dtype_to_ctype[t], i))\n for i, t in enumerate(op.types[:op.nin]))\n typedecl += ''.join(('typedef %s out%d_type;\\n' % (_dtype_to_ctype[t], i))\n for i, t in enumerate(op.types[op.nin:]))\n module_code = string.Template('''\n __device__ void ${name}(${parameters}) {\n ${typedecl}\n ${operation};\n }\n ''').substitute(\n name=op.name,\n parameters=parameters,\n operation=op.operation,\n typedecl=typedecl)\n return module_code + '\\n'\n\n\ndef _get_pre_code(in_vars, out_vars, operation):\n in_params = ', '.join('%s v%s' % (_dtype_to_ctype[v.ty], v.num)\n for v in in_vars)\n out_params = ''.join('%s v%s;\\n' % (_dtype_to_ctype[v.ty], v.num)\n for v in out_vars)\n module_code = string.Template('''\n __device__ ${return_type} _pre_map(${in_params}) {\n ${out_params}\n ${operation};\n return ${return_var};\n }\n ''').substitute(\n return_type=_dtype_to_ctype[out_vars[0].ty],\n in_params=in_params,\n out_params=out_params,\n operation=operation,\n return_var='v%d' % out_vars[0].num)\n return module_code\n\n\ndef _get_reduce_op(ops, dtype):\n for i in ops._ops:\n if numpy.can_cast(dtype.type, i[0][0]):\n return i\n raise TypeError(\"Type is mismatched. %s(...), %s\" % (ops.name, dtype.type))\n\n\ndef _get_post_code(post_vars, operation, post_out):\n module_code = string.Template('''\n __device__ ${return_type} _post_map(${arg_type} v0) {\n ${operation};\n return v${return_var};\n }\n ''').substitute(\n arg_type=_dtype_to_ctype[post_vars[0].ty],\n return_type=_dtype_to_ctype[post_vars[post_out.num].ty],\n operation=operation,\n return_var=post_out.num)\n return module_code\n\n\ndef _get_fix_code(data_type, fixed_type, operation):\n module_code = string.Template('''\n __device__ ${fixed_type} _post_fix(${data_type} a) {\n ${fixed_type} out0;\n ${operation};\n return out0;\n }\n ''').substitute(\n data_type=data_type,\n fixed_type=_dtype_to_ctype[fixed_type],\n operation=operation)\n return module_code\n\n\ndef _get_fusion(func, nin, reduce, post_map, identity, input_types, name):\n in_vars = [_FusionVar(i, t) for i, t in enumerate(input_types)]\n mem = _FusionMem(in_vars)\n in_refs = [_FusionRef(_, mem) for _ in in_vars]\n out_refs = func(*in_refs)\n out_refs = list(out_refs) if type(out_refs) == tuple else [out_refs]\n out_refs = [_ for _ in out_refs if _ is not None]\n out_refs = [_FusionRef(_normalize_arg(_, mem), mem) for _ in out_refs]\n out_vars = [_normalize_arg(copy(_), mem) for _ in out_refs]\n nout = len(out_vars)\n op_list = mem.op_list\n tmpvars = mem.var_list[len(in_vars):]\n if nout > 0:\n tmpvars = tmpvars[:-nout]\n\n in_params = ', '.join(_get_params(in_vars[:nin]))\n out_params = ', '.join(_get_params(out_vars))\n operation = ''.join(_get_declaration_from_var(_) for _ in tmpvars)\n operation += ''.join(_get_declaration_from_op(_) for _ in op_list)\n operation += '\\n'.join(_get_operation_code(_) for _ in op_list)\n\n if reduce is None:\n if not out_params:\n in_params = ', '.join(_get_params(in_vars[:-1]))\n out_params = ', '.join(_get_params([in_vars[-1]]))\n submodules = 
_gather_submodules(op_list)\n submodule_code = ''.join(_get_submodule_code(_)\n for _ in submodules.values())\n return core.ElementwiseKernel(in_params, out_params,\n operation, preamble=submodule_code,\n name=name)\n else:\n if nout != 1:\n raise Exception(\"Wrong number of number of arguments\")\n # pre-map\n pre_type = out_vars[0].ty\n pre_code = _get_pre_code(in_vars, out_vars, operation)\n\n # reduce\n reduce_op = _get_reduce_op(reduce._raw, pre_type)\n reduce_code = reduce_op[2][1]\n reduce_type = numpy.dtype(reduce_op[1][0])\n rtype = reduce_op[2][3]\n post_type = \"type_in0_raw\" if rtype is None else rtype\n pre_code += \"typedef %s type_in0_raw;\\n\" % _dtype_to_ctype[reduce_type]\n\n # post-map\n post_in = [_FusionVar(0, reduce_type)]\n mem = _FusionMem(post_in)\n post_in_ref = [_FusionRef(_, mem) for _ in post_in]\n post_out = _normalize_arg(post_map(*post_in_ref), mem)\n if type(post_out) == tuple:\n raise Exception(\"Can't reduce a tuple\")\n post_vars = mem.var_list\n post_ops = mem.op_list\n post_code = ''.join(_get_declaration_from_var(_)\n for _ in post_vars[1:])\n post_code += ''.join(_get_declaration_from_op(_) for _ in post_ops)\n post_code += '\\n'.join(_get_operation_code(_) for _ in post_ops)\n post_code = _get_post_code(post_vars, post_code, post_out)\n post_code += (\n \"typedef %s type_out0_raw;\\n\" % _dtype_to_ctype[reduce_type])\n post_code += _get_fix_code(post_type, reduce_type, reduce_op[2][2])\n\n submodules = _gather_submodules(op_list + post_ops)\n submodule_code = ''.join(_get_submodule_code(v)\n for v in submodules.values())\n submodule_code += reduce._raw._preamble + pre_code + post_code\n operation_args = ['v' + str(i) for i in six.moves.range(nin)]\n operation = '_pre_map(' + ', '.join(operation_args) + ')'\n out_params = '%s res' % post_out.ty\n return core.ReductionKernel(in_params, out_params, operation,\n reduce_code,\n 'res = _post_map(_post_fix(a))',\n identity,\n name=name,\n reduce_type=post_type,\n preamble=submodule_code)\n\n\nclass Fusion(object):\n\n \"\"\"Function class.\n\n This class can be get by using `fuse` function and\n works like `ElementwiseKernel` or `ReductionKernel`.\n\n Attributes:\n func (function): The function before fusing.\n name (str): The name of the function.\n reduce (ufunc): Reduction ufunc.\n post_map (function): Mapping function for reduced values.\n \"\"\"\n\n def __init__(self, func, input_num, reduce, post_map, name=None):\n self.func = func\n self.name = name or func.__name__\n self.input_num = input_num\n self.reduce = reduce\n self.post_map = post_map\n self.identity = None if reduce is None else self.reduce._raw.identity\n self._memo = {}\n\n def __repr__(self):\n return \"<Fusion '%s'>\" % self.name\n\n def __call__(self, *args, **kwargs):\n _thread_local.in_fusion = True\n try:\n return self._call(*args, **kwargs)\n finally:\n _thread_local.in_fusion = False\n\n def _call(self, *args, **kwargs):\n axis = kwargs['axis'] if 'axis' in kwargs else None\n if len(args) == 0:\n raise Exception('number of arguments must be more than 0')\n if builtins.any(\n not isinstance(_, (core.ndarray, numpy.ndarray, numpy.generic))\n for _ in args):\n raise TypeError('Invalid argument type for \\'{}\\': ({})'.format(\n self.name,\n ', '.join(repr(type(_)) for _ in args)))\n\n def is_cupy_data(a):\n return isinstance(a, (core.ndarray, numpy.generic))\n if builtins.all(is_cupy_data(_) for _ in args):\n types = [_.dtype for _ in args]\n key = tuple(types)\n if key not in self._memo:\n if self.input_num is not None:\n nin = 
self.input_num\n else:\n nin = len(args)\n f = _get_fusion(self.func, nin, self.reduce,\n self.post_map, self.identity, types, self.name)\n self._memo[key] = f\n f = self._memo[key]\n if self.reduce is None:\n return f(*args)\n else:\n return f(*args, axis=axis)\n else:\n if builtins.any(type(_) is core.ndarray for _ in args):\n types = '.'.join(repr(type(_)) for _ in args)\n message = \"Can't fuse \\n %s(%s)\" % (self.name, types)\n warnings.warn(message)\n if self.reduce is None:\n return self.func(*args)\n elif axis is None:\n return self.post_map(self.reduce(self.func(*args)))\n else:\n return self.post_map(self.reduce(self.func(*args), axis=axis))\n\n\ndef fuse(*args, **kwargs):\n \"\"\"Function fusing decorator.\n\n This decorator can be used to define an elementwise or reduction kernel\n more easily than `ElementwiseKernel` class or `ReductionKernel` class.\n\n This decorator makes `Fusion` class from the given function.\n\n Args:\n input_num (int): Number of input arguments of the given function.\n reduce (function): The reduce function which is applied after\n pre-mapping step. If not assigned, reduction step is skipped.\n post_map (function): Mapping function for reduced values.\n If not assigned, post_map step is skipped.\n kernel_name (str): Name of the fused kernel function.\n If omitted, the name of the decorated function is used.\n\n .. note::\n This API is currently experimental and the interface may be changed in\n the future version.\n\n \"\"\"\n\n def wrapper(\n f, input_num=None, reduce=None, post_map=lambda x: x,\n kernel_name=None):\n return Fusion(f, input_num, reduce, post_map, kernel_name)\n\n if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):\n return functools.update_wrapper(wrapper(args[0]), args[0])\n else:\n return lambda f: functools.update_wrapper(\n wrapper(f, *args, **kwargs), f)\n\n\nclass ufunc(core.ufunc):\n\n def __init__(self, fusion_op, cupy_op, numpy_op):\n self.name = fusion_op.name\n self.nin = fusion_op.nin\n self.nout = fusion_op.nout\n self.nargs = fusion_op.nargs\n self._ops = fusion_op._ops\n self._preamble = fusion_op._preamble\n self.__doc__ = cupy_op.__doc__\n self._params = fusion_op._params\n self._routine_cache = fusion_op._routine_cache\n\n self._fusion_op = fusion_op\n self._cupy_op = cupy_op\n self._numpy_op = numpy_op\n\n def __repr__(self):\n return repr(self._cupy_op)\n\n def __call__(self, *args, **kwargs):\n in_fusion = getattr(_thread_local, 'in_fusion', False)\n if in_fusion:\n if builtins.any(isinstance(_, _FusionRef) for _ in args):\n return _convert(self._fusion_op)(*args, **kwargs)\n elif builtins.any(isinstance(_, numpy.ndarray) for _ in args):\n return self._numpy_op(*args, **kwargs)\n\n return self._cupy_op(*args, **kwargs)\n\n __doc__ = core.ufunc.__doc__\n __call__.__doc__ = core.ufunc.__call__.__doc__\n\n\ndef _create_ufunc(cupy_ufunc, numpy_ufunc):\n return ufunc(cupy_ufunc, cupy_ufunc, numpy_ufunc)\n\n\nwhere = ufunc(sorting.search._where_ufunc,\n sorting.search.where, numpy.where)\n\nclip = ufunc(core._clip, math.misc.clip, numpy.clip)\n\ncopy = ufunc(core.elementwise_copy,\n creation.from_data.copy, numpy.copy)\n\nbitwise_and = _create_ufunc(core.bitwise_and, numpy.bitwise_and)\nbitwise_or = _create_ufunc(core.bitwise_or, numpy.bitwise_or)\nbitwise_xor = _create_ufunc(core.bitwise_xor, numpy.bitwise_xor)\ninvert = _create_ufunc(core.invert, numpy.invert)\nleft_shift = _create_ufunc(core.left_shift, numpy.left_shift)\nright_shift = _create_ufunc(core.right_shift, numpy.right_shift)\n\ngreater = 
_create_ufunc(core.greater, numpy.greater)\ngreater_equal = _create_ufunc(core.greater_equal, numpy.greater_equal)\nless = _create_ufunc(core.less, numpy.less)\nless_equal = _create_ufunc(core.less_equal, numpy.less_equal)\nequal = _create_ufunc(core.equal, numpy.equal)\nnot_equal = _create_ufunc(core.not_equal, numpy.not_equal)\n\nisfinite = _create_ufunc(logic.content.isfinite, numpy.isfinite)\nisinf = _create_ufunc(logic.content.isinf, numpy.isinf)\nisnan = _create_ufunc(logic.content.isnan, numpy.isnan)\n\nlogical_and = _create_ufunc(logic.ops.logical_and, numpy.logical_and)\nlogical_or = _create_ufunc(logic.ops.logical_or, numpy.logical_or)\nlogical_not = _create_ufunc(logic.ops.logical_not, numpy.logical_not)\nlogical_xor = _create_ufunc(logic.ops.logical_xor, numpy.logical_xor)\n\nsin = _create_ufunc(math.trigonometric.sin, numpy.sin)\ncos = _create_ufunc(math.trigonometric.cos, numpy.cos)\ntan = _create_ufunc(math.trigonometric.tan, numpy.tan)\narcsin = _create_ufunc(math.trigonometric.arcsin, numpy.arcsin)\narccos = _create_ufunc(math.trigonometric.arccos, numpy.arccos)\narctan = _create_ufunc(math.trigonometric.arctan, numpy.arctan)\narctan2 = _create_ufunc(math.trigonometric.arctan2, numpy.arctan2)\nhypot = _create_ufunc(math.trigonometric.hypot, numpy.hypot)\ndeg2rad = _create_ufunc(math.trigonometric.deg2rad, numpy.deg2rad)\nrad2deg = _create_ufunc(math.trigonometric.rad2deg, numpy.rad2deg)\ndegrees = _create_ufunc(math.trigonometric.degrees, numpy.degrees)\nradians = _create_ufunc(math.trigonometric.radians, numpy.radians)\n\nsinh = _create_ufunc(math.hyperbolic.sinh, numpy.sinh)\ncosh = _create_ufunc(math.hyperbolic.cosh, numpy.cosh)\ntanh = _create_ufunc(math.hyperbolic.tanh, numpy.tanh)\narcsinh = _create_ufunc(math.hyperbolic.arcsinh, numpy.arcsinh)\narccosh = _create_ufunc(math.hyperbolic.arccosh, numpy.arccosh)\narctanh = _create_ufunc(math.hyperbolic.arctanh, numpy.arctanh)\n\nrint = _create_ufunc(math.rounding.rint, numpy.rint)\nfloor = _create_ufunc(math.rounding.floor, numpy.floor)\nceil = _create_ufunc(math.rounding.ceil, numpy.ceil)\ntrunc = _create_ufunc(math.rounding.trunc, numpy.trunc)\nfix = _create_ufunc(math.rounding.fix, numpy.fix)\n\nexp = _create_ufunc(math.explog.exp, numpy.exp)\nexpm1 = _create_ufunc(math.explog.expm1, numpy.expm1)\nexp2 = _create_ufunc(math.explog.exp2, numpy.exp2)\nlog = _create_ufunc(math.explog.log, numpy.log)\nlog10 = _create_ufunc(math.explog.log10, numpy.log10)\nlog2 = _create_ufunc(math.explog.log2, numpy.log2)\nlog1p = _create_ufunc(math.explog.log1p, numpy.log1p)\nlogaddexp = _create_ufunc(math.explog.logaddexp, numpy.logaddexp)\nlogaddexp2 = _create_ufunc(math.explog.logaddexp2, numpy.logaddexp2)\n\nsignbit = _create_ufunc(math.floating.signbit, numpy.signbit)\ncopysign = _create_ufunc(math.floating.copysign, numpy.copysign)\nldexp = _create_ufunc(math.floating.ldexp, numpy.ldexp)\nfrexp = _create_ufunc(math.floating.frexp, numpy.frexp)\nnextafter = _create_ufunc(math.floating.nextafter, numpy.nextafter)\n\nadd = _create_ufunc(math.arithmetic.add, numpy.add)\nreciprocal = _create_ufunc(math.arithmetic.reciprocal, numpy.reciprocal)\nnegative = _create_ufunc(math.arithmetic.negative, numpy.negative)\nangle = _create_ufunc(math.arithmetic.angle, numpy.angle)\nconj = _create_ufunc(math.arithmetic.conj, numpy.conj)\nreal = _create_ufunc(math.arithmetic.real, numpy.real)\nimag = _create_ufunc(math.arithmetic.imag, numpy.imag)\nmultiply = _create_ufunc(math.arithmetic.multiply, numpy.multiply)\ndivide = 
_create_ufunc(math.arithmetic.divide, numpy.divide)\npower = _create_ufunc(math.arithmetic.power, numpy.power)\nsubtract = _create_ufunc(math.arithmetic.subtract, numpy.subtract)\ntrue_divide = _create_ufunc(math.arithmetic.true_divide, numpy.true_divide)\nfloor_divide = _create_ufunc(math.arithmetic.floor_divide, numpy.floor_divide)\nfmod = _create_ufunc(math.arithmetic.fmod, numpy.fmod)\nmod = _create_ufunc(math.arithmetic.remainder, numpy.mod)\nmodf = _create_ufunc(math.arithmetic.modf, numpy.modf)\nremainder = _create_ufunc(math.arithmetic.remainder, numpy.remainder)\n\nsqrt = _create_ufunc(math.misc.sqrt, numpy.sqrt)\nsqrt_fixed = _create_ufunc(math.misc.sqrt_fixed, numpy.sqrt)\nsquare = _create_ufunc(math.misc.square, numpy.square)\nabsolute = _create_ufunc(math.misc.absolute, numpy.absolute)\nabs = _create_ufunc(math.misc.absolute, numpy.abs)\nsign = _create_ufunc(math.misc.sign, numpy.sign)\nmaximum = _create_ufunc(math.misc.maximum, numpy.maximum)\nminimum = _create_ufunc(math.misc.minimum, numpy.minimum)\nfmax = _create_ufunc(math.misc.fmax, numpy.fmax)\nfmin = _create_ufunc(math.misc.fmin, numpy.fmin)\n\n\nclass reduction(object):\n\n def __init__(self, cupy_op, numpy_op):\n self._cupy_op = cupy_op\n self._numpy_op = numpy_op\n self.__doc__ = cupy_op.__doc__\n\n def __call__(self, *args, **kwargs):\n if builtins.any(type(_) == numpy.ndarray for _ in args):\n return self._numpy_op(*args, **kwargs)\n else:\n return self._cupy_op(*args, **kwargs)\n\n\nall = reduction(logic.truth.all, numpy.all)\nany = reduction(logic.truth.any, numpy.any)\nsum = reduction(math.sumprod.sum, numpy.sum)\nprod = reduction(math.sumprod.prod, numpy.prod)\namax = reduction(statistics.order.amax, numpy.amax)\namin = reduction(statistics.order.amin, numpy.amin)\n\n\nall._raw = core._all\nany._raw = core._any\nsum._raw = core._sum_auto_dtype\nprod._raw = core._prod_auto_dtype\namax._raw = core._amax\namin._raw = core._amin\n", "path": "cupy/core/fusion.py"}]} |
gh_patches_debug_194 | rasdani/github-patches | git_diff | mdn__kuma-7256 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
"Report a content problem" gets wrong title
**Summary**
You end up on https://github.com/mdn/sprints/issues/new?template=issue-template.md&projects=mdn/sprints/2&labels=user-report&title=%2Fen-US
That prefills the title only to `/en-US`.
**Steps To Reproduce (STR)**
1. Go to https://developer.mozilla.org/en-US/docs/Web/JavaScript
2. Click "Report a content problem" in the "Feedback" menu
**Actual behavior**
GitHub issue form title is just `/en-US`
**Expected behavior**
`/en-US/docs/Web/JavaScript`
**Additional context**
Should it be more than just the document URI?
--- END ISSUE ---
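To make the expected behavior concrete, here is a rough sketch of how the report link could carry the full document path as the prefilled title. The helper name and parameters below are hypothetical and are not taken from the Kuma codebase; they only illustrate the URL the issue asks for.

```python
from urllib.parse import urlencode

# Hypothetical helper, not Kuma's actual implementation: build the
# "Report a content problem" link so the GitHub issue title is the full
# document path rather than just the locale prefix.
def content_report_url(document_path):
    params = {
        "template": "issue-template.md",
        "projects": "mdn/sprints/2",
        "labels": "user-report",
        "title": document_path,  # e.g. "/en-US/docs/Web/JavaScript"
    }
    return "https://github.com/mdn/sprints/issues/new?" + urlencode(params)

print(content_report_url("/en-US/docs/Web/JavaScript"))
```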
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kuma/wiki/templatetags/ssr.py`
Content:
```
1 import json
2 import os
3 from functools import lru_cache
4
5 import requests
6 import requests.exceptions
7 from django.conf import settings
8 from django_jinja import library
9
10
11 @lru_cache()
12 def get_localization_data(locale):
13 """
14 Read the frontend string catalog for the specified locale, parse
15 it as JSON, and return the resulting dict. The returned values
16 are cached so that we don't have to read files all the time.
17 """
18 path = os.path.join(settings.BASE_DIR, "static", "jsi18n", locale, "react.json")
19 with open(path, "r") as f:
20 return json.load(f)
21
22
23 @library.global_function
24 def render_react(component_name, locale, url, document_data, ssr=True):
25 """
26 Render a script tag to define the data and any other HTML tags needed
27 to enable the display of a React-based UI. By default, this does
28 server side rendering, falling back to client-side rendering if
29 the SSR attempt fails. Pass False as the second argument to do
30 client-side rendering unconditionally.
31
32 Note that we are not defining a generic Jinja template tag here.
33 The code in this file is specific to Kuma's React-based UI.
34 """
35 localization_data = get_localization_data(locale)
36
37 data = {
38 "locale": locale,
39 "stringCatalog": localization_data["catalog"],
40 "pluralExpression": localization_data["plural"],
41 "url": url,
42 "documentData": document_data,
43 }
44
45 if ssr:
46 return server_side_render(component_name, data)
47 else:
48 return client_side_render(component_name, data)
49
50
51 def _render(component_name, html, script, needs_serialization=False):
52 """A utility function used by both client side and server side rendering.
53 Returns a string that includes the specified HTML and a serialized
54 form of the state dict, in the format expected by the client-side code
55 in kuma/javascript/src/index.jsx.
56 """
57 if needs_serialization:
58 assert isinstance(script, dict), type(script)
59 script = json.dumps(script).replace("</", "<\\/")
60 else:
61 script = "JSON.parse({})".format(script)
62
63 return (
64 '<div id="react-container" data-component-name="{}">{}</div>\n'
65 "<script>window._react_data = {};</script>\n"
66 ).format(component_name, html, script)
67
68
69 def client_side_render(component_name, data):
70 """
71 Output an empty <div> and a script with complete state so that
72 the UI can be rendered on the client-side.
73 """
74 return _render(component_name, "", data, needs_serialization=True)
75
76
77 def server_side_render(component_name, data):
78 """
79 Pre-render the React UI to HTML and output it in a <div>, and then
80 also pass the necessary serialized state in a <script> so that
81 React on the client side can sync itself with the pre-rendred HTML.
82
83 If any exceptions are thrown during the server-side rendering, we
84 fall back to client-side rendering instead.
85 """
86 url = "{}/{}".format(settings.SSR_URL, component_name)
87 timeout = settings.SSR_TIMEOUT
88 # Try server side rendering
89 try:
90 # POST the document data as JSON to the SSR server and we
91 # should get HTML text (encoded as plain text) in the body
92 # of the response
93 response = requests.post(
94 url,
95 headers={"Content-Type": "application/json"},
96 data=json.dumps(data).encode("utf8"),
97 timeout=timeout,
98 )
99
100 # Even though we've got fully rendered HTML now, we still need to
101 # send the document data along with it so that React can sync its
102 # state on the client side with what is in the HTML. When rendering
103 # a document page, the data includes long strings of HTML that
104 # we can get away without duplicating. So as an optimization when
105 # component_name is "document", we're going to make a copy of the
106 # data (because the original belongs to our caller) and delete those
107 # strings from the copy.
108 #
109 # WARNING: This optimization can save 20kb in data transfer
110 # for typical pages, but it requires us to be very careful on
111 # the frontend. If any components render conditionally based on
112 # the state of bodyHTML, tocHTML or quickLinkHTML, then they will
113 # render differently on the client than during SSR, and the hydrate
114 # will not just work cleanly, and those components will re-render
115 # with empty strings. This has already caused Bug 1558308, and
116 # I've commented it out because the benefit in file size doesn't
117 # seem worth the risk of client-side bugs.
118 #
119 # As an alternative, it ought to be possible to extract the HTML
120 # strings from the SSR'ed document and rebuild the document object
121 # on the client right before we call hydrate(). So if you uncomment
122 # the lines below, you should also edit kuma/javascript/src/index.jsx
123 # to extract the HTML from the document as well.
124 #
125 # if component_name == 'document':
126 # data = data.copy()
127 # data['documentData'] = data['documentData'].copy()
128 # data['documentData'].update(bodyHTML='',
129 # tocHTML='',
130 # quickLinksHTML='')
131 response.raise_for_status()
132 result = response.json()
133 return _render(component_name, result["html"], result["script"])
134
135 except requests.exceptions.RequestException as exception:
136 print(f"{exception.__class__} error contacting SSR server.")
137 print("Falling back to client side rendering.")
138 return client_side_render(component_name, data)
139
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kuma/wiki/templatetags/ssr.py b/kuma/wiki/templatetags/ssr.py
--- a/kuma/wiki/templatetags/ssr.py
+++ b/kuma/wiki/templatetags/ssr.py
@@ -41,7 +41,6 @@
"url": url,
"documentData": document_data,
}
-
if ssr:
return server_side_render(component_name, data)
else:
| {"golden_diff": "diff --git a/kuma/wiki/templatetags/ssr.py b/kuma/wiki/templatetags/ssr.py\n--- a/kuma/wiki/templatetags/ssr.py\n+++ b/kuma/wiki/templatetags/ssr.py\n@@ -41,7 +41,6 @@\n \"url\": url,\n \"documentData\": document_data,\n }\n-\n if ssr:\n return server_side_render(component_name, data)\n else:\n", "issue": "\"Report a content problem\" gets wrong title\n**Summary**\r\nYou end up on https://github.com/mdn/sprints/issues/new?template=issue-template.md&projects=mdn/sprints/2&labels=user-report&title=%2Fen-US\r\n\r\nThat prefills the title only to `/en-US`.\r\n\r\n\r\n**Steps To Reproduce (STR)**\r\n\r\n1. Go to https://developer.mozilla.org/en-US/docs/Web/JavaScript\r\n2. Click \"Report a content problem\" in the \"Feedback\" menu\r\n\r\n\r\n**Actual behavior**\r\nGitHub issue form title is just `/en-US`\r\n\r\n\r\n**Expected behavior**\r\n`/en-US/docs/Web/JavaScript`\r\n\r\n\r\n**Additional context**\r\nShould it be more than just the document URI?\r\n\n", "before_files": [{"content": "import json\nimport os\nfrom functools import lru_cache\n\nimport requests\nimport requests.exceptions\nfrom django.conf import settings\nfrom django_jinja import library\n\n\n@lru_cache()\ndef get_localization_data(locale):\n \"\"\"\n Read the frontend string catalog for the specified locale, parse\n it as JSON, and return the resulting dict. The returned values\n are cached so that we don't have to read files all the time.\n \"\"\"\n path = os.path.join(settings.BASE_DIR, \"static\", \"jsi18n\", locale, \"react.json\")\n with open(path, \"r\") as f:\n return json.load(f)\n\n\[email protected]_function\ndef render_react(component_name, locale, url, document_data, ssr=True):\n \"\"\"\n Render a script tag to define the data and any other HTML tags needed\n to enable the display of a React-based UI. By default, this does\n server side rendering, falling back to client-side rendering if\n the SSR attempt fails. 
Pass False as the second argument to do\n client-side rendering unconditionally.\n\n Note that we are not defining a generic Jinja template tag here.\n The code in this file is specific to Kuma's React-based UI.\n \"\"\"\n localization_data = get_localization_data(locale)\n\n data = {\n \"locale\": locale,\n \"stringCatalog\": localization_data[\"catalog\"],\n \"pluralExpression\": localization_data[\"plural\"],\n \"url\": url,\n \"documentData\": document_data,\n }\n\n if ssr:\n return server_side_render(component_name, data)\n else:\n return client_side_render(component_name, data)\n\n\ndef _render(component_name, html, script, needs_serialization=False):\n \"\"\"A utility function used by both client side and server side rendering.\n Returns a string that includes the specified HTML and a serialized\n form of the state dict, in the format expected by the client-side code\n in kuma/javascript/src/index.jsx.\n \"\"\"\n if needs_serialization:\n assert isinstance(script, dict), type(script)\n script = json.dumps(script).replace(\"</\", \"<\\\\/\")\n else:\n script = \"JSON.parse({})\".format(script)\n\n return (\n '<div id=\"react-container\" data-component-name=\"{}\">{}</div>\\n'\n \"<script>window._react_data = {};</script>\\n\"\n ).format(component_name, html, script)\n\n\ndef client_side_render(component_name, data):\n \"\"\"\n Output an empty <div> and a script with complete state so that\n the UI can be rendered on the client-side.\n \"\"\"\n return _render(component_name, \"\", data, needs_serialization=True)\n\n\ndef server_side_render(component_name, data):\n \"\"\"\n Pre-render the React UI to HTML and output it in a <div>, and then\n also pass the necessary serialized state in a <script> so that\n React on the client side can sync itself with the pre-rendred HTML.\n\n If any exceptions are thrown during the server-side rendering, we\n fall back to client-side rendering instead.\n \"\"\"\n url = \"{}/{}\".format(settings.SSR_URL, component_name)\n timeout = settings.SSR_TIMEOUT\n # Try server side rendering\n try:\n # POST the document data as JSON to the SSR server and we\n # should get HTML text (encoded as plain text) in the body\n # of the response\n response = requests.post(\n url,\n headers={\"Content-Type\": \"application/json\"},\n data=json.dumps(data).encode(\"utf8\"),\n timeout=timeout,\n )\n\n # Even though we've got fully rendered HTML now, we still need to\n # send the document data along with it so that React can sync its\n # state on the client side with what is in the HTML. When rendering\n # a document page, the data includes long strings of HTML that\n # we can get away without duplicating. So as an optimization when\n # component_name is \"document\", we're going to make a copy of the\n # data (because the original belongs to our caller) and delete those\n # strings from the copy.\n #\n # WARNING: This optimization can save 20kb in data transfer\n # for typical pages, but it requires us to be very careful on\n # the frontend. If any components render conditionally based on\n # the state of bodyHTML, tocHTML or quickLinkHTML, then they will\n # render differently on the client than during SSR, and the hydrate\n # will not just work cleanly, and those components will re-render\n # with empty strings. 
This has already caused Bug 1558308, and\n # I've commented it out because the benefit in file size doesn't\n # seem worth the risk of client-side bugs.\n #\n # As an alternative, it ought to be possible to extract the HTML\n # strings from the SSR'ed document and rebuild the document object\n # on the client right before we call hydrate(). So if you uncomment\n # the lines below, you should also edit kuma/javascript/src/index.jsx\n # to extract the HTML from the document as well.\n #\n # if component_name == 'document':\n # data = data.copy()\n # data['documentData'] = data['documentData'].copy()\n # data['documentData'].update(bodyHTML='',\n # tocHTML='',\n # quickLinksHTML='')\n response.raise_for_status()\n result = response.json()\n return _render(component_name, result[\"html\"], result[\"script\"])\n\n except requests.exceptions.RequestException as exception:\n print(f\"{exception.__class__} error contacting SSR server.\")\n print(\"Falling back to client side rendering.\")\n return client_side_render(component_name, data)\n", "path": "kuma/wiki/templatetags/ssr.py"}], "after_files": [{"content": "import json\nimport os\nfrom functools import lru_cache\n\nimport requests\nimport requests.exceptions\nfrom django.conf import settings\nfrom django_jinja import library\n\n\n@lru_cache()\ndef get_localization_data(locale):\n \"\"\"\n Read the frontend string catalog for the specified locale, parse\n it as JSON, and return the resulting dict. The returned values\n are cached so that we don't have to read files all the time.\n \"\"\"\n path = os.path.join(settings.BASE_DIR, \"static\", \"jsi18n\", locale, \"react.json\")\n with open(path, \"r\") as f:\n return json.load(f)\n\n\[email protected]_function\ndef render_react(component_name, locale, url, document_data, ssr=True):\n \"\"\"\n Render a script tag to define the data and any other HTML tags needed\n to enable the display of a React-based UI. By default, this does\n server side rendering, falling back to client-side rendering if\n the SSR attempt fails. 
Pass False as the second argument to do\n client-side rendering unconditionally.\n\n Note that we are not defining a generic Jinja template tag here.\n The code in this file is specific to Kuma's React-based UI.\n \"\"\"\n localization_data = get_localization_data(locale)\n\n data = {\n \"locale\": locale,\n \"stringCatalog\": localization_data[\"catalog\"],\n \"pluralExpression\": localization_data[\"plural\"],\n \"url\": url,\n \"documentData\": document_data,\n }\n if ssr:\n return server_side_render(component_name, data)\n else:\n return client_side_render(component_name, data)\n\n\ndef _render(component_name, html, script, needs_serialization=False):\n \"\"\"A utility function used by both client side and server side rendering.\n Returns a string that includes the specified HTML and a serialized\n form of the state dict, in the format expected by the client-side code\n in kuma/javascript/src/index.jsx.\n \"\"\"\n if needs_serialization:\n assert isinstance(script, dict), type(script)\n script = json.dumps(script).replace(\"</\", \"<\\\\/\")\n else:\n script = \"JSON.parse({})\".format(script)\n\n return (\n '<div id=\"react-container\" data-component-name=\"{}\">{}</div>\\n'\n \"<script>window._react_data = {};</script>\\n\"\n ).format(component_name, html, script)\n\n\ndef client_side_render(component_name, data):\n \"\"\"\n Output an empty <div> and a script with complete state so that\n the UI can be rendered on the client-side.\n \"\"\"\n return _render(component_name, \"\", data, needs_serialization=True)\n\n\ndef server_side_render(component_name, data):\n \"\"\"\n Pre-render the React UI to HTML and output it in a <div>, and then\n also pass the necessary serialized state in a <script> so that\n React on the client side can sync itself with the pre-rendred HTML.\n\n If any exceptions are thrown during the server-side rendering, we\n fall back to client-side rendering instead.\n \"\"\"\n url = \"{}/{}\".format(settings.SSR_URL, component_name)\n timeout = settings.SSR_TIMEOUT\n # Try server side rendering\n try:\n # POST the document data as JSON to the SSR server and we\n # should get HTML text (encoded as plain text) in the body\n # of the response\n response = requests.post(\n url,\n headers={\"Content-Type\": \"application/json\"},\n data=json.dumps(data).encode(\"utf8\"),\n timeout=timeout,\n )\n\n # Even though we've got fully rendered HTML now, we still need to\n # send the document data along with it so that React can sync its\n # state on the client side with what is in the HTML. When rendering\n # a document page, the data includes long strings of HTML that\n # we can get away without duplicating. So as an optimization when\n # component_name is \"document\", we're going to make a copy of the\n # data (because the original belongs to our caller) and delete those\n # strings from the copy.\n #\n # WARNING: This optimization can save 20kb in data transfer\n # for typical pages, but it requires us to be very careful on\n # the frontend. If any components render conditionally based on\n # the state of bodyHTML, tocHTML or quickLinkHTML, then they will\n # render differently on the client than during SSR, and the hydrate\n # will not just work cleanly, and those components will re-render\n # with empty strings. 
This has already caused Bug 1558308, and\n # I've commented it out because the benefit in file size doesn't\n # seem worth the risk of client-side bugs.\n #\n # As an alternative, it ought to be possible to extract the HTML\n # strings from the SSR'ed document and rebuild the document object\n # on the client right before we call hydrate(). So if you uncomment\n # the lines below, you should also edit kuma/javascript/src/index.jsx\n # to extract the HTML from the document as well.\n #\n # if component_name == 'document':\n # data = data.copy()\n # data['documentData'] = data['documentData'].copy()\n # data['documentData'].update(bodyHTML='',\n # tocHTML='',\n # quickLinksHTML='')\n response.raise_for_status()\n result = response.json()\n return _render(component_name, result[\"html\"], result[\"script\"])\n\n except requests.exceptions.RequestException as exception:\n print(f\"{exception.__class__} error contacting SSR server.\")\n print(\"Falling back to client side rendering.\")\n return client_side_render(component_name, data)\n", "path": "kuma/wiki/templatetags/ssr.py"}]} |
gh_patches_debug_195 | rasdani/github-patches | git_diff | bookwyrm-social__bookwyrm-404 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Rate stars don't work
You should be able to click to give a star rating to a book on the book page, but it doesn't do anything.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bookwyrm/activitypub/note.py`
Content:
```
1 ''' note serializer and children thereof '''
2 from dataclasses import dataclass, field
3 from typing import Dict, List
4
5 from .base_activity import ActivityObject, Link
6 from .image import Image
7
8 @dataclass(init=False)
9 class Tombstone(ActivityObject):
10 ''' the placeholder for a deleted status '''
11 published: str
12 deleted: str
13 type: str = 'Tombstone'
14
15
16 @dataclass(init=False)
17 class Note(ActivityObject):
18 ''' Note activity '''
19 published: str
20 attributedTo: str
21 content: str
22 to: List[str] = field(default_factory=lambda: [])
23 cc: List[str] = field(default_factory=lambda: [])
24 replies: Dict = field(default_factory=lambda: {})
25 inReplyTo: str = ''
26 summary: str = ''
27 tag: List[Link] = field(default_factory=lambda: [])
28 attachment: List[Image] = field(default_factory=lambda: [])
29 sensitive: bool = False
30 type: str = 'Note'
31
32
33 @dataclass(init=False)
34 class Article(Note):
35 ''' what's an article except a note with more fields '''
36 name: str
37 type: str = 'Article'
38
39
40 @dataclass(init=False)
41 class GeneratedNote(Note):
42 ''' just a re-typed note '''
43 type: str = 'GeneratedNote'
44
45
46 @dataclass(init=False)
47 class Comment(Note):
48 ''' like a note but with a book '''
49 inReplyToBook: str
50 type: str = 'Comment'
51
52
53 @dataclass(init=False)
54 class Review(Comment):
55 ''' a full book review '''
56 name: str
57 rating: int = None
58 type: str = 'Review'
59
60
61 @dataclass(init=False)
62 class Quotation(Comment):
63 ''' a quote and commentary on a book '''
64 quote: str
65 type: str = 'Quotation'
66
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bookwyrm/activitypub/note.py b/bookwyrm/activitypub/note.py
--- a/bookwyrm/activitypub/note.py
+++ b/bookwyrm/activitypub/note.py
@@ -53,7 +53,7 @@
@dataclass(init=False)
class Review(Comment):
''' a full book review '''
- name: str
+ name: str = None
rating: int = None
type: str = 'Review'
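To make the effect of this one-line change concrete, here is a simplified sketch. The class below is hypothetical and not BookWyrm's actual ActivityPub serializer (the real `Review` inherits from `Comment` and is built with `init=False`); it only shows why giving `name` a default lets a star-only rating exist without a review title.

```python
from dataclasses import dataclass

@dataclass
class ReviewSketch:
    name: str = None   # the default added by the patch makes the title optional
    rating: int = None

# A rating submitted from the stars widget carries no title, which now works:
print(ReviewSketch(rating=4))
```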
| {"golden_diff": "diff --git a/bookwyrm/activitypub/note.py b/bookwyrm/activitypub/note.py\n--- a/bookwyrm/activitypub/note.py\n+++ b/bookwyrm/activitypub/note.py\n@@ -53,7 +53,7 @@\n @dataclass(init=False)\n class Review(Comment):\n ''' a full book review '''\n- name: str\n+ name: str = None\n rating: int = None\n type: str = 'Review'\n", "issue": "Rate stars don't work\nYou should be able to click to give a star rating to a book on the book page, it doesn't do anything.\n", "before_files": [{"content": "''' note serializer and children thereof '''\nfrom dataclasses import dataclass, field\nfrom typing import Dict, List\n\nfrom .base_activity import ActivityObject, Link\nfrom .image import Image\n\n@dataclass(init=False)\nclass Tombstone(ActivityObject):\n ''' the placeholder for a deleted status '''\n published: str\n deleted: str\n type: str = 'Tombstone'\n\n\n@dataclass(init=False)\nclass Note(ActivityObject):\n ''' Note activity '''\n published: str\n attributedTo: str\n content: str\n to: List[str] = field(default_factory=lambda: [])\n cc: List[str] = field(default_factory=lambda: [])\n replies: Dict = field(default_factory=lambda: {})\n inReplyTo: str = ''\n summary: str = ''\n tag: List[Link] = field(default_factory=lambda: [])\n attachment: List[Image] = field(default_factory=lambda: [])\n sensitive: bool = False\n type: str = 'Note'\n\n\n@dataclass(init=False)\nclass Article(Note):\n ''' what's an article except a note with more fields '''\n name: str\n type: str = 'Article'\n\n\n@dataclass(init=False)\nclass GeneratedNote(Note):\n ''' just a re-typed note '''\n type: str = 'GeneratedNote'\n\n\n@dataclass(init=False)\nclass Comment(Note):\n ''' like a note but with a book '''\n inReplyToBook: str\n type: str = 'Comment'\n\n\n@dataclass(init=False)\nclass Review(Comment):\n ''' a full book review '''\n name: str\n rating: int = None\n type: str = 'Review'\n\n\n@dataclass(init=False)\nclass Quotation(Comment):\n ''' a quote and commentary on a book '''\n quote: str\n type: str = 'Quotation'\n", "path": "bookwyrm/activitypub/note.py"}], "after_files": [{"content": "''' note serializer and children thereof '''\nfrom dataclasses import dataclass, field\nfrom typing import Dict, List\n\nfrom .base_activity import ActivityObject, Link\nfrom .image import Image\n\n@dataclass(init=False)\nclass Tombstone(ActivityObject):\n ''' the placeholder for a deleted status '''\n published: str\n deleted: str\n type: str = 'Tombstone'\n\n\n@dataclass(init=False)\nclass Note(ActivityObject):\n ''' Note activity '''\n published: str\n attributedTo: str\n content: str\n to: List[str] = field(default_factory=lambda: [])\n cc: List[str] = field(default_factory=lambda: [])\n replies: Dict = field(default_factory=lambda: {})\n inReplyTo: str = ''\n summary: str = ''\n tag: List[Link] = field(default_factory=lambda: [])\n attachment: List[Image] = field(default_factory=lambda: [])\n sensitive: bool = False\n type: str = 'Note'\n\n\n@dataclass(init=False)\nclass Article(Note):\n ''' what's an article except a note with more fields '''\n name: str\n type: str = 'Article'\n\n\n@dataclass(init=False)\nclass GeneratedNote(Note):\n ''' just a re-typed note '''\n type: str = 'GeneratedNote'\n\n\n@dataclass(init=False)\nclass Comment(Note):\n ''' like a note but with a book '''\n inReplyToBook: str\n type: str = 'Comment'\n\n\n@dataclass(init=False)\nclass Review(Comment):\n ''' a full book review '''\n name: str = None\n rating: int = None\n type: str = 'Review'\n\n\n@dataclass(init=False)\nclass 
Quotation(Comment):\n ''' a quote and commentary on a book '''\n quote: str\n type: str = 'Quotation'\n", "path": "bookwyrm/activitypub/note.py"}]} |
gh_patches_debug_196 | rasdani/github-patches | git_diff | PennyLaneAI__pennylane-2766 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG] `adjoint(adjoint(op))` returns identity
### Expected behavior
When queuing `adjoint(adjoint(op))` in a tape, op should be added
### Actual behavior
op and adjoint(op) are queued
### Additional information
Here is a simple example where the original circuit contains an adjoint and is taken to a new circuit that executes the original followed by its adjoint. Since `adjoint(adjoint())` becomes the identity, the original circuit is restored (instead of the identity circuit that is expected):
```python
with qml.tape.QuantumTape() as circuit:
qml.adjoint(qml.RX(0.5, wires=0))
qml.expval(qml.PauliZ(0))
base_ops = circuit.expand().copy(copy_operations=True).operations
new_list_of_ops = [op for op in base_ops]
new_list_of_ops += [adjoint(op) for op in base_ops[::-1]]
with QuantumTape() as new_circuit:
for op in new_list_of_ops:
apply(op)
for meas in circuit.measurements:
apply(meas)
dev = qml.device("default.qubit", wires=range(2))
>>> print(qml.execute([circuit], dev, gradient_fn=None), qml.execute([new_circuit], dev, gradient_fn=None))
[array([0.87758256])] [array([0.87758256])]
```
But the latter should yield 1 since it is supposed to be the identity.
### Source code
_No response_
### Tracebacks
_No response_
### System information
```shell
0.24.0.dev0
```
### Existing GitHub issues
- [X] I have searched existing GitHub issues to make sure the issue does not already exist.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pennylane/ops/op_math/adjoint_class.py`
Content:
```
1 # Copyright 2018-2022 Xanadu Quantum Technologies Inc.
2
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6
7 # http://www.apache.org/licenses/LICENSE-2.0
8
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 """
15 This submodule defines the symbolic operation that indicates the adjoint of an operator.
16 """
17 from pennylane.operation import Operator, Operation, AdjointUndefinedError, Observable
18 from pennylane.queuing import QueuingContext
19 from pennylane.math import transpose, conj
20
21
22 # pylint: disable=no-member
23 class AdjointOperation(Operation):
24 """This mixin class is dynamically added to an ``Adjoint`` instance if the provided base class is an ``Operation``.
25
26 .. warning::
27 This mixin class should never be initialized independent of ``Adjoint``.
28
29 Overriding the dunder method ``__new__`` in ``Adjoint`` allows us to customize the creation of an instance and dynamically
30 add in parent classes.
31
32 .. note:: Once the ``Operation`` class does not contain any unique logic any more, this mixin class can be removed.
33 """
34
35 # This inverse behavior only needs to temporarily patch behavior until in-place inversion is removed.
36
37 @property
38 def _inverse(self):
39 return self.base._inverse # pylint: disable=protected-access
40
41 @_inverse.setter
42 def _inverse(self, boolean):
43 self.base._inverse = boolean # pylint: disable=protected-access
44 # refresh name as base_name got updated.
45 self._name = f"Adjoint({self.base.name})"
46
47 def inv(self):
48 self.base.inv()
49 # refresh name as base_name got updated.
50 self._name = f"Adjoint({self.base.name})"
51 return self
52
53 @property
54 def base_name(self):
55 return self._name
56
57 @property
58 def name(self):
59 return self._name
60
61 # pylint: disable=missing-function-docstring
62 @property
63 def basis(self):
64 return self.base.basis
65
66 @property
67 def control_wires(self):
68 return self.base.control_wires
69
70 def single_qubit_rot_angles(self):
71 omega, theta, phi = self.base.single_qubit_rot_angles()
72 return [-phi, -theta, -omega]
73
74 @property
75 def grad_method(self):
76 return self.base.grad_method
77
78 # pylint: disable=missing-function-docstring
79 @property
80 def grad_recipe(self):
81 return self.base.grad_recipe
82
83 def get_parameter_shift(self, idx):
84 return self.base.get_parameter_shift(idx)
85
86 @property
87 def parameter_frequencies(self):
88 return self.base.parameter_frequencies
89
90 def generator(self):
91 return -1.0 * self.base.generator()
92
93
94 # pylint: disable=too-many-public-methods
95 class Adjoint(Operator):
96 """
97 The Adjoint of an operator.
98
99 Args:
100 base (~.operation.Operator): The operator that is adjointed.
101
102 .. seealso:: :func:`~.adjoint`, :meth:`~.operation.Operator.adjoint`
103
104 This is a *developer*-facing class, and the :func:`~.adjoint` transform should be used to construct instances
105 of this class.
106
107 **Example**
108
109 >>> op = Adjoint(qml.S(0))
110 >>> op.name
111 'Adjoint(S)'
112 >>> qml.matrix(op)
113 array([[1.-0.j, 0.-0.j],
114 [0.-0.j, 0.-1.j]])
115 >>> qml.generator(Adjoint(qml.RX(1.0, wires=0)))
116 (PauliX(wires=[0]), 0.5)
117 >>> Adjoint(qml.RX(1.234, wires=0)).data
118 [1.234]
119
120 .. details::
121 :title: Developer Details
122
123 This class mixes in parent classes based on the inheritance tree of the provided ``Operator``. For example, when
124 provided an ``Operation``, the instance will inherit from ``Operation`` and the ``AdjointOperation`` mixin.
125
126 >>> op = Adjoint(qml.RX(1.234, wires=0))
127 >>> isinstance(op, qml.operation.Operation)
128 True
129 >>> isinstance(op, AdjointOperation)
130 True
131 >>> op.grad_method
132 'A'
133
134 If the base class is an ``Observable`` instead, the ``Adjoint`` will be an ``Observable`` as well.
135
136 >>> op = Adjoint(1.0 * qml.PauliX(0))
137 >>> isinstance(op, qml.operation.Observable)
138 True
139 >>> isinstance(op, qml.operation.Operation)
140 False
141 >>> Adjoint(qml.PauliX(0)) @ qml.PauliY(1)
142 Adjoint(PauliX)(wires=[0]) @ PauliY(wires=[1])
143
144 """
145
146 _operation_type = None # type if base inherits from operation and not observable
147 _operation_observable_type = None # type if base inherits from both operation and observable
148 _observable_type = None # type if base inherits from observable and not operation
149
150 # pylint: disable=unused-argument
151 def __new__(cls, base=None, do_queue=True, id=None):
152 """Mixes in parents based on inheritance structure of base.
153
154 Though all the types will be named "Adjoint", their *identity* and location in memory will be different
155 based on ``base``'s inheritance. We cache the different types in private class variables so that:
156
157 >>> Adjoint(op).__class__ is Adjoint(op).__class__
158 True
159 >>> type(Adjoint(op)) == type(Adjoint(op))
160 True
161 >>> Adjoint(qml.RX(1.2, wires=0)).__class__ is Adjoint._operation_type
162 True
163 >>> Adjoint(qml.PauliX(0)).__class__ is Adjoint._operation_observable_type
164 True
165
166 """
167
168 if isinstance(base, Operation):
169 if isinstance(base, Observable):
170 if cls._operation_observable_type is None:
171 class_bases = (AdjointOperation, Adjoint, Observable, Operation)
172 cls._operation_observable_type = type(
173 "Adjoint", class_bases, dict(cls.__dict__)
174 )
175 return object.__new__(cls._operation_observable_type)
176
177 # not an observable
178 if cls._operation_type is None:
179 class_bases = (AdjointOperation, Adjoint, Operation)
180 cls._operation_type = type("Adjoint", class_bases, dict(cls.__dict__))
181 return object.__new__(cls._operation_type)
182
183 if isinstance(base, Observable):
184 if cls._observable_type is None:
185 class_bases = (Adjoint, Observable)
186 cls._observable_type = type("Adjoint", class_bases, dict(cls.__dict__))
187 return object.__new__(cls._observable_type)
188
189 return object.__new__(Adjoint)
190
191 # pylint: disable=attribute-defined-outside-init
192 def __copy__(self):
193 # this method needs to be overwritten becuase the base must be copied too.
194 copied_op = object.__new__(type(self))
195 # copied_op must maintain inheritance structure of self
196 # For example, it must keep AdjointOperation if self has it
197 # this way preserves inheritance structure
198
199 copied_base = self.base.__copy__()
200 copied_op._hyperparameters = {"base": copied_base}
201 for attr, value in vars(self).items():
202 if attr not in {"data", "base", "_hyperparameters"}:
203 setattr(copied_op, attr, value)
204
205 return copied_op
206
207 # pylint: disable=super-init-not-called
208 def __init__(self, base=None, do_queue=True, id=None):
209 self.hyperparameters["base"] = base
210 self._id = id
211 self.queue_idx = None
212
213 self._name = f"Adjoint({self.base.name})"
214
215 if do_queue:
216 self.queue()
217
218 @property
219 def base(self):
220 """The operator that is adjointed."""
221 return self.hyperparameters["base"]
222
223 @property
224 def data(self):
225 """Trainable parameters that the operator depends on."""
226 return self.base.data
227
228 @data.setter
229 def data(self, new_data):
230 """Allows us to set base operation parameters."""
231 self.base.data = new_data
232
233 @property
234 def parameters(self):
235 return self.base.parameters
236
237 @property
238 def num_params(self):
239 return self.base.num_params
240
241 @property
242 def wires(self):
243 return self.base.wires
244
245 # pylint: disable=protected-access
246 @property
247 def _wires(self):
248 return self.base._wires
249
250 # pylint: disable=protected-access
251 @_wires.setter
252 def _wires(self, new_wires):
253 # we should have a better way of updating wires than accessing a private attribute.
254 self.base._wires = new_wires
255
256 @property
257 def num_wires(self):
258 return self.base.num_wires
259
260 @property
261 def batch_size(self):
262 return self.base.batch_size
263
264 @property
265 def ndim_params(self):
266 return self.base.ndim_params
267
268 @property
269 def is_hermitian(self):
270 return self.base.is_hermitian
271
272 def queue(self, context=QueuingContext):
273 context.safe_update_info(self.base, owner=self)
274 context.append(self, owns=self.base)
275
276 return self
277
278 def label(self, decimals=None, base_label=None, cache=None):
279 return self.base.label(decimals, base_label, cache=cache) + "†"
280
281 # pylint: disable=arguments-differ
282 @staticmethod
283 def compute_matrix(*params, base=None):
284 base_matrix = base.compute_matrix(*params, **base.hyperparameters)
285 return transpose(conj(base_matrix))
286
287 def decomposition(self):
288 try:
289 return [self.base.adjoint()]
290 except AdjointUndefinedError:
291 base_decomp = self.base.decomposition()
292 return [Adjoint(op) for op in reversed(base_decomp)]
293
294 # pylint: disable=arguments-differ
295 @staticmethod
296 def compute_sparse_matrix(*params, base=None):
297 base_matrix = base.compute_sparse_matrix(*params, **base.hyperparameters)
298 return transpose(conj(base_matrix)).tocsr()
299
300 def eigvals(self):
301 # Cannot define ``compute_eigvals`` because Hermitian only defines ``eigvals``
302 return conj(self.base.eigvals())
303
304 def diagonalizing_gates(self):
305 return self.base.diagonalizing_gates()
306
307 # pylint: disable=arguments-renamed, invalid-overridden-method
308 @property
309 def has_matrix(self):
310 return self.base.has_matrix
311
312 def adjoint(self):
313 return self.base
314
315 @property
316 def _queue_category(self):
317 """Used for sorting objects into their respective lists in `QuantumTape` objects.
318
319 This property is a temporary solution that should not exist long-term and should not be
320 used outside of ``QuantumTape._process_queue``.
321
322 Returns ``_queue_cateogory`` for base operator.
323 """
324 return self.base._queue_category # pylint: disable=protected-access
325
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pennylane/ops/op_math/adjoint_class.py b/pennylane/ops/op_math/adjoint_class.py
--- a/pennylane/ops/op_math/adjoint_class.py
+++ b/pennylane/ops/op_math/adjoint_class.py
@@ -310,7 +310,7 @@
         return self.base.has_matrix
 
     def adjoint(self):
-        return self.base
+        return self.base.queue()
 
     @property
     def _queue_category(self):
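To see what the one-line change is meant to accomplish, here is a minimal sketch of exercising a double adjoint inside a tape. It assumes PennyLane ~0.24 APIs, and the exact printed representation may differ; the point is that the base operation should end up queued rather than dropped.

```python
import pennylane as qml

# Rough check of the intended behavior: taking the adjoint of an adjoint
# should leave the original operation on the tape.
with qml.tape.QuantumTape() as tape:
    qml.adjoint(qml.adjoint(qml.RX(0.5, wires=0)))
    qml.expval(qml.PauliZ(0))

print(tape.operations)  # expected to contain the original RX(0.5) on wire 0
```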
| {"golden_diff": "diff --git a/pennylane/ops/op_math/adjoint_class.py b/pennylane/ops/op_math/adjoint_class.py\n--- a/pennylane/ops/op_math/adjoint_class.py\n+++ b/pennylane/ops/op_math/adjoint_class.py\n@@ -310,7 +310,7 @@\n return self.base.has_matrix\n \n def adjoint(self):\n- return self.base\n+ return self.base.queue()\n \n @property\n def _queue_category(self):\n", "issue": "[BUG] `adjoint(adjoint(op))` returns identity\n### Expected behavior\n\nWhen queuing `adjoint(adjoint(op))` in a tape, op should be added\n\n### Actual behavior\n\nop and adjoint(op) are queued\n\n### Additional information\n\nHere is a simple example, where the original circuit contains an adjoint and is taken to a new circuit that executes the original and its adjoint, Since `adjoint(adjoint())` becomes identity, the original circuit is restored (instead of the identity, as expected:\r\n\r\n```python\r\nwith qml.tape.QuantumTape() as circuit:\r\n qml.adjoint(qml.RX(0.5, wires=0))\r\n qml.expval(qml.PauliZ(0))\r\n\r\nbase_ops = circuit.expand().copy(copy_operations=True).operations\r\n\r\nnew_list_of_ops = [op for op in base_ops]\r\nnew_list_of_ops += [adjoint(op) for op in base_ops[::-1]]\r\n\r\nwith QuantumTape() as new_circuit:\r\n for op in new_list_of_ops:\r\n apply(op)\r\n\r\n for meas in circuit.measurements:\r\n apply(meas)\r\n\r\ndev = qml.device(\"default.qubit\", wires=range(2))\r\n>>> print(qml.execute([circuit], dev, gradient_fn=None), qml.execute([new_circuit], dev, gradient_fn=None))\r\n[array([0.87758256])] [array([0.87758256])]\r\n```\r\nBut the latter should yield 1 since it is supposed to be the identity.\n\n### Source code\n\n_No response_\n\n### Tracebacks\n\n_No response_\n\n### System information\n\n```shell\n0.24.0.dev0\n```\n\n\n### Existing GitHub issues\n\n- [X] I have searched existing GitHub issues to make sure the issue does not already exist.\n", "before_files": [{"content": "# Copyright 2018-2022 Xanadu Quantum Technologies Inc.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nThis submodule defines the symbolic operation that indicates the adjoint of an operator.\n\"\"\"\nfrom pennylane.operation import Operator, Operation, AdjointUndefinedError, Observable\nfrom pennylane.queuing import QueuingContext\nfrom pennylane.math import transpose, conj\n\n\n# pylint: disable=no-member\nclass AdjointOperation(Operation):\n \"\"\"This mixin class is dynamically added to an ``Adjoint`` instance if the provided base class is an ``Operation``.\n\n .. warning::\n This mixin class should never be initialized independent of ``Adjoint``.\n\n Overriding the dunder method ``__new__`` in ``Adjoint`` allows us to customize the creation of an instance and dynamically\n add in parent classes.\n\n .. 
note:: Once the ``Operation`` class does not contain any unique logic any more, this mixin class can be removed.\n \"\"\"\n\n # This inverse behavior only needs to temporarily patch behavior until in-place inversion is removed.\n\n @property\n def _inverse(self):\n return self.base._inverse # pylint: disable=protected-access\n\n @_inverse.setter\n def _inverse(self, boolean):\n self.base._inverse = boolean # pylint: disable=protected-access\n # refresh name as base_name got updated.\n self._name = f\"Adjoint({self.base.name})\"\n\n def inv(self):\n self.base.inv()\n # refresh name as base_name got updated.\n self._name = f\"Adjoint({self.base.name})\"\n return self\n\n @property\n def base_name(self):\n return self._name\n\n @property\n def name(self):\n return self._name\n\n # pylint: disable=missing-function-docstring\n @property\n def basis(self):\n return self.base.basis\n\n @property\n def control_wires(self):\n return self.base.control_wires\n\n def single_qubit_rot_angles(self):\n omega, theta, phi = self.base.single_qubit_rot_angles()\n return [-phi, -theta, -omega]\n\n @property\n def grad_method(self):\n return self.base.grad_method\n\n # pylint: disable=missing-function-docstring\n @property\n def grad_recipe(self):\n return self.base.grad_recipe\n\n def get_parameter_shift(self, idx):\n return self.base.get_parameter_shift(idx)\n\n @property\n def parameter_frequencies(self):\n return self.base.parameter_frequencies\n\n def generator(self):\n return -1.0 * self.base.generator()\n\n\n# pylint: disable=too-many-public-methods\nclass Adjoint(Operator):\n \"\"\"\n The Adjoint of an operator.\n\n Args:\n base (~.operation.Operator): The operator that is adjointed.\n\n .. seealso:: :func:`~.adjoint`, :meth:`~.operation.Operator.adjoint`\n\n This is a *developer*-facing class, and the :func:`~.adjoint` transform should be used to construct instances\n of this class.\n\n **Example**\n\n >>> op = Adjoint(qml.S(0))\n >>> op.name\n 'Adjoint(S)'\n >>> qml.matrix(op)\n array([[1.-0.j, 0.-0.j],\n [0.-0.j, 0.-1.j]])\n >>> qml.generator(Adjoint(qml.RX(1.0, wires=0)))\n (PauliX(wires=[0]), 0.5)\n >>> Adjoint(qml.RX(1.234, wires=0)).data\n [1.234]\n\n .. details::\n :title: Developer Details\n\n This class mixes in parent classes based on the inheritance tree of the provided ``Operator``. For example, when\n provided an ``Operation``, the instance will inherit from ``Operation`` and the ``AdjointOperation`` mixin.\n\n >>> op = Adjoint(qml.RX(1.234, wires=0))\n >>> isinstance(op, qml.operation.Operation)\n True\n >>> isinstance(op, AdjointOperation)\n True\n >>> op.grad_method\n 'A'\n\n If the base class is an ``Observable`` instead, the ``Adjoint`` will be an ``Observable`` as well.\n\n >>> op = Adjoint(1.0 * qml.PauliX(0))\n >>> isinstance(op, qml.operation.Observable)\n True\n >>> isinstance(op, qml.operation.Operation)\n False\n >>> Adjoint(qml.PauliX(0)) @ qml.PauliY(1)\n Adjoint(PauliX)(wires=[0]) @ PauliY(wires=[1])\n\n \"\"\"\n\n _operation_type = None # type if base inherits from operation and not observable\n _operation_observable_type = None # type if base inherits from both operation and observable\n _observable_type = None # type if base inherits from observable and not operation\n\n # pylint: disable=unused-argument\n def __new__(cls, base=None, do_queue=True, id=None):\n \"\"\"Mixes in parents based on inheritance structure of base.\n\n Though all the types will be named \"Adjoint\", their *identity* and location in memory will be different\n based on ``base``'s inheritance. 
We cache the different types in private class variables so that:\n\n >>> Adjoint(op).__class__ is Adjoint(op).__class__\n True\n >>> type(Adjoint(op)) == type(Adjoint(op))\n True\n >>> Adjoint(qml.RX(1.2, wires=0)).__class__ is Adjoint._operation_type\n True\n >>> Adjoint(qml.PauliX(0)).__class__ is Adjoint._operation_observable_type\n True\n\n \"\"\"\n\n if isinstance(base, Operation):\n if isinstance(base, Observable):\n if cls._operation_observable_type is None:\n class_bases = (AdjointOperation, Adjoint, Observable, Operation)\n cls._operation_observable_type = type(\n \"Adjoint\", class_bases, dict(cls.__dict__)\n )\n return object.__new__(cls._operation_observable_type)\n\n # not an observable\n if cls._operation_type is None:\n class_bases = (AdjointOperation, Adjoint, Operation)\n cls._operation_type = type(\"Adjoint\", class_bases, dict(cls.__dict__))\n return object.__new__(cls._operation_type)\n\n if isinstance(base, Observable):\n if cls._observable_type is None:\n class_bases = (Adjoint, Observable)\n cls._observable_type = type(\"Adjoint\", class_bases, dict(cls.__dict__))\n return object.__new__(cls._observable_type)\n\n return object.__new__(Adjoint)\n\n # pylint: disable=attribute-defined-outside-init\n def __copy__(self):\n # this method needs to be overwritten becuase the base must be copied too.\n copied_op = object.__new__(type(self))\n # copied_op must maintain inheritance structure of self\n # For example, it must keep AdjointOperation if self has it\n # this way preserves inheritance structure\n\n copied_base = self.base.__copy__()\n copied_op._hyperparameters = {\"base\": copied_base}\n for attr, value in vars(self).items():\n if attr not in {\"data\", \"base\", \"_hyperparameters\"}:\n setattr(copied_op, attr, value)\n\n return copied_op\n\n # pylint: disable=super-init-not-called\n def __init__(self, base=None, do_queue=True, id=None):\n self.hyperparameters[\"base\"] = base\n self._id = id\n self.queue_idx = None\n\n self._name = f\"Adjoint({self.base.name})\"\n\n if do_queue:\n self.queue()\n\n @property\n def base(self):\n \"\"\"The operator that is adjointed.\"\"\"\n return self.hyperparameters[\"base\"]\n\n @property\n def data(self):\n \"\"\"Trainable parameters that the operator depends on.\"\"\"\n return self.base.data\n\n @data.setter\n def data(self, new_data):\n \"\"\"Allows us to set base operation parameters.\"\"\"\n self.base.data = new_data\n\n @property\n def parameters(self):\n return self.base.parameters\n\n @property\n def num_params(self):\n return self.base.num_params\n\n @property\n def wires(self):\n return self.base.wires\n\n # pylint: disable=protected-access\n @property\n def _wires(self):\n return self.base._wires\n\n # pylint: disable=protected-access\n @_wires.setter\n def _wires(self, new_wires):\n # we should have a better way of updating wires than accessing a private attribute.\n self.base._wires = new_wires\n\n @property\n def num_wires(self):\n return self.base.num_wires\n\n @property\n def batch_size(self):\n return self.base.batch_size\n\n @property\n def ndim_params(self):\n return self.base.ndim_params\n\n @property\n def is_hermitian(self):\n return self.base.is_hermitian\n\n def queue(self, context=QueuingContext):\n context.safe_update_info(self.base, owner=self)\n context.append(self, owns=self.base)\n\n return self\n\n def label(self, decimals=None, base_label=None, cache=None):\n return self.base.label(decimals, base_label, cache=cache) + \"\u2020\"\n\n # pylint: disable=arguments-differ\n @staticmethod\n def 
compute_matrix(*params, base=None):\n base_matrix = base.compute_matrix(*params, **base.hyperparameters)\n return transpose(conj(base_matrix))\n\n def decomposition(self):\n try:\n return [self.base.adjoint()]\n except AdjointUndefinedError:\n base_decomp = self.base.decomposition()\n return [Adjoint(op) for op in reversed(base_decomp)]\n\n # pylint: disable=arguments-differ\n @staticmethod\n def compute_sparse_matrix(*params, base=None):\n base_matrix = base.compute_sparse_matrix(*params, **base.hyperparameters)\n return transpose(conj(base_matrix)).tocsr()\n\n def eigvals(self):\n # Cannot define ``compute_eigvals`` because Hermitian only defines ``eigvals``\n return conj(self.base.eigvals())\n\n def diagonalizing_gates(self):\n return self.base.diagonalizing_gates()\n\n # pylint: disable=arguments-renamed, invalid-overridden-method\n @property\n def has_matrix(self):\n return self.base.has_matrix\n\n def adjoint(self):\n return self.base\n\n @property\n def _queue_category(self):\n \"\"\"Used for sorting objects into their respective lists in `QuantumTape` objects.\n\n This property is a temporary solution that should not exist long-term and should not be\n used outside of ``QuantumTape._process_queue``.\n\n Returns ``_queue_cateogory`` for base operator.\n \"\"\"\n return self.base._queue_category # pylint: disable=protected-access\n", "path": "pennylane/ops/op_math/adjoint_class.py"}], "after_files": [{"content": "# Copyright 2018-2022 Xanadu Quantum Technologies Inc.\n\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n\n# http://www.apache.org/licenses/LICENSE-2.0\n\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\"\"\"\nThis submodule defines the symbolic operation that indicates the adjoint of an operator.\n\"\"\"\nfrom pennylane.operation import Operator, Operation, AdjointUndefinedError, Observable\nfrom pennylane.queuing import QueuingContext\nfrom pennylane.math import transpose, conj\n\n\n# pylint: disable=no-member\nclass AdjointOperation(Operation):\n \"\"\"This mixin class is dynamically added to an ``Adjoint`` instance if the provided base class is an ``Operation``.\n\n .. warning::\n This mixin class should never be initialized independent of ``Adjoint``.\n\n Overriding the dunder method ``__new__`` in ``Adjoint`` allows us to customize the creation of an instance and dynamically\n add in parent classes.\n\n .. 
note:: Once the ``Operation`` class does not contain any unique logic any more, this mixin class can be removed.\n \"\"\"\n\n # This inverse behavior only needs to temporarily patch behavior until in-place inversion is removed.\n\n @property\n def _inverse(self):\n return self.base._inverse # pylint: disable=protected-access\n\n @_inverse.setter\n def _inverse(self, boolean):\n self.base._inverse = boolean # pylint: disable=protected-access\n # refresh name as base_name got updated.\n self._name = f\"Adjoint({self.base.name})\"\n\n def inv(self):\n self.base.inv()\n # refresh name as base_name got updated.\n self._name = f\"Adjoint({self.base.name})\"\n return self\n\n @property\n def base_name(self):\n return self._name\n\n @property\n def name(self):\n return self._name\n\n # pylint: disable=missing-function-docstring\n @property\n def basis(self):\n return self.base.basis\n\n @property\n def control_wires(self):\n return self.base.control_wires\n\n def single_qubit_rot_angles(self):\n omega, theta, phi = self.base.single_qubit_rot_angles()\n return [-phi, -theta, -omega]\n\n @property\n def grad_method(self):\n return self.base.grad_method\n\n # pylint: disable=missing-function-docstring\n @property\n def grad_recipe(self):\n return self.base.grad_recipe\n\n def get_parameter_shift(self, idx):\n return self.base.get_parameter_shift(idx)\n\n @property\n def parameter_frequencies(self):\n return self.base.parameter_frequencies\n\n def generator(self):\n return -1.0 * self.base.generator()\n\n\n# pylint: disable=too-many-public-methods\nclass Adjoint(Operator):\n \"\"\"\n The Adjoint of an operator.\n\n Args:\n base (~.operation.Operator): The operator that is adjointed.\n\n .. seealso:: :func:`~.adjoint`, :meth:`~.operation.Operator.adjoint`\n\n This is a *developer*-facing class, and the :func:`~.adjoint` transform should be used to construct instances\n of this class.\n\n **Example**\n\n >>> op = Adjoint(qml.S(0))\n >>> op.name\n 'Adjoint(S)'\n >>> qml.matrix(op)\n array([[1.-0.j, 0.-0.j],\n [0.-0.j, 0.-1.j]])\n >>> qml.generator(Adjoint(qml.RX(1.0, wires=0)))\n (PauliX(wires=[0]), 0.5)\n >>> Adjoint(qml.RX(1.234, wires=0)).data\n [1.234]\n\n .. details::\n :title: Developer Details\n\n This class mixes in parent classes based on the inheritance tree of the provided ``Operator``. For example, when\n provided an ``Operation``, the instance will inherit from ``Operation`` and the ``AdjointOperation`` mixin.\n\n >>> op = Adjoint(qml.RX(1.234, wires=0))\n >>> isinstance(op, qml.operation.Operation)\n True\n >>> isinstance(op, AdjointOperation)\n True\n >>> op.grad_method\n 'A'\n\n If the base class is an ``Observable`` instead, the ``Adjoint`` will be an ``Observable`` as well.\n\n >>> op = Adjoint(1.0 * qml.PauliX(0))\n >>> isinstance(op, qml.operation.Observable)\n True\n >>> isinstance(op, qml.operation.Operation)\n False\n >>> Adjoint(qml.PauliX(0)) @ qml.PauliY(1)\n Adjoint(PauliX)(wires=[0]) @ PauliY(wires=[1])\n\n \"\"\"\n\n _operation_type = None # type if base inherits from operation and not observable\n _operation_observable_type = None # type if base inherits from both operation and observable\n _observable_type = None # type if base inherits from observable and not operation\n\n # pylint: disable=unused-argument\n def __new__(cls, base=None, do_queue=True, id=None):\n \"\"\"Mixes in parents based on inheritance structure of base.\n\n Though all the types will be named \"Adjoint\", their *identity* and location in memory will be different\n based on ``base``'s inheritance. 
We cache the different types in private class variables so that:\n\n >>> Adjoint(op).__class__ is Adjoint(op).__class__\n True\n >>> type(Adjoint(op)) == type(Adjoint(op))\n True\n >>> Adjoint(qml.RX(1.2, wires=0)).__class__ is Adjoint._operation_type\n True\n >>> Adjoint(qml.PauliX(0)).__class__ is Adjoint._operation_observable_type\n True\n\n \"\"\"\n\n if isinstance(base, Operation):\n if isinstance(base, Observable):\n if cls._operation_observable_type is None:\n class_bases = (AdjointOperation, Adjoint, Observable, Operation)\n cls._operation_observable_type = type(\n \"Adjoint\", class_bases, dict(cls.__dict__)\n )\n return object.__new__(cls._operation_observable_type)\n\n # not an observable\n if cls._operation_type is None:\n class_bases = (AdjointOperation, Adjoint, Operation)\n cls._operation_type = type(\"Adjoint\", class_bases, dict(cls.__dict__))\n return object.__new__(cls._operation_type)\n\n if isinstance(base, Observable):\n if cls._observable_type is None:\n class_bases = (Adjoint, Observable)\n cls._observable_type = type(\"Adjoint\", class_bases, dict(cls.__dict__))\n return object.__new__(cls._observable_type)\n\n return object.__new__(Adjoint)\n\n # pylint: disable=attribute-defined-outside-init\n def __copy__(self):\n # this method needs to be overwritten becuase the base must be copied too.\n copied_op = object.__new__(type(self))\n # copied_op must maintain inheritance structure of self\n # For example, it must keep AdjointOperation if self has it\n # this way preserves inheritance structure\n\n copied_base = self.base.__copy__()\n copied_op._hyperparameters = {\"base\": copied_base}\n for attr, value in vars(self).items():\n if attr not in {\"data\", \"base\", \"_hyperparameters\"}:\n setattr(copied_op, attr, value)\n\n return copied_op\n\n # pylint: disable=super-init-not-called\n def __init__(self, base=None, do_queue=True, id=None):\n self.hyperparameters[\"base\"] = base\n self._id = id\n self.queue_idx = None\n\n self._name = f\"Adjoint({self.base.name})\"\n\n if do_queue:\n self.queue()\n\n @property\n def base(self):\n \"\"\"The operator that is adjointed.\"\"\"\n return self.hyperparameters[\"base\"]\n\n @property\n def data(self):\n \"\"\"Trainable parameters that the operator depends on.\"\"\"\n return self.base.data\n\n @data.setter\n def data(self, new_data):\n \"\"\"Allows us to set base operation parameters.\"\"\"\n self.base.data = new_data\n\n @property\n def parameters(self):\n return self.base.parameters\n\n @property\n def num_params(self):\n return self.base.num_params\n\n @property\n def wires(self):\n return self.base.wires\n\n # pylint: disable=protected-access\n @property\n def _wires(self):\n return self.base._wires\n\n # pylint: disable=protected-access\n @_wires.setter\n def _wires(self, new_wires):\n # we should have a better way of updating wires than accessing a private attribute.\n self.base._wires = new_wires\n\n @property\n def num_wires(self):\n return self.base.num_wires\n\n @property\n def batch_size(self):\n return self.base.batch_size\n\n @property\n def ndim_params(self):\n return self.base.ndim_params\n\n @property\n def is_hermitian(self):\n return self.base.is_hermitian\n\n def queue(self, context=QueuingContext):\n context.safe_update_info(self.base, owner=self)\n context.append(self, owns=self.base)\n\n return self\n\n def label(self, decimals=None, base_label=None, cache=None):\n return self.base.label(decimals, base_label, cache=cache) + \"\u2020\"\n\n # pylint: disable=arguments-differ\n @staticmethod\n def 
compute_matrix(*params, base=None):\n base_matrix = base.compute_matrix(*params, **base.hyperparameters)\n return transpose(conj(base_matrix))\n\n def decomposition(self):\n try:\n return [self.base.adjoint()]\n except AdjointUndefinedError:\n base_decomp = self.base.decomposition()\n return [Adjoint(op) for op in reversed(base_decomp)]\n\n # pylint: disable=arguments-differ\n @staticmethod\n def compute_sparse_matrix(*params, base=None):\n base_matrix = base.compute_sparse_matrix(*params, **base.hyperparameters)\n return transpose(conj(base_matrix)).tocsr()\n\n def eigvals(self):\n # Cannot define ``compute_eigvals`` because Hermitian only defines ``eigvals``\n return conj(self.base.eigvals())\n\n def diagonalizing_gates(self):\n return self.base.diagonalizing_gates()\n\n # pylint: disable=arguments-renamed, invalid-overridden-method\n @property\n def has_matrix(self):\n return self.base.has_matrix\n\n def adjoint(self):\n return self.base.queue()\n\n @property\n def _queue_category(self):\n \"\"\"Used for sorting objects into their respective lists in `QuantumTape` objects.\n\n This property is a temporary solution that should not exist long-term and should not be\n used outside of ``QuantumTape._process_queue``.\n\n Returns ``_queue_cateogory`` for base operator.\n \"\"\"\n return self.base._queue_category # pylint: disable=protected-access\n", "path": "pennylane/ops/op_math/adjoint_class.py"}]} |
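The golden diff in the row above is a one-line change: `Adjoint.adjoint()` stops returning `self.base` directly and instead returns `self.base.queue()`, so the base operator is re-registered with the active queuing context. The toy sketch below is not PennyLane's real queuing machinery; every class and variable name in it is invented purely to illustrate the general failure mode of handing back an object without re-queuing it:

```python
# Toy model of the bug: an operator returned from adjoint() must be
# re-registered in the active queue, otherwise it silently drops out
# of the recorded tape.
class Queue:
    def __init__(self):
        self.items = []

ACTIVE = Queue()

class Op:
    def queue(self):
        ACTIVE.items.append(self)
        return self

class Adjoint(Op):
    def __init__(self, base):
        self.base = base

    def adjoint(self):
        # buggy version was: return self.base   (base never lands in ACTIVE)
        return self.base.queue()  # patched version re-queues it

base = Op()
Adjoint(base).adjoint()
print(base in ACTIVE.items)  # True with the patch, False with the buggy line
```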
gh_patches_debug_197 | rasdani/github-patches | git_diff | crytic__slither-1110 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Bug-Candidate]: Phi-node print missing 'f' in f-string
### Describe the issue:
When printing a Phi-node the string is not formatted.
There seems to be a 'f' missing ahead of the str in https://github.com/crytic/slither/blob/dev/slither/slithir/operations/phi.py#L36
### Code example to reproduce the issue:
slither tests/complex_func.sol --print slithir-ssa
### Version:
dev-branch dd91f770f61eaadc286e2af3c72fb5798e376c16
### Relevant log output:
```
Contract Increment
Function Increment.increaseBy1()
IRs:
{self.lvalue}({self.lvalue.type}) := ϕ({[str(v) for v in self._rvalues]})
Expression: i += 1
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `slither/slithir/operations/phi.py`
Content:
```
1 from slither.slithir.operations.lvalue import OperationWithLValue
2 from slither.slithir.utils.utils import is_valid_lvalue
3
4
5 class Phi(OperationWithLValue):
6 def __init__(self, left_variable, nodes):
7 # When Phi operations are created the
8 # correct indexes of the variables are not yet computed
9 # We store the nodes where the variables are written
10 # so we can update the rvalues of the Phi operation
11 # after its instantiation
12 assert is_valid_lvalue(left_variable)
13 assert isinstance(nodes, set)
14 super().__init__()
15 self._lvalue = left_variable
16 self._rvalues = []
17 self._nodes = nodes
18
19 @property
20 def read(self):
21 return self.rvalues
22
23 @property
24 def rvalues(self):
25 return self._rvalues
26
27 @rvalues.setter
28 def rvalues(self, vals):
29 self._rvalues = vals
30
31 @property
32 def nodes(self):
33 return self._nodes
34
35 def __str__(self):
36 return "{self.lvalue}({self.lvalue.type}) := \u03D5({[str(v) for v in self._rvalues]})"
37
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/slither/slithir/operations/phi.py b/slither/slithir/operations/phi.py
--- a/slither/slithir/operations/phi.py
+++ b/slither/slithir/operations/phi.py
@@ -33,4 +33,4 @@
return self._nodes
def __str__(self):
- return "{self.lvalue}({self.lvalue.type}) := \u03D5({[str(v) for v in self._rvalues]})"
+ return f"{self.lvalue}({self.lvalue.type}) := \u03D5({[str(v) for v in self._rvalues]})"
| {"golden_diff": "diff --git a/slither/slithir/operations/phi.py b/slither/slithir/operations/phi.py\n--- a/slither/slithir/operations/phi.py\n+++ b/slither/slithir/operations/phi.py\n@@ -33,4 +33,4 @@\n return self._nodes\n \n def __str__(self):\n- return \"{self.lvalue}({self.lvalue.type}) := \\u03D5({[str(v) for v in self._rvalues]})\"\n+ return f\"{self.lvalue}({self.lvalue.type}) := \\u03D5({[str(v) for v in self._rvalues]})\"\n", "issue": "[Bug-Candidate]: Phi-node print missing 'f' in f-string\n### Describe the issue:\n\nWhen printing a Phi-node the string is not formatted.\r\nThere seems to be a 'f' missing ahead of the str in https://github.com/crytic/slither/blob/dev/slither/slithir/operations/phi.py#L36\n\n### Code example to reproduce the issue:\n\nslither tests/complex_func.sol --print slithir-ssa\n\n### Version:\n\ndev-branch dd91f770f61eaadc286e2af3c72fb5798e376c16\n\n### Relevant log output:\n\n```\r\nContract Increment\r\n Function Increment.increaseBy1()\r\n IRs:\r\n {self.lvalue}({self.lvalue.type}) := \u03d5({[str(v) for v in self._rvalues]})\r\n Expression: i += 1\r\n```\n", "before_files": [{"content": "from slither.slithir.operations.lvalue import OperationWithLValue\nfrom slither.slithir.utils.utils import is_valid_lvalue\n\n\nclass Phi(OperationWithLValue):\n def __init__(self, left_variable, nodes):\n # When Phi operations are created the\n # correct indexes of the variables are not yet computed\n # We store the nodes where the variables are written\n # so we can update the rvalues of the Phi operation\n # after its instantiation\n assert is_valid_lvalue(left_variable)\n assert isinstance(nodes, set)\n super().__init__()\n self._lvalue = left_variable\n self._rvalues = []\n self._nodes = nodes\n\n @property\n def read(self):\n return self.rvalues\n\n @property\n def rvalues(self):\n return self._rvalues\n\n @rvalues.setter\n def rvalues(self, vals):\n self._rvalues = vals\n\n @property\n def nodes(self):\n return self._nodes\n\n def __str__(self):\n return \"{self.lvalue}({self.lvalue.type}) := \\u03D5({[str(v) for v in self._rvalues]})\"\n", "path": "slither/slithir/operations/phi.py"}], "after_files": [{"content": "from slither.slithir.operations.lvalue import OperationWithLValue\nfrom slither.slithir.utils.utils import is_valid_lvalue\n\n\nclass Phi(OperationWithLValue):\n def __init__(self, left_variable, nodes):\n # When Phi operations are created the\n # correct indexes of the variables are not yet computed\n # We store the nodes where the variables are written\n # so we can update the rvalues of the Phi operation\n # after its instantiation\n assert is_valid_lvalue(left_variable)\n assert isinstance(nodes, set)\n super().__init__()\n self._lvalue = left_variable\n self._rvalues = []\n self._nodes = nodes\n\n @property\n def read(self):\n return self.rvalues\n\n @property\n def rvalues(self):\n return self._rvalues\n\n @rvalues.setter\n def rvalues(self, vals):\n self._rvalues = vals\n\n @property\n def nodes(self):\n return self._nodes\n\n def __str__(self):\n return f\"{self.lvalue}({self.lvalue.type}) := \\u03D5({[str(v) for v in self._rvalues]})\"\n", "path": "slither/slithir/operations/phi.py"}]} |
gh_patches_debug_198 | rasdani/github-patches | git_diff | acl-org__acl-anthology-3109 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Reingestion Request: ROCLING (10-20-2023)
### General information about this request
- [X] I confirm that I have read the [Information for Submitters](https://aclanthology.org/info/contrib/).
- [ ] I am submitting a request for a **new venue** that does not exist in the ACL Anthology yet.
### Venue Identifier
ROCLING
### Volume Title
Proceedings of the 35th Conference on Computational Linguistics and Speech Processing (ROCLING 2023)
### Venue Name (only if you are submitting a new venue)
Conference on Computational Linguistics and Speech Processing
### Venue Website (only if you are submitting a new venue)
https://rocling2023.github.io/
### Date of Publication
2023-10-20
### Supporting Information
Dear Anthology Director,
I'm Hou-Chiang Tseng who the publication chair of the 35th annual Conference on Computational Linguistics and Speech Processing (ROCLING 2023).
The conference website: https://rocling2023.github.io/
We want to register the ROCLING 2023 to ACL Anthology. Please see following two items:
(a) the complete list of volumes: please see the attached file,
and (b) all the new material can be downloaded from the following URL:
https://drive.google.com/drive/folders/1dxt_gYlUvmuLiNETgDRg9cGpiJxVGwbD?usp=sharing
If there is any question, please let me know.
[Anthology.Volume_ROCLING.2023.xlsx](https://github.com/acl-org/acl-anthology/files/14318157/Anthology.Volume_ROCLING.2023.xlsx)
Best regards,
Dr. Hou-Chiang Tseng
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bin/volumes_from_diff.py`
Content:
```
1 #!/usr/bin/env python3
2
3 """
4 Takes a list of XML files on STDIN, and prints all the volumes
5 within each of those files. e.g.,
6
7 git diff --name-only master | ./bin/volumes_from_xml.py https://preview.aclanthology.org/BRANCH
8
9 Used to find the list of volumes to generate previews for.
10 """
11
12 import sys
13 import argparse
14 import lxml.etree as etree
15
16
17 parser = argparse.ArgumentParser()
18 parser.add_argument("url_root")
19 args = parser.parse_args()
20
21 volumes = []
22 for filepath in sys.stdin:
23 if filepath.startswith("python/") or not filepath.endswith(".xml"):
24 continue
25
26 try:
27 tree = etree.parse(filepath.rstrip())
28 except Exception:
29 continue
30
31 root = tree.getroot()
32 collection_id = root.attrib["id"]
33 for volume in root.findall("./volume"):
34 volume_name = volume.attrib["id"]
35 volume_id = f"{collection_id}-{volume_name}"
36 volumes.append(f"[{volume_id}]({args.url_root}/{volume_id})")
37
38 if len(volumes) > 50:
39 volumes = volumes[0:50] + [f"(plus {len(volumes)-50} more...)"]
40
41 print(", ".join(volumes))
42
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bin/volumes_from_diff.py b/bin/volumes_from_diff.py
--- a/bin/volumes_from_diff.py
+++ b/bin/volumes_from_diff.py
@@ -20,6 +20,7 @@
volumes = []
for filepath in sys.stdin:
+ filepath = filepath.rstrip()
if filepath.startswith("python/") or not filepath.endswith(".xml"):
continue
| {"golden_diff": "diff --git a/bin/volumes_from_diff.py b/bin/volumes_from_diff.py\n--- a/bin/volumes_from_diff.py\n+++ b/bin/volumes_from_diff.py\n@@ -20,6 +20,7 @@\n \n volumes = []\n for filepath in sys.stdin:\n+ filepath = filepath.rstrip()\n if filepath.startswith(\"python/\") or not filepath.endswith(\".xml\"):\n continue\n", "issue": "Reingestion Request: ROCLING (10-20-2023)\n### General information about this request\n\n- [X] I confirm that I have read the [Information for Submitters](https://aclanthology.org/info/contrib/).\n- [ ] I am submitting a request for a **new venue** that does not exist in the ACL Anthology yet.\n\n### Venue Identifier\n\nROCLING\n\n### Volume Title\n\nProceedings of the 35th Conference on Computational Linguistics and Speech Processing (ROCLING 2023)\n\n### Venue Name (only if you are submitting a new venue)\n\nConference on Computational Linguistics and Speech Processing\n\n### Venue Website (only if you are submitting a new venue)\n\nhttps://rocling2023.github.io/\n\n### Date of Publication\n\n2023-10-20\n\n### Supporting Information\n\nDear Anthology Director,\r\n\r\nI'm Hou-Chiang Tseng who the publication chair of the 35th annual Conference on Computational Linguistics and Speech Processing (ROCLING 2023).\r\n\r\nThe conference website: https://rocling2023.github.io/\r\n\r\nWe want to register the ROCLING 2023 to ACL Anthology. Please see following two items:\r\n(a) the complete list of volumes: please see the attached file,\r\nand (b) all the new material can be downloaded from the following URL:\r\nhttps://drive.google.com/drive/folders/1dxt_gYlUvmuLiNETgDRg9cGpiJxVGwbD?usp=sharing\r\n\r\nIf there is any question, please let me know.\r\n[Anthology.Volume_ROCLING.2023.xlsx](https://github.com/acl-org/acl-anthology/files/14318157/Anthology.Volume_ROCLING.2023.xlsx)\r\n\r\nBest regards,\r\nDr. Hou-Chiang Tseng\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"\nTakes a list of XML files on STDIN, and prints all the volumes\nwithin each of those files. e.g.,\n\n git diff --name-only master | ./bin/volumes_from_xml.py https://preview.aclanthology.org/BRANCH\n\nUsed to find the list of volumes to generate previews for.\n\"\"\"\n\nimport sys\nimport argparse\nimport lxml.etree as etree\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"url_root\")\nargs = parser.parse_args()\n\nvolumes = []\nfor filepath in sys.stdin:\n if filepath.startswith(\"python/\") or not filepath.endswith(\".xml\"):\n continue\n\n try:\n tree = etree.parse(filepath.rstrip())\n except Exception:\n continue\n\n root = tree.getroot()\n collection_id = root.attrib[\"id\"]\n for volume in root.findall(\"./volume\"):\n volume_name = volume.attrib[\"id\"]\n volume_id = f\"{collection_id}-{volume_name}\"\n volumes.append(f\"[{volume_id}]({args.url_root}/{volume_id})\")\n\nif len(volumes) > 50:\n volumes = volumes[0:50] + [f\"(plus {len(volumes)-50} more...)\"]\n\nprint(\", \".join(volumes))\n", "path": "bin/volumes_from_diff.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"\nTakes a list of XML files on STDIN, and prints all the volumes\nwithin each of those files. 
e.g.,\n\n git diff --name-only master | ./bin/volumes_from_xml.py https://preview.aclanthology.org/BRANCH\n\nUsed to find the list of volumes to generate previews for.\n\"\"\"\n\nimport sys\nimport argparse\nimport lxml.etree as etree\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument(\"url_root\")\nargs = parser.parse_args()\n\nvolumes = []\nfor filepath in sys.stdin:\n filepath = filepath.rstrip()\n if filepath.startswith(\"python/\") or not filepath.endswith(\".xml\"):\n continue\n\n try:\n tree = etree.parse(filepath.rstrip())\n except Exception:\n continue\n\n root = tree.getroot()\n collection_id = root.attrib[\"id\"]\n for volume in root.findall(\"./volume\"):\n volume_name = volume.attrib[\"id\"]\n volume_id = f\"{collection_id}-{volume_name}\"\n volumes.append(f\"[{volume_id}]({args.url_root}/{volume_id})\")\n\nif len(volumes) > 50:\n volumes = volumes[0:50] + [f\"(plus {len(volumes)-50} more...)\"]\n\nprint(\", \".join(volumes))\n", "path": "bin/volumes_from_diff.py"}]} |
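The patch above addresses the fact that lines yielded by `sys.stdin` keep their trailing newline, so the unstripped path never matches `.endswith(".xml")` and every volume is skipped. A minimal sketch with a made-up filename (not an actual Anthology file):

```python
# A line as it arrives from `for filepath in sys.stdin` - note the trailing newline.
line = "data/xml/2023.rocling.xml\n"

print(line.endswith(".xml"))           # False -> the volume would be silently skipped
print(line.rstrip().endswith(".xml"))  # True  -> stripping first fixes the check
```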
gh_patches_debug_199 | rasdani/github-patches | git_diff | bookwyrm-social__bookwyrm-3224 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Notitications was not showing followers, now it's not showing anything and the notification page shows "System error"
**Describe the bug**
The notification page is not working. When I see that there's a new notification (with a number close to the bell icon), I click on it. Then I get "System error"
<img width="672" alt="Screenshot 2024-01-05 at 10 30 20" src="https://github.com/bookwyrm-social/bookwyrm/assets/6791923/4cd46e03-6b50-4679-b8a0-61fdb50570a8">
<img width="456" alt="Screenshot 2024-01-05 at 10 30 08" src="https://github.com/bookwyrm-social/bookwyrm/assets/6791923/2130a984-9ed1-4a77-92b3-9dd63fa9c41f">
**Instance**
https://books.babb.no
**Additional context**
Before this happened, the notifications for new following requests were not showing on the list of notifications. A follow request would trigger the update on the number of new notifications, but the notification itself wasn't showing on the list of notifications
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `bookwyrm/templatetags/utilities.py`
Content:
```
1 """ template filters for really common utilities """
2 import os
3 import re
4 from uuid import uuid4
5 from urllib.parse import urlparse
6 from django import template
7 from django.utils.safestring import mark_safe
8 from django.utils.translation import gettext_lazy as _
9 from django.templatetags.static import static
10
11 from bookwyrm.models import User
12 from bookwyrm.settings import INSTANCE_ACTOR_USERNAME
13
14 register = template.Library()
15
16
17 @register.filter(name="uuid")
18 def get_uuid(identifier):
19 """for avoiding clashing ids when there are many forms"""
20 return f"{identifier}{uuid4()}"
21
22
23 @register.simple_tag(takes_context=False)
24 def join(*args):
25 """concatenate an arbitrary set of values"""
26 return "_".join(str(a) for a in args)
27
28
29 @register.filter(name="username")
30 def get_user_identifier(user):
31 """use localname for local users, username for remote"""
32 return user.localname if user.localname else user.username
33
34
35 @register.filter(name="user_from_remote_id")
36 def get_user_identifier_from_remote_id(remote_id):
37 """get the local user id from their remote id"""
38 user = User.objects.get(remote_id=remote_id)
39 return user if user else None
40
41
42 @register.filter(name="book_title")
43 def get_title(book, too_short=5):
44 """display the subtitle if the title is short"""
45 if not book:
46 return ""
47 title = book.title
48 if len(title) <= too_short and book.subtitle:
49 title = _("%(title)s: %(subtitle)s") % {
50 "title": title,
51 "subtitle": book.subtitle,
52 }
53 return title
54
55
56 @register.simple_tag(takes_context=False)
57 def comparison_bool(str1, str2, reverse=False):
58 """idk why I need to write a tag for this, it returns a bool"""
59 if reverse:
60 return str1 != str2
61 return str1 == str2
62
63
64 @register.filter(is_safe=True)
65 def truncatepath(value, arg):
66 """Truncate a path by removing all directories except the first and truncating"""
67 path = os.path.normpath(value.name)
68 path_list = path.split(os.sep)
69 try:
70 length = int(arg)
71 except ValueError: # invalid literal for int()
72 return path_list[-1] # Fail silently.
73 return f"{path_list[0]}/…{path_list[-1][-length:]}"
74
75
76 @register.simple_tag(takes_context=False)
77 def get_book_cover_thumbnail(book, size="medium", ext="jpg"):
78 """Returns a book thumbnail at the specified size and extension,
79 with fallback if needed"""
80 if size == "":
81 size = "medium"
82 try:
83 cover_thumbnail = getattr(book, f"cover_bw_book_{size}_{ext}")
84 return cover_thumbnail.url
85 except OSError:
86 return static("images/no_cover.jpg")
87
88
89 @register.filter(name="get_isni_bio")
90 def get_isni_bio(existing, author):
91 """Returns the isni bio string if an existing author has an isni listed"""
92 auth_isni = re.sub(r"\D", "", str(author.isni))
93 if len(existing) == 0:
94 return ""
95 for value in existing:
96 if hasattr(value, "bio") and auth_isni == re.sub(r"\D", "", str(value.isni)):
97 return mark_safe(f"Author of <em>{value.bio}</em>")
98
99 return ""
100
101
102 # pylint: disable=unused-argument
103 @register.filter(name="get_isni", needs_autoescape=True)
104 def get_isni(existing, author, autoescape=True):
105 """Returns the isni ID if an existing author has an ISNI listing"""
106 auth_isni = re.sub(r"\D", "", str(author.isni))
107 if len(existing) == 0:
108 return ""
109 for value in existing:
110 if hasattr(value, "isni") and auth_isni == re.sub(r"\D", "", str(value.isni)):
111 isni = value.isni
112 return mark_safe(
113 f'<input type="text" name="isni-for-{author.id}" value="{isni}" hidden>'
114 )
115 return ""
116
117
118 @register.simple_tag(takes_context=False)
119 def id_to_username(user_id):
120 """given an arbitrary remote id, return the username"""
121 if user_id:
122 url = urlparse(user_id)
123 domain = url.netloc
124 parts = url.path.split("/")
125 name = parts[-1]
126 value = f"{name}@{domain}"
127
128 return value
129
130
131 @register.filter(name="get_file_size")
132 def get_file_size(file):
133 """display the size of a file in human readable terms"""
134
135 try:
136 raw_size = os.stat(file.path).st_size
137 if raw_size < 1024:
138 return f"{raw_size} bytes"
139 if raw_size < 1024**2:
140 return f"{raw_size/1024:.2f} KB"
141 if raw_size < 1024**3:
142 return f"{raw_size/1024**2:.2f} MB"
143 return f"{raw_size/1024**3:.2f} GB"
144 except Exception: # pylint: disable=broad-except
145 return ""
146
147
148 @register.filter(name="get_user_permission")
149 def get_user_permission(user):
150 """given a user, return their permission level"""
151
152 return user.groups.first() or "User"
153
154
155 @register.filter(name="is_instance_admin")
156 def is_instance_admin(localname):
157 """Returns a boolean indicating whether the user is the instance admin account"""
158 return localname == INSTANCE_ACTOR_USERNAME
159
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/bookwyrm/templatetags/utilities.py b/bookwyrm/templatetags/utilities.py
--- a/bookwyrm/templatetags/utilities.py
+++ b/bookwyrm/templatetags/utilities.py
@@ -125,7 +125,8 @@
name = parts[-1]
value = f"{name}@{domain}"
- return value
+ return value
+ return "a new user account"
@register.filter(name="get_file_size")
| {"golden_diff": "diff --git a/bookwyrm/templatetags/utilities.py b/bookwyrm/templatetags/utilities.py\n--- a/bookwyrm/templatetags/utilities.py\n+++ b/bookwyrm/templatetags/utilities.py\n@@ -125,7 +125,8 @@\n name = parts[-1]\n value = f\"{name}@{domain}\"\n \n- return value\n+ return value\n+ return \"a new user account\"\n \n \n @register.filter(name=\"get_file_size\")\n", "issue": "Notitications was not showing followers, now it's not showing anything and the notification page shows \"System error\"\n**Describe the bug**\r\nThe notification page is not working. When I see that there's a new notification (with a number close to the bell icon), I click on it. Then I get \"System error\" \r\n<img width=\"672\" alt=\"Screenshot 2024-01-05 at 10 30 20\" src=\"https://github.com/bookwyrm-social/bookwyrm/assets/6791923/4cd46e03-6b50-4679-b8a0-61fdb50570a8\">\r\n<img width=\"456\" alt=\"Screenshot 2024-01-05 at 10 30 08\" src=\"https://github.com/bookwyrm-social/bookwyrm/assets/6791923/2130a984-9ed1-4a77-92b3-9dd63fa9c41f\">\r\n\r\n**Instance**\r\n\r\nhttps://books.babb.no\r\n\r\n**Additional context**\r\nBefore this happened, the notifications for new following requests were not showing on the list of notifications. A follow request would trigger the update on the number of new notifications, but the notification itself wasn't showing on the list of notifications\r\n\r\n\r\n\n", "before_files": [{"content": "\"\"\" template filters for really common utilities \"\"\"\nimport os\nimport re\nfrom uuid import uuid4\nfrom urllib.parse import urlparse\nfrom django import template\nfrom django.utils.safestring import mark_safe\nfrom django.utils.translation import gettext_lazy as _\nfrom django.templatetags.static import static\n\nfrom bookwyrm.models import User\nfrom bookwyrm.settings import INSTANCE_ACTOR_USERNAME\n\nregister = template.Library()\n\n\[email protected](name=\"uuid\")\ndef get_uuid(identifier):\n \"\"\"for avoiding clashing ids when there are many forms\"\"\"\n return f\"{identifier}{uuid4()}\"\n\n\[email protected]_tag(takes_context=False)\ndef join(*args):\n \"\"\"concatenate an arbitrary set of values\"\"\"\n return \"_\".join(str(a) for a in args)\n\n\[email protected](name=\"username\")\ndef get_user_identifier(user):\n \"\"\"use localname for local users, username for remote\"\"\"\n return user.localname if user.localname else user.username\n\n\[email protected](name=\"user_from_remote_id\")\ndef get_user_identifier_from_remote_id(remote_id):\n \"\"\"get the local user id from their remote id\"\"\"\n user = User.objects.get(remote_id=remote_id)\n return user if user else None\n\n\[email protected](name=\"book_title\")\ndef get_title(book, too_short=5):\n \"\"\"display the subtitle if the title is short\"\"\"\n if not book:\n return \"\"\n title = book.title\n if len(title) <= too_short and book.subtitle:\n title = _(\"%(title)s: %(subtitle)s\") % {\n \"title\": title,\n \"subtitle\": book.subtitle,\n }\n return title\n\n\[email protected]_tag(takes_context=False)\ndef comparison_bool(str1, str2, reverse=False):\n \"\"\"idk why I need to write a tag for this, it returns a bool\"\"\"\n if reverse:\n return str1 != str2\n return str1 == str2\n\n\[email protected](is_safe=True)\ndef truncatepath(value, arg):\n \"\"\"Truncate a path by removing all directories except the first and truncating\"\"\"\n path = os.path.normpath(value.name)\n path_list = path.split(os.sep)\n try:\n length = int(arg)\n except ValueError: # invalid literal for int()\n return path_list[-1] # Fail 
silently.\n return f\"{path_list[0]}/\u2026{path_list[-1][-length:]}\"\n\n\[email protected]_tag(takes_context=False)\ndef get_book_cover_thumbnail(book, size=\"medium\", ext=\"jpg\"):\n \"\"\"Returns a book thumbnail at the specified size and extension,\n with fallback if needed\"\"\"\n if size == \"\":\n size = \"medium\"\n try:\n cover_thumbnail = getattr(book, f\"cover_bw_book_{size}_{ext}\")\n return cover_thumbnail.url\n except OSError:\n return static(\"images/no_cover.jpg\")\n\n\[email protected](name=\"get_isni_bio\")\ndef get_isni_bio(existing, author):\n \"\"\"Returns the isni bio string if an existing author has an isni listed\"\"\"\n auth_isni = re.sub(r\"\\D\", \"\", str(author.isni))\n if len(existing) == 0:\n return \"\"\n for value in existing:\n if hasattr(value, \"bio\") and auth_isni == re.sub(r\"\\D\", \"\", str(value.isni)):\n return mark_safe(f\"Author of <em>{value.bio}</em>\")\n\n return \"\"\n\n\n# pylint: disable=unused-argument\[email protected](name=\"get_isni\", needs_autoescape=True)\ndef get_isni(existing, author, autoescape=True):\n \"\"\"Returns the isni ID if an existing author has an ISNI listing\"\"\"\n auth_isni = re.sub(r\"\\D\", \"\", str(author.isni))\n if len(existing) == 0:\n return \"\"\n for value in existing:\n if hasattr(value, \"isni\") and auth_isni == re.sub(r\"\\D\", \"\", str(value.isni)):\n isni = value.isni\n return mark_safe(\n f'<input type=\"text\" name=\"isni-for-{author.id}\" value=\"{isni}\" hidden>'\n )\n return \"\"\n\n\[email protected]_tag(takes_context=False)\ndef id_to_username(user_id):\n \"\"\"given an arbitrary remote id, return the username\"\"\"\n if user_id:\n url = urlparse(user_id)\n domain = url.netloc\n parts = url.path.split(\"/\")\n name = parts[-1]\n value = f\"{name}@{domain}\"\n\n return value\n\n\[email protected](name=\"get_file_size\")\ndef get_file_size(file):\n \"\"\"display the size of a file in human readable terms\"\"\"\n\n try:\n raw_size = os.stat(file.path).st_size\n if raw_size < 1024:\n return f\"{raw_size} bytes\"\n if raw_size < 1024**2:\n return f\"{raw_size/1024:.2f} KB\"\n if raw_size < 1024**3:\n return f\"{raw_size/1024**2:.2f} MB\"\n return f\"{raw_size/1024**3:.2f} GB\"\n except Exception: # pylint: disable=broad-except\n return \"\"\n\n\[email protected](name=\"get_user_permission\")\ndef get_user_permission(user):\n \"\"\"given a user, return their permission level\"\"\"\n\n return user.groups.first() or \"User\"\n\n\[email protected](name=\"is_instance_admin\")\ndef is_instance_admin(localname):\n \"\"\"Returns a boolean indicating whether the user is the instance admin account\"\"\"\n return localname == INSTANCE_ACTOR_USERNAME\n", "path": "bookwyrm/templatetags/utilities.py"}], "after_files": [{"content": "\"\"\" template filters for really common utilities \"\"\"\nimport os\nimport re\nfrom uuid import uuid4\nfrom urllib.parse import urlparse\nfrom django import template\nfrom django.utils.safestring import mark_safe\nfrom django.utils.translation import gettext_lazy as _\nfrom django.templatetags.static import static\n\nfrom bookwyrm.models import User\nfrom bookwyrm.settings import INSTANCE_ACTOR_USERNAME\n\nregister = template.Library()\n\n\[email protected](name=\"uuid\")\ndef get_uuid(identifier):\n \"\"\"for avoiding clashing ids when there are many forms\"\"\"\n return f\"{identifier}{uuid4()}\"\n\n\[email protected]_tag(takes_context=False)\ndef join(*args):\n \"\"\"concatenate an arbitrary set of values\"\"\"\n return \"_\".join(str(a) for a in args)\n\n\[email 
protected](name=\"username\")\ndef get_user_identifier(user):\n \"\"\"use localname for local users, username for remote\"\"\"\n return user.localname if user.localname else user.username\n\n\[email protected](name=\"user_from_remote_id\")\ndef get_user_identifier_from_remote_id(remote_id):\n \"\"\"get the local user id from their remote id\"\"\"\n user = User.objects.get(remote_id=remote_id)\n return user if user else None\n\n\[email protected](name=\"book_title\")\ndef get_title(book, too_short=5):\n \"\"\"display the subtitle if the title is short\"\"\"\n if not book:\n return \"\"\n title = book.title\n if len(title) <= too_short and book.subtitle:\n title = _(\"%(title)s: %(subtitle)s\") % {\n \"title\": title,\n \"subtitle\": book.subtitle,\n }\n return title\n\n\[email protected]_tag(takes_context=False)\ndef comparison_bool(str1, str2, reverse=False):\n \"\"\"idk why I need to write a tag for this, it returns a bool\"\"\"\n if reverse:\n return str1 != str2\n return str1 == str2\n\n\[email protected](is_safe=True)\ndef truncatepath(value, arg):\n \"\"\"Truncate a path by removing all directories except the first and truncating\"\"\"\n path = os.path.normpath(value.name)\n path_list = path.split(os.sep)\n try:\n length = int(arg)\n except ValueError: # invalid literal for int()\n return path_list[-1] # Fail silently.\n return f\"{path_list[0]}/\u2026{path_list[-1][-length:]}\"\n\n\[email protected]_tag(takes_context=False)\ndef get_book_cover_thumbnail(book, size=\"medium\", ext=\"jpg\"):\n \"\"\"Returns a book thumbnail at the specified size and extension,\n with fallback if needed\"\"\"\n if size == \"\":\n size = \"medium\"\n try:\n cover_thumbnail = getattr(book, f\"cover_bw_book_{size}_{ext}\")\n return cover_thumbnail.url\n except OSError:\n return static(\"images/no_cover.jpg\")\n\n\[email protected](name=\"get_isni_bio\")\ndef get_isni_bio(existing, author):\n \"\"\"Returns the isni bio string if an existing author has an isni listed\"\"\"\n auth_isni = re.sub(r\"\\D\", \"\", str(author.isni))\n if len(existing) == 0:\n return \"\"\n for value in existing:\n if hasattr(value, \"bio\") and auth_isni == re.sub(r\"\\D\", \"\", str(value.isni)):\n return mark_safe(f\"Author of <em>{value.bio}</em>\")\n\n return \"\"\n\n\n# pylint: disable=unused-argument\[email protected](name=\"get_isni\", needs_autoescape=True)\ndef get_isni(existing, author, autoescape=True):\n \"\"\"Returns the isni ID if an existing author has an ISNI listing\"\"\"\n auth_isni = re.sub(r\"\\D\", \"\", str(author.isni))\n if len(existing) == 0:\n return \"\"\n for value in existing:\n if hasattr(value, \"isni\") and auth_isni == re.sub(r\"\\D\", \"\", str(value.isni)):\n isni = value.isni\n return mark_safe(\n f'<input type=\"text\" name=\"isni-for-{author.id}\" value=\"{isni}\" hidden>'\n )\n return \"\"\n\n\[email protected]_tag(takes_context=False)\ndef id_to_username(user_id):\n \"\"\"given an arbitrary remote id, return the username\"\"\"\n if user_id:\n url = urlparse(user_id)\n domain = url.netloc\n parts = url.path.split(\"/\")\n name = parts[-1]\n value = f\"{name}@{domain}\"\n\n return value\n return \"a new user account\"\n\n\[email protected](name=\"get_file_size\")\ndef get_file_size(file):\n \"\"\"display the size of a file in human readable terms\"\"\"\n\n try:\n raw_size = os.stat(file.path).st_size\n if raw_size < 1024:\n return f\"{raw_size} bytes\"\n if raw_size < 1024**2:\n return f\"{raw_size/1024:.2f} KB\"\n if raw_size < 1024**3:\n return f\"{raw_size/1024**2:.2f} MB\"\n return 
f\"{raw_size/1024**3:.2f} GB\"\n except Exception: # pylint: disable=broad-except\n return \"\"\n\n\[email protected](name=\"get_user_permission\")\ndef get_user_permission(user):\n \"\"\"given a user, return their permission level\"\"\"\n\n return user.groups.first() or \"User\"\n\n\[email protected](name=\"is_instance_admin\")\ndef is_instance_admin(localname):\n \"\"\"Returns a boolean indicating whether the user is the instance admin account\"\"\"\n return localname == INSTANCE_ACTOR_USERNAME\n", "path": "bookwyrm/templatetags/utilities.py"}]} |