problem_id (stringlengths 18–21) | source (stringclasses 1 value) | task_type (stringclasses 1 value) | in_source_id (stringlengths 13–54) | prompt (stringlengths 1.28k–64.2k) | golden_diff (stringlengths 166–811) | verification_info (stringlengths 604–118k)
---|---|---|---|---|---|---|
gh_patches_debug_1600 | rasdani/github-patches | git_diff | searx__searx-2132 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Chromium-based browsers (Android) don't detect searx
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `searx/webapp.py`
Content:
```
1 #!/usr/bin/env python
2
3 '''
4 searx is free software: you can redistribute it and/or modify
5 it under the terms of the GNU Affero General Public License as published by
6 the Free Software Foundation, either version 3 of the License, or
7 (at your option) any later version.
8
9 searx is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU Affero General Public License for more details.
13
14 You should have received a copy of the GNU Affero General Public License
15 along with searx. If not, see < http://www.gnu.org/licenses/ >.
16
17 (C) 2013- by Adam Tauber, <[email protected]>
18 '''
19
20 if __name__ == '__main__':
21 from sys import path
22 from os.path import realpath, dirname
23 path.append(realpath(dirname(realpath(__file__)) + '/../'))
24
25 import hashlib
26 import hmac
27 import json
28 import os
29 import sys
30
31 import requests
32
33 from searx import logger
34 logger = logger.getChild('webapp')
35
36 try:
37 from pygments import highlight
38 from pygments.lexers import get_lexer_by_name
39 from pygments.formatters import HtmlFormatter
40 except:
41 logger.critical("cannot import dependency: pygments")
42 from sys import exit
43 exit(1)
44 try:
45 from cgi import escape
46 except:
47 from html import escape
48 from six import next
49 from datetime import datetime, timedelta
50 from time import time
51 from werkzeug.middleware.proxy_fix import ProxyFix
52 from flask import (
53 Flask, request, render_template, url_for, Response, make_response,
54 redirect, send_from_directory
55 )
56 from babel.support import Translations
57 import flask_babel
58 from flask_babel import Babel, gettext, format_date, format_decimal
59 from flask.ctx import has_request_context
60 from flask.json import jsonify
61 from searx import brand, static_path
62 from searx import settings, searx_dir, searx_debug
63 from searx.exceptions import SearxParameterException
64 from searx.engines import (
65 categories, engines, engine_shortcuts, get_engines_stats, initialize_engines
66 )
67 from searx.utils import (
68 UnicodeWriter, highlight_content, html_to_text, get_resources_directory,
69 get_static_files, get_result_templates, get_themes, gen_useragent,
70 dict_subset, prettify_url, match_language
71 )
72 from searx.version import VERSION_STRING
73 from searx.languages import language_codes as languages
74 from searx.search import SearchWithPlugins, get_search_query_from_webapp
75 from searx.query import RawTextQuery
76 from searx.autocomplete import searx_bang, backends as autocomplete_backends
77 from searx.plugins import plugins
78 from searx.plugins.oa_doi_rewrite import get_doi_resolver
79 from searx.preferences import Preferences, ValidationException, LANGUAGE_CODES
80 from searx.answerers import answerers
81 from searx.url_utils import urlencode, urlparse, urljoin
82 from searx.utils import new_hmac
83
84 # check if the pyopenssl package is installed.
85 # It is needed for SSL connection without trouble, see #298
86 try:
87 import OpenSSL.SSL # NOQA
88 except ImportError:
89 logger.critical("The pyopenssl package has to be installed.\n"
90 "Some HTTPS connections will fail")
91
92 try:
93 from cStringIO import StringIO
94 except:
95 from io import StringIO
96
97
98 if sys.version_info[0] == 3:
99 unicode = str
100 PY3 = True
101 else:
102 logger.warning('\033[1;31m Python2 is no longer supported\033[0m')
103 exit(1)
104
105 # serve pages with HTTP/1.1
106 from werkzeug.serving import WSGIRequestHandler
107 WSGIRequestHandler.protocol_version = "HTTP/{}".format(settings['server'].get('http_protocol_version', '1.0'))
108
109 # about static
110 static_path = get_resources_directory(searx_dir, 'static', settings['ui']['static_path'])
111 logger.debug('static directory is %s', static_path)
112 static_files = get_static_files(static_path)
113
114 # about templates
115 default_theme = settings['ui']['default_theme']
116 templates_path = get_resources_directory(searx_dir, 'templates', settings['ui']['templates_path'])
117 logger.debug('templates directory is %s', templates_path)
118 themes = get_themes(templates_path)
119 result_templates = get_result_templates(templates_path)
120 global_favicons = []
121 for indice, theme in enumerate(themes):
122 global_favicons.append([])
123 theme_img_path = os.path.join(static_path, 'themes', theme, 'img', 'icons')
124 for (dirpath, dirnames, filenames) in os.walk(theme_img_path):
125 global_favicons[indice].extend(filenames)
126
127 # Flask app
128 app = Flask(
129 __name__,
130 static_folder=static_path,
131 template_folder=templates_path
132 )
133
134 app.jinja_env.trim_blocks = True
135 app.jinja_env.lstrip_blocks = True
136 app.jinja_env.add_extension('jinja2.ext.loopcontrols')
137 app.secret_key = settings['server']['secret_key']
138
139 if not searx_debug \
140 or os.environ.get("WERKZEUG_RUN_MAIN") == "true" \
141 or os.environ.get('UWSGI_ORIGINAL_PROC_NAME') is not None:
142 initialize_engines(settings['engines'])
143
144 babel = Babel(app)
145
146 rtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'fa_IR', 'glk', 'he',
147 'ku', 'mzn', 'pnb', 'ps', 'sd', 'ug', 'ur', 'yi']
148
149 # used when translating category names
150 _category_names = (gettext('files'),
151 gettext('general'),
152 gettext('music'),
153 gettext('social media'),
154 gettext('images'),
155 gettext('videos'),
156 gettext('it'),
157 gettext('news'),
158 gettext('map'),
159 gettext('science'))
160
161 outgoing_proxies = settings['outgoing'].get('proxies') or None
162
163 _flask_babel_get_translations = flask_babel.get_translations
164
165
166 # monkey patch for flask_babel.get_translations
167 def _get_translations():
168 if has_request_context() and request.form.get('use-translation') == 'oc':
169 babel_ext = flask_babel.current_app.extensions['babel']
170 return Translations.load(next(babel_ext.translation_directories), 'oc')
171
172 return _flask_babel_get_translations()
173
174
175 flask_babel.get_translations = _get_translations
176
177
178 def _get_browser_language(request, lang_list):
179 for lang in request.headers.get("Accept-Language", "en").split(","):
180 if ';' in lang:
181 lang = lang.split(';')[0]
182 locale = match_language(lang, lang_list, fallback=None)
183 if locale is not None:
184 return locale
185 return settings['search']['default_lang'] or 'en'
186
187
188 @babel.localeselector
189 def get_locale():
190 locale = _get_browser_language(request, settings['locales'].keys())
191
192 logger.debug("default locale from browser info is `%s`", locale)
193
194 if request.preferences.get_value('locale') != '':
195 locale = request.preferences.get_value('locale')
196
197 if 'locale' in request.form\
198 and request.form['locale'] in settings['locales']:
199 locale = request.form['locale']
200
201 if locale == 'zh_TW':
202 locale = 'zh_Hant_TW'
203
204 if locale == 'oc':
205 request.form['use-translation'] = 'oc'
206 locale = 'fr_FR'
207
208 logger.debug("selected locale is `%s`", locale)
209
210 return locale
211
212
213 # code-highlighter
214 @app.template_filter('code_highlighter')
215 def code_highlighter(codelines, language=None):
216 if not language:
217 language = 'text'
218
219 try:
220 # find lexer by programing language
221 lexer = get_lexer_by_name(language, stripall=True)
222 except:
223 # if lexer is not found, using default one
224 logger.debug('highlighter cannot find lexer for {0}'.format(language))
225 lexer = get_lexer_by_name('text', stripall=True)
226
227 html_code = ''
228 tmp_code = ''
229 last_line = None
230
231 # parse lines
232 for line, code in codelines:
233 if not last_line:
234 line_code_start = line
235
236 # new codeblock is detected
237 if last_line is not None and\
238 last_line + 1 != line:
239
240 # highlight last codepart
241 formatter = HtmlFormatter(linenos='inline',
242 linenostart=line_code_start)
243 html_code = html_code + highlight(tmp_code, lexer, formatter)
244
245 # reset conditions for next codepart
246 tmp_code = ''
247 line_code_start = line
248
249 # add codepart
250 tmp_code += code + '\n'
251
252 # update line
253 last_line = line
254
255 # highlight last codepart
256 formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start)
257 html_code = html_code + highlight(tmp_code, lexer, formatter)
258
259 return html_code
260
261
262 # Extract domain from url
263 @app.template_filter('extract_domain')
264 def extract_domain(url):
265 return urlparse(url)[1]
266
267
268 def get_base_url():
269 if settings['server']['base_url']:
270 hostname = settings['server']['base_url']
271 else:
272 scheme = 'http'
273 if request.is_secure:
274 scheme = 'https'
275 hostname = url_for('index', _external=True, _scheme=scheme)
276 return hostname
277
278
279 def get_current_theme_name(override=None):
280 """Returns theme name.
281
282 Checks in this order:
283 1. override
284 2. cookies
285 3. settings"""
286
287 if override and (override in themes or override == '__common__'):
288 return override
289 theme_name = request.args.get('theme', request.preferences.get_value('theme'))
290 if theme_name not in themes:
291 theme_name = default_theme
292 return theme_name
293
294
295 def get_result_template(theme, template_name):
296 themed_path = theme + '/result_templates/' + template_name
297 if themed_path in result_templates:
298 return themed_path
299 return 'result_templates/' + template_name
300
301
302 def url_for_theme(endpoint, override_theme=None, **values):
303 if endpoint == 'static' and values.get('filename'):
304 theme_name = get_current_theme_name(override=override_theme)
305 filename_with_theme = "themes/{}/{}".format(theme_name, values['filename'])
306 if filename_with_theme in static_files:
307 values['filename'] = filename_with_theme
308 return url_for(endpoint, **values)
309
310
311 def proxify(url):
312 if url.startswith('//'):
313 url = 'https:' + url
314
315 if not settings.get('result_proxy'):
316 return url
317
318 url_params = dict(mortyurl=url.encode('utf-8'))
319
320 if settings['result_proxy'].get('key'):
321 url_params['mortyhash'] = hmac.new(settings['result_proxy']['key'],
322 url.encode('utf-8'),
323 hashlib.sha256).hexdigest()
324
325 return '{0}?{1}'.format(settings['result_proxy']['url'],
326 urlencode(url_params))
327
328
329 def image_proxify(url):
330
331 if url.startswith('//'):
332 url = 'https:' + url
333
334 if not request.preferences.get_value('image_proxy'):
335 return url
336
337 if url.startswith('data:image/'):
338 # 50 is an arbitrary number to get only the beginning of the image.
339 partial_base64 = url[len('data:image/'):50].split(';')
340 if len(partial_base64) == 2 \
341 and partial_base64[0] in ['gif', 'png', 'jpeg', 'pjpeg', 'webp', 'tiff', 'bmp']\
342 and partial_base64[1].startswith('base64,'):
343 return url
344 else:
345 return None
346
347 if settings.get('result_proxy'):
348 return proxify(url)
349
350 h = new_hmac(settings['server']['secret_key'], url.encode('utf-8'))
351
352 return '{0}?{1}'.format(url_for('image_proxy'),
353 urlencode(dict(url=url.encode('utf-8'), h=h)))
354
355
356 def render(template_name, override_theme=None, **kwargs):
357 disabled_engines = request.preferences.engines.get_disabled()
358
359 enabled_categories = set(category for engine_name in engines
360 for category in engines[engine_name].categories
361 if (engine_name, category) not in disabled_engines)
362
363 if 'categories' not in kwargs:
364 kwargs['categories'] = [x for x in
365 _get_ordered_categories()
366 if x in enabled_categories]
367
368 if 'all_categories' not in kwargs:
369 kwargs['all_categories'] = _get_ordered_categories()
370
371 if 'selected_categories' not in kwargs:
372 kwargs['selected_categories'] = []
373 for arg in request.args:
374 if arg.startswith('category_'):
375 c = arg.split('_', 1)[1]
376 if c in categories:
377 kwargs['selected_categories'].append(c)
378
379 if not kwargs['selected_categories']:
380 cookie_categories = request.preferences.get_value('categories')
381 for ccateg in cookie_categories:
382 kwargs['selected_categories'].append(ccateg)
383
384 if not kwargs['selected_categories']:
385 kwargs['selected_categories'] = ['general']
386
387 if 'autocomplete' not in kwargs:
388 kwargs['autocomplete'] = request.preferences.get_value('autocomplete')
389
390 locale = request.preferences.get_value('locale')
391
392 if locale in rtl_locales and 'rtl' not in kwargs:
393 kwargs['rtl'] = True
394
395 kwargs['searx_version'] = VERSION_STRING
396
397 kwargs['method'] = request.preferences.get_value('method')
398
399 kwargs['safesearch'] = str(request.preferences.get_value('safesearch'))
400
401 kwargs['language_codes'] = languages
402 if 'current_language' not in kwargs:
403 kwargs['current_language'] = match_language(request.preferences.get_value('language'),
404 LANGUAGE_CODES)
405
406 # override url_for function in templates
407 kwargs['url_for'] = url_for_theme
408
409 kwargs['image_proxify'] = image_proxify
410
411 kwargs['proxify'] = proxify if settings.get('result_proxy', {}).get('url') else None
412
413 kwargs['get_result_template'] = get_result_template
414
415 kwargs['theme'] = get_current_theme_name(override=override_theme)
416
417 kwargs['template_name'] = template_name
418
419 kwargs['cookies'] = request.cookies
420
421 kwargs['errors'] = request.errors
422
423 kwargs['instance_name'] = settings['general']['instance_name']
424
425 kwargs['results_on_new_tab'] = request.preferences.get_value('results_on_new_tab')
426
427 kwargs['unicode'] = unicode
428
429 kwargs['preferences'] = request.preferences
430
431 kwargs['brand'] = brand
432
433 kwargs['scripts'] = set()
434 kwargs['endpoint'] = 'results' if 'q' in kwargs else request.endpoint
435 for plugin in request.user_plugins:
436 for script in plugin.js_dependencies:
437 kwargs['scripts'].add(script)
438
439 kwargs['styles'] = set()
440 for plugin in request.user_plugins:
441 for css in plugin.css_dependencies:
442 kwargs['styles'].add(css)
443
444 return render_template(
445 '{}/{}'.format(kwargs['theme'], template_name), **kwargs)
446
447
448 def _get_ordered_categories():
449 ordered_categories = []
450 if 'categories_order' not in settings['ui']:
451 ordered_categories = ['general']
452 ordered_categories.extend(x for x in sorted(categories.keys()) if x != 'general')
453 return ordered_categories
454 ordered_categories = settings['ui']['categories_order']
455 ordered_categories.extend(x for x in sorted(categories.keys()) if x not in ordered_categories)
456 return ordered_categories
457
458
459 @app.before_request
460 def pre_request():
461 request.start_time = time()
462 request.timings = []
463 request.errors = []
464
465 preferences = Preferences(themes, list(categories.keys()), engines, plugins)
466 request.preferences = preferences
467 try:
468 preferences.parse_dict(request.cookies)
469 except:
470 request.errors.append(gettext('Invalid settings, please edit your preferences'))
471
472 # merge GET, POST vars
473 # request.form
474 request.form = dict(request.form.items())
475 for k, v in request.args.items():
476 if k not in request.form:
477 request.form[k] = v
478
479 if request.form.get('preferences'):
480 preferences.parse_encoded_data(request.form['preferences'])
481 else:
482 try:
483 preferences.parse_dict(request.form)
484 except Exception as e:
485 logger.exception('invalid settings')
486 request.errors.append(gettext('Invalid settings'))
487
488 # init search language and locale
489 if not preferences.get_value("language"):
490 preferences.parse_dict({"language": _get_browser_language(request, LANGUAGE_CODES)})
491 if not preferences.get_value("locale"):
492 preferences.parse_dict({"locale": get_locale()})
493
494 # request.user_plugins
495 request.user_plugins = []
496 allowed_plugins = preferences.plugins.get_enabled()
497 disabled_plugins = preferences.plugins.get_disabled()
498 for plugin in plugins:
499 if ((plugin.default_on and plugin.id not in disabled_plugins)
500 or plugin.id in allowed_plugins):
501 request.user_plugins.append(plugin)
502
503
504 @app.after_request
505 def post_request(response):
506 total_time = time() - request.start_time
507 timings_all = ['total;dur=' + str(round(total_time * 1000, 3))]
508 if len(request.timings) > 0:
509 timings = sorted(request.timings, key=lambda v: v['total'])
510 timings_total = ['total_' + str(i) + '_' + v['engine'] +
511 ';dur=' + str(round(v['total'] * 1000, 3)) for i, v in enumerate(timings)]
512 timings_load = ['load_' + str(i) + '_' + v['engine'] +
513 ';dur=' + str(round(v['load'] * 1000, 3)) for i, v in enumerate(timings)]
514 timings_all = timings_all + timings_total + timings_load
515 response.headers.add('Server-Timing', ', '.join(timings_all))
516 return response
517
518
519 def index_error(output_format, error_message):
520 if output_format == 'json':
521 return Response(json.dumps({'error': error_message}),
522 mimetype='application/json')
523 elif output_format == 'csv':
524 response = Response('', mimetype='application/csv')
525 cont_disp = 'attachment;Filename=searx.csv'
526 response.headers.add('Content-Disposition', cont_disp)
527 return response
528 elif output_format == 'rss':
529 response_rss = render(
530 'opensearch_response_rss.xml',
531 results=[],
532 q=request.form['q'] if 'q' in request.form else '',
533 number_of_results=0,
534 base_url=get_base_url(),
535 error_message=error_message,
536 override_theme='__common__',
537 )
538 return Response(response_rss, mimetype='text/xml')
539 else:
540 # html
541 request.errors.append(gettext('search error'))
542 return render(
543 'index.html',
544 )
545
546
547 @app.route('/search', methods=['GET', 'POST'])
548 @app.route('/', methods=['GET', 'POST'])
549 def index():
550 """Render index page.
551
552 Supported outputs: html, json, csv, rss.
553 """
554
555 # output_format
556 output_format = request.form.get('format', 'html')
557 if output_format not in ['html', 'csv', 'json', 'rss']:
558 output_format = 'html'
559
560 # check if there is query
561 if request.form.get('q') is None:
562 if output_format == 'html':
563 return render(
564 'index.html',
565 )
566 else:
567 return index_error(output_format, 'No query'), 400
568
569 # search
570 search_query = None
571 raw_text_query = None
572 result_container = None
573 try:
574 search_query, raw_text_query = get_search_query_from_webapp(request.preferences, request.form)
575 # search = Search(search_query) # without plugins
576 search = SearchWithPlugins(search_query, request.user_plugins, request)
577
578 result_container = search.search()
579
580 except Exception as e:
581 # log exception
582 logger.exception('search error')
583
584 # is it an invalid input parameter or something else ?
585 if (issubclass(e.__class__, SearxParameterException)):
586 return index_error(output_format, e.message), 400
587 else:
588 return index_error(output_format, gettext('search error')), 500
589
590 # results
591 results = result_container.get_ordered_results()
592 number_of_results = result_container.results_number()
593 if number_of_results < result_container.results_length():
594 number_of_results = 0
595
596 # checkin for a external bang
597 if result_container.redirect_url:
598 return redirect(result_container.redirect_url)
599
600 # UI
601 advanced_search = request.form.get('advanced_search', None)
602
603 # Server-Timing header
604 request.timings = result_container.get_timings()
605
606 # output
607 for result in results:
608 if output_format == 'html':
609 if 'content' in result and result['content']:
610 result['content'] = highlight_content(escape(result['content'][:1024]), search_query.query)
611 if 'title' in result and result['title']:
612 result['title'] = highlight_content(escape(result['title'] or u''), search_query.query)
613 else:
614 if result.get('content'):
615 result['content'] = html_to_text(result['content']).strip()
616 # removing html content and whitespace duplications
617 result['title'] = ' '.join(html_to_text(result['title']).strip().split())
618
619 if 'url' in result:
620 result['pretty_url'] = prettify_url(result['url'])
621
622 # TODO, check if timezone is calculated right
623 if 'publishedDate' in result:
624 try: # test if publishedDate >= 1900 (datetime module bug)
625 result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')
626 except ValueError:
627 result['publishedDate'] = None
628 else:
629 if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):
630 timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)
631 minutes = int((timedifference.seconds / 60) % 60)
632 hours = int(timedifference.seconds / 60 / 60)
633 if hours == 0:
634 result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)
635 else:
636 result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes) # noqa
637 else:
638 result['publishedDate'] = format_date(result['publishedDate'])
639
640 if output_format == 'json':
641 return Response(json.dumps({'query': search_query.query.decode('utf-8'),
642 'number_of_results': number_of_results,
643 'results': results,
644 'answers': list(result_container.answers),
645 'corrections': list(result_container.corrections),
646 'infoboxes': result_container.infoboxes,
647 'suggestions': list(result_container.suggestions),
648 'unresponsive_engines': __get_translated_errors(result_container.unresponsive_engines)}, # noqa
649 default=lambda item: list(item) if isinstance(item, set) else item),
650 mimetype='application/json')
651 elif output_format == 'csv':
652 csv = UnicodeWriter(StringIO())
653 keys = ('title', 'url', 'content', 'host', 'engine', 'score', 'type')
654 csv.writerow(keys)
655 for row in results:
656 row['host'] = row['parsed_url'].netloc
657 row['type'] = 'result'
658 csv.writerow([row.get(key, '') for key in keys])
659 for a in result_container.answers:
660 row = {'title': a, 'type': 'answer'}
661 csv.writerow([row.get(key, '') for key in keys])
662 for a in result_container.suggestions:
663 row = {'title': a, 'type': 'suggestion'}
664 csv.writerow([row.get(key, '') for key in keys])
665 for a in result_container.corrections:
666 row = {'title': a, 'type': 'correction'}
667 csv.writerow([row.get(key, '') for key in keys])
668 csv.stream.seek(0)
669 response = Response(csv.stream.read(), mimetype='application/csv')
670 cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query.decode('utf-8'))
671 response.headers.add('Content-Disposition', cont_disp)
672 return response
673
674 elif output_format == 'rss':
675 response_rss = render(
676 'opensearch_response_rss.xml',
677 results=results,
678 answers=result_container.answers,
679 corrections=result_container.corrections,
680 suggestions=result_container.suggestions,
681 q=request.form['q'],
682 number_of_results=number_of_results,
683 base_url=get_base_url(),
684 override_theme='__common__',
685 )
686 return Response(response_rss, mimetype='text/xml')
687
688 # HTML output format
689
690 # suggestions: use RawTextQuery to get the suggestion URLs with the same bang
691 suggestion_urls = list(map(lambda suggestion: {
692 'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(),
693 'title': suggestion
694 },
695 result_container.suggestions))
696
697 correction_urls = list(map(lambda correction: {
698 'url': raw_text_query.changeSearchQuery(correction).getFullQuery(),
699 'title': correction
700 },
701 result_container.corrections))
702 #
703 return render(
704 'results.html',
705 results=results,
706 q=request.form['q'],
707 selected_categories=search_query.categories,
708 pageno=search_query.pageno,
709 time_range=search_query.time_range,
710 number_of_results=format_decimal(number_of_results),
711 advanced_search=advanced_search,
712 suggestions=suggestion_urls,
713 answers=result_container.answers,
714 corrections=correction_urls,
715 infoboxes=result_container.infoboxes,
716 paging=result_container.paging,
717 unresponsive_engines=__get_translated_errors(result_container.unresponsive_engines),
718 current_language=match_language(search_query.lang,
719 LANGUAGE_CODES,
720 fallback=request.preferences.get_value("language")),
721 base_url=get_base_url(),
722 theme=get_current_theme_name(),
723 favicons=global_favicons[themes.index(get_current_theme_name())],
724 timeout_limit=request.form.get('timeout_limit', None)
725 )
726
727
728 def __get_translated_errors(unresponsive_engines):
729 translated_errors = []
730 for unresponsive_engine in unresponsive_engines:
731 error_msg = gettext(unresponsive_engine[1])
732 if unresponsive_engine[2]:
733 error_msg = "{} {}".format(error_msg, unresponsive_engine[2])
734 translated_errors.append((unresponsive_engine[0], error_msg))
735 return translated_errors
736
737
738 @app.route('/about', methods=['GET'])
739 def about():
740 """Render about page"""
741 return render(
742 'about.html',
743 )
744
745
746 @app.route('/autocompleter', methods=['GET', 'POST'])
747 def autocompleter():
748 """Return autocompleter results"""
749
750 # set blocked engines
751 disabled_engines = request.preferences.engines.get_disabled()
752
753 # parse query
754 if PY3:
755 raw_text_query = RawTextQuery(request.form.get('q', b''), disabled_engines)
756 else:
757 raw_text_query = RawTextQuery(request.form.get('q', u'').encode('utf-8'), disabled_engines)
758 raw_text_query.parse_query()
759
760 # check if search query is set
761 if not raw_text_query.getSearchQuery():
762 return '', 400
763
764 # run autocompleter
765 completer = autocomplete_backends.get(request.preferences.get_value('autocomplete'))
766
767 # parse searx specific autocompleter results like !bang
768 raw_results = searx_bang(raw_text_query)
769
770 # normal autocompletion results only appear if no inner results returned
771 # and there is a query part besides the engine and language bangs
772 if len(raw_results) == 0 and completer and (len(raw_text_query.query_parts) > 1 or
773 (len(raw_text_query.languages) == 0 and
774 not raw_text_query.specific)):
775 # get language from cookie
776 language = request.preferences.get_value('language')
777 if not language or language == 'all':
778 language = 'en'
779 else:
780 language = language.split('-')[0]
781 # run autocompletion
782 raw_results.extend(completer(raw_text_query.getSearchQuery(), language))
783
784 # parse results (write :language and !engine back to result string)
785 results = []
786 for result in raw_results:
787 raw_text_query.changeSearchQuery(result)
788
789 # add parsed result
790 results.append(raw_text_query.getFullQuery())
791
792 # return autocompleter results
793 if request.headers.get('X-Requested-With') == 'XMLHttpRequest':
794 return Response(json.dumps(results),
795 mimetype='application/json')
796
797 return Response(json.dumps([raw_text_query.query, results]),
798 mimetype='application/x-suggestions+json')
799
800
801 @app.route('/preferences', methods=['GET', 'POST'])
802 def preferences():
803 """Render preferences page && save user preferences"""
804
805 # save preferences
806 if request.method == 'POST':
807 resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))
808 try:
809 request.preferences.parse_form(request.form)
810 except ValidationException:
811 request.errors.append(gettext('Invalid settings, please edit your preferences'))
812 return resp
813 return request.preferences.save(resp)
814
815 # render preferences
816 image_proxy = request.preferences.get_value('image_proxy')
817 lang = request.preferences.get_value('language')
818 disabled_engines = request.preferences.engines.get_disabled()
819 allowed_plugins = request.preferences.plugins.get_enabled()
820
821 # stats for preferences page
822 stats = {}
823
824 engines_by_category = {}
825 for c in categories:
826 engines_by_category[c] = []
827 for e in categories[c]:
828 if not request.preferences.validate_token(e):
829 continue
830
831 stats[e.name] = {'time': None,
832 'warn_timeout': False,
833 'warn_time': False}
834 if e.timeout > settings['outgoing']['request_timeout']:
835 stats[e.name]['warn_timeout'] = True
836 stats[e.name]['supports_selected_language'] = _is_selected_language_supported(e, request.preferences)
837
838 engines_by_category[c].append(e)
839
840 # get first element [0], the engine time,
841 # and then the second element [1] : the time (the first one is the label)
842 for engine_stat in get_engines_stats(request.preferences)[0][1]:
843 stats[engine_stat.get('name')]['time'] = round(engine_stat.get('avg'), 3)
844 if engine_stat.get('avg') > settings['outgoing']['request_timeout']:
845 stats[engine_stat.get('name')]['warn_time'] = True
846 # end of stats
847
848 return render('preferences.html',
849 locales=settings['locales'],
850 current_locale=request.preferences.get_value("locale"),
851 image_proxy=image_proxy,
852 engines_by_category=engines_by_category,
853 stats=stats,
854 answerers=[{'info': a.self_info(), 'keywords': a.keywords} for a in answerers],
855 disabled_engines=disabled_engines,
856 autocomplete_backends=autocomplete_backends,
857 shortcuts={y: x for x, y in engine_shortcuts.items()},
858 themes=themes,
859 plugins=plugins,
860 doi_resolvers=settings['doi_resolvers'],
861 current_doi_resolver=get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')),
862 allowed_plugins=allowed_plugins,
863 theme=get_current_theme_name(),
864 preferences_url_params=request.preferences.get_as_url_params(),
865 base_url=get_base_url(),
866 preferences=True)
867
868
869 def _is_selected_language_supported(engine, preferences):
870 language = preferences.get_value('language')
871 return (language == 'all'
872 or match_language(language,
873 getattr(engine, 'supported_languages', []),
874 getattr(engine, 'language_aliases', {}), None))
875
876
877 @app.route('/image_proxy', methods=['GET'])
878 def image_proxy():
879 url = request.args.get('url').encode('utf-8')
880
881 if not url:
882 return '', 400
883
884 h = new_hmac(settings['server']['secret_key'], url)
885
886 if h != request.args.get('h'):
887 return '', 400
888
889 headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})
890 headers['User-Agent'] = gen_useragent()
891
892 resp = requests.get(url,
893 stream=True,
894 timeout=settings['outgoing']['request_timeout'],
895 headers=headers,
896 proxies=outgoing_proxies)
897
898 if resp.status_code == 304:
899 return '', resp.status_code
900
901 if resp.status_code != 200:
902 logger.debug('image-proxy: wrong response code: {0}'.format(resp.status_code))
903 if resp.status_code >= 400:
904 return '', resp.status_code
905 return '', 400
906
907 if not resp.headers.get('content-type', '').startswith('image/'):
908 logger.debug('image-proxy: wrong content-type: {0}'.format(resp.headers.get('content-type')))
909 return '', 400
910
911 img = b''
912 chunk_counter = 0
913
914 for chunk in resp.iter_content(1024 * 1024):
915 chunk_counter += 1
916 if chunk_counter > 5:
917 return '', 502 # Bad gateway - file is too big (>5M)
918 img += chunk
919
920 headers = dict_subset(resp.headers, {'Content-Length', 'Length', 'Date', 'Last-Modified', 'Expires', 'Etag'})
921
922 return Response(img, mimetype=resp.headers['content-type'], headers=headers)
923
924
925 @app.route('/stats', methods=['GET'])
926 def stats():
927 """Render engine statistics page."""
928 stats = get_engines_stats(request.preferences)
929 return render(
930 'stats.html',
931 stats=stats,
932 )
933
934
935 @app.route('/robots.txt', methods=['GET'])
936 def robots():
937 return Response("""User-agent: *
938 Allow: /
939 Allow: /about
940 Disallow: /stats
941 Disallow: /preferences
942 Disallow: /*?*q=*
943 """, mimetype='text/plain')
944
945
946 @app.route('/opensearch.xml', methods=['GET'])
947 def opensearch():
948 method = 'post'
949
950 if request.preferences.get_value('method') == 'GET':
951 method = 'get'
952
953 # chrome/chromium only supports HTTP GET....
954 if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
955 method = 'get'
956
957 ret = render('opensearch.xml',
958 opensearch_method=method,
959 host=get_base_url(),
960 urljoin=urljoin,
961 override_theme='__common__')
962
963 resp = Response(response=ret,
964 status=200,
965 mimetype="application/opensearchdescription+xml")
966 return resp
967
968
969 @app.route('/favicon.ico')
970 def favicon():
971 return send_from_directory(os.path.join(app.root_path,
972 static_path,
973 'themes',
974 get_current_theme_name(),
975 'img'),
976 'favicon.png',
977 mimetype='image/vnd.microsoft.icon')
978
979
980 @app.route('/clear_cookies')
981 def clear_cookies():
982 resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))
983 for cookie_name in request.cookies:
984 resp.delete_cookie(cookie_name)
985 return resp
986
987
988 @app.route('/config')
989 def config():
990 """Return configuration in JSON format."""
991 _engines = []
992 for name, engine in engines.items():
993 if not request.preferences.validate_token(engine):
994 continue
995
996 supported_languages = engine.supported_languages
997 if isinstance(engine.supported_languages, dict):
998 supported_languages = list(engine.supported_languages.keys())
999
1000 _engines.append({
1001 'name': name,
1002 'categories': engine.categories,
1003 'shortcut': engine.shortcut,
1004 'enabled': not engine.disabled,
1005 'paging': engine.paging,
1006 'language_support': engine.language_support,
1007 'supported_languages': supported_languages,
1008 'safesearch': engine.safesearch,
1009 'time_range_support': engine.time_range_support,
1010 'timeout': engine.timeout
1011 })
1012
1013 _plugins = []
1014 for _ in plugins:
1015 _plugins.append({'name': _.name, 'enabled': _.default_on})
1016
1017 return jsonify({
1018 'categories': list(categories.keys()),
1019 'engines': _engines,
1020 'plugins': _plugins,
1021 'instance_name': settings['general']['instance_name'],
1022 'locales': settings['locales'],
1023 'default_locale': settings['ui']['default_locale'],
1024 'autocomplete': settings['search']['autocomplete'],
1025 'safe_search': settings['search']['safe_search'],
1026 'default_theme': settings['ui']['default_theme'],
1027 'version': VERSION_STRING,
1028 'brand': {
1029 'GIT_URL': brand.GIT_URL,
1030 'DOCS_URL': brand.DOCS_URL
1031 },
1032 'doi_resolvers': [r for r in settings['doi_resolvers']],
1033 'default_doi_resolver': settings['default_doi_resolver'],
1034 })
1035
1036
1037 @app.route('/translations.js')
1038 def js_translations():
1039 return render(
1040 'translations.js.tpl',
1041 override_theme='__common__',
1042 ), {'Content-Type': 'text/javascript; charset=UTF-8'}
1043
1044
1045 @app.errorhandler(404)
1046 def page_not_found(e):
1047 return render('404.html'), 404
1048
1049
1050 def run():
1051 logger.debug('starting webserver on %s:%s', settings['server']['bind_address'], settings['server']['port'])
1052 app.run(
1053 debug=searx_debug,
1054 use_debugger=searx_debug,
1055 port=settings['server']['port'],
1056 host=settings['server']['bind_address'],
1057 threaded=True
1058 )
1059
1060
1061 class ReverseProxyPathFix(object):
1062 '''Wrap the application in this middleware and configure the
1063 front-end server to add these headers, to let you quietly bind
1064 this to a URL other than / and to an HTTP scheme that is
1065 different than what is used locally.
1066
1067 http://flask.pocoo.org/snippets/35/
1068
1069 In nginx:
1070 location /myprefix {
1071 proxy_pass http://127.0.0.1:8000;
1072 proxy_set_header Host $host;
1073 proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
1074 proxy_set_header X-Scheme $scheme;
1075 proxy_set_header X-Script-Name /myprefix;
1076 }
1077
1078 :param app: the WSGI application
1079 '''
1080
1081 def __init__(self, app):
1082 self.app = app
1083
1084 def __call__(self, environ, start_response):
1085 script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
1086 if script_name:
1087 environ['SCRIPT_NAME'] = script_name
1088 path_info = environ['PATH_INFO']
1089 if path_info.startswith(script_name):
1090 environ['PATH_INFO'] = path_info[len(script_name):]
1091
1092 scheme = environ.get('HTTP_X_SCHEME', '')
1093 if scheme:
1094 environ['wsgi.url_scheme'] = scheme
1095 return self.app(environ, start_response)
1096
1097
1098 application = app
1099 # patch app to handle non root url-s behind proxy & wsgi
1100 app.wsgi_app = ReverseProxyPathFix(ProxyFix(application.wsgi_app))
1101
1102 if __name__ == "__main__":
1103 run()
1104
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/searx/webapp.py b/searx/webapp.py
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -463,6 +463,9 @@
     request.errors = []
 
     preferences = Preferences(themes, list(categories.keys()), engines, plugins)
+    user_agent = request.headers.get('User-Agent', '').lower()
+    if 'webkit' in user_agent and 'android' in user_agent:
+        preferences.key_value_settings['method'].value = 'GET'
     request.preferences = preferences
     try:
         preferences.parse_dict(request.cookies)
| {"golden_diff": "diff --git a/searx/webapp.py b/searx/webapp.py\n--- a/searx/webapp.py\n+++ b/searx/webapp.py\n@@ -463,6 +463,9 @@\n request.errors = []\n \n preferences = Preferences(themes, list(categories.keys()), engines, plugins)\n+ user_agent = request.headers.get('User-Agent', '').lower()\n+ if 'webkit' in user_agent and 'android' in user_agent:\n+ preferences.key_value_settings['method'].value = 'GET'\n request.preferences = preferences\n try:\n preferences.parse_dict(request.cookies)\n", "issue": "Chromium-based browsers (Android) don't detect searx\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\n'''\nsearx is free software: you can redistribute it and/or modify\nit under the terms of the GNU Affero General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nsearx is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU Affero General Public License for more details.\n\nYou should have received a copy of the GNU Affero General Public License\nalong with searx. If not, see < http://www.gnu.org/licenses/ >.\n\n(C) 2013- by Adam Tauber, <[email protected]>\n'''\n\nif __name__ == '__main__':\n from sys import path\n from os.path import realpath, dirname\n path.append(realpath(dirname(realpath(__file__)) + '/../'))\n\nimport hashlib\nimport hmac\nimport json\nimport os\nimport sys\n\nimport requests\n\nfrom searx import logger\nlogger = logger.getChild('webapp')\n\ntry:\n from pygments import highlight\n from pygments.lexers import get_lexer_by_name\n from pygments.formatters import HtmlFormatter\nexcept:\n logger.critical(\"cannot import dependency: pygments\")\n from sys import exit\n exit(1)\ntry:\n from cgi import escape\nexcept:\n from html import escape\nfrom six import next\nfrom datetime import datetime, timedelta\nfrom time import time\nfrom werkzeug.middleware.proxy_fix import ProxyFix\nfrom flask import (\n Flask, request, render_template, url_for, Response, make_response,\n redirect, send_from_directory\n)\nfrom babel.support import Translations\nimport flask_babel\nfrom flask_babel import Babel, gettext, format_date, format_decimal\nfrom flask.ctx import has_request_context\nfrom flask.json import jsonify\nfrom searx import brand, static_path\nfrom searx import settings, searx_dir, searx_debug\nfrom searx.exceptions import SearxParameterException\nfrom searx.engines import (\n categories, engines, engine_shortcuts, get_engines_stats, initialize_engines\n)\nfrom searx.utils import (\n UnicodeWriter, highlight_content, html_to_text, get_resources_directory,\n get_static_files, get_result_templates, get_themes, gen_useragent,\n dict_subset, prettify_url, match_language\n)\nfrom searx.version import VERSION_STRING\nfrom searx.languages import language_codes as languages\nfrom searx.search import SearchWithPlugins, get_search_query_from_webapp\nfrom searx.query import RawTextQuery\nfrom searx.autocomplete import searx_bang, backends as autocomplete_backends\nfrom searx.plugins import plugins\nfrom searx.plugins.oa_doi_rewrite import get_doi_resolver\nfrom searx.preferences import Preferences, ValidationException, LANGUAGE_CODES\nfrom searx.answerers import answerers\nfrom searx.url_utils import urlencode, urlparse, urljoin\nfrom searx.utils import new_hmac\n\n# check if the pyopenssl package is installed.\n# It is needed for SSL connection without trouble, 
see #298\ntry:\n import OpenSSL.SSL # NOQA\nexcept ImportError:\n logger.critical(\"The pyopenssl package has to be installed.\\n\"\n \"Some HTTPS connections will fail\")\n\ntry:\n from cStringIO import StringIO\nexcept:\n from io import StringIO\n\n\nif sys.version_info[0] == 3:\n unicode = str\n PY3 = True\nelse:\n logger.warning('\\033[1;31m Python2 is no longer supported\\033[0m')\n exit(1)\n\n# serve pages with HTTP/1.1\nfrom werkzeug.serving import WSGIRequestHandler\nWSGIRequestHandler.protocol_version = \"HTTP/{}\".format(settings['server'].get('http_protocol_version', '1.0'))\n\n# about static\nstatic_path = get_resources_directory(searx_dir, 'static', settings['ui']['static_path'])\nlogger.debug('static directory is %s', static_path)\nstatic_files = get_static_files(static_path)\n\n# about templates\ndefault_theme = settings['ui']['default_theme']\ntemplates_path = get_resources_directory(searx_dir, 'templates', settings['ui']['templates_path'])\nlogger.debug('templates directory is %s', templates_path)\nthemes = get_themes(templates_path)\nresult_templates = get_result_templates(templates_path)\nglobal_favicons = []\nfor indice, theme in enumerate(themes):\n global_favicons.append([])\n theme_img_path = os.path.join(static_path, 'themes', theme, 'img', 'icons')\n for (dirpath, dirnames, filenames) in os.walk(theme_img_path):\n global_favicons[indice].extend(filenames)\n\n# Flask app\napp = Flask(\n __name__,\n static_folder=static_path,\n template_folder=templates_path\n)\n\napp.jinja_env.trim_blocks = True\napp.jinja_env.lstrip_blocks = True\napp.jinja_env.add_extension('jinja2.ext.loopcontrols')\napp.secret_key = settings['server']['secret_key']\n\nif not searx_debug \\\n or os.environ.get(\"WERKZEUG_RUN_MAIN\") == \"true\" \\\n or os.environ.get('UWSGI_ORIGINAL_PROC_NAME') is not None:\n initialize_engines(settings['engines'])\n\nbabel = Babel(app)\n\nrtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'fa_IR', 'glk', 'he',\n 'ku', 'mzn', 'pnb', 'ps', 'sd', 'ug', 'ur', 'yi']\n\n# used when translating category names\n_category_names = (gettext('files'),\n gettext('general'),\n gettext('music'),\n gettext('social media'),\n gettext('images'),\n gettext('videos'),\n gettext('it'),\n gettext('news'),\n gettext('map'),\n gettext('science'))\n\noutgoing_proxies = settings['outgoing'].get('proxies') or None\n\n_flask_babel_get_translations = flask_babel.get_translations\n\n\n# monkey patch for flask_babel.get_translations\ndef _get_translations():\n if has_request_context() and request.form.get('use-translation') == 'oc':\n babel_ext = flask_babel.current_app.extensions['babel']\n return Translations.load(next(babel_ext.translation_directories), 'oc')\n\n return _flask_babel_get_translations()\n\n\nflask_babel.get_translations = _get_translations\n\n\ndef _get_browser_language(request, lang_list):\n for lang in request.headers.get(\"Accept-Language\", \"en\").split(\",\"):\n if ';' in lang:\n lang = lang.split(';')[0]\n locale = match_language(lang, lang_list, fallback=None)\n if locale is not None:\n return locale\n return settings['search']['default_lang'] or 'en'\n\n\[email protected]\ndef get_locale():\n locale = _get_browser_language(request, settings['locales'].keys())\n\n logger.debug(\"default locale from browser info is `%s`\", locale)\n\n if request.preferences.get_value('locale') != '':\n locale = request.preferences.get_value('locale')\n\n if 'locale' in request.form\\\n and request.form['locale'] in settings['locales']:\n locale = request.form['locale']\n\n 
if locale == 'zh_TW':\n locale = 'zh_Hant_TW'\n\n if locale == 'oc':\n request.form['use-translation'] = 'oc'\n locale = 'fr_FR'\n\n logger.debug(\"selected locale is `%s`\", locale)\n\n return locale\n\n\n# code-highlighter\[email protected]_filter('code_highlighter')\ndef code_highlighter(codelines, language=None):\n if not language:\n language = 'text'\n\n try:\n # find lexer by programing language\n lexer = get_lexer_by_name(language, stripall=True)\n except:\n # if lexer is not found, using default one\n logger.debug('highlighter cannot find lexer for {0}'.format(language))\n lexer = get_lexer_by_name('text', stripall=True)\n\n html_code = ''\n tmp_code = ''\n last_line = None\n\n # parse lines\n for line, code in codelines:\n if not last_line:\n line_code_start = line\n\n # new codeblock is detected\n if last_line is not None and\\\n last_line + 1 != line:\n\n # highlight last codepart\n formatter = HtmlFormatter(linenos='inline',\n linenostart=line_code_start)\n html_code = html_code + highlight(tmp_code, lexer, formatter)\n\n # reset conditions for next codepart\n tmp_code = ''\n line_code_start = line\n\n # add codepart\n tmp_code += code + '\\n'\n\n # update line\n last_line = line\n\n # highlight last codepart\n formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start)\n html_code = html_code + highlight(tmp_code, lexer, formatter)\n\n return html_code\n\n\n# Extract domain from url\[email protected]_filter('extract_domain')\ndef extract_domain(url):\n return urlparse(url)[1]\n\n\ndef get_base_url():\n if settings['server']['base_url']:\n hostname = settings['server']['base_url']\n else:\n scheme = 'http'\n if request.is_secure:\n scheme = 'https'\n hostname = url_for('index', _external=True, _scheme=scheme)\n return hostname\n\n\ndef get_current_theme_name(override=None):\n \"\"\"Returns theme name.\n\n Checks in this order:\n 1. override\n 2. cookies\n 3. 
settings\"\"\"\n\n if override and (override in themes or override == '__common__'):\n return override\n theme_name = request.args.get('theme', request.preferences.get_value('theme'))\n if theme_name not in themes:\n theme_name = default_theme\n return theme_name\n\n\ndef get_result_template(theme, template_name):\n themed_path = theme + '/result_templates/' + template_name\n if themed_path in result_templates:\n return themed_path\n return 'result_templates/' + template_name\n\n\ndef url_for_theme(endpoint, override_theme=None, **values):\n if endpoint == 'static' and values.get('filename'):\n theme_name = get_current_theme_name(override=override_theme)\n filename_with_theme = \"themes/{}/{}\".format(theme_name, values['filename'])\n if filename_with_theme in static_files:\n values['filename'] = filename_with_theme\n return url_for(endpoint, **values)\n\n\ndef proxify(url):\n if url.startswith('//'):\n url = 'https:' + url\n\n if not settings.get('result_proxy'):\n return url\n\n url_params = dict(mortyurl=url.encode('utf-8'))\n\n if settings['result_proxy'].get('key'):\n url_params['mortyhash'] = hmac.new(settings['result_proxy']['key'],\n url.encode('utf-8'),\n hashlib.sha256).hexdigest()\n\n return '{0}?{1}'.format(settings['result_proxy']['url'],\n urlencode(url_params))\n\n\ndef image_proxify(url):\n\n if url.startswith('//'):\n url = 'https:' + url\n\n if not request.preferences.get_value('image_proxy'):\n return url\n\n if url.startswith('data:image/'):\n # 50 is an arbitrary number to get only the beginning of the image.\n partial_base64 = url[len('data:image/'):50].split(';')\n if len(partial_base64) == 2 \\\n and partial_base64[0] in ['gif', 'png', 'jpeg', 'pjpeg', 'webp', 'tiff', 'bmp']\\\n and partial_base64[1].startswith('base64,'):\n return url\n else:\n return None\n\n if settings.get('result_proxy'):\n return proxify(url)\n\n h = new_hmac(settings['server']['secret_key'], url.encode('utf-8'))\n\n return '{0}?{1}'.format(url_for('image_proxy'),\n urlencode(dict(url=url.encode('utf-8'), h=h)))\n\n\ndef render(template_name, override_theme=None, **kwargs):\n disabled_engines = request.preferences.engines.get_disabled()\n\n enabled_categories = set(category for engine_name in engines\n for category in engines[engine_name].categories\n if (engine_name, category) not in disabled_engines)\n\n if 'categories' not in kwargs:\n kwargs['categories'] = [x for x in\n _get_ordered_categories()\n if x in enabled_categories]\n\n if 'all_categories' not in kwargs:\n kwargs['all_categories'] = _get_ordered_categories()\n\n if 'selected_categories' not in kwargs:\n kwargs['selected_categories'] = []\n for arg in request.args:\n if arg.startswith('category_'):\n c = arg.split('_', 1)[1]\n if c in categories:\n kwargs['selected_categories'].append(c)\n\n if not kwargs['selected_categories']:\n cookie_categories = request.preferences.get_value('categories')\n for ccateg in cookie_categories:\n kwargs['selected_categories'].append(ccateg)\n\n if not kwargs['selected_categories']:\n kwargs['selected_categories'] = ['general']\n\n if 'autocomplete' not in kwargs:\n kwargs['autocomplete'] = request.preferences.get_value('autocomplete')\n\n locale = request.preferences.get_value('locale')\n\n if locale in rtl_locales and 'rtl' not in kwargs:\n kwargs['rtl'] = True\n\n kwargs['searx_version'] = VERSION_STRING\n\n kwargs['method'] = request.preferences.get_value('method')\n\n kwargs['safesearch'] = str(request.preferences.get_value('safesearch'))\n\n kwargs['language_codes'] = languages\n if 
'current_language' not in kwargs:\n kwargs['current_language'] = match_language(request.preferences.get_value('language'),\n LANGUAGE_CODES)\n\n # override url_for function in templates\n kwargs['url_for'] = url_for_theme\n\n kwargs['image_proxify'] = image_proxify\n\n kwargs['proxify'] = proxify if settings.get('result_proxy', {}).get('url') else None\n\n kwargs['get_result_template'] = get_result_template\n\n kwargs['theme'] = get_current_theme_name(override=override_theme)\n\n kwargs['template_name'] = template_name\n\n kwargs['cookies'] = request.cookies\n\n kwargs['errors'] = request.errors\n\n kwargs['instance_name'] = settings['general']['instance_name']\n\n kwargs['results_on_new_tab'] = request.preferences.get_value('results_on_new_tab')\n\n kwargs['unicode'] = unicode\n\n kwargs['preferences'] = request.preferences\n\n kwargs['brand'] = brand\n\n kwargs['scripts'] = set()\n kwargs['endpoint'] = 'results' if 'q' in kwargs else request.endpoint\n for plugin in request.user_plugins:\n for script in plugin.js_dependencies:\n kwargs['scripts'].add(script)\n\n kwargs['styles'] = set()\n for plugin in request.user_plugins:\n for css in plugin.css_dependencies:\n kwargs['styles'].add(css)\n\n return render_template(\n '{}/{}'.format(kwargs['theme'], template_name), **kwargs)\n\n\ndef _get_ordered_categories():\n ordered_categories = []\n if 'categories_order' not in settings['ui']:\n ordered_categories = ['general']\n ordered_categories.extend(x for x in sorted(categories.keys()) if x != 'general')\n return ordered_categories\n ordered_categories = settings['ui']['categories_order']\n ordered_categories.extend(x for x in sorted(categories.keys()) if x not in ordered_categories)\n return ordered_categories\n\n\[email protected]_request\ndef pre_request():\n request.start_time = time()\n request.timings = []\n request.errors = []\n\n preferences = Preferences(themes, list(categories.keys()), engines, plugins)\n request.preferences = preferences\n try:\n preferences.parse_dict(request.cookies)\n except:\n request.errors.append(gettext('Invalid settings, please edit your preferences'))\n\n # merge GET, POST vars\n # request.form\n request.form = dict(request.form.items())\n for k, v in request.args.items():\n if k not in request.form:\n request.form[k] = v\n\n if request.form.get('preferences'):\n preferences.parse_encoded_data(request.form['preferences'])\n else:\n try:\n preferences.parse_dict(request.form)\n except Exception as e:\n logger.exception('invalid settings')\n request.errors.append(gettext('Invalid settings'))\n\n # init search language and locale\n if not preferences.get_value(\"language\"):\n preferences.parse_dict({\"language\": _get_browser_language(request, LANGUAGE_CODES)})\n if not preferences.get_value(\"locale\"):\n preferences.parse_dict({\"locale\": get_locale()})\n\n # request.user_plugins\n request.user_plugins = []\n allowed_plugins = preferences.plugins.get_enabled()\n disabled_plugins = preferences.plugins.get_disabled()\n for plugin in plugins:\n if ((plugin.default_on and plugin.id not in disabled_plugins)\n or plugin.id in allowed_plugins):\n request.user_plugins.append(plugin)\n\n\[email protected]_request\ndef post_request(response):\n total_time = time() - request.start_time\n timings_all = ['total;dur=' + str(round(total_time * 1000, 3))]\n if len(request.timings) > 0:\n timings = sorted(request.timings, key=lambda v: v['total'])\n timings_total = ['total_' + str(i) + '_' + v['engine'] +\n ';dur=' + str(round(v['total'] * 1000, 3)) for i, v in 
enumerate(timings)]\n timings_load = ['load_' + str(i) + '_' + v['engine'] +\n ';dur=' + str(round(v['load'] * 1000, 3)) for i, v in enumerate(timings)]\n timings_all = timings_all + timings_total + timings_load\n response.headers.add('Server-Timing', ', '.join(timings_all))\n return response\n\n\ndef index_error(output_format, error_message):\n if output_format == 'json':\n return Response(json.dumps({'error': error_message}),\n mimetype='application/json')\n elif output_format == 'csv':\n response = Response('', mimetype='application/csv')\n cont_disp = 'attachment;Filename=searx.csv'\n response.headers.add('Content-Disposition', cont_disp)\n return response\n elif output_format == 'rss':\n response_rss = render(\n 'opensearch_response_rss.xml',\n results=[],\n q=request.form['q'] if 'q' in request.form else '',\n number_of_results=0,\n base_url=get_base_url(),\n error_message=error_message,\n override_theme='__common__',\n )\n return Response(response_rss, mimetype='text/xml')\n else:\n # html\n request.errors.append(gettext('search error'))\n return render(\n 'index.html',\n )\n\n\[email protected]('/search', methods=['GET', 'POST'])\[email protected]('/', methods=['GET', 'POST'])\ndef index():\n \"\"\"Render index page.\n\n Supported outputs: html, json, csv, rss.\n \"\"\"\n\n # output_format\n output_format = request.form.get('format', 'html')\n if output_format not in ['html', 'csv', 'json', 'rss']:\n output_format = 'html'\n\n # check if there is query\n if request.form.get('q') is None:\n if output_format == 'html':\n return render(\n 'index.html',\n )\n else:\n return index_error(output_format, 'No query'), 400\n\n # search\n search_query = None\n raw_text_query = None\n result_container = None\n try:\n search_query, raw_text_query = get_search_query_from_webapp(request.preferences, request.form)\n # search = Search(search_query) # without plugins\n search = SearchWithPlugins(search_query, request.user_plugins, request)\n\n result_container = search.search()\n\n except Exception as e:\n # log exception\n logger.exception('search error')\n\n # is it an invalid input parameter or something else ?\n if (issubclass(e.__class__, SearxParameterException)):\n return index_error(output_format, e.message), 400\n else:\n return index_error(output_format, gettext('search error')), 500\n\n # results\n results = result_container.get_ordered_results()\n number_of_results = result_container.results_number()\n if number_of_results < result_container.results_length():\n number_of_results = 0\n\n # checkin for a external bang\n if result_container.redirect_url:\n return redirect(result_container.redirect_url)\n\n # UI\n advanced_search = request.form.get('advanced_search', None)\n\n # Server-Timing header\n request.timings = result_container.get_timings()\n\n # output\n for result in results:\n if output_format == 'html':\n if 'content' in result and result['content']:\n result['content'] = highlight_content(escape(result['content'][:1024]), search_query.query)\n if 'title' in result and result['title']:\n result['title'] = highlight_content(escape(result['title'] or u''), search_query.query)\n else:\n if result.get('content'):\n result['content'] = html_to_text(result['content']).strip()\n # removing html content and whitespace duplications\n result['title'] = ' '.join(html_to_text(result['title']).strip().split())\n\n if 'url' in result:\n result['pretty_url'] = prettify_url(result['url'])\n\n # TODO, check if timezone is calculated right\n if 'publishedDate' in result:\n try: # test if 
publishedDate >= 1900 (datetime module bug)\n result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')\n except ValueError:\n result['publishedDate'] = None\n else:\n if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):\n timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)\n minutes = int((timedifference.seconds / 60) % 60)\n hours = int(timedifference.seconds / 60 / 60)\n if hours == 0:\n result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)\n else:\n result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes) # noqa\n else:\n result['publishedDate'] = format_date(result['publishedDate'])\n\n if output_format == 'json':\n return Response(json.dumps({'query': search_query.query.decode('utf-8'),\n 'number_of_results': number_of_results,\n 'results': results,\n 'answers': list(result_container.answers),\n 'corrections': list(result_container.corrections),\n 'infoboxes': result_container.infoboxes,\n 'suggestions': list(result_container.suggestions),\n 'unresponsive_engines': __get_translated_errors(result_container.unresponsive_engines)}, # noqa\n default=lambda item: list(item) if isinstance(item, set) else item),\n mimetype='application/json')\n elif output_format == 'csv':\n csv = UnicodeWriter(StringIO())\n keys = ('title', 'url', 'content', 'host', 'engine', 'score', 'type')\n csv.writerow(keys)\n for row in results:\n row['host'] = row['parsed_url'].netloc\n row['type'] = 'result'\n csv.writerow([row.get(key, '') for key in keys])\n for a in result_container.answers:\n row = {'title': a, 'type': 'answer'}\n csv.writerow([row.get(key, '') for key in keys])\n for a in result_container.suggestions:\n row = {'title': a, 'type': 'suggestion'}\n csv.writerow([row.get(key, '') for key in keys])\n for a in result_container.corrections:\n row = {'title': a, 'type': 'correction'}\n csv.writerow([row.get(key, '') for key in keys])\n csv.stream.seek(0)\n response = Response(csv.stream.read(), mimetype='application/csv')\n cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query.decode('utf-8'))\n response.headers.add('Content-Disposition', cont_disp)\n return response\n\n elif output_format == 'rss':\n response_rss = render(\n 'opensearch_response_rss.xml',\n results=results,\n answers=result_container.answers,\n corrections=result_container.corrections,\n suggestions=result_container.suggestions,\n q=request.form['q'],\n number_of_results=number_of_results,\n base_url=get_base_url(),\n override_theme='__common__',\n )\n return Response(response_rss, mimetype='text/xml')\n\n # HTML output format\n\n # suggestions: use RawTextQuery to get the suggestion URLs with the same bang\n suggestion_urls = list(map(lambda suggestion: {\n 'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(),\n 'title': suggestion\n },\n result_container.suggestions))\n\n correction_urls = list(map(lambda correction: {\n 'url': raw_text_query.changeSearchQuery(correction).getFullQuery(),\n 'title': correction\n },\n result_container.corrections))\n #\n return render(\n 'results.html',\n results=results,\n q=request.form['q'],\n selected_categories=search_query.categories,\n pageno=search_query.pageno,\n time_range=search_query.time_range,\n number_of_results=format_decimal(number_of_results),\n advanced_search=advanced_search,\n suggestions=suggestion_urls,\n answers=result_container.answers,\n corrections=correction_urls,\n 
infoboxes=result_container.infoboxes,\n paging=result_container.paging,\n unresponsive_engines=__get_translated_errors(result_container.unresponsive_engines),\n current_language=match_language(search_query.lang,\n LANGUAGE_CODES,\n fallback=request.preferences.get_value(\"language\")),\n base_url=get_base_url(),\n theme=get_current_theme_name(),\n favicons=global_favicons[themes.index(get_current_theme_name())],\n timeout_limit=request.form.get('timeout_limit', None)\n )\n\n\ndef __get_translated_errors(unresponsive_engines):\n translated_errors = []\n for unresponsive_engine in unresponsive_engines:\n error_msg = gettext(unresponsive_engine[1])\n if unresponsive_engine[2]:\n error_msg = \"{} {}\".format(error_msg, unresponsive_engine[2])\n translated_errors.append((unresponsive_engine[0], error_msg))\n return translated_errors\n\n\[email protected]('/about', methods=['GET'])\ndef about():\n \"\"\"Render about page\"\"\"\n return render(\n 'about.html',\n )\n\n\[email protected]('/autocompleter', methods=['GET', 'POST'])\ndef autocompleter():\n \"\"\"Return autocompleter results\"\"\"\n\n # set blocked engines\n disabled_engines = request.preferences.engines.get_disabled()\n\n # parse query\n if PY3:\n raw_text_query = RawTextQuery(request.form.get('q', b''), disabled_engines)\n else:\n raw_text_query = RawTextQuery(request.form.get('q', u'').encode('utf-8'), disabled_engines)\n raw_text_query.parse_query()\n\n # check if search query is set\n if not raw_text_query.getSearchQuery():\n return '', 400\n\n # run autocompleter\n completer = autocomplete_backends.get(request.preferences.get_value('autocomplete'))\n\n # parse searx specific autocompleter results like !bang\n raw_results = searx_bang(raw_text_query)\n\n # normal autocompletion results only appear if no inner results returned\n # and there is a query part besides the engine and language bangs\n if len(raw_results) == 0 and completer and (len(raw_text_query.query_parts) > 1 or\n (len(raw_text_query.languages) == 0 and\n not raw_text_query.specific)):\n # get language from cookie\n language = request.preferences.get_value('language')\n if not language or language == 'all':\n language = 'en'\n else:\n language = language.split('-')[0]\n # run autocompletion\n raw_results.extend(completer(raw_text_query.getSearchQuery(), language))\n\n # parse results (write :language and !engine back to result string)\n results = []\n for result in raw_results:\n raw_text_query.changeSearchQuery(result)\n\n # add parsed result\n results.append(raw_text_query.getFullQuery())\n\n # return autocompleter results\n if request.headers.get('X-Requested-With') == 'XMLHttpRequest':\n return Response(json.dumps(results),\n mimetype='application/json')\n\n return Response(json.dumps([raw_text_query.query, results]),\n mimetype='application/x-suggestions+json')\n\n\[email protected]('/preferences', methods=['GET', 'POST'])\ndef preferences():\n \"\"\"Render preferences page && save user preferences\"\"\"\n\n # save preferences\n if request.method == 'POST':\n resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))\n try:\n request.preferences.parse_form(request.form)\n except ValidationException:\n request.errors.append(gettext('Invalid settings, please edit your preferences'))\n return resp\n return request.preferences.save(resp)\n\n # render preferences\n image_proxy = request.preferences.get_value('image_proxy')\n lang = request.preferences.get_value('language')\n disabled_engines = 
request.preferences.engines.get_disabled()\n allowed_plugins = request.preferences.plugins.get_enabled()\n\n # stats for preferences page\n stats = {}\n\n engines_by_category = {}\n for c in categories:\n engines_by_category[c] = []\n for e in categories[c]:\n if not request.preferences.validate_token(e):\n continue\n\n stats[e.name] = {'time': None,\n 'warn_timeout': False,\n 'warn_time': False}\n if e.timeout > settings['outgoing']['request_timeout']:\n stats[e.name]['warn_timeout'] = True\n stats[e.name]['supports_selected_language'] = _is_selected_language_supported(e, request.preferences)\n\n engines_by_category[c].append(e)\n\n # get first element [0], the engine time,\n # and then the second element [1] : the time (the first one is the label)\n for engine_stat in get_engines_stats(request.preferences)[0][1]:\n stats[engine_stat.get('name')]['time'] = round(engine_stat.get('avg'), 3)\n if engine_stat.get('avg') > settings['outgoing']['request_timeout']:\n stats[engine_stat.get('name')]['warn_time'] = True\n # end of stats\n\n return render('preferences.html',\n locales=settings['locales'],\n current_locale=request.preferences.get_value(\"locale\"),\n image_proxy=image_proxy,\n engines_by_category=engines_by_category,\n stats=stats,\n answerers=[{'info': a.self_info(), 'keywords': a.keywords} for a in answerers],\n disabled_engines=disabled_engines,\n autocomplete_backends=autocomplete_backends,\n shortcuts={y: x for x, y in engine_shortcuts.items()},\n themes=themes,\n plugins=plugins,\n doi_resolvers=settings['doi_resolvers'],\n current_doi_resolver=get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')),\n allowed_plugins=allowed_plugins,\n theme=get_current_theme_name(),\n preferences_url_params=request.preferences.get_as_url_params(),\n base_url=get_base_url(),\n preferences=True)\n\n\ndef _is_selected_language_supported(engine, preferences):\n language = preferences.get_value('language')\n return (language == 'all'\n or match_language(language,\n getattr(engine, 'supported_languages', []),\n getattr(engine, 'language_aliases', {}), None))\n\n\[email protected]('/image_proxy', methods=['GET'])\ndef image_proxy():\n url = request.args.get('url').encode('utf-8')\n\n if not url:\n return '', 400\n\n h = new_hmac(settings['server']['secret_key'], url)\n\n if h != request.args.get('h'):\n return '', 400\n\n headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})\n headers['User-Agent'] = gen_useragent()\n\n resp = requests.get(url,\n stream=True,\n timeout=settings['outgoing']['request_timeout'],\n headers=headers,\n proxies=outgoing_proxies)\n\n if resp.status_code == 304:\n return '', resp.status_code\n\n if resp.status_code != 200:\n logger.debug('image-proxy: wrong response code: {0}'.format(resp.status_code))\n if resp.status_code >= 400:\n return '', resp.status_code\n return '', 400\n\n if not resp.headers.get('content-type', '').startswith('image/'):\n logger.debug('image-proxy: wrong content-type: {0}'.format(resp.headers.get('content-type')))\n return '', 400\n\n img = b''\n chunk_counter = 0\n\n for chunk in resp.iter_content(1024 * 1024):\n chunk_counter += 1\n if chunk_counter > 5:\n return '', 502 # Bad gateway - file is too big (>5M)\n img += chunk\n\n headers = dict_subset(resp.headers, {'Content-Length', 'Length', 'Date', 'Last-Modified', 'Expires', 'Etag'})\n\n return Response(img, mimetype=resp.headers['content-type'], headers=headers)\n\n\[email protected]('/stats', methods=['GET'])\ndef stats():\n \"\"\"Render engine 
statistics page.\"\"\"\n stats = get_engines_stats(request.preferences)\n return render(\n 'stats.html',\n stats=stats,\n )\n\n\[email protected]('/robots.txt', methods=['GET'])\ndef robots():\n return Response(\"\"\"User-agent: *\nAllow: /\nAllow: /about\nDisallow: /stats\nDisallow: /preferences\nDisallow: /*?*q=*\n\"\"\", mimetype='text/plain')\n\n\[email protected]('/opensearch.xml', methods=['GET'])\ndef opensearch():\n method = 'post'\n\n if request.preferences.get_value('method') == 'GET':\n method = 'get'\n\n # chrome/chromium only supports HTTP GET....\n if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:\n method = 'get'\n\n ret = render('opensearch.xml',\n opensearch_method=method,\n host=get_base_url(),\n urljoin=urljoin,\n override_theme='__common__')\n\n resp = Response(response=ret,\n status=200,\n mimetype=\"application/opensearchdescription+xml\")\n return resp\n\n\[email protected]('/favicon.ico')\ndef favicon():\n return send_from_directory(os.path.join(app.root_path,\n static_path,\n 'themes',\n get_current_theme_name(),\n 'img'),\n 'favicon.png',\n mimetype='image/vnd.microsoft.icon')\n\n\[email protected]('/clear_cookies')\ndef clear_cookies():\n resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))\n for cookie_name in request.cookies:\n resp.delete_cookie(cookie_name)\n return resp\n\n\[email protected]('/config')\ndef config():\n \"\"\"Return configuration in JSON format.\"\"\"\n _engines = []\n for name, engine in engines.items():\n if not request.preferences.validate_token(engine):\n continue\n\n supported_languages = engine.supported_languages\n if isinstance(engine.supported_languages, dict):\n supported_languages = list(engine.supported_languages.keys())\n\n _engines.append({\n 'name': name,\n 'categories': engine.categories,\n 'shortcut': engine.shortcut,\n 'enabled': not engine.disabled,\n 'paging': engine.paging,\n 'language_support': engine.language_support,\n 'supported_languages': supported_languages,\n 'safesearch': engine.safesearch,\n 'time_range_support': engine.time_range_support,\n 'timeout': engine.timeout\n })\n\n _plugins = []\n for _ in plugins:\n _plugins.append({'name': _.name, 'enabled': _.default_on})\n\n return jsonify({\n 'categories': list(categories.keys()),\n 'engines': _engines,\n 'plugins': _plugins,\n 'instance_name': settings['general']['instance_name'],\n 'locales': settings['locales'],\n 'default_locale': settings['ui']['default_locale'],\n 'autocomplete': settings['search']['autocomplete'],\n 'safe_search': settings['search']['safe_search'],\n 'default_theme': settings['ui']['default_theme'],\n 'version': VERSION_STRING,\n 'brand': {\n 'GIT_URL': brand.GIT_URL,\n 'DOCS_URL': brand.DOCS_URL\n },\n 'doi_resolvers': [r for r in settings['doi_resolvers']],\n 'default_doi_resolver': settings['default_doi_resolver'],\n })\n\n\[email protected]('/translations.js')\ndef js_translations():\n return render(\n 'translations.js.tpl',\n override_theme='__common__',\n ), {'Content-Type': 'text/javascript; charset=UTF-8'}\n\n\[email protected](404)\ndef page_not_found(e):\n return render('404.html'), 404\n\n\ndef run():\n logger.debug('starting webserver on %s:%s', settings['server']['bind_address'], settings['server']['port'])\n app.run(\n debug=searx_debug,\n use_debugger=searx_debug,\n port=settings['server']['port'],\n host=settings['server']['bind_address'],\n threaded=True\n )\n\n\nclass ReverseProxyPathFix(object):\n '''Wrap the application in this middleware and configure the\n 
front-end server to add these headers, to let you quietly bind\n this to a URL other than / and to an HTTP scheme that is\n different than what is used locally.\n\n http://flask.pocoo.org/snippets/35/\n\n In nginx:\n location /myprefix {\n proxy_pass http://127.0.0.1:8000;\n proxy_set_header Host $host;\n proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n proxy_set_header X-Scheme $scheme;\n proxy_set_header X-Script-Name /myprefix;\n }\n\n :param app: the WSGI application\n '''\n\n def __init__(self, app):\n self.app = app\n\n def __call__(self, environ, start_response):\n script_name = environ.get('HTTP_X_SCRIPT_NAME', '')\n if script_name:\n environ['SCRIPT_NAME'] = script_name\n path_info = environ['PATH_INFO']\n if path_info.startswith(script_name):\n environ['PATH_INFO'] = path_info[len(script_name):]\n\n scheme = environ.get('HTTP_X_SCHEME', '')\n if scheme:\n environ['wsgi.url_scheme'] = scheme\n return self.app(environ, start_response)\n\n\napplication = app\n# patch app to handle non root url-s behind proxy & wsgi\napp.wsgi_app = ReverseProxyPathFix(ProxyFix(application.wsgi_app))\n\nif __name__ == \"__main__\":\n run()\n", "path": "searx/webapp.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n'''\nsearx is free software: you can redistribute it and/or modify\nit under the terms of the GNU Affero General Public License as published by\nthe Free Software Foundation, either version 3 of the License, or\n(at your option) any later version.\n\nsearx is distributed in the hope that it will be useful,\nbut WITHOUT ANY WARRANTY; without even the implied warranty of\nMERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\nGNU Affero General Public License for more details.\n\nYou should have received a copy of the GNU Affero General Public License\nalong with searx. 
If not, see < http://www.gnu.org/licenses/ >.\n\n(C) 2013- by Adam Tauber, <[email protected]>\n'''\n\nif __name__ == '__main__':\n from sys import path\n from os.path import realpath, dirname\n path.append(realpath(dirname(realpath(__file__)) + '/../'))\n\nimport hashlib\nimport hmac\nimport json\nimport os\nimport sys\n\nimport requests\n\nfrom searx import logger\nlogger = logger.getChild('webapp')\n\ntry:\n from pygments import highlight\n from pygments.lexers import get_lexer_by_name\n from pygments.formatters import HtmlFormatter\nexcept:\n logger.critical(\"cannot import dependency: pygments\")\n from sys import exit\n exit(1)\ntry:\n from cgi import escape\nexcept:\n from html import escape\nfrom six import next\nfrom datetime import datetime, timedelta\nfrom time import time\nfrom werkzeug.middleware.proxy_fix import ProxyFix\nfrom flask import (\n Flask, request, render_template, url_for, Response, make_response,\n redirect, send_from_directory\n)\nfrom babel.support import Translations\nimport flask_babel\nfrom flask_babel import Babel, gettext, format_date, format_decimal\nfrom flask.ctx import has_request_context\nfrom flask.json import jsonify\nfrom searx import brand, static_path\nfrom searx import settings, searx_dir, searx_debug\nfrom searx.exceptions import SearxParameterException\nfrom searx.engines import (\n categories, engines, engine_shortcuts, get_engines_stats, initialize_engines\n)\nfrom searx.utils import (\n UnicodeWriter, highlight_content, html_to_text, get_resources_directory,\n get_static_files, get_result_templates, get_themes, gen_useragent,\n dict_subset, prettify_url, match_language\n)\nfrom searx.version import VERSION_STRING\nfrom searx.languages import language_codes as languages\nfrom searx.search import SearchWithPlugins, get_search_query_from_webapp\nfrom searx.query import RawTextQuery\nfrom searx.autocomplete import searx_bang, backends as autocomplete_backends\nfrom searx.plugins import plugins\nfrom searx.plugins.oa_doi_rewrite import get_doi_resolver\nfrom searx.preferences import Preferences, ValidationException, LANGUAGE_CODES\nfrom searx.answerers import answerers\nfrom searx.url_utils import urlencode, urlparse, urljoin\nfrom searx.utils import new_hmac\n\n# check if the pyopenssl package is installed.\n# It is needed for SSL connection without trouble, see #298\ntry:\n import OpenSSL.SSL # NOQA\nexcept ImportError:\n logger.critical(\"The pyopenssl package has to be installed.\\n\"\n \"Some HTTPS connections will fail\")\n\ntry:\n from cStringIO import StringIO\nexcept:\n from io import StringIO\n\n\nif sys.version_info[0] == 3:\n unicode = str\n PY3 = True\nelse:\n logger.warning('\\033[1;31m Python2 is no longer supported\\033[0m')\n exit(1)\n\n# serve pages with HTTP/1.1\nfrom werkzeug.serving import WSGIRequestHandler\nWSGIRequestHandler.protocol_version = \"HTTP/{}\".format(settings['server'].get('http_protocol_version', '1.0'))\n\n# about static\nstatic_path = get_resources_directory(searx_dir, 'static', settings['ui']['static_path'])\nlogger.debug('static directory is %s', static_path)\nstatic_files = get_static_files(static_path)\n\n# about templates\ndefault_theme = settings['ui']['default_theme']\ntemplates_path = get_resources_directory(searx_dir, 'templates', settings['ui']['templates_path'])\nlogger.debug('templates directory is %s', templates_path)\nthemes = get_themes(templates_path)\nresult_templates = get_result_templates(templates_path)\nglobal_favicons = []\nfor indice, theme in enumerate(themes):\n 
global_favicons.append([])\n theme_img_path = os.path.join(static_path, 'themes', theme, 'img', 'icons')\n for (dirpath, dirnames, filenames) in os.walk(theme_img_path):\n global_favicons[indice].extend(filenames)\n\n# Flask app\napp = Flask(\n __name__,\n static_folder=static_path,\n template_folder=templates_path\n)\n\napp.jinja_env.trim_blocks = True\napp.jinja_env.lstrip_blocks = True\napp.jinja_env.add_extension('jinja2.ext.loopcontrols')\napp.secret_key = settings['server']['secret_key']\n\nif not searx_debug \\\n or os.environ.get(\"WERKZEUG_RUN_MAIN\") == \"true\" \\\n or os.environ.get('UWSGI_ORIGINAL_PROC_NAME') is not None:\n initialize_engines(settings['engines'])\n\nbabel = Babel(app)\n\nrtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'fa_IR', 'glk', 'he',\n 'ku', 'mzn', 'pnb', 'ps', 'sd', 'ug', 'ur', 'yi']\n\n# used when translating category names\n_category_names = (gettext('files'),\n gettext('general'),\n gettext('music'),\n gettext('social media'),\n gettext('images'),\n gettext('videos'),\n gettext('it'),\n gettext('news'),\n gettext('map'),\n gettext('science'))\n\noutgoing_proxies = settings['outgoing'].get('proxies') or None\n\n_flask_babel_get_translations = flask_babel.get_translations\n\n\n# monkey patch for flask_babel.get_translations\ndef _get_translations():\n if has_request_context() and request.form.get('use-translation') == 'oc':\n babel_ext = flask_babel.current_app.extensions['babel']\n return Translations.load(next(babel_ext.translation_directories), 'oc')\n\n return _flask_babel_get_translations()\n\n\nflask_babel.get_translations = _get_translations\n\n\ndef _get_browser_language(request, lang_list):\n for lang in request.headers.get(\"Accept-Language\", \"en\").split(\",\"):\n if ';' in lang:\n lang = lang.split(';')[0]\n locale = match_language(lang, lang_list, fallback=None)\n if locale is not None:\n return locale\n return settings['search']['default_lang'] or 'en'\n\n\[email protected]\ndef get_locale():\n locale = _get_browser_language(request, settings['locales'].keys())\n\n logger.debug(\"default locale from browser info is `%s`\", locale)\n\n if request.preferences.get_value('locale') != '':\n locale = request.preferences.get_value('locale')\n\n if 'locale' in request.form\\\n and request.form['locale'] in settings['locales']:\n locale = request.form['locale']\n\n if locale == 'zh_TW':\n locale = 'zh_Hant_TW'\n\n if locale == 'oc':\n request.form['use-translation'] = 'oc'\n locale = 'fr_FR'\n\n logger.debug(\"selected locale is `%s`\", locale)\n\n return locale\n\n\n# code-highlighter\[email protected]_filter('code_highlighter')\ndef code_highlighter(codelines, language=None):\n if not language:\n language = 'text'\n\n try:\n # find lexer by programing language\n lexer = get_lexer_by_name(language, stripall=True)\n except:\n # if lexer is not found, using default one\n logger.debug('highlighter cannot find lexer for {0}'.format(language))\n lexer = get_lexer_by_name('text', stripall=True)\n\n html_code = ''\n tmp_code = ''\n last_line = None\n\n # parse lines\n for line, code in codelines:\n if not last_line:\n line_code_start = line\n\n # new codeblock is detected\n if last_line is not None and\\\n last_line + 1 != line:\n\n # highlight last codepart\n formatter = HtmlFormatter(linenos='inline',\n linenostart=line_code_start)\n html_code = html_code + highlight(tmp_code, lexer, formatter)\n\n # reset conditions for next codepart\n tmp_code = ''\n line_code_start = line\n\n # add codepart\n tmp_code += code + '\\n'\n\n # update 
line\n last_line = line\n\n # highlight last codepart\n formatter = HtmlFormatter(linenos='inline', linenostart=line_code_start)\n html_code = html_code + highlight(tmp_code, lexer, formatter)\n\n return html_code\n\n\n# Extract domain from url\[email protected]_filter('extract_domain')\ndef extract_domain(url):\n return urlparse(url)[1]\n\n\ndef get_base_url():\n if settings['server']['base_url']:\n hostname = settings['server']['base_url']\n else:\n scheme = 'http'\n if request.is_secure:\n scheme = 'https'\n hostname = url_for('index', _external=True, _scheme=scheme)\n return hostname\n\n\ndef get_current_theme_name(override=None):\n \"\"\"Returns theme name.\n\n Checks in this order:\n 1. override\n 2. cookies\n 3. settings\"\"\"\n\n if override and (override in themes or override == '__common__'):\n return override\n theme_name = request.args.get('theme', request.preferences.get_value('theme'))\n if theme_name not in themes:\n theme_name = default_theme\n return theme_name\n\n\ndef get_result_template(theme, template_name):\n themed_path = theme + '/result_templates/' + template_name\n if themed_path in result_templates:\n return themed_path\n return 'result_templates/' + template_name\n\n\ndef url_for_theme(endpoint, override_theme=None, **values):\n if endpoint == 'static' and values.get('filename'):\n theme_name = get_current_theme_name(override=override_theme)\n filename_with_theme = \"themes/{}/{}\".format(theme_name, values['filename'])\n if filename_with_theme in static_files:\n values['filename'] = filename_with_theme\n return url_for(endpoint, **values)\n\n\ndef proxify(url):\n if url.startswith('//'):\n url = 'https:' + url\n\n if not settings.get('result_proxy'):\n return url\n\n url_params = dict(mortyurl=url.encode('utf-8'))\n\n if settings['result_proxy'].get('key'):\n url_params['mortyhash'] = hmac.new(settings['result_proxy']['key'],\n url.encode('utf-8'),\n hashlib.sha256).hexdigest()\n\n return '{0}?{1}'.format(settings['result_proxy']['url'],\n urlencode(url_params))\n\n\ndef image_proxify(url):\n\n if url.startswith('//'):\n url = 'https:' + url\n\n if not request.preferences.get_value('image_proxy'):\n return url\n\n if url.startswith('data:image/'):\n # 50 is an arbitrary number to get only the beginning of the image.\n partial_base64 = url[len('data:image/'):50].split(';')\n if len(partial_base64) == 2 \\\n and partial_base64[0] in ['gif', 'png', 'jpeg', 'pjpeg', 'webp', 'tiff', 'bmp']\\\n and partial_base64[1].startswith('base64,'):\n return url\n else:\n return None\n\n if settings.get('result_proxy'):\n return proxify(url)\n\n h = new_hmac(settings['server']['secret_key'], url.encode('utf-8'))\n\n return '{0}?{1}'.format(url_for('image_proxy'),\n urlencode(dict(url=url.encode('utf-8'), h=h)))\n\n\ndef render(template_name, override_theme=None, **kwargs):\n disabled_engines = request.preferences.engines.get_disabled()\n\n enabled_categories = set(category for engine_name in engines\n for category in engines[engine_name].categories\n if (engine_name, category) not in disabled_engines)\n\n if 'categories' not in kwargs:\n kwargs['categories'] = [x for x in\n _get_ordered_categories()\n if x in enabled_categories]\n\n if 'all_categories' not in kwargs:\n kwargs['all_categories'] = _get_ordered_categories()\n\n if 'selected_categories' not in kwargs:\n kwargs['selected_categories'] = []\n for arg in request.args:\n if arg.startswith('category_'):\n c = arg.split('_', 1)[1]\n if c in categories:\n kwargs['selected_categories'].append(c)\n\n if not 
kwargs['selected_categories']:\n cookie_categories = request.preferences.get_value('categories')\n for ccateg in cookie_categories:\n kwargs['selected_categories'].append(ccateg)\n\n if not kwargs['selected_categories']:\n kwargs['selected_categories'] = ['general']\n\n if 'autocomplete' not in kwargs:\n kwargs['autocomplete'] = request.preferences.get_value('autocomplete')\n\n locale = request.preferences.get_value('locale')\n\n if locale in rtl_locales and 'rtl' not in kwargs:\n kwargs['rtl'] = True\n\n kwargs['searx_version'] = VERSION_STRING\n\n kwargs['method'] = request.preferences.get_value('method')\n\n kwargs['safesearch'] = str(request.preferences.get_value('safesearch'))\n\n kwargs['language_codes'] = languages\n if 'current_language' not in kwargs:\n kwargs['current_language'] = match_language(request.preferences.get_value('language'),\n LANGUAGE_CODES)\n\n # override url_for function in templates\n kwargs['url_for'] = url_for_theme\n\n kwargs['image_proxify'] = image_proxify\n\n kwargs['proxify'] = proxify if settings.get('result_proxy', {}).get('url') else None\n\n kwargs['get_result_template'] = get_result_template\n\n kwargs['theme'] = get_current_theme_name(override=override_theme)\n\n kwargs['template_name'] = template_name\n\n kwargs['cookies'] = request.cookies\n\n kwargs['errors'] = request.errors\n\n kwargs['instance_name'] = settings['general']['instance_name']\n\n kwargs['results_on_new_tab'] = request.preferences.get_value('results_on_new_tab')\n\n kwargs['unicode'] = unicode\n\n kwargs['preferences'] = request.preferences\n\n kwargs['brand'] = brand\n\n kwargs['scripts'] = set()\n kwargs['endpoint'] = 'results' if 'q' in kwargs else request.endpoint\n for plugin in request.user_plugins:\n for script in plugin.js_dependencies:\n kwargs['scripts'].add(script)\n\n kwargs['styles'] = set()\n for plugin in request.user_plugins:\n for css in plugin.css_dependencies:\n kwargs['styles'].add(css)\n\n return render_template(\n '{}/{}'.format(kwargs['theme'], template_name), **kwargs)\n\n\ndef _get_ordered_categories():\n ordered_categories = []\n if 'categories_order' not in settings['ui']:\n ordered_categories = ['general']\n ordered_categories.extend(x for x in sorted(categories.keys()) if x != 'general')\n return ordered_categories\n ordered_categories = settings['ui']['categories_order']\n ordered_categories.extend(x for x in sorted(categories.keys()) if x not in ordered_categories)\n return ordered_categories\n\n\[email protected]_request\ndef pre_request():\n request.start_time = time()\n request.timings = []\n request.errors = []\n\n preferences = Preferences(themes, list(categories.keys()), engines, plugins)\n user_agent = request.headers.get('User-Agent', '').lower()\n if 'webkit' in user_agent and 'android' in user_agent:\n preferences.key_value_settings['method'].value = 'GET'\n request.preferences = preferences\n try:\n preferences.parse_dict(request.cookies)\n except:\n request.errors.append(gettext('Invalid settings, please edit your preferences'))\n\n # merge GET, POST vars\n # request.form\n request.form = dict(request.form.items())\n for k, v in request.args.items():\n if k not in request.form:\n request.form[k] = v\n\n if request.form.get('preferences'):\n preferences.parse_encoded_data(request.form['preferences'])\n else:\n try:\n preferences.parse_dict(request.form)\n except Exception as e:\n logger.exception('invalid settings')\n request.errors.append(gettext('Invalid settings'))\n\n # init search language and locale\n if not 
preferences.get_value(\"language\"):\n preferences.parse_dict({\"language\": _get_browser_language(request, LANGUAGE_CODES)})\n if not preferences.get_value(\"locale\"):\n preferences.parse_dict({\"locale\": get_locale()})\n\n # request.user_plugins\n request.user_plugins = []\n allowed_plugins = preferences.plugins.get_enabled()\n disabled_plugins = preferences.plugins.get_disabled()\n for plugin in plugins:\n if ((plugin.default_on and plugin.id not in disabled_plugins)\n or plugin.id in allowed_plugins):\n request.user_plugins.append(plugin)\n\n\[email protected]_request\ndef post_request(response):\n total_time = time() - request.start_time\n timings_all = ['total;dur=' + str(round(total_time * 1000, 3))]\n if len(request.timings) > 0:\n timings = sorted(request.timings, key=lambda v: v['total'])\n timings_total = ['total_' + str(i) + '_' + v['engine'] +\n ';dur=' + str(round(v['total'] * 1000, 3)) for i, v in enumerate(timings)]\n timings_load = ['load_' + str(i) + '_' + v['engine'] +\n ';dur=' + str(round(v['load'] * 1000, 3)) for i, v in enumerate(timings)]\n timings_all = timings_all + timings_total + timings_load\n response.headers.add('Server-Timing', ', '.join(timings_all))\n return response\n\n\ndef index_error(output_format, error_message):\n if output_format == 'json':\n return Response(json.dumps({'error': error_message}),\n mimetype='application/json')\n elif output_format == 'csv':\n response = Response('', mimetype='application/csv')\n cont_disp = 'attachment;Filename=searx.csv'\n response.headers.add('Content-Disposition', cont_disp)\n return response\n elif output_format == 'rss':\n response_rss = render(\n 'opensearch_response_rss.xml',\n results=[],\n q=request.form['q'] if 'q' in request.form else '',\n number_of_results=0,\n base_url=get_base_url(),\n error_message=error_message,\n override_theme='__common__',\n )\n return Response(response_rss, mimetype='text/xml')\n else:\n # html\n request.errors.append(gettext('search error'))\n return render(\n 'index.html',\n )\n\n\[email protected]('/search', methods=['GET', 'POST'])\[email protected]('/', methods=['GET', 'POST'])\ndef index():\n \"\"\"Render index page.\n\n Supported outputs: html, json, csv, rss.\n \"\"\"\n\n # output_format\n output_format = request.form.get('format', 'html')\n if output_format not in ['html', 'csv', 'json', 'rss']:\n output_format = 'html'\n\n # check if there is query\n if request.form.get('q') is None:\n if output_format == 'html':\n return render(\n 'index.html',\n )\n else:\n return index_error(output_format, 'No query'), 400\n\n # search\n search_query = None\n raw_text_query = None\n result_container = None\n try:\n search_query, raw_text_query = get_search_query_from_webapp(request.preferences, request.form)\n # search = Search(search_query) # without plugins\n search = SearchWithPlugins(search_query, request.user_plugins, request)\n\n result_container = search.search()\n\n except Exception as e:\n # log exception\n logger.exception('search error')\n\n # is it an invalid input parameter or something else ?\n if (issubclass(e.__class__, SearxParameterException)):\n return index_error(output_format, e.message), 400\n else:\n return index_error(output_format, gettext('search error')), 500\n\n # results\n results = result_container.get_ordered_results()\n number_of_results = result_container.results_number()\n if number_of_results < result_container.results_length():\n number_of_results = 0\n\n # checkin for a external bang\n if result_container.redirect_url:\n return 
redirect(result_container.redirect_url)\n\n # UI\n advanced_search = request.form.get('advanced_search', None)\n\n # Server-Timing header\n request.timings = result_container.get_timings()\n\n # output\n for result in results:\n if output_format == 'html':\n if 'content' in result and result['content']:\n result['content'] = highlight_content(escape(result['content'][:1024]), search_query.query)\n if 'title' in result and result['title']:\n result['title'] = highlight_content(escape(result['title'] or u''), search_query.query)\n else:\n if result.get('content'):\n result['content'] = html_to_text(result['content']).strip()\n # removing html content and whitespace duplications\n result['title'] = ' '.join(html_to_text(result['title']).strip().split())\n\n if 'url' in result:\n result['pretty_url'] = prettify_url(result['url'])\n\n # TODO, check if timezone is calculated right\n if 'publishedDate' in result:\n try: # test if publishedDate >= 1900 (datetime module bug)\n result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S%z')\n except ValueError:\n result['publishedDate'] = None\n else:\n if result['publishedDate'].replace(tzinfo=None) >= datetime.now() - timedelta(days=1):\n timedifference = datetime.now() - result['publishedDate'].replace(tzinfo=None)\n minutes = int((timedifference.seconds / 60) % 60)\n hours = int(timedifference.seconds / 60 / 60)\n if hours == 0:\n result['publishedDate'] = gettext(u'{minutes} minute(s) ago').format(minutes=minutes)\n else:\n result['publishedDate'] = gettext(u'{hours} hour(s), {minutes} minute(s) ago').format(hours=hours, minutes=minutes) # noqa\n else:\n result['publishedDate'] = format_date(result['publishedDate'])\n\n if output_format == 'json':\n return Response(json.dumps({'query': search_query.query.decode('utf-8'),\n 'number_of_results': number_of_results,\n 'results': results,\n 'answers': list(result_container.answers),\n 'corrections': list(result_container.corrections),\n 'infoboxes': result_container.infoboxes,\n 'suggestions': list(result_container.suggestions),\n 'unresponsive_engines': __get_translated_errors(result_container.unresponsive_engines)}, # noqa\n default=lambda item: list(item) if isinstance(item, set) else item),\n mimetype='application/json')\n elif output_format == 'csv':\n csv = UnicodeWriter(StringIO())\n keys = ('title', 'url', 'content', 'host', 'engine', 'score', 'type')\n csv.writerow(keys)\n for row in results:\n row['host'] = row['parsed_url'].netloc\n row['type'] = 'result'\n csv.writerow([row.get(key, '') for key in keys])\n for a in result_container.answers:\n row = {'title': a, 'type': 'answer'}\n csv.writerow([row.get(key, '') for key in keys])\n for a in result_container.suggestions:\n row = {'title': a, 'type': 'suggestion'}\n csv.writerow([row.get(key, '') for key in keys])\n for a in result_container.corrections:\n row = {'title': a, 'type': 'correction'}\n csv.writerow([row.get(key, '') for key in keys])\n csv.stream.seek(0)\n response = Response(csv.stream.read(), mimetype='application/csv')\n cont_disp = 'attachment;Filename=searx_-_{0}.csv'.format(search_query.query.decode('utf-8'))\n response.headers.add('Content-Disposition', cont_disp)\n return response\n\n elif output_format == 'rss':\n response_rss = render(\n 'opensearch_response_rss.xml',\n results=results,\n answers=result_container.answers,\n corrections=result_container.corrections,\n suggestions=result_container.suggestions,\n q=request.form['q'],\n number_of_results=number_of_results,\n base_url=get_base_url(),\n 
override_theme='__common__',\n )\n return Response(response_rss, mimetype='text/xml')\n\n # HTML output format\n\n # suggestions: use RawTextQuery to get the suggestion URLs with the same bang\n suggestion_urls = list(map(lambda suggestion: {\n 'url': raw_text_query.changeSearchQuery(suggestion).getFullQuery(),\n 'title': suggestion\n },\n result_container.suggestions))\n\n correction_urls = list(map(lambda correction: {\n 'url': raw_text_query.changeSearchQuery(correction).getFullQuery(),\n 'title': correction\n },\n result_container.corrections))\n #\n return render(\n 'results.html',\n results=results,\n q=request.form['q'],\n selected_categories=search_query.categories,\n pageno=search_query.pageno,\n time_range=search_query.time_range,\n number_of_results=format_decimal(number_of_results),\n advanced_search=advanced_search,\n suggestions=suggestion_urls,\n answers=result_container.answers,\n corrections=correction_urls,\n infoboxes=result_container.infoboxes,\n paging=result_container.paging,\n unresponsive_engines=__get_translated_errors(result_container.unresponsive_engines),\n current_language=match_language(search_query.lang,\n LANGUAGE_CODES,\n fallback=request.preferences.get_value(\"language\")),\n base_url=get_base_url(),\n theme=get_current_theme_name(),\n favicons=global_favicons[themes.index(get_current_theme_name())],\n timeout_limit=request.form.get('timeout_limit', None)\n )\n\n\ndef __get_translated_errors(unresponsive_engines):\n translated_errors = []\n for unresponsive_engine in unresponsive_engines:\n error_msg = gettext(unresponsive_engine[1])\n if unresponsive_engine[2]:\n error_msg = \"{} {}\".format(error_msg, unresponsive_engine[2])\n translated_errors.append((unresponsive_engine[0], error_msg))\n return translated_errors\n\n\[email protected]('/about', methods=['GET'])\ndef about():\n \"\"\"Render about page\"\"\"\n return render(\n 'about.html',\n )\n\n\[email protected]('/autocompleter', methods=['GET', 'POST'])\ndef autocompleter():\n \"\"\"Return autocompleter results\"\"\"\n\n # set blocked engines\n disabled_engines = request.preferences.engines.get_disabled()\n\n # parse query\n if PY3:\n raw_text_query = RawTextQuery(request.form.get('q', b''), disabled_engines)\n else:\n raw_text_query = RawTextQuery(request.form.get('q', u'').encode('utf-8'), disabled_engines)\n raw_text_query.parse_query()\n\n # check if search query is set\n if not raw_text_query.getSearchQuery():\n return '', 400\n\n # run autocompleter\n completer = autocomplete_backends.get(request.preferences.get_value('autocomplete'))\n\n # parse searx specific autocompleter results like !bang\n raw_results = searx_bang(raw_text_query)\n\n # normal autocompletion results only appear if no inner results returned\n # and there is a query part besides the engine and language bangs\n if len(raw_results) == 0 and completer and (len(raw_text_query.query_parts) > 1 or\n (len(raw_text_query.languages) == 0 and\n not raw_text_query.specific)):\n # get language from cookie\n language = request.preferences.get_value('language')\n if not language or language == 'all':\n language = 'en'\n else:\n language = language.split('-')[0]\n # run autocompletion\n raw_results.extend(completer(raw_text_query.getSearchQuery(), language))\n\n # parse results (write :language and !engine back to result string)\n results = []\n for result in raw_results:\n raw_text_query.changeSearchQuery(result)\n\n # add parsed result\n results.append(raw_text_query.getFullQuery())\n\n # return autocompleter results\n if 
request.headers.get('X-Requested-With') == 'XMLHttpRequest':\n return Response(json.dumps(results),\n mimetype='application/json')\n\n return Response(json.dumps([raw_text_query.query, results]),\n mimetype='application/x-suggestions+json')\n\n\[email protected]('/preferences', methods=['GET', 'POST'])\ndef preferences():\n \"\"\"Render preferences page && save user preferences\"\"\"\n\n # save preferences\n if request.method == 'POST':\n resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))\n try:\n request.preferences.parse_form(request.form)\n except ValidationException:\n request.errors.append(gettext('Invalid settings, please edit your preferences'))\n return resp\n return request.preferences.save(resp)\n\n # render preferences\n image_proxy = request.preferences.get_value('image_proxy')\n lang = request.preferences.get_value('language')\n disabled_engines = request.preferences.engines.get_disabled()\n allowed_plugins = request.preferences.plugins.get_enabled()\n\n # stats for preferences page\n stats = {}\n\n engines_by_category = {}\n for c in categories:\n engines_by_category[c] = []\n for e in categories[c]:\n if not request.preferences.validate_token(e):\n continue\n\n stats[e.name] = {'time': None,\n 'warn_timeout': False,\n 'warn_time': False}\n if e.timeout > settings['outgoing']['request_timeout']:\n stats[e.name]['warn_timeout'] = True\n stats[e.name]['supports_selected_language'] = _is_selected_language_supported(e, request.preferences)\n\n engines_by_category[c].append(e)\n\n # get first element [0], the engine time,\n # and then the second element [1] : the time (the first one is the label)\n for engine_stat in get_engines_stats(request.preferences)[0][1]:\n stats[engine_stat.get('name')]['time'] = round(engine_stat.get('avg'), 3)\n if engine_stat.get('avg') > settings['outgoing']['request_timeout']:\n stats[engine_stat.get('name')]['warn_time'] = True\n # end of stats\n\n return render('preferences.html',\n locales=settings['locales'],\n current_locale=request.preferences.get_value(\"locale\"),\n image_proxy=image_proxy,\n engines_by_category=engines_by_category,\n stats=stats,\n answerers=[{'info': a.self_info(), 'keywords': a.keywords} for a in answerers],\n disabled_engines=disabled_engines,\n autocomplete_backends=autocomplete_backends,\n shortcuts={y: x for x, y in engine_shortcuts.items()},\n themes=themes,\n plugins=plugins,\n doi_resolvers=settings['doi_resolvers'],\n current_doi_resolver=get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')),\n allowed_plugins=allowed_plugins,\n theme=get_current_theme_name(),\n preferences_url_params=request.preferences.get_as_url_params(),\n base_url=get_base_url(),\n preferences=True)\n\n\ndef _is_selected_language_supported(engine, preferences):\n language = preferences.get_value('language')\n return (language == 'all'\n or match_language(language,\n getattr(engine, 'supported_languages', []),\n getattr(engine, 'language_aliases', {}), None))\n\n\[email protected]('/image_proxy', methods=['GET'])\ndef image_proxy():\n url = request.args.get('url').encode('utf-8')\n\n if not url:\n return '', 400\n\n h = new_hmac(settings['server']['secret_key'], url)\n\n if h != request.args.get('h'):\n return '', 400\n\n headers = dict_subset(request.headers, {'If-Modified-Since', 'If-None-Match'})\n headers['User-Agent'] = gen_useragent()\n\n resp = requests.get(url,\n stream=True,\n timeout=settings['outgoing']['request_timeout'],\n headers=headers,\n proxies=outgoing_proxies)\n\n 
if resp.status_code == 304:\n return '', resp.status_code\n\n if resp.status_code != 200:\n logger.debug('image-proxy: wrong response code: {0}'.format(resp.status_code))\n if resp.status_code >= 400:\n return '', resp.status_code\n return '', 400\n\n if not resp.headers.get('content-type', '').startswith('image/'):\n logger.debug('image-proxy: wrong content-type: {0}'.format(resp.headers.get('content-type')))\n return '', 400\n\n img = b''\n chunk_counter = 0\n\n for chunk in resp.iter_content(1024 * 1024):\n chunk_counter += 1\n if chunk_counter > 5:\n return '', 502 # Bad gateway - file is too big (>5M)\n img += chunk\n\n headers = dict_subset(resp.headers, {'Content-Length', 'Length', 'Date', 'Last-Modified', 'Expires', 'Etag'})\n\n return Response(img, mimetype=resp.headers['content-type'], headers=headers)\n\n\[email protected]('/stats', methods=['GET'])\ndef stats():\n \"\"\"Render engine statistics page.\"\"\"\n stats = get_engines_stats(request.preferences)\n return render(\n 'stats.html',\n stats=stats,\n )\n\n\[email protected]('/robots.txt', methods=['GET'])\ndef robots():\n return Response(\"\"\"User-agent: *\nAllow: /\nAllow: /about\nDisallow: /stats\nDisallow: /preferences\nDisallow: /*?*q=*\n\"\"\", mimetype='text/plain')\n\n\[email protected]('/opensearch.xml', methods=['GET'])\ndef opensearch():\n method = 'post'\n\n if request.preferences.get_value('method') == 'GET':\n method = 'get'\n\n # chrome/chromium only supports HTTP GET....\n if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:\n method = 'get'\n\n ret = render('opensearch.xml',\n opensearch_method=method,\n host=get_base_url(),\n urljoin=urljoin,\n override_theme='__common__')\n\n resp = Response(response=ret,\n status=200,\n mimetype=\"application/opensearchdescription+xml\")\n return resp\n\n\[email protected]('/favicon.ico')\ndef favicon():\n return send_from_directory(os.path.join(app.root_path,\n static_path,\n 'themes',\n get_current_theme_name(),\n 'img'),\n 'favicon.png',\n mimetype='image/vnd.microsoft.icon')\n\n\[email protected]('/clear_cookies')\ndef clear_cookies():\n resp = make_response(redirect(urljoin(settings['server']['base_url'], url_for('index'))))\n for cookie_name in request.cookies:\n resp.delete_cookie(cookie_name)\n return resp\n\n\[email protected]('/config')\ndef config():\n \"\"\"Return configuration in JSON format.\"\"\"\n _engines = []\n for name, engine in engines.items():\n if not request.preferences.validate_token(engine):\n continue\n\n supported_languages = engine.supported_languages\n if isinstance(engine.supported_languages, dict):\n supported_languages = list(engine.supported_languages.keys())\n\n _engines.append({\n 'name': name,\n 'categories': engine.categories,\n 'shortcut': engine.shortcut,\n 'enabled': not engine.disabled,\n 'paging': engine.paging,\n 'language_support': engine.language_support,\n 'supported_languages': supported_languages,\n 'safesearch': engine.safesearch,\n 'time_range_support': engine.time_range_support,\n 'timeout': engine.timeout\n })\n\n _plugins = []\n for _ in plugins:\n _plugins.append({'name': _.name, 'enabled': _.default_on})\n\n return jsonify({\n 'categories': list(categories.keys()),\n 'engines': _engines,\n 'plugins': _plugins,\n 'instance_name': settings['general']['instance_name'],\n 'locales': settings['locales'],\n 'default_locale': settings['ui']['default_locale'],\n 'autocomplete': settings['search']['autocomplete'],\n 'safe_search': settings['search']['safe_search'],\n 'default_theme': 
settings['ui']['default_theme'],\n 'version': VERSION_STRING,\n 'brand': {\n 'GIT_URL': brand.GIT_URL,\n 'DOCS_URL': brand.DOCS_URL\n },\n 'doi_resolvers': [r for r in settings['doi_resolvers']],\n 'default_doi_resolver': settings['default_doi_resolver'],\n })\n\n\[email protected]('/translations.js')\ndef js_translations():\n return render(\n 'translations.js.tpl',\n override_theme='__common__',\n ), {'Content-Type': 'text/javascript; charset=UTF-8'}\n\n\[email protected](404)\ndef page_not_found(e):\n return render('404.html'), 404\n\n\ndef run():\n logger.debug('starting webserver on %s:%s', settings['server']['bind_address'], settings['server']['port'])\n app.run(\n debug=searx_debug,\n use_debugger=searx_debug,\n port=settings['server']['port'],\n host=settings['server']['bind_address'],\n threaded=True\n )\n\n\nclass ReverseProxyPathFix(object):\n '''Wrap the application in this middleware and configure the\n front-end server to add these headers, to let you quietly bind\n this to a URL other than / and to an HTTP scheme that is\n different than what is used locally.\n\n http://flask.pocoo.org/snippets/35/\n\n In nginx:\n location /myprefix {\n proxy_pass http://127.0.0.1:8000;\n proxy_set_header Host $host;\n proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;\n proxy_set_header X-Scheme $scheme;\n proxy_set_header X-Script-Name /myprefix;\n }\n\n :param app: the WSGI application\n '''\n\n def __init__(self, app):\n self.app = app\n\n def __call__(self, environ, start_response):\n script_name = environ.get('HTTP_X_SCRIPT_NAME', '')\n if script_name:\n environ['SCRIPT_NAME'] = script_name\n path_info = environ['PATH_INFO']\n if path_info.startswith(script_name):\n environ['PATH_INFO'] = path_info[len(script_name):]\n\n scheme = environ.get('HTTP_X_SCHEME', '')\n if scheme:\n environ['wsgi.url_scheme'] = scheme\n return self.app(environ, start_response)\n\n\napplication = app\n# patch app to handle non root url-s behind proxy & wsgi\napp.wsgi_app = ReverseProxyPathFix(ProxyFix(application.wsgi_app))\n\nif __name__ == \"__main__\":\n run()\n", "path": "searx/webapp.py"}]} |
gh_patches_debug_1601 | rasdani/github-patches | git_diff | sanic-org__sanic-2452 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Error Handler mismatch warning
The error handler mismatch warning triggers accidentally on v22.3 when setting `FALLBACK_ERROR_FORMAT`.
```python
app.config.FALLBACK_ERROR_FORMAT = "text"
@app.get("/")
async def handler(request: Request):
1 / 0
```
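
With only the config value set, hitting `/` still logs the conflict warning: `ErrorHandler.finalize()` (see `sanic/handlers.py` below) copies a non-default config value into `error_handler._fallback`, and the v22.3 check then only stays quiet when the config value is *unset*. A minimal sketch of that decision logic, with the Sanic internals stubbed out (`_default`, the function name, and the `print` call are illustrative stand-ins, not Sanic APIs):

```python
_default = object()  # stand-in for the sanic.helpers._default sentinel

def get_fallback(handler_fallback, config_fallback):
    # v22.3 logic: only an *unset* config suppresses the warning
    if handler_fallback is not _default:
        if config_fallback is _default:
            return handler_fallback
        print("warning: conflicting fallback values")  # fires spuriously
    return config_fallback

# finalize() has already copied "text" from the config into the handler,
# so both sides hold the same value -- yet the warning still fires:
print(get_fallback("text", "text"))  # warning, then "text"
```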
This can be resolved as follows:
```python
@classmethod
def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):
if error_handler._fallback is not _default:
if config._FALLBACK_ERROR_FORMAT == error_handler._fallback: # <<<<< This line needs this change
return error_handler.fallback
error_logger.warning(
"Conflicting error fallback values were found in the "
"error handler and in the app.config while handling an "
"exception. Using the value from app.config."
)
return config.FALLBACK_ERROR_FORMAT
```
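
With the same stand-ins, the equality comparison keeps the warning for genuine conflicts only (a sketch of the proposed behavior, not a verified Sanic test):

```python
_default = object()  # same illustrative sentinel as above

def get_fallback_fixed(handler_fallback, config_fallback):
    if handler_fallback is not _default:
        # compare the two values instead of testing config against the sentinel
        if config_fallback == handler_fallback:
            return handler_fallback
        print("warning: genuine conflict, using the app.config value")
    return config_fallback

print(get_fallback_fixed("text", "text"))  # no warning; values agree
print(get_fallback_fixed("html", "text"))  # warns; values actually differ
```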
https://github.com/sanic-org/sanic/blob/5d683c6ea4b615e80c51d80189436437b824cce6/sanic/handlers.py#L79
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `sanic/handlers.py`
Content:
```
1 from __future__ import annotations
2
3 from typing import Dict, List, Optional, Tuple, Type, Union
4
5 from sanic.config import Config
6 from sanic.errorpages import (
7 DEFAULT_FORMAT,
8 BaseRenderer,
9 TextRenderer,
10 exception_response,
11 )
12 from sanic.exceptions import (
13 ContentRangeError,
14 HeaderNotFound,
15 InvalidRangeType,
16 SanicException,
17 )
18 from sanic.helpers import Default, _default
19 from sanic.log import deprecation, error_logger
20 from sanic.models.handler_types import RouteHandler
21 from sanic.response import text
22
23
24 class ErrorHandler:
25 """
26 Provide :class:`sanic.app.Sanic` application with a mechanism to handle
27 and process any and all uncaught exceptions in a way the application
28     developer sees fit.
29
30     This error handling framework is built into the core and can be extended
31     by developers to perform a wide range of tasks, from recording error
32     stats to reporting them to an external service that can be used for a
33     realtime alerting system.
34
35 """
36
37 def __init__(
38 self,
39 fallback: Union[str, Default] = _default,
40 base: Type[BaseRenderer] = TextRenderer,
41 ):
42 self.cached_handlers: Dict[
43 Tuple[Type[BaseException], Optional[str]], Optional[RouteHandler]
44 ] = {}
45 self.debug = False
46 self._fallback = fallback
47 self.base = base
48
49 if fallback is not _default:
50 self._warn_fallback_deprecation()
51
52 @property
53 def fallback(self): # no cov
54 # This is for backwards compat and can be removed in v22.6
55 if self._fallback is _default:
56 return DEFAULT_FORMAT
57 return self._fallback
58
59 @fallback.setter
60 def fallback(self, value: str): # no cov
61 self._warn_fallback_deprecation()
62 if not isinstance(value, str):
63 raise SanicException(
64 f"Cannot set error handler fallback to: value={value}"
65 )
66 self._fallback = value
67
68 @staticmethod
69 def _warn_fallback_deprecation():
70 deprecation(
71 "Setting the ErrorHandler fallback value directly is "
72 "deprecated and no longer supported. This feature will "
73 "be removed in v22.6. Instead, use "
74 "app.config.FALLBACK_ERROR_FORMAT.",
75 22.6,
76 )
77
78 @classmethod
79 def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):
80 if error_handler._fallback is not _default:
81 if config._FALLBACK_ERROR_FORMAT is _default:
82 return error_handler.fallback
83
84 error_logger.warning(
85 "Conflicting error fallback values were found in the "
86 "error handler and in the app.config while handling an "
87 "exception. Using the value from app.config."
88 )
89 return config.FALLBACK_ERROR_FORMAT
90
91 @classmethod
92 def finalize(
93 cls,
94 error_handler: ErrorHandler,
95 config: Config,
96 fallback: Optional[str] = None,
97 ):
98 if fallback:
99 deprecation(
100 "Setting the ErrorHandler fallback value via finalize() "
101 "is deprecated and no longer supported. This feature will "
102 "be removed in v22.6. Instead, use "
103 "app.config.FALLBACK_ERROR_FORMAT.",
104 22.6,
105 )
106
107 if not fallback:
108 fallback = config.FALLBACK_ERROR_FORMAT
109
110 if fallback != DEFAULT_FORMAT:
111 if error_handler._fallback is not _default:
112 error_logger.warning(
113 f"Setting the fallback value to {fallback}. This changes "
114 "the current non-default value "
115 f"'{error_handler._fallback}'."
116 )
117 error_handler._fallback = fallback
118
119 if not isinstance(error_handler, cls):
120 error_logger.warning(
121 f"Error handler is non-conforming: {type(error_handler)}"
122 )
123
124 def _full_lookup(self, exception, route_name: Optional[str] = None):
125 return self.lookup(exception, route_name)
126
127 def add(self, exception, handler, route_names: Optional[List[str]] = None):
128 """
129 Add a new exception handler to an already existing handler object.
130
131 :param exception: Type of exception that need to be handled
132 :param handler: Reference to the method that will handle the exception
133
134 :type exception: :class:`sanic.exceptions.SanicException` or
135 :class:`Exception`
136 :type handler: ``function``
137
138 :return: None
139 """
140 if route_names:
141 for route in route_names:
142 self.cached_handlers[(exception, route)] = handler
143 else:
144 self.cached_handlers[(exception, None)] = handler
145
146 def lookup(self, exception, route_name: Optional[str] = None):
147 """
148         Look up the existing instance of :class:`ErrorHandler` and fetch the
149 registered handler for a specific type of exception.
150
151         This method leverages a dict lookup to speed up the retrieval process.
152
153 :param exception: Type of exception
154
155 :type exception: :class:`sanic.exceptions.SanicException` or
156 :class:`Exception`
157
158 :return: Registered function if found ``None`` otherwise
159 """
160 exception_class = type(exception)
161
162 for name in (route_name, None):
163 exception_key = (exception_class, name)
164 handler = self.cached_handlers.get(exception_key)
165 if handler:
166 return handler
167
168 for name in (route_name, None):
169 for ancestor in type.mro(exception_class):
170 exception_key = (ancestor, name)
171 if exception_key in self.cached_handlers:
172 handler = self.cached_handlers[exception_key]
173 self.cached_handlers[
174 (exception_class, route_name)
175 ] = handler
176 return handler
177
178 if ancestor is BaseException:
179 break
180 self.cached_handlers[(exception_class, route_name)] = None
181 handler = None
182 return handler
183
184 _lookup = _full_lookup
185
186 def response(self, request, exception):
187 """Fetches and executes an exception handler and returns a response
188 object
189
190 :param request: Instance of :class:`sanic.request.Request`
191 :param exception: Exception to handle
192
193 :type request: :class:`sanic.request.Request`
194 :type exception: :class:`sanic.exceptions.SanicException` or
195 :class:`Exception`
196
197 :return: Wrap the return value obtained from :func:`default`
198 or registered handler for that type of exception.
199 """
200 route_name = request.name if request else None
201 handler = self._lookup(exception, route_name)
202 response = None
203 try:
204 if handler:
205 response = handler(request, exception)
206 if response is None:
207 response = self.default(request, exception)
208 except Exception:
209 try:
210 url = repr(request.url)
211 except AttributeError: # no cov
212 url = "unknown"
213 response_message = (
214 "Exception raised in exception handler " '"%s" for uri: %s'
215 )
216 error_logger.exception(response_message, handler.__name__, url)
217
218 if self.debug:
219 return text(response_message % (handler.__name__, url), 500)
220 else:
221 return text("An error occurred while handling an error", 500)
222 return response
223
224 def default(self, request, exception):
225 """
226 Provide a default behavior for the objects of :class:`ErrorHandler`.
227 If a developer chooses to extent the :class:`ErrorHandler` they can
228 provide a custom implementation for this method to behave in a way
229 they see fit.
230
231 :param request: Incoming request
232 :param exception: Exception object
233
234 :type request: :class:`sanic.request.Request`
235 :type exception: :class:`sanic.exceptions.SanicException` or
236 :class:`Exception`
237 :return:
238 """
239 self.log(request, exception)
240 fallback = ErrorHandler._get_fallback_value(self, request.app.config)
241 return exception_response(
242 request,
243 exception,
244 debug=self.debug,
245 base=self.base,
246 fallback=fallback,
247 )
248
249 @staticmethod
250 def log(request, exception):
251 quiet = getattr(exception, "quiet", False)
252 noisy = getattr(request.app.config, "NOISY_EXCEPTIONS", False)
253 if quiet is False or noisy is True:
254 try:
255 url = repr(request.url)
256 except AttributeError: # no cov
257 url = "unknown"
258
259 error_logger.exception(
260 "Exception occurred while handling uri: %s", url
261 )
262
263
264 class ContentRangeHandler:
265 """
266 A mechanism to parse and process the incoming request headers to
267 extract the content range information.
268
269 :param request: Incoming api request
270 :param stats: Stats related to the content
271
272 :type request: :class:`sanic.request.Request`
273 :type stats: :class:`posix.stat_result`
274
275 :ivar start: Content Range start
276 :ivar end: Content Range end
277 :ivar size: Length of the content
278 :ivar total: Total size identified by the :class:`posix.stat_result`
279 instance
280 :ivar ContentRangeHandler.headers: Content range header ``dict``
281 """
282
283 __slots__ = ("start", "end", "size", "total", "headers")
284
285 def __init__(self, request, stats):
286 self.total = stats.st_size
287 _range = request.headers.getone("range", None)
288 if _range is None:
289 raise HeaderNotFound("Range Header Not Found")
290 unit, _, value = tuple(map(str.strip, _range.partition("=")))
291 if unit != "bytes":
292 raise InvalidRangeType(
293 "%s is not a valid Range Type" % (unit,), self
294 )
295 start_b, _, end_b = tuple(map(str.strip, value.partition("-")))
296 try:
297 self.start = int(start_b) if start_b else None
298 except ValueError:
299 raise ContentRangeError(
300 "'%s' is invalid for Content Range" % (start_b,), self
301 )
302 try:
303 self.end = int(end_b) if end_b else None
304 except ValueError:
305 raise ContentRangeError(
306 "'%s' is invalid for Content Range" % (end_b,), self
307 )
308 if self.end is None:
309 if self.start is None:
310 raise ContentRangeError(
311 "Invalid for Content Range parameters", self
312 )
313 else:
314 # this case represents `Content-Range: bytes 5-`
315 self.end = self.total - 1
316 else:
317 if self.start is None:
318 # this case represents `Content-Range: bytes -5`
319 self.start = self.total - self.end
320 self.end = self.total - 1
321 if self.start >= self.end:
322 raise ContentRangeError(
323 "Invalid for Content Range parameters", self
324 )
325 self.size = self.end - self.start + 1
326 self.headers = {
327 "Content-Range": "bytes %s-%s/%s"
328 % (self.start, self.end, self.total)
329 }
330
331 def __bool__(self):
332 return self.size > 0
333
```
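For reference, the `lookup` method in the listing above resolves a concrete exception class to a handler by walking the class MRO and memoizing the result per class. Below is a simplified standalone sketch of that resolution order (plain Python, not sanic's actual class; the route-name dimension is omitted):

```python
# Minimal sketch of ErrorHandler.lookup's MRO walk and memoization.
class MiniRegistry:
    def __init__(self):
        self.cached_handlers = {}

    def add(self, exc_type, handler):
        self.cached_handlers[exc_type] = handler

    def lookup(self, exception):
        exc_class = type(exception)
        if exc_class in self.cached_handlers:
            return self.cached_handlers[exc_class]
        for ancestor in type.mro(exc_class):
            if ancestor in self.cached_handlers:
                handler = self.cached_handlers[ancestor]
                # Memoize under the concrete class, as the real code does.
                self.cached_handlers[exc_class] = handler
                return handler
            if ancestor is BaseException:
                break  # never fall through to handlers registered on `object`
        self.cached_handlers[exc_class] = None
        return None

reg = MiniRegistry()
reg.add(ArithmeticError, lambda req, exc: "handled")
# ZeroDivisionError inherits from ArithmeticError, so the MRO walk finds it:
assert reg.lookup(ZeroDivisionError())(None, None) == "handled"
```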
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/sanic/handlers.py b/sanic/handlers.py
--- a/sanic/handlers.py
+++ b/sanic/handlers.py
@@ -78,7 +78,7 @@
@classmethod
def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):
if error_handler._fallback is not _default:
- if config._FALLBACK_ERROR_FORMAT is _default:
+ if config._FALLBACK_ERROR_FORMAT == error_handler._fallback:
return error_handler.fallback
error_logger.warning(
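The hunk above replaces the `is _default` test with an equality check against the handler's own fallback, so a config value that merely mirrors the handler no longer trips the conflict warning. A minimal reproduction, adapted from the code in this record's issue text (the app name and imports are additions here):

```python
# Repro sketch for the spurious warning on sanic v22.3.
from sanic import Sanic
from sanic.request import Request

app = Sanic("fallback_repro")  # hypothetical app name
app.config.FALLBACK_ERROR_FORMAT = "text"

@app.get("/")
async def handler(request: Request):
    1 / 0  # any unhandled error routes through _get_fallback_value()
```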
| {"golden_diff": "diff --git a/sanic/handlers.py b/sanic/handlers.py\n--- a/sanic/handlers.py\n+++ b/sanic/handlers.py\n@@ -78,7 +78,7 @@\n @classmethod\n def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):\n if error_handler._fallback is not _default:\n- if config._FALLBACK_ERROR_FORMAT is _default:\n+ if config._FALLBACK_ERROR_FORMAT == error_handler._fallback:\n return error_handler.fallback\n \n error_logger.warning(\n", "issue": "Error Handler mismatch warning\nThe warning for error handler mismatch is triggering on v22.3 accidentally when setting `FALLBACK_ERROR_FORMAT`.\r\n\r\n```python\r\napp.config.FALLBACK_ERROR_FORMAT = \"text\"\r\n\r\n\r\[email protected](\"/\")\r\nasync def handler(request: Request):\r\n 1 / 0\r\n```\r\n\r\nThis can be resolved as follows:\r\n\r\n```python\r\n @classmethod\r\n def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):\r\n if error_handler._fallback is not _default:\r\n if config._FALLBACK_ERROR_FORMAT == error_handler._fallback: # <<<<< This line needs this change\r\n return error_handler.fallback\r\n\r\n error_logger.warning(\r\n \"Conflicting error fallback values were found in the \"\r\n \"error handler and in the app.config while handling an \"\r\n \"exception. Using the value from app.config.\"\r\n )\r\n return config.FALLBACK_ERROR_FORMAT\r\n```\r\n\r\nhttps://github.com/sanic-org/sanic/blob/5d683c6ea4b615e80c51d80189436437b824cce6/sanic/handlers.py#L79\n", "before_files": [{"content": "from __future__ import annotations\n\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nfrom sanic.config import Config\nfrom sanic.errorpages import (\n DEFAULT_FORMAT,\n BaseRenderer,\n TextRenderer,\n exception_response,\n)\nfrom sanic.exceptions import (\n ContentRangeError,\n HeaderNotFound,\n InvalidRangeType,\n SanicException,\n)\nfrom sanic.helpers import Default, _default\nfrom sanic.log import deprecation, error_logger\nfrom sanic.models.handler_types import RouteHandler\nfrom sanic.response import text\n\n\nclass ErrorHandler:\n \"\"\"\n Provide :class:`sanic.app.Sanic` application with a mechanism to handle\n and process any and all uncaught exceptions in a way the application\n developer will set fit.\n\n This error handling framework is built into the core that can be extended\n by the developers to perform a wide range of tasks from recording the error\n stats to reporting them to an external service that can be used for\n realtime alerting system.\n\n \"\"\"\n\n def __init__(\n self,\n fallback: Union[str, Default] = _default,\n base: Type[BaseRenderer] = TextRenderer,\n ):\n self.cached_handlers: Dict[\n Tuple[Type[BaseException], Optional[str]], Optional[RouteHandler]\n ] = {}\n self.debug = False\n self._fallback = fallback\n self.base = base\n\n if fallback is not _default:\n self._warn_fallback_deprecation()\n\n @property\n def fallback(self): # no cov\n # This is for backwards compat and can be removed in v22.6\n if self._fallback is _default:\n return DEFAULT_FORMAT\n return self._fallback\n\n @fallback.setter\n def fallback(self, value: str): # no cov\n self._warn_fallback_deprecation()\n if not isinstance(value, str):\n raise SanicException(\n f\"Cannot set error handler fallback to: value={value}\"\n )\n self._fallback = value\n\n @staticmethod\n def _warn_fallback_deprecation():\n deprecation(\n \"Setting the ErrorHandler fallback value directly is \"\n \"deprecated and no longer supported. This feature will \"\n \"be removed in v22.6. 
Instead, use \"\n \"app.config.FALLBACK_ERROR_FORMAT.\",\n 22.6,\n )\n\n @classmethod\n def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):\n if error_handler._fallback is not _default:\n if config._FALLBACK_ERROR_FORMAT is _default:\n return error_handler.fallback\n\n error_logger.warning(\n \"Conflicting error fallback values were found in the \"\n \"error handler and in the app.config while handling an \"\n \"exception. Using the value from app.config.\"\n )\n return config.FALLBACK_ERROR_FORMAT\n\n @classmethod\n def finalize(\n cls,\n error_handler: ErrorHandler,\n config: Config,\n fallback: Optional[str] = None,\n ):\n if fallback:\n deprecation(\n \"Setting the ErrorHandler fallback value via finalize() \"\n \"is deprecated and no longer supported. This feature will \"\n \"be removed in v22.6. Instead, use \"\n \"app.config.FALLBACK_ERROR_FORMAT.\",\n 22.6,\n )\n\n if not fallback:\n fallback = config.FALLBACK_ERROR_FORMAT\n\n if fallback != DEFAULT_FORMAT:\n if error_handler._fallback is not _default:\n error_logger.warning(\n f\"Setting the fallback value to {fallback}. This changes \"\n \"the current non-default value \"\n f\"'{error_handler._fallback}'.\"\n )\n error_handler._fallback = fallback\n\n if not isinstance(error_handler, cls):\n error_logger.warning(\n f\"Error handler is non-conforming: {type(error_handler)}\"\n )\n\n def _full_lookup(self, exception, route_name: Optional[str] = None):\n return self.lookup(exception, route_name)\n\n def add(self, exception, handler, route_names: Optional[List[str]] = None):\n \"\"\"\n Add a new exception handler to an already existing handler object.\n\n :param exception: Type of exception that need to be handled\n :param handler: Reference to the method that will handle the exception\n\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n :type handler: ``function``\n\n :return: None\n \"\"\"\n if route_names:\n for route in route_names:\n self.cached_handlers[(exception, route)] = handler\n else:\n self.cached_handlers[(exception, None)] = handler\n\n def lookup(self, exception, route_name: Optional[str] = None):\n \"\"\"\n Lookup the existing instance of :class:`ErrorHandler` and fetch the\n registered handler for a specific type of exception.\n\n This method leverages a dict lookup to speedup the retrieval process.\n\n :param exception: Type of exception\n\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n\n :return: Registered function if found ``None`` otherwise\n \"\"\"\n exception_class = type(exception)\n\n for name in (route_name, None):\n exception_key = (exception_class, name)\n handler = self.cached_handlers.get(exception_key)\n if handler:\n return handler\n\n for name in (route_name, None):\n for ancestor in type.mro(exception_class):\n exception_key = (ancestor, name)\n if exception_key in self.cached_handlers:\n handler = self.cached_handlers[exception_key]\n self.cached_handlers[\n (exception_class, route_name)\n ] = handler\n return handler\n\n if ancestor is BaseException:\n break\n self.cached_handlers[(exception_class, route_name)] = None\n handler = None\n return handler\n\n _lookup = _full_lookup\n\n def response(self, request, exception):\n \"\"\"Fetches and executes an exception handler and returns a response\n object\n\n :param request: Instance of :class:`sanic.request.Request`\n :param exception: Exception to handle\n\n :type request: :class:`sanic.request.Request`\n :type exception: 
:class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n\n :return: Wrap the return value obtained from :func:`default`\n or registered handler for that type of exception.\n \"\"\"\n route_name = request.name if request else None\n handler = self._lookup(exception, route_name)\n response = None\n try:\n if handler:\n response = handler(request, exception)\n if response is None:\n response = self.default(request, exception)\n except Exception:\n try:\n url = repr(request.url)\n except AttributeError: # no cov\n url = \"unknown\"\n response_message = (\n \"Exception raised in exception handler \" '\"%s\" for uri: %s'\n )\n error_logger.exception(response_message, handler.__name__, url)\n\n if self.debug:\n return text(response_message % (handler.__name__, url), 500)\n else:\n return text(\"An error occurred while handling an error\", 500)\n return response\n\n def default(self, request, exception):\n \"\"\"\n Provide a default behavior for the objects of :class:`ErrorHandler`.\n If a developer chooses to extent the :class:`ErrorHandler` they can\n provide a custom implementation for this method to behave in a way\n they see fit.\n\n :param request: Incoming request\n :param exception: Exception object\n\n :type request: :class:`sanic.request.Request`\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n :return:\n \"\"\"\n self.log(request, exception)\n fallback = ErrorHandler._get_fallback_value(self, request.app.config)\n return exception_response(\n request,\n exception,\n debug=self.debug,\n base=self.base,\n fallback=fallback,\n )\n\n @staticmethod\n def log(request, exception):\n quiet = getattr(exception, \"quiet\", False)\n noisy = getattr(request.app.config, \"NOISY_EXCEPTIONS\", False)\n if quiet is False or noisy is True:\n try:\n url = repr(request.url)\n except AttributeError: # no cov\n url = \"unknown\"\n\n error_logger.exception(\n \"Exception occurred while handling uri: %s\", url\n )\n\n\nclass ContentRangeHandler:\n \"\"\"\n A mechanism to parse and process the incoming request headers to\n extract the content range information.\n\n :param request: Incoming api request\n :param stats: Stats related to the content\n\n :type request: :class:`sanic.request.Request`\n :type stats: :class:`posix.stat_result`\n\n :ivar start: Content Range start\n :ivar end: Content Range end\n :ivar size: Length of the content\n :ivar total: Total size identified by the :class:`posix.stat_result`\n instance\n :ivar ContentRangeHandler.headers: Content range header ``dict``\n \"\"\"\n\n __slots__ = (\"start\", \"end\", \"size\", \"total\", \"headers\")\n\n def __init__(self, request, stats):\n self.total = stats.st_size\n _range = request.headers.getone(\"range\", None)\n if _range is None:\n raise HeaderNotFound(\"Range Header Not Found\")\n unit, _, value = tuple(map(str.strip, _range.partition(\"=\")))\n if unit != \"bytes\":\n raise InvalidRangeType(\n \"%s is not a valid Range Type\" % (unit,), self\n )\n start_b, _, end_b = tuple(map(str.strip, value.partition(\"-\")))\n try:\n self.start = int(start_b) if start_b else None\n except ValueError:\n raise ContentRangeError(\n \"'%s' is invalid for Content Range\" % (start_b,), self\n )\n try:\n self.end = int(end_b) if end_b else None\n except ValueError:\n raise ContentRangeError(\n \"'%s' is invalid for Content Range\" % (end_b,), self\n )\n if self.end is None:\n if self.start is None:\n raise ContentRangeError(\n \"Invalid for Content Range parameters\", self\n )\n else:\n # this case represents 
`Content-Range: bytes 5-`\n self.end = self.total - 1\n else:\n if self.start is None:\n # this case represents `Content-Range: bytes -5`\n self.start = self.total - self.end\n self.end = self.total - 1\n if self.start >= self.end:\n raise ContentRangeError(\n \"Invalid for Content Range parameters\", self\n )\n self.size = self.end - self.start + 1\n self.headers = {\n \"Content-Range\": \"bytes %s-%s/%s\"\n % (self.start, self.end, self.total)\n }\n\n def __bool__(self):\n return self.size > 0\n", "path": "sanic/handlers.py"}], "after_files": [{"content": "from __future__ import annotations\n\nfrom typing import Dict, List, Optional, Tuple, Type, Union\n\nfrom sanic.config import Config\nfrom sanic.errorpages import (\n DEFAULT_FORMAT,\n BaseRenderer,\n TextRenderer,\n exception_response,\n)\nfrom sanic.exceptions import (\n ContentRangeError,\n HeaderNotFound,\n InvalidRangeType,\n SanicException,\n)\nfrom sanic.helpers import Default, _default\nfrom sanic.log import deprecation, error_logger\nfrom sanic.models.handler_types import RouteHandler\nfrom sanic.response import text\n\n\nclass ErrorHandler:\n \"\"\"\n Provide :class:`sanic.app.Sanic` application with a mechanism to handle\n and process any and all uncaught exceptions in a way the application\n developer will set fit.\n\n This error handling framework is built into the core that can be extended\n by the developers to perform a wide range of tasks from recording the error\n stats to reporting them to an external service that can be used for\n realtime alerting system.\n\n \"\"\"\n\n def __init__(\n self,\n fallback: Union[str, Default] = _default,\n base: Type[BaseRenderer] = TextRenderer,\n ):\n self.cached_handlers: Dict[\n Tuple[Type[BaseException], Optional[str]], Optional[RouteHandler]\n ] = {}\n self.debug = False\n self._fallback = fallback\n self.base = base\n\n if fallback is not _default:\n self._warn_fallback_deprecation()\n\n @property\n def fallback(self): # no cov\n # This is for backwards compat and can be removed in v22.6\n if self._fallback is _default:\n return DEFAULT_FORMAT\n return self._fallback\n\n @fallback.setter\n def fallback(self, value: str): # no cov\n self._warn_fallback_deprecation()\n if not isinstance(value, str):\n raise SanicException(\n f\"Cannot set error handler fallback to: value={value}\"\n )\n self._fallback = value\n\n @staticmethod\n def _warn_fallback_deprecation():\n deprecation(\n \"Setting the ErrorHandler fallback value directly is \"\n \"deprecated and no longer supported. This feature will \"\n \"be removed in v22.6. Instead, use \"\n \"app.config.FALLBACK_ERROR_FORMAT.\",\n 22.6,\n )\n\n @classmethod\n def _get_fallback_value(cls, error_handler: ErrorHandler, config: Config):\n if error_handler._fallback is not _default:\n if config._FALLBACK_ERROR_FORMAT == error_handler._fallback:\n return error_handler.fallback\n\n error_logger.warning(\n \"Conflicting error fallback values were found in the \"\n \"error handler and in the app.config while handling an \"\n \"exception. Using the value from app.config.\"\n )\n return config.FALLBACK_ERROR_FORMAT\n\n @classmethod\n def finalize(\n cls,\n error_handler: ErrorHandler,\n config: Config,\n fallback: Optional[str] = None,\n ):\n if fallback:\n deprecation(\n \"Setting the ErrorHandler fallback value via finalize() \"\n \"is deprecated and no longer supported. This feature will \"\n \"be removed in v22.6. 
Instead, use \"\n \"app.config.FALLBACK_ERROR_FORMAT.\",\n 22.6,\n )\n\n if not fallback:\n fallback = config.FALLBACK_ERROR_FORMAT\n\n if fallback != DEFAULT_FORMAT:\n if error_handler._fallback is not _default:\n error_logger.warning(\n f\"Setting the fallback value to {fallback}. This changes \"\n \"the current non-default value \"\n f\"'{error_handler._fallback}'.\"\n )\n error_handler._fallback = fallback\n\n if not isinstance(error_handler, cls):\n error_logger.warning(\n f\"Error handler is non-conforming: {type(error_handler)}\"\n )\n\n def _full_lookup(self, exception, route_name: Optional[str] = None):\n return self.lookup(exception, route_name)\n\n def add(self, exception, handler, route_names: Optional[List[str]] = None):\n \"\"\"\n Add a new exception handler to an already existing handler object.\n\n :param exception: Type of exception that need to be handled\n :param handler: Reference to the method that will handle the exception\n\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n :type handler: ``function``\n\n :return: None\n \"\"\"\n if route_names:\n for route in route_names:\n self.cached_handlers[(exception, route)] = handler\n else:\n self.cached_handlers[(exception, None)] = handler\n\n def lookup(self, exception, route_name: Optional[str] = None):\n \"\"\"\n Lookup the existing instance of :class:`ErrorHandler` and fetch the\n registered handler for a specific type of exception.\n\n This method leverages a dict lookup to speedup the retrieval process.\n\n :param exception: Type of exception\n\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n\n :return: Registered function if found ``None`` otherwise\n \"\"\"\n exception_class = type(exception)\n\n for name in (route_name, None):\n exception_key = (exception_class, name)\n handler = self.cached_handlers.get(exception_key)\n if handler:\n return handler\n\n for name in (route_name, None):\n for ancestor in type.mro(exception_class):\n exception_key = (ancestor, name)\n if exception_key in self.cached_handlers:\n handler = self.cached_handlers[exception_key]\n self.cached_handlers[\n (exception_class, route_name)\n ] = handler\n return handler\n\n if ancestor is BaseException:\n break\n self.cached_handlers[(exception_class, route_name)] = None\n handler = None\n return handler\n\n _lookup = _full_lookup\n\n def response(self, request, exception):\n \"\"\"Fetches and executes an exception handler and returns a response\n object\n\n :param request: Instance of :class:`sanic.request.Request`\n :param exception: Exception to handle\n\n :type request: :class:`sanic.request.Request`\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n\n :return: Wrap the return value obtained from :func:`default`\n or registered handler for that type of exception.\n \"\"\"\n route_name = request.name if request else None\n handler = self._lookup(exception, route_name)\n response = None\n try:\n if handler:\n response = handler(request, exception)\n if response is None:\n response = self.default(request, exception)\n except Exception:\n try:\n url = repr(request.url)\n except AttributeError: # no cov\n url = \"unknown\"\n response_message = (\n \"Exception raised in exception handler \" '\"%s\" for uri: %s'\n )\n error_logger.exception(response_message, handler.__name__, url)\n\n if self.debug:\n return text(response_message % (handler.__name__, url), 500)\n else:\n return text(\"An error occurred while handling an error\", 500)\n return 
response\n\n def default(self, request, exception):\n \"\"\"\n Provide a default behavior for the objects of :class:`ErrorHandler`.\n If a developer chooses to extent the :class:`ErrorHandler` they can\n provide a custom implementation for this method to behave in a way\n they see fit.\n\n :param request: Incoming request\n :param exception: Exception object\n\n :type request: :class:`sanic.request.Request`\n :type exception: :class:`sanic.exceptions.SanicException` or\n :class:`Exception`\n :return:\n \"\"\"\n self.log(request, exception)\n fallback = ErrorHandler._get_fallback_value(self, request.app.config)\n return exception_response(\n request,\n exception,\n debug=self.debug,\n base=self.base,\n fallback=fallback,\n )\n\n @staticmethod\n def log(request, exception):\n quiet = getattr(exception, \"quiet\", False)\n noisy = getattr(request.app.config, \"NOISY_EXCEPTIONS\", False)\n if quiet is False or noisy is True:\n try:\n url = repr(request.url)\n except AttributeError: # no cov\n url = \"unknown\"\n\n error_logger.exception(\n \"Exception occurred while handling uri: %s\", url\n )\n\n\nclass ContentRangeHandler:\n \"\"\"\n A mechanism to parse and process the incoming request headers to\n extract the content range information.\n\n :param request: Incoming api request\n :param stats: Stats related to the content\n\n :type request: :class:`sanic.request.Request`\n :type stats: :class:`posix.stat_result`\n\n :ivar start: Content Range start\n :ivar end: Content Range end\n :ivar size: Length of the content\n :ivar total: Total size identified by the :class:`posix.stat_result`\n instance\n :ivar ContentRangeHandler.headers: Content range header ``dict``\n \"\"\"\n\n __slots__ = (\"start\", \"end\", \"size\", \"total\", \"headers\")\n\n def __init__(self, request, stats):\n self.total = stats.st_size\n _range = request.headers.getone(\"range\", None)\n if _range is None:\n raise HeaderNotFound(\"Range Header Not Found\")\n unit, _, value = tuple(map(str.strip, _range.partition(\"=\")))\n if unit != \"bytes\":\n raise InvalidRangeType(\n \"%s is not a valid Range Type\" % (unit,), self\n )\n start_b, _, end_b = tuple(map(str.strip, value.partition(\"-\")))\n try:\n self.start = int(start_b) if start_b else None\n except ValueError:\n raise ContentRangeError(\n \"'%s' is invalid for Content Range\" % (start_b,), self\n )\n try:\n self.end = int(end_b) if end_b else None\n except ValueError:\n raise ContentRangeError(\n \"'%s' is invalid for Content Range\" % (end_b,), self\n )\n if self.end is None:\n if self.start is None:\n raise ContentRangeError(\n \"Invalid for Content Range parameters\", self\n )\n else:\n # this case represents `Content-Range: bytes 5-`\n self.end = self.total - 1\n else:\n if self.start is None:\n # this case represents `Content-Range: bytes -5`\n self.start = self.total - self.end\n self.end = self.total - 1\n if self.start >= self.end:\n raise ContentRangeError(\n \"Invalid for Content Range parameters\", self\n )\n self.size = self.end - self.start + 1\n self.headers = {\n \"Content-Range\": \"bytes %s-%s/%s\"\n % (self.start, self.end, self.total)\n }\n\n def __bool__(self):\n return self.size > 0\n", "path": "sanic/handlers.py"}]} |
gh_patches_debug_1602 | rasdani/github-patches | git_diff | microsoft__knossos-ksc-1027 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Bug: Segmentation fault in sqrl_pytorch-PyTorch CUDA
Just saw this while working on something else. I haven't done a lot to debug it, but note that it's in copydown, on a fairly innocuous operation (aten::sum(Tensor 2) -> Float), so it might be something to do with KS_ALLOCATOR not being defined?
Or it could just be an out-of-memory condition that isn't caught?

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/dl-capsule/sqrl.py`
Content:
```
1 import torch
2 import ksc.torch_frontend as knossos
3
4 # run-bench: Knossos source, and "nice" PyTorch implementation
5 # BEGINDOC
6 @knossos.register
7 def sqrl(x: torch.Tensor):
8 """
9 sqrl: Squared Leaky Relu
10 Like a capsule from /Stuck in a Rut/
11 Typically x is a 4x4 tensor, possibly
12 packed in a 4n x 4m array
13 """
14 y = torch.sum(x)
15 if y < 0.0:
16 t = -0.125 * x
17 else:
18 t = 1 / 2 * x ** 2
19 return torch.mean(torch.sin(t) * t)
20
21
22 # ENDDOC
23
24 # run-bench: PyTorch "fast" implementation
25 def sqrl_pytorch(x: torch.Tensor):
26 return sqrl(x)
27
28
29 # run-bench: PyTorch "nice" implementation
30 def sqrl_pytorch_nice(x: torch.Tensor):
31 return sqrl(x)
32
33
34 # run-bench: Define a range of values at which to call the methods
35 def sqrl_bench_configs():
36 yield torch.randn((4, 4))
37 yield torch.randn((16, 16))
38
39
40 #################################
41 #
42 # vsqrl - vectorized sqrl
43 #
44
45 vsqrl = knossos.vmap(sqrl)
46
47
48 # run-bench: Define a range of values at which to call the methods
49 def vsqrl_bench_configs():
50 yield torch.randn((10, 4, 4))
51 yield torch.randn((1000, 4, 4))
52 yield torch.randn((1000, 16, 16))
53
```
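As a usage note on `knossos.vmap(sqrl)` above: the benchmark configs batch 4x4 capsules along a leading dimension, so the vectorized call is assumed to map `sqrl` over that axis. Below is a plain-PyTorch stand-in with that assumed semantics (`vsqrl_ref` is illustrative, not the ksc implementation):

```python
import torch

def sqrl_ref(x: torch.Tensor) -> torch.Tensor:
    # Same body as sqrl above, as a plain function.
    y = torch.sum(x)
    t = -0.125 * x if y < 0.0 else 0.5 * x ** 2
    return torch.mean(torch.sin(t) * t)

def vsqrl_ref(xs: torch.Tensor) -> torch.Tensor:
    # Assumed vmap semantics: one scalar result per leading-dim slice.
    return torch.stack([sqrl_ref(x) for x in xs])

out = vsqrl_ref(torch.randn(10, 4, 4))
assert out.shape == (10,)
```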
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/dl-capsule/sqrl.py b/examples/dl-capsule/sqrl.py
--- a/examples/dl-capsule/sqrl.py
+++ b/examples/dl-capsule/sqrl.py
@@ -23,12 +23,12 @@
# run-bench: PyTorch "fast" implementation
def sqrl_pytorch(x: torch.Tensor):
- return sqrl(x)
+ return sqrl.raw_f(x)
# run-bench: PyTorch "nice" implementation
def sqrl_pytorch_nice(x: torch.Tensor):
- return sqrl(x)
+ return sqrl.raw_f(x)
# run-bench: Define a range of values at which to call the methods
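The fix points the PyTorch baselines at `sqrl.raw_f` instead of the registered `sqrl` object, keeping the baseline in plain PyTorch rather than re-entering the Knossos-compiled path that segfaulted. A hedged sketch of the decorator shape this implies; treating `raw_f` as the preserved undecorated function is an inference from the diff, not ksc's documented API:

```python
import torch

class Registered:
    """Hypothetical stand-in for ksc.torch_frontend.register."""
    def __init__(self, f):
        self.raw_f = f  # keep the original Python callable

    def __call__(self, x):
        # Real ksc would dispatch to a compiled kernel; the sketch
        # just delegates so it runs anywhere.
        return self.raw_f(x)

def register(f):
    return Registered(f)

@register
def square_mean(x: torch.Tensor) -> torch.Tensor:
    return torch.mean(x ** 2)

x = torch.randn(4, 4)
assert torch.equal(square_mean(x), square_mean.raw_f(x))
```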
| {"golden_diff": "diff --git a/examples/dl-capsule/sqrl.py b/examples/dl-capsule/sqrl.py\n--- a/examples/dl-capsule/sqrl.py\n+++ b/examples/dl-capsule/sqrl.py\n@@ -23,12 +23,12 @@\n \n # run-bench: PyTorch \"fast\" implementation\n def sqrl_pytorch(x: torch.Tensor):\n- return sqrl(x)\n+ return sqrl.raw_f(x)\n \n \n # run-bench: PyTorch \"nice\" implementation\n def sqrl_pytorch_nice(x: torch.Tensor):\n- return sqrl(x)\n+ return sqrl.raw_f(x)\n \n \n # run-bench: Define a range of values at which to call the methods\n", "issue": "Bug: Segmentation fault in sqrl_pytorch-PyTorch CUDA\nJust saw this while working on something else. I haven't done a lot to debug it, but note that it's in copydown, on a fairly innocuous operation (aten::sum(Tensor 2) -> Float), so might be something to do with KS_ALLOCATOR not being defined?\r\nOr could just be out of memory not caught?\r\n\r\n\n", "before_files": [{"content": "import torch\nimport ksc.torch_frontend as knossos\n\n# run-bench: Knossos source, and \"nice\" PyTorch implementation\n# BEGINDOC\[email protected]\ndef sqrl(x: torch.Tensor):\n \"\"\"\n sqrl: Squared Leaky Relu\n Like a capsule from /Stuck in a Rut/\n Typically x is a 4x4 tensor, possibly\n packed in a 4n x 4m array\n \"\"\"\n y = torch.sum(x)\n if y < 0.0:\n t = -0.125 * x\n else:\n t = 1 / 2 * x ** 2\n return torch.mean(torch.sin(t) * t)\n\n\n# ENDDOC\n\n# run-bench: PyTorch \"fast\" implementation\ndef sqrl_pytorch(x: torch.Tensor):\n return sqrl(x)\n\n\n# run-bench: PyTorch \"nice\" implementation\ndef sqrl_pytorch_nice(x: torch.Tensor):\n return sqrl(x)\n\n\n# run-bench: Define a range of values at which to call the methods\ndef sqrl_bench_configs():\n yield torch.randn((4, 4))\n yield torch.randn((16, 16))\n\n\n#################################\n#\n# vsqrl - vectorized sqrl\n#\n\nvsqrl = knossos.vmap(sqrl)\n\n\n# run-bench: Define a range of values at which to call the methods\ndef vsqrl_bench_configs():\n yield torch.randn((10, 4, 4))\n yield torch.randn((1000, 4, 4))\n yield torch.randn((1000, 16, 16))\n", "path": "examples/dl-capsule/sqrl.py"}], "after_files": [{"content": "import torch\nimport ksc.torch_frontend as knossos\n\n# run-bench: Knossos source, and \"nice\" PyTorch implementation\n# BEGINDOC\[email protected]\ndef sqrl(x: torch.Tensor):\n \"\"\"\n sqrl: Squared Leaky Relu\n Like a capsule from /Stuck in a Rut/\n Typically x is a 4x4 tensor, possibly\n packed in a 4n x 4m array\n \"\"\"\n y = torch.sum(x)\n if y < 0.0:\n t = -0.125 * x\n else:\n t = 1 / 2 * x ** 2\n return torch.mean(torch.sin(t) * t)\n\n\n# ENDDOC\n\n# run-bench: PyTorch \"fast\" implementation\ndef sqrl_pytorch(x: torch.Tensor):\n return sqrl.raw_f(x)\n\n\n# run-bench: PyTorch \"nice\" implementation\ndef sqrl_pytorch_nice(x: torch.Tensor):\n return sqrl.raw_f(x)\n\n\n# run-bench: Define a range of values at which to call the methods\ndef sqrl_bench_configs():\n yield torch.randn((4, 4))\n yield torch.randn((16, 16))\n\n\n#################################\n#\n# vsqrl - vectorized sqrl\n#\n\nvsqrl = knossos.vmap(sqrl)\n\n\n# run-bench: Define a range of values at which to call the methods\ndef vsqrl_bench_configs():\n yield torch.randn((10, 4, 4))\n yield torch.randn((1000, 4, 4))\n yield torch.randn((1000, 16, 16))\n", "path": "examples/dl-capsule/sqrl.py"}]} |
gh_patches_debug_1603 | rasdani/github-patches | git_diff | learningequality__kolibri-7238 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
import footer styling regression
### Observed behavior

### Expected behavior
button and text should be vertically centered, or the footer should be shorter in height
### Steps to reproduce
import
### Context
0.14 beta 3
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `kolibri/core/content/utils/channels.py`
Content:
```
1 import fnmatch
2 import logging
3 import os
4
5 from django.core.cache import cache
6 from sqlalchemy.exc import DatabaseError
7
8 from .paths import get_content_database_dir_path
9 from .sqlalchemybridge import Bridge
10 from kolibri.core.discovery.utils.filesystem import enumerate_mounted_disk_partitions
11 from kolibri.utils.uuids import is_valid_uuid
12
13 logger = logging.getLogger(__name__)
14
15
16 def get_channel_ids_for_content_dirs(content_dirs):
17 database_dir_paths = [
18 get_content_database_dir_path(contentfolder=path) for path in content_dirs
19 ]
20 channel_ids = set()
21 for path in database_dir_paths:
22 channel_ids.update(get_channel_ids_for_content_database_dir(path))
23 return list(channel_ids)
24
25
26 def get_channel_ids_for_content_database_dir(content_database_dir):
27 """
28 Returns a list of channel IDs for the channel databases that exist in a content database directory.
29 """
30
31 # immediately return an empty list if the content database directory doesn't exist
32 if not os.path.isdir(content_database_dir):
33 return []
34
35 # get a list of all the database files in the directory, and extract IDs
36 db_list = fnmatch.filter(os.listdir(content_database_dir), "*.sqlite3")
37 db_names = [db.split(".sqlite3", 1)[0] for db in db_list]
38
39 # determine which database names are valid, and only use those ones
40 valid_db_names = [name for name in db_names if is_valid_uuid(name)]
41 invalid_db_names = set(db_names) - set(valid_db_names)
42 if invalid_db_names:
43 logger.warning(
44 "Ignoring databases in content database directory '{directory}' with invalid names: {names}".format(
45 directory=content_database_dir, names=invalid_db_names
46 )
47 )
48
49 # nonexistent database files are created if we delete the files that have broken symbolic links;
50 # empty database files are created if we delete a database file while the server is running and connected to it;
51 # here, we delete and exclude such databases to avoid errors when we try to connect to them
52 db_files_to_remove = set({})
53 for db_name in valid_db_names:
54 filename = os.path.join(content_database_dir, "{}.sqlite3".format(db_name))
55 if not os.path.exists(filename) or os.path.getsize(filename) == 0:
56 db_files_to_remove.add(db_name)
57 os.remove(filename)
58
59 if db_files_to_remove:
60 err_msg = (
61 "Removing nonexistent or empty databases in content database directory "
62 "'{directory}' with IDs: {names}.\nPlease import the channels again."
63 )
64 logger.warning(
65 err_msg.format(directory=content_database_dir, names=db_files_to_remove)
66 )
67 valid_dbs = list(set(valid_db_names) - set(db_files_to_remove))
68
69 return valid_dbs
70
71
72 def enumerate_content_database_file_paths(content_database_dir):
73 full_dir_template = os.path.join(content_database_dir, "{}.sqlite3")
74 channel_ids = get_channel_ids_for_content_database_dir(content_database_dir)
75 return [full_dir_template.format(f) for f in channel_ids]
76
77
78 def read_channel_metadata_from_db_file(channeldbpath):
79 # import here to avoid circular imports whenever kolibri.core.content.models imports utils too
80 from kolibri.core.content.models import ChannelMetadata
81
82 source = Bridge(sqlite_file_path=channeldbpath)
83
84 ChannelMetadataClass = source.get_class(ChannelMetadata)
85
86 source_channel_metadata = source.session.query(ChannelMetadataClass).all()[0]
87
88 # Use the inferred version from the SQLAlchemy Bridge object, and set it as additional
89 # metadata on the channel data
90
91 source_channel_metadata.inferred_schema_version = source.schema_version
92
93 source.end()
94
95 # Adds an attribute `root_id` when `root_id` does not exist to match with
96 # the latest schema.
97 if not hasattr(source_channel_metadata, "root_id"):
98 setattr(
99 source_channel_metadata,
100 "root_id",
101 getattr(source_channel_metadata, "root_pk"),
102 )
103
104 return source_channel_metadata
105
106
107 def get_channels_for_data_folder(datafolder):
108 channels = []
109 for path in enumerate_content_database_file_paths(
110 get_content_database_dir_path(datafolder)
111 ):
112 try:
113 channel = read_channel_metadata_from_db_file(path)
114 except DatabaseError:
115 logger.warning(
116 "Tried to import channel from database file {}, but the file was corrupted.".format(
117 path
118 )
119 )
120 continue
121 channel_data = {
122 "path": path,
123 "id": channel.id,
124 "name": channel.name,
125 "description": channel.description,
126 "tagline": channel.tagline,
127 "thumbnail": channel.thumbnail,
128 "version": channel.version,
129 "root": channel.root_id,
130 "author": channel.author,
131 "last_updated": getattr(channel, "last_updated", None),
132 "lang_code": getattr(channel, "lang_code", None),
133 "lang_name": getattr(channel, "lang_name", None),
134 }
135 channels.append(channel_data)
136 return channels
137
138
139 # Use this to cache mounted drive information when
140 # it has already been fetched for querying by drive id
141 MOUNTED_DRIVES_CACHE_KEY = "mounted_drives_cache_key"
142
143
144 def get_mounted_drives_with_channel_info():
145 drives = enumerate_mounted_disk_partitions()
146 for drive in drives.values():
147 drive.metadata["channels"] = (
148 get_channels_for_data_folder(drive.datafolder) if drive.datafolder else []
149 )
150 cache.set(MOUNTED_DRIVES_CACHE_KEY, drives, 3600)
151 return drives
152
153
154 def get_mounted_drive_by_id(drive_id):
155 drives = cache.get(MOUNTED_DRIVES_CACHE_KEY)
156 if drives is None or drives.get(drive_id, None) is None:
157 drives = get_mounted_drives_with_channel_info()
158 return drives[drive_id]
159
```
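One idiom worth noting in the module above: rows read from older channel databases may predate newer columns, so the code backfills `root_id` from `root_pk` and reads optional fields (`last_updated`, `lang_code`) through `getattr` defaults; the diff below extends the same idiom to `tagline`. A minimal sketch of the backfill:

```python
class LegacyRow:
    # Simulates a row from an old schema: root_pk exists, root_id does not.
    root_pk = "abc123"

row = LegacyRow()
if not hasattr(row, "root_id"):
    setattr(row, "root_id", getattr(row, "root_pk"))

assert row.root_id == "abc123"
assert getattr(row, "lang_code", None) is None  # optional-field default
```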
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/kolibri/core/content/utils/channels.py b/kolibri/core/content/utils/channels.py
--- a/kolibri/core/content/utils/channels.py
+++ b/kolibri/core/content/utils/channels.py
@@ -123,7 +123,7 @@
"id": channel.id,
"name": channel.name,
"description": channel.description,
- "tagline": channel.tagline,
+ "tagline": getattr(channel, "tagline", ""),
"thumbnail": channel.thumbnail,
"version": channel.version,
"root": channel.root_id,
| {"golden_diff": "diff --git a/kolibri/core/content/utils/channels.py b/kolibri/core/content/utils/channels.py\n--- a/kolibri/core/content/utils/channels.py\n+++ b/kolibri/core/content/utils/channels.py\n@@ -123,7 +123,7 @@\n \"id\": channel.id,\n \"name\": channel.name,\n \"description\": channel.description,\n- \"tagline\": channel.tagline,\n+ \"tagline\": getattr(channel, \"tagline\", \"\"),\n \"thumbnail\": channel.thumbnail,\n \"version\": channel.version,\n \"root\": channel.root_id,\n", "issue": "import footer styling regression\n\r\n\r\n### Observed behavior\r\n\r\n\r\n\r\n### Expected behavior\r\n\r\nbutton and text should be vertically centered, or the footer should be shorter in height\r\n\r\n\r\n\r\n### Steps to reproduce\r\n\r\nimport\r\n\r\n### Context\r\n\r\n\r\n0.14 beta 3\n", "before_files": [{"content": "import fnmatch\nimport logging\nimport os\n\nfrom django.core.cache import cache\nfrom sqlalchemy.exc import DatabaseError\n\nfrom .paths import get_content_database_dir_path\nfrom .sqlalchemybridge import Bridge\nfrom kolibri.core.discovery.utils.filesystem import enumerate_mounted_disk_partitions\nfrom kolibri.utils.uuids import is_valid_uuid\n\nlogger = logging.getLogger(__name__)\n\n\ndef get_channel_ids_for_content_dirs(content_dirs):\n database_dir_paths = [\n get_content_database_dir_path(contentfolder=path) for path in content_dirs\n ]\n channel_ids = set()\n for path in database_dir_paths:\n channel_ids.update(get_channel_ids_for_content_database_dir(path))\n return list(channel_ids)\n\n\ndef get_channel_ids_for_content_database_dir(content_database_dir):\n \"\"\"\n Returns a list of channel IDs for the channel databases that exist in a content database directory.\n \"\"\"\n\n # immediately return an empty list if the content database directory doesn't exist\n if not os.path.isdir(content_database_dir):\n return []\n\n # get a list of all the database files in the directory, and extract IDs\n db_list = fnmatch.filter(os.listdir(content_database_dir), \"*.sqlite3\")\n db_names = [db.split(\".sqlite3\", 1)[0] for db in db_list]\n\n # determine which database names are valid, and only use those ones\n valid_db_names = [name for name in db_names if is_valid_uuid(name)]\n invalid_db_names = set(db_names) - set(valid_db_names)\n if invalid_db_names:\n logger.warning(\n \"Ignoring databases in content database directory '{directory}' with invalid names: {names}\".format(\n directory=content_database_dir, names=invalid_db_names\n )\n )\n\n # nonexistent database files are created if we delete the files that have broken symbolic links;\n # empty database files are created if we delete a database file while the server is running and connected to it;\n # here, we delete and exclude such databases to avoid errors when we try to connect to them\n db_files_to_remove = set({})\n for db_name in valid_db_names:\n filename = os.path.join(content_database_dir, \"{}.sqlite3\".format(db_name))\n if not os.path.exists(filename) or os.path.getsize(filename) == 0:\n db_files_to_remove.add(db_name)\n os.remove(filename)\n\n if db_files_to_remove:\n err_msg = (\n \"Removing nonexistent or empty databases in content database directory \"\n \"'{directory}' with IDs: {names}.\\nPlease import the channels again.\"\n )\n logger.warning(\n err_msg.format(directory=content_database_dir, names=db_files_to_remove)\n )\n valid_dbs = list(set(valid_db_names) - set(db_files_to_remove))\n\n return valid_dbs\n\n\ndef enumerate_content_database_file_paths(content_database_dir):\n full_dir_template = 
os.path.join(content_database_dir, \"{}.sqlite3\")\n channel_ids = get_channel_ids_for_content_database_dir(content_database_dir)\n return [full_dir_template.format(f) for f in channel_ids]\n\n\ndef read_channel_metadata_from_db_file(channeldbpath):\n # import here to avoid circular imports whenever kolibri.core.content.models imports utils too\n from kolibri.core.content.models import ChannelMetadata\n\n source = Bridge(sqlite_file_path=channeldbpath)\n\n ChannelMetadataClass = source.get_class(ChannelMetadata)\n\n source_channel_metadata = source.session.query(ChannelMetadataClass).all()[0]\n\n # Use the inferred version from the SQLAlchemy Bridge object, and set it as additional\n # metadata on the channel data\n\n source_channel_metadata.inferred_schema_version = source.schema_version\n\n source.end()\n\n # Adds an attribute `root_id` when `root_id` does not exist to match with\n # the latest schema.\n if not hasattr(source_channel_metadata, \"root_id\"):\n setattr(\n source_channel_metadata,\n \"root_id\",\n getattr(source_channel_metadata, \"root_pk\"),\n )\n\n return source_channel_metadata\n\n\ndef get_channels_for_data_folder(datafolder):\n channels = []\n for path in enumerate_content_database_file_paths(\n get_content_database_dir_path(datafolder)\n ):\n try:\n channel = read_channel_metadata_from_db_file(path)\n except DatabaseError:\n logger.warning(\n \"Tried to import channel from database file {}, but the file was corrupted.\".format(\n path\n )\n )\n continue\n channel_data = {\n \"path\": path,\n \"id\": channel.id,\n \"name\": channel.name,\n \"description\": channel.description,\n \"tagline\": channel.tagline,\n \"thumbnail\": channel.thumbnail,\n \"version\": channel.version,\n \"root\": channel.root_id,\n \"author\": channel.author,\n \"last_updated\": getattr(channel, \"last_updated\", None),\n \"lang_code\": getattr(channel, \"lang_code\", None),\n \"lang_name\": getattr(channel, \"lang_name\", None),\n }\n channels.append(channel_data)\n return channels\n\n\n# Use this to cache mounted drive information when\n# it has already been fetched for querying by drive id\nMOUNTED_DRIVES_CACHE_KEY = \"mounted_drives_cache_key\"\n\n\ndef get_mounted_drives_with_channel_info():\n drives = enumerate_mounted_disk_partitions()\n for drive in drives.values():\n drive.metadata[\"channels\"] = (\n get_channels_for_data_folder(drive.datafolder) if drive.datafolder else []\n )\n cache.set(MOUNTED_DRIVES_CACHE_KEY, drives, 3600)\n return drives\n\n\ndef get_mounted_drive_by_id(drive_id):\n drives = cache.get(MOUNTED_DRIVES_CACHE_KEY)\n if drives is None or drives.get(drive_id, None) is None:\n drives = get_mounted_drives_with_channel_info()\n return drives[drive_id]\n", "path": "kolibri/core/content/utils/channels.py"}], "after_files": [{"content": "import fnmatch\nimport logging\nimport os\n\nfrom django.core.cache import cache\nfrom sqlalchemy.exc import DatabaseError\n\nfrom .paths import get_content_database_dir_path\nfrom .sqlalchemybridge import Bridge\nfrom kolibri.core.discovery.utils.filesystem import enumerate_mounted_disk_partitions\nfrom kolibri.utils.uuids import is_valid_uuid\n\nlogger = logging.getLogger(__name__)\n\n\ndef get_channel_ids_for_content_dirs(content_dirs):\n database_dir_paths = [\n get_content_database_dir_path(contentfolder=path) for path in content_dirs\n ]\n channel_ids = set()\n for path in database_dir_paths:\n channel_ids.update(get_channel_ids_for_content_database_dir(path))\n return list(channel_ids)\n\n\ndef 
get_channel_ids_for_content_database_dir(content_database_dir):\n \"\"\"\n Returns a list of channel IDs for the channel databases that exist in a content database directory.\n \"\"\"\n\n # immediately return an empty list if the content database directory doesn't exist\n if not os.path.isdir(content_database_dir):\n return []\n\n # get a list of all the database files in the directory, and extract IDs\n db_list = fnmatch.filter(os.listdir(content_database_dir), \"*.sqlite3\")\n db_names = [db.split(\".sqlite3\", 1)[0] for db in db_list]\n\n # determine which database names are valid, and only use those ones\n valid_db_names = [name for name in db_names if is_valid_uuid(name)]\n invalid_db_names = set(db_names) - set(valid_db_names)\n if invalid_db_names:\n logger.warning(\n \"Ignoring databases in content database directory '{directory}' with invalid names: {names}\".format(\n directory=content_database_dir, names=invalid_db_names\n )\n )\n\n # nonexistent database files are created if we delete the files that have broken symbolic links;\n # empty database files are created if we delete a database file while the server is running and connected to it;\n # here, we delete and exclude such databases to avoid errors when we try to connect to them\n db_files_to_remove = set({})\n for db_name in valid_db_names:\n filename = os.path.join(content_database_dir, \"{}.sqlite3\".format(db_name))\n if not os.path.exists(filename) or os.path.getsize(filename) == 0:\n db_files_to_remove.add(db_name)\n os.remove(filename)\n\n if db_files_to_remove:\n err_msg = (\n \"Removing nonexistent or empty databases in content database directory \"\n \"'{directory}' with IDs: {names}.\\nPlease import the channels again.\"\n )\n logger.warning(\n err_msg.format(directory=content_database_dir, names=db_files_to_remove)\n )\n valid_dbs = list(set(valid_db_names) - set(db_files_to_remove))\n\n return valid_dbs\n\n\ndef enumerate_content_database_file_paths(content_database_dir):\n full_dir_template = os.path.join(content_database_dir, \"{}.sqlite3\")\n channel_ids = get_channel_ids_for_content_database_dir(content_database_dir)\n return [full_dir_template.format(f) for f in channel_ids]\n\n\ndef read_channel_metadata_from_db_file(channeldbpath):\n # import here to avoid circular imports whenever kolibri.core.content.models imports utils too\n from kolibri.core.content.models import ChannelMetadata\n\n source = Bridge(sqlite_file_path=channeldbpath)\n\n ChannelMetadataClass = source.get_class(ChannelMetadata)\n\n source_channel_metadata = source.session.query(ChannelMetadataClass).all()[0]\n\n # Use the inferred version from the SQLAlchemy Bridge object, and set it as additional\n # metadata on the channel data\n\n source_channel_metadata.inferred_schema_version = source.schema_version\n\n source.end()\n\n # Adds an attribute `root_id` when `root_id` does not exist to match with\n # the latest schema.\n if not hasattr(source_channel_metadata, \"root_id\"):\n setattr(\n source_channel_metadata,\n \"root_id\",\n getattr(source_channel_metadata, \"root_pk\"),\n )\n\n return source_channel_metadata\n\n\ndef get_channels_for_data_folder(datafolder):\n channels = []\n for path in enumerate_content_database_file_paths(\n get_content_database_dir_path(datafolder)\n ):\n try:\n channel = read_channel_metadata_from_db_file(path)\n except DatabaseError:\n logger.warning(\n \"Tried to import channel from database file {}, but the file was corrupted.\".format(\n path\n )\n )\n continue\n channel_data = {\n \"path\": path,\n \"id\": 
channel.id,\n \"name\": channel.name,\n \"description\": channel.description,\n \"tagline\": getattr(channel, \"tagline\", \"\"),\n \"thumbnail\": channel.thumbnail,\n \"version\": channel.version,\n \"root\": channel.root_id,\n \"author\": channel.author,\n \"last_updated\": getattr(channel, \"last_updated\", None),\n \"lang_code\": getattr(channel, \"lang_code\", None),\n \"lang_name\": getattr(channel, \"lang_name\", None),\n }\n channels.append(channel_data)\n return channels\n\n\n# Use this to cache mounted drive information when\n# it has already been fetched for querying by drive id\nMOUNTED_DRIVES_CACHE_KEY = \"mounted_drives_cache_key\"\n\n\ndef get_mounted_drives_with_channel_info():\n drives = enumerate_mounted_disk_partitions()\n for drive in drives.values():\n drive.metadata[\"channels\"] = (\n get_channels_for_data_folder(drive.datafolder) if drive.datafolder else []\n )\n cache.set(MOUNTED_DRIVES_CACHE_KEY, drives, 3600)\n return drives\n\n\ndef get_mounted_drive_by_id(drive_id):\n drives = cache.get(MOUNTED_DRIVES_CACHE_KEY)\n if drives is None or drives.get(drive_id, None) is None:\n drives = get_mounted_drives_with_channel_info()\n return drives[drive_id]\n", "path": "kolibri/core/content/utils/channels.py"}]} |
gh_patches_debug_1604 | rasdani/github-patches | git_diff | cocotb__cocotb-2079 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Hierarchy access for generate statement
When we instantiate a block inside a generate statement, how do we access that hierarchical module?
Example: I want to access `SubBlock[3].u_SubModule.a` in the design below.
```verilog
genvar i;
generate
    for (i = 0; i < 5; i = i + 1) begin : SubBlock
        SubModule u_SubModule(
            .a(...),
            .b(...),
        );
    end
endgenerate
```
First, I tried this:
`RisingEdge(dut.SubBlock[3].u_SubModule.a)` -> cocotb can't find `SubBlock`.
Second:
`RisingEdge(dut.u_SubModule.a)` -> cocotb finds an object, but which module is it accessing? I instantiated five modules.
(See the sketch just after this issue block.)
--- END ISSUE ---
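A hedged sketch of the access pattern the issue is after, assuming the simulator exposes the generate scope `SubBlock` so cocotb's handle discovery can index it (signal names follow the issue's Verilog; whether the scope is visible is simulator-dependent):

```python
import cocotb
from cocotb.triggers import RisingEdge

@cocotb.test()
async def pick_generate_instance(dut):
    # Index the generate loop by its block label, as in
    # dut.SubBlock[3].u_SubModule.a from the issue; this works only
    # when the simulator reports the generate scope to cocotb.
    sub = dut.SubBlock[3].u_SubModule
    await RisingEdge(sub.a)
```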
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cocotb/handle.py`
Content:
```
1 #!/usr/bin/env python
2
3 # Copyright (c) 2013 Potential Ventures Ltd
4 # Copyright (c) 2013 SolarFlare Communications Inc
5 # All rights reserved.
6 #
7 # Redistribution and use in source and binary forms, with or without
8 # modification, are permitted provided that the following conditions are met:
9 # * Redistributions of source code must retain the above copyright
10 # notice, this list of conditions and the following disclaimer.
11 # * Redistributions in binary form must reproduce the above copyright
12 # notice, this list of conditions and the following disclaimer in the
13 # documentation and/or other materials provided with the distribution.
14 # * Neither the name of Potential Ventures Ltd,
15 # SolarFlare Communications Inc nor the
16 # names of its contributors may be used to endorse or promote products
17 # derived from this software without specific prior written permission.
18 #
19 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
20 # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
21 # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 # DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY
23 # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
24 # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
25 # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
26 # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
28 # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 # -*- coding: utf-8 -*-
31
32 import ctypes
33 import warnings
34
35 import cocotb
36 from cocotb import simulator
37 from cocotb.binary import BinaryValue
38 from cocotb.log import SimLog
39 from cocotb.result import TestError
40
41 # Only issue a warning for each deprecated attribute access
42 _deprecation_warned = set()
43
44
45 class SimHandleBase:
46 """Base class for all simulation objects.
47
48 We maintain a handle which we can use for GPI calls.
49 """
50
51 # For backwards compatibility we support a mapping of old member names
52 # which may alias with the simulator hierarchy. In these cases the
53 # simulator result takes priority, only falling back to the python member
54 # if there is no colliding object in the elaborated design.
55 _compat_mapping = {
56 "log" : "_log",
57 "fullname" : "_fullname",
58 "name" : "_name",
59 }
60
61 def __init__(self, handle, path):
62 """
63 .. Constructor. This RST comment works around sphinx-doc/sphinx#6885
64
65 Args:
66 handle (int): The GPI handle to the simulator object.
67 path (str): Path to this handle, ``None`` if root.
68 """
69 self._handle = handle
70 self._len = None # type: int
71 """The "length" (the number of elements) of the underlying object. For vectors this is the number of bits."""
72 self._sub_handles = {} # type: dict
73 """Dictionary of this handle's children."""
74 self._invalid_sub_handles = set() # type: set
75 """Python :class:`set` of invalid queries, for caching purposes."""
76 self._name = self._handle.get_name_string() # type: str
77 """The name of an object.
78
79 :meta public:
80 """
81 self._type = self._handle.get_type_string() # type: str
82 """The type of an object as a string.
83
84 :meta public:
85 """
86 self._fullname = self._name + "(%s)" % self._type # type: str
87 """The name of an object with its type appended in parentheses."""
88 self._path = self._name if path is None else path # type: str
89 """The path to this handle, or its name if this is the root handle.
90
91 :meta public:
92 """
93 self._log = SimLog("cocotb.%s" % self._name)
94 """The logging object."""
95 self._log.debug("Created")
96 self._def_name = self._handle.get_definition_name() # type: str
97 """The name of a GPI object's definition.
98
99 This is the value of ``vpiDefName`` for VPI, ``vhpiNameP`` for VHPI,
100 and ``mti_GetPrimaryName`` for FLI.
101 Support for this depends on the specific object type and simulator used.
102
103 :meta public:
104 """
105 self._def_file = self._handle.get_definition_file() # type: str
106 """The name of the file that sources the object's definition.
107
108 This is the value of ``vpiDefFile`` for VPI, ``vhpiFileNameP`` for VHPI,
109 and ``mti_GetRegionSourceName`` for FLI.
110 Support for this depends on the specific object type and simulator used.
111
112 :meta public:
113 """
114
115 def get_definition_name(self):
116 return self._def_name
117
118 def get_definition_file(self):
119 return self._def_file
120
121 def __hash__(self):
122 return hash(self._handle)
123
124 def __len__(self):
125 """Return the "length" (the number of elements) of the underlying object.
126
127 For vectors this is the number of bits.
128 """
129 if self._len is None:
130 self._len = self._handle.get_num_elems()
131 return self._len
132
133 def __eq__(self, other):
134 """Equality comparator for handles
135
136 Example usage::
137
138 if clk == dut.clk:
139 do_something()
140 """
141 if not isinstance(other, SimHandleBase):
142 return NotImplemented
143 return self._handle == other._handle
144
145 def __ne__(self, other):
146 if not isinstance(other, SimHandleBase):
147 return NotImplemented
148 return self._handle != other._handle
149
150 def __repr__(self):
151 desc = self._path
152 defname = self._def_name
153 if defname:
154 desc += " with definition "+defname
155 deffile = self._def_file
156 if deffile:
157 desc += " (at "+deffile+")"
158 return type(self).__qualname__ + "(" + desc + ")"
159
160 def __str__(self):
161 return self._path
162
163 def __setattr__(self, name, value):
164 if name in self._compat_mapping:
165 if name not in _deprecation_warned:
166 warnings.warn("Use of attribute %r is deprecated, use %r instead" % (name, self._compat_mapping[name]))
167 _deprecation_warned.add(name)
168 return setattr(self, self._compat_mapping[name], value)
169 else:
170 return object.__setattr__(self, name, value)
171
172 def __getattr__(self, name):
173 if name in self._compat_mapping:
174 if name not in _deprecation_warned:
175 warnings.warn("Use of attribute %r is deprecated, use %r instead" % (name, self._compat_mapping[name]))
176 _deprecation_warned.add(name)
177 return getattr(self, self._compat_mapping[name])
178 else:
179 return object.__getattribute__(self, name)
180
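# Illustration (editor's sketch, not part of the original handle.py, hence
# unnumbered; assumes no design object named `fullname` shadows the lookup):
# the _compat_mapping fallback keeps legacy attribute names usable, with a
# one-time warning per name.
def _example_compat_names(handle):
    import warnings
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        legacy = handle.fullname       # deprecated spelling, may warn once
    assert legacy == handle._fullname  # both resolve to the same attribute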
181
182 class RegionObject(SimHandleBase):
183 """A region object, such as a scope or namespace.
184
185 Region objects don't have values, they are effectively scopes or namespaces.
186 """
187
188 def __init__(self, handle, path):
189 SimHandleBase.__init__(self, handle, path)
190 self._discovered = False # True if this object has already been discovered
191
192 def __iter__(self):
193 """Iterate over all known objects in this layer of hierarchy."""
194 if not self._discovered:
195 self._discover_all()
196
197 for name, handle in self._sub_handles.items():
198 if isinstance(handle, list):
199 self._log.debug("Found index list length %d", len(handle))
200 for subindex, subhdl in enumerate(handle):
201 if subhdl is None:
202 self._log.warning("Index %d doesn't exist in %s.%s", subindex, self._name, name)
203 continue
204 self._log.debug("Yielding index %d from %s (%s)", subindex, name, type(subhdl))
205 yield subhdl
206 else:
207 self._log.debug("Yielding %s (%s)", name, handle)
208 yield handle
209
210 def _discover_all(self):
211 """When iterating or performing IPython tab completion, we run through ahead of
212 time and discover all possible children, populating the :any:`_sub_handles`
213 mapping. Hierarchy can't change after elaboration so we only have to
214 do this once.
215 """
216 if self._discovered:
217 return
218 self._log.debug("Discovering all on %s", self._name)
219 for thing in self._handle.iterate(simulator.OBJECTS):
220 name = thing.get_name_string()
221 try:
222 hdl = SimHandle(thing, self._child_path(name))
223 except TestError as e:
224 self._log.debug("%s", e)
225 continue
226
227 try:
228 key = self._sub_handle_key(name)
229 except ValueError:
230 self._log.debug("Unable to translate handle >%s< to a valid _sub_handle key", hdl._name)
231 continue
232
233 self._sub_handles[key] = hdl
234
235 self._discovered = True
236
237 def _child_path(self, name) -> str:
238 """Return a string of the path of the child :any:`SimHandle` for a given *name*."""
239 return self._path + "." + name
240
241 def _sub_handle_key(self, name):
242 """Translate the handle name to a key to use in :any:`_sub_handles` dictionary."""
243 return name.split(".")[-1]
244
245 def __dir__(self):
246 """Permits IPython tab completion to work."""
247 self._discover_all()
248 return super(RegionObject, self).__dir__() + [str(k) for k in self._sub_handles]
249
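# Illustration (editor's sketch; `dut` is a hypothetical root handle): the
# first iteration over a region runs _discover_all() once, after which all
# children are served from the _sub_handles cache.
def _example_iterate_region(dut):
    for child in dut:                      # triggers one-time discovery
        dut._log.info("found %s", child._path)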
250
251 class HierarchyObject(RegionObject):
252 """Hierarchy objects are namespace/scope objects."""
253
254 def __get_sub_handle_by_name(self, name):
255 try:
256 return self._sub_handles[name]
257 except KeyError:
258 pass
259
260 # Cache to avoid a call to the simulator if we already know the name is
261 # invalid. Unclear if we care, but we had this before.
262 if name in self._invalid_sub_handles:
263 return None
264
265 new_handle = self._handle.get_handle_by_name(name)
266
267 if not new_handle:
268 self._invalid_sub_handles.add(name)
269 return None
270
271 sub_handle = SimHandle(new_handle, self._child_path(name))
272 self._sub_handles[name] = sub_handle
273 return sub_handle
274
275 def __setattr__(self, name, value):
276 """Provide transparent access to signals via the hierarchy.
277
278 Slightly hacky version of operator overloading in Python.
279
280 Raise an :exc:`AttributeError` if users attempt to create new members which
281 don't exist in the design.
282 """
283
284 # private attributes pass through directly
285 if name.startswith("_"):
286 return SimHandleBase.__setattr__(self, name, value)
287
288 # then try handles
289 sub = self.__get_sub_handle_by_name(name)
290 if sub is not None:
291 sub.value = value
292 return
293
294 # compat behavior
295 if name in self._compat_mapping:
296 return SimHandleBase.__setattr__(self, name, value)
297
298 raise AttributeError("%s contains no object named %s" % (self._name, name))
299
300 def __getattr__(self, name):
301 """Query the simulator for an object with the specified name
302 and cache the result to build a tree of objects.
303 """
304 if name.startswith("_"):
305 return SimHandleBase.__getattr__(self, name)
306
307 handle = self.__get_sub_handle_by_name(name)
308 if handle is not None:
309 return handle
310
311 if name in self._compat_mapping:
312 return SimHandleBase.__getattr__(self, name)
313
314 raise AttributeError("%s contains no object named %s" % (self._name, name))
315
316 def _id(self, name, extended: bool = True):
317 """Query the simulator for an object with the specified *name*,
318 and cache the result to build a tree of objects.
319
320 If *extended* is ``True``, run the query only for VHDL extended identifiers.
321 For Verilog, only ``extended=False`` is supported.
322
323 :meta public:
324 """
325 if extended:
326 name = "\\"+name+"\\"
327
328 handle = self.__get_sub_handle_by_name(name)
329 if handle is not None:
330 return handle
331
332 raise AttributeError("%s contains no object named %s" % (self._name, name))
333
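# Illustration (editor's sketch; every name below is made up): reading and
# writing through the hierarchy, plus _id() for VHDL extended identifiers.
def _example_hierarchy_access(dut):
    dut.u_sub.data_in.value = 3              # __setattr__ schedules a write
    width = len(dut.u_sub.data_out)          # __getattr__ caches the lookup
    sel = dut._id("bus-sel", extended=True)  # resolves \bus-sel\ in VHDL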
334
335 class HierarchyArrayObject(RegionObject):
336 """Hierarchy Arrays are containers of Hierarchy Objects."""
337
338 def _sub_handle_key(self, name):
339 """Translate the handle name to a key to use in :any:`_sub_handles` dictionary."""
340 # This is slightly hacky, but we need to extract the index from the name
341 #
342 # FLI and VHPI(IUS): _name(X) where X is the index
343 # VHPI(ALDEC): _name__X where X is the index
344 # VPI: _name[X] where X is the index
345 import re
346 result = re.match(r"{0}__(?P<index>\d+)$".format(self._name), name)
347 if not result:
348 result = re.match(r"{0}\((?P<index>\d+)\)$".format(self._name), name)
349 if not result:
350 result = re.match(r"{0}\[(?P<index>\d+)\]$".format(self._name), name)
351
352 if result:
353 return int(result.group("index"))
354 else:
355 raise ValueError("Unable to match an index pattern: {}".format(name))
356
357 def __len__(self):
358 """Return the "length" of the generate block."""
359 if self._len is None:
360 if not self._discovered:
361 self._discover_all()
362
363 self._len = len(self._sub_handles)
364 return self._len
365
366 def __getitem__(self, index):
367 if isinstance(index, slice):
368 raise IndexError("Slice indexing is not supported")
369 if index in self._sub_handles:
370 return self._sub_handles[index]
371 new_handle = self._handle.get_handle_by_index(index)
372 if not new_handle:
373 raise IndexError("%s contains no object at index %d" % (self._name, index))
374 path = self._path + "[" + str(index) + "]"
375 self._sub_handles[index] = SimHandle(new_handle, path)
376 return self._sub_handles[index]
377
378 def _child_path(self, name):
379 """Return a string of the path of the child :any:`SimHandle` for a given name."""
380 index = self._sub_handle_key(name)
381 return self._path + "[" + str(index) + "]"
382
383 def __setitem__(self, index, value):
384 raise TypeError("Not permissible to set %s at index %d" % (self._name, index))
385
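# Illustration (editor's sketch; names are hypothetical): a generate-for
# block such as `for (...) begin : sub_blk` surfaces as a
# HierarchyArrayObject and is indexed with [] rather than attribute access.
def _example_generate_block(dut):
    inst = dut.sub_blk[3]            # one iteration of the generate loop
    inst.u_mod.a.value = 1
    count = len(dut.sub_blk)         # number of iterations found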
386
387 class _AssignmentResult:
388 """
389 An object that exists solely to provide an error message if the caller
390 is not aware of cocotb's meaning of ``<=``.
391 """
392
393 def __init__(self, signal, value):
394 self._signal = signal
395 self._value = value
396
397 def __bool__(self):
398 raise TypeError(
399 "Attempted to use `{0._signal!r} <= {0._value!r}` (a cocotb "
400 "delayed write) as if it were a numeric comparison. To perform "
401 "comparison, use `{0._signal!r}.value <= {0._value!r}` instead."
402 .format(self)
403 )
404
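# Illustration (editor's sketch; `dut.sig` is hypothetical): the sentinel
# above turns misuse of <= as a comparison into a clear error.
def _example_le_shortcut(dut):
    dut.sig <= 1                # HDL-style shortcut: schedules a write
    # bool(dut.sig <= 1) would raise TypeError via _AssignmentResult
    if dut.sig.value <= 1:      # the correct numeric comparison
        pass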
405
406 class NonHierarchyObject(SimHandleBase):
407 """Common base class for all non-hierarchy objects."""
408
409 def __iter__(self):
410 return iter(())
411
412 @property
413 def value(self):
414 """The value of this simulation object.
415
416 .. note::
417 When setting this property, the value is stored by the :class:`~cocotb.scheduler.Scheduler`
418 and all stored values are written at the same time at the end of the current simulator time step.
419
420 Use :meth:`setimmediatevalue` to set the value immediately.
421 """
422 raise TypeError("Not permissible to get values of object %s of type %s" % (self._name, type(self)))
423
424 @value.setter
425 def value(self, value):
426 self._set_value(value, cocotb.scheduler._schedule_write)
427
428 def setimmediatevalue(self, value):
429 """ Assign a value to this simulation object immediately. """
430 def _call_now(handle, f, *args):
431 f(*args)
432 self._set_value(value, _call_now)
433
434 def _set_value(self, value, call_sim):
435         """ This should be overridden in subclasses.
436
437 This is used to implement both the setter for :attr:`value`, and the
438 :meth:`setimmediatevalue` method.
439
440 ``call_sim(handle, f, *args)`` should be used to schedule simulator writes,
441 rather than performing them directly as ``f(*args)``.
442 """
443 raise TypeError("Not permissible to set values on object %s of type %s" % (self._name, type(self)))
444
445 def __le__(self, value):
446 """Overload less-than-or-equal-to operator to provide an HDL-like shortcut.
447
448 Example:
449 >>> module.signal <= 2
450 """
451 self.value = value
452 return _AssignmentResult(self, value)
453
454 def __eq__(self, other):
455 """Equality comparator for non-hierarchy objects
456
457         If ``other`` is not a :class:`SimHandleBase` instance the comparison
458 uses the comparison method of the ``other`` object against our
459 ``.value``.
460 """
461 if isinstance(other, SimHandleBase):
462 return SimHandleBase.__eq__(self, other)
463 return self.value == other
464
465 def __ne__(self, other):
466 if isinstance(other, SimHandleBase):
467 return SimHandleBase.__ne__(self, other)
468 return self.value != other
469
470 # Re-define hash because we defined __eq__
471 def __hash__(self):
472 return SimHandleBase.__hash__(self)
473
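# Illustration (editor's sketch; `dut.sig` is hypothetical): the two write
# paths described in the `value` docstring above.
def _example_write_modes(dut):
    dut.sig.value = 1               # buffered, applied at end of time step
    dut.sig.setimmediatevalue(0)    # bypasses the scheduler, applied now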
474
475 class ConstantObject(NonHierarchyObject):
476 """An object which has a value that can be read, but not set.
477
478 The value is cached in the class since it is fixed at elaboration
479 time and won't change within a simulation.
480 """
481
482 def __init__(self, handle, path, handle_type):
483 """
484 Args:
485 handle (int): The GPI handle to the simulator object.
486 path (str): Path to this handle, ``None`` if root.
487 handle_type: The type of the handle
488 (``simulator.INTEGER``, ``simulator.ENUM``,
489 ``simulator.REAL``, ``simulator.STRING``).
490 """
491 NonHierarchyObject.__init__(self, handle, path)
492 if handle_type in [simulator.INTEGER, simulator.ENUM]:
493 self._value = self._handle.get_signal_val_long()
494 elif handle_type == simulator.REAL:
495 self._value = self._handle.get_signal_val_real()
496 elif handle_type == simulator.STRING:
497 self._value = self._handle.get_signal_val_str()
498 else:
499 val = self._handle.get_signal_val_binstr()
500 self._value = BinaryValue(n_bits=len(val))
501 try:
502 self._value.binstr = val
503 except Exception:
504 self._value = val
505
506 def __int__(self):
507 return int(self.value)
508
509 def __float__(self):
510 return float(self.value)
511
512 @NonHierarchyObject.value.getter
513 def value(self):
514 """The value of this simulation object."""
515 return self._value
516
517 def __str__(self):
518 if isinstance(self.value, bytes):
519 StringObject._emit_str_warning(self)
520 return self.value.decode('ascii')
521 else:
522 ModifiableObject._emit_str_warning(self)
523 return str(self.value)
524
525
526 class NonHierarchyIndexableObject(NonHierarchyObject):
527 """ A non-hierarchy indexable object.
528
529 Getting and setting the current value of an array is done
530 by iterating through sub-handles in left-to-right order.
531
532 Given an HDL array ``arr``:
533
534 +--------------+---------------------+--------------------------------------------------------------+
535 | Verilog | VHDL | ``arr.value`` is equivalent to |
536 +==============+=====================+==============================================================+
537 | ``arr[4:7]`` | ``arr(4 to 7)`` | ``[arr[4].value, arr[5].value, arr[6].value, arr[7].value]`` |
538 +--------------+---------------------+--------------------------------------------------------------+
539 | ``arr[7:4]`` | ``arr(7 downto 4)`` | ``[arr[7].value, arr[6].value, arr[5].value, arr[4].value]`` |
540 +--------------+---------------------+--------------------------------------------------------------+
541
542 When setting the signal as in ``arr.value = ...``, the same index equivalence as noted in the table holds.
543
544 .. warning::
545 Assigning a value to a sub-handle:
546
547 - **Wrong**: ``dut.some_array.value[0] = 1`` (gets value as a list then updates index 0)
548 - **Correct**: ``dut.some_array[0].value = 1``
549 """
550
551 def __init__(self, handle, path):
552 NonHierarchyObject.__init__(self, handle, path)
553 self._range = self._handle.get_range()
554
555 def __setitem__(self, index, value):
556 """Provide transparent assignment to indexed array handles."""
557 self[index].value = value
558
559 def __getitem__(self, index):
560 if isinstance(index, slice):
561 raise IndexError("Slice indexing is not supported")
562 if self._range is None:
563 raise IndexError("%s is not indexable. Unable to get object at index %d" % (self._fullname, index))
564 if index in self._sub_handles:
565 return self._sub_handles[index]
566 new_handle = self._handle.get_handle_by_index(index)
567 if not new_handle:
568 raise IndexError("%s contains no object at index %d" % (self._fullname, index))
569 path = self._path + "[" + str(index) + "]"
570 self._sub_handles[index] = SimHandle(new_handle, path)
571 return self._sub_handles[index]
572
573 def __iter__(self):
574 if self._range is None:
575 return
576
577 self._log.debug("Iterating with range [%d:%d]", self._range[0], self._range[1])
578 for i in self._range_iter(self._range[0], self._range[1]):
579 try:
580 result = self[i]
581 yield result
582 except IndexError:
583 continue
584
585 def _range_iter(self, left, right):
586 if left > right:
587 while left >= right:
588 yield left
589 left = left - 1
590 else:
591 while left <= right:
592 yield left
593 left = left + 1
594
595 @NonHierarchyObject.value.getter
596 def value(self) -> list:
597 # Don't use self.__iter__, because it has an unwanted `except IndexError`
598 return [
599 self[i].value
600 for i in self._range_iter(self._range[0], self._range[1])
601 ]
602
603 def _set_value(self, value, call_sim):
604         """Assign a value from a list of the same length to an array in left-to-right order.
605 Index 0 of the list maps to the left-most index in the array.
606
607 See the docstring for this class.
608 """
609 if type(value) is not list:
610 raise TypeError("Assigning non-list value to object %s of type %s" % (self._name, type(self)))
611 if len(value) != len(self):
612 raise ValueError("Assigning list of length %d to object %s of length %d" % (
613 len(value), self._name, len(self)))
614 for val_idx, self_idx in enumerate(self._range_iter(self._range[0], self._range[1])):
615 self[self_idx]._set_value(value[val_idx], call_sim)
616
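# Illustration (editor's sketch; `dut.arr` is a hypothetical 4-element
# array): the left-to-right semantics from the table above.
def _example_array_value(dut):
    values = dut.arr.value          # left-most element comes first
    dut.arr.value = [0, 1, 2, 3]    # list index 0 -> left-most array index
    dut.arr[0].value = 7            # per-element write, the correct form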
617
618 class NonConstantObject(NonHierarchyIndexableObject):
619 """ A non-constant object"""
620 # FIXME: what is the difference to ModifiableObject? Explain in docstring.
621
622 def drivers(self):
623 """An iterator for gathering all drivers for a signal."""
624 return self._handle.iterate(simulator.DRIVERS)
625
626 def loads(self):
627 """An iterator for gathering all loads on a signal."""
628 return self._handle.iterate(simulator.LOADS)
629
630
631 class _SetAction:
632 """Base class representing the type of action used while write-accessing a handle."""
633 pass
634
635
636 class _SetValueAction(_SetAction):
637 __slots__ = ("value",)
638 """Base class representing the type of action used while write-accessing a handle with a value."""
639
640 def __init__(self, value):
641 self.value = value
642
643
644 class Deposit(_SetValueAction):
645 """Action used for placing a value into a given handle."""
646
647 def _as_gpi_args_for(self, hdl):
648 return self.value, 0 # GPI_DEPOSIT
649
650
651 class Force(_SetValueAction):
652 """Action used to force a handle to a given value until a release is applied."""
653
654 def _as_gpi_args_for(self, hdl):
655 return self.value, 1 # GPI_FORCE
656
657
658 class Freeze(_SetAction):
659 """Action used to make a handle keep its current value until a release is used."""
660
661 def _as_gpi_args_for(self, hdl):
662 return hdl.value, 1 # GPI_FORCE
663
664
665 class Release(_SetAction):
666 """Action used to stop the effects of a previously applied force/freeze action."""
667
668 def _as_gpi_args_for(self, hdl):
669 return 0, 2 # GPI_RELEASE
670
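# Illustration (editor's sketch; `dut.rst` is hypothetical): using the set
# actions above from a cocotb test.
async def _example_set_actions(dut):
    from cocotb.triggers import Timer
    dut.rst.value = Force(1)        # pin rst high, overriding any driver
    await Timer(10, units="ns")
    dut.rst.value = Release()       # hand control back to the design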
671
672 class ModifiableObject(NonConstantObject):
673 """Base class for simulator objects whose values can be modified."""
674
675 def _set_value(self, value, call_sim):
676 """Set the value of the underlying simulation object to *value*.
677
678 This operation will fail unless the handle refers to a modifiable
679 object, e.g. net, signal or variable.
680
681 We determine the library call to make based on the type of the value
682 because assigning integers less than 32 bits is faster.
683
684 Args:
685 value (ctypes.Structure, cocotb.binary.BinaryValue, int, double):
686 The value to drive onto the simulator object.
687
688 Raises:
689 TypeError: If target is not wide enough or has an unsupported type
690 for value assignment.
691 """
692 value, set_action = self._check_for_set_action(value)
693
694 if isinstance(value, int) and value < 0x7fffffff and len(self) <= 32:
695 call_sim(self, self._handle.set_signal_val_long, set_action, value)
696 return
697 if isinstance(value, ctypes.Structure):
698 value = BinaryValue(value=cocotb.utils.pack(value), n_bits=len(self))
699 elif isinstance(value, int):
700 value = BinaryValue(value=value, n_bits=len(self), bigEndian=False)
701 elif isinstance(value, dict):
702 # We're given a dictionary with a list of values and a bit size...
703 num = 0
704 vallist = list(value["values"])
705 vallist.reverse()
706 if len(vallist) * value["bits"] != len(self):
707 raise TypeError("Unable to set with array length %d of %d bit entries = %d total, target is only %d bits long" %
708 (len(value["values"]), value["bits"], len(value["values"]) * value["bits"], len(self)))
709
710 for val in vallist:
711 num = (num << value["bits"]) + val
712 value = BinaryValue(value=num, n_bits=len(self), bigEndian=False)
713
714 elif not isinstance(value, BinaryValue):
715 raise TypeError(
716 "Unsupported type for value assignment: {} ({!r})"
717 .format(type(value), value))
718
719 call_sim(self, self._handle.set_signal_val_binstr, set_action, value.binstr)
720
721 def _check_for_set_action(self, value):
722 if not isinstance(value, _SetAction):
723 return value, 0 # GPI_DEPOSIT
724 return value._as_gpi_args_for(self)
725
726 @NonConstantObject.value.getter
727 def value(self) -> BinaryValue:
728 binstr = self._handle.get_signal_val_binstr()
729 result = BinaryValue(binstr, len(binstr))
730 return result
731
732 def __int__(self):
733 return int(self.value)
734
735 def _emit_str_warning(self):
736 warnings.warn(
737 "`str({t})` is deprecated, and in future will return `{t}._path`. "
738 "To get a string representation of the value, use `str({t}.value)`."
739 .format(t=type(self).__qualname__),
740 FutureWarning, stacklevel=3)
741
742 def __str__(self):
743 self._emit_str_warning()
744 return str(self.value)
745
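# Illustration (editor's sketch; `dut.bus16` is a hypothetical 16-bit
# signal): the dict form accepted by _set_value above. List entry 0 lands
# in the least-significant chunk because the list is reversed before packing.
def _example_dict_write(dut):
    dut.bus16.value = {"values": [0xAB, 0xCD], "bits": 8}
    dut.bus16.value = (0xCD << 8) | 0xAB   # equivalent integer write, 0xCDAB
    word = dut.bus16.value                 # reads back as a BinaryValue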
746
747 class RealObject(ModifiableObject):
748 """Specific object handle for Real signals and variables."""
749
750 def _set_value(self, value, call_sim):
751 """Set the value of the underlying simulation object to value.
752
753 This operation will fail unless the handle refers to a modifiable
754 object, e.g. net, signal or variable.
755
756 Args:
757 value (float): The value to drive onto the simulator object.
758
759 Raises:
760 TypeError: If target has an unsupported type for
761 real value assignment.
762 """
763 value, set_action = self._check_for_set_action(value)
764
765 try:
766 value = float(value)
767 except ValueError:
768 raise TypeError(
769 "Unsupported type for real value assignment: {} ({!r})"
770 .format(type(value), value))
771
772 call_sim(self, self._handle.set_signal_val_real, set_action, value)
773
774 @ModifiableObject.value.getter
775 def value(self) -> float:
776 return self._handle.get_signal_val_real()
777
778 def __float__(self):
779 return float(self.value)
780
781
782 class EnumObject(ModifiableObject):
783 """Specific object handle for enumeration signals and variables."""
784
785 def _set_value(self, value, call_sim):
786 """Set the value of the underlying simulation object to *value*.
787
788 This operation will fail unless the handle refers to a modifiable
789 object, e.g. net, signal or variable.
790
791 Args:
792 value (int): The value to drive onto the simulator object.
793
794 Raises:
795 TypeError: If target has an unsupported type for
796 integer value assignment.
797 """
798 value, set_action = self._check_for_set_action(value)
799
800 if isinstance(value, BinaryValue):
801 value = int(value)
802 elif not isinstance(value, int):
803 raise TypeError(
804 "Unsupported type for enum value assignment: {} ({!r})"
805 .format(type(value), value))
806
807 call_sim(self, self._handle.set_signal_val_long, set_action, value)
808
809 @ModifiableObject.value.getter
810 def value(self) -> int:
811 return self._handle.get_signal_val_long()
812
813
814 class IntegerObject(ModifiableObject):
815 """Specific object handle for Integer and Enum signals and variables."""
816
817 def _set_value(self, value, call_sim):
818 """Set the value of the underlying simulation object to *value*.
819
820 This operation will fail unless the handle refers to a modifiable
821 object, e.g. net, signal or variable.
822
823 Args:
824 value (int): The value to drive onto the simulator object.
825
826 Raises:
827 TypeError: If target has an unsupported type for
828 integer value assignment.
829 """
830 value, set_action = self._check_for_set_action(value)
831
832 if isinstance(value, BinaryValue):
833 value = int(value)
834 elif not isinstance(value, int):
835 raise TypeError(
836 "Unsupported type for integer value assignment: {} ({!r})"
837 .format(type(value), value))
838
839 call_sim(self, self._handle.set_signal_val_long, set_action, value)
840
841 @ModifiableObject.value.getter
842 def value(self) -> int:
843 return self._handle.get_signal_val_long()
844
845
846 class StringObject(ModifiableObject):
847 """Specific object handle for String variables."""
848
849 def _set_value(self, value, call_sim):
850 """Set the value of the underlying simulation object to *value*.
851
852 This operation will fail unless the handle refers to a modifiable
853 object, e.g. net, signal or variable.
854
855 Args:
856 value (bytes): The value to drive onto the simulator object.
857
858 Raises:
859 TypeError: If target has an unsupported type for
860 string value assignment.
861
862 .. versionchanged:: 1.4
863 Takes :class:`bytes` instead of :class:`str`.
864 Users are now expected to choose an encoding when using these objects.
865 As a convenience, when assigning :class:`str` values, ASCII encoding will be used as a safe default.
866
867 """
868 value, set_action = self._check_for_set_action(value)
869
870 if isinstance(value, str):
871 warnings.warn(
872 "Handles on string objects will soon not accept `str` objects. "
873 "Please use a bytes object by encoding the string as you see fit. "
874 "`str.encode('ascii')` is typically sufficient.", DeprecationWarning, stacklevel=2)
875 value = value.encode('ascii') # may throw UnicodeEncodeError
876
877 if not isinstance(value, bytes):
878 raise TypeError(
879 "Unsupported type for string value assignment: {} ({!r})"
880 .format(type(value), value))
881
882 call_sim(self, self._handle.set_signal_val_str, set_action, value)
883
884 @ModifiableObject.value.getter
885 def value(self) -> bytes:
886 return self._handle.get_signal_val_str()
887
888 def _emit_str_warning(self):
889 warnings.warn(
890 "`str({t})` is deprecated, and in future will return `{t}._path`. "
891 "To access the `bytes` value of this handle, use `{t}.value`."
892 .format(t=type(self).__qualname__),
893 FutureWarning, stacklevel=3)
894
895 def __str__(self):
896 self._emit_str_warning()
897 return self.value.decode('ascii')
898
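# Illustration (editor's sketch; `dut.msg` is a hypothetical string
# variable): the bytes-based API introduced in 1.4, described above.
def _example_string_write(dut):
    dut.msg.value = b"hello"                  # preferred: bytes
    dut.msg.value = "hello".encode("ascii")   # equivalent, explicit encoding
    text = dut.msg.value.decode("ascii")      # read back and decode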
899
900 _handle2obj = {}
901
902
903 def SimHandle(handle, path=None):
904 """Factory function to create the correct type of `SimHandle` object.
905
906 Args:
907 handle (int): The GPI handle to the simulator object.
908 path (str): Path to this handle, ``None`` if root.
909
910 Returns:
911 The `SimHandle` object.
912
913 Raises:
914 TestError: If no matching object for GPI type could be found.
915 """
916 _type2cls = {
917 simulator.MODULE: HierarchyObject,
918 simulator.STRUCTURE: HierarchyObject,
919 simulator.REG: ModifiableObject,
920 simulator.NET: ModifiableObject,
921 simulator.NETARRAY: NonHierarchyIndexableObject,
922 simulator.REAL: RealObject,
923 simulator.INTEGER: IntegerObject,
924 simulator.ENUM: EnumObject,
925 simulator.STRING: StringObject,
926 simulator.GENARRAY: HierarchyArrayObject,
927 }
928
929 # Enforce singletons since it's possible to retrieve handles avoiding
930 # the hierarchy by getting driver/load information
931 global _handle2obj
932 try:
933 return _handle2obj[handle]
934 except KeyError:
935 pass
936
937 t = handle.get_type()
938
939 # Special case for constants
940 if handle.get_const() and t not in [
941 simulator.MODULE,
942 simulator.STRUCTURE,
943 simulator.NETARRAY,
944 simulator.GENARRAY,
945 ]:
946 obj = ConstantObject(handle, path, t)
947 _handle2obj[handle] = obj
948 return obj
949
950 if t not in _type2cls:
951 raise TestError("Couldn't find a matching object for GPI type %d (path=%s)" % (t, path))
952 obj = _type2cls[t](handle, path)
953 _handle2obj[handle] = obj
954 return obj
955
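# Illustration (editor's sketch; `dut.clk` is hypothetical): SimHandle and
# the per-handle caches keep one Python object per design object.
def _example_singleton(dut):
    assert dut.clk is dut.clk       # repeated lookups return the same object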
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch in the `git diff` format, fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/cocotb/handle.py b/cocotb/handle.py
--- a/cocotb/handle.py
+++ b/cocotb/handle.py
@@ -249,6 +249,10 @@
except KeyError:
pass
+ if not self._discovered:
+ self._discover_all()
+ return self.__get_sub_handle_by_name(name)
+
# Cache to avoid a call to the simulator if we already know the name is
# invalid. Unclear if we care, but we had this before.
if name in self._invalid_sub_handles:
| {"golden_diff": "diff --git a/cocotb/handle.py b/cocotb/handle.py\n--- a/cocotb/handle.py\n+++ b/cocotb/handle.py\n@@ -249,6 +249,10 @@\n except KeyError:\n pass\n \n+ if not self._discovered:\n+ self._discover_all()\n+ return self.__get_sub_handle_by_name(name)\n+\n # Cache to avoid a call to the simulator if we already know the name is\n # invalid. Unclear if we care, but we had this before.\n if name in self._invalid_sub_handles:\n", "issue": "Hierarchy access for generate statement\nWhen we instantiate some block with generate statement. \r\nHow to access that hierarchical module?\r\n\r\nexample) When I want to access \"SubBlock[3].u_SubModule.a\". \r\ngenvar i;\r\ngenerate \r\n for(i=0;i<5;i=i+1) begin : SubBlock\r\n SubModule u_SubModule(\r\n .a(...),\r\n .b(...),\r\n );\r\n end\r\nendgenerate\r\n\r\nFirst, I tried to do this \r\nRisingEdge(dut.SubBlock[3].u_SubModule.a) -> cocotb can't find SubBlock\r\n\r\nand, Second\r\nRisingEdge(dut.u_SubModule.a) -> cocotb find their object. but, which module accessed? \r\n I instantiated five modules.\r\n\r\n\n", "before_files": [{"content": "#!/usr/bin/env python\n\n# Copyright (c) 2013 Potential Ventures Ltd\n# Copyright (c) 2013 SolarFlare Communications Inc\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above copyright\n# notice, this list of conditions and the following disclaimer in the\n# documentation and/or other materials provided with the distribution.\n# * Neither the name of Potential Ventures Ltd,\n# SolarFlare Communications Inc nor the\n# names of its contributors may be used to endorse or promote products\n# derived from this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND\n# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY\n# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND\n# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n# -*- coding: utf-8 -*-\n\nimport ctypes\nimport warnings\n\nimport cocotb\nfrom cocotb import simulator\nfrom cocotb.binary import BinaryValue\nfrom cocotb.log import SimLog\nfrom cocotb.result import TestError\n\n# Only issue a warning for each deprecated attribute access\n_deprecation_warned = set()\n\n\nclass SimHandleBase:\n \"\"\"Base class for all simulation objects.\n\n We maintain a handle which we can use for GPI calls.\n \"\"\"\n\n # For backwards compatibility we support a mapping of old member names\n # which may alias with the simulator hierarchy. 
In these cases the\n # simulator result takes priority, only falling back to the python member\n # if there is no colliding object in the elaborated design.\n _compat_mapping = {\n \"log\" : \"_log\",\n \"fullname\" : \"_fullname\",\n \"name\" : \"_name\",\n }\n\n def __init__(self, handle, path):\n \"\"\"\n .. Constructor. This RST comment works around sphinx-doc/sphinx#6885\n\n Args:\n handle (int): The GPI handle to the simulator object.\n path (str): Path to this handle, ``None`` if root.\n \"\"\"\n self._handle = handle\n self._len = None # type: int\n \"\"\"The \"length\" (the number of elements) of the underlying object. For vectors this is the number of bits.\"\"\"\n self._sub_handles = {} # type: dict\n \"\"\"Dictionary of this handle's children.\"\"\"\n self._invalid_sub_handles = set() # type: set\n \"\"\"Python :class:`set` of invalid queries, for caching purposes.\"\"\"\n self._name = self._handle.get_name_string() # type: str\n \"\"\"The name of an object.\n\n :meta public:\n \"\"\"\n self._type = self._handle.get_type_string() # type: str\n \"\"\"The type of an object as a string.\n\n :meta public:\n \"\"\"\n self._fullname = self._name + \"(%s)\" % self._type # type: str\n \"\"\"The name of an object with its type appended in parentheses.\"\"\"\n self._path = self._name if path is None else path # type: str\n \"\"\"The path to this handle, or its name if this is the root handle.\n\n :meta public:\n \"\"\"\n self._log = SimLog(\"cocotb.%s\" % self._name)\n \"\"\"The logging object.\"\"\"\n self._log.debug(\"Created\")\n self._def_name = self._handle.get_definition_name() # type: str\n \"\"\"The name of a GPI object's definition.\n\n This is the value of ``vpiDefName`` for VPI, ``vhpiNameP`` for VHPI,\n and ``mti_GetPrimaryName`` for FLI.\n Support for this depends on the specific object type and simulator used.\n\n :meta public:\n \"\"\"\n self._def_file = self._handle.get_definition_file() # type: str\n \"\"\"The name of the file that sources the object's definition.\n\n This is the value of ``vpiDefFile`` for VPI, ``vhpiFileNameP`` for VHPI,\n and ``mti_GetRegionSourceName`` for FLI.\n Support for this depends on the specific object type and simulator used.\n\n :meta public:\n \"\"\"\n\n def get_definition_name(self):\n return self._def_name\n\n def get_definition_file(self):\n return self._def_file\n\n def __hash__(self):\n return hash(self._handle)\n\n def __len__(self):\n \"\"\"Return the \"length\" (the number of elements) of the underlying object.\n\n For vectors this is the number of bits.\n \"\"\"\n if self._len is None:\n self._len = self._handle.get_num_elems()\n return self._len\n\n def __eq__(self, other):\n \"\"\"Equality comparator for handles\n\n Example usage::\n\n if clk == dut.clk:\n do_something()\n \"\"\"\n if not isinstance(other, SimHandleBase):\n return NotImplemented\n return self._handle == other._handle\n\n def __ne__(self, other):\n if not isinstance(other, SimHandleBase):\n return NotImplemented\n return self._handle != other._handle\n\n def __repr__(self):\n desc = self._path\n defname = self._def_name\n if defname:\n desc += \" with definition \"+defname\n deffile = self._def_file\n if deffile:\n desc += \" (at \"+deffile+\")\"\n return type(self).__qualname__ + \"(\" + desc + \")\"\n\n def __str__(self):\n return self._path\n\n def __setattr__(self, name, value):\n if name in self._compat_mapping:\n if name not in _deprecation_warned:\n warnings.warn(\"Use of attribute %r is deprecated, use %r instead\" % (name, self._compat_mapping[name]))\n 
_deprecation_warned.add(name)\n return setattr(self, self._compat_mapping[name], value)\n else:\n return object.__setattr__(self, name, value)\n\n def __getattr__(self, name):\n if name in self._compat_mapping:\n if name not in _deprecation_warned:\n warnings.warn(\"Use of attribute %r is deprecated, use %r instead\" % (name, self._compat_mapping[name]))\n _deprecation_warned.add(name)\n return getattr(self, self._compat_mapping[name])\n else:\n return object.__getattribute__(self, name)\n\n\nclass RegionObject(SimHandleBase):\n \"\"\"A region object, such as a scope or namespace.\n\n Region objects don't have values, they are effectively scopes or namespaces.\n \"\"\"\n\n def __init__(self, handle, path):\n SimHandleBase.__init__(self, handle, path)\n self._discovered = False # True if this object has already been discovered\n\n def __iter__(self):\n \"\"\"Iterate over all known objects in this layer of hierarchy.\"\"\"\n if not self._discovered:\n self._discover_all()\n\n for name, handle in self._sub_handles.items():\n if isinstance(handle, list):\n self._log.debug(\"Found index list length %d\", len(handle))\n for subindex, subhdl in enumerate(handle):\n if subhdl is None:\n self._log.warning(\"Index %d doesn't exist in %s.%s\", subindex, self._name, name)\n continue\n self._log.debug(\"Yielding index %d from %s (%s)\", subindex, name, type(subhdl))\n yield subhdl\n else:\n self._log.debug(\"Yielding %s (%s)\", name, handle)\n yield handle\n\n def _discover_all(self):\n \"\"\"When iterating or performing IPython tab completion, we run through ahead of\n time and discover all possible children, populating the :any:`_sub_handles`\n mapping. Hierarchy can't change after elaboration so we only have to\n do this once.\n \"\"\"\n if self._discovered:\n return\n self._log.debug(\"Discovering all on %s\", self._name)\n for thing in self._handle.iterate(simulator.OBJECTS):\n name = thing.get_name_string()\n try:\n hdl = SimHandle(thing, self._child_path(name))\n except TestError as e:\n self._log.debug(\"%s\", e)\n continue\n\n try:\n key = self._sub_handle_key(name)\n except ValueError:\n self._log.debug(\"Unable to translate handle >%s< to a valid _sub_handle key\", hdl._name)\n continue\n\n self._sub_handles[key] = hdl\n\n self._discovered = True\n\n def _child_path(self, name) -> str:\n \"\"\"Return a string of the path of the child :any:`SimHandle` for a given *name*.\"\"\"\n return self._path + \".\" + name\n\n def _sub_handle_key(self, name):\n \"\"\"Translate the handle name to a key to use in :any:`_sub_handles` dictionary.\"\"\"\n return name.split(\".\")[-1]\n\n def __dir__(self):\n \"\"\"Permits IPython tab completion to work.\"\"\"\n self._discover_all()\n return super(RegionObject, self).__dir__() + [str(k) for k in self._sub_handles]\n\n\nclass HierarchyObject(RegionObject):\n \"\"\"Hierarchy objects are namespace/scope objects.\"\"\"\n\n def __get_sub_handle_by_name(self, name):\n try:\n return self._sub_handles[name]\n except KeyError:\n pass\n\n # Cache to avoid a call to the simulator if we already know the name is\n # invalid. 
Unclear if we care, but we had this before.\n if name in self._invalid_sub_handles:\n return None\n\n new_handle = self._handle.get_handle_by_name(name)\n\n if not new_handle:\n self._invalid_sub_handles.add(name)\n return None\n\n sub_handle = SimHandle(new_handle, self._child_path(name))\n self._sub_handles[name] = sub_handle\n return sub_handle\n\n def __setattr__(self, name, value):\n \"\"\"Provide transparent access to signals via the hierarchy.\n\n Slightly hacky version of operator overloading in Python.\n\n Raise an :exc:`AttributeError` if users attempt to create new members which\n don't exist in the design.\n \"\"\"\n\n # private attributes pass through directly\n if name.startswith(\"_\"):\n return SimHandleBase.__setattr__(self, name, value)\n\n # then try handles\n sub = self.__get_sub_handle_by_name(name)\n if sub is not None:\n sub.value = value\n return\n\n # compat behavior\n if name in self._compat_mapping:\n return SimHandleBase.__setattr__(self, name, value)\n\n raise AttributeError(\"%s contains no object named %s\" % (self._name, name))\n\n def __getattr__(self, name):\n \"\"\"Query the simulator for an object with the specified name\n and cache the result to build a tree of objects.\n \"\"\"\n if name.startswith(\"_\"):\n return SimHandleBase.__getattr__(self, name)\n\n handle = self.__get_sub_handle_by_name(name)\n if handle is not None:\n return handle\n\n if name in self._compat_mapping:\n return SimHandleBase.__getattr__(self, name)\n\n raise AttributeError(\"%s contains no object named %s\" % (self._name, name))\n\n def _id(self, name, extended: bool = True):\n \"\"\"Query the simulator for an object with the specified *name*,\n and cache the result to build a tree of objects.\n\n If *extended* is ``True``, run the query only for VHDL extended identifiers.\n For Verilog, only ``extended=False`` is supported.\n\n :meta public:\n \"\"\"\n if extended:\n name = \"\\\\\"+name+\"\\\\\"\n\n handle = self.__get_sub_handle_by_name(name)\n if handle is not None:\n return handle\n\n raise AttributeError(\"%s contains no object named %s\" % (self._name, name))\n\n\nclass HierarchyArrayObject(RegionObject):\n \"\"\"Hierarchy Arrays are containers of Hierarchy Objects.\"\"\"\n\n def _sub_handle_key(self, name):\n \"\"\"Translate the handle name to a key to use in :any:`_sub_handles` dictionary.\"\"\"\n # This is slightly hacky, but we need to extract the index from the name\n #\n # FLI and VHPI(IUS): _name(X) where X is the index\n # VHPI(ALDEC): _name__X where X is the index\n # VPI: _name[X] where X is the index\n import re\n result = re.match(r\"{0}__(?P<index>\\d+)$\".format(self._name), name)\n if not result:\n result = re.match(r\"{0}\\((?P<index>\\d+)\\)$\".format(self._name), name)\n if not result:\n result = re.match(r\"{0}\\[(?P<index>\\d+)\\]$\".format(self._name), name)\n\n if result:\n return int(result.group(\"index\"))\n else:\n raise ValueError(\"Unable to match an index pattern: {}\".format(name))\n\n def __len__(self):\n \"\"\"Return the \"length\" of the generate block.\"\"\"\n if self._len is None:\n if not self._discovered:\n self._discover_all()\n\n self._len = len(self._sub_handles)\n return self._len\n\n def __getitem__(self, index):\n if isinstance(index, slice):\n raise IndexError(\"Slice indexing is not supported\")\n if index in self._sub_handles:\n return self._sub_handles[index]\n new_handle = self._handle.get_handle_by_index(index)\n if not new_handle:\n raise IndexError(\"%s contains no object at index %d\" % (self._name, index))\n path = 
self._path + \"[\" + str(index) + \"]\"\n self._sub_handles[index] = SimHandle(new_handle, path)\n return self._sub_handles[index]\n\n def _child_path(self, name):\n \"\"\"Return a string of the path of the child :any:`SimHandle` for a given name.\"\"\"\n index = self._sub_handle_key(name)\n return self._path + \"[\" + str(index) + \"]\"\n\n def __setitem__(self, index, value):\n raise TypeError(\"Not permissible to set %s at index %d\" % (self._name, index))\n\n\nclass _AssignmentResult:\n \"\"\"\n An object that exists solely to provide an error message if the caller\n is not aware of cocotb's meaning of ``<=``.\n \"\"\"\n\n def __init__(self, signal, value):\n self._signal = signal\n self._value = value\n\n def __bool__(self):\n raise TypeError(\n \"Attempted to use `{0._signal!r} <= {0._value!r}` (a cocotb \"\n \"delayed write) as if it were a numeric comparison. To perform \"\n \"comparison, use `{0._signal!r}.value <= {0._value!r}` instead.\"\n .format(self)\n )\n\n\nclass NonHierarchyObject(SimHandleBase):\n \"\"\"Common base class for all non-hierarchy objects.\"\"\"\n\n def __iter__(self):\n return iter(())\n\n @property\n def value(self):\n \"\"\"The value of this simulation object.\n\n .. note::\n When setting this property, the value is stored by the :class:`~cocotb.scheduler.Scheduler`\n and all stored values are written at the same time at the end of the current simulator time step.\n\n Use :meth:`setimmediatevalue` to set the value immediately.\n \"\"\"\n raise TypeError(\"Not permissible to get values of object %s of type %s\" % (self._name, type(self)))\n\n @value.setter\n def value(self, value):\n self._set_value(value, cocotb.scheduler._schedule_write)\n\n def setimmediatevalue(self, value):\n \"\"\" Assign a value to this simulation object immediately. 
\"\"\"\n def _call_now(handle, f, *args):\n f(*args)\n self._set_value(value, _call_now)\n\n def _set_value(self, value, call_sim):\n \"\"\" This should be overriden in subclasses.\n\n This is used to implement both the setter for :attr:`value`, and the\n :meth:`setimmediatevalue` method.\n\n ``call_sim(handle, f, *args)`` should be used to schedule simulator writes,\n rather than performing them directly as ``f(*args)``.\n \"\"\"\n raise TypeError(\"Not permissible to set values on object %s of type %s\" % (self._name, type(self)))\n\n def __le__(self, value):\n \"\"\"Overload less-than-or-equal-to operator to provide an HDL-like shortcut.\n\n Example:\n >>> module.signal <= 2\n \"\"\"\n self.value = value\n return _AssignmentResult(self, value)\n\n def __eq__(self, other):\n \"\"\"Equality comparator for non-hierarchy objects\n\n If ``other`` is not a :class:`SimHandleBase` instance the comparision\n uses the comparison method of the ``other`` object against our\n ``.value``.\n \"\"\"\n if isinstance(other, SimHandleBase):\n return SimHandleBase.__eq__(self, other)\n return self.value == other\n\n def __ne__(self, other):\n if isinstance(other, SimHandleBase):\n return SimHandleBase.__ne__(self, other)\n return self.value != other\n\n # Re-define hash because we defined __eq__\n def __hash__(self):\n return SimHandleBase.__hash__(self)\n\n\nclass ConstantObject(NonHierarchyObject):\n \"\"\"An object which has a value that can be read, but not set.\n\n The value is cached in the class since it is fixed at elaboration\n time and won't change within a simulation.\n \"\"\"\n\n def __init__(self, handle, path, handle_type):\n \"\"\"\n Args:\n handle (int): The GPI handle to the simulator object.\n path (str): Path to this handle, ``None`` if root.\n handle_type: The type of the handle\n (``simulator.INTEGER``, ``simulator.ENUM``,\n ``simulator.REAL``, ``simulator.STRING``).\n \"\"\"\n NonHierarchyObject.__init__(self, handle, path)\n if handle_type in [simulator.INTEGER, simulator.ENUM]:\n self._value = self._handle.get_signal_val_long()\n elif handle_type == simulator.REAL:\n self._value = self._handle.get_signal_val_real()\n elif handle_type == simulator.STRING:\n self._value = self._handle.get_signal_val_str()\n else:\n val = self._handle.get_signal_val_binstr()\n self._value = BinaryValue(n_bits=len(val))\n try:\n self._value.binstr = val\n except Exception:\n self._value = val\n\n def __int__(self):\n return int(self.value)\n\n def __float__(self):\n return float(self.value)\n\n @NonHierarchyObject.value.getter\n def value(self):\n \"\"\"The value of this simulation object.\"\"\"\n return self._value\n\n def __str__(self):\n if isinstance(self.value, bytes):\n StringObject._emit_str_warning(self)\n return self.value.decode('ascii')\n else:\n ModifiableObject._emit_str_warning(self)\n return str(self.value)\n\n\nclass NonHierarchyIndexableObject(NonHierarchyObject):\n \"\"\" A non-hierarchy indexable object.\n\n Getting and setting the current value of an array is done\n by iterating through sub-handles in left-to-right order.\n\n Given an HDL array ``arr``:\n\n +--------------+---------------------+--------------------------------------------------------------+\n | Verilog | VHDL | ``arr.value`` is equivalent to |\n +==============+=====================+==============================================================+\n | ``arr[4:7]`` | ``arr(4 to 7)`` | ``[arr[4].value, arr[5].value, arr[6].value, arr[7].value]`` |\n 
+--------------+---------------------+--------------------------------------------------------------+\n | ``arr[7:4]`` | ``arr(7 downto 4)`` | ``[arr[7].value, arr[6].value, arr[5].value, arr[4].value]`` |\n +--------------+---------------------+--------------------------------------------------------------+\n\n When setting the signal as in ``arr.value = ...``, the same index equivalence as noted in the table holds.\n\n .. warning::\n Assigning a value to a sub-handle:\n\n - **Wrong**: ``dut.some_array.value[0] = 1`` (gets value as a list then updates index 0)\n - **Correct**: ``dut.some_array[0].value = 1``\n \"\"\"\n\n def __init__(self, handle, path):\n NonHierarchyObject.__init__(self, handle, path)\n self._range = self._handle.get_range()\n\n def __setitem__(self, index, value):\n \"\"\"Provide transparent assignment to indexed array handles.\"\"\"\n self[index].value = value\n\n def __getitem__(self, index):\n if isinstance(index, slice):\n raise IndexError(\"Slice indexing is not supported\")\n if self._range is None:\n raise IndexError(\"%s is not indexable. Unable to get object at index %d\" % (self._fullname, index))\n if index in self._sub_handles:\n return self._sub_handles[index]\n new_handle = self._handle.get_handle_by_index(index)\n if not new_handle:\n raise IndexError(\"%s contains no object at index %d\" % (self._fullname, index))\n path = self._path + \"[\" + str(index) + \"]\"\n self._sub_handles[index] = SimHandle(new_handle, path)\n return self._sub_handles[index]\n\n def __iter__(self):\n if self._range is None:\n return\n\n self._log.debug(\"Iterating with range [%d:%d]\", self._range[0], self._range[1])\n for i in self._range_iter(self._range[0], self._range[1]):\n try:\n result = self[i]\n yield result\n except IndexError:\n continue\n\n def _range_iter(self, left, right):\n if left > right:\n while left >= right:\n yield left\n left = left - 1\n else:\n while left <= right:\n yield left\n left = left + 1\n\n @NonHierarchyObject.value.getter\n def value(self) -> list:\n # Don't use self.__iter__, because it has an unwanted `except IndexError`\n return [\n self[i].value\n for i in self._range_iter(self._range[0], self._range[1])\n ]\n\n def _set_value(self, value, call_sim):\n \"\"\"Assign value from a list of same length to an array in left-to-right order.\n Index 0 of the list maps to the left-most index in the array.\n\n See the docstring for this class.\n \"\"\"\n if type(value) is not list:\n raise TypeError(\"Assigning non-list value to object %s of type %s\" % (self._name, type(self)))\n if len(value) != len(self):\n raise ValueError(\"Assigning list of length %d to object %s of length %d\" % (\n len(value), self._name, len(self)))\n for val_idx, self_idx in enumerate(self._range_iter(self._range[0], self._range[1])):\n self[self_idx]._set_value(value[val_idx], call_sim)\n\n\nclass NonConstantObject(NonHierarchyIndexableObject):\n \"\"\" A non-constant object\"\"\"\n # FIXME: what is the difference to ModifiableObject? 
Explain in docstring.\n\n def drivers(self):\n \"\"\"An iterator for gathering all drivers for a signal.\"\"\"\n return self._handle.iterate(simulator.DRIVERS)\n\n def loads(self):\n \"\"\"An iterator for gathering all loads on a signal.\"\"\"\n return self._handle.iterate(simulator.LOADS)\n\n\nclass _SetAction:\n \"\"\"Base class representing the type of action used while write-accessing a handle.\"\"\"\n pass\n\n\nclass _SetValueAction(_SetAction):\n __slots__ = (\"value\",)\n \"\"\"Base class representing the type of action used while write-accessing a handle with a value.\"\"\"\n\n def __init__(self, value):\n self.value = value\n\n\nclass Deposit(_SetValueAction):\n \"\"\"Action used for placing a value into a given handle.\"\"\"\n\n def _as_gpi_args_for(self, hdl):\n return self.value, 0 # GPI_DEPOSIT\n\n\nclass Force(_SetValueAction):\n \"\"\"Action used to force a handle to a given value until a release is applied.\"\"\"\n\n def _as_gpi_args_for(self, hdl):\n return self.value, 1 # GPI_FORCE\n\n\nclass Freeze(_SetAction):\n \"\"\"Action used to make a handle keep its current value until a release is used.\"\"\"\n\n def _as_gpi_args_for(self, hdl):\n return hdl.value, 1 # GPI_FORCE\n\n\nclass Release(_SetAction):\n \"\"\"Action used to stop the effects of a previously applied force/freeze action.\"\"\"\n\n def _as_gpi_args_for(self, hdl):\n return 0, 2 # GPI_RELEASE\n\n\nclass ModifiableObject(NonConstantObject):\n \"\"\"Base class for simulator objects whose values can be modified.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to *value*.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. net, signal or variable.\n\n We determine the library call to make based on the type of the value\n because assigning integers less than 32 bits is faster.\n\n Args:\n value (ctypes.Structure, cocotb.binary.BinaryValue, int, double):\n The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target is not wide enough or has an unsupported type\n for value assignment.\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n if isinstance(value, int) and value < 0x7fffffff and len(self) <= 32:\n call_sim(self, self._handle.set_signal_val_long, set_action, value)\n return\n if isinstance(value, ctypes.Structure):\n value = BinaryValue(value=cocotb.utils.pack(value), n_bits=len(self))\n elif isinstance(value, int):\n value = BinaryValue(value=value, n_bits=len(self), bigEndian=False)\n elif isinstance(value, dict):\n # We're given a dictionary with a list of values and a bit size...\n num = 0\n vallist = list(value[\"values\"])\n vallist.reverse()\n if len(vallist) * value[\"bits\"] != len(self):\n raise TypeError(\"Unable to set with array length %d of %d bit entries = %d total, target is only %d bits long\" %\n (len(value[\"values\"]), value[\"bits\"], len(value[\"values\"]) * value[\"bits\"], len(self)))\n\n for val in vallist:\n num = (num << value[\"bits\"]) + val\n value = BinaryValue(value=num, n_bits=len(self), bigEndian=False)\n\n elif not isinstance(value, BinaryValue):\n raise TypeError(\n \"Unsupported type for value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_binstr, set_action, value.binstr)\n\n def _check_for_set_action(self, value):\n if not isinstance(value, _SetAction):\n return value, 0 # GPI_DEPOSIT\n return value._as_gpi_args_for(self)\n\n @NonConstantObject.value.getter\n def value(self) -> 
BinaryValue:\n binstr = self._handle.get_signal_val_binstr()\n result = BinaryValue(binstr, len(binstr))\n return result\n\n def __int__(self):\n return int(self.value)\n\n def _emit_str_warning(self):\n warnings.warn(\n \"`str({t})` is deprecated, and in future will return `{t}._path`. \"\n \"To get a string representation of the value, use `str({t}.value)`.\"\n .format(t=type(self).__qualname__),\n FutureWarning, stacklevel=3)\n\n def __str__(self):\n self._emit_str_warning()\n return str(self.value)\n\n\nclass RealObject(ModifiableObject):\n \"\"\"Specific object handle for Real signals and variables.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to value.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. net, signal or variable.\n\n Args:\n value (float): The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target has an unsupported type for\n real value assignment.\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n try:\n value = float(value)\n except ValueError:\n raise TypeError(\n \"Unsupported type for real value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_real, set_action, value)\n\n @ModifiableObject.value.getter\n def value(self) -> float:\n return self._handle.get_signal_val_real()\n\n def __float__(self):\n return float(self.value)\n\n\nclass EnumObject(ModifiableObject):\n \"\"\"Specific object handle for enumeration signals and variables.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to *value*.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. net, signal or variable.\n\n Args:\n value (int): The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target has an unsupported type for\n integer value assignment.\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n if isinstance(value, BinaryValue):\n value = int(value)\n elif not isinstance(value, int):\n raise TypeError(\n \"Unsupported type for enum value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_long, set_action, value)\n\n @ModifiableObject.value.getter\n def value(self) -> int:\n return self._handle.get_signal_val_long()\n\n\nclass IntegerObject(ModifiableObject):\n \"\"\"Specific object handle for Integer and Enum signals and variables.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to *value*.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. 
net, signal or variable.\n\n Args:\n value (int): The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target has an unsupported type for\n integer value assignment.\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n if isinstance(value, BinaryValue):\n value = int(value)\n elif not isinstance(value, int):\n raise TypeError(\n \"Unsupported type for integer value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_long, set_action, value)\n\n @ModifiableObject.value.getter\n def value(self) -> int:\n return self._handle.get_signal_val_long()\n\n\nclass StringObject(ModifiableObject):\n \"\"\"Specific object handle for String variables.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to *value*.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. net, signal or variable.\n\n Args:\n value (bytes): The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target has an unsupported type for\n string value assignment.\n\n .. versionchanged:: 1.4\n Takes :class:`bytes` instead of :class:`str`.\n Users are now expected to choose an encoding when using these objects.\n As a convenience, when assigning :class:`str` values, ASCII encoding will be used as a safe default.\n\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n if isinstance(value, str):\n warnings.warn(\n \"Handles on string objects will soon not accept `str` objects. \"\n \"Please use a bytes object by encoding the string as you see fit. \"\n \"`str.encode('ascii')` is typically sufficient.\", DeprecationWarning, stacklevel=2)\n value = value.encode('ascii') # may throw UnicodeEncodeError\n\n if not isinstance(value, bytes):\n raise TypeError(\n \"Unsupported type for string value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_str, set_action, value)\n\n @ModifiableObject.value.getter\n def value(self) -> bytes:\n return self._handle.get_signal_val_str()\n\n def _emit_str_warning(self):\n warnings.warn(\n \"`str({t})` is deprecated, and in future will return `{t}._path`. 
\"\n \"To access the `bytes` value of this handle, use `{t}.value`.\"\n .format(t=type(self).__qualname__),\n FutureWarning, stacklevel=3)\n\n def __str__(self):\n self._emit_str_warning()\n return self.value.decode('ascii')\n\n\n_handle2obj = {}\n\n\ndef SimHandle(handle, path=None):\n \"\"\"Factory function to create the correct type of `SimHandle` object.\n\n Args:\n handle (int): The GPI handle to the simulator object.\n path (str): Path to this handle, ``None`` if root.\n\n Returns:\n The `SimHandle` object.\n\n Raises:\n TestError: If no matching object for GPI type could be found.\n \"\"\"\n _type2cls = {\n simulator.MODULE: HierarchyObject,\n simulator.STRUCTURE: HierarchyObject,\n simulator.REG: ModifiableObject,\n simulator.NET: ModifiableObject,\n simulator.NETARRAY: NonHierarchyIndexableObject,\n simulator.REAL: RealObject,\n simulator.INTEGER: IntegerObject,\n simulator.ENUM: EnumObject,\n simulator.STRING: StringObject,\n simulator.GENARRAY: HierarchyArrayObject,\n }\n\n # Enforce singletons since it's possible to retrieve handles avoiding\n # the hierarchy by getting driver/load information\n global _handle2obj\n try:\n return _handle2obj[handle]\n except KeyError:\n pass\n\n t = handle.get_type()\n\n # Special case for constants\n if handle.get_const() and t not in [\n simulator.MODULE,\n simulator.STRUCTURE,\n simulator.NETARRAY,\n simulator.GENARRAY,\n ]:\n obj = ConstantObject(handle, path, t)\n _handle2obj[handle] = obj\n return obj\n\n if t not in _type2cls:\n raise TestError(\"Couldn't find a matching object for GPI type %d (path=%s)\" % (t, path))\n obj = _type2cls[t](handle, path)\n _handle2obj[handle] = obj\n return obj\n", "path": "cocotb/handle.py"}], "after_files": [{"content": "#!/usr/bin/env python\n\n# Copyright (c) 2013 Potential Ventures Ltd\n# Copyright (c) 2013 SolarFlare Communications Inc\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are met:\n# * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n# * Redistributions in binary form must reproduce the above copyright\n# notice, this list of conditions and the following disclaimer in the\n# documentation and/or other materials provided with the distribution.\n# * Neither the name of Potential Ventures Ltd,\n# SolarFlare Communications Inc nor the\n# names of its contributors may be used to endorse or promote products\n# derived from this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND\n# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\n# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n# DISCLAIMED. 
IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY\n# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\n# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND\n# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\n# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n# -*- coding: utf-8 -*-\n\nimport ctypes\nimport warnings\n\nimport cocotb\nfrom cocotb import simulator\nfrom cocotb.binary import BinaryValue\nfrom cocotb.log import SimLog\nfrom cocotb.result import TestError\n\n# Only issue a warning for each deprecated attribute access\n_deprecation_warned = set()\n\n\nclass SimHandleBase:\n \"\"\"Base class for all simulation objects.\n\n We maintain a handle which we can use for GPI calls.\n \"\"\"\n\n # For backwards compatibility we support a mapping of old member names\n # which may alias with the simulator hierarchy. In these cases the\n # simulator result takes priority, only falling back to the python member\n # if there is no colliding object in the elaborated design.\n _compat_mapping = {\n \"log\" : \"_log\",\n \"fullname\" : \"_fullname\",\n \"name\" : \"_name\",\n }\n\n def __init__(self, handle, path):\n \"\"\"\n .. Constructor. This RST comment works around sphinx-doc/sphinx#6885\n\n Args:\n handle (int): The GPI handle to the simulator object.\n path (str): Path to this handle, ``None`` if root.\n \"\"\"\n self._handle = handle\n self._len = None # type: int\n \"\"\"The \"length\" (the number of elements) of the underlying object. For vectors this is the number of bits.\"\"\"\n self._sub_handles = {} # type: dict\n \"\"\"Dictionary of this handle's children.\"\"\"\n self._invalid_sub_handles = set() # type: set\n \"\"\"Python :class:`set` of invalid queries, for caching purposes.\"\"\"\n self._name = self._handle.get_name_string() # type: str\n \"\"\"The name of an object.\n\n :meta public:\n \"\"\"\n self._type = self._handle.get_type_string() # type: str\n \"\"\"The type of an object as a string.\n\n :meta public:\n \"\"\"\n self._fullname = self._name + \"(%s)\" % self._type # type: str\n \"\"\"The name of an object with its type appended in parentheses.\"\"\"\n self._path = self._name if path is None else path # type: str\n \"\"\"The path to this handle, or its name if this is the root handle.\n\n :meta public:\n \"\"\"\n self._log = SimLog(\"cocotb.%s\" % self._name)\n \"\"\"The logging object.\"\"\"\n self._log.debug(\"Created\")\n self._def_name = self._handle.get_definition_name() # type: str\n \"\"\"The name of a GPI object's definition.\n\n :meta public:\n \"\"\"\n self._def_file = self._handle.get_definition_file() # type: str\n \"\"\"The file that sources the object's definition.\n\n :meta public:\n \"\"\"\n\n def get_definition_name(self):\n return self._def_name\n\n def get_definition_file(self):\n return self._def_file\n\n def __hash__(self):\n return hash(self._handle)\n\n def __len__(self):\n \"\"\"Return the \"length\" (the number of elements) of the underlying object.\n\n For vectors this is the number of bits.\n \"\"\"\n if self._len is None:\n self._len = self._handle.get_num_elems()\n return self._len\n\n def __eq__(self, other):\n \"\"\"Equality comparator for handles\n\n Example usage::\n\n if clk == dut.clk:\n do_something()\n \"\"\"\n if not isinstance(other, SimHandleBase):\n return 
NotImplemented\n return self._handle == other._handle\n\n def __ne__(self, other):\n if not isinstance(other, SimHandleBase):\n return NotImplemented\n return self._handle != other._handle\n\n def __repr__(self):\n desc = self._path\n defname = self._def_name\n if defname:\n desc += \" with definition \"+defname\n deffile = self._def_file\n if deffile:\n desc += \" (at \"+deffile+\")\"\n return type(self).__qualname__ + \"(\" + desc + \")\"\n\n def __str__(self):\n return self._path\n\n def __setattr__(self, name, value):\n if name in self._compat_mapping:\n if name not in _deprecation_warned:\n warnings.warn(\"Use of attribute %r is deprecated, use %r instead\" % (name, self._compat_mapping[name]))\n _deprecation_warned.add(name)\n return setattr(self, self._compat_mapping[name], value)\n else:\n return object.__setattr__(self, name, value)\n\n def __getattr__(self, name):\n if name in self._compat_mapping:\n if name not in _deprecation_warned:\n warnings.warn(\"Use of attribute %r is deprecated, use %r instead\" % (name, self._compat_mapping[name]))\n _deprecation_warned.add(name)\n return getattr(self, self._compat_mapping[name])\n else:\n return object.__getattribute__(self, name)\n\n\nclass RegionObject(SimHandleBase):\n \"\"\"A region object, such as a scope or namespace.\n\n Region objects don't have values, they are effectively scopes or namespaces.\n \"\"\"\n\n def __init__(self, handle, path):\n SimHandleBase.__init__(self, handle, path)\n self._discovered = False # True if this object has already been discovered\n\n def __iter__(self):\n \"\"\"Iterate over all known objects in this layer of hierarchy.\"\"\"\n if not self._discovered:\n self._discover_all()\n\n for name, handle in self._sub_handles.items():\n if isinstance(handle, list):\n self._log.debug(\"Found index list length %d\", len(handle))\n for subindex, subhdl in enumerate(handle):\n if subhdl is None:\n self._log.warning(\"Index %d doesn't exist in %s.%s\", subindex, self._name, name)\n continue\n self._log.debug(\"Yielding index %d from %s (%s)\", subindex, name, type(subhdl))\n yield subhdl\n else:\n self._log.debug(\"Yielding %s (%s)\", name, handle)\n yield handle\n\n def _discover_all(self):\n \"\"\"When iterating or performing IPython tab completion, we run through ahead of\n time and discover all possible children, populating the :any:`_sub_handles`\n mapping. 
Hierarchy can't change after elaboration so we only have to\n do this once.\n \"\"\"\n if self._discovered:\n return\n self._log.debug(\"Discovering all on %s\", self._name)\n for thing in self._handle.iterate(simulator.OBJECTS):\n name = thing.get_name_string()\n try:\n hdl = SimHandle(thing, self._child_path(name))\n except TestError as e:\n self._log.debug(\"%s\", e)\n continue\n\n try:\n key = self._sub_handle_key(name)\n except ValueError:\n self._log.debug(\"Unable to translate handle >%s< to a valid _sub_handle key\", hdl._name)\n continue\n\n self._sub_handles[key] = hdl\n\n self._discovered = True\n\n def _child_path(self, name) -> str:\n \"\"\"Return a string of the path of the child :any:`SimHandle` for a given *name*.\"\"\"\n return self._path + \".\" + name\n\n def _sub_handle_key(self, name):\n \"\"\"Translate the handle name to a key to use in :any:`_sub_handles` dictionary.\"\"\"\n return name.split(\".\")[-1]\n\n def __dir__(self):\n \"\"\"Permits IPython tab completion to work.\"\"\"\n self._discover_all()\n return super(RegionObject, self).__dir__() + [str(k) for k in self._sub_handles]\n\n\nclass HierarchyObject(RegionObject):\n \"\"\"Hierarchy objects are namespace/scope objects.\"\"\"\n\n def __get_sub_handle_by_name(self, name):\n try:\n return self._sub_handles[name]\n except KeyError:\n pass\n\n if not self._discovered:\n self._discover_all()\n return self.__get_sub_handle_by_name(name)\n\n # Cache to avoid a call to the simulator if we already know the name is\n # invalid. Unclear if we care, but we had this before.\n if name in self._invalid_sub_handles:\n return None\n\n new_handle = self._handle.get_handle_by_name(name)\n\n if not new_handle:\n self._invalid_sub_handles.add(name)\n return None\n\n sub_handle = SimHandle(new_handle, self._child_path(name))\n self._sub_handles[name] = sub_handle\n return sub_handle\n\n def __setattr__(self, name, value):\n \"\"\"Provide transparent access to signals via the hierarchy.\n\n Slightly hacky version of operator overloading in Python.\n\n Raise an :exc:`AttributeError` if users attempt to create new members which\n don't exist in the design.\n \"\"\"\n\n # private attributes pass through directly\n if name.startswith(\"_\"):\n return SimHandleBase.__setattr__(self, name, value)\n\n # then try handles\n sub = self.__get_sub_handle_by_name(name)\n if sub is not None:\n sub.value = value\n return\n\n # compat behavior\n if name in self._compat_mapping:\n return SimHandleBase.__setattr__(self, name, value)\n\n raise AttributeError(\"%s contains no object named %s\" % (self._name, name))\n\n def __getattr__(self, name):\n \"\"\"Query the simulator for an object with the specified name\n and cache the result to build a tree of objects.\n \"\"\"\n if name.startswith(\"_\"):\n return SimHandleBase.__getattr__(self, name)\n\n handle = self.__get_sub_handle_by_name(name)\n if handle is not None:\n return handle\n\n if name in self._compat_mapping:\n return SimHandleBase.__getattr__(self, name)\n\n raise AttributeError(\"%s contains no object named %s\" % (self._name, name))\n\n def _id(self, name, extended: bool = True):\n \"\"\"Query the simulator for an object with the specified *name*,\n and cache the result to build a tree of objects.\n\n If *extended* is ``True``, run the query only for VHDL extended identifiers.\n For Verilog, only ``extended=False`` is supported.\n\n :meta public:\n \"\"\"\n if extended:\n name = \"\\\\\"+name+\"\\\\\"\n\n handle = self.__get_sub_handle_by_name(name)\n if handle is not None:\n return 
handle\n\n raise AttributeError(\"%s contains no object named %s\" % (self._name, name))\n\n\nclass HierarchyArrayObject(RegionObject):\n \"\"\"Hierarchy Arrays are containers of Hierarchy Objects.\"\"\"\n\n def _sub_handle_key(self, name):\n \"\"\"Translate the handle name to a key to use in :any:`_sub_handles` dictionary.\"\"\"\n # This is slightly hacky, but we need to extract the index from the name\n #\n # FLI and VHPI(IUS): _name(X) where X is the index\n # VHPI(ALDEC): _name__X where X is the index\n # VPI: _name[X] where X is the index\n import re\n result = re.match(r\"{0}__(?P<index>\\d+)$\".format(self._name), name)\n if not result:\n result = re.match(r\"{0}\\((?P<index>\\d+)\\)$\".format(self._name), name)\n if not result:\n result = re.match(r\"{0}\\[(?P<index>\\d+)\\]$\".format(self._name), name)\n\n if result:\n return int(result.group(\"index\"))\n else:\n raise ValueError(\"Unable to match an index pattern: {}\".format(name))\n\n def __len__(self):\n \"\"\"Return the \"length\" of the generate block.\"\"\"\n if self._len is None:\n if not self._discovered:\n self._discover_all()\n\n self._len = len(self._sub_handles)\n return self._len\n\n def __getitem__(self, index):\n if isinstance(index, slice):\n raise IndexError(\"Slice indexing is not supported\")\n if index in self._sub_handles:\n return self._sub_handles[index]\n new_handle = self._handle.get_handle_by_index(index)\n if not new_handle:\n raise IndexError(\"%s contains no object at index %d\" % (self._name, index))\n path = self._path + \"[\" + str(index) + \"]\"\n self._sub_handles[index] = SimHandle(new_handle, path)\n return self._sub_handles[index]\n\n def _child_path(self, name):\n \"\"\"Return a string of the path of the child :any:`SimHandle` for a given name.\"\"\"\n index = self._sub_handle_key(name)\n return self._path + \"[\" + str(index) + \"]\"\n\n def __setitem__(self, index, value):\n raise TypeError(\"Not permissible to set %s at index %d\" % (self._name, index))\n\n\nclass _AssignmentResult:\n \"\"\"\n An object that exists solely to provide an error message if the caller\n is not aware of cocotb's meaning of ``<=``.\n \"\"\"\n\n def __init__(self, signal, value):\n self._signal = signal\n self._value = value\n\n def __bool__(self):\n raise TypeError(\n \"Attempted to use `{0._signal!r} <= {0._value!r}` (a cocotb \"\n \"delayed write) as if it were a numeric comparison. To perform \"\n \"comparison, use `{0._signal!r}.value <= {0._value!r}` instead.\"\n .format(self)\n )\n\n\nclass NonHierarchyObject(SimHandleBase):\n \"\"\"Common base class for all non-hierarchy objects.\"\"\"\n\n def __iter__(self):\n return iter(())\n\n @property\n def value(self):\n \"\"\"The value of this simulation object.\n\n .. note::\n When setting this property, the value is stored by the :class:`~cocotb.scheduler.Scheduler`\n and all stored values are written at the same time at the end of the current simulator time step.\n\n Use :meth:`setimmediatevalue` to set the value immediately.\n \"\"\"\n raise TypeError(\"Not permissible to get values of object %s of type %s\" % (self._name, type(self)))\n\n @value.setter\n def value(self, value):\n self._set_value(value, cocotb.scheduler._schedule_write)\n\n def setimmediatevalue(self, value):\n \"\"\" Assign a value to this simulation object immediately. 
\"\"\"\n def _call_now(handle, f, *args):\n f(*args)\n self._set_value(value, _call_now)\n\n def _set_value(self, value, call_sim):\n \"\"\" This should be overriden in subclasses.\n\n This is used to implement both the setter for :attr:`value`, and the\n :meth:`setimmediatevalue` method.\n\n ``call_sim(handle, f, *args)`` should be used to schedule simulator writes,\n rather than performing them directly as ``f(*args)``.\n \"\"\"\n raise TypeError(\"Not permissible to set values on object %s of type %s\" % (self._name, type(self)))\n\n def __le__(self, value):\n \"\"\"Overload less-than-or-equal-to operator to provide an HDL-like shortcut.\n\n Example:\n >>> module.signal <= 2\n \"\"\"\n self.value = value\n return _AssignmentResult(self, value)\n\n def __eq__(self, other):\n \"\"\"Equality comparator for non-hierarchy objects\n\n If ``other`` is not a :class:`SimHandleBase` instance the comparision\n uses the comparison method of the ``other`` object against our\n ``.value``.\n \"\"\"\n if isinstance(other, SimHandleBase):\n return SimHandleBase.__eq__(self, other)\n return self.value == other\n\n def __ne__(self, other):\n if isinstance(other, SimHandleBase):\n return SimHandleBase.__ne__(self, other)\n return self.value != other\n\n # Re-define hash because we defined __eq__\n def __hash__(self):\n return SimHandleBase.__hash__(self)\n\n\nclass ConstantObject(NonHierarchyObject):\n \"\"\"An object which has a value that can be read, but not set.\n\n The value is cached in the class since it is fixed at elaboration\n time and won't change within a simulation.\n \"\"\"\n\n def __init__(self, handle, path, handle_type):\n \"\"\"\n Args:\n handle (int): The GPI handle to the simulator object.\n path (str): Path to this handle, ``None`` if root.\n handle_type: The type of the handle\n (``simulator.INTEGER``, ``simulator.ENUM``,\n ``simulator.REAL``, ``simulator.STRING``).\n \"\"\"\n NonHierarchyObject.__init__(self, handle, path)\n if handle_type in [simulator.INTEGER, simulator.ENUM]:\n self._value = self._handle.get_signal_val_long()\n elif handle_type == simulator.REAL:\n self._value = self._handle.get_signal_val_real()\n elif handle_type == simulator.STRING:\n self._value = self._handle.get_signal_val_str()\n else:\n val = self._handle.get_signal_val_binstr()\n self._value = BinaryValue(n_bits=len(val))\n try:\n self._value.binstr = val\n except Exception:\n self._value = val\n\n def __int__(self):\n return int(self.value)\n\n def __float__(self):\n return float(self.value)\n\n @NonHierarchyObject.value.getter\n def value(self):\n \"\"\"The value of this simulation object.\"\"\"\n return self._value\n\n def __str__(self):\n if isinstance(self.value, bytes):\n StringObject._emit_str_warning(self)\n return self.value.decode('ascii')\n else:\n ModifiableObject._emit_str_warning(self)\n return str(self.value)\n\n\nclass NonHierarchyIndexableObject(NonHierarchyObject):\n \"\"\" A non-hierarchy indexable object.\n\n Getting and setting the current value of an array is done\n by iterating through sub-handles in left-to-right order.\n\n Given an HDL array ``arr``:\n\n +--------------+---------------------+--------------------------------------------------------------+\n | Verilog | VHDL | ``arr.value`` is equivalent to |\n +==============+=====================+==============================================================+\n | ``arr[4:7]`` | ``arr(4 to 7)`` | ``[arr[4].value, arr[5].value, arr[6].value, arr[7].value]`` |\n 
+--------------+---------------------+--------------------------------------------------------------+\n | ``arr[7:4]`` | ``arr(7 downto 4)`` | ``[arr[7].value, arr[6].value, arr[5].value, arr[4].value]`` |\n +--------------+---------------------+--------------------------------------------------------------+\n\n When setting the signal as in ``arr.value = ...``, the same index equivalence as noted in the table holds.\n\n .. warning::\n Assigning a value to a sub-handle:\n\n - **Wrong**: ``dut.some_array.value[0] = 1`` (gets value as a list then updates index 0)\n - **Correct**: ``dut.some_array[0].value = 1``\n \"\"\"\n\n def __init__(self, handle, path):\n NonHierarchyObject.__init__(self, handle, path)\n self._range = self._handle.get_range()\n\n def __setitem__(self, index, value):\n \"\"\"Provide transparent assignment to indexed array handles.\"\"\"\n self[index].value = value\n\n def __getitem__(self, index):\n if isinstance(index, slice):\n raise IndexError(\"Slice indexing is not supported\")\n if self._range is None:\n raise IndexError(\"%s is not indexable. Unable to get object at index %d\" % (self._fullname, index))\n if index in self._sub_handles:\n return self._sub_handles[index]\n new_handle = self._handle.get_handle_by_index(index)\n if not new_handle:\n raise IndexError(\"%s contains no object at index %d\" % (self._fullname, index))\n path = self._path + \"[\" + str(index) + \"]\"\n self._sub_handles[index] = SimHandle(new_handle, path)\n return self._sub_handles[index]\n\n def __iter__(self):\n if self._range is None:\n return\n\n self._log.debug(\"Iterating with range [%d:%d]\", self._range[0], self._range[1])\n for i in self._range_iter(self._range[0], self._range[1]):\n try:\n result = self[i]\n yield result\n except IndexError:\n continue\n\n def _range_iter(self, left, right):\n if left > right:\n while left >= right:\n yield left\n left = left - 1\n else:\n while left <= right:\n yield left\n left = left + 1\n\n @NonHierarchyObject.value.getter\n def value(self) -> list:\n # Don't use self.__iter__, because it has an unwanted `except IndexError`\n return [\n self[i].value\n for i in self._range_iter(self._range[0], self._range[1])\n ]\n\n def _set_value(self, value, call_sim):\n \"\"\"Assign value from a list of same length to an array in left-to-right order.\n Index 0 of the list maps to the left-most index in the array.\n\n See the docstring for this class.\n \"\"\"\n if type(value) is not list:\n raise TypeError(\"Assigning non-list value to object %s of type %s\" % (self._name, type(self)))\n if len(value) != len(self):\n raise ValueError(\"Assigning list of length %d to object %s of length %d\" % (\n len(value), self._name, len(self)))\n for val_idx, self_idx in enumerate(self._range_iter(self._range[0], self._range[1])):\n self[self_idx]._set_value(value[val_idx], call_sim)\n\n\nclass NonConstantObject(NonHierarchyIndexableObject):\n \"\"\" A non-constant object\"\"\"\n # FIXME: what is the difference to ModifiableObject? 
Explain in docstring.\n\n def drivers(self):\n \"\"\"An iterator for gathering all drivers for a signal.\"\"\"\n return self._handle.iterate(simulator.DRIVERS)\n\n def loads(self):\n \"\"\"An iterator for gathering all loads on a signal.\"\"\"\n return self._handle.iterate(simulator.LOADS)\n\n\nclass _SetAction:\n \"\"\"Base class representing the type of action used while write-accessing a handle.\"\"\"\n pass\n\n\nclass _SetValueAction(_SetAction):\n __slots__ = (\"value\",)\n \"\"\"Base class representing the type of action used while write-accessing a handle with a value.\"\"\"\n\n def __init__(self, value):\n self.value = value\n\n\nclass Deposit(_SetValueAction):\n \"\"\"Action used for placing a value into a given handle.\"\"\"\n\n def _as_gpi_args_for(self, hdl):\n return self.value, 0 # GPI_DEPOSIT\n\n\nclass Force(_SetValueAction):\n \"\"\"Action used to force a handle to a given value until a release is applied.\"\"\"\n\n def _as_gpi_args_for(self, hdl):\n return self.value, 1 # GPI_FORCE\n\n\nclass Freeze(_SetAction):\n \"\"\"Action used to make a handle keep its current value until a release is used.\"\"\"\n\n def _as_gpi_args_for(self, hdl):\n return hdl.value, 1 # GPI_FORCE\n\n\nclass Release(_SetAction):\n \"\"\"Action used to stop the effects of a previously applied force/freeze action.\"\"\"\n\n def _as_gpi_args_for(self, hdl):\n return 0, 2 # GPI_RELEASE\n\n\nclass ModifiableObject(NonConstantObject):\n \"\"\"Base class for simulator objects whose values can be modified.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to *value*.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. net, signal or variable.\n\n We determine the library call to make based on the type of the value\n because assigning integers less than 32 bits is faster.\n\n Args:\n value (ctypes.Structure, cocotb.binary.BinaryValue, int, double):\n The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target is not wide enough or has an unsupported type\n for value assignment.\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n if isinstance(value, int) and value < 0x7fffffff and len(self) <= 32:\n call_sim(self, self._handle.set_signal_val_long, set_action, value)\n return\n if isinstance(value, ctypes.Structure):\n value = BinaryValue(value=cocotb.utils.pack(value), n_bits=len(self))\n elif isinstance(value, int):\n value = BinaryValue(value=value, n_bits=len(self), bigEndian=False)\n elif isinstance(value, dict):\n # We're given a dictionary with a list of values and a bit size...\n num = 0\n vallist = list(value[\"values\"])\n vallist.reverse()\n if len(vallist) * value[\"bits\"] != len(self):\n raise TypeError(\"Unable to set with array length %d of %d bit entries = %d total, target is only %d bits long\" %\n (len(value[\"values\"]), value[\"bits\"], len(value[\"values\"]) * value[\"bits\"], len(self)))\n\n for val in vallist:\n num = (num << value[\"bits\"]) + val\n value = BinaryValue(value=num, n_bits=len(self), bigEndian=False)\n\n elif not isinstance(value, BinaryValue):\n raise TypeError(\n \"Unsupported type for value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_binstr, set_action, value.binstr)\n\n def _check_for_set_action(self, value):\n if not isinstance(value, _SetAction):\n return value, 0 # GPI_DEPOSIT\n return value._as_gpi_args_for(self)\n\n @NonConstantObject.value.getter\n def value(self) -> 
BinaryValue:\n binstr = self._handle.get_signal_val_binstr()\n result = BinaryValue(binstr, len(binstr))\n return result\n\n def __int__(self):\n return int(self.value)\n\n def _emit_str_warning(self):\n warnings.warn(\n \"`str({t})` is deprecated, and in future will return `{t}._path`. \"\n \"To get a string representation of the value, use `str({t}.value)`.\"\n .format(t=type(self).__qualname__),\n FutureWarning, stacklevel=3)\n\n def __str__(self):\n self._emit_str_warning()\n return str(self.value)\n\n\nclass RealObject(ModifiableObject):\n \"\"\"Specific object handle for Real signals and variables.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to value.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. net, signal or variable.\n\n Args:\n value (float): The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target has an unsupported type for\n real value assignment.\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n try:\n value = float(value)\n except ValueError:\n raise TypeError(\n \"Unsupported type for real value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_real, set_action, value)\n\n @ModifiableObject.value.getter\n def value(self) -> float:\n return self._handle.get_signal_val_real()\n\n def __float__(self):\n return float(self.value)\n\n\nclass EnumObject(ModifiableObject):\n \"\"\"Specific object handle for enumeration signals and variables.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to *value*.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. net, signal or variable.\n\n Args:\n value (int): The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target has an unsupported type for\n integer value assignment.\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n if isinstance(value, BinaryValue):\n value = int(value)\n elif not isinstance(value, int):\n raise TypeError(\n \"Unsupported type for enum value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_long, set_action, value)\n\n @ModifiableObject.value.getter\n def value(self) -> int:\n return self._handle.get_signal_val_long()\n\n\nclass IntegerObject(ModifiableObject):\n \"\"\"Specific object handle for Integer and Enum signals and variables.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to *value*.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. 
net, signal or variable.\n\n Args:\n value (int): The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target has an unsupported type for\n integer value assignment.\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n if isinstance(value, BinaryValue):\n value = int(value)\n elif not isinstance(value, int):\n raise TypeError(\n \"Unsupported type for integer value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_long, set_action, value)\n\n @ModifiableObject.value.getter\n def value(self) -> int:\n return self._handle.get_signal_val_long()\n\n\nclass StringObject(ModifiableObject):\n \"\"\"Specific object handle for String variables.\"\"\"\n\n def _set_value(self, value, call_sim):\n \"\"\"Set the value of the underlying simulation object to *value*.\n\n This operation will fail unless the handle refers to a modifiable\n object, e.g. net, signal or variable.\n\n Args:\n value (bytes): The value to drive onto the simulator object.\n\n Raises:\n TypeError: If target has an unsupported type for\n string value assignment.\n\n .. versionchanged:: 1.4\n Takes :class:`bytes` instead of :class:`str`.\n Users are now expected to choose an encoding when using these objects.\n As a convenience, when assigning :class:`str` values, ASCII encoding will be used as a safe default.\n\n \"\"\"\n value, set_action = self._check_for_set_action(value)\n\n if isinstance(value, str):\n warnings.warn(\n \"Handles on string objects will soon not accept `str` objects. \"\n \"Please use a bytes object by encoding the string as you see fit. \"\n \"`str.encode('ascii')` is typically sufficient.\", DeprecationWarning, stacklevel=2)\n value = value.encode('ascii') # may throw UnicodeEncodeError\n\n if not isinstance(value, bytes):\n raise TypeError(\n \"Unsupported type for string value assignment: {} ({!r})\"\n .format(type(value), value))\n\n call_sim(self, self._handle.set_signal_val_str, set_action, value)\n\n @ModifiableObject.value.getter\n def value(self) -> bytes:\n return self._handle.get_signal_val_str()\n\n def _emit_str_warning(self):\n warnings.warn(\n \"`str({t})` is deprecated, and in future will return `{t}._path`. 
\"\n \"To access the `bytes` value of this handle, use `{t}.value`.\"\n .format(t=type(self).__qualname__),\n FutureWarning, stacklevel=3)\n\n def __str__(self):\n self._emit_str_warning()\n return self.value.decode('ascii')\n\n\n_handle2obj = {}\n\n\ndef SimHandle(handle, path=None):\n \"\"\"Factory function to create the correct type of `SimHandle` object.\n\n Args:\n handle (int): The GPI handle to the simulator object.\n path (str): Path to this handle, ``None`` if root.\n\n Returns:\n The `SimHandle` object.\n\n Raises:\n TestError: If no matching object for GPI type could be found.\n \"\"\"\n _type2cls = {\n simulator.MODULE: HierarchyObject,\n simulator.STRUCTURE: HierarchyObject,\n simulator.REG: ModifiableObject,\n simulator.NET: ModifiableObject,\n simulator.NETARRAY: NonHierarchyIndexableObject,\n simulator.REAL: RealObject,\n simulator.INTEGER: IntegerObject,\n simulator.ENUM: EnumObject,\n simulator.STRING: StringObject,\n simulator.GENARRAY: HierarchyArrayObject,\n }\n\n # Enforce singletons since it's possible to retrieve handles avoiding\n # the hierarchy by getting driver/load information\n global _handle2obj\n try:\n return _handle2obj[handle]\n except KeyError:\n pass\n\n t = handle.get_type()\n\n # Special case for constants\n if handle.get_const() and t not in [\n simulator.MODULE,\n simulator.STRUCTURE,\n simulator.NETARRAY,\n simulator.GENARRAY,\n ]:\n obj = ConstantObject(handle, path, t)\n _handle2obj[handle] = obj\n return obj\n\n if t not in _type2cls:\n raise TestError(\"Couldn't find a matching object for GPI type %d (path=%s)\" % (t, path))\n obj = _type2cls[t](handle, path)\n _handle2obj[handle] = obj\n return obj\n", "path": "cocotb/handle.py"}]} |
gh_patches_debug_1605 | rasdani/github-patches | git_diff | hylang__hy-92 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Allow (raise)
That's actually valid in Python: a bare `raise` re-raises the last caught exception.
--- END ISSUE ---
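For reference, here is a minimal sketch of the Python behaviour the issue appeals to (this illustration is not part of the original report). A bare `raise` inside an `except` block re-raises the exception currently being handled, preserving its original traceback:
```python
def risky():
    raise ValueError("boom")

def cleanup_then_reraise():
    try:
        risky()
    except ValueError:
        print("cleaning up before propagating")
        # A bare `raise` re-raises the active ValueError with its
        # original traceback, as if it had never been caught.
        raise
```
In Hy terms, the request is that `(raise)` with no argument compile to that same bare-raise statement, e.g. `(try (risky) (catch [] (raise)))` using the catch form this compiler supports.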
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `hy/compiler.py`
Content:
```
1 # -*- encoding: utf-8 -*-
2 #
3 # Copyright (c) 2013 Paul Tagliamonte <[email protected]>
4 # Copyright (c) 2013 Julien Danjou <[email protected]>
5 #
6 # Permission is hereby granted, free of charge, to any person obtaining a
7 # copy of this software and associated documentation files (the "Software"),
8 # to deal in the Software without restriction, including without limitation
9 # the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 # and/or sell copies of the Software, and to permit persons to whom the
11 # Software is furnished to do so, subject to the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be included in
14 # all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
21 # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 # DEALINGS IN THE SOFTWARE.
23
24 from hy.errors import HyError
25
26 from hy.models.expression import HyExpression
27 from hy.models.integer import HyInteger
28 from hy.models.string import HyString
29 from hy.models.symbol import HySymbol
30 from hy.models.list import HyList
31 from hy.models.dict import HyDict
32
33 from hy.util import flatten_literal_list
34
35 import codecs
36 import ast
37 import sys
38
39
40 class HyCompileError(HyError):
41 pass
42
43
44 _compile_table = {}
45
46
47 def ast_str(foobar):
48 if sys.version_info[0] >= 3:
49 return str(foobar)
50
51 try:
52 return str(foobar)
53 except UnicodeEncodeError:
54 pass
55
56 enc = codecs.getencoder('punycode')
57 foobar, _ = enc(foobar)
58 return "__hy_%s" % (str(foobar).replace("-", "_"))
59
60
61 def builds(_type):
62 def _dec(fn):
63 _compile_table[_type] = fn
64
65 def shim(*args, **kwargs):
66 return fn(*args, **kwargs)
67 return shim
68 return _dec
69
70
71 def _raise_wrong_args_number(expression, error):
72 err = TypeError(error % (expression.pop(0),
73 len(expression)))
74 err.start_line = expression.start_line
75 err.start_column = expression.start_column
76 raise err
77
78
79 def checkargs(exact=None, min=None, max=None):
80 def _dec(fn):
81 def checker(self, expression):
82 if exact is not None and (len(expression) - 1) != exact:
83 _raise_wrong_args_number(expression,
84 "`%%s' needs %d arguments, got %%d" %
85 exact)
86
87 if min is not None and (len(expression) - 1) < min:
88 _raise_wrong_args_number(
89 expression,
90 "`%%s' needs at least %d arguments, got %%d" % (min))
91
92 if max is not None and (len(expression) - 1) > max:
93 _raise_wrong_args_number(
94 expression,
95 "`%%s' needs at most %d arguments, got %%d" % (max))
96
97 return fn(self, expression)
98
99 return checker
100 return _dec
101
102
103 class HyASTCompiler(object):
104
105 def __init__(self):
106 self.returnable = False
107 self.anon_fn_count = 0
108
109 def compile(self, tree):
110 try:
111 for _type in _compile_table:
112 if type(tree) == _type:
113 return _compile_table[_type](self, tree)
114 except Exception as e:
115 err = HyCompileError(str(e))
116 err.exception = e
117 err.start_line = getattr(e, "start_line", None)
118 err.start_column = getattr(e, "start_column", None)
119 raise err
120
121 raise HyCompileError("Unknown type - `%s'" % (str(type(tree))))
122
123 def _mangle_branch(self, tree, start_line, start_column):
124 # If tree is empty, just return a pass statement
125 if tree == []:
126 return [ast.Pass(lineno=start_line,
127 col_offset=start_column)]
128
129 ret = []
130 tree = list(flatten_literal_list(tree))
131 tree.reverse()
132
133 if self.returnable and len(tree) > 0:
134 el = tree[0]
135 if not isinstance(el, ast.stmt):
136 el = tree.pop(0)
137 ret.append(ast.Return(value=el,
138 lineno=el.lineno,
139 col_offset=el.col_offset))
140 if isinstance(el, ast.FunctionDef):
141 ret.append(ast.Return(
142 value=ast.Name(
143 arg=el.name, id=el.name, ctx=ast.Load(),
144 lineno=el.lineno, col_offset=el.col_offset),
145 lineno=el.lineno, col_offset=el.col_offset))
146
147 for el in tree:
148 if isinstance(el, ast.stmt):
149 ret.append(el)
150 continue
151
152 ret.append(ast.Expr(value=el,
153 lineno=el.lineno,
154 col_offset=el.col_offset))
155
156 ret.reverse()
157 return ret
158
159 @builds(list)
160 def compile_raw_list(self, entries):
161 return [self.compile(x) for x in entries]
162
163 @builds("do")
164 @builds("progn")
165 def compile_do_expression(self, expr):
166 return [self.compile(x) for x in expr[1:]]
167
168 @builds("throw")
169 @builds("raise")
170 @checkargs(min=1)
171 def compile_throw_expression(self, expr):
172 expr.pop(0)
173 exc = self.compile(expr.pop(0))
174 return ast.Raise(
175 lineno=expr.start_line,
176 col_offset=expr.start_column,
177 type=exc,
178 exc=exc,
179 inst=None,
180 tback=None)
181
182 @builds("try")
183 def compile_try_expression(self, expr):
184 expr.pop(0) # try
185
186 if sys.version_info[0] >= 3 and sys.version_info[1] >= 3:
187 # Python 3.3 features a rename of TryExcept to Try.
188 Try = ast.Try
189 else:
190 Try = ast.TryExcept
191
192 try:
193 body = expr.pop(0)
194 except IndexError:
195 body = []
196
197 # (try something…)
198 body = self._code_branch(self.compile(body),
199 expr.start_line,
200 expr.start_column)
201
202 if len(expr) == 0:
203 # (try) or (try body)
204 handlers = [ast.ExceptHandler(
205 lineno=expr.start_line,
206 col_offset=expr.start_column,
207 type=None,
208 name=None,
209 body=[ast.Pass(lineno=expr.start_line,
210 col_offset=expr.start_column)])]
211 else:
212 # (try body except except…)
213 handlers = [self.compile(s) for s in expr]
214
215 return Try(
216 lineno=expr.start_line,
217 col_offset=expr.start_column,
218 body=body,
219 handlers=handlers,
220 finalbody=[],
221 orelse=[])
222
223 @builds("catch")
224 @builds("except")
225 def compile_catch_expression(self, expr):
226 catch = expr.pop(0) # catch
227
228 try:
229 exceptions = expr.pop(0)
230 except IndexError:
231 exceptions = HyList()
232 # exceptions catch should be either:
233 # [[list of exceptions]]
234 # or
235 # [variable [list of exceptions]]
236 # or
237 # [variable exception]
238 # or
239 # [exception]
240 # or
241 # []
242 if not isinstance(exceptions, HyList):
243 raise TypeError("`%s' exceptions list is not a list" % catch)
244 if len(exceptions) > 2:
245 raise TypeError("`%s' exceptions list is too long" % catch)
246
247 # [variable [list of exceptions]]
248 # let's pop variable and use it as name
249 if len(exceptions) == 2:
250 name = exceptions.pop(0)
251 if sys.version_info[0] >= 3:
252 # Python3 features a change where the Exception handler
253 # moved the name from a Name() to a pure Python String type.
254 #
255 # We'll just make sure it's a pure "string", and let it work
256 # it's magic.
257 name = ast_str(name)
258 else:
259 # Python2 requires an ast.Name, set to ctx Store.
260 name = self._storeize(self.compile(name))
261 else:
262 name = None
263
264 try:
265 exceptions_list = exceptions.pop(0)
266 except IndexError:
267 exceptions_list = []
268
269 if isinstance(exceptions_list, list):
270 if len(exceptions_list):
271 # [FooBar BarFoo] → catch Foobar and BarFoo exceptions
272 _type = ast.Tuple(elts=[self.compile(x)
273 for x in exceptions_list],
274 lineno=expr.start_line,
275 col_offset=expr.start_column,
276 ctx=ast.Load())
277 else:
278 # [] → all exceptions catched
279 _type = None
280 elif isinstance(exceptions_list, HySymbol):
281 _type = self.compile(exceptions_list)
282 else:
283 raise TypeError("`%s' needs a valid exception list" % catch)
284
285 body = self._code_branch([self.compile(x) for x in expr],
286 expr.start_line,
287 expr.start_column)
288
289 return ast.ExceptHandler(
290 lineno=expr.start_line,
291 col_offset=expr.start_column,
292 type=_type,
293 name=name,
294 body=body)
295
296 def _code_branch(self, branch, start_line, start_column):
297 return self._mangle_branch((branch
298 if isinstance(branch, list)
299 else [branch]),
300 start_line,
301 start_column)
302
303 @builds("if")
304 @checkargs(min=2, max=3)
305 def compile_if_expression(self, expr):
306 expr.pop(0) # if
307 test = self.compile(expr.pop(0))
308 body = self._code_branch(self.compile(expr.pop(0)),
309 expr.start_line,
310 expr.start_column)
311
312 if len(expr) == 1:
313 orel = self._code_branch(self.compile(expr.pop(0)),
314 expr.start_line,
315 expr.start_column)
316 else:
317 orel = []
318
319 return ast.If(test=test,
320 body=body,
321 orelse=orel,
322 lineno=expr.start_line,
323 col_offset=expr.start_column)
324
325 @builds("print")
326 def compile_print_expression(self, expr):
327 call = expr.pop(0) # print
328 if sys.version_info[0] >= 3:
329 call = self.compile(call)
330 # AST changed with Python 3, we now just call it.
331 return ast.Call(
332 keywords=[],
333 func=call,
334 args=[self.compile(x) for x in expr],
335 lineno=expr.start_line,
336 col_offset=expr.start_column)
337
338 return ast.Print(
339 lineno=expr.start_line,
340 col_offset=expr.start_column,
341 dest=None,
342 values=[self.compile(x) for x in expr],
343 nl=True)
344
345 @builds("assert")
346 @checkargs(1)
347 def compile_assert_expression(self, expr):
348 expr.pop(0) # assert
349 e = expr.pop(0)
350 return ast.Assert(test=self.compile(e),
351 msg=None,
352 lineno=e.start_line,
353 col_offset=e.start_column)
354
355 @builds("lambda")
356 @checkargs(min=2)
357 def compile_lambda_expression(self, expr):
358 expr.pop(0)
359 sig = expr.pop(0)
360 body = expr.pop(0)
361 # assert expr is empty
362 return ast.Lambda(
363 lineno=expr.start_line,
364 col_offset=expr.start_column,
365 args=ast.arguments(args=[
366 ast.Name(arg=ast_str(x), id=ast_str(x),
367 ctx=ast.Param(),
368 lineno=x.start_line,
369 col_offset=x.start_column)
370 for x in sig],
371 vararg=None,
372 kwarg=None,
373 defaults=[],
374 kwonlyargs=[],
375 kw_defaults=[]),
376 body=self.compile(body))
377
378 @builds("pass")
379 @checkargs(0)
380 def compile_pass_expression(self, expr):
381 return ast.Pass(lineno=expr.start_line, col_offset=expr.start_column)
382
383 @builds("yield")
384 @checkargs(1)
385 def compile_yield_expression(self, expr):
386 expr.pop(0)
387 return ast.Yield(
388 value=self.compile(expr.pop(0)),
389 lineno=expr.start_line,
390 col_offset=expr.start_column)
391
392 @builds("import")
393 def compile_import_expression(self, expr):
394 expr.pop(0) # index
395 return ast.Import(
396 lineno=expr.start_line,
397 col_offset=expr.start_column,
398 names=[ast.alias(name=ast_str(x), asname=None) for x in expr])
399
400 @builds("import_as")
401 def compile_import_as_expression(self, expr):
402 expr.pop(0) # index
403 modlist = [expr[i:i + 2] for i in range(0, len(expr), 2)]
404 return ast.Import(
405 lineno=expr.start_line,
406 col_offset=expr.start_column,
407 module=ast_str(expr.pop(0)),
408 names=[ast.alias(name=ast_str(x[0]),
409 asname=ast_str(x[1])) for x in modlist])
410
411 @builds("import_from")
412 @checkargs(min=1)
413 def compile_import_from_expression(self, expr):
414 expr.pop(0) # index
415 return ast.ImportFrom(
416 lineno=expr.start_line,
417 col_offset=expr.start_column,
418 module=ast_str(expr.pop(0)),
419 names=[ast.alias(name=ast_str(x), asname=None) for x in expr],
420 level=0)
421
422 @builds("get")
423 @checkargs(2)
424 def compile_index_expression(self, expr):
425 expr.pop(0) # index
426 val = self.compile(expr.pop(0)) # target
427 sli = self.compile(expr.pop(0)) # slice
428
429 return ast.Subscript(
430 lineno=expr.start_line,
431 col_offset=expr.start_column,
432 value=val,
433 slice=ast.Index(value=sli),
434 ctx=ast.Load())
435
436 @builds("slice")
437 @checkargs(min=1, max=3)
438 def compile_slice_expression(self, expr):
439 expr.pop(0) # index
440 val = self.compile(expr.pop(0)) # target
441
442 low = None
443 if expr != []:
444 low = self.compile(expr.pop(0))
445
446 high = None
447 if expr != []:
448 high = self.compile(expr.pop(0))
449
450 return ast.Subscript(
451 lineno=expr.start_line,
452 col_offset=expr.start_column,
453 value=val,
454 slice=ast.Slice(lower=low,
455 upper=high,
456 step=None),
457 ctx=ast.Load())
458
459 @builds("assoc")
460 @checkargs(3)
461 def compile_assoc_expression(self, expr):
462 expr.pop(0) # assoc
463 # (assoc foo bar baz) => foo[bar] = baz
464 target = expr.pop(0)
465 key = expr.pop(0)
466 val = expr.pop(0)
467
468 return ast.Assign(
469 lineno=expr.start_line,
470 col_offset=expr.start_column,
471 targets=[
472 ast.Subscript(
473 lineno=expr.start_line,
474 col_offset=expr.start_column,
475 value=self.compile(target),
476 slice=ast.Index(value=self.compile(key)),
477 ctx=ast.Store())],
478 value=self.compile(val))
479
480 @builds("decorate_with")
481 @checkargs(min=1)
482 def compile_decorate_expression(self, expr):
483 expr.pop(0) # decorate-with
484 fn = self.compile(expr.pop(-1))
485 if type(fn) != ast.FunctionDef:
486 raise TypeError("Decorated a non-function")
487 fn.decorator_list = [self.compile(x) for x in expr]
488 return fn
489
490 @builds("with")
491 @checkargs(min=2)
492 def compile_with_expression(self, expr):
493 expr.pop(0) # with
494
495 args = expr.pop(0)
496 if len(args) > 2 or len(args) < 1:
497 raise TypeError("with needs [arg (expr)] or [(expr)]")
498
499 args.reverse()
500 ctx = self.compile(args.pop(0))
501
502 thing = None
503 if args != []:
504 thing = self._storeize(self.compile(args.pop(0)))
505
506 ret = ast.With(context_expr=ctx,
507 lineno=expr.start_line,
508 col_offset=expr.start_column,
509 optional_vars=thing,
510 body=self._code_branch(
511 [self.compile(x) for x in expr],
512 expr.start_line,
513 expr.start_column))
514
515 if sys.version_info[0] >= 3 and sys.version_info[1] >= 3:
516 ret.items = [ast.withitem(context_expr=ctx, optional_vars=thing)]
517
518 return ret
519
520 @builds(",")
521 def compile_tuple(self, expr):
522 expr.pop(0)
523 return ast.Tuple(elts=[self.compile(x) for x in expr],
524 lineno=expr.start_line,
525 col_offset=expr.start_column,
526 ctx=ast.Load())
527
528 @builds("list_comp")
529 @checkargs(min=2, max=3)
530 def compile_list_comprehension(self, expr):
531 # (list-comp expr (target iter) cond?)
532 expr.pop(0)
533 expression = expr.pop(0)
534 tar_it = iter(expr.pop(0))
535 targets = zip(tar_it, tar_it)
536
537 cond = self.compile(expr.pop(0)) if expr != [] else None
538
539 ret = ast.ListComp(
540 lineno=expr.start_line,
541 col_offset=expr.start_column,
542 elt=self.compile(expression),
543 generators=[])
544
545 for target, iterable in targets:
546 ret.generators.append(ast.comprehension(
547 target=self._storeize(self.compile(target)),
548 iter=self.compile(iterable),
549 ifs=[]))
550
551 if cond:
552 ret.generators[-1].ifs.append(cond)
553
554 return ret
555
556 def _storeize(self, name):
557 if isinstance(name, ast.Tuple):
558 for x in name.elts:
559 x.ctx = ast.Store()
560 name.ctx = ast.Store()
561 return name
562
563 @builds("kwapply")
564 @checkargs(2)
565 def compile_kwapply_expression(self, expr):
566 expr.pop(0) # kwapply
567 call = self.compile(expr.pop(0))
568 kwargs = expr.pop(0)
569
570 if type(call) != ast.Call:
571 raise TypeError("kwapplying a non-call")
572
573 call.keywords = [ast.keyword(arg=ast_str(x),
574 value=self.compile(kwargs[x])) for x in kwargs]
575
576 return call
577
578 @builds("not")
579 @builds("~")
580 @checkargs(1)
581 def compile_unary_operator(self, expression):
582 ops = {"not": ast.Not,
583 "~": ast.Invert}
584 operator = expression.pop(0)
585 operand = expression.pop(0)
586 return ast.UnaryOp(op=ops[operator](),
587 operand=self.compile(operand),
588 lineno=operator.start_line,
589 col_offset=operator.start_column)
590
591 @builds("and")
592 @builds("or")
593 @checkargs(min=2)
594 def compile_logical_or_and_and_operator(self, expression):
595 ops = {"and": ast.And,
596 "or": ast.Or}
597 operator = expression.pop(0)
598 values = []
599 for child in expression:
600 values.append(self.compile(child))
601 return ast.BoolOp(op=ops[operator](),
602 lineno=operator.start_line,
603 col_offset=operator.start_column,
604 values=values)
605
606 @builds("=")
607 @builds("!=")
608 @builds("<")
609 @builds("<=")
610 @builds(">")
611 @builds(">=")
612 @builds("is")
613 @builds("in")
614 @builds("is_not")
615 @builds("not_in")
616 @checkargs(min=2)
617 def compile_compare_op_expression(self, expression):
618 ops = {"=": ast.Eq, "!=": ast.NotEq,
619 "<": ast.Lt, "<=": ast.LtE,
620 ">": ast.Gt, ">=": ast.GtE,
621 "is": ast.Is, "is_not": ast.IsNot,
622 "in": ast.In, "not_in": ast.NotIn}
623
624 inv = expression.pop(0)
625 op = ops[inv]
626 ops = [op() for x in range(1, len(expression))]
627 e = expression.pop(0)
628
629 return ast.Compare(left=self.compile(e),
630 ops=ops,
631 comparators=[self.compile(x) for x in expression],
632 lineno=e.start_line,
633 col_offset=e.start_column)
634
635 @builds("+")
636 @builds("%")
637 @builds("-")
638 @builds("/")
639 @builds("*")
640 @checkargs(min=2)
641 def compile_maths_expression(self, expression):
642 # operator = Mod | Pow | LShift | RShift | BitOr |
643 # BitXor | BitAnd | FloorDiv
644 # (to implement list) XXX
645
646 ops = {"+": ast.Add,
647 "/": ast.Div,
648 "*": ast.Mult,
649 "-": ast.Sub,
650 "%": ast.Mod}
651
652 inv = expression.pop(0)
653 op = ops[inv]
654
655 left = self.compile(expression.pop(0))
656 calc = None
657 for child in expression:
658 calc = ast.BinOp(left=left,
659 op=op(),
660 right=self.compile(child),
661 lineno=child.start_line,
662 col_offset=child.start_column)
663 left = calc
664 return calc
665
666 def compile_dotted_expression(self, expr):
667 ofn = expr.pop(0) # .join
668
669 fn = HySymbol(ofn[1:])
670 fn.replace(ofn)
671
672 obj = expr.pop(0) # [1 2 3 4]
673
674 return ast.Call(
675 func=ast.Attribute(
676 lineno=expr.start_line,
677 col_offset=expr.start_column,
678 value=self.compile(obj),
679 attr=ast_str(fn),
680 ctx=ast.Load()),
681 args=[self.compile(x) for x in expr],
682 keywords=[],
683 lineno=expr.start_line,
684 col_offset=expr.start_column,
685 starargs=None,
686 kwargs=None)
687
688 @builds(HyExpression)
689 def compile_expression(self, expression):
690 fn = expression[0]
691 if isinstance(fn, HyString):
692 if fn in _compile_table:
693 return _compile_table[fn](self, expression)
694
695 if expression[0].startswith("."):
696 return self.compile_dotted_expression(expression)
697
698 return ast.Call(func=self.compile(fn),
699 args=[self.compile(x) for x in expression[1:]],
700 keywords=[],
701 starargs=None,
702 kwargs=None,
703 lineno=expression.start_line,
704 col_offset=expression.start_column)
705
706 @builds("def")
707 @builds("setf")
708 @builds("setv")
709 @checkargs(2)
710 def compile_def_expression(self, expression):
711 expression.pop(0) # "def"
712 name = expression.pop(0)
713
714 what = self.compile(expression.pop(0))
715
716 if type(what) == ast.FunctionDef:
717 # We special case a FunctionDef, since we can define by setting
718 # FunctionDef's .name attribute, rather then foo == anon_fn. This
719 # helps keep things clean.
720 what.name = ast_str(name)
721 return what
722
723 name = self._storeize(self.compile(name))
724
725 return ast.Assign(
726 lineno=expression.start_line,
727 col_offset=expression.start_column,
728 targets=[name], value=what)
729
730 @builds("foreach")
731 @checkargs(min=1)
732 def compile_for_expression(self, expression):
733 ret_status = self.returnable
734 self.returnable = False
735
736 expression.pop(0) # for
737 name, iterable = expression.pop(0)
738 target = self._storeize(self.compile_symbol(name))
739
740 ret = ast.For(lineno=expression.start_line,
741 col_offset=expression.start_column,
742 target=target,
743 iter=self.compile(iterable),
744 body=self._code_branch(
745 [self.compile(x) for x in expression],
746 expression.start_line,
747 expression.start_column),
748 orelse=[])
749
750 self.returnable = ret_status
751 return ret
752
753 @builds("while")
754 @checkargs(min=2)
755 def compile_while_expression(self, expr):
756 expr.pop(0) # "while"
757 test = self.compile(expr.pop(0))
758
759 return ast.While(test=test,
760 body=self._code_branch(
761 [self.compile(x) for x in expr],
762 expr.start_line,
763 expr.start_column),
764 orelse=[],
765 lineno=expr.start_line,
766 col_offset=expr.start_column)
767
768 @builds(HyList)
769 def compile_list(self, expr):
770 return ast.List(
771 elts=[self.compile(x) for x in expr],
772 ctx=ast.Load(),
773 lineno=expr.start_line,
774 col_offset=expr.start_column)
775
776 @builds("fn")
777 @checkargs(min=2)
778 def compile_fn_expression(self, expression):
779 expression.pop(0) # fn
780
781 ret_status = self.returnable
782
783 self.anon_fn_count += 1
784 name = "_hy_anon_fn_%d" % (self.anon_fn_count)
785 sig = expression.pop(0)
786
787 body = []
788 if expression != []:
789 self.returnable = True
790 tailop = self.compile(expression.pop(-1))
791 self.returnable = False
792 for el in expression:
793 body.append(self.compile(el))
794 body.append(tailop)
795
796 self.returnable = True
797 body = self._code_branch(body,
798 expression.start_line,
799 expression.start_column)
800
801 ret = ast.FunctionDef(
802 name=name,
803 lineno=expression.start_line,
804 col_offset=expression.start_column,
805 args=ast.arguments(
806 args=[
807 ast.Name(
808 arg=ast_str(x), id=ast_str(x),
809 ctx=ast.Param(),
810 lineno=x.start_line,
811 col_offset=x.start_column)
812 for x in sig],
813 vararg=None,
814 kwarg=None,
815 kwonlyargs=[],
816 kw_defaults=[],
817 defaults=[]),
818 body=body,
819 decorator_list=[])
820
821 self.returnable = ret_status
822 return ret
823
824 @builds(HyInteger)
825 def compile_number(self, number):
826 return ast.Num(n=int(number), # See HyInteger above.
827 lineno=number.start_line,
828 col_offset=number.start_column)
829
830 @builds(HySymbol)
831 def compile_symbol(self, symbol):
832 if "." in symbol:
833 glob, local = symbol.rsplit(".", 1)
834 glob = HySymbol(glob)
835 glob.replace(symbol)
836
837 return ast.Attribute(
838 lineno=symbol.start_line,
839 col_offset=symbol.start_column,
840 value=self.compile_symbol(glob),
841 attr=ast_str(local),
842 ctx=ast.Load()
843 )
844
845 return ast.Name(id=ast_str(symbol),
846 arg=ast_str(symbol),
847 ctx=ast.Load(),
848 lineno=symbol.start_line,
849 col_offset=symbol.start_column)
850
851 @builds(HyString)
852 def compile_string(self, string):
853 return ast.Str(s=ast_str(string), lineno=string.start_line,
854 col_offset=string.start_column)
855
856 @builds(HyDict)
857 def compile_dict(self, m):
858 keys = []
859 vals = []
860 for entry in m:
861 keys.append(self.compile(entry))
862 vals.append(self.compile(m[entry]))
863
864 return ast.Dict(
865 lineno=m.start_line,
866 col_offset=m.start_column,
867 keys=keys,
868 values=vals)
869
870
871 def hy_compile(tree, root=None):
872 " Compile a HyObject tree into a Python AST tree. "
873 compiler = HyASTCompiler()
874 tlo = root
875 if root is None:
876 tlo = ast.Module
877 ret = tlo(body=compiler._mangle_branch(compiler.compile(tree), 0, 0))
878 return ret
879
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/hy/compiler.py b/hy/compiler.py
--- a/hy/compiler.py
+++ b/hy/compiler.py
@@ -167,10 +167,10 @@
 
     @builds("throw")
@builds("raise")
- @checkargs(min=1)
+ @checkargs(max=1)
def compile_throw_expression(self, expr):
expr.pop(0)
- exc = self.compile(expr.pop(0))
+ exc = self.compile(expr.pop(0)) if expr else None
return ast.Raise(
lineno=expr.start_line,
col_offset=expr.start_column,
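The hunk above relaxes the arity check (`@checkargs(max=1)` instead of `min=1`) and leaves `exc` as `None` when `(raise)` receives no argument, so the emitted `ast.Raise` carries no exception and compiles to Python's bare `raise`, which re-raises the exception currently being handled. A minimal sketch of that runtime behavior, in plain Python rather than Hy:

```python
def handle():
    try:
        raise ValueError("boom")
    except ValueError:
        # Equivalent of Hy's argument-less (raise) after the patch:
        # a bare raise re-raises the active exception unchanged.
        raise

try:
    handle()
except ValueError as err:
    print("re-raised:", err)  # -> re-raised: boom
```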
| {"golden_diff": "diff --git a/hy/compiler.py b/hy/compiler.py\n--- a/hy/compiler.py\n+++ b/hy/compiler.py\n@@ -167,10 +167,10 @@\n \n @builds(\"throw\")\n @builds(\"raise\")\n- @checkargs(min=1)\n+ @checkargs(max=1)\n def compile_throw_expression(self, expr):\n expr.pop(0)\n- exc = self.compile(expr.pop(0))\n+ exc = self.compile(expr.pop(0)) if expr else None\n return ast.Raise(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n", "issue": "Allow (raise)\nThat's actually valid in Python to re-raise the last catched exception.\n\n", "before_files": [{"content": "# -*- encoding: utf-8 -*-\n#\n# Copyright (c) 2013 Paul Tagliamonte <[email protected]>\n# Copyright (c) 2013 Julien Danjou <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a\n# copy of this software and associated documentation files (the \"Software\"),\n# to deal in the Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish, distribute, sublicense,\n# and/or sell copies of the Software, and to permit persons to whom the\n# Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\n# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n# DEALINGS IN THE SOFTWARE.\n\nfrom hy.errors import HyError\n\nfrom hy.models.expression import HyExpression\nfrom hy.models.integer import HyInteger\nfrom hy.models.string import HyString\nfrom hy.models.symbol import HySymbol\nfrom hy.models.list import HyList\nfrom hy.models.dict import HyDict\n\nfrom hy.util import flatten_literal_list\n\nimport codecs\nimport ast\nimport sys\n\n\nclass HyCompileError(HyError):\n pass\n\n\n_compile_table = {}\n\n\ndef ast_str(foobar):\n if sys.version_info[0] >= 3:\n return str(foobar)\n\n try:\n return str(foobar)\n except UnicodeEncodeError:\n pass\n\n enc = codecs.getencoder('punycode')\n foobar, _ = enc(foobar)\n return \"__hy_%s\" % (str(foobar).replace(\"-\", \"_\"))\n\n\ndef builds(_type):\n def _dec(fn):\n _compile_table[_type] = fn\n\n def shim(*args, **kwargs):\n return fn(*args, **kwargs)\n return shim\n return _dec\n\n\ndef _raise_wrong_args_number(expression, error):\n err = TypeError(error % (expression.pop(0),\n len(expression)))\n err.start_line = expression.start_line\n err.start_column = expression.start_column\n raise err\n\n\ndef checkargs(exact=None, min=None, max=None):\n def _dec(fn):\n def checker(self, expression):\n if exact is not None and (len(expression) - 1) != exact:\n _raise_wrong_args_number(expression,\n \"`%%s' needs %d arguments, got %%d\" %\n exact)\n\n if min is not None and (len(expression) - 1) < min:\n _raise_wrong_args_number(\n expression,\n \"`%%s' needs at least %d arguments, got %%d\" % (min))\n\n if max is not None and (len(expression) - 1) > max:\n _raise_wrong_args_number(\n expression,\n \"`%%s' needs at most %d arguments, got %%d\" % (max))\n\n return fn(self, expression)\n\n return checker\n return _dec\n\n\nclass HyASTCompiler(object):\n\n def 
__init__(self):\n self.returnable = False\n self.anon_fn_count = 0\n\n def compile(self, tree):\n try:\n for _type in _compile_table:\n if type(tree) == _type:\n return _compile_table[_type](self, tree)\n except Exception as e:\n err = HyCompileError(str(e))\n err.exception = e\n err.start_line = getattr(e, \"start_line\", None)\n err.start_column = getattr(e, \"start_column\", None)\n raise err\n\n raise HyCompileError(\"Unknown type - `%s'\" % (str(type(tree))))\n\n def _mangle_branch(self, tree, start_line, start_column):\n # If tree is empty, just return a pass statement\n if tree == []:\n return [ast.Pass(lineno=start_line,\n col_offset=start_column)]\n\n ret = []\n tree = list(flatten_literal_list(tree))\n tree.reverse()\n\n if self.returnable and len(tree) > 0:\n el = tree[0]\n if not isinstance(el, ast.stmt):\n el = tree.pop(0)\n ret.append(ast.Return(value=el,\n lineno=el.lineno,\n col_offset=el.col_offset))\n if isinstance(el, ast.FunctionDef):\n ret.append(ast.Return(\n value=ast.Name(\n arg=el.name, id=el.name, ctx=ast.Load(),\n lineno=el.lineno, col_offset=el.col_offset),\n lineno=el.lineno, col_offset=el.col_offset))\n\n for el in tree:\n if isinstance(el, ast.stmt):\n ret.append(el)\n continue\n\n ret.append(ast.Expr(value=el,\n lineno=el.lineno,\n col_offset=el.col_offset))\n\n ret.reverse()\n return ret\n\n @builds(list)\n def compile_raw_list(self, entries):\n return [self.compile(x) for x in entries]\n\n @builds(\"do\")\n @builds(\"progn\")\n def compile_do_expression(self, expr):\n return [self.compile(x) for x in expr[1:]]\n\n @builds(\"throw\")\n @builds(\"raise\")\n @checkargs(min=1)\n def compile_throw_expression(self, expr):\n expr.pop(0)\n exc = self.compile(expr.pop(0))\n return ast.Raise(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n type=exc,\n exc=exc,\n inst=None,\n tback=None)\n\n @builds(\"try\")\n def compile_try_expression(self, expr):\n expr.pop(0) # try\n\n if sys.version_info[0] >= 3 and sys.version_info[1] >= 3:\n # Python 3.3 features a rename of TryExcept to Try.\n Try = ast.Try\n else:\n Try = ast.TryExcept\n\n try:\n body = expr.pop(0)\n except IndexError:\n body = []\n\n # (try something\u2026)\n body = self._code_branch(self.compile(body),\n expr.start_line,\n expr.start_column)\n\n if len(expr) == 0:\n # (try) or (try body)\n handlers = [ast.ExceptHandler(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n type=None,\n name=None,\n body=[ast.Pass(lineno=expr.start_line,\n col_offset=expr.start_column)])]\n else:\n # (try body except except\u2026)\n handlers = [self.compile(s) for s in expr]\n\n return Try(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n body=body,\n handlers=handlers,\n finalbody=[],\n orelse=[])\n\n @builds(\"catch\")\n @builds(\"except\")\n def compile_catch_expression(self, expr):\n catch = expr.pop(0) # catch\n\n try:\n exceptions = expr.pop(0)\n except IndexError:\n exceptions = HyList()\n # exceptions catch should be either:\n # [[list of exceptions]]\n # or\n # [variable [list of exceptions]]\n # or\n # [variable exception]\n # or\n # [exception]\n # or\n # []\n if not isinstance(exceptions, HyList):\n raise TypeError(\"`%s' exceptions list is not a list\" % catch)\n if len(exceptions) > 2:\n raise TypeError(\"`%s' exceptions list is too long\" % catch)\n\n # [variable [list of exceptions]]\n # let's pop variable and use it as name\n if len(exceptions) == 2:\n name = exceptions.pop(0)\n if sys.version_info[0] >= 3:\n # Python3 features a change where the Exception handler\n # moved the 
name from a Name() to a pure Python String type.\n #\n # We'll just make sure it's a pure \"string\", and let it work\n # it's magic.\n name = ast_str(name)\n else:\n # Python2 requires an ast.Name, set to ctx Store.\n name = self._storeize(self.compile(name))\n else:\n name = None\n\n try:\n exceptions_list = exceptions.pop(0)\n except IndexError:\n exceptions_list = []\n\n if isinstance(exceptions_list, list):\n if len(exceptions_list):\n # [FooBar BarFoo] \u2192 catch Foobar and BarFoo exceptions\n _type = ast.Tuple(elts=[self.compile(x)\n for x in exceptions_list],\n lineno=expr.start_line,\n col_offset=expr.start_column,\n ctx=ast.Load())\n else:\n # [] \u2192 all exceptions catched\n _type = None\n elif isinstance(exceptions_list, HySymbol):\n _type = self.compile(exceptions_list)\n else:\n raise TypeError(\"`%s' needs a valid exception list\" % catch)\n\n body = self._code_branch([self.compile(x) for x in expr],\n expr.start_line,\n expr.start_column)\n\n return ast.ExceptHandler(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n type=_type,\n name=name,\n body=body)\n\n def _code_branch(self, branch, start_line, start_column):\n return self._mangle_branch((branch\n if isinstance(branch, list)\n else [branch]),\n start_line,\n start_column)\n\n @builds(\"if\")\n @checkargs(min=2, max=3)\n def compile_if_expression(self, expr):\n expr.pop(0) # if\n test = self.compile(expr.pop(0))\n body = self._code_branch(self.compile(expr.pop(0)),\n expr.start_line,\n expr.start_column)\n\n if len(expr) == 1:\n orel = self._code_branch(self.compile(expr.pop(0)),\n expr.start_line,\n expr.start_column)\n else:\n orel = []\n\n return ast.If(test=test,\n body=body,\n orelse=orel,\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n @builds(\"print\")\n def compile_print_expression(self, expr):\n call = expr.pop(0) # print\n if sys.version_info[0] >= 3:\n call = self.compile(call)\n # AST changed with Python 3, we now just call it.\n return ast.Call(\n keywords=[],\n func=call,\n args=[self.compile(x) for x in expr],\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n return ast.Print(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n dest=None,\n values=[self.compile(x) for x in expr],\n nl=True)\n\n @builds(\"assert\")\n @checkargs(1)\n def compile_assert_expression(self, expr):\n expr.pop(0) # assert\n e = expr.pop(0)\n return ast.Assert(test=self.compile(e),\n msg=None,\n lineno=e.start_line,\n col_offset=e.start_column)\n\n @builds(\"lambda\")\n @checkargs(min=2)\n def compile_lambda_expression(self, expr):\n expr.pop(0)\n sig = expr.pop(0)\n body = expr.pop(0)\n # assert expr is empty\n return ast.Lambda(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n args=ast.arguments(args=[\n ast.Name(arg=ast_str(x), id=ast_str(x),\n ctx=ast.Param(),\n lineno=x.start_line,\n col_offset=x.start_column)\n for x in sig],\n vararg=None,\n kwarg=None,\n defaults=[],\n kwonlyargs=[],\n kw_defaults=[]),\n body=self.compile(body))\n\n @builds(\"pass\")\n @checkargs(0)\n def compile_pass_expression(self, expr):\n return ast.Pass(lineno=expr.start_line, col_offset=expr.start_column)\n\n @builds(\"yield\")\n @checkargs(1)\n def compile_yield_expression(self, expr):\n expr.pop(0)\n return ast.Yield(\n value=self.compile(expr.pop(0)),\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n @builds(\"import\")\n def compile_import_expression(self, expr):\n expr.pop(0) # index\n return ast.Import(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n 
names=[ast.alias(name=ast_str(x), asname=None) for x in expr])\n\n @builds(\"import_as\")\n def compile_import_as_expression(self, expr):\n expr.pop(0) # index\n modlist = [expr[i:i + 2] for i in range(0, len(expr), 2)]\n return ast.Import(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n module=ast_str(expr.pop(0)),\n names=[ast.alias(name=ast_str(x[0]),\n asname=ast_str(x[1])) for x in modlist])\n\n @builds(\"import_from\")\n @checkargs(min=1)\n def compile_import_from_expression(self, expr):\n expr.pop(0) # index\n return ast.ImportFrom(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n module=ast_str(expr.pop(0)),\n names=[ast.alias(name=ast_str(x), asname=None) for x in expr],\n level=0)\n\n @builds(\"get\")\n @checkargs(2)\n def compile_index_expression(self, expr):\n expr.pop(0) # index\n val = self.compile(expr.pop(0)) # target\n sli = self.compile(expr.pop(0)) # slice\n\n return ast.Subscript(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n value=val,\n slice=ast.Index(value=sli),\n ctx=ast.Load())\n\n @builds(\"slice\")\n @checkargs(min=1, max=3)\n def compile_slice_expression(self, expr):\n expr.pop(0) # index\n val = self.compile(expr.pop(0)) # target\n\n low = None\n if expr != []:\n low = self.compile(expr.pop(0))\n\n high = None\n if expr != []:\n high = self.compile(expr.pop(0))\n\n return ast.Subscript(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n value=val,\n slice=ast.Slice(lower=low,\n upper=high,\n step=None),\n ctx=ast.Load())\n\n @builds(\"assoc\")\n @checkargs(3)\n def compile_assoc_expression(self, expr):\n expr.pop(0) # assoc\n # (assoc foo bar baz) => foo[bar] = baz\n target = expr.pop(0)\n key = expr.pop(0)\n val = expr.pop(0)\n\n return ast.Assign(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n targets=[\n ast.Subscript(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n value=self.compile(target),\n slice=ast.Index(value=self.compile(key)),\n ctx=ast.Store())],\n value=self.compile(val))\n\n @builds(\"decorate_with\")\n @checkargs(min=1)\n def compile_decorate_expression(self, expr):\n expr.pop(0) # decorate-with\n fn = self.compile(expr.pop(-1))\n if type(fn) != ast.FunctionDef:\n raise TypeError(\"Decorated a non-function\")\n fn.decorator_list = [self.compile(x) for x in expr]\n return fn\n\n @builds(\"with\")\n @checkargs(min=2)\n def compile_with_expression(self, expr):\n expr.pop(0) # with\n\n args = expr.pop(0)\n if len(args) > 2 or len(args) < 1:\n raise TypeError(\"with needs [arg (expr)] or [(expr)]\")\n\n args.reverse()\n ctx = self.compile(args.pop(0))\n\n thing = None\n if args != []:\n thing = self._storeize(self.compile(args.pop(0)))\n\n ret = ast.With(context_expr=ctx,\n lineno=expr.start_line,\n col_offset=expr.start_column,\n optional_vars=thing,\n body=self._code_branch(\n [self.compile(x) for x in expr],\n expr.start_line,\n expr.start_column))\n\n if sys.version_info[0] >= 3 and sys.version_info[1] >= 3:\n ret.items = [ast.withitem(context_expr=ctx, optional_vars=thing)]\n\n return ret\n\n @builds(\",\")\n def compile_tuple(self, expr):\n expr.pop(0)\n return ast.Tuple(elts=[self.compile(x) for x in expr],\n lineno=expr.start_line,\n col_offset=expr.start_column,\n ctx=ast.Load())\n\n @builds(\"list_comp\")\n @checkargs(min=2, max=3)\n def compile_list_comprehension(self, expr):\n # (list-comp expr (target iter) cond?)\n expr.pop(0)\n expression = expr.pop(0)\n tar_it = iter(expr.pop(0))\n targets = zip(tar_it, tar_it)\n\n cond = self.compile(expr.pop(0)) if expr != [] 
else None\n\n ret = ast.ListComp(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n elt=self.compile(expression),\n generators=[])\n\n for target, iterable in targets:\n ret.generators.append(ast.comprehension(\n target=self._storeize(self.compile(target)),\n iter=self.compile(iterable),\n ifs=[]))\n\n if cond:\n ret.generators[-1].ifs.append(cond)\n\n return ret\n\n def _storeize(self, name):\n if isinstance(name, ast.Tuple):\n for x in name.elts:\n x.ctx = ast.Store()\n name.ctx = ast.Store()\n return name\n\n @builds(\"kwapply\")\n @checkargs(2)\n def compile_kwapply_expression(self, expr):\n expr.pop(0) # kwapply\n call = self.compile(expr.pop(0))\n kwargs = expr.pop(0)\n\n if type(call) != ast.Call:\n raise TypeError(\"kwapplying a non-call\")\n\n call.keywords = [ast.keyword(arg=ast_str(x),\n value=self.compile(kwargs[x])) for x in kwargs]\n\n return call\n\n @builds(\"not\")\n @builds(\"~\")\n @checkargs(1)\n def compile_unary_operator(self, expression):\n ops = {\"not\": ast.Not,\n \"~\": ast.Invert}\n operator = expression.pop(0)\n operand = expression.pop(0)\n return ast.UnaryOp(op=ops[operator](),\n operand=self.compile(operand),\n lineno=operator.start_line,\n col_offset=operator.start_column)\n\n @builds(\"and\")\n @builds(\"or\")\n @checkargs(min=2)\n def compile_logical_or_and_and_operator(self, expression):\n ops = {\"and\": ast.And,\n \"or\": ast.Or}\n operator = expression.pop(0)\n values = []\n for child in expression:\n values.append(self.compile(child))\n return ast.BoolOp(op=ops[operator](),\n lineno=operator.start_line,\n col_offset=operator.start_column,\n values=values)\n\n @builds(\"=\")\n @builds(\"!=\")\n @builds(\"<\")\n @builds(\"<=\")\n @builds(\">\")\n @builds(\">=\")\n @builds(\"is\")\n @builds(\"in\")\n @builds(\"is_not\")\n @builds(\"not_in\")\n @checkargs(min=2)\n def compile_compare_op_expression(self, expression):\n ops = {\"=\": ast.Eq, \"!=\": ast.NotEq,\n \"<\": ast.Lt, \"<=\": ast.LtE,\n \">\": ast.Gt, \">=\": ast.GtE,\n \"is\": ast.Is, \"is_not\": ast.IsNot,\n \"in\": ast.In, \"not_in\": ast.NotIn}\n\n inv = expression.pop(0)\n op = ops[inv]\n ops = [op() for x in range(1, len(expression))]\n e = expression.pop(0)\n\n return ast.Compare(left=self.compile(e),\n ops=ops,\n comparators=[self.compile(x) for x in expression],\n lineno=e.start_line,\n col_offset=e.start_column)\n\n @builds(\"+\")\n @builds(\"%\")\n @builds(\"-\")\n @builds(\"/\")\n @builds(\"*\")\n @checkargs(min=2)\n def compile_maths_expression(self, expression):\n # operator = Mod | Pow | LShift | RShift | BitOr |\n # BitXor | BitAnd | FloorDiv\n # (to implement list) XXX\n\n ops = {\"+\": ast.Add,\n \"/\": ast.Div,\n \"*\": ast.Mult,\n \"-\": ast.Sub,\n \"%\": ast.Mod}\n\n inv = expression.pop(0)\n op = ops[inv]\n\n left = self.compile(expression.pop(0))\n calc = None\n for child in expression:\n calc = ast.BinOp(left=left,\n op=op(),\n right=self.compile(child),\n lineno=child.start_line,\n col_offset=child.start_column)\n left = calc\n return calc\n\n def compile_dotted_expression(self, expr):\n ofn = expr.pop(0) # .join\n\n fn = HySymbol(ofn[1:])\n fn.replace(ofn)\n\n obj = expr.pop(0) # [1 2 3 4]\n\n return ast.Call(\n func=ast.Attribute(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n value=self.compile(obj),\n attr=ast_str(fn),\n ctx=ast.Load()),\n args=[self.compile(x) for x in expr],\n keywords=[],\n lineno=expr.start_line,\n col_offset=expr.start_column,\n starargs=None,\n kwargs=None)\n\n @builds(HyExpression)\n def compile_expression(self, 
expression):\n fn = expression[0]\n if isinstance(fn, HyString):\n if fn in _compile_table:\n return _compile_table[fn](self, expression)\n\n if expression[0].startswith(\".\"):\n return self.compile_dotted_expression(expression)\n\n return ast.Call(func=self.compile(fn),\n args=[self.compile(x) for x in expression[1:]],\n keywords=[],\n starargs=None,\n kwargs=None,\n lineno=expression.start_line,\n col_offset=expression.start_column)\n\n @builds(\"def\")\n @builds(\"setf\")\n @builds(\"setv\")\n @checkargs(2)\n def compile_def_expression(self, expression):\n expression.pop(0) # \"def\"\n name = expression.pop(0)\n\n what = self.compile(expression.pop(0))\n\n if type(what) == ast.FunctionDef:\n # We special case a FunctionDef, since we can define by setting\n # FunctionDef's .name attribute, rather then foo == anon_fn. This\n # helps keep things clean.\n what.name = ast_str(name)\n return what\n\n name = self._storeize(self.compile(name))\n\n return ast.Assign(\n lineno=expression.start_line,\n col_offset=expression.start_column,\n targets=[name], value=what)\n\n @builds(\"foreach\")\n @checkargs(min=1)\n def compile_for_expression(self, expression):\n ret_status = self.returnable\n self.returnable = False\n\n expression.pop(0) # for\n name, iterable = expression.pop(0)\n target = self._storeize(self.compile_symbol(name))\n\n ret = ast.For(lineno=expression.start_line,\n col_offset=expression.start_column,\n target=target,\n iter=self.compile(iterable),\n body=self._code_branch(\n [self.compile(x) for x in expression],\n expression.start_line,\n expression.start_column),\n orelse=[])\n\n self.returnable = ret_status\n return ret\n\n @builds(\"while\")\n @checkargs(min=2)\n def compile_while_expression(self, expr):\n expr.pop(0) # \"while\"\n test = self.compile(expr.pop(0))\n\n return ast.While(test=test,\n body=self._code_branch(\n [self.compile(x) for x in expr],\n expr.start_line,\n expr.start_column),\n orelse=[],\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n @builds(HyList)\n def compile_list(self, expr):\n return ast.List(\n elts=[self.compile(x) for x in expr],\n ctx=ast.Load(),\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n @builds(\"fn\")\n @checkargs(min=2)\n def compile_fn_expression(self, expression):\n expression.pop(0) # fn\n\n ret_status = self.returnable\n\n self.anon_fn_count += 1\n name = \"_hy_anon_fn_%d\" % (self.anon_fn_count)\n sig = expression.pop(0)\n\n body = []\n if expression != []:\n self.returnable = True\n tailop = self.compile(expression.pop(-1))\n self.returnable = False\n for el in expression:\n body.append(self.compile(el))\n body.append(tailop)\n\n self.returnable = True\n body = self._code_branch(body,\n expression.start_line,\n expression.start_column)\n\n ret = ast.FunctionDef(\n name=name,\n lineno=expression.start_line,\n col_offset=expression.start_column,\n args=ast.arguments(\n args=[\n ast.Name(\n arg=ast_str(x), id=ast_str(x),\n ctx=ast.Param(),\n lineno=x.start_line,\n col_offset=x.start_column)\n for x in sig],\n vararg=None,\n kwarg=None,\n kwonlyargs=[],\n kw_defaults=[],\n defaults=[]),\n body=body,\n decorator_list=[])\n\n self.returnable = ret_status\n return ret\n\n @builds(HyInteger)\n def compile_number(self, number):\n return ast.Num(n=int(number), # See HyInteger above.\n lineno=number.start_line,\n col_offset=number.start_column)\n\n @builds(HySymbol)\n def compile_symbol(self, symbol):\n if \".\" in symbol:\n glob, local = symbol.rsplit(\".\", 1)\n glob = HySymbol(glob)\n glob.replace(symbol)\n\n 
return ast.Attribute(\n lineno=symbol.start_line,\n col_offset=symbol.start_column,\n value=self.compile_symbol(glob),\n attr=ast_str(local),\n ctx=ast.Load()\n )\n\n return ast.Name(id=ast_str(symbol),\n arg=ast_str(symbol),\n ctx=ast.Load(),\n lineno=symbol.start_line,\n col_offset=symbol.start_column)\n\n @builds(HyString)\n def compile_string(self, string):\n return ast.Str(s=ast_str(string), lineno=string.start_line,\n col_offset=string.start_column)\n\n @builds(HyDict)\n def compile_dict(self, m):\n keys = []\n vals = []\n for entry in m:\n keys.append(self.compile(entry))\n vals.append(self.compile(m[entry]))\n\n return ast.Dict(\n lineno=m.start_line,\n col_offset=m.start_column,\n keys=keys,\n values=vals)\n\n\ndef hy_compile(tree, root=None):\n \" Compile a HyObject tree into a Python AST tree. \"\n compiler = HyASTCompiler()\n tlo = root\n if root is None:\n tlo = ast.Module\n ret = tlo(body=compiler._mangle_branch(compiler.compile(tree), 0, 0))\n return ret\n", "path": "hy/compiler.py"}], "after_files": [{"content": "# -*- encoding: utf-8 -*-\n#\n# Copyright (c) 2013 Paul Tagliamonte <[email protected]>\n# Copyright (c) 2013 Julien Danjou <[email protected]>\n#\n# Permission is hereby granted, free of charge, to any person obtaining a\n# copy of this software and associated documentation files (the \"Software\"),\n# to deal in the Software without restriction, including without limitation\n# the rights to use, copy, modify, merge, publish, distribute, sublicense,\n# and/or sell copies of the Software, and to permit persons to whom the\n# Software is furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL\n# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n# DEALINGS IN THE SOFTWARE.\n\nfrom hy.errors import HyError\n\nfrom hy.models.expression import HyExpression\nfrom hy.models.integer import HyInteger\nfrom hy.models.string import HyString\nfrom hy.models.symbol import HySymbol\nfrom hy.models.list import HyList\nfrom hy.models.dict import HyDict\n\nfrom hy.util import flatten_literal_list\n\nimport codecs\nimport ast\nimport sys\n\n\nclass HyCompileError(HyError):\n pass\n\n\n_compile_table = {}\n\n\ndef ast_str(foobar):\n if sys.version_info[0] >= 3:\n return str(foobar)\n\n try:\n return str(foobar)\n except UnicodeEncodeError:\n pass\n\n enc = codecs.getencoder('punycode')\n foobar, _ = enc(foobar)\n return \"__hy_%s\" % (str(foobar).replace(\"-\", \"_\"))\n\n\ndef builds(_type):\n def _dec(fn):\n _compile_table[_type] = fn\n\n def shim(*args, **kwargs):\n return fn(*args, **kwargs)\n return shim\n return _dec\n\n\ndef _raise_wrong_args_number(expression, error):\n err = TypeError(error % (expression.pop(0),\n len(expression)))\n err.start_line = expression.start_line\n err.start_column = expression.start_column\n raise err\n\n\ndef checkargs(exact=None, min=None, max=None):\n def _dec(fn):\n def checker(self, expression):\n if exact is not None and (len(expression) - 1) != exact:\n _raise_wrong_args_number(expression,\n \"`%%s' needs %d arguments, got %%d\" %\n exact)\n\n if min is not None and (len(expression) - 1) < min:\n _raise_wrong_args_number(\n expression,\n \"`%%s' needs at least %d arguments, got %%d\" % (min))\n\n if max is not None and (len(expression) - 1) > max:\n _raise_wrong_args_number(\n expression,\n \"`%%s' needs at most %d arguments, got %%d\" % (max))\n\n return fn(self, expression)\n\n return checker\n return _dec\n\n\nclass HyASTCompiler(object):\n\n def __init__(self):\n self.returnable = False\n self.anon_fn_count = 0\n\n def compile(self, tree):\n try:\n for _type in _compile_table:\n if type(tree) == _type:\n return _compile_table[_type](self, tree)\n except Exception as e:\n err = HyCompileError(str(e))\n err.exception = e\n err.start_line = getattr(e, \"start_line\", None)\n err.start_column = getattr(e, \"start_column\", None)\n raise err\n\n raise HyCompileError(\"Unknown type - `%s'\" % (str(type(tree))))\n\n def _mangle_branch(self, tree, start_line, start_column):\n # If tree is empty, just return a pass statement\n if tree == []:\n return [ast.Pass(lineno=start_line,\n col_offset=start_column)]\n\n ret = []\n tree = list(flatten_literal_list(tree))\n tree.reverse()\n\n if self.returnable and len(tree) > 0:\n el = tree[0]\n if not isinstance(el, ast.stmt):\n el = tree.pop(0)\n ret.append(ast.Return(value=el,\n lineno=el.lineno,\n col_offset=el.col_offset))\n if isinstance(el, ast.FunctionDef):\n ret.append(ast.Return(\n value=ast.Name(\n arg=el.name, id=el.name, ctx=ast.Load(),\n lineno=el.lineno, col_offset=el.col_offset),\n lineno=el.lineno, col_offset=el.col_offset))\n\n for el in tree:\n if isinstance(el, ast.stmt):\n ret.append(el)\n continue\n\n ret.append(ast.Expr(value=el,\n lineno=el.lineno,\n col_offset=el.col_offset))\n\n ret.reverse()\n return ret\n\n @builds(list)\n def compile_raw_list(self, entries):\n return [self.compile(x) for x in entries]\n\n @builds(\"do\")\n @builds(\"progn\")\n def compile_do_expression(self, expr):\n return 
[self.compile(x) for x in expr[1:]]\n\n @builds(\"throw\")\n @builds(\"raise\")\n @checkargs(max=1)\n def compile_throw_expression(self, expr):\n expr.pop(0)\n exc = self.compile(expr.pop(0)) if expr else None\n return ast.Raise(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n type=exc,\n exc=exc,\n inst=None,\n tback=None)\n\n @builds(\"try\")\n def compile_try_expression(self, expr):\n expr.pop(0) # try\n\n if sys.version_info[0] >= 3 and sys.version_info[1] >= 3:\n # Python 3.3 features a rename of TryExcept to Try.\n Try = ast.Try\n else:\n Try = ast.TryExcept\n\n try:\n body = expr.pop(0)\n except IndexError:\n body = []\n\n # (try something\u2026)\n body = self._code_branch(self.compile(body),\n expr.start_line,\n expr.start_column)\n\n if len(expr) == 0:\n # (try) or (try body)\n handlers = [ast.ExceptHandler(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n type=None,\n name=None,\n body=[ast.Pass(lineno=expr.start_line,\n col_offset=expr.start_column)])]\n else:\n # (try body except except\u2026)\n handlers = [self.compile(s) for s in expr]\n\n return Try(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n body=body,\n handlers=handlers,\n finalbody=[],\n orelse=[])\n\n @builds(\"catch\")\n @builds(\"except\")\n def compile_catch_expression(self, expr):\n expr.pop(0) # catch\n\n try:\n exceptions = expr.pop(0)\n except IndexError:\n exceptions = []\n # exceptions catch should be either:\n # [[list of exceptions]]\n # or\n # [variable [list of exceptions]]\n # or\n # [variable exception]\n # or\n # [exception]\n # or\n # []\n if len(exceptions) > 2:\n raise TypeError(\"`catch' exceptions list is too long\")\n\n # [variable [list of exceptions]]\n # let's pop variable and use it as name\n if len(exceptions) == 2:\n name = exceptions.pop(0)\n if sys.version_info[0] >= 3:\n # Python3 features a change where the Exception handler\n # moved the name from a Name() to a pure Python String type.\n #\n # We'll just make sure it's a pure \"string\", and let it work\n # it's magic.\n name = ast_str(name)\n else:\n # Python2 requires an ast.Name, set to ctx Store.\n name = self._storeize(self.compile(name))\n else:\n name = None\n\n try:\n exceptions_list = exceptions.pop(0)\n except IndexError:\n exceptions_list = []\n\n if isinstance(exceptions_list, list):\n if len(exceptions_list):\n # [FooBar BarFoo] \u2192 catch Foobar and BarFoo exceptions\n _type = ast.Tuple(elts=[self.compile(x)\n for x in exceptions_list],\n lineno=expr.start_line,\n col_offset=expr.start_column,\n ctx=ast.Load())\n else:\n # [] \u2192 all exceptions catched\n _type = None\n elif isinstance(exceptions_list, HySymbol):\n _type = self.compile(exceptions_list)\n else:\n raise TypeError(\"`catch' needs a valid exception list to catch\")\n\n body = self._code_branch([self.compile(x) for x in expr],\n expr.start_line,\n expr.start_column)\n\n return ast.ExceptHandler(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n type=_type,\n name=name,\n body=body)\n\n def _code_branch(self, branch, start_line, start_column):\n return self._mangle_branch((branch\n if isinstance(branch, list)\n else [branch]),\n start_line,\n start_column)\n\n @builds(\"if\")\n @checkargs(min=2, max=3)\n def compile_if_expression(self, expr):\n expr.pop(0) # if\n test = self.compile(expr.pop(0))\n body = self._code_branch(self.compile(expr.pop(0)),\n expr.start_line,\n expr.start_column)\n\n if len(expr) == 1:\n orel = self._code_branch(self.compile(expr.pop(0)),\n expr.start_line,\n expr.start_column)\n 
else:\n orel = []\n\n return ast.If(test=test,\n body=body,\n orelse=orel,\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n @builds(\"print\")\n def compile_print_expression(self, expr):\n call = expr.pop(0) # print\n if sys.version_info[0] >= 3:\n call = self.compile(call)\n # AST changed with Python 3, we now just call it.\n return ast.Call(\n keywords=[],\n func=call,\n args=[self.compile(x) for x in expr],\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n return ast.Print(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n dest=None,\n values=[self.compile(x) for x in expr],\n nl=True)\n\n @builds(\"assert\")\n @checkargs(1)\n def compile_assert_expression(self, expr):\n expr.pop(0) # assert\n e = expr.pop(0)\n return ast.Assert(test=self.compile(e),\n msg=None,\n lineno=e.start_line,\n col_offset=e.start_column)\n\n @builds(\"lambda\")\n @checkargs(min=2)\n def compile_lambda_expression(self, expr):\n expr.pop(0)\n sig = expr.pop(0)\n body = expr.pop(0)\n # assert expr is empty\n return ast.Lambda(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n args=ast.arguments(args=[\n ast.Name(arg=ast_str(x), id=ast_str(x),\n ctx=ast.Param(),\n lineno=x.start_line,\n col_offset=x.start_column)\n for x in sig],\n vararg=None,\n kwarg=None,\n defaults=[],\n kwonlyargs=[],\n kw_defaults=[]),\n body=self.compile(body))\n\n @builds(\"pass\")\n @checkargs(0)\n def compile_pass_expression(self, expr):\n return ast.Pass(lineno=expr.start_line, col_offset=expr.start_column)\n\n @builds(\"yield\")\n @checkargs(1)\n def compile_yield_expression(self, expr):\n expr.pop(0)\n return ast.Yield(\n value=self.compile(expr.pop(0)),\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n @builds(\"import\")\n def compile_import_expression(self, expr):\n expr.pop(0) # index\n return ast.Import(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n names=[ast.alias(name=ast_str(x), asname=None) for x in expr])\n\n @builds(\"import_as\")\n def compile_import_as_expression(self, expr):\n expr.pop(0) # index\n modlist = [expr[i:i + 2] for i in range(0, len(expr), 2)]\n return ast.Import(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n module=ast_str(expr.pop(0)),\n names=[ast.alias(name=ast_str(x[0]),\n asname=ast_str(x[1])) for x in modlist])\n\n @builds(\"import_from\")\n @checkargs(min=1)\n def compile_import_from_expression(self, expr):\n expr.pop(0) # index\n return ast.ImportFrom(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n module=ast_str(expr.pop(0)),\n names=[ast.alias(name=ast_str(x), asname=None) for x in expr],\n level=0)\n\n @builds(\"get\")\n @checkargs(2)\n def compile_index_expression(self, expr):\n expr.pop(0) # index\n val = self.compile(expr.pop(0)) # target\n sli = self.compile(expr.pop(0)) # slice\n\n return ast.Subscript(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n value=val,\n slice=ast.Index(value=sli),\n ctx=ast.Load())\n\n @builds(\"slice\")\n @checkargs(min=1, max=3)\n def compile_slice_expression(self, expr):\n expr.pop(0) # index\n val = self.compile(expr.pop(0)) # target\n\n low = None\n if expr != []:\n low = self.compile(expr.pop(0))\n\n high = None\n if expr != []:\n high = self.compile(expr.pop(0))\n\n return ast.Subscript(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n value=val,\n slice=ast.Slice(lower=low,\n upper=high,\n step=None),\n ctx=ast.Load())\n\n @builds(\"assoc\")\n @checkargs(3)\n def compile_assoc_expression(self, expr):\n expr.pop(0) # assoc\n # (assoc foo 
bar baz) => foo[bar] = baz\n target = expr.pop(0)\n key = expr.pop(0)\n val = expr.pop(0)\n\n return ast.Assign(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n targets=[\n ast.Subscript(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n value=self.compile(target),\n slice=ast.Index(value=self.compile(key)),\n ctx=ast.Store())],\n value=self.compile(val))\n\n @builds(\"decorate_with\")\n @checkargs(min=1)\n def compile_decorate_expression(self, expr):\n expr.pop(0) # decorate-with\n fn = self.compile(expr.pop(-1))\n if type(fn) != ast.FunctionDef:\n raise TypeError(\"Decorated a non-function\")\n fn.decorator_list = [self.compile(x) for x in expr]\n return fn\n\n @builds(\"with\")\n @checkargs(min=2)\n def compile_with_expression(self, expr):\n expr.pop(0) # with\n\n args = expr.pop(0)\n if len(args) > 2 or len(args) < 1:\n raise TypeError(\"with needs [arg (expr)] or [(expr)]\")\n\n args.reverse()\n ctx = self.compile(args.pop(0))\n\n thing = None\n if args != []:\n thing = self._storeize(self.compile(args.pop(0)))\n\n ret = ast.With(context_expr=ctx,\n lineno=expr.start_line,\n col_offset=expr.start_column,\n optional_vars=thing,\n body=self._code_branch(\n [self.compile(x) for x in expr],\n expr.start_line,\n expr.start_column))\n\n if sys.version_info[0] >= 3 and sys.version_info[1] >= 3:\n ret.items = [ast.withitem(context_expr=ctx, optional_vars=thing)]\n\n return ret\n\n @builds(\",\")\n def compile_tuple(self, expr):\n expr.pop(0)\n return ast.Tuple(elts=[self.compile(x) for x in expr],\n lineno=expr.start_line,\n col_offset=expr.start_column,\n ctx=ast.Load())\n\n @builds(\"list_comp\")\n @checkargs(min=2, max=3)\n def compile_list_comprehension(self, expr):\n # (list-comp expr (target iter) cond?)\n expr.pop(0)\n expression = expr.pop(0)\n tar_it = iter(expr.pop(0))\n targets = zip(tar_it, tar_it)\n\n cond = self.compile(expr.pop(0)) if expr != [] else None\n\n ret = ast.ListComp(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n elt=self.compile(expression),\n generators=[])\n\n for target, iterable in targets:\n ret.generators.append(ast.comprehension(\n target=self._storeize(self.compile(target)),\n iter=self.compile(iterable),\n ifs=[]))\n\n if cond:\n ret.generators[-1].ifs.append(cond)\n\n return ret\n\n def _storeize(self, name):\n if isinstance(name, ast.Tuple):\n for x in name.elts:\n x.ctx = ast.Store()\n name.ctx = ast.Store()\n return name\n\n @builds(\"kwapply\")\n @checkargs(2)\n def compile_kwapply_expression(self, expr):\n expr.pop(0) # kwapply\n call = self.compile(expr.pop(0))\n kwargs = expr.pop(0)\n\n if type(call) != ast.Call:\n raise TypeError(\"kwapplying a non-call\")\n\n call.keywords = [ast.keyword(arg=ast_str(x),\n value=self.compile(kwargs[x])) for x in kwargs]\n\n return call\n\n @builds(\"not\")\n @builds(\"~\")\n @checkargs(1)\n def compile_unary_operator(self, expression):\n ops = {\"not\": ast.Not,\n \"~\": ast.Invert}\n operator = expression.pop(0)\n operand = expression.pop(0)\n return ast.UnaryOp(op=ops[operator](),\n operand=self.compile(operand),\n lineno=operator.start_line,\n col_offset=operator.start_column)\n\n @builds(\"and\")\n @builds(\"or\")\n @checkargs(min=2)\n def compile_logical_or_and_and_operator(self, expression):\n ops = {\"and\": ast.And,\n \"or\": ast.Or}\n operator = expression.pop(0)\n values = []\n for child in expression:\n values.append(self.compile(child))\n return ast.BoolOp(op=ops[operator](),\n lineno=operator.start_line,\n col_offset=operator.start_column,\n values=values)\n\n 
@builds(\"=\")\n @builds(\"!=\")\n @builds(\"<\")\n @builds(\"<=\")\n @builds(\">\")\n @builds(\">=\")\n @builds(\"is\")\n @builds(\"in\")\n @builds(\"is_not\")\n @builds(\"not_in\")\n @checkargs(min=2)\n def compile_compare_op_expression(self, expression):\n ops = {\"=\": ast.Eq, \"!=\": ast.NotEq,\n \"<\": ast.Lt, \"<=\": ast.LtE,\n \">\": ast.Gt, \">=\": ast.GtE,\n \"is\": ast.Is, \"is_not\": ast.IsNot,\n \"in\": ast.In, \"not_in\": ast.NotIn}\n\n inv = expression.pop(0)\n op = ops[inv]\n ops = [op() for x in range(1, len(expression))]\n e = expression.pop(0)\n\n return ast.Compare(left=self.compile(e),\n ops=ops,\n comparators=[self.compile(x) for x in expression],\n lineno=e.start_line,\n col_offset=e.start_column)\n\n @builds(\"+\")\n @builds(\"%\")\n @builds(\"-\")\n @builds(\"/\")\n @builds(\"*\")\n @checkargs(min=2)\n def compile_maths_expression(self, expression):\n # operator = Mod | Pow | LShift | RShift | BitOr |\n # BitXor | BitAnd | FloorDiv\n # (to implement list) XXX\n\n ops = {\"+\": ast.Add,\n \"/\": ast.Div,\n \"*\": ast.Mult,\n \"-\": ast.Sub,\n \"%\": ast.Mod}\n\n inv = expression.pop(0)\n op = ops[inv]\n\n left = self.compile(expression.pop(0))\n calc = None\n for child in expression:\n calc = ast.BinOp(left=left,\n op=op(),\n right=self.compile(child),\n lineno=child.start_line,\n col_offset=child.start_column)\n left = calc\n return calc\n\n def compile_dotted_expression(self, expr):\n ofn = expr.pop(0) # .join\n\n fn = HySymbol(ofn[1:])\n fn.replace(ofn)\n\n obj = expr.pop(0) # [1 2 3 4]\n\n return ast.Call(\n func=ast.Attribute(\n lineno=expr.start_line,\n col_offset=expr.start_column,\n value=self.compile(obj),\n attr=ast_str(fn),\n ctx=ast.Load()),\n args=[self.compile(x) for x in expr],\n keywords=[],\n lineno=expr.start_line,\n col_offset=expr.start_column,\n starargs=None,\n kwargs=None)\n\n @builds(HyExpression)\n def compile_expression(self, expression):\n fn = expression[0]\n if isinstance(fn, HyString):\n if fn in _compile_table:\n return _compile_table[fn](self, expression)\n\n if expression[0].startswith(\".\"):\n return self.compile_dotted_expression(expression)\n\n return ast.Call(func=self.compile(fn),\n args=[self.compile(x) for x in expression[1:]],\n keywords=[],\n starargs=None,\n kwargs=None,\n lineno=expression.start_line,\n col_offset=expression.start_column)\n\n @builds(\"def\")\n @builds(\"setf\")\n @builds(\"setv\")\n @checkargs(2)\n def compile_def_expression(self, expression):\n expression.pop(0) # \"def\"\n name = expression.pop(0)\n\n what = self.compile(expression.pop(0))\n\n if type(what) == ast.FunctionDef:\n # We special case a FunctionDef, since we can define by setting\n # FunctionDef's .name attribute, rather then foo == anon_fn. 
This\n # helps keep things clean.\n what.name = ast_str(name)\n return what\n\n name = self._storeize(self.compile(name))\n\n return ast.Assign(\n lineno=expression.start_line,\n col_offset=expression.start_column,\n targets=[name], value=what)\n\n @builds(\"foreach\")\n @checkargs(min=1)\n def compile_for_expression(self, expression):\n ret_status = self.returnable\n self.returnable = False\n\n expression.pop(0) # for\n name, iterable = expression.pop(0)\n target = self._storeize(self.compile_symbol(name))\n\n ret = ast.For(lineno=expression.start_line,\n col_offset=expression.start_column,\n target=target,\n iter=self.compile(iterable),\n body=self._code_branch(\n [self.compile(x) for x in expression],\n expression.start_line,\n expression.start_column),\n orelse=[])\n\n self.returnable = ret_status\n return ret\n\n @builds(\"while\")\n @checkargs(min=2)\n def compile_while_expression(self, expr):\n expr.pop(0) # \"while\"\n test = self.compile(expr.pop(0))\n\n return ast.While(test=test,\n body=self._code_branch(\n [self.compile(x) for x in expr],\n expr.start_line,\n expr.start_column),\n orelse=[],\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n @builds(HyList)\n def compile_list(self, expr):\n return ast.List(\n elts=[self.compile(x) for x in expr],\n ctx=ast.Load(),\n lineno=expr.start_line,\n col_offset=expr.start_column)\n\n @builds(\"fn\")\n @checkargs(min=2)\n def compile_fn_expression(self, expression):\n expression.pop(0) # fn\n\n ret_status = self.returnable\n\n self.anon_fn_count += 1\n name = \"_hy_anon_fn_%d\" % (self.anon_fn_count)\n sig = expression.pop(0)\n\n body = []\n if expression != []:\n self.returnable = True\n tailop = self.compile(expression.pop(-1))\n self.returnable = False\n for el in expression:\n body.append(self.compile(el))\n body.append(tailop)\n\n self.returnable = True\n body = self._code_branch(body,\n expression.start_line,\n expression.start_column)\n\n ret = ast.FunctionDef(\n name=name,\n lineno=expression.start_line,\n col_offset=expression.start_column,\n args=ast.arguments(\n args=[\n ast.Name(\n arg=ast_str(x), id=ast_str(x),\n ctx=ast.Param(),\n lineno=x.start_line,\n col_offset=x.start_column)\n for x in sig],\n vararg=None,\n kwarg=None,\n kwonlyargs=[],\n kw_defaults=[],\n defaults=[]),\n body=body,\n decorator_list=[])\n\n self.returnable = ret_status\n return ret\n\n @builds(HyInteger)\n def compile_number(self, number):\n return ast.Num(n=int(number), # See HyInteger above.\n lineno=number.start_line,\n col_offset=number.start_column)\n\n @builds(HySymbol)\n def compile_symbol(self, symbol):\n if \".\" in symbol:\n glob, local = symbol.rsplit(\".\", 1)\n glob = HySymbol(glob)\n glob.replace(symbol)\n\n return ast.Attribute(\n lineno=symbol.start_line,\n col_offset=symbol.start_column,\n value=self.compile_symbol(glob),\n attr=ast_str(local),\n ctx=ast.Load()\n )\n\n return ast.Name(id=ast_str(symbol),\n arg=ast_str(symbol),\n ctx=ast.Load(),\n lineno=symbol.start_line,\n col_offset=symbol.start_column)\n\n @builds(HyString)\n def compile_string(self, string):\n return ast.Str(s=ast_str(string), lineno=string.start_line,\n col_offset=string.start_column)\n\n @builds(HyDict)\n def compile_dict(self, m):\n keys = []\n vals = []\n for entry in m:\n keys.append(self.compile(entry))\n vals.append(self.compile(m[entry]))\n\n return ast.Dict(\n lineno=m.start_line,\n col_offset=m.start_column,\n keys=keys,\n values=vals)\n\n\ndef hy_compile(tree, root=None):\n \" Compile a HyObject tree into a Python AST tree. 
\"\n compiler = HyASTCompiler()\n tlo = root\n if root is None:\n tlo = ast.Module\n ret = tlo(body=compiler._mangle_branch(compiler.compile(tree), 0, 0))\n return ret\n", "path": "hy/compiler.py"}]} |
gh_patches_debug_1606 | rasdani/github-patches | git_diff | zulip__zulip-14678 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
AttributeError: 'LogRecord' object has no attribute 'status_code'
I have a development environment with the latest Git version. After performing many requests, I get blocked because of rate limiting. Then, the following error is logged in the console:
```
----------------------------------------
Exception happened during processing of request from ('127.0.0.1', 56444)
2020-04-16 11:35:49.159 INFO [zr] 127.0.0.1 POST 429 65ms (mem: 57ms/4) (+start: 24ms) /json/messages (10@zulip via website)
2020-04-16 11:35:49.160 INFO [zr] status=429, data=b'{"result":"error","msg":"API usage exceeded rate limit","retry-after":2.6131470203}\n', uid=10@zulip
2020-04-16 11:35:49.162 INFO [zr] 127.0.0.1 POST 429 11ms (mem: 7ms/2) /json/messages (10@zulip via website)
2020-04-16 11:35:49.162 WARN [django.server] "POST /json/messages HTTP/1.1" 429 84
2020-04-16 11:35:49.173 INFO [zr] status=429, data=b'{"result":"error","msg":"API usage exceeded rate limit","retry-after":2.6109778881}\n', uid=10@zulip
2020-04-16 11:35:49.179 INFO [zr] 127.0.0.1 POST 429 20ms (+start: 51ms) /json/messages (10@zulip via website)
2020-04-16 11:35:49.182 WARN [django.server] "POST /json/messages HTTP/1.1" 429 84
2020-04-16 11:35:49.195 INFO [zr] status=429, data=b'{"result":"error","msg":"API usage exceeded rate limit","retry-after":2.5940015316}\n', uid=10@zulip
Traceback (most recent call last):
File "/usr/lib/python3.7/socketserver.py", line 650, in process_request_thread
self.finish_request(request, client_address)
File "/usr/lib/python3.7/socketserver.py", line 360, in finish_request
self.RequestHandlerClass(request, client_address, self)
File "/usr/lib/python3.7/socketserver.py", line 720, in __init__
self.handle()
File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py", line 171, in handle
self.handle_one_request()
File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py", line 187, in handle_one_request
if not self.parse_request(): # An error code has been sent, just exit
File "/usr/lib/python3.7/http/server.py", line 322, in parse_request
"Bad request syntax (%r)" % requestline)
File "/usr/lib/python3.7/http/server.py", line 456, in send_error
self.log_error("code %d, message %s", code, message)
File "/usr/lib/python3.7/http/server.py", line 558, in log_error
self.log_message(format, *args)
File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py", line 154, in log_message
level(format, *args, extra=extra)
File "/usr/lib/python3.7/logging/__init__.py", line 1383, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/lib/python3.7/logging/__init__.py", line 1519, in _log
self.handle(record)
File "/usr/lib/python3.7/logging/__init__.py", line 1528, in handle
if (not self.disabled) and self.filter(record):
File "/usr/lib/python3.7/logging/__init__.py", line 762, in filter
result = f.filter(record)
File "/srv/zulip-py3-venv/lib/python3.7/site-packages/django/utils/log.py", line 147, in filter
if self.callback(record):
File "/home/sjoerd/zulip/zerver/lib/logging_util.py", line 122, in skip_200_and_304
if getattr(record, 'status_code') in [200, 304]:
AttributeError: 'LogRecord' object has no attribute 'status_code'
----------------------------------------
```
Normally, [http.server logs request, status code, size](https://github.com/python/cpython/blob/master/Lib/http/server.py#L544-L545), and [Django extracts the status code from that](https://github.com/django/django/blob/master/django/core/servers/basehttp.py#L144-L157). However, [on errors http.server logs code and message](https://github.com/python/cpython/blob/master/Lib/http/server.py#L457) and Django doesn't extract the status code.
Parsing arguments to log messages seems pretty fragile to me, so maybe it's better to accept that there isn't always a status code on a log record. Making `getattr` default to `None` in [`skip_200_and_304`](https://github.com/zulip/zulip/blob/master/zerver/lib/logging_util.py#L122) is probably the best option.
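A minimal sketch of that option, assuming the stock `logging.LogRecord` constructor (the record below mimics `http.server`'s error-path log call from the traceback, not Django's internals):

```python
import logging

def skip_200_and_304(record: logging.LogRecord) -> bool:
    # Defaulting to None lets records that never had a status_code
    # attribute (e.g. http.server's error path) pass the filter.
    return getattr(record, 'status_code', None) not in [200, 304]

# send_error() logs "code %d, message %s" without attaching status_code:
record = logging.LogRecord('django.server', logging.ERROR, __file__, 0,
                           'code %d, message %s',
                           (400, 'Bad request syntax'), None)
assert skip_200_and_304(record)  # no AttributeError
```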
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `zerver/lib/logging_util.py`
Content:
```
1 # System documented in https://zulip.readthedocs.io/en/latest/subsystems/logging.html
2
3 from django.utils.timezone import now as timezone_now
4 from django.utils.timezone import utc as timezone_utc
5
6 import hashlib
7 import logging
8 import threading
9 import traceback
10 from typing import Optional, Tuple
11 from datetime import datetime, timedelta
12 from django.conf import settings
13 from django.core.cache import cache
14 from logging import Logger
15
16 class _RateLimitFilter:
17 """This class is designed to rate-limit Django error reporting
18 notifications so that it won't send thousands of emails if the
19 database or cache is completely down. It uses a remote shared
20 cache (shared by all Django processes) for its default behavior
21 (so that the deduplication is global, not per-process), and a
22 local in-process cache for when it can't access the remote cache.
23
24 This is critical code because it is called every time
25 `logging.error` or `logging.exception` (or an exception) happens
26 in the codebase.
27
28 Adapted from https://djangosnippets.org/snippets/2242/.
29
30 """
31 last_error = datetime.min.replace(tzinfo=timezone_utc)
32 # This thread-local variable is used to detect recursive
33 # exceptions during exception handling (primarily intended for
34 # when accessing the shared cache throws an exception).
35 handling_exception = threading.local()
36 should_reset_handling_exception = False
37
38 def can_use_remote_cache(self) -> Tuple[bool, bool]:
39 if getattr(self.handling_exception, 'value', False):
40 # If we're processing an exception that occurred
41 # while handling an exception, this almost
42 # certainly was because interacting with the
43 # remote cache is failing (e.g. because the cache
44 # is down). Fall back to tracking duplicate
45 # exceptions in memory without the remote shared cache.
46 return False, False
47
48 # Now we test if the remote cache is accessible.
49 #
50 # This code path can only be reached if we are not potentially
51 # handling a recursive exception, so here we set
52 # self.handling_exception (in case the cache access we're
53 # about to do triggers a `logging.error` or exception that
54 # might recurse into this filter class), and actually record
55 # that this is the main exception handler thread.
56 try:
57 self.handling_exception.value = True
58 cache.set('RLF_TEST_KEY', 1, 1)
59 return cache.get('RLF_TEST_KEY') == 1, True
60 except Exception:
61 return False, True
62
63 def filter(self, record: logging.LogRecord) -> bool:
64 # When the original filter() call finishes executing, it's
65 # going to change handling_exception.value to False. The
66 # local variable below tracks whether the *current*,
67 # potentially recursive, filter() call is allowed to touch
68 # that value (only the original will find this to be True
69 # at the end of its execution)
70 should_reset_handling_exception = False
71 try:
72 # Track duplicate errors
73 duplicate = False
74 rate = getattr(settings, '%s_LIMIT' % (self.__class__.__name__.upper(),),
75 600) # seconds
76
77 if rate > 0:
78 (use_cache, should_reset_handling_exception) = self.can_use_remote_cache()
79 if use_cache:
80 if record.exc_info is not None:
81 tb = '\n'.join(traceback.format_exception(*record.exc_info))
82 else:
83 tb = str(record)
84 key = self.__class__.__name__.upper() + hashlib.sha1(tb.encode()).hexdigest()
85 duplicate = cache.get(key) == 1
86 if not duplicate:
87 cache.set(key, 1, rate)
88 else:
89 min_date = timezone_now() - timedelta(seconds=rate)
90 duplicate = (self.last_error >= min_date)
91 if not duplicate:
92 self.last_error = timezone_now()
93
94 return not duplicate
95 finally:
96 if should_reset_handling_exception:
97 self.handling_exception.value = False
98
99 class ZulipLimiter(_RateLimitFilter):
100 pass
101
102 class EmailLimiter(_RateLimitFilter):
103 pass
104
105 class ReturnTrue(logging.Filter):
106 def filter(self, record: logging.LogRecord) -> bool:
107 return True
108
109 class ReturnEnabled(logging.Filter):
110 def filter(self, record: logging.LogRecord) -> bool:
111 return settings.LOGGING_ENABLED
112
113 class RequireReallyDeployed(logging.Filter):
114 def filter(self, record: logging.LogRecord) -> bool:
115 from django.conf import settings
116 return settings.PRODUCTION
117
118 def skip_200_and_304(record: logging.LogRecord) -> bool:
119 # Apparently, `status_code` is added by Django and is not an actual
120 # attribute of LogRecord; as a result, mypy throws an error if we
121 # access the `status_code` attribute directly.
122 if getattr(record, 'status_code') in [200, 304]:
123 return False
124
125 return True
126
127 def skip_site_packages_logs(record: logging.LogRecord) -> bool:
128 # This skips the log records that are generated from libraries
129 # installed in site packages.
130 # Workaround for https://code.djangoproject.com/ticket/26886
131 if 'site-packages' in record.pathname:
132 return False
133 return True
134
135 def find_log_caller_module(record: logging.LogRecord) -> Optional[str]:
136 '''Find the module name corresponding to where this record was logged.
137
138 Sadly `record.module` is just the innermost component of the full
139 module name, so we have to go reconstruct this ourselves.
140 '''
141 # Repeat a search similar to that in logging.Logger.findCaller.
142 # The logging call should still be on the stack somewhere; search until
143 # we find something in the same source file, and that should give the
144 # right module name.
145 f = logging.currentframe()
146 while True:
147 if f.f_code.co_filename == record.pathname:
148 return f.f_globals.get('__name__')
149 if f.f_back is None:
150 return None
151 f = f.f_back
152
153 logger_nicknames = {
154 'root': '', # This one is more like undoing a nickname.
155 'zulip.requests': 'zr', # Super common.
156 }
157
158 def find_log_origin(record: logging.LogRecord) -> str:
159 logger_name = logger_nicknames.get(record.name, record.name)
160
161 if settings.LOGGING_SHOW_MODULE:
162 module_name = find_log_caller_module(record)
163 if module_name == logger_name or module_name == record.name:
164 # Abbreviate a bit.
165 pass
166 else:
167 logger_name = '{}/{}'.format(logger_name, module_name or '?')
168
169 if settings.RUNNING_INSIDE_TORNADO:
170 # In multi-sharded Tornado, it's often valuable to have which shard is
171 # responsible for the request in the logs.
172 from zerver.tornado.ioloop_logging import logging_data
173 shard = logging_data.get('port', 'unknown')
174 logger_name = "{}:{}".format(logger_name, shard)
175
176 return logger_name
177
178 log_level_abbrevs = {
179 'DEBUG': 'DEBG',
180 'INFO': 'INFO',
181 'WARNING': 'WARN',
182 'ERROR': 'ERR',
183 'CRITICAL': 'CRIT',
184 }
185
186 def abbrev_log_levelname(levelname: str) -> str:
187 # It's unlikely someone will set a custom log level with a custom name,
188 # but it's an option, so we shouldn't crash if someone does.
189 return log_level_abbrevs.get(levelname, levelname[:4])
190
191 class ZulipFormatter(logging.Formatter):
192 # Used in the base implementation. Default uses `,`.
193 default_msec_format = '%s.%03d'
194
195 def __init__(self) -> None:
196 super().__init__(fmt=self._compute_fmt())
197
198 def _compute_fmt(self) -> str:
199 pieces = ['%(asctime)s', '%(zulip_level_abbrev)-4s']
200 if settings.LOGGING_SHOW_PID:
201 pieces.append('pid:%(process)d')
202 pieces.extend(['[%(zulip_origin)s]', '%(message)s'])
203 return ' '.join(pieces)
204
205 def format(self, record: logging.LogRecord) -> str:
206 if not getattr(record, 'zulip_decorated', False):
207 # The `setattr` calls put this logic explicitly outside the bounds of the
208 # type system; otherwise mypy would complain LogRecord lacks these attributes.
209 setattr(record, 'zulip_level_abbrev', abbrev_log_levelname(record.levelname))
210 setattr(record, 'zulip_origin', find_log_origin(record))
211 setattr(record, 'zulip_decorated', True)
212 return super().format(record)
213
214 def log_to_file(logger: Logger,
215 filename: str,
216 log_format: str="%(asctime)s %(levelname)-8s %(message)s",
217 ) -> None:
218 """Note: `filename` should be declared in zproject/settings.py with zulip_path."""
219 formatter = logging.Formatter(log_format)
220 handler = logging.FileHandler(filename)
221 handler.setFormatter(formatter)
222 logger.addHandler(handler)
223
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/zerver/lib/logging_util.py b/zerver/lib/logging_util.py
--- a/zerver/lib/logging_util.py
+++ b/zerver/lib/logging_util.py
@@ -119,7 +119,7 @@
# Apparently, `status_code` is added by Django and is not an actual
# attribute of LogRecord; as a result, mypy throws an error if we
# access the `status_code` attribute directly.
- if getattr(record, 'status_code') in [200, 304]:
+ if getattr(record, 'status_code', None) in [200, 304]:
return False
return True
| {"golden_diff": "diff --git a/zerver/lib/logging_util.py b/zerver/lib/logging_util.py\n--- a/zerver/lib/logging_util.py\n+++ b/zerver/lib/logging_util.py\n@@ -119,7 +119,7 @@\n # Apparently, `status_code` is added by Django and is not an actual\n # attribute of LogRecord; as a result, mypy throws an error if we\n # access the `status_code` attribute directly.\n- if getattr(record, 'status_code') in [200, 304]:\n+ if getattr(record, 'status_code', None) in [200, 304]:\n return False\n \n return True\n", "issue": "AttributeError: 'LogRecord' object has no attribute 'status_code'\nI have a development environment with the latest Git version. After performing many requests, I get blocked because of rate limiting. Then, the following error is logged in the console:\r\n\r\n```\r\n----------------------------------------\r\nException happened during processing of request from ('127.0.0.1', 56444)\r\n2020-04-16 11:35:49.159 INFO [zr] 127.0.0.1 POST 429 65ms (mem: 57ms/4) (+start: 24ms) /json/messages (10@zulip via website)\r\n2020-04-16 11:35:49.160 INFO [zr] status=429, data=b'{\"result\":\"error\",\"msg\":\"API usage exceeded rate limit\",\"retry-after\":2.6131470203}\\n', uid=10@zulip\r\n2020-04-16 11:35:49.162 INFO [zr] 127.0.0.1 POST 429 11ms (mem: 7ms/2) /json/messages (10@zulip via website)\r\n2020-04-16 11:35:49.162 WARN [django.server] \"POST /json/messages HTTP/1.1\" 429 84\r\n2020-04-16 11:35:49.173 INFO [zr] status=429, data=b'{\"result\":\"error\",\"msg\":\"API usage exceeded rate limit\",\"retry-after\":2.6109778881}\\n', uid=10@zulip\r\n2020-04-16 11:35:49.179 INFO [zr] 127.0.0.1 POST 429 20ms (+start: 51ms) /json/messages (10@zulip via website)\r\n2020-04-16 11:35:49.182 WARN [django.server] \"POST /json/messages HTTP/1.1\" 429 84\r\n2020-04-16 11:35:49.195 INFO [zr] status=429, data=b'{\"result\":\"error\",\"msg\":\"API usage exceeded rate limit\",\"retry-after\":2.5940015316}\\n', uid=10@zulip\r\nTraceback (most recent call last):\r\n File \"/usr/lib/python3.7/socketserver.py\", line 650, in process_request_thread\r\n self.finish_request(request, client_address)\r\n File \"/usr/lib/python3.7/socketserver.py\", line 360, in finish_request\r\n self.RequestHandlerClass(request, client_address, self)\r\n File \"/usr/lib/python3.7/socketserver.py\", line 720, in __init__\r\n self.handle()\r\n File \"/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py\", line 171, in handle\r\n self.handle_one_request()\r\n File \"/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py\", line 187, in handle_one_request\r\n if not self.parse_request(): # An error code has been sent, just exit\r\n File \"/usr/lib/python3.7/http/server.py\", line 322, in parse_request\r\n \"Bad request syntax (%r)\" % requestline)\r\n File \"/usr/lib/python3.7/http/server.py\", line 456, in send_error\r\n self.log_error(\"code %d, message %s\", code, message)\r\n File \"/usr/lib/python3.7/http/server.py\", line 558, in log_error\r\n self.log_message(format, *args)\r\n File \"/srv/zulip-py3-venv/lib/python3.7/site-packages/django/core/servers/basehttp.py\", line 154, in log_message\r\n level(format, *args, extra=extra)\r\n File \"/usr/lib/python3.7/logging/__init__.py\", line 1383, in info\r\n self._log(INFO, msg, args, **kwargs)\r\n File \"/usr/lib/python3.7/logging/__init__.py\", line 1519, in _log\r\n self.handle(record)\r\n File \"/usr/lib/python3.7/logging/__init__.py\", line 1528, in handle\r\n if (not self.disabled) and self.filter(record):\r\n File 
\"/usr/lib/python3.7/logging/__init__.py\", line 762, in filter\r\n result = f.filter(record)\r\n File \"/srv/zulip-py3-venv/lib/python3.7/site-packages/django/utils/log.py\", line 147, in filter\r\n if self.callback(record):\r\n File \"/home/sjoerd/zulip/zerver/lib/logging_util.py\", line 122, in skip_200_and_304\r\n if getattr(record, 'status_code') in [200, 304]:\r\nAttributeError: 'LogRecord' object has no attribute 'status_code'\r\n----------------------------------------\r\n```\r\n\r\nNormally, [http.server logs request, status code, size](https://github.com/python/cpython/blob/master/Lib/http/server.py#L544-L545), and [Django extracts the status code from that](https://github.com/django/django/blob/master/django/core/servers/basehttp.py#L144-L157). However, [on errors http.server logs code and message](https://github.com/python/cpython/blob/master/Lib/http/server.py#L457) and Django doesn't extract the status code.\r\n\r\nParsing arguments to log messages seems pretty fragile to me, so maybe it's better to accept that there isn't always a status code on a log record. Making `getattr` default to `None` in [`skip_200_and_304`](https://github.com/zulip/zulip/blob/master/zerver/lib/logging_util.py#L122) is probably the best option.\r\n\r\n\n", "before_files": [{"content": "# System documented in https://zulip.readthedocs.io/en/latest/subsystems/logging.html\n\nfrom django.utils.timezone import now as timezone_now\nfrom django.utils.timezone import utc as timezone_utc\n\nimport hashlib\nimport logging\nimport threading\nimport traceback\nfrom typing import Optional, Tuple\nfrom datetime import datetime, timedelta\nfrom django.conf import settings\nfrom django.core.cache import cache\nfrom logging import Logger\n\nclass _RateLimitFilter:\n \"\"\"This class is designed to rate-limit Django error reporting\n notifications so that it won't send thousands of emails if the\n database or cache is completely down. It uses a remote shared\n cache (shared by all Django processes) for its default behavior\n (so that the deduplication is global, not per-process), and a\n local in-process cache for when it can't access the remote cache.\n\n This is critical code because it is called every time\n `logging.error` or `logging.exception` (or an exception) happens\n in the codebase.\n\n Adapted from https://djangosnippets.org/snippets/2242/.\n\n \"\"\"\n last_error = datetime.min.replace(tzinfo=timezone_utc)\n # This thread-local variable is used to detect recursive\n # exceptions during exception handling (primarily intended for\n # when accessing the shared cache throws an exception).\n handling_exception = threading.local()\n should_reset_handling_exception = False\n\n def can_use_remote_cache(self) -> Tuple[bool, bool]:\n if getattr(self.handling_exception, 'value', False):\n # If we're processing an exception that occurred\n # while handling an exception, this almost\n # certainly was because interacting with the\n # remote cache is failing (e.g. because the cache\n # is down). 
Fall back to tracking duplicate\n # exceptions in memory without the remote shared cache.\n return False, False\n\n # Now we test if the remote cache is accessible.\n #\n # This code path can only be reached if we are not potentially\n # handling a recursive exception, so here we set\n # self.handling_exception (in case the cache access we're\n # about to do triggers a `logging.error` or exception that\n # might recurse into this filter class), and actually record\n # that this is the main exception handler thread.\n try:\n self.handling_exception.value = True\n cache.set('RLF_TEST_KEY', 1, 1)\n return cache.get('RLF_TEST_KEY') == 1, True\n except Exception:\n return False, True\n\n def filter(self, record: logging.LogRecord) -> bool:\n # When the original filter() call finishes executing, it's\n # going to change handling_exception.value to False. The\n # local variable below tracks whether the *current*,\n # potentially recursive, filter() call is allowed to touch\n # that value (only the original will find this to be True\n # at the end of its execution)\n should_reset_handling_exception = False\n try:\n # Track duplicate errors\n duplicate = False\n rate = getattr(settings, '%s_LIMIT' % (self.__class__.__name__.upper(),),\n 600) # seconds\n\n if rate > 0:\n (use_cache, should_reset_handling_exception) = self.can_use_remote_cache()\n if use_cache:\n if record.exc_info is not None:\n tb = '\\n'.join(traceback.format_exception(*record.exc_info))\n else:\n tb = str(record)\n key = self.__class__.__name__.upper() + hashlib.sha1(tb.encode()).hexdigest()\n duplicate = cache.get(key) == 1\n if not duplicate:\n cache.set(key, 1, rate)\n else:\n min_date = timezone_now() - timedelta(seconds=rate)\n duplicate = (self.last_error >= min_date)\n if not duplicate:\n self.last_error = timezone_now()\n\n return not duplicate\n finally:\n if should_reset_handling_exception:\n self.handling_exception.value = False\n\nclass ZulipLimiter(_RateLimitFilter):\n pass\n\nclass EmailLimiter(_RateLimitFilter):\n pass\n\nclass ReturnTrue(logging.Filter):\n def filter(self, record: logging.LogRecord) -> bool:\n return True\n\nclass ReturnEnabled(logging.Filter):\n def filter(self, record: logging.LogRecord) -> bool:\n return settings.LOGGING_ENABLED\n\nclass RequireReallyDeployed(logging.Filter):\n def filter(self, record: logging.LogRecord) -> bool:\n from django.conf import settings\n return settings.PRODUCTION\n\ndef skip_200_and_304(record: logging.LogRecord) -> bool:\n # Apparently, `status_code` is added by Django and is not an actual\n # attribute of LogRecord; as a result, mypy throws an error if we\n # access the `status_code` attribute directly.\n if getattr(record, 'status_code') in [200, 304]:\n return False\n\n return True\n\ndef skip_site_packages_logs(record: logging.LogRecord) -> bool:\n # This skips the log records that are generated from libraries\n # installed in site packages.\n # Workaround for https://code.djangoproject.com/ticket/26886\n if 'site-packages' in record.pathname:\n return False\n return True\n\ndef find_log_caller_module(record: logging.LogRecord) -> Optional[str]:\n '''Find the module name corresponding to where this record was logged.\n\n Sadly `record.module` is just the innermost component of the full\n module name, so we have to go reconstruct this ourselves.\n '''\n # Repeat a search similar to that in logging.Logger.findCaller.\n # The logging call should still be on the stack somewhere; search until\n # we find something in the same source file, and that should give the\n 
# right module name.\n f = logging.currentframe()\n while True:\n if f.f_code.co_filename == record.pathname:\n return f.f_globals.get('__name__')\n if f.f_back is None:\n return None\n f = f.f_back\n\nlogger_nicknames = {\n 'root': '', # This one is more like undoing a nickname.\n 'zulip.requests': 'zr', # Super common.\n}\n\ndef find_log_origin(record: logging.LogRecord) -> str:\n logger_name = logger_nicknames.get(record.name, record.name)\n\n if settings.LOGGING_SHOW_MODULE:\n module_name = find_log_caller_module(record)\n if module_name == logger_name or module_name == record.name:\n # Abbreviate a bit.\n pass\n else:\n logger_name = '{}/{}'.format(logger_name, module_name or '?')\n\n if settings.RUNNING_INSIDE_TORNADO:\n # In multi-sharded Tornado, it's often valuable to have which shard is\n # responsible for the request in the logs.\n from zerver.tornado.ioloop_logging import logging_data\n shard = logging_data.get('port', 'unknown')\n logger_name = \"{}:{}\".format(logger_name, shard)\n\n return logger_name\n\nlog_level_abbrevs = {\n 'DEBUG': 'DEBG',\n 'INFO': 'INFO',\n 'WARNING': 'WARN',\n 'ERROR': 'ERR',\n 'CRITICAL': 'CRIT',\n}\n\ndef abbrev_log_levelname(levelname: str) -> str:\n # It's unlikely someone will set a custom log level with a custom name,\n # but it's an option, so we shouldn't crash if someone does.\n return log_level_abbrevs.get(levelname, levelname[:4])\n\nclass ZulipFormatter(logging.Formatter):\n # Used in the base implementation. Default uses `,`.\n default_msec_format = '%s.%03d'\n\n def __init__(self) -> None:\n super().__init__(fmt=self._compute_fmt())\n\n def _compute_fmt(self) -> str:\n pieces = ['%(asctime)s', '%(zulip_level_abbrev)-4s']\n if settings.LOGGING_SHOW_PID:\n pieces.append('pid:%(process)d')\n pieces.extend(['[%(zulip_origin)s]', '%(message)s'])\n return ' '.join(pieces)\n\n def format(self, record: logging.LogRecord) -> str:\n if not getattr(record, 'zulip_decorated', False):\n # The `setattr` calls put this logic explicitly outside the bounds of the\n # type system; otherwise mypy would complain LogRecord lacks these attributes.\n setattr(record, 'zulip_level_abbrev', abbrev_log_levelname(record.levelname))\n setattr(record, 'zulip_origin', find_log_origin(record))\n setattr(record, 'zulip_decorated', True)\n return super().format(record)\n\ndef log_to_file(logger: Logger,\n filename: str,\n log_format: str=\"%(asctime)s %(levelname)-8s %(message)s\",\n ) -> None:\n \"\"\"Note: `filename` should be declared in zproject/settings.py with zulip_path.\"\"\"\n formatter = logging.Formatter(log_format)\n handler = logging.FileHandler(filename)\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n", "path": "zerver/lib/logging_util.py"}], "after_files": [{"content": "# System documented in https://zulip.readthedocs.io/en/latest/subsystems/logging.html\n\nfrom django.utils.timezone import now as timezone_now\nfrom django.utils.timezone import utc as timezone_utc\n\nimport hashlib\nimport logging\nimport threading\nimport traceback\nfrom typing import Optional, Tuple\nfrom datetime import datetime, timedelta\nfrom django.conf import settings\nfrom django.core.cache import cache\nfrom logging import Logger\n\nclass _RateLimitFilter:\n \"\"\"This class is designed to rate-limit Django error reporting\n notifications so that it won't send thousands of emails if the\n database or cache is completely down. 
It uses a remote shared\n cache (shared by all Django processes) for its default behavior\n (so that the deduplication is global, not per-process), and a\n local in-process cache for when it can't access the remote cache.\n\n This is critical code because it is called every time\n `logging.error` or `logging.exception` (or an exception) happens\n in the codebase.\n\n Adapted from https://djangosnippets.org/snippets/2242/.\n\n \"\"\"\n last_error = datetime.min.replace(tzinfo=timezone_utc)\n # This thread-local variable is used to detect recursive\n # exceptions during exception handling (primarily intended for\n # when accessing the shared cache throws an exception).\n handling_exception = threading.local()\n should_reset_handling_exception = False\n\n def can_use_remote_cache(self) -> Tuple[bool, bool]:\n if getattr(self.handling_exception, 'value', False):\n # If we're processing an exception that occurred\n # while handling an exception, this almost\n # certainly was because interacting with the\n # remote cache is failing (e.g. because the cache\n # is down). Fall back to tracking duplicate\n # exceptions in memory without the remote shared cache.\n return False, False\n\n # Now we test if the remote cache is accessible.\n #\n # This code path can only be reached if we are not potentially\n # handling a recursive exception, so here we set\n # self.handling_exception (in case the cache access we're\n # about to do triggers a `logging.error` or exception that\n # might recurse into this filter class), and actually record\n # that this is the main exception handler thread.\n try:\n self.handling_exception.value = True\n cache.set('RLF_TEST_KEY', 1, 1)\n return cache.get('RLF_TEST_KEY') == 1, True\n except Exception:\n return False, True\n\n def filter(self, record: logging.LogRecord) -> bool:\n # When the original filter() call finishes executing, it's\n # going to change handling_exception.value to False. 
The\n # local variable below tracks whether the *current*,\n # potentially recursive, filter() call is allowed to touch\n # that value (only the original will find this to be True\n # at the end of its execution)\n should_reset_handling_exception = False\n try:\n # Track duplicate errors\n duplicate = False\n rate = getattr(settings, '%s_LIMIT' % (self.__class__.__name__.upper(),),\n 600) # seconds\n\n if rate > 0:\n (use_cache, should_reset_handling_exception) = self.can_use_remote_cache()\n if use_cache:\n if record.exc_info is not None:\n tb = '\\n'.join(traceback.format_exception(*record.exc_info))\n else:\n tb = str(record)\n key = self.__class__.__name__.upper() + hashlib.sha1(tb.encode()).hexdigest()\n duplicate = cache.get(key) == 1\n if not duplicate:\n cache.set(key, 1, rate)\n else:\n min_date = timezone_now() - timedelta(seconds=rate)\n duplicate = (self.last_error >= min_date)\n if not duplicate:\n self.last_error = timezone_now()\n\n return not duplicate\n finally:\n if should_reset_handling_exception:\n self.handling_exception.value = False\n\nclass ZulipLimiter(_RateLimitFilter):\n pass\n\nclass EmailLimiter(_RateLimitFilter):\n pass\n\nclass ReturnTrue(logging.Filter):\n def filter(self, record: logging.LogRecord) -> bool:\n return True\n\nclass ReturnEnabled(logging.Filter):\n def filter(self, record: logging.LogRecord) -> bool:\n return settings.LOGGING_ENABLED\n\nclass RequireReallyDeployed(logging.Filter):\n def filter(self, record: logging.LogRecord) -> bool:\n from django.conf import settings\n return settings.PRODUCTION\n\ndef skip_200_and_304(record: logging.LogRecord) -> bool:\n # Apparently, `status_code` is added by Django and is not an actual\n # attribute of LogRecord; as a result, mypy throws an error if we\n # access the `status_code` attribute directly.\n if getattr(record, 'status_code', None) in [200, 304]:\n return False\n\n return True\n\ndef skip_site_packages_logs(record: logging.LogRecord) -> bool:\n # This skips the log records that are generated from libraries\n # installed in site packages.\n # Workaround for https://code.djangoproject.com/ticket/26886\n if 'site-packages' in record.pathname:\n return False\n return True\n\ndef find_log_caller_module(record: logging.LogRecord) -> Optional[str]:\n '''Find the module name corresponding to where this record was logged.\n\n Sadly `record.module` is just the innermost component of the full\n module name, so we have to go reconstruct this ourselves.\n '''\n # Repeat a search similar to that in logging.Logger.findCaller.\n # The logging call should still be on the stack somewhere; search until\n # we find something in the same source file, and that should give the\n # right module name.\n f = logging.currentframe()\n while f is not None:\n if f.f_code.co_filename == record.pathname:\n return f.f_globals.get('__name__')\n f = f.f_back\n return None # type: ignore # required because of previous ignore on f\n\nlogger_nicknames = {\n 'root': '', # This one is more like undoing a nickname.\n 'zulip.requests': 'zr', # Super common.\n}\n\ndef find_log_origin(record: logging.LogRecord) -> str:\n logger_name = logger_nicknames.get(record.name, record.name)\n\n if settings.LOGGING_SHOW_MODULE:\n module_name = find_log_caller_module(record)\n if module_name == logger_name or module_name == record.name:\n # Abbreviate a bit.\n pass\n else:\n logger_name = '{}/{}'.format(logger_name, module_name or '?')\n\n if settings.RUNNING_INSIDE_TORNADO:\n # In multi-sharded Tornado, it's often valuable to have which shard is\n 
# responsible for the request in the logs.\n from zerver.tornado.ioloop_logging import logging_data\n shard = logging_data.get('port', 'unknown')\n logger_name = \"{}:{}\".format(logger_name, shard)\n\n return logger_name\n\nlog_level_abbrevs = {\n 'DEBUG': 'DEBG',\n 'INFO': 'INFO',\n 'WARNING': 'WARN',\n 'ERROR': 'ERR',\n 'CRITICAL': 'CRIT',\n}\n\ndef abbrev_log_levelname(levelname: str) -> str:\n # It's unlikely someone will set a custom log level with a custom name,\n # but it's an option, so we shouldn't crash if someone does.\n return log_level_abbrevs.get(levelname, levelname[:4])\n\nclass ZulipFormatter(logging.Formatter):\n # Used in the base implementation. Default uses `,`.\n default_msec_format = '%s.%03d'\n\n def __init__(self) -> None:\n super().__init__(fmt=self._compute_fmt())\n\n def _compute_fmt(self) -> str:\n pieces = ['%(asctime)s', '%(zulip_level_abbrev)-4s']\n if settings.LOGGING_SHOW_PID:\n pieces.append('pid:%(process)d')\n pieces.extend(['[%(zulip_origin)s]', '%(message)s'])\n return ' '.join(pieces)\n\n def format(self, record: logging.LogRecord) -> str:\n if not getattr(record, 'zulip_decorated', False):\n # The `setattr` calls put this logic explicitly outside the bounds of the\n # type system; otherwise mypy would complain LogRecord lacks these attributes.\n setattr(record, 'zulip_level_abbrev', abbrev_log_levelname(record.levelname))\n setattr(record, 'zulip_origin', find_log_origin(record))\n setattr(record, 'zulip_decorated', True)\n return super().format(record)\n\ndef log_to_file(logger: Logger,\n filename: str,\n log_format: str=\"%(asctime)s %(levelname)-8s %(message)s\",\n ) -> None:\n \"\"\"Note: `filename` should be declared in zproject/settings.py with zulip_path.\"\"\"\n formatter = logging.Formatter(log_format)\n handler = logging.FileHandler(filename)\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n", "path": "zerver/lib/logging_util.py"}]} |
gh_patches_debug_1607 | rasdani/github-patches | git_diff | mkdocs__mkdocs-2800 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
jinja2 3.1.0 breaks mkdocs
Since the jinja2 3.1.0 release, mkdocs does not work anymore:
```
admin@host ui % pip install jinja2==3.1.0
Collecting jinja2==3.1.0
Using cached Jinja2-3.1.0-py3-none-any.whl (132 kB)
Installing collected packages: jinja2
Attempting uninstall: jinja2
Found existing installation: Jinja2 3.0.0
Uninstalling Jinja2-3.0.0:
Successfully uninstalled Jinja2-3.0.0
Successfully installed jinja2-3.1.0
admin@host ui % mkdocs build
Traceback (most recent call last):
File "/usr/local/bin/mkdocs", line 8, in <module>
sys.exit(cli())
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.9/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.9/site-packages/mkdocs/__main__.py", line 187, in build_command
build.build(config.load_config(**kwargs), dirty=not clean)
File "/usr/local/lib/python3.9/site-packages/mkdocs/config/base.py", line 216, in load_config
from mkdocs.config.defaults import get_schema
File "/usr/local/lib/python3.9/site-packages/mkdocs/config/defaults.py", line 1, in <module>
from mkdocs.config import config_options
File "/usr/local/lib/python3.9/site-packages/mkdocs/config/config_options.py", line 8, in <module>
from mkdocs import utils, theme, plugins
File "/usr/local/lib/python3.9/site-packages/mkdocs/theme.py", line 6, in <module>
from mkdocs.utils import filters
File "/usr/local/lib/python3.9/site-packages/mkdocs/utils/filters.py", line 13, in <module>
@jinja2.contextfilter
AttributeError: module 'jinja2' has no attribute 'contextfilter'
```
However, if I install jinja2 3.0.0:
```
admin@host ui % pip install jinja2==3.0.0
Collecting jinja2==3.0.0
Using cached Jinja2-3.0.0-py3-none-any.whl (133 kB)
Requirement already satisfied: MarkupSafe>=2.0.0rc2 in /usr/local/lib/python3.9/site-packages (from jinja2==3.0.0) (2.1.1)
Installing collected packages: jinja2
Attempting uninstall: jinja2
Found existing installation: Jinja2 3.1.0
Uninstalling Jinja2-3.1.0:
Successfully uninstalled Jinja2-3.1.0
Successfully installed jinja2-3.0.0
admin@host ui % mkdocs build
INFO - Cleaning site directory
INFO - Building documentation to directory: /Users/admin/git/searchlab/ui/site
INFO - Documentation built in 0.33 seconds
```
- mkdocs can be kept working by explicitly installing jinja2 3.0.0 (a temporary pin like the one sketched below).
- Maybe this is not an mkdocs bug but a jinja2 bug; however, it could be patched in mkdocs as well.
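For reference, a hedged sketch of that workaround, mirroring the shell transcripts above — the exact version bounds are an assumption, chosen to exclude the 3.1 line while keeping 3.0:
```
# Hypothetical temporary pin until mkdocs supports Jinja2 3.1;
# any Jinja2 3.0.x release still exposes the deprecated decorator names.
pip install 'jinja2>=3.0,<3.1'
```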
Prevent error with Jinja2 v3.1
Fixes #2794
See [Jinja2 v3.0.0 changes](https://jinja.palletsprojects.com/en/3.0.x/changes/#version-3-0-0):
> The function and filter decorators have been renamed and unified. The old names are deprecated...
>
> - `pass_context` replaces `contextfunction` and `contextfilter`.
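
To make the rename concrete, here is a minimal, hedged sketch of the resulting compatibility shim — essentially the shape of the fix proposed below; the fallback branch is only needed while Jinja2 < 3.0 must still be supported:

```python
# Sketch, not authoritative: prefer the new Jinja2 >= 3.0 name and fall
# back to the alias that Jinja2 < 3.1 still exposes.
try:
    from jinja2 import pass_context as contextfilter  # Jinja2 >= 3.0
except ImportError:
    from jinja2 import contextfilter  # Jinja2 < 3.0

from mkdocs.utils import normalize_url


@contextfilter
def url_filter(context, value):
    """A template filter to normalize URLs."""
    return normalize_url(value, page=context['page'], base=context['base_url'])
```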
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `mkdocs/utils/filters.py`
Content:
```
1 import jinja2
2
3 from mkdocs.utils import normalize_url
4
5
6 @jinja2.contextfilter
7 def url_filter(context, value):
8 """ A Template filter to normalize URLs. """
9 return normalize_url(value, page=context['page'], base=context['base_url'])
10
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/mkdocs/utils/filters.py b/mkdocs/utils/filters.py
--- a/mkdocs/utils/filters.py
+++ b/mkdocs/utils/filters.py
@@ -1,9 +1,12 @@
-import jinja2
+try:
+ from jinja2 import pass_context as contextfilter
+except ImportError:
+ from jinja2 import contextfilter
from mkdocs.utils import normalize_url
[email protected]
+@contextfilter
def url_filter(context, value):
""" A Template filter to normalize URLs. """
return normalize_url(value, page=context['page'], base=context['base_url'])
| {"golden_diff": "diff --git a/mkdocs/utils/filters.py b/mkdocs/utils/filters.py\n--- a/mkdocs/utils/filters.py\n+++ b/mkdocs/utils/filters.py\n@@ -1,9 +1,12 @@\n-import jinja2\n+try:\n+ from jinja2 import pass_context as contextfilter\n+except ImportError:\n+ from jinja2 import contextfilter\n \n from mkdocs.utils import normalize_url\n \n \[email protected]\n+@contextfilter\n def url_filter(context, value):\n \"\"\" A Template filter to normalize URLs. \"\"\"\n return normalize_url(value, page=context['page'], base=context['base_url'])\n", "issue": "jinja2 3.1.0 breaks mkdocs\nsince the jinja2 3.1.0 release mkdocs does not work any more:\r\n\r\n```\r\nadmin@host ui % pip install jinja2==3.1.0\r\nCollecting jinja2==3.1.0\r\n Using cached Jinja2-3.1.0-py3-none-any.whl (132 kB)\r\nInstalling collected packages: jinja2\r\n Attempting uninstall: jinja2\r\n Found existing installation: Jinja2 3.0.0\r\n Uninstalling Jinja2-3.0.0:\r\n Successfully uninstalled Jinja2-3.0.0\r\nSuccessfully installed jinja2-3.1.0\r\nadmin@host ui % mkdocs build\r\nTraceback (most recent call last):\r\n File \"/usr/local/bin/mkdocs\", line 8, in <module>\r\n sys.exit(cli())\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 829, in __call__\r\n return self.main(*args, **kwargs)\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 782, in main\r\n rv = self.invoke(ctx)\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 1259, in invoke\r\n return _process_result(sub_ctx.command.invoke(sub_ctx))\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 1066, in invoke\r\n return ctx.invoke(self.callback, **ctx.params)\r\n File \"/usr/local/lib/python3.9/site-packages/click/core.py\", line 610, in invoke\r\n return callback(*args, **kwargs)\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/__main__.py\", line 187, in build_command\r\n build.build(config.load_config(**kwargs), dirty=not clean)\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/config/base.py\", line 216, in load_config\r\n from mkdocs.config.defaults import get_schema\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/config/defaults.py\", line 1, in <module>\r\n from mkdocs.config import config_options\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/config/config_options.py\", line 8, in <module>\r\n from mkdocs import utils, theme, plugins\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/theme.py\", line 6, in <module>\r\n from mkdocs.utils import filters\r\n File \"/usr/local/lib/python3.9/site-packages/mkdocs/utils/filters.py\", line 13, in <module>\r\n @jinja2.contextfilter\r\nAttributeError: module 'jinja2' has no attribute 'contextfilter'\r\n```\r\nHowever, if I install jinja2 3.0.0:\r\n```\r\nadmin@host ui % pip install jinja2==3.0.0\r\nCollecting jinja2==3.0.0\r\n Using cached Jinja2-3.0.0-py3-none-any.whl (133 kB)\r\nRequirement already satisfied: MarkupSafe>=2.0.0rc2 in /usr/local/lib/python3.9/site-packages (from jinja2==3.0.0) (2.1.1)\r\nInstalling collected packages: jinja2\r\n Attempting uninstall: jinja2\r\n Found existing installation: Jinja2 3.1.0\r\n Uninstalling Jinja2-3.1.0:\r\n Successfully uninstalled Jinja2-3.1.0\r\nSuccessfully installed jinja2-3.0.0\r\nadmin@host ui % mkdocs build\r\nINFO - Cleaning site directory\r\nINFO - Building documentation to directory: /Users/admin/git/searchlab/ui/site\r\nINFO - Documentation built in 0.33 seconds\r\n```\r\n\r\n- mkdocs can be patched by explicitly installing jinja2 
3.0.0.\r\n- maybe this is not a mkdocs bug, but a jinja2 bug; however, this could be patched in mkdocs as well.\r\n\nPrevent error with Jinja2 v3.1\nFixes #2794\r\n\r\nSee [Jinja2 v3.0.0 changes](https://jinja.palletsprojects.com/en/3.0.x/changes/#version-3-0-0):\r\n\r\n> The function and filter decorators have been renamed and unified. The old names are deprecated...\r\n> \r\n> - `pass_context` replaces `contextfunction` and `contextfilter`.\n", "before_files": [{"content": "import jinja2\n\nfrom mkdocs.utils import normalize_url\n\n\[email protected]\ndef url_filter(context, value):\n \"\"\" A Template filter to normalize URLs. \"\"\"\n return normalize_url(value, page=context['page'], base=context['base_url'])\n", "path": "mkdocs/utils/filters.py"}], "after_files": [{"content": "try:\n from jinja2 import pass_context as contextfilter\nexcept ImportError:\n from jinja2 import contextfilter\n\nfrom mkdocs.utils import normalize_url\n\n\n@contextfilter\ndef url_filter(context, value):\n \"\"\" A Template filter to normalize URLs. \"\"\"\n return normalize_url(value, page=context['page'], base=context['base_url'])\n", "path": "mkdocs/utils/filters.py"}]} |
gh_patches_debug_1608 | rasdani/github-patches | git_diff | cloud-custodian__cloud-custodian-4194 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Can't filter log-group using creationTime age
I'm trying, but failing to filter log groups that were created more than 30 days ago using the following filter:
```
policies:
- name: OldLogGroups
resource: log-group
filters:
- type: value
key: creationTime
op: gt
value_type: age
value: 30
```
According to [AWS docs](https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_LogGroup.html), the property `creationTime` stores the creation time of the log group, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
I understand the 'age' value_type already supports a standard Unix epoch expressed in seconds, but not milliseconds (#2051).

Support for date/time values expressed in milliseconds, or a new filter like `log-group-age`, would make it possible to clean up old log groups that were created a certain time ago and either have 0 stored bytes or have not had any writes for a while; a minimal sketch of the needed normalization follows.
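
As an illustration only — `normalize_log_groups` below is a hypothetical helper, not existing custodian code — the needed normalization is a one-time millisecond-to-second conversion before the generic `age` comparison runs:

```python
# Hypothetical sketch: CloudWatch Logs returns creationTime in milliseconds
# since the epoch, while the `age` value_type compares against seconds.
def normalize_log_groups(resources):
    for r in resources:
        r['creationTime'] = r['creationTime'] / 1000.0  # ms -> s
    return resources
```

With that conversion applied once when the resources are loaded, the policy above would compare like-for-like epochs.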
custodian version: 0.8.42.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `c7n/resources/cw.py`
Content:
```
1 # Copyright 2016-2017 Capital One Services, LLC
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 from __future__ import absolute_import, division, print_function, unicode_literals
15
16 from concurrent.futures import as_completed
17 from datetime import datetime, timedelta
18
19 from c7n.actions import BaseAction
20 from c7n.exceptions import PolicyValidationError
21 from c7n.filters import Filter, MetricsFilter
22 from c7n.filters.iamaccess import CrossAccountAccessFilter
23 from c7n.query import QueryResourceManager, ChildResourceManager
24 from c7n.manager import resources
25 from c7n.resolver import ValuesFrom
26 from c7n.tags import universal_augment, register_universal_tags
27 from c7n.utils import type_schema, local_session, chunks, get_retry
28
29
30 @resources.register('alarm')
31 class Alarm(QueryResourceManager):
32
33 class resource_type(object):
34 service = 'cloudwatch'
35 type = 'alarm'
36 enum_spec = ('describe_alarms', 'MetricAlarms', None)
37 id = 'AlarmArn'
38 filter_name = 'AlarmNames'
39 filter_type = 'list'
40 name = 'AlarmName'
41 date = 'AlarmConfigurationUpdatedTimestamp'
42 dimension = None
43 config_type = 'AWS::CloudWatch::Alarm'
44
45 retry = staticmethod(get_retry(('Throttled',)))
46
47
48 @Alarm.action_registry.register('delete')
49 class AlarmDelete(BaseAction):
50 """Delete a cloudwatch alarm.
51
52 :example:
53
54 .. code-block:: yaml
55
56 policies:
57 - name: cloudwatch-delete-stale-alarms
58 resource: alarm
59 filters:
60 - type: value
61 value_type: age
62 key: StateUpdatedTimestamp
63 value: 30
64 op: ge
65 - StateValue: INSUFFICIENT_DATA
66 actions:
67 - delete
68 """
69
70 schema = type_schema('delete')
71 permissions = ('cloudwatch:DeleteAlarms',)
72
73 def process(self, resources):
74 client = local_session(
75 self.manager.session_factory).client('cloudwatch')
76
77 for resource_set in chunks(resources, size=100):
78 self.manager.retry(
79 client.delete_alarms,
80 AlarmNames=[r['AlarmName'] for r in resource_set])
81
82
83 @resources.register('event-rule')
84 class EventRule(QueryResourceManager):
85
86 class resource_type(object):
87 service = 'events'
88 type = 'event-rule'
89 enum_spec = ('list_rules', 'Rules', None)
90 name = "Name"
91 id = "Name"
92 filter_name = "NamePrefix"
93 filter_type = "scalar"
94 dimension = None
95
96
97 @EventRule.filter_registry.register('metrics')
98 class EventRuleMetrics(MetricsFilter):
99
100 def get_dimensions(self, resource):
101 return [{'Name': 'RuleName', 'Value': resource['Name']}]
102
103
104 @resources.register('event-rule-target')
105 class EventRuleTarget(ChildResourceManager):
106
107 class resource_type(object):
108 service = 'events'
109 type = 'event-rule-target'
110 enum_spec = ('list_targets_by_rule', 'Targets', None)
111 parent_spec = ('event-rule', 'Rule', True)
112 name = id = 'Id'
113 dimension = None
114 filter_type = filter_name = None
115
116
117 @EventRuleTarget.filter_registry.register('cross-account')
118 class CrossAccountFilter(CrossAccountAccessFilter):
119
120 schema = type_schema(
121 'cross-account',
122 # white list accounts
123 whitelist_from=ValuesFrom.schema,
124 whitelist={'type': 'array', 'items': {'type': 'string'}})
125
126 # dummy permission
127 permissions = ('events:ListTargetsByRule',)
128
129 def __call__(self, r):
130 account_id = r['Arn'].split(':', 5)[4]
131 return account_id not in self.accounts
132
133
134 @EventRuleTarget.action_registry.register('delete')
135 class DeleteTarget(BaseAction):
136
137 schema = type_schema('delete')
138 permissions = ('events:RemoveTargets',)
139
140 def process(self, resources):
141 client = local_session(self.manager.session_factory).client('events')
142 rule_targets = {}
143 for r in resources:
144 rule_targets.setdefault(r['c7n:parent-id'], []).append(r['Id'])
145
146 for rule_id, target_ids in rule_targets.items():
147 client.remove_targets(
148 Ids=target_ids,
149 Rule=rule_id)
150
151
152 @resources.register('log-group')
153 class LogGroup(QueryResourceManager):
154
155 class resource_type(object):
156 service = 'logs'
157 type = 'log-group'
158 enum_spec = ('describe_log_groups', 'logGroups', None)
159 name = 'logGroupName'
160 id = 'arn'
161 filter_name = 'logGroupNamePrefix'
162 filter_type = 'scalar'
163 dimension = 'LogGroupName'
164 date = 'creationTime'
165
166 augment = universal_augment
167
168 def get_arns(self, resources):
169 # log group arn in resource describe has ':*' suffix, not all
170 # apis can use that form, so normalize to standard arn.
171 return [r['arn'][:-2] for r in resources]
172
173
174 register_universal_tags(LogGroup.filter_registry, LogGroup.action_registry)
175
176
177 @LogGroup.action_registry.register('retention')
178 class Retention(BaseAction):
179 """Action to set the retention period (in days) for CloudWatch log groups
180
181 :example:
182
183 .. code-block:: yaml
184
185 policies:
186 - name: cloudwatch-set-log-group-retention
187 resource: log-group
188 actions:
189 - type: retention
190 days: 200
191 """
192
193 schema = type_schema('retention', days={'type': 'integer'})
194 permissions = ('logs:PutRetentionPolicy',)
195
196 def process(self, resources):
197 client = local_session(self.manager.session_factory).client('logs')
198 days = self.data['days']
199 for r in resources:
200 client.put_retention_policy(
201 logGroupName=r['logGroupName'],
202 retentionInDays=days)
203
204
205 @LogGroup.action_registry.register('delete')
206 class Delete(BaseAction):
207 """
208
209 :example:
210
211 .. code-block:: yaml
212
213 policies:
214 - name: cloudwatch-delete-stale-log-group
215 resource: log-group
216 filters:
217 - type: last-write
218 days: 182.5
219 actions:
220 - delete
221 """
222
223 schema = type_schema('delete')
224 permissions = ('logs:DeleteLogGroup',)
225
226 def process(self, resources):
227 client = local_session(self.manager.session_factory).client('logs')
228 for r in resources:
229 client.delete_log_group(logGroupName=r['logGroupName'])
230
231
232 @LogGroup.filter_registry.register('last-write')
233 class LastWriteDays(Filter):
234 """Filters CloudWatch log groups by last write
235
236 :example:
237
238 .. code-block:: yaml
239
240 policies:
241 - name: cloudwatch-stale-groups
242 resource: log-group
243 filters:
244 - type: last-write
245 days: 60
246 """
247
248 schema = type_schema(
249 'last-write', days={'type': 'number'})
250 permissions = ('logs:DescribeLogStreams',)
251
252 def process(self, resources, event=None):
253 client = local_session(self.manager.session_factory).client('logs')
254 self.date_threshold = datetime.utcnow() - timedelta(
255 days=self.data['days'])
256 return [r for r in resources if self.check_group(client, r)]
257
258 def check_group(self, client, group):
259 streams = client.describe_log_streams(
260 logGroupName=group['logGroupName'],
261 orderBy='LastEventTime',
262 descending=True,
263 limit=3).get('logStreams')
264 group['streams'] = streams
265 if not streams:
266 last_timestamp = group['creationTime']
267 elif streams[0]['storedBytes'] == 0:
268 last_timestamp = streams[0]['creationTime']
269 else:
270 last_timestamp = streams[0]['lastIngestionTime']
271
272 last_write = datetime.fromtimestamp(last_timestamp / 1000.0)
273 group['lastWrite'] = last_write
274 return self.date_threshold > last_write
275
276
277 @LogGroup.filter_registry.register('cross-account')
278 class LogCrossAccountFilter(CrossAccountAccessFilter):
279
280 schema = type_schema(
281 'cross-account',
282 # white list accounts
283 whitelist_from=ValuesFrom.schema,
284 whitelist={'type': 'array', 'items': {'type': 'string'}})
285
286 permissions = ('logs:DescribeSubscriptionFilters',)
287
288 def process(self, resources, event=None):
289 client = local_session(self.manager.session_factory).client('logs')
290 accounts = self.get_accounts()
291 results = []
292 with self.executor_factory(max_workers=1) as w:
293 futures = []
294 for rset in chunks(resources, 50):
295 futures.append(
296 w.submit(
297 self.process_resource_set, client, accounts, rset))
298 for f in as_completed(futures):
299 if f.exception():
300 self.log.error(
301 "Error checking log groups cross-account %s",
302 f.exception())
303 continue
304 results.extend(f.result())
305 return results
306
307 def process_resource_set(self, client, accounts, resources):
308 results = []
309 for r in resources:
310 found = False
311 filters = self.manager.retry(
312 client.describe_subscription_filters,
313 logGroupName=r['logGroupName']).get('subscriptionFilters', ())
314 for f in filters:
315 if 'destinationArn' not in f:
316 continue
317 account_id = f['destinationArn'].split(':', 5)[4]
318 if account_id not in accounts:
319 r.setdefault('c7n:CrossAccountViolations', []).append(
320 account_id)
321 found = True
322 if found:
323 results.append(r)
324 return results
325
326
327 @LogGroup.action_registry.register('set-encryption')
328 class EncryptLogGroup(BaseAction):
329 """Encrypt/Decrypt a log group
330
331 :example:
332
333 .. code-block:: yaml
334
335 policies:
336 - name: encrypt-log-group
337 resource: log-group
338 filters:
339 - kmsKeyId: absent
340 actions:
341 - type: set-encryption
342 kms-key: alias/mylogkey
343 state: True
344
345 - name: decrypt-log-group
346 resource: log-group
347 filters:
348 - kmsKeyId: kms:key:arn
349 actions:
350 - type: set-encryption
351 state: False
352 """
353 schema = type_schema(
354 'set-encryption',
355 **{'kms-key': {'type': 'string'},
356 'state': {'type': 'boolean'}})
357 permissions = (
358 'logs:AssociateKmsKey', 'logs:DisassociateKmsKey', 'kms:DescribeKey')
359
360 def validate(self):
361 if not self.data.get('state', True):
362 return self
363 key = self.data.get('kms-key', '')
364 if not key:
365 raise ValueError('Must specify either a KMS key ARN or Alias')
366 if 'alias/' not in key and ':key/' not in key:
367 raise PolicyValidationError(
368 "Invalid kms key format %s" % key)
369 return self
370
371 def resolve_key(self, key):
372 if not key:
373 return
374
375 # Qualified arn for key
376 if key.startswith('arn:') and ':key/' in key:
377 return key
378
379 # Alias
380 key = local_session(
381 self.manager.session_factory).client(
382 'kms').describe_key(
383 KeyId=key)['KeyMetadata']['Arn']
384 return key
385
386 def process(self, resources):
387 session = local_session(self.manager.session_factory)
388 client = session.client('logs')
389
390 state = self.data.get('state', True)
391 key = self.resolve_key(self.data.get('kms-key'))
392
393 for r in resources:
394 try:
395 if state:
396 client.associate_kms_key(
397 logGroupName=r['logGroupName'], kmsKeyId=key)
398 else:
399 client.disassociate_kms_key(logGroupName=r['logGroupName'])
400 except client.exceptions.ResourceNotFoundException:
401 continue
402
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/c7n/resources/cw.py b/c7n/resources/cw.py
--- a/c7n/resources/cw.py
+++ b/c7n/resources/cw.py
@@ -163,7 +163,11 @@
dimension = 'LogGroupName'
date = 'creationTime'
- augment = universal_augment
+ def augment(self, resources):
+ resources = universal_augment(self, resources)
+ for r in resources:
+ r['creationTime'] = r['creationTime'] / 1000.0
+ return resources
def get_arns(self, resources):
# log group arn in resource describe has ':*' suffix, not all
| {"golden_diff": "diff --git a/c7n/resources/cw.py b/c7n/resources/cw.py\n--- a/c7n/resources/cw.py\n+++ b/c7n/resources/cw.py\n@@ -163,7 +163,11 @@\n dimension = 'LogGroupName'\n date = 'creationTime'\n \n- augment = universal_augment\n+ def augment(self, resources):\n+ resources = universal_augment(self, resources)\n+ for r in resources:\n+ r['creationTime'] = r['creationTime'] / 1000.0\n+ return resources\n \n def get_arns(self, resources):\n # log group arn in resource describe has ':*' suffix, not all\n", "issue": "Can't filter log-group using creationTime age\nI'm trying, but failing to filter log groups that were created more than 30 days ago using the following filter:\r\n\r\n```\r\npolicies:\r\n- name: OldLogGroups\r\n resource: log-group\r\n filters:\r\n - type: value\r\n key: creationTime\r\n op: gt\r\n value_type: age\r\n value: 30\r\n```\r\n\r\nAccording to [AWS docs](https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_LogGroup.html), the property `creationTime` stores the creation time of the log group, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.\r\n\r\nI understand 'age' value_type already supports standard unix epoch expressed in seconds, but not milliseconds (#2051).\r\n\r\nSupport for date/time values expressed in milliseconds or adding a new filter like `log-group-age` would allow to cleanup old log groups that have been created certain time ago and either have 0 stored bytes or have not had any writes for a while.\r\n\r\ncustodian version: 0.8.42.1\n", "before_files": [{"content": "# Copyright 2016-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom concurrent.futures import as_completed\nfrom datetime import datetime, timedelta\n\nfrom c7n.actions import BaseAction\nfrom c7n.exceptions import PolicyValidationError\nfrom c7n.filters import Filter, MetricsFilter\nfrom c7n.filters.iamaccess import CrossAccountAccessFilter\nfrom c7n.query import QueryResourceManager, ChildResourceManager\nfrom c7n.manager import resources\nfrom c7n.resolver import ValuesFrom\nfrom c7n.tags import universal_augment, register_universal_tags\nfrom c7n.utils import type_schema, local_session, chunks, get_retry\n\n\[email protected]('alarm')\nclass Alarm(QueryResourceManager):\n\n class resource_type(object):\n service = 'cloudwatch'\n type = 'alarm'\n enum_spec = ('describe_alarms', 'MetricAlarms', None)\n id = 'AlarmArn'\n filter_name = 'AlarmNames'\n filter_type = 'list'\n name = 'AlarmName'\n date = 'AlarmConfigurationUpdatedTimestamp'\n dimension = None\n config_type = 'AWS::CloudWatch::Alarm'\n\n retry = staticmethod(get_retry(('Throttled',)))\n\n\[email protected]_registry.register('delete')\nclass AlarmDelete(BaseAction):\n \"\"\"Delete a cloudwatch alarm.\n\n :example:\n\n .. 
code-block:: yaml\n\n policies:\n - name: cloudwatch-delete-stale-alarms\n resource: alarm\n filters:\n - type: value\n value_type: age\n key: StateUpdatedTimestamp\n value: 30\n op: ge\n - StateValue: INSUFFICIENT_DATA\n actions:\n - delete\n \"\"\"\n\n schema = type_schema('delete')\n permissions = ('cloudwatch:DeleteAlarms',)\n\n def process(self, resources):\n client = local_session(\n self.manager.session_factory).client('cloudwatch')\n\n for resource_set in chunks(resources, size=100):\n self.manager.retry(\n client.delete_alarms,\n AlarmNames=[r['AlarmName'] for r in resource_set])\n\n\[email protected]('event-rule')\nclass EventRule(QueryResourceManager):\n\n class resource_type(object):\n service = 'events'\n type = 'event-rule'\n enum_spec = ('list_rules', 'Rules', None)\n name = \"Name\"\n id = \"Name\"\n filter_name = \"NamePrefix\"\n filter_type = \"scalar\"\n dimension = None\n\n\[email protected]_registry.register('metrics')\nclass EventRuleMetrics(MetricsFilter):\n\n def get_dimensions(self, resource):\n return [{'Name': 'RuleName', 'Value': resource['Name']}]\n\n\[email protected]('event-rule-target')\nclass EventRuleTarget(ChildResourceManager):\n\n class resource_type(object):\n service = 'events'\n type = 'event-rule-target'\n enum_spec = ('list_targets_by_rule', 'Targets', None)\n parent_spec = ('event-rule', 'Rule', True)\n name = id = 'Id'\n dimension = None\n filter_type = filter_name = None\n\n\[email protected]_registry.register('cross-account')\nclass CrossAccountFilter(CrossAccountAccessFilter):\n\n schema = type_schema(\n 'cross-account',\n # white list accounts\n whitelist_from=ValuesFrom.schema,\n whitelist={'type': 'array', 'items': {'type': 'string'}})\n\n # dummy permission\n permissions = ('events:ListTargetsByRule',)\n\n def __call__(self, r):\n account_id = r['Arn'].split(':', 5)[4]\n return account_id not in self.accounts\n\n\[email protected]_registry.register('delete')\nclass DeleteTarget(BaseAction):\n\n schema = type_schema('delete')\n permissions = ('events:RemoveTargets',)\n\n def process(self, resources):\n client = local_session(self.manager.session_factory).client('events')\n rule_targets = {}\n for r in resources:\n rule_targets.setdefault(r['c7n:parent-id'], []).append(r['Id'])\n\n for rule_id, target_ids in rule_targets.items():\n client.remove_targets(\n Ids=target_ids,\n Rule=rule_id)\n\n\[email protected]('log-group')\nclass LogGroup(QueryResourceManager):\n\n class resource_type(object):\n service = 'logs'\n type = 'log-group'\n enum_spec = ('describe_log_groups', 'logGroups', None)\n name = 'logGroupName'\n id = 'arn'\n filter_name = 'logGroupNamePrefix'\n filter_type = 'scalar'\n dimension = 'LogGroupName'\n date = 'creationTime'\n\n augment = universal_augment\n\n def get_arns(self, resources):\n # log group arn in resource describe has ':*' suffix, not all\n # apis can use that form, so normalize to standard arn.\n return [r['arn'][:-2] for r in resources]\n\n\nregister_universal_tags(LogGroup.filter_registry, LogGroup.action_registry)\n\n\[email protected]_registry.register('retention')\nclass Retention(BaseAction):\n \"\"\"Action to set the retention period (in days) for CloudWatch log groups\n\n :example:\n\n .. 
code-block:: yaml\n\n policies:\n - name: cloudwatch-set-log-group-retention\n resource: log-group\n actions:\n - type: retention\n days: 200\n \"\"\"\n\n schema = type_schema('retention', days={'type': 'integer'})\n permissions = ('logs:PutRetentionPolicy',)\n\n def process(self, resources):\n client = local_session(self.manager.session_factory).client('logs')\n days = self.data['days']\n for r in resources:\n client.put_retention_policy(\n logGroupName=r['logGroupName'],\n retentionInDays=days)\n\n\[email protected]_registry.register('delete')\nclass Delete(BaseAction):\n \"\"\"\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: cloudwatch-delete-stale-log-group\n resource: log-group\n filters:\n - type: last-write\n days: 182.5\n actions:\n - delete\n \"\"\"\n\n schema = type_schema('delete')\n permissions = ('logs:DeleteLogGroup',)\n\n def process(self, resources):\n client = local_session(self.manager.session_factory).client('logs')\n for r in resources:\n client.delete_log_group(logGroupName=r['logGroupName'])\n\n\[email protected]_registry.register('last-write')\nclass LastWriteDays(Filter):\n \"\"\"Filters CloudWatch log groups by last write\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: cloudwatch-stale-groups\n resource: log-group\n filters:\n - type: last-write\n days: 60\n \"\"\"\n\n schema = type_schema(\n 'last-write', days={'type': 'number'})\n permissions = ('logs:DescribeLogStreams',)\n\n def process(self, resources, event=None):\n client = local_session(self.manager.session_factory).client('logs')\n self.date_threshold = datetime.utcnow() - timedelta(\n days=self.data['days'])\n return [r for r in resources if self.check_group(client, r)]\n\n def check_group(self, client, group):\n streams = client.describe_log_streams(\n logGroupName=group['logGroupName'],\n orderBy='LastEventTime',\n descending=True,\n limit=3).get('logStreams')\n group['streams'] = streams\n if not streams:\n last_timestamp = group['creationTime']\n elif streams[0]['storedBytes'] == 0:\n last_timestamp = streams[0]['creationTime']\n else:\n last_timestamp = streams[0]['lastIngestionTime']\n\n last_write = datetime.fromtimestamp(last_timestamp / 1000.0)\n group['lastWrite'] = last_write\n return self.date_threshold > last_write\n\n\[email protected]_registry.register('cross-account')\nclass LogCrossAccountFilter(CrossAccountAccessFilter):\n\n schema = type_schema(\n 'cross-account',\n # white list accounts\n whitelist_from=ValuesFrom.schema,\n whitelist={'type': 'array', 'items': {'type': 'string'}})\n\n permissions = ('logs:DescribeSubscriptionFilters',)\n\n def process(self, resources, event=None):\n client = local_session(self.manager.session_factory).client('logs')\n accounts = self.get_accounts()\n results = []\n with self.executor_factory(max_workers=1) as w:\n futures = []\n for rset in chunks(resources, 50):\n futures.append(\n w.submit(\n self.process_resource_set, client, accounts, rset))\n for f in as_completed(futures):\n if f.exception():\n self.log.error(\n \"Error checking log groups cross-account %s\",\n f.exception())\n continue\n results.extend(f.result())\n return results\n\n def process_resource_set(self, client, accounts, resources):\n results = []\n for r in resources:\n found = False\n filters = self.manager.retry(\n client.describe_subscription_filters,\n logGroupName=r['logGroupName']).get('subscriptionFilters', ())\n for f in filters:\n if 'destinationArn' not in f:\n continue\n account_id = f['destinationArn'].split(':', 5)[4]\n if account_id not in 
accounts:\n r.setdefault('c7n:CrossAccountViolations', []).append(\n account_id)\n found = True\n if found:\n results.append(r)\n return results\n\n\[email protected]_registry.register('set-encryption')\nclass EncryptLogGroup(BaseAction):\n \"\"\"Encrypt/Decrypt a log group\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: encrypt-log-group\n resource: log-group\n filters:\n - kmsKeyId: absent\n actions:\n - type: set-encryption\n kms-key: alias/mylogkey\n state: True\n\n - name: decrypt-log-group\n resource: log-group\n filters:\n - kmsKeyId: kms:key:arn\n actions:\n - type: set-encryption\n state: False\n \"\"\"\n schema = type_schema(\n 'set-encryption',\n **{'kms-key': {'type': 'string'},\n 'state': {'type': 'boolean'}})\n permissions = (\n 'logs:AssociateKmsKey', 'logs:DisassociateKmsKey', 'kms:DescribeKey')\n\n def validate(self):\n if not self.data.get('state', True):\n return self\n key = self.data.get('kms-key', '')\n if not key:\n raise ValueError('Must specify either a KMS key ARN or Alias')\n if 'alias/' not in key and ':key/' not in key:\n raise PolicyValidationError(\n \"Invalid kms key format %s\" % key)\n return self\n\n def resolve_key(self, key):\n if not key:\n return\n\n # Qualified arn for key\n if key.startswith('arn:') and ':key/' in key:\n return key\n\n # Alias\n key = local_session(\n self.manager.session_factory).client(\n 'kms').describe_key(\n KeyId=key)['KeyMetadata']['Arn']\n return key\n\n def process(self, resources):\n session = local_session(self.manager.session_factory)\n client = session.client('logs')\n\n state = self.data.get('state', True)\n key = self.resolve_key(self.data.get('kms-key'))\n\n for r in resources:\n try:\n if state:\n client.associate_kms_key(\n logGroupName=r['logGroupName'], kmsKeyId=key)\n else:\n client.disassociate_kms_key(logGroupName=r['logGroupName'])\n except client.exceptions.ResourceNotFoundException:\n continue\n", "path": "c7n/resources/cw.py"}], "after_files": [{"content": "# Copyright 2016-2017 Capital One Services, LLC\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\n\nfrom concurrent.futures import as_completed\nfrom datetime import datetime, timedelta\n\nfrom c7n.actions import BaseAction\nfrom c7n.exceptions import PolicyValidationError\nfrom c7n.filters import Filter, MetricsFilter\nfrom c7n.filters.iamaccess import CrossAccountAccessFilter\nfrom c7n.query import QueryResourceManager, ChildResourceManager\nfrom c7n.manager import resources\nfrom c7n.resolver import ValuesFrom\nfrom c7n.tags import universal_augment, register_universal_tags\nfrom c7n.utils import type_schema, local_session, chunks, get_retry\n\n\[email protected]('alarm')\nclass Alarm(QueryResourceManager):\n\n class resource_type(object):\n service = 'cloudwatch'\n type = 'alarm'\n enum_spec = ('describe_alarms', 'MetricAlarms', None)\n id = 'AlarmArn'\n filter_name = 'AlarmNames'\n filter_type = 'list'\n name = 'AlarmName'\n date = 
'AlarmConfigurationUpdatedTimestamp'\n dimension = None\n config_type = 'AWS::CloudWatch::Alarm'\n\n retry = staticmethod(get_retry(('Throttled',)))\n\n\[email protected]_registry.register('delete')\nclass AlarmDelete(BaseAction):\n \"\"\"Delete a cloudwatch alarm.\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: cloudwatch-delete-stale-alarms\n resource: alarm\n filters:\n - type: value\n value_type: age\n key: StateUpdatedTimestamp\n value: 30\n op: ge\n - StateValue: INSUFFICIENT_DATA\n actions:\n - delete\n \"\"\"\n\n schema = type_schema('delete')\n permissions = ('cloudwatch:DeleteAlarms',)\n\n def process(self, resources):\n client = local_session(\n self.manager.session_factory).client('cloudwatch')\n\n for resource_set in chunks(resources, size=100):\n self.manager.retry(\n client.delete_alarms,\n AlarmNames=[r['AlarmName'] for r in resource_set])\n\n\[email protected]('event-rule')\nclass EventRule(QueryResourceManager):\n\n class resource_type(object):\n service = 'events'\n type = 'event-rule'\n enum_spec = ('list_rules', 'Rules', None)\n name = \"Name\"\n id = \"Name\"\n filter_name = \"NamePrefix\"\n filter_type = \"scalar\"\n dimension = None\n\n\[email protected]_registry.register('metrics')\nclass EventRuleMetrics(MetricsFilter):\n\n def get_dimensions(self, resource):\n return [{'Name': 'RuleName', 'Value': resource['Name']}]\n\n\[email protected]('event-rule-target')\nclass EventRuleTarget(ChildResourceManager):\n\n class resource_type(object):\n service = 'events'\n type = 'event-rule-target'\n enum_spec = ('list_targets_by_rule', 'Targets', None)\n parent_spec = ('event-rule', 'Rule', True)\n name = id = 'Id'\n dimension = None\n filter_type = filter_name = None\n\n\[email protected]_registry.register('cross-account')\nclass CrossAccountFilter(CrossAccountAccessFilter):\n\n schema = type_schema(\n 'cross-account',\n # white list accounts\n whitelist_from=ValuesFrom.schema,\n whitelist={'type': 'array', 'items': {'type': 'string'}})\n\n # dummy permission\n permissions = ('events:ListTargetsByRule',)\n\n def __call__(self, r):\n account_id = r['Arn'].split(':', 5)[4]\n return account_id not in self.accounts\n\n\[email protected]_registry.register('delete')\nclass DeleteTarget(BaseAction):\n\n schema = type_schema('delete')\n permissions = ('events:RemoveTargets',)\n\n def process(self, resources):\n client = local_session(self.manager.session_factory).client('events')\n rule_targets = {}\n for r in resources:\n rule_targets.setdefault(r['c7n:parent-id'], []).append(r['Id'])\n\n for rule_id, target_ids in rule_targets.items():\n client.remove_targets(\n Ids=target_ids,\n Rule=rule_id)\n\n\[email protected]('log-group')\nclass LogGroup(QueryResourceManager):\n\n class resource_type(object):\n service = 'logs'\n type = 'log-group'\n enum_spec = ('describe_log_groups', 'logGroups', None)\n name = 'logGroupName'\n id = 'arn'\n filter_name = 'logGroupNamePrefix'\n filter_type = 'scalar'\n dimension = 'LogGroupName'\n date = 'creationTime'\n\n def augment(self, resources):\n resources = universal_augment(self, resources)\n for r in resources:\n r['creationTime'] = r['creationTime'] / 1000.0\n return resources\n\n def get_arns(self, resources):\n # log group arn in resource describe has ':*' suffix, not all\n # apis can use that form, so normalize to standard arn.\n return [r['arn'][:-2] for r in resources]\n\n\nregister_universal_tags(LogGroup.filter_registry, LogGroup.action_registry)\n\n\[email protected]_registry.register('retention')\nclass 
Retention(BaseAction):\n \"\"\"Action to set the retention period (in days) for CloudWatch log groups\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: cloudwatch-set-log-group-retention\n resource: log-group\n actions:\n - type: retention\n days: 200\n \"\"\"\n\n schema = type_schema('retention', days={'type': 'integer'})\n permissions = ('logs:PutRetentionPolicy',)\n\n def process(self, resources):\n client = local_session(self.manager.session_factory).client('logs')\n days = self.data['days']\n for r in resources:\n client.put_retention_policy(\n logGroupName=r['logGroupName'],\n retentionInDays=days)\n\n\[email protected]_registry.register('delete')\nclass Delete(BaseAction):\n \"\"\"\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: cloudwatch-delete-stale-log-group\n resource: log-group\n filters:\n - type: last-write\n days: 182.5\n actions:\n - delete\n \"\"\"\n\n schema = type_schema('delete')\n permissions = ('logs:DeleteLogGroup',)\n\n def process(self, resources):\n client = local_session(self.manager.session_factory).client('logs')\n for r in resources:\n client.delete_log_group(logGroupName=r['logGroupName'])\n\n\[email protected]_registry.register('last-write')\nclass LastWriteDays(Filter):\n \"\"\"Filters CloudWatch log groups by last write\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: cloudwatch-stale-groups\n resource: log-group\n filters:\n - type: last-write\n days: 60\n \"\"\"\n\n schema = type_schema(\n 'last-write', days={'type': 'number'})\n permissions = ('logs:DescribeLogStreams',)\n\n def process(self, resources, event=None):\n client = local_session(self.manager.session_factory).client('logs')\n self.date_threshold = datetime.utcnow() - timedelta(\n days=self.data['days'])\n return [r for r in resources if self.check_group(client, r)]\n\n def check_group(self, client, group):\n streams = client.describe_log_streams(\n logGroupName=group['logGroupName'],\n orderBy='LastEventTime',\n descending=True,\n limit=3).get('logStreams')\n group['streams'] = streams\n if not streams:\n last_timestamp = group['creationTime']\n elif streams[0]['storedBytes'] == 0:\n last_timestamp = streams[0]['creationTime']\n else:\n last_timestamp = streams[0]['lastIngestionTime']\n\n last_write = datetime.fromtimestamp(last_timestamp / 1000.0)\n group['lastWrite'] = last_write\n return self.date_threshold > last_write\n\n\[email protected]_registry.register('cross-account')\nclass LogCrossAccountFilter(CrossAccountAccessFilter):\n\n schema = type_schema(\n 'cross-account',\n # white list accounts\n whitelist_from=ValuesFrom.schema,\n whitelist={'type': 'array', 'items': {'type': 'string'}})\n\n permissions = ('logs:DescribeSubscriptionFilters',)\n\n def process(self, resources, event=None):\n client = local_session(self.manager.session_factory).client('logs')\n accounts = self.get_accounts()\n results = []\n with self.executor_factory(max_workers=1) as w:\n futures = []\n for rset in chunks(resources, 50):\n futures.append(\n w.submit(\n self.process_resource_set, client, accounts, rset))\n for f in as_completed(futures):\n if f.exception():\n self.log.error(\n \"Error checking log groups cross-account %s\",\n f.exception())\n continue\n results.extend(f.result())\n return results\n\n def process_resource_set(self, client, accounts, resources):\n results = []\n for r in resources:\n found = False\n filters = self.manager.retry(\n client.describe_subscription_filters,\n logGroupName=r['logGroupName']).get('subscriptionFilters', ())\n for f in 
filters:\n if 'destinationArn' not in f:\n continue\n account_id = f['destinationArn'].split(':', 5)[4]\n if account_id not in accounts:\n r.setdefault('c7n:CrossAccountViolations', []).append(\n account_id)\n found = True\n if found:\n results.append(r)\n return results\n\n\[email protected]_registry.register('set-encryption')\nclass EncryptLogGroup(BaseAction):\n \"\"\"Encrypt/Decrypt a log group\n\n :example:\n\n .. code-block:: yaml\n\n policies:\n - name: encrypt-log-group\n resource: log-group\n filters:\n - kmsKeyId: absent\n actions:\n - type: set-encryption\n kms-key: alias/mylogkey\n state: True\n\n - name: decrypt-log-group\n resource: log-group\n filters:\n - kmsKeyId: kms:key:arn\n actions:\n - type: set-encryption\n state: False\n \"\"\"\n schema = type_schema(\n 'set-encryption',\n **{'kms-key': {'type': 'string'},\n 'state': {'type': 'boolean'}})\n permissions = (\n 'logs:AssociateKmsKey', 'logs:DisassociateKmsKey', 'kms:DescribeKey')\n\n def validate(self):\n if not self.data.get('state', True):\n return self\n key = self.data.get('kms-key', '')\n if not key:\n raise ValueError('Must specify either a KMS key ARN or Alias')\n if 'alias/' not in key and ':key/' not in key:\n raise PolicyValidationError(\n \"Invalid kms key format %s\" % key)\n return self\n\n def resolve_key(self, key):\n if not key:\n return\n\n # Qualified arn for key\n if key.startswith('arn:') and ':key/' in key:\n return key\n\n # Alias\n key = local_session(\n self.manager.session_factory).client(\n 'kms').describe_key(\n KeyId=key)['KeyMetadata']['Arn']\n return key\n\n def process(self, resources):\n session = local_session(self.manager.session_factory)\n client = session.client('logs')\n\n state = self.data.get('state', True)\n key = self.resolve_key(self.data.get('kms-key'))\n\n for r in resources:\n try:\n if state:\n client.associate_kms_key(\n logGroupName=r['logGroupName'], kmsKeyId=key)\n else:\n client.disassociate_kms_key(logGroupName=r['logGroupName'])\n except client.exceptions.ResourceNotFoundException:\n continue\n", "path": "c7n/resources/cw.py"}]} |
gh_patches_debug_1609 | rasdani/github-patches | git_diff | gratipay__gratipay.com-302 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Show how many total funders there are and the donation amount each is in for.
As a receiver of money, I'd like to know how diversified my $13/wk is, so that I know how volatile my income is likely to be.
How I see this working is a histogram similar to:
$1 [---] 1
$3 []
$6 []
$12 [---] 1
$24 []
which shows that I have 2 funders, one in for $1 and one in for $12.
--- END ISSUE ---
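For illustration, a minimal sketch of how such a histogram could be rendered from per-funder tip amounts. The `tips` list and bucket values here are hypothetical stand-ins, not Gittip's actual data model; the real amounts would come from the `tips` table shown in the files below.

```python
from collections import Counter
from decimal import Decimal

# Hypothetical per-funder tip amounts: one entry per funder.
tips = [Decimal('1.00'), Decimal('12.00')]
buckets = [Decimal(a) for a in ('1.00', '3.00', '6.00', '12.00', '24.00')]

counts = Counter(tips)  # amount -> number of funders giving that amount
for amount in buckets:
    n = counts.get(amount, 0)
    label = '$%d' % int(amount)
    # Bar grows with the number of funders at this tier; blank if none.
    print('%4s [%s] %s' % (label, '-' * 3 * n, n if n else ''))
```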
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `gittip/__init__.py`
Content:
```
1 import datetime
2 import locale
3 import os
4 from decimal import Decimal
5
6
7 try: # XXX This can't be right.
8 locale.setlocale(locale.LC_ALL, "en_US.utf8")
9 except locale.Error:
10 locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
11
12
13 BIRTHDAY = datetime.date(2012, 6, 1)
14 CARDINALS = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']
15 MONTHS = [None, 'January', 'February', 'March', 'April', 'May', 'June', 'July',
16 'August', 'September', 'October', 'November', 'December']
17
18 def age():
19 today = datetime.date.today()
20 nmonths = today.month - BIRTHDAY.month
21 plural = 's' if nmonths != 1 else ''
22 if nmonths < 10:
23 nmonths = CARDINALS[nmonths]
24 else:
25 nmonths = str(nmonths)
26 return "%s month%s" % (nmonths, plural)
27
28
29 db = None # This global is wired in wireup. It's an instance of
30 # gittip.postgres.PostgresManager.
31
32 # Not sure we won't want this for something yet. Prune if you don't find it in
33 # the codebase in a month.
34 OLD_OLD_AMOUNTS= [Decimal(a) for a in ('0.00', '0.08', '0.16', '0.32', '0.64', '1.28')]
35 OLD_AMOUNTS= [Decimal(a) for a in ('0.25',)]
36
37 AMOUNTS= [Decimal(a) for a in ('0.00', '1.00', '3.00', '6.00', '12.00', '24.00')]
38
39
40 __version__ = "~~VERSION~~"
41
42
43 def get_tip(tipper, tippee):
44 """Given two user ids, return a Decimal.
45 """
46 TIP = """\
47
48 SELECT amount
49 FROM tips
50 WHERE tipper=%s
51 AND tippee=%s
52 ORDER BY mtime DESC
53 LIMIT 1
54
55 """
56 rec = db.fetchone(TIP, (tipper, tippee))
57 if rec is None:
58 tip = Decimal(0.00)
59 else:
60 tip = rec['amount']
61 return tip
62
63
64 def get_backed_amount(participant_id):
65 """Given a unicode, return a Decimal.
66 """
67
68 BACKED = """\
69
70 SELECT sum(amount) AS backed
71 FROM ( SELECT DISTINCT ON (tipper)
72 amount
73 , tipper
74 FROM tips
75 JOIN participants p ON p.id = tipper
76 WHERE tippee=%s
77 AND last_bill_result = ''
78 ORDER BY tipper
79 , mtime DESC
80 ) AS foo
81
82 """
83 rec = db.fetchone(BACKED, (participant_id,))
84 if rec is None:
85 amount = None
86 else:
87 amount = rec['backed'] # might be None
88
89 if amount is None:
90 amount = Decimal(0.00)
91
92 return amount
93
94
95 def get_number_of_backers(participant_id):
96 """Given a unicode, return an int.
97 """
98
99 BACKED = """\
100
101 SELECT count(amount) AS nbackers
102 FROM ( SELECT DISTINCT ON (tipper)
103 amount
104 , tipper
105 FROM tips
106 JOIN participants p ON p.id = tipper
107 WHERE tippee=%s
108 AND last_bill_result = ''
109 ORDER BY tipper
110 , mtime DESC
111 ) AS foo
112 WHERE amount > 0
113
114 """
115 rec = db.fetchone(BACKED, (participant_id,))
116 if rec is None:
117 nbackers = None
118 else:
119 nbackers = rec['nbackers'] # might be None
120
121 if nbackers is None:
122 nbackers = 0
123
124 return nbackers
125
126
127 def get_tips_and_total(tipper, for_payday=False, db=None):
128 """Given a participant id and a date, return a list and a Decimal.
129
130 This function is used to populate a participant's page for their own
131 viewing pleasure, and also by the payday function. If for_payday is not
132 False it must be a date object.
133
134 A half-injected dependency, that's what db is.
135
136 """
137 if db is None:
138 from gittip import db
139
140 if for_payday:
141
142 # For payday we want the oldest relationship to be paid first.
143 order_by = "ctime ASC"
144
145
146 # This is where it gets crash-proof.
147 # ==================================
148 # We need to account for the fact that we may have crashed during
149 # Payday and we're re-running that function. We only want to select
150 # tips that existed before Payday started, but haven't been processed
151 # as part of this Payday yet.
152 #
153 # It's a bug if the paydays subselect returns > 1 rows.
154 #
155 # XXX If we crash during Payday and we rerun it after a timezone
156 # change, will we get burned? How?
157
158 ts_filter = """\
159
160 AND mtime < %s
161 AND ( SELECT id
162 FROM transfers
163 WHERE tipper=t.tipper
164 AND tippee=t.tippee
165 AND timestamp >= %s
166 ) IS NULL
167
168 """
169 args = (tipper, for_payday, for_payday)
170 else:
171 order_by = "amount DESC"
172 ts_filter = ""
173 args = (tipper,)
174
175 TIPS = """\
176
177 SELECT * FROM (
178 SELECT DISTINCT ON (tippee)
179 amount
180 , tippee
181 , t.ctime
182 , p.claimed_time
183 FROM tips t
184 JOIN participants p ON p.id = t.tippee
185 WHERE tipper = %%s
186 %s
187 ORDER BY tippee
188 , t.mtime DESC
189 ) AS foo
190 ORDER BY %s
191 , tippee
192
193 """ % (ts_filter, order_by) # XXX, No injections here, right?!
194 tips = list(db.fetchall(TIPS, args))
195
196
197 # Compute the total.
198 # ==================
199 # For payday we only want to process payments to tippees who have
200 # themselves opted into Gittip. For the tipper's profile page we want to
201 # show the total amount they've pledged (so they're not surprised when
202 # someone *does* start accepting tips and all of a sudden they're hit with
203 # bigger charges.
204
205 if for_payday:
206 to_total = [t for t in tips if t['claimed_time'] is not None]
207 else:
208 to_total = tips
209 total = sum([t['amount'] for t in to_total])
210
211 if not total:
212 # If to_total is an empty list then total is int 0. We want a Decimal.
213 total = Decimal('0.00')
214
215 return tips, total
216
217
218 # canonizer
219 # =========
220 # This is an Aspen hook to ensure that requests are served on a certain root
221 # URL, even if multiple domains point to the application.
222
223 class X: pass
224 canonical_scheme = None
225 canonical_host = None
226
227 def canonize(request):
228 """Enforce a certain scheme and hostname. Store these on request as well.
229 """
230 scheme = request.headers.get('X-Forwarded-Proto', 'http') # per Heroku
231 host = request.headers['Host']
232 bad_scheme = scheme != canonical_scheme
233 bad_host = bool(canonical_host) and (host != canonical_host)
234 # '' and False => ''
235 if bad_scheme or bad_host:
236 url = '%s://%s' % (canonical_scheme, canonical_host)
237 if request.line.method in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
238 # Redirect to a particular path for idempotent methods.
239 url += request.line.uri.path.raw
240 if request.line.uri.querystring:
241 url += '?' + request.line.uri.querystring.raw
242 else:
243 # For non-idempotent methods, redirect to homepage.
244 url += '/'
245 request.redirect(url, permanent=True)
246
247
248 def configure_payments(request):
249 # Work-around for https://github.com/balanced/balanced-python/issues/5
250 import balanced
251 balanced.configure(os.environ['BALANCED_API_SECRET'])
252
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/gittip/__init__.py b/gittip/__init__.py
--- a/gittip/__init__.py
+++ b/gittip/__init__.py
@@ -215,6 +215,17 @@
return tips, total
+def get_histogram_of_giving(user):
+ SQL = """
+ SELECT amount, count(amount) num_contributing FROM tips t WHERE
+ tippee=%s GROUP BY (amount)
+ """
+ results = dict()
+ for amount_dict in db.fetchall(SQL, (user,)):
+ results[amount_dict['amount']] = amount_dict['num_contributing']
+ return results
+
+
# canonizer
# =========
# This is an Aspen hook to ensure that requests are served on a certain root
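As a usage sketch of the `get_histogram_of_giving` helper added by the diff above — the participant id and the printed format are hypothetical, and the call assumes the application's `db` has already been wired up:

```python
from gittip import get_histogram_of_giving  # helper added by the diff above

# Hypothetical result for a participant backed by two funders:
# {Decimal('1.00'): 1, Decimal('12.00'): 1}
histogram = get_histogram_of_giving('alice')
for amount in sorted(histogram):
    print('$%s [%s] %s' % (amount, '-' * 3 * histogram[amount], histogram[amount]))
```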
| {"golden_diff": "diff --git a/gittip/__init__.py b/gittip/__init__.py\n--- a/gittip/__init__.py\n+++ b/gittip/__init__.py\n@@ -215,6 +215,17 @@\n return tips, total\n \n \n+def get_histogram_of_giving(user):\n+ SQL = \"\"\"\n+ SELECT amount, count(amount) num_contributing FROM tips t WHERE\n+ tippee=%s GROUP BY (amount)\n+ \"\"\"\n+ results = dict()\n+ for amount_dict in db.fetchall(SQL, (user,)):\n+ results[amount_dict['amount']] = amount_dict['num_contributing']\n+ return results\n+\n+\n # canonizer\n # =========\n # This is an Aspen hook to ensure that requests are served on a certain root\n", "issue": "Show how many total fundees and the donation amount they're in for.\nAs a reciever of money I'd like to know how diversified my $13/wk is so that I know how volatile my income is likely to be.\n\nHow I see this working is a Histogram similar to:\n\n $1 [---] 1\n $3 []\n $6 []\n$12 [---] 1\n$24 []\n\nwhich shows that I have 2 funders, one in for $1 and one in for $12.\n\n", "before_files": [{"content": "import datetime\nimport locale\nimport os\nfrom decimal import Decimal\n\n\ntry: # XXX This can't be right.\n locale.setlocale(locale.LC_ALL, \"en_US.utf8\")\nexcept locale.Error:\n locale.setlocale(locale.LC_ALL, \"en_US.UTF-8\")\n\n\nBIRTHDAY = datetime.date(2012, 6, 1)\nCARDINALS = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']\nMONTHS = [None, 'January', 'February', 'March', 'April', 'May', 'June', 'July',\n 'August', 'September', 'October', 'November', 'December']\n\ndef age():\n today = datetime.date.today()\n nmonths = today.month - BIRTHDAY.month\n plural = 's' if nmonths != 1 else ''\n if nmonths < 10:\n nmonths = CARDINALS[nmonths]\n else:\n nmonths = str(nmonths)\n return \"%s month%s\" % (nmonths, plural)\n\n\ndb = None # This global is wired in wireup. It's an instance of\n # gittip.postgres.PostgresManager.\n\n# Not sure we won't want this for something yet. 
Prune if you don't find it in\n# the codebase in a month.\nOLD_OLD_AMOUNTS= [Decimal(a) for a in ('0.00', '0.08', '0.16', '0.32', '0.64', '1.28')]\nOLD_AMOUNTS= [Decimal(a) for a in ('0.25',)]\n\nAMOUNTS= [Decimal(a) for a in ('0.00', '1.00', '3.00', '6.00', '12.00', '24.00')]\n\n\n__version__ = \"~~VERSION~~\"\n\n\ndef get_tip(tipper, tippee):\n \"\"\"Given two user ids, return a Decimal.\n \"\"\"\n TIP = \"\"\"\\\n\n SELECT amount\n FROM tips\n WHERE tipper=%s\n AND tippee=%s\n ORDER BY mtime DESC\n LIMIT 1\n\n \"\"\"\n rec = db.fetchone(TIP, (tipper, tippee))\n if rec is None:\n tip = Decimal(0.00)\n else:\n tip = rec['amount']\n return tip\n\n\ndef get_backed_amount(participant_id):\n \"\"\"Given a unicode, return a Decimal.\n \"\"\"\n\n BACKED = \"\"\"\\\n\n SELECT sum(amount) AS backed\n FROM ( SELECT DISTINCT ON (tipper)\n amount\n , tipper\n FROM tips\n JOIN participants p ON p.id = tipper\n WHERE tippee=%s\n AND last_bill_result = ''\n ORDER BY tipper\n , mtime DESC\n ) AS foo\n\n \"\"\"\n rec = db.fetchone(BACKED, (participant_id,))\n if rec is None:\n amount = None\n else:\n amount = rec['backed'] # might be None\n\n if amount is None:\n amount = Decimal(0.00)\n\n return amount\n\n\ndef get_number_of_backers(participant_id):\n \"\"\"Given a unicode, return an int.\n \"\"\"\n\n BACKED = \"\"\"\\\n\n SELECT count(amount) AS nbackers\n FROM ( SELECT DISTINCT ON (tipper)\n amount\n , tipper\n FROM tips\n JOIN participants p ON p.id = tipper\n WHERE tippee=%s\n AND last_bill_result = ''\n ORDER BY tipper\n , mtime DESC\n ) AS foo\n WHERE amount > 0\n\n \"\"\"\n rec = db.fetchone(BACKED, (participant_id,))\n if rec is None:\n nbackers = None\n else:\n nbackers = rec['nbackers'] # might be None\n\n if nbackers is None:\n nbackers = 0\n\n return nbackers\n\n\ndef get_tips_and_total(tipper, for_payday=False, db=None):\n \"\"\"Given a participant id and a date, return a list and a Decimal.\n\n This function is used to populate a participant's page for their own\n viewing pleasure, and also by the payday function. If for_payday is not\n False it must be a date object.\n\n A half-injected dependency, that's what db is.\n\n \"\"\"\n if db is None:\n from gittip import db\n\n if for_payday:\n\n # For payday we want the oldest relationship to be paid first.\n order_by = \"ctime ASC\"\n\n\n # This is where it gets crash-proof.\n # ==================================\n # We need to account for the fact that we may have crashed during\n # Payday and we're re-running that function. We only want to select\n # tips that existed before Payday started, but haven't been processed\n # as part of this Payday yet.\n #\n # It's a bug if the paydays subselect returns > 1 rows.\n #\n # XXX If we crash during Payday and we rerun it after a timezone\n # change, will we get burned? 
How?\n\n ts_filter = \"\"\"\\\n\n AND mtime < %s\n AND ( SELECT id\n FROM transfers\n WHERE tipper=t.tipper\n AND tippee=t.tippee\n AND timestamp >= %s\n ) IS NULL\n\n \"\"\"\n args = (tipper, for_payday, for_payday)\n else:\n order_by = \"amount DESC\"\n ts_filter = \"\"\n args = (tipper,)\n\n TIPS = \"\"\"\\\n\n SELECT * FROM (\n SELECT DISTINCT ON (tippee)\n amount\n , tippee\n , t.ctime\n , p.claimed_time\n FROM tips t\n JOIN participants p ON p.id = t.tippee\n WHERE tipper = %%s\n %s\n ORDER BY tippee\n , t.mtime DESC\n ) AS foo\n ORDER BY %s\n , tippee\n\n \"\"\" % (ts_filter, order_by) # XXX, No injections here, right?!\n tips = list(db.fetchall(TIPS, args))\n\n\n # Compute the total.\n # ==================\n # For payday we only want to process payments to tippees who have\n # themselves opted into Gittip. For the tipper's profile page we want to\n # show the total amount they've pledged (so they're not surprised when\n # someone *does* start accepting tips and all of a sudden they're hit with\n # bigger charges.\n\n if for_payday:\n to_total = [t for t in tips if t['claimed_time'] is not None]\n else:\n to_total = tips\n total = sum([t['amount'] for t in to_total])\n\n if not total:\n # If to_total is an empty list then total is int 0. We want a Decimal.\n total = Decimal('0.00')\n\n return tips, total\n\n\n# canonizer\n# =========\n# This is an Aspen hook to ensure that requests are served on a certain root\n# URL, even if multiple domains point to the application.\n\nclass X: pass\ncanonical_scheme = None\ncanonical_host = None\n\ndef canonize(request):\n \"\"\"Enforce a certain scheme and hostname. Store these on request as well.\n \"\"\"\n scheme = request.headers.get('X-Forwarded-Proto', 'http') # per Heroku\n host = request.headers['Host']\n bad_scheme = scheme != canonical_scheme\n bad_host = bool(canonical_host) and (host != canonical_host)\n # '' and False => ''\n if bad_scheme or bad_host:\n url = '%s://%s' % (canonical_scheme, canonical_host)\n if request.line.method in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):\n # Redirect to a particular path for idempotent methods.\n url += request.line.uri.path.raw\n if request.line.uri.querystring:\n url += '?' + request.line.uri.querystring.raw\n else:\n # For non-idempotent methods, redirect to homepage.\n url += '/'\n request.redirect(url, permanent=True)\n\n\ndef configure_payments(request):\n # Work-around for https://github.com/balanced/balanced-python/issues/5\n import balanced\n balanced.configure(os.environ['BALANCED_API_SECRET'])\n", "path": "gittip/__init__.py"}], "after_files": [{"content": "import datetime\nimport locale\nimport os\nfrom decimal import Decimal\n\n\ntry: # XXX This can't be right.\n locale.setlocale(locale.LC_ALL, \"en_US.utf8\")\nexcept locale.Error:\n locale.setlocale(locale.LC_ALL, \"en_US.UTF-8\")\n\n\nBIRTHDAY = datetime.date(2012, 6, 1)\nCARDINALS = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']\nMONTHS = [None, 'January', 'February', 'March', 'April', 'May', 'June', 'July',\n 'August', 'September', 'October', 'November', 'December']\n\ndef age():\n today = datetime.date.today()\n nmonths = today.month - BIRTHDAY.month\n plural = 's' if nmonths != 1 else ''\n if nmonths < 10:\n nmonths = CARDINALS[nmonths]\n else:\n nmonths = str(nmonths)\n return \"%s month%s\" % (nmonths, plural)\n\n\ndb = None # This global is wired in wireup. It's an instance of\n # gittip.postgres.PostgresManager.\n\n# Not sure we won't want this for something yet. 
Prune if you don't find it in\n# the codebase in a month.\nOLD_OLD_AMOUNTS= [Decimal(a) for a in ('0.00', '0.08', '0.16', '0.32', '0.64', '1.28')]\nOLD_AMOUNTS= [Decimal(a) for a in ('0.25',)]\n\nAMOUNTS= [Decimal(a) for a in ('0.00', '1.00', '3.00', '6.00', '12.00', '24.00')]\n\n\n__version__ = \"~~VERSION~~\"\n\n\ndef get_tip(tipper, tippee):\n \"\"\"Given two user ids, return a Decimal.\n \"\"\"\n TIP = \"\"\"\\\n\n SELECT amount\n FROM tips\n WHERE tipper=%s\n AND tippee=%s\n ORDER BY mtime DESC\n LIMIT 1\n\n \"\"\"\n rec = db.fetchone(TIP, (tipper, tippee))\n if rec is None:\n tip = Decimal(0.00)\n else:\n tip = rec['amount']\n return tip\n\n\ndef get_backed_amount(participant_id):\n \"\"\"Given a unicode, return a Decimal.\n \"\"\"\n\n BACKED = \"\"\"\\\n\n SELECT sum(amount) AS backed\n FROM ( SELECT DISTINCT ON (tipper)\n amount\n , tipper\n FROM tips\n JOIN participants p ON p.id = tipper\n WHERE tippee=%s\n AND last_bill_result = ''\n ORDER BY tipper\n , mtime DESC\n ) AS foo\n\n \"\"\"\n rec = db.fetchone(BACKED, (participant_id,))\n if rec is None:\n amount = None\n else:\n amount = rec['backed'] # might be None\n\n if amount is None:\n amount = Decimal(0.00)\n\n return amount\n\n\ndef get_number_of_backers(participant_id):\n \"\"\"Given a unicode, return an int.\n \"\"\"\n\n BACKED = \"\"\"\\\n\n SELECT count(amount) AS nbackers\n FROM ( SELECT DISTINCT ON (tipper)\n amount\n , tipper\n FROM tips\n JOIN participants p ON p.id = tipper\n WHERE tippee=%s\n AND last_bill_result = ''\n ORDER BY tipper\n , mtime DESC\n ) AS foo\n WHERE amount > 0\n\n \"\"\"\n rec = db.fetchone(BACKED, (participant_id,))\n if rec is None:\n nbackers = None\n else:\n nbackers = rec['nbackers'] # might be None\n\n if nbackers is None:\n nbackers = 0\n\n return nbackers\n\n\ndef get_tips_and_total(tipper, for_payday=False, db=None):\n \"\"\"Given a participant id and a date, return a list and a Decimal.\n\n This function is used to populate a participant's page for their own\n viewing pleasure, and also by the payday function. If for_payday is not\n False it must be a date object.\n\n A half-injected dependency, that's what db is.\n\n \"\"\"\n if db is None:\n from gittip import db\n\n if for_payday:\n\n # For payday we want the oldest relationship to be paid first.\n order_by = \"ctime ASC\"\n\n\n # This is where it gets crash-proof.\n # ==================================\n # We need to account for the fact that we may have crashed during\n # Payday and we're re-running that function. We only want to select\n # tips that existed before Payday started, but haven't been processed\n # as part of this Payday yet.\n #\n # It's a bug if the paydays subselect returns > 1 rows.\n #\n # XXX If we crash during Payday and we rerun it after a timezone\n # change, will we get burned? 
How?\n\n ts_filter = \"\"\"\\\n\n AND mtime < %s\n AND ( SELECT id\n FROM transfers\n WHERE tipper=t.tipper\n AND tippee=t.tippee\n AND timestamp >= %s\n ) IS NULL\n\n \"\"\"\n args = (tipper, for_payday, for_payday)\n else:\n order_by = \"amount DESC\"\n ts_filter = \"\"\n args = (tipper,)\n\n TIPS = \"\"\"\\\n\n SELECT * FROM (\n SELECT DISTINCT ON (tippee)\n amount\n , tippee\n , t.ctime\n , p.claimed_time\n FROM tips t\n JOIN participants p ON p.id = t.tippee\n WHERE tipper = %%s\n %s\n ORDER BY tippee\n , t.mtime DESC\n ) AS foo\n ORDER BY %s\n , tippee\n\n \"\"\" % (ts_filter, order_by) # XXX, No injections here, right?!\n tips = list(db.fetchall(TIPS, args))\n\n\n # Compute the total.\n # ==================\n # For payday we only want to process payments to tippees who have\n # themselves opted into Gittip. For the tipper's profile page we want to\n # show the total amount they've pledged (so they're not surprised when\n # someone *does* start accepting tips and all of a sudden they're hit with\n # bigger charges.\n\n if for_payday:\n to_total = [t for t in tips if t['claimed_time'] is not None]\n else:\n to_total = tips\n total = sum([t['amount'] for t in to_total])\n\n if not total:\n # If to_total is an empty list then total is int 0. We want a Decimal.\n total = Decimal('0.00')\n\n return tips, total\n\n\ndef get_histogram_of_giving(user):\n SQL = \"\"\"\n SELECT amount, count(amount) num_contributing FROM tips t WHERE\n tippee=%s GROUP BY (amount)\n \"\"\"\n results = dict()\n for amount_dict in db.fetchall(SQL, (user,)):\n results[amount_dict['amount']] = amount_dict['num_contributing']\n return results\n\n\n# canonizer\n# =========\n# This is an Aspen hook to ensure that requests are served on a certain root\n# URL, even if multiple domains point to the application.\n\nclass X: pass\ncanonical_scheme = None\ncanonical_host = None\n\ndef canonize(request):\n \"\"\"Enforce a certain scheme and hostname. Store these on request as well.\n \"\"\"\n scheme = request.headers.get('X-Forwarded-Proto', 'http') # per Heroku\n host = request.headers['Host']\n bad_scheme = scheme != canonical_scheme\n bad_host = bool(canonical_host) and (host != canonical_host)\n # '' and False => ''\n if bad_scheme or bad_host:\n url = '%s://%s' % (canonical_scheme, canonical_host)\n if request.line.method in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):\n # Redirect to a particular path for idempotent methods.\n url += request.line.uri.path.raw\n if request.line.uri.querystring:\n url += '?' + request.line.uri.querystring.raw\n else:\n # For non-idempotent methods, redirect to homepage.\n url += '/'\n request.redirect(url, permanent=True)\n\n\ndef configure_payments(request):\n # Work-around for https://github.com/balanced/balanced-python/issues/5\n import balanced\n balanced.configure(os.environ['BALANCED_API_SECRET'])\n", "path": "gittip/__init__.py"}]} |
gh_patches_debug_1610 | rasdani/github-patches | git_diff | chainer__chainer-3129 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ChainList doesn't warn self.init_scope()
The following code has a bug. Chainer doesn't warn the usage of `with self.init_scope()` in the subclass of ChainList. Could you add a warning message for such mistakes?
* Code to reproduce
```python
import chainer
from chainer import Chain, ChainList
import chainer.functions as F
import chainer.links as L
import numpy as np
class C(ChainList): # should be 'class C(Chain)'
def __init__(self):
super().__init__()
with self.init_scope():
self.l1 = L.Linear(5, 5)
self.l2 = L.Linear(5, 5)
def __call__(self, x):
return self.l2(F.relu(self.l1(x)))
c = C()
print(c.l1.W.data)
opt = chainer.optimizers.SGD()
opt.setup(c)
x = np.arange(10).reshape((2, 5)).astype(np.float32)
loss = F.sum(c(x))
opt.update(lambda: loss)
print(c.l1.W.data)
```
output:
```
[[ 0.22224635 0.13709065 -0.0590423 0.31734523 0.76646286]
[-0.09569775 -0.00810872 0.72896075 -0.50144166 -0.23909038]
[ 0.24655567 -0.59849507 0.05945947 -0.06633393 -0.05738653]
[-0.85418522 0.56111503 -0.4280332 -0.19436245 -0.09941436]
[-0.06522682 -0.43800679 0.7132498 0.49363273 -0.2827867 ]]
[[ 0.22224635 0.13709065 -0.0590423 0.31734523 0.76646286]
[-0.09569775 -0.00810872 0.72896075 -0.50144166 -0.23909038]
[ 0.24655567 -0.59849507 0.05945947 -0.06633393 -0.05738653]
[-0.85418522 0.56111503 -0.4280332 -0.19436245 -0.09941436]
[-0.06522682 -0.43800679 0.7132498 0.49363273 -0.2827867 ]]
```
No update takes place because the link `l1` is not registered.
--- END ISSUE ---
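For comparison, a minimal corrected repro, assuming the user's intent was named child links. Switching the base class to `Chain` is the user-side fix; the issue itself asks Chainer to warn on the `ChainList` misuse.

```python
import numpy as np

import chainer
import chainer.functions as F
import chainer.links as L


class C(chainer.Chain):  # Chain registers links assigned under init_scope()
    def __init__(self):
        super(C, self).__init__()
        with self.init_scope():
            self.l1 = L.Linear(5, 5)
            self.l2 = L.Linear(5, 5)

    def __call__(self, x):
        return self.l2(F.relu(self.l1(x)))


c = C()
opt = chainer.optimizers.SGD()
opt.setup(c)
x = np.arange(10).reshape((2, 5)).astype(np.float32)
opt.update(lambda: F.sum(c(x)))  # c.l1.W is now updated, unlike the ChainList version
```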
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `chainer/link.py`
Content:
```
1 import collections
2 import contextlib
3 import copy
4 import warnings
5
6 import numpy
7 import six
8
9 from chainer import cuda
10 from chainer import initializers
11 from chainer import variable
12
13
14 def _is_shape(value):
15 if value is None:
16 return True
17 elif isinstance(value, collections.Sequence):
18 try:
19 return all(int(x) for x in value)
20 except TypeError:
21 return False
22 try:
23 return int(value)
24 except TypeError:
25 return False
26
27
28 def _ensure_shape_dtype(value):
29 # Return value paired with dtype FP32 if it is a shape.
30 if _is_shape(value):
31 return value, 'f'
32 # Otherwise, returns it with assuming a shape-dtype pair.
33 else:
34 return value
35
36
37 class Link(object):
38
39 """Building block of model definitions.
40
41 Link is a building block of neural network models that support various
42 features like handling parameters, defining network fragments,
43 serialization, etc.
44
45 Link is the primitive structure for the model definitions. It supports
46 management of parameter variables and *persistent values* that should be
47 incorporated to serialization.
48
49 Parameter is an instance of :class:`~chainer.Parameter` registered to a
50 link. A :class:`~chainer.Parameter` object can be registered as a
51 parameter of the link by assigning it to an attribute within *an
52 initialization scope*, which is a code surrounded by a
53 :meth:`init_scope` context manager using the ``with`` statement.
54
55 Persistent values are arrays, scalars, or any other serializable values
56 registered via :meth:`register_persistent` or :meth:`add_persistent`.
57
58 .. note::
59 Whereas arbitrary serializable objects can be registered as persistent
60 values, it is strongly recommended to just register values that should
61 be treated as results of learning. A typical example of persistent
62 values is ones computed during training and required for testing, e.g.
63 running statistics for batch normalization.
64
65 Parameters and persistent values are referred by their names. They can be
66 accessed as attributes of the links. Link class itself manages the lists
67 of names of parameters and persistent values to distinguish parameters and
68 persistent values from other attributes.
69
70 Link can be composed into more complex models. This composition feature is
71 supported by child classes like :class:`Chain` and :class:`ChainList`. One
72 can create a chain by combining one or more links. See the documents for
73 these classes for details.
74
75 As noted above, Link supports the serialization protocol of the
76 :class:`~chainer.Serializer` class. **Note that only parameters and
77 persistent values are saved and loaded.** Other attributes are considered
78 as a part of user program (i.e. a part of network definition). In order to
79 construct a link from saved file, other attributes must be identically
80 reconstructed by user codes.
81
82 .. admonition:: Example
83
84 This is a simple example of custom link definition. Chainer itself also
85 provides many links defined under the :mod:`~chainer.links` module. They
86 might serve as examples, too.
87
88 Consider we want to define a simple primitive link that implements a
89 fully-connected layer based on the :func:`~functions.linear` function.
90 Note that this function takes input units, a weight variable, and a bias
91 variable as arguments. Then, the fully-connected layer can be defined as
92 follows::
93
94 import chainer
95 import chainer.functions as F
96 from chainer import initializers
97 import numpy as np
98
99 class LinearLayer(chainer.Link):
100
101 def __init__(self, n_in, n_out):
102 super(LinearLayer, self).__init__()
103 with self.init_scope():
104 self.W = chainer.Parameter(
105 initializers.Normal(), (n_out, n_in))
106 self.b = chainer.Parameter(
107 initializers.Zero(), (n_out,))
108
109 def __call__(self, x):
110 return F.linear(x, self.W, self.b)
111
112 This example shows that a user can define arbitrary parameters and use
113 them in any methods. Links typically implement the ``__call__``
114 operator, although they can also provide other methods to implement the
115 forward propagation.
116
117 Args:
118 params: *(deprecated since v2.0.0)* Names, shapes, and optional dtypes
119 of initial parameters. The keywords are used as the parameter
120 names and the corresponding values consist either of the shape or
121 a tuple of shape and a dtype ``(shape, dtype)``. If only the shape
122 is supplied, the default dtype will be used.
123
124 Attributes:
125 ~Link.name (str): Name of this link, given by the parent chain (if
126 exists).
127
128 """
129
130 def __init__(self, **params):
131 self._params = set()
132 self._persistent = set()
133 self._cpu = True
134 self._device_id = None
135 self._within_init_scope = False
136 self.name = None
137
138 for name, value in six.iteritems(params):
139 # Note: deprecation warning will be raised in add_param
140 shape, dtype = _ensure_shape_dtype(value)
141 self.add_param(name, shape, dtype=dtype)
142
143 @property
144 def xp(self):
145 """Array module for this link.
146
147 Depending on which of CPU/GPU this link is on, this property returns
148 :mod:`numpy` or :mod:`cupy`.
149
150 """
151 return numpy if self._cpu else cuda.cupy
152
153 @property
154 def within_init_scope(self):
155 """True if the current code is inside of an initialization scope.
156
157 See :meth:`init_scope` for the details of the initialization scope.
158
159 """
160 return getattr(self, '_within_init_scope', False)
161
162 @contextlib.contextmanager
163 def init_scope(self):
164 """Creates an initialization scope.
165
166 This method returns a context manager object that enables registration
167 of parameters (and links for :class:`~chainer.Chain`) by an assignment.
168 A :class:`~chainer.Parameter` object can be automatically registered
169 by assigning it to an attribute under this context manager.
170
171 .. admonition:: Example
172
173 In most cases, the parameter registration is done in the
174 initializer method. Using the ``init_scope`` method, we can
175 simply assign a :class:`~chainer.Parameter` object to register
176 it to the link.
177
178 .. code-block:: python
179
180 class MyLink(chainer.Link):
181 def __init__(self):
182 super().__init__()
183 with self.init_scope():
184 self.W = chainer.Parameter(0, (10, 5))
185 self.b = chainer.Parameter(0, (5,))
186
187 """
188 old_flag = self.within_init_scope
189 self._within_init_scope = True
190 try:
191 yield
192 finally:
193 self._within_init_scope = old_flag
194
195 def __setattr__(self, name, value):
196 if self.within_init_scope and isinstance(value, variable.Parameter):
197 value.name = name
198 if not self._cpu:
199 value.to_gpu(self._device_id)
200 self._params.add(name)
201 self._persistent.discard(name)
202 super(Link, self).__setattr__(name, value)
203
204 def __delattr__(self, name):
205 self._params.discard(name)
206 self._persistent.discard(name)
207 super(Link, self).__delattr__(name)
208
209 def add_param(self, name, shape=None, dtype=numpy.float32,
210 initializer=None):
211 """Registers a parameter to the link.
212
213 .. deprecated:: v2.0.0
214
215 Assign a :class:`~chainer.Parameter` object directly to an
216 attribute within :meth:`an initialization scope <init_scope>`
217 instead. For example, the following code
218
219 .. code-block:: python
220
221 link.add_param('W', shape=(5, 3))
222
223 can be replaced by the following assignment.
224
225 .. code-block:: python
226
227 with self.init_scope():
228 link.W = chainer.Parameter(None, (5, 3))
229
230 The latter one is easier for IDEs to keep track of the attribute's
231 type.
232
233 Args:
234 name (str): Name of the parameter. This name is also used as the
235 attribute name.
236 shape (int or tuple of ints): Shape of the parameter array. If it
237 is omitted, the parameter variable is left uninitialized.
238 dtype: Data type of the parameter array.
239 initializer: If it is not ``None``, the data is initialized with
240 the given initializer. If it is an array, the data is directly
241 initialized by it. If it is callable, it is used as a weight
242 initializer. Note that in these cases, ``dtype`` argument is
243 ignored.
244
245 """
246 warnings.warn('''\
247 Parameter registeration via Link.__init__ and Link.add_param are deprecated.
248 Assign a Parameter object directly to an attribute within a \
249 "with Link.init_scope():" block instead.
250 ''', DeprecationWarning)
251 if name in self.__dict__:
252 raise AttributeError(
253 'cannot register a new parameter %s: attribute exists'
254 % name)
255 if initializer is None:
256 initializer = initializers.NaN(dtype)
257 param = variable.Parameter(initializer, shape)
258 with self.init_scope():
259 setattr(self, name, param)
260
261 def add_persistent(self, name, value):
262 """Registers a persistent value to the link.
263
264 The registered value is saved and loaded on serialization and
265 deserialization. The value is set to an attribute of the link.
266
267 Args:
268 name (str): Name of the persistent value. This name is also used
269 for the attribute name.
270 value: Value to be registered.
271
272 """
273 d = self.__dict__
274 if name in d:
275 raise AttributeError(
276 'cannot register a new persistent value %s: attribute exists'
277 % name)
278 self._persistent.add(name)
279 self._params.discard(name)
280 d[name] = value
281
282 def register_persistent(self, name):
283 """Registers an attribute of a given name as a persistent value.
284
285 This is a convenient method to register an existing attribute as a
286 persistent value. If ``name`` has been already registered as a
287 parameter, this method removes it from the list of parameter names
288 and re-registers it as a persistent value.
289
290 Args:
291 name (str): Name of the attribute to be registered.
292
293 """
294 if not hasattr(self, name):
295 raise AttributeError(
296 'cannot register non-existent attribute %s as a persistent '
297 'value' % name)
298 self._persistent.add(name)
299 self._params.discard(name)
300
301 def copy(self):
302 """Copies the link hierarchy to new one.
303
304 The whole hierarchy rooted by this link is copied. The copy is
305 basically shallow, except that the parameter variables are also
306 shallowly copied. It means that the parameter variables of copied one
307 are different from ones of original link, while they share the data and
308 gradient arrays.
309
310 The name of the link is reset on the copy, since the copied instance
311 does not belong to the original parent chain (even if exists).
312
313 Returns:
314 Link: Copied link object.
315
316 """
317 ret = copy.copy(self)
318 ret._params = set(self._params)
319 ret._persistent = set(self._persistent)
320 ret.name = None
321 d = ret.__dict__
322 for name in ret._params:
323 d[name] = copy.copy(d[name])
324 d[name].grad = None
325 return ret
326
327 def to_cpu(self):
328 """Copies parameter variables and persistent values to CPU.
329
330 This method does not handle non-registered attributes. If some of such
331 attributes must be copied to CPU, the link implementation must
332 override this method to do so.
333
334 Returns: self
335
336 """
337 if self._cpu:
338 return self
339 d = self.__dict__
340 for name in self._params:
341 d[name].to_cpu()
342 for name in self._persistent:
343 value = d[name]
344 if isinstance(value, cuda.ndarray):
345 d[name] = value.get()
346 self._cpu = True
347 self._device_id = None
348 return self
349
350 def to_gpu(self, device=None):
351 """Copies parameter variables and persistent values to GPU.
352
353 This method does not handle non-registered attributes. If some of such
354 attributes must be copied to GPU, the link implementation must
355 override this method to do so.
356
357 Args:
358 device: Target device specifier. If omitted, the current device is
359 used.
360
361 Returns: self
362
363 """
364 cuda.check_cuda_available()
365 if not self._cpu:
366 return self
367 d = self.__dict__
368 with cuda._get_device(device):
369 for name in self._params:
370 d[name].to_gpu()
371 for name in self._persistent:
372 value = d[name]
373 if isinstance(value, numpy.ndarray):
374 d[name] = cuda.to_gpu(value)
375 self._device_id = cuda.cupy.cuda.get_device_id()
376 self._cpu = False
377 return self
378
379 def params(self, include_uninit=True):
380 """Returns a generator of all parameters under the link hierarchy.
381
382 Args:
383 include_uninit (bool): If ``True``, it also generates uninitialized
384 parameters.
385
386 Returns:
387 A generator object that generates all parameters.
388
389 """
390 d = self.__dict__
391 for name in self._params:
392 if include_uninit or d[name].data is not None:
393 yield d[name]
394
395 def namedparams(self, include_uninit=True):
396 """Returns a generator of all (path, param) pairs under the hierarchy.
397
398 Args:
399 include_uninit (bool): If ``True``, it also generates uninitialized
400 parameters.
401
402 Returns:
403 A generator object that generates all (path, parameter) pairs. The
404 paths are relative from this link.
405
406 """
407 d = self.__dict__
408 for name in self._params:
409 if include_uninit or d[name].data is not None:
410 yield '/' + name, d[name]
411
412 def links(self, skipself=False):
413 """Returns a generator of all links under the hierarchy.
414
415 Args:
416 skipself (bool): If ``True``, then the generator skips this link
417 and starts with the first child link.
418
419 Returns:
420 A generator object that generates all links.
421
422 """
423 if not skipself:
424 yield self
425
426 def namedlinks(self, skipself=False):
427 """Returns a generator of all (path, link) pairs under the hierarchy.
428
429 Args:
430 skipself (bool): If ``True``, then the generator skips this link
431 and starts with the first child link.
432
433 Returns:
434 A generator object that generates all (path, link) pairs.
435
436 """
437 if not skipself:
438 yield '/', self
439
440 def children(self):
441 """Returns a generator of all child links.
442
443 Returns:
444 A generator object that generates all child links.
445
446 """
447 if 0:
448 yield
449
450 def copyparams(self, link):
451 """Copies all parameters from given link.
452
453 This method copies data arrays of all parameters in the hierarchy. The
454 copy is even done across the host and devices. Note that this method
455 does not copy the gradient arrays.
456
457 Args:
458 link (Link): Source link object.
459
460 """
461 src = link.__dict__
462 dst = self.__dict__
463 for name in self._params:
464 dst[name].copydata(src[name])
465
466 def cleargrads(self):
467 """Clears all gradient arrays.
468
469 This method should be called before the backward computation at every
470 iteration of the optimization.
471
472 """
473 for param in self.params():
474 param.cleargrad()
475
476 def zerograds(self):
477 """Initializes all gradient arrays by zero.
478
479 This method can be used for the same purpose of cleargrads, but less
480 efficient. This method is left for backward compatibility.
481
482 .. deprecated:: v1.15
483 Use :meth:`cleargrads` instead.
484
485 """
486 warnings.warn(
487 'Link.zerograds is deprecated. Use Link.cleargrads instead.',
488 DeprecationWarning)
489 for param in self.params():
490 param.zerograd()
491
492 def addgrads(self, link):
493 """Accumulates gradient values from given link.
494
495 This method adds each gradient array of the given link to corresponding
496 gradient array of this link. The accumulation is even done across
497 host and different devices.
498
499 Args:
500 link (Link): Source link object.
501
502 """
503 src = link.__dict__
504 dst = self.__dict__
505 for name in self._params:
506 dst[name].addgrad(src[name])
507
508 def enable_update(self):
509 """Enables update rules of all parameters under the link hierarchy.
510
511 This method sets the :attr:`~chainer.UpdateRule.enabled` flag of the
512 update rule of each parameter variable to ``True``.
513
514 """
515 for param in self.params():
516 rule = param.update_rule
517 if rule is not None:
518 rule.enabled = True
519
520 def disable_update(self):
521 """Disables update rules of all parameters under the link hierarchy.
522
523 This method sets the :attr:`~chainer.UpdateRule.enabled` flag of the
524 update rule of each parameter variable to ``False``.
525
526 """
527 for param in self.params():
528 rule = param.update_rule
529 if rule is not None:
530 rule.enabled = False
531
532 @property
533 def update_enabled(self):
534 """``True`` if at least one parameter has an update rule enabled."""
535 for param in self.params():
536 rule = param.update_rule
537 if rule is not None and rule.enabled:
538 return True
539 return False
540
541 def serialize(self, serializer):
542 """Serializes the link object.
543
544 Args:
545 serializer (~chainer.AbstractSerializer): Serializer object.
546
547 """
548 d = self.__dict__
549 for name in self._params:
550 param = d[name]
551 data = serializer(name, param.data)
552 if param.data is None and data is not None:
553 # Initialize the parameter here
554 param.initialize(data.shape)
555 if isinstance(param.data, numpy.ndarray):
556 numpy.copyto(param.data, data)
557 else:
558 param.data.set(numpy.asarray(data))
559 for name in self._persistent:
560 d[name] = serializer(name, d[name])
561
562
563 class Chain(Link):
564
565 """Composable link with object-like interface.
566
567 Composability is one of the most important features of neural nets. Neural
568 net models consist of many reusable fragments, and each model itself might
569 be embedded into a larger learnable system. Chain enables us to write a
570 neural net based on composition, without bothering about routine works like
571 collecting parameters, serialization, copying the structure with parameters
572 shared, etc.
573
574 This class actually provides a way to compose one or more links into one
575 structure. A chain can contain one or more *child links*. Child link is a
576 link registered to the chain with its own name. The child link is stored to
577 an attribute of the chain with the name. User can write a whole model or a
578 fragment of neural nets as a child class of Chain.
579
580 Each chain itself is also a link. Therefore, one can combine chains into
581 higher-level chains. In this way, links and chains construct a *link
582 hierarchy*. Link hierarchy forms a tree structure, where each node is
583 identified by the path from the root. The path is represented by a string
584 like a file path in UNIX, consisting of names of nodes on the path, joined
585 by slashes ``/``.
586
587 A child link can be added just by assigning it to an attribute of the
588 chain within :meth:`an initialization scope <chainer.Link.init_scope>`.
589
590 The registered child link is saved and loaded on serialization and
591 deserialization, and involved in the optimization. The registered link
592 is called a child. The child link is accessible via :meth:`children`
593 generator, which returns a generator running through the children in
594 registered order.
595
596 On registration of a child link, its :attr:`~Link.name` attribute is also
597 set (or overwritten if the link has already been registered to another
598 chain).
599
600 .. admonition:: Example
601
602 This is a simple example of custom chain definition. Chainer itself also
603 provides some chains defined under the :mod:`~chainer.links` module.
604 They might serve as examples, too.
605
606 Consider we want to define a multi-layer perceptron consisting of two
607 hidden layers with rectifiers as activation functions. We can use the
608 :class:`~chainer.links.Linear` link as a building block::
609
610 import chainer
611 import chainer.functions as F
612 import chainer.links as L
613
614 class MultiLayerPerceptron(chainer.Chain):
615
616 def __init__(self, n_in, n_hidden, n_out):
617                 super(MultiLayerPerceptron, self).__init__()
618 with self.init_scope():
619 self.layer1 = L.Linear(n_in, n_hidden)
620 self.layer2 = L.Linear(n_hidden, n_hidden)
621 self.layer3 = L.Linear(n_hidden, n_out)
622
623 def __call__(self, x):
624 # Forward propagation
625 h1 = F.relu(self.layer1(x))
626 h2 = F.relu(self.layer2(h1))
627 return self.layer3(h2)
628
629 Child links are registered via the assignment within a
630 ``with self.init_scope():`` block. The forward propagation is often
631         implemented as the ``__call__`` operator, as in the above example, though
632 it is not mandatory.
633
634 Args:
635 links: Child links. The keywords are used as their names. The names are
636 also set to the links.
637
638 .. deprecated:: v2.0.0
639
640 Assign child links directly to attributes, instead.
641
642 """
643
644 def __init__(self, **links):
645 super(Chain, self).__init__()
646 self._children = set()
647
648 for name, link in six.iteritems(links):
649 self.add_link(name, link)
650
651 def __getitem__(self, name):
652 """Equivalent to getattr."""
653 return getattr(self, name)
654
655 def __setattr__(self, name, value):
656 if self.within_init_scope and isinstance(value, Link):
657 if hasattr(self, name):
658 raise AttributeError(
659 'cannot register a new link %s: attribute exists' % name)
660 value.name = name
661 self._children.add(name)
662 super(Chain, self).__setattr__(name, value)
663
664 def __delattr__(self, name):
665 self._children.discard(name)
666 super(Chain, self).__delattr__(name)
667
668 def add_link(self, name, link):
669 """Registers a child link to this chain.
670
671 .. deprecated:: v2.0.0
672
673 Assign the child link directly to an attribute within
674 :meth:`an initialization scope <chainer.Link.init_scope>`, instead.
675 For example, the following code
676
677 .. code-block:: python
678
679 chain.add_link('l1', L.Linear(3, 5))
680
681 can be replaced by the following line.
682
683 .. code-block:: python
684
685 with self.init_scope():
686 chain.l1 = L.Linear(3, 5)
687
688 The latter one is easier for IDEs to keep track of the attribute's
689 type.
690
691 Args:
692 name (str): Name of the child link. This name is also used as the
693 attribute name.
694 link (Link): The link object to be registered.
695
696 """
697 warnings.warn('''\
698 Child link registration via Chain.__init__ and Chain.add_link are deprecated.
699 Assign a Link object directly to an attribute within a \
700 "with link.init_scope():" block instead.
701 ''', DeprecationWarning)
702 if name in self.__dict__:
703 raise AttributeError(
704 'cannot register a new link %s: attribute exists' % name)
705 if not isinstance(link, Link):
706 raise TypeError('cannot register a non-link object as a child')
707 with self.init_scope():
708 setattr(self, name, link)
709
710 def copy(self):
711 ret = super(Chain, self).copy()
712 ret._children = set(ret._children)
713 d = ret.__dict__
714 for name in ret._children:
715 # copy child links recursively
716 copied = d[name].copy()
717 copied.name = name
718 d[name] = copied
719 return ret
720
721 def to_cpu(self):
722 super(Chain, self).to_cpu()
723 d = self.__dict__
724 for name in self._children:
725 d[name].to_cpu()
726 return self
727
728 def to_gpu(self, device=None):
729 with cuda._get_device(device):
730 super(Chain, self).to_gpu()
731 d = self.__dict__
732 for name in self._children:
733 d[name].to_gpu()
734 return self
735
736 def params(self, include_uninit=True):
737 for param in super(Chain, self).params(include_uninit):
738 yield param
739 d = self.__dict__
740 for name in self._children:
741 for param in d[name].params(include_uninit):
742 yield param
743
744 def namedparams(self, include_uninit=True):
745 for ret in super(Chain, self).namedparams(include_uninit):
746 yield ret
747 d = self.__dict__
748 for name in self._children:
749 prefix = '/' + name
750 for path, param in d[name].namedparams(include_uninit):
751 yield prefix + path, param
752
753 def links(self, skipself=False):
754 if not skipself:
755 yield self
756 d = self.__dict__
757 for name in self._children:
758 for link in d[name].links():
759 yield link
760
761 def namedlinks(self, skipself=False):
762 if not skipself:
763 yield '/', self
764 d = self.__dict__
765 for name in self._children:
766 child = d[name]
767 prefix = '/' + name
768 yield prefix, child
769 for path, link in d[name].namedlinks(True):
770 yield prefix + path, link
771
772 def children(self):
773 d = self.__dict__
774 for name in self._children:
775 yield d[name]
776
777 def copyparams(self, link):
778 super(Chain, self).copyparams(link)
779 src = link.__dict__
780 dst = self.__dict__
781 for name in self._children:
782 dst[name].copyparams(src[name])
783
784 def addgrads(self, link):
785 super(Chain, self).addgrads(link)
786 src = link.__dict__
787 dst = self.__dict__
788 for name in self._children:
789 dst[name].addgrads(src[name])
790
791 def serialize(self, serializer):
792 super(Chain, self).serialize(serializer)
793 d = self.__dict__
794 for name in self._children:
795 d[name].serialize(serializer[name])
796
797
798 class ChainList(Link):
799
800 """Composable link with list-like interface.
801
802     This is another example of a compositional link. Unlike :class:`Chain`, this
803 class can be used like a list of child links. Each child link is indexed by
804 a non-negative integer, and it maintains the current number of registered
805 child links. The :meth:`add_link` method inserts a new link at the end of
806     the list. It is useful to write a chain with an arbitrary number of
807     child links, e.g. an arbitrarily deep multi-layer perceptron.
808
809 Note that this class does not implement all methods of :class:`list`.
810
811 Args:
812 links: Initial child links.
813
814 """
815
816 def __init__(self, *links):
817 super(ChainList, self).__init__()
818 self._children = []
819
820 for link in links:
821 self.add_link(link)
822
823 def __getitem__(self, index):
824 """Returns the child at given index.
825
826 Args:
827 index (int): Index of the child in the list.
828
829 Returns:
830 Link: The ``index``-th child link.
831
832 """
833 return self._children[index]
834
835 def __iter__(self):
836 return iter(self._children)
837
838 def __len__(self):
839 """Returns the number of children."""
840 return len(self._children)
841
842 def append(self, link):
843 """Registers a child link and adds it to the tail of the list.
844
845 This is equivalent to :meth:`add_link`. This method has been added to
846 emulate the ``list`` interface.
847
848 Args:
849             link (Link): The link object to be registered.
850
851 """
852 self.add_link(link)
853
854 def add_link(self, link):
855 """Registers a child link and adds it to the tail of the list.
856
857 Args:
858 link (Link): The link object to be registered.
859
860 """
861 link.name = str(len(self._children))
862 self._children.append(link)
863
864 def copy(self):
865 ret = super(ChainList, self).copy()
866 ret._children = list(ret._children) # copy
867 children = ret._children
868 for i, child in enumerate(children):
869 child = child.copy()
870 child.name = str(i)
871 children[i] = child
872 return ret
873
874 def to_cpu(self):
875 super(ChainList, self).to_cpu()
876 for link in self._children:
877 link.to_cpu()
878 return self
879
880 def to_gpu(self, device=None):
881 with cuda._get_device(device):
882 super(ChainList, self).to_gpu()
883 for link in self._children:
884 link.to_gpu()
885 return self
886
887 def params(self, include_uninit=True):
888 for param in super(ChainList, self).params(include_uninit):
889 yield param
890 for link in self._children:
891 for param in link.params(include_uninit):
892 yield param
893
894 def namedparams(self, include_uninit=True):
895 for ret in super(ChainList, self).namedparams(include_uninit):
896 yield ret
897 for idx, link in enumerate(self._children):
898 prefix = '/%d' % idx
899 for path, param in link.namedparams(include_uninit):
900 yield prefix + path, param
901
902 def links(self, skipself=False):
903 if not skipself:
904 yield self
905 for child in self._children:
906 for link in child.links():
907 yield link
908
909 def namedlinks(self, skipself=False):
910 if not skipself:
911 yield '/', self
912 for idx, child in enumerate(self._children):
913 prefix = '/%d' % idx
914 yield prefix, child
915 for path, link in child.namedlinks(True):
916 yield prefix + path, link
917
918 def children(self):
919 for child in self._children:
920 yield child
921
922 def copyparams(self, link):
923 super(ChainList, self).copyparams(link)
924 for idx, child in enumerate(self._children):
925 child.copyparams(link[idx])
926
927 def addgrads(self, link):
928 super(ChainList, self).addgrads(link)
929 for idx, child in enumerate(self._children):
930 child.addgrads(link[idx])
931
932 def serialize(self, serializer):
933 super(ChainList, self).serialize(serializer)
934 for idx, child in enumerate(self._children):
935 child.serialize(serializer['%d' % idx])
936
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch in the `git diff` format, fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
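Before reading the reference patch below, note where the bug lives: `Link.__setattr__` only auto-registers `chainer.Parameter` objects, and `Chain.__setattr__` (shown in the file above) extends that to child links, but `ChainList` overrides neither. A link assigned inside a `with self.init_scope():` block on a `ChainList` therefore becomes a plain attribute and never reaches `self._children`. Below is a condensed sketch of the reproduction script from the issue; it assumes a pre-patch, Chainer v2-era install matching the file above, and the class and attribute names are illustrative:

```python
# Repro sketch (assumes an unpatched Chainer matching chainer/link.py above).
import chainer
import chainer.links as L


class C(chainer.ChainList):  # the user likely meant chainer.Chain
    def __init__(self):
        super(C, self).__init__()
        with self.init_scope():
            # ChainList defines no __setattr__ override, so this is a plain
            # attribute assignment: the link is never appended to
            # self._children, and its parameters are invisible to optimizers.
            self.l1 = L.Linear(5, 5)


c = C()
print(len(c))            # 0 -- no child links were registered
print(list(c.params()))  # [] -- l1's W and b will never be updated
```

The reference patch therefore makes `ChainList.__setattr__` fail fast with a `TypeError` rather than dropping the registration silently: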
diff --git a/chainer/link.py b/chainer/link.py
--- a/chainer/link.py
+++ b/chainer/link.py
@@ -820,6 +820,13 @@
for link in links:
self.add_link(link)
+ def __setattr__(self, name, value):
+ if self.within_init_scope and isinstance(value, Link):
+ raise TypeError(
+ 'cannot register a new link'
+ ' within a "with chainlist.init_scope():" block.')
+ super(ChainList, self).__setattr__(name, value)
+
def __getitem__(self, index):
"""Returns the child at given index.
| {"golden_diff": "diff --git a/chainer/link.py b/chainer/link.py\n--- a/chainer/link.py\n+++ b/chainer/link.py\n@@ -820,6 +820,13 @@\n for link in links:\n self.add_link(link)\n \n+ def __setattr__(self, name, value):\n+ if self.within_init_scope and isinstance(value, Link):\n+ raise TypeError(\n+ 'cannot register a new link'\n+ ' within a \"with chainlist.init_scope():\" block.')\n+ super(ChainList, self).__setattr__(name, value)\n+\n def __getitem__(self, index):\n \"\"\"Returns the child at given index.\n", "issue": "ChainList doesn't warn self.init_scope()\nThe following code has a bug. Chainer doesn't warn the usage of `with self.init_scope()` in the subclass of ChainList. Could you add a warning message for such mistakes?\r\n\r\n* Code to reproduce\r\n```python\r\nimport chainer\r\nfrom chainer import Chain, ChainList\r\nimport chainer.functions as F\r\nimport chainer.links as L\r\nimport numpy as np\r\n\r\n\r\nclass C(ChainList): # should be 'class C(Chain)'\r\n def __init__(self):\r\n super().__init__()\r\n with self.init_scope():\r\n self.l1 = L.Linear(5, 5)\r\n self.l2 = L.Linear(5, 5)\r\n\r\n def __call__(self, x):\r\n return self.l2(F.relu(self.l1(x)))\r\n\r\n\r\nc = C()\r\nprint(c.l1.W.data)\r\n\r\nopt = chainer.optimizers.SGD()\r\nopt.setup(c)\r\nx = np.arange(10).reshape((2, 5)).astype(np.float32)\r\nloss = F.sum(c(x))\r\nopt.update(lambda: loss)\r\nprint(c.l1.W.data)\r\n```\r\n\r\noutput:\r\n```\r\n[[ 0.22224635 0.13709065 -0.0590423 0.31734523 0.76646286]\r\n [-0.09569775 -0.00810872 0.72896075 -0.50144166 -0.23909038]\r\n [ 0.24655567 -0.59849507 0.05945947 -0.06633393 -0.05738653]\r\n [-0.85418522 0.56111503 -0.4280332 -0.19436245 -0.09941436]\r\n [-0.06522682 -0.43800679 0.7132498 0.49363273 -0.2827867 ]]\r\n[[ 0.22224635 0.13709065 -0.0590423 0.31734523 0.76646286]\r\n [-0.09569775 -0.00810872 0.72896075 -0.50144166 -0.23909038]\r\n [ 0.24655567 -0.59849507 0.05945947 -0.06633393 -0.05738653]\r\n [-0.85418522 0.56111503 -0.4280332 -0.19436245 -0.09941436]\r\n [-0.06522682 -0.43800679 0.7132498 0.49363273 -0.2827867 ]]\r\n```\r\nNo update is taken because the link l1 is not registered.\n", "before_files": [{"content": "import collections\nimport contextlib\nimport copy\nimport warnings\n\nimport numpy\nimport six\n\nfrom chainer import cuda\nfrom chainer import initializers\nfrom chainer import variable\n\n\ndef _is_shape(value):\n if value is None:\n return True\n elif isinstance(value, collections.Sequence):\n try:\n return all(int(x) for x in value)\n except TypeError:\n return False\n try:\n return int(value)\n except TypeError:\n return False\n\n\ndef _ensure_shape_dtype(value):\n # Return value paired with dtype FP32 if it is a shape.\n if _is_shape(value):\n return value, 'f'\n # Otherwise, returns it with assuming a shape-dtype pair.\n else:\n return value\n\n\nclass Link(object):\n\n \"\"\"Building block of model definitions.\n\n Link is a building block of neural network models that support various\n features like handling parameters, defining network fragments,\n serialization, etc.\n\n Link is the primitive structure for the model definitions. It supports\n management of parameter variables and *persistent values* that should be\n incorporated to serialization.\n\n Parameter is an instance of :class:`~chainer.Parameter` registered to a\n link. 
A :class:`~chainer.Parameter` object can be registered as a\n parameter of the link by assigning it to an attribute within *an\n initialization scope*, which is a code surrounded by a\n :meth:`init_scope` context manager using the ``with`` statement.\n\n Persistent values are arrays, scalars, or any other serializable values\n registered via :meth:`register_persistent` or :meth:`add_persistent`.\n\n .. note::\n Whereas arbitrary serializable objects can be registered as persistent\n values, it is strongly recommended to just register values that should\n be treated as results of learning. A typical example of persistent\n values is ones computed during training and required for testing, e.g.\n running statistics for batch normalization.\n\n Parameters and persistent values are referred by their names. They can be\n accessed as attributes of the links. Link class itself manages the lists\n of names of parameters and persistent values to distinguish parameters and\n persistent values from other attributes.\n\n Link can be composed into more complex models. This composition feature is\n supported by child classes like :class:`Chain` and :class:`ChainList`. One\n can create a chain by combining one or more links. See the documents for\n these classes for details.\n\n As noted above, Link supports the serialization protocol of the\n :class:`~chainer.Serializer` class. **Note that only parameters and\n persistent values are saved and loaded.** Other attributes are considered\n as a part of user program (i.e. a part of network definition). In order to\n construct a link from saved file, other attributes must be identically\n reconstructed by user codes.\n\n .. admonition:: Example\n\n This is a simple example of custom link definition. Chainer itself also\n provides many links defined under the :mod:`~chainer.links` module. They\n might serve as examples, too.\n\n Consider we want to define a simple primitive link that implements a\n fully-connected layer based on the :func:`~functions.linear` function.\n Note that this function takes input units, a weight variable, and a bias\n variable as arguments. Then, the fully-connected layer can be defined as\n follows::\n\n import chainer\n import chainer.functions as F\n from chainer import initializers\n import numpy as np\n\n class LinearLayer(chainer.Link):\n\n def __init__(self, n_in, n_out):\n super(LinearLayer, self).__init__()\n with self.init_scope():\n self.W = chainer.Parameter(\n initializers.Normal(), (n_out, n_in))\n self.b = chainer.Parameter(\n initializers.Zero(), (n_out,))\n\n def __call__(self, x):\n return F.linear(x, self.W, self.b)\n\n This example shows that a user can define arbitrary parameters and use\n them in any methods. Links typically implement the ``__call__``\n operator, although they can also provide other methods to implement the\n forward propagation.\n\n Args:\n params: *(deprecated since v2.0.0)* Names, shapes, and optional dtypes\n of initial parameters. The keywords are used as the parameter\n names and the corresponding values consist either of the shape or\n a tuple of shape and a dtype ``(shape, dtype)``. 
If only the shape\n is supplied, the default dtype will be used.\n\n Attributes:\n ~Link.name (str): Name of this link, given by the parent chain (if\n exists).\n\n \"\"\"\n\n def __init__(self, **params):\n self._params = set()\n self._persistent = set()\n self._cpu = True\n self._device_id = None\n self._within_init_scope = False\n self.name = None\n\n for name, value in six.iteritems(params):\n # Note: deprecation warning will be raised in add_param\n shape, dtype = _ensure_shape_dtype(value)\n self.add_param(name, shape, dtype=dtype)\n\n @property\n def xp(self):\n \"\"\"Array module for this link.\n\n Depending on which of CPU/GPU this link is on, this property returns\n :mod:`numpy` or :mod:`cupy`.\n\n \"\"\"\n return numpy if self._cpu else cuda.cupy\n\n @property\n def within_init_scope(self):\n \"\"\"True if the current code is inside of an initialization scope.\n\n See :meth:`init_scope` for the details of the initialization scope.\n\n \"\"\"\n return getattr(self, '_within_init_scope', False)\n\n @contextlib.contextmanager\n def init_scope(self):\n \"\"\"Creates an initialization scope.\n\n This method returns a context manager object that enables registration\n of parameters (and links for :class:`~chainer.Chain`) by an assignment.\n A :class:`~chainer.Parameter` object can be automatically registered\n by assigning it to an attribute under this context manager.\n\n .. admonition:: Example\n\n In most cases, the parameter registration is done in the\n initializer method. Using the ``init_scope`` method, we can\n simply assign a :class:`~chainer.Parameter` object to register\n it to the link.\n\n .. code-block:: python\n\n class MyLink(chainer.Link):\n def __init__(self):\n super().__init__()\n with self.init_scope():\n self.W = chainer.Parameter(0, (10, 5))\n self.b = chainer.Parameter(0, (5,))\n\n \"\"\"\n old_flag = self.within_init_scope\n self._within_init_scope = True\n try:\n yield\n finally:\n self._within_init_scope = old_flag\n\n def __setattr__(self, name, value):\n if self.within_init_scope and isinstance(value, variable.Parameter):\n value.name = name\n if not self._cpu:\n value.to_gpu(self._device_id)\n self._params.add(name)\n self._persistent.discard(name)\n super(Link, self).__setattr__(name, value)\n\n def __delattr__(self, name):\n self._params.discard(name)\n self._persistent.discard(name)\n super(Link, self).__delattr__(name)\n\n def add_param(self, name, shape=None, dtype=numpy.float32,\n initializer=None):\n \"\"\"Registers a parameter to the link.\n\n .. deprecated:: v2.0.0\n\n Assign a :class:`~chainer.Parameter` object directly to an\n attribute within :meth:`an initialization scope <init_scope>`\n instead. For example, the following code\n\n .. code-block:: python\n\n link.add_param('W', shape=(5, 3))\n\n can be replaced by the following assignment.\n\n .. code-block:: python\n\n with self.init_scope():\n link.W = chainer.Parameter(None, (5, 3))\n\n The latter one is easier for IDEs to keep track of the attribute's\n type.\n\n Args:\n name (str): Name of the parameter. This name is also used as the\n attribute name.\n shape (int or tuple of ints): Shape of the parameter array. If it\n is omitted, the parameter variable is left uninitialized.\n dtype: Data type of the parameter array.\n initializer: If it is not ``None``, the data is initialized with\n the given initializer. If it is an array, the data is directly\n initialized by it. If it is callable, it is used as a weight\n initializer. 
Note that in these cases, ``dtype`` argument is\n ignored.\n\n \"\"\"\n warnings.warn('''\\\nParameter registeration via Link.__init__ and Link.add_param are deprecated.\nAssign a Parameter object directly to an attribute within a \\\n\"with Link.init_scope():\" block instead.\n''', DeprecationWarning)\n if name in self.__dict__:\n raise AttributeError(\n 'cannot register a new parameter %s: attribute exists'\n % name)\n if initializer is None:\n initializer = initializers.NaN(dtype)\n param = variable.Parameter(initializer, shape)\n with self.init_scope():\n setattr(self, name, param)\n\n def add_persistent(self, name, value):\n \"\"\"Registers a persistent value to the link.\n\n The registered value is saved and loaded on serialization and\n deserialization. The value is set to an attribute of the link.\n\n Args:\n name (str): Name of the persistent value. This name is also used\n for the attribute name.\n value: Value to be registered.\n\n \"\"\"\n d = self.__dict__\n if name in d:\n raise AttributeError(\n 'cannot register a new persistent value %s: attribute exists'\n % name)\n self._persistent.add(name)\n self._params.discard(name)\n d[name] = value\n\n def register_persistent(self, name):\n \"\"\"Registers an attribute of a given name as a persistent value.\n\n This is a convenient method to register an existing attribute as a\n persistent value. If ``name`` has been already registered as a\n parameter, this method removes it from the list of parameter names\n and re-registers it as a persistent value.\n\n Args:\n name (str): Name of the attribute to be registered.\n\n \"\"\"\n if not hasattr(self, name):\n raise AttributeError(\n 'cannot register non-existent attribute %s as a persistent '\n 'value' % name)\n self._persistent.add(name)\n self._params.discard(name)\n\n def copy(self):\n \"\"\"Copies the link hierarchy to new one.\n\n The whole hierarchy rooted by this link is copied. The copy is\n basically shallow, except that the parameter variables are also\n shallowly copied. It means that the parameter variables of copied one\n are different from ones of original link, while they share the data and\n gradient arrays.\n\n The name of the link is reset on the copy, since the copied instance\n does not belong to the original parent chain (even if exists).\n\n Returns:\n Link: Copied link object.\n\n \"\"\"\n ret = copy.copy(self)\n ret._params = set(self._params)\n ret._persistent = set(self._persistent)\n ret.name = None\n d = ret.__dict__\n for name in ret._params:\n d[name] = copy.copy(d[name])\n d[name].grad = None\n return ret\n\n def to_cpu(self):\n \"\"\"Copies parameter variables and persistent values to CPU.\n\n This method does not handle non-registered attributes. If some of such\n attributes must be copied to CPU, the link implementation must\n override this method to do so.\n\n Returns: self\n\n \"\"\"\n if self._cpu:\n return self\n d = self.__dict__\n for name in self._params:\n d[name].to_cpu()\n for name in self._persistent:\n value = d[name]\n if isinstance(value, cuda.ndarray):\n d[name] = value.get()\n self._cpu = True\n self._device_id = None\n return self\n\n def to_gpu(self, device=None):\n \"\"\"Copies parameter variables and persistent values to GPU.\n\n This method does not handle non-registered attributes. If some of such\n attributes must be copied to GPU, the link implementation must\n override this method to do so.\n\n Args:\n device: Target device specifier. 
If omitted, the current device is\n used.\n\n Returns: self\n\n \"\"\"\n cuda.check_cuda_available()\n if not self._cpu:\n return self\n d = self.__dict__\n with cuda._get_device(device):\n for name in self._params:\n d[name].to_gpu()\n for name in self._persistent:\n value = d[name]\n if isinstance(value, numpy.ndarray):\n d[name] = cuda.to_gpu(value)\n self._device_id = cuda.cupy.cuda.get_device_id()\n self._cpu = False\n return self\n\n def params(self, include_uninit=True):\n \"\"\"Returns a generator of all parameters under the link hierarchy.\n\n Args:\n include_uninit (bool): If ``True``, it also generates uninitialized\n parameters.\n\n Returns:\n A generator object that generates all parameters.\n\n \"\"\"\n d = self.__dict__\n for name in self._params:\n if include_uninit or d[name].data is not None:\n yield d[name]\n\n def namedparams(self, include_uninit=True):\n \"\"\"Returns a generator of all (path, param) pairs under the hierarchy.\n\n Args:\n include_uninit (bool): If ``True``, it also generates uninitialized\n parameters.\n\n Returns:\n A generator object that generates all (path, parameter) pairs. The\n paths are relative from this link.\n\n \"\"\"\n d = self.__dict__\n for name in self._params:\n if include_uninit or d[name].data is not None:\n yield '/' + name, d[name]\n\n def links(self, skipself=False):\n \"\"\"Returns a generator of all links under the hierarchy.\n\n Args:\n skipself (bool): If ``True``, then the generator skips this link\n and starts with the first child link.\n\n Returns:\n A generator object that generates all links.\n\n \"\"\"\n if not skipself:\n yield self\n\n def namedlinks(self, skipself=False):\n \"\"\"Returns a generator of all (path, link) pairs under the hierarchy.\n\n Args:\n skipself (bool): If ``True``, then the generator skips this link\n and starts with the first child link.\n\n Returns:\n A generator object that generates all (path, link) pairs.\n\n \"\"\"\n if not skipself:\n yield '/', self\n\n def children(self):\n \"\"\"Returns a generator of all child links.\n\n Returns:\n A generator object that generates all child links.\n\n \"\"\"\n if 0:\n yield\n\n def copyparams(self, link):\n \"\"\"Copies all parameters from given link.\n\n This method copies data arrays of all parameters in the hierarchy. The\n copy is even done across the host and devices. Note that this method\n does not copy the gradient arrays.\n\n Args:\n link (Link): Source link object.\n\n \"\"\"\n src = link.__dict__\n dst = self.__dict__\n for name in self._params:\n dst[name].copydata(src[name])\n\n def cleargrads(self):\n \"\"\"Clears all gradient arrays.\n\n This method should be called before the backward computation at every\n iteration of the optimization.\n\n \"\"\"\n for param in self.params():\n param.cleargrad()\n\n def zerograds(self):\n \"\"\"Initializes all gradient arrays by zero.\n\n This method can be used for the same purpose of cleargrads, but less\n efficient. This method is left for backward compatibility.\n\n .. deprecated:: v1.15\n Use :meth:`cleargrads` instead.\n\n \"\"\"\n warnings.warn(\n 'Link.zerograds is deprecated. Use Link.cleargrads instead.',\n DeprecationWarning)\n for param in self.params():\n param.zerograd()\n\n def addgrads(self, link):\n \"\"\"Accumulates gradient values from given link.\n\n This method adds each gradient array of the given link to corresponding\n gradient array of this link. 
The accumulation is even done across\n host and different devices.\n\n Args:\n link (Link): Source link object.\n\n \"\"\"\n src = link.__dict__\n dst = self.__dict__\n for name in self._params:\n dst[name].addgrad(src[name])\n\n def enable_update(self):\n \"\"\"Enables update rules of all parameters under the link hierarchy.\n\n This method sets the :attr:`~chainer.UpdateRule.enabled` flag of the\n update rule of each parameter variable to ``True``.\n\n \"\"\"\n for param in self.params():\n rule = param.update_rule\n if rule is not None:\n rule.enabled = True\n\n def disable_update(self):\n \"\"\"Disables update rules of all parameters under the link hierarchy.\n\n This method sets the :attr:`~chainer.UpdateRule.enabled` flag of the\n update rule of each parameter variable to ``False``.\n\n \"\"\"\n for param in self.params():\n rule = param.update_rule\n if rule is not None:\n rule.enabled = False\n\n @property\n def update_enabled(self):\n \"\"\"``True`` if at least one parameter has an update rule enabled.\"\"\"\n for param in self.params():\n rule = param.update_rule\n if rule is not None and rule.enabled:\n return True\n return False\n\n def serialize(self, serializer):\n \"\"\"Serializes the link object.\n\n Args:\n serializer (~chainer.AbstractSerializer): Serializer object.\n\n \"\"\"\n d = self.__dict__\n for name in self._params:\n param = d[name]\n data = serializer(name, param.data)\n if param.data is None and data is not None:\n # Initialize the parameter here\n param.initialize(data.shape)\n if isinstance(param.data, numpy.ndarray):\n numpy.copyto(param.data, data)\n else:\n param.data.set(numpy.asarray(data))\n for name in self._persistent:\n d[name] = serializer(name, d[name])\n\n\nclass Chain(Link):\n\n \"\"\"Composable link with object-like interface.\n\n Composability is one of the most important features of neural nets. Neural\n net models consist of many reusable fragments, and each model itself might\n be embedded into a larger learnable system. Chain enables us to write a\n neural net based on composition, without bothering about routine works like\n collecting parameters, serialization, copying the structure with parameters\n shared, etc.\n\n This class actually provides a way to compose one or more links into one\n structure. A chain can contain one or more *child links*. Child link is a\n link registered to the chain with its own name. The child link is stored to\n an attribute of the chain with the name. User can write a whole model or a\n fragment of neural nets as a child class of Chain.\n\n Each chain itself is also a link. Therefore, one can combine chains into\n higher-level chains. In this way, links and chains construct a *link\n hierarchy*. Link hierarchy forms a tree structure, where each node is\n identified by the path from the root. The path is represented by a string\n like a file path in UNIX, consisting of names of nodes on the path, joined\n by slashes ``/``.\n\n A child link can be added just by assigning it to an attribute of the\n chain within :meth:`an initialization scope <chainer.Link.init_scope>`.\n\n The registered child link is saved and loaded on serialization and\n deserialization, and involved in the optimization. The registered link\n is called a child. 
The child link is accessible via :meth:`children`\n generator, which returns a generator running through the children in\n registered order.\n\n On registration of a child link, its :attr:`~Link.name` attribute is also\n set (or overwritten if the link has already been registered to another\n chain).\n\n .. admonition:: Example\n\n This is a simple example of custom chain definition. Chainer itself also\n provides some chains defined under the :mod:`~chainer.links` module.\n They might serve as examples, too.\n\n Consider we want to define a multi-layer perceptron consisting of two\n hidden layers with rectifiers as activation functions. We can use the\n :class:`~chainer.links.Linear` link as a building block::\n\n import chainer\n import chainer.functions as F\n import chainer.links as L\n\n class MultiLayerPerceptron(chainer.Chain):\n\n def __init__(self, n_in, n_hidden, n_out):\n super(MultilayerPerceptron, self).__init__()\n with self.init_scope():\n self.layer1 = L.Linear(n_in, n_hidden)\n self.layer2 = L.Linear(n_hidden, n_hidden)\n self.layer3 = L.Linear(n_hidden, n_out)\n\n def __call__(self, x):\n # Forward propagation\n h1 = F.relu(self.layer1(x))\n h2 = F.relu(self.layer2(h1))\n return self.layer3(h2)\n\n Child links are registered via the assignment within a\n ``with self.init_scope():`` block. The forward propagation is often\n implemented as the ``__call__`` operator as the above example, though\n it is not mandatory.\n\n Args:\n links: Child links. The keywords are used as their names. The names are\n also set to the links.\n\n .. deprecated:: v2.0.0\n\n Assign child links directly to attributes, instead.\n\n \"\"\"\n\n def __init__(self, **links):\n super(Chain, self).__init__()\n self._children = set()\n\n for name, link in six.iteritems(links):\n self.add_link(name, link)\n\n def __getitem__(self, name):\n \"\"\"Equivalent to getattr.\"\"\"\n return getattr(self, name)\n\n def __setattr__(self, name, value):\n if self.within_init_scope and isinstance(value, Link):\n if hasattr(self, name):\n raise AttributeError(\n 'cannot register a new link %s: attribute exists' % name)\n value.name = name\n self._children.add(name)\n super(Chain, self).__setattr__(name, value)\n\n def __delattr__(self, name):\n self._children.discard(name)\n super(Chain, self).__delattr__(name)\n\n def add_link(self, name, link):\n \"\"\"Registers a child link to this chain.\n\n .. deprecated:: v2.0.0\n\n Assign the child link directly to an attribute within\n :meth:`an initialization scope <chainer.Link.init_scope>`, instead.\n For example, the following code\n\n .. code-block:: python\n\n chain.add_link('l1', L.Linear(3, 5))\n\n can be replaced by the following line.\n\n .. code-block:: python\n\n with self.init_scope():\n chain.l1 = L.Linear(3, 5)\n\n The latter one is easier for IDEs to keep track of the attribute's\n type.\n\n Args:\n name (str): Name of the child link. 
This name is also used as the\n attribute name.\n link (Link): The link object to be registered.\n\n \"\"\"\n warnings.warn('''\\\nChild link registeration via Chain.__init__ and Chain.add_link are deprecated.\nAssign a Link object directly to an attribute within a \\\n\"with link.init_scope():\" block instead.\n ''', DeprecationWarning)\n if name in self.__dict__:\n raise AttributeError(\n 'cannot register a new link %s: attribute exists' % name)\n if not isinstance(link, Link):\n raise TypeError('cannot register a non-link object as a child')\n with self.init_scope():\n setattr(self, name, link)\n\n def copy(self):\n ret = super(Chain, self).copy()\n ret._children = set(ret._children)\n d = ret.__dict__\n for name in ret._children:\n # copy child links recursively\n copied = d[name].copy()\n copied.name = name\n d[name] = copied\n return ret\n\n def to_cpu(self):\n super(Chain, self).to_cpu()\n d = self.__dict__\n for name in self._children:\n d[name].to_cpu()\n return self\n\n def to_gpu(self, device=None):\n with cuda._get_device(device):\n super(Chain, self).to_gpu()\n d = self.__dict__\n for name in self._children:\n d[name].to_gpu()\n return self\n\n def params(self, include_uninit=True):\n for param in super(Chain, self).params(include_uninit):\n yield param\n d = self.__dict__\n for name in self._children:\n for param in d[name].params(include_uninit):\n yield param\n\n def namedparams(self, include_uninit=True):\n for ret in super(Chain, self).namedparams(include_uninit):\n yield ret\n d = self.__dict__\n for name in self._children:\n prefix = '/' + name\n for path, param in d[name].namedparams(include_uninit):\n yield prefix + path, param\n\n def links(self, skipself=False):\n if not skipself:\n yield self\n d = self.__dict__\n for name in self._children:\n for link in d[name].links():\n yield link\n\n def namedlinks(self, skipself=False):\n if not skipself:\n yield '/', self\n d = self.__dict__\n for name in self._children:\n child = d[name]\n prefix = '/' + name\n yield prefix, child\n for path, link in d[name].namedlinks(True):\n yield prefix + path, link\n\n def children(self):\n d = self.__dict__\n for name in self._children:\n yield d[name]\n\n def copyparams(self, link):\n super(Chain, self).copyparams(link)\n src = link.__dict__\n dst = self.__dict__\n for name in self._children:\n dst[name].copyparams(src[name])\n\n def addgrads(self, link):\n super(Chain, self).addgrads(link)\n src = link.__dict__\n dst = self.__dict__\n for name in self._children:\n dst[name].addgrads(src[name])\n\n def serialize(self, serializer):\n super(Chain, self).serialize(serializer)\n d = self.__dict__\n for name in self._children:\n d[name].serialize(serializer[name])\n\n\nclass ChainList(Link):\n\n \"\"\"Composable link with list-like interface.\n\n This is another example of compositional link. Unlike :class:`Chain`, this\n class can be used like a list of child links. Each child link is indexed by\n a non-negative integer, and it maintains the current number of registered\n child links. The :meth:`add_link` method inserts a new link at the end of\n the list. It is useful to write a chain with arbitrary number of child\n links, e.g. 
an arbitrarily deep multi-layer perceptron.\n\n Note that this class does not implement all methods of :class:`list`.\n\n Args:\n links: Initial child links.\n\n \"\"\"\n\n def __init__(self, *links):\n super(ChainList, self).__init__()\n self._children = []\n\n for link in links:\n self.add_link(link)\n\n def __getitem__(self, index):\n \"\"\"Returns the child at given index.\n\n Args:\n index (int): Index of the child in the list.\n\n Returns:\n Link: The ``index``-th child link.\n\n \"\"\"\n return self._children[index]\n\n def __iter__(self):\n return iter(self._children)\n\n def __len__(self):\n \"\"\"Returns the number of children.\"\"\"\n return len(self._children)\n\n def append(self, link):\n \"\"\"Registers a child link and adds it to the tail of the list.\n\n This is equivalent to :meth:`add_link`. This method has been added to\n emulate the ``list`` interface.\n\n Args:\n link (Link): The link object to be regsitered.\n\n \"\"\"\n self.add_link(link)\n\n def add_link(self, link):\n \"\"\"Registers a child link and adds it to the tail of the list.\n\n Args:\n link (Link): The link object to be registered.\n\n \"\"\"\n link.name = str(len(self._children))\n self._children.append(link)\n\n def copy(self):\n ret = super(ChainList, self).copy()\n ret._children = list(ret._children) # copy\n children = ret._children\n for i, child in enumerate(children):\n child = child.copy()\n child.name = str(i)\n children[i] = child\n return ret\n\n def to_cpu(self):\n super(ChainList, self).to_cpu()\n for link in self._children:\n link.to_cpu()\n return self\n\n def to_gpu(self, device=None):\n with cuda._get_device(device):\n super(ChainList, self).to_gpu()\n for link in self._children:\n link.to_gpu()\n return self\n\n def params(self, include_uninit=True):\n for param in super(ChainList, self).params(include_uninit):\n yield param\n for link in self._children:\n for param in link.params(include_uninit):\n yield param\n\n def namedparams(self, include_uninit=True):\n for ret in super(ChainList, self).namedparams(include_uninit):\n yield ret\n for idx, link in enumerate(self._children):\n prefix = '/%d' % idx\n for path, param in link.namedparams(include_uninit):\n yield prefix + path, param\n\n def links(self, skipself=False):\n if not skipself:\n yield self\n for child in self._children:\n for link in child.links():\n yield link\n\n def namedlinks(self, skipself=False):\n if not skipself:\n yield '/', self\n for idx, child in enumerate(self._children):\n prefix = '/%d' % idx\n yield prefix, child\n for path, link in child.namedlinks(True):\n yield prefix + path, link\n\n def children(self):\n for child in self._children:\n yield child\n\n def copyparams(self, link):\n super(ChainList, self).copyparams(link)\n for idx, child in enumerate(self._children):\n child.copyparams(link[idx])\n\n def addgrads(self, link):\n super(ChainList, self).addgrads(link)\n for idx, child in enumerate(self._children):\n child.addgrads(link[idx])\n\n def serialize(self, serializer):\n super(ChainList, self).serialize(serializer)\n for idx, child in enumerate(self._children):\n child.serialize(serializer['%d' % idx])\n", "path": "chainer/link.py"}], "after_files": [{"content": "import collections\nimport contextlib\nimport copy\nimport warnings\n\nimport numpy\nimport six\n\nfrom chainer import cuda\nfrom chainer import initializers\nfrom chainer import variable\n\n\ndef _is_shape(value):\n if value is None:\n return True\n elif isinstance(value, collections.Sequence):\n try:\n return all(int(x) for x in value)\n 
except TypeError:\n return False\n try:\n return int(value)\n except TypeError:\n return False\n\n\ndef _ensure_shape_dtype(value):\n # Return value paired with dtype FP32 if it is a shape.\n if _is_shape(value):\n return value, 'f'\n # Otherwise, returns it with assuming a shape-dtype pair.\n else:\n return value\n\n\nclass Link(object):\n\n \"\"\"Building block of model definitions.\n\n Link is a building block of neural network models that support various\n features like handling parameters, defining network fragments,\n serialization, etc.\n\n Link is the primitive structure for the model definitions. It supports\n management of parameter variables and *persistent values* that should be\n incorporated to serialization.\n\n Parameter is an instance of :class:`~chainer.Parameter` registered to a\n link. A :class:`~chainer.Parameter` object can be registered as a\n parameter of the link by assigning it to an attribute within *an\n initialization scope*, which is a code surrounded by a\n :meth:`init_scope` context manager using the ``with`` statement.\n\n Persistent values are arrays, scalars, or any other serializable values\n registered via :meth:`register_persistent` or :meth:`add_persistent`.\n\n .. note::\n Whereas arbitrary serializable objects can be registered as persistent\n values, it is strongly recommended to just register values that should\n be treated as results of learning. A typical example of persistent\n values is ones computed during training and required for testing, e.g.\n running statistics for batch normalization.\n\n Parameters and persistent values are referred by their names. They can be\n accessed as attributes of the links. Link class itself manages the lists\n of names of parameters and persistent values to distinguish parameters and\n persistent values from other attributes.\n\n Link can be composed into more complex models. This composition feature is\n supported by child classes like :class:`Chain` and :class:`ChainList`. One\n can create a chain by combining one or more links. See the documents for\n these classes for details.\n\n As noted above, Link supports the serialization protocol of the\n :class:`~chainer.Serializer` class. **Note that only parameters and\n persistent values are saved and loaded.** Other attributes are considered\n as a part of user program (i.e. a part of network definition). In order to\n construct a link from saved file, other attributes must be identically\n reconstructed by user codes.\n\n .. admonition:: Example\n\n This is a simple example of custom link definition. Chainer itself also\n provides many links defined under the :mod:`~chainer.links` module. They\n might serve as examples, too.\n\n Consider we want to define a simple primitive link that implements a\n fully-connected layer based on the :func:`~functions.linear` function.\n Note that this function takes input units, a weight variable, and a bias\n variable as arguments. Then, the fully-connected layer can be defined as\n follows::\n\n import chainer\n import chainer.functions as F\n from chainer import initializers\n import numpy as np\n\n class LinearLayer(chainer.Link):\n\n def __init__(self, n_in, n_out):\n super(LinearLayer, self).__init__()\n with self.init_scope():\n self.W = chainer.Parameter(\n initializers.Normal(), (n_out, n_in))\n self.b = chainer.Parameter(\n initializers.Zero(), (n_out,))\n\n def __call__(self, x):\n return F.linear(x, self.W, self.b)\n\n This example shows that a user can define arbitrary parameters and use\n them in any methods. 
Links typically implement the ``__call__``\n operator, although they can also provide other methods to implement the\n forward propagation.\n\n Args:\n params: *(deprecated since v2.0.0)* Names, shapes, and optional dtypes\n of initial parameters. The keywords are used as the parameter\n names and the corresponding values consist either of the shape or\n a tuple of shape and a dtype ``(shape, dtype)``. If only the shape\n is supplied, the default dtype will be used.\n\n Attributes:\n ~Link.name (str): Name of this link, given by the parent chain (if\n exists).\n\n \"\"\"\n\n def __init__(self, **params):\n self._params = set()\n self._persistent = set()\n self._cpu = True\n self._device_id = None\n self._within_init_scope = False\n self.name = None\n\n for name, value in six.iteritems(params):\n # Note: deprecation warning will be raised in add_param\n shape, dtype = _ensure_shape_dtype(value)\n self.add_param(name, shape, dtype=dtype)\n\n @property\n def xp(self):\n \"\"\"Array module for this link.\n\n Depending on which of CPU/GPU this link is on, this property returns\n :mod:`numpy` or :mod:`cupy`.\n\n \"\"\"\n return numpy if self._cpu else cuda.cupy\n\n @property\n def within_init_scope(self):\n \"\"\"True if the current code is inside of an initialization scope.\n\n See :meth:`init_scope` for the details of the initialization scope.\n\n \"\"\"\n return getattr(self, '_within_init_scope', False)\n\n @contextlib.contextmanager\n def init_scope(self):\n \"\"\"Creates an initialization scope.\n\n This method returns a context manager object that enables registration\n of parameters (and links for :class:`~chainer.Chain`) by an assignment.\n A :class:`~chainer.Parameter` object can be automatically registered\n by assigning it to an attribute under this context manager.\n\n .. admonition:: Example\n\n In most cases, the parameter registration is done in the\n initializer method. Using the ``init_scope`` method, we can\n simply assign a :class:`~chainer.Parameter` object to register\n it to the link.\n\n .. code-block:: python\n\n class MyLink(chainer.Link):\n def __init__(self):\n super().__init__()\n with self.init_scope():\n self.W = chainer.Parameter(0, (10, 5))\n self.b = chainer.Parameter(0, (5,))\n\n \"\"\"\n old_flag = self.within_init_scope\n self._within_init_scope = True\n try:\n yield\n finally:\n self._within_init_scope = old_flag\n\n def __setattr__(self, name, value):\n if self.within_init_scope and isinstance(value, variable.Parameter):\n value.name = name\n if not self._cpu:\n value.to_gpu(self._device_id)\n self._params.add(name)\n self._persistent.discard(name)\n super(Link, self).__setattr__(name, value)\n\n def __delattr__(self, name):\n self._params.discard(name)\n self._persistent.discard(name)\n super(Link, self).__delattr__(name)\n\n def add_param(self, name, shape=None, dtype=numpy.float32,\n initializer=None):\n \"\"\"Registers a parameter to the link.\n\n .. deprecated:: v2.0.0\n\n Assign a :class:`~chainer.Parameter` object directly to an\n attribute within :meth:`an initialization scope <init_scope>`\n instead. For example, the following code\n\n .. code-block:: python\n\n link.add_param('W', shape=(5, 3))\n\n can be replaced by the following assignment.\n\n .. code-block:: python\n\n with self.init_scope():\n link.W = chainer.Parameter(None, (5, 3))\n\n The latter one is easier for IDEs to keep track of the attribute's\n type.\n\n Args:\n name (str): Name of the parameter. 
This name is also used as the\n attribute name.\n shape (int or tuple of ints): Shape of the parameter array. If it\n is omitted, the parameter variable is left uninitialized.\n dtype: Data type of the parameter array.\n initializer: If it is not ``None``, the data is initialized with\n the given initializer. If it is an array, the data is directly\n initialized by it. If it is callable, it is used as a weight\n initializer. Note that in these cases, ``dtype`` argument is\n ignored.\n\n \"\"\"\n warnings.warn('''\\\nParameter registeration via Link.__init__ and Link.add_param are deprecated.\nAssign a Parameter object directly to an attribute within a \\\n\"with Link.init_scope():\" block instead.\n''', DeprecationWarning)\n if name in self.__dict__:\n raise AttributeError(\n 'cannot register a new parameter %s: attribute exists'\n % name)\n if initializer is None:\n initializer = initializers.NaN(dtype)\n param = variable.Parameter(initializer, shape)\n with self.init_scope():\n setattr(self, name, param)\n\n def add_persistent(self, name, value):\n \"\"\"Registers a persistent value to the link.\n\n The registered value is saved and loaded on serialization and\n deserialization. The value is set to an attribute of the link.\n\n Args:\n name (str): Name of the persistent value. This name is also used\n for the attribute name.\n value: Value to be registered.\n\n \"\"\"\n d = self.__dict__\n if name in d:\n raise AttributeError(\n 'cannot register a new persistent value %s: attribute exists'\n % name)\n self._persistent.add(name)\n self._params.discard(name)\n d[name] = value\n\n def register_persistent(self, name):\n \"\"\"Registers an attribute of a given name as a persistent value.\n\n This is a convenient method to register an existing attribute as a\n persistent value. If ``name`` has been already registered as a\n parameter, this method removes it from the list of parameter names\n and re-registers it as a persistent value.\n\n Args:\n name (str): Name of the attribute to be registered.\n\n \"\"\"\n if not hasattr(self, name):\n raise AttributeError(\n 'cannot register non-existent attribute %s as a persistent '\n 'value' % name)\n self._persistent.add(name)\n self._params.discard(name)\n\n def copy(self):\n \"\"\"Copies the link hierarchy to new one.\n\n The whole hierarchy rooted by this link is copied. The copy is\n basically shallow, except that the parameter variables are also\n shallowly copied. It means that the parameter variables of copied one\n are different from ones of original link, while they share the data and\n gradient arrays.\n\n The name of the link is reset on the copy, since the copied instance\n does not belong to the original parent chain (even if exists).\n\n Returns:\n Link: Copied link object.\n\n \"\"\"\n ret = copy.copy(self)\n ret._params = set(self._params)\n ret._persistent = set(self._persistent)\n ret.name = None\n d = ret.__dict__\n for name in ret._params:\n d[name] = copy.copy(d[name])\n d[name].grad = None\n return ret\n\n def to_cpu(self):\n \"\"\"Copies parameter variables and persistent values to CPU.\n\n This method does not handle non-registered attributes. 
If some of such\n attributes must be copied to CPU, the link implementation must\n override this method to do so.\n\n Returns: self\n\n \"\"\"\n if self._cpu:\n return self\n d = self.__dict__\n for name in self._params:\n d[name].to_cpu()\n for name in self._persistent:\n value = d[name]\n if isinstance(value, cuda.ndarray):\n d[name] = value.get()\n self._cpu = True\n self._device_id = None\n return self\n\n def to_gpu(self, device=None):\n \"\"\"Copies parameter variables and persistent values to GPU.\n\n This method does not handle non-registered attributes. If some of such\n attributes must be copied to GPU, the link implementation must\n override this method to do so.\n\n Args:\n device: Target device specifier. If omitted, the current device is\n used.\n\n Returns: self\n\n \"\"\"\n cuda.check_cuda_available()\n if not self._cpu:\n return self\n d = self.__dict__\n with cuda._get_device(device):\n for name in self._params:\n d[name].to_gpu()\n for name in self._persistent:\n value = d[name]\n if isinstance(value, numpy.ndarray):\n d[name] = cuda.to_gpu(value)\n self._device_id = cuda.cupy.cuda.get_device_id()\n self._cpu = False\n return self\n\n def params(self, include_uninit=True):\n \"\"\"Returns a generator of all parameters under the link hierarchy.\n\n Args:\n include_uninit (bool): If ``True``, it also generates uninitialized\n parameters.\n\n Returns:\n A generator object that generates all parameters.\n\n \"\"\"\n d = self.__dict__\n for name in self._params:\n if include_uninit or d[name].data is not None:\n yield d[name]\n\n def namedparams(self, include_uninit=True):\n \"\"\"Returns a generator of all (path, param) pairs under the hierarchy.\n\n Args:\n include_uninit (bool): If ``True``, it also generates uninitialized\n parameters.\n\n Returns:\n A generator object that generates all (path, parameter) pairs. The\n paths are relative from this link.\n\n \"\"\"\n d = self.__dict__\n for name in self._params:\n if include_uninit or d[name].data is not None:\n yield '/' + name, d[name]\n\n def links(self, skipself=False):\n \"\"\"Returns a generator of all links under the hierarchy.\n\n Args:\n skipself (bool): If ``True``, then the generator skips this link\n and starts with the first child link.\n\n Returns:\n A generator object that generates all links.\n\n \"\"\"\n if not skipself:\n yield self\n\n def namedlinks(self, skipself=False):\n \"\"\"Returns a generator of all (path, link) pairs under the hierarchy.\n\n Args:\n skipself (bool): If ``True``, then the generator skips this link\n and starts with the first child link.\n\n Returns:\n A generator object that generates all (path, link) pairs.\n\n \"\"\"\n if not skipself:\n yield '/', self\n\n def children(self):\n \"\"\"Returns a generator of all child links.\n\n Returns:\n A generator object that generates all child links.\n\n \"\"\"\n if 0:\n yield\n\n def copyparams(self, link):\n \"\"\"Copies all parameters from given link.\n\n This method copies data arrays of all parameters in the hierarchy. The\n copy is even done across the host and devices. 
Note that this method\n does not copy the gradient arrays.\n\n Args:\n link (Link): Source link object.\n\n \"\"\"\n src = link.__dict__\n dst = self.__dict__\n for name in self._params:\n dst[name].copydata(src[name])\n\n def cleargrads(self):\n \"\"\"Clears all gradient arrays.\n\n This method should be called before the backward computation at every\n iteration of the optimization.\n\n \"\"\"\n for param in self.params():\n param.cleargrad()\n\n def zerograds(self):\n \"\"\"Initializes all gradient arrays by zero.\n\n This method can be used for the same purpose of cleargrads, but less\n efficient. This method is left for backward compatibility.\n\n .. deprecated:: v1.15\n Use :meth:`cleargrads` instead.\n\n \"\"\"\n warnings.warn(\n 'Link.zerograds is deprecated. Use Link.cleargrads instead.',\n DeprecationWarning)\n for param in self.params():\n param.zerograd()\n\n def addgrads(self, link):\n \"\"\"Accumulates gradient values from given link.\n\n This method adds each gradient array of the given link to corresponding\n gradient array of this link. The accumulation is even done across\n host and different devices.\n\n Args:\n link (Link): Source link object.\n\n \"\"\"\n src = link.__dict__\n dst = self.__dict__\n for name in self._params:\n dst[name].addgrad(src[name])\n\n def enable_update(self):\n \"\"\"Enables update rules of all parameters under the link hierarchy.\n\n This method sets the :attr:`~chainer.UpdateRule.enabled` flag of the\n update rule of each parameter variable to ``True``.\n\n \"\"\"\n for param in self.params():\n rule = param.update_rule\n if rule is not None:\n rule.enabled = True\n\n def disable_update(self):\n \"\"\"Disables update rules of all parameters under the link hierarchy.\n\n This method sets the :attr:`~chainer.UpdateRule.enabled` flag of the\n update rule of each parameter variable to ``False``.\n\n \"\"\"\n for param in self.params():\n rule = param.update_rule\n if rule is not None:\n rule.enabled = False\n\n @property\n def update_enabled(self):\n \"\"\"``True`` if at least one parameter has an update rule enabled.\"\"\"\n for param in self.params():\n rule = param.update_rule\n if rule is not None and rule.enabled:\n return True\n return False\n\n def serialize(self, serializer):\n \"\"\"Serializes the link object.\n\n Args:\n serializer (~chainer.AbstractSerializer): Serializer object.\n\n \"\"\"\n d = self.__dict__\n for name in self._params:\n param = d[name]\n data = serializer(name, param.data)\n if param.data is None and data is not None:\n # Initialize the parameter here\n param.initialize(data.shape)\n if isinstance(param.data, numpy.ndarray):\n numpy.copyto(param.data, data)\n else:\n param.data.set(numpy.asarray(data))\n for name in self._persistent:\n d[name] = serializer(name, d[name])\n\n\nclass Chain(Link):\n\n \"\"\"Composable link with object-like interface.\n\n Composability is one of the most important features of neural nets. Neural\n net models consist of many reusable fragments, and each model itself might\n be embedded into a larger learnable system. Chain enables us to write a\n neural net based on composition, without bothering about routine works like\n collecting parameters, serialization, copying the structure with parameters\n shared, etc.\n\n This class actually provides a way to compose one or more links into one\n structure. A chain can contain one or more *child links*. Child link is a\n link registered to the chain with its own name. The child link is stored to\n an attribute of the chain with the name. 
User can write a whole model or a\n fragment of neural nets as a child class of Chain.\n\n Each chain itself is also a link. Therefore, one can combine chains into\n higher-level chains. In this way, links and chains construct a *link\n hierarchy*. Link hierarchy forms a tree structure, where each node is\n identified by the path from the root. The path is represented by a string\n like a file path in UNIX, consisting of names of nodes on the path, joined\n by slashes ``/``.\n\n A child link can be added just by assigning it to an attribute of the\n chain within :meth:`an initialization scope <chainer.Link.init_scope>`.\n\n The registered child link is saved and loaded on serialization and\n deserialization, and involved in the optimization. The registered link\n is called a child. The child link is accessible via :meth:`children`\n generator, which returns a generator running through the children in\n registered order.\n\n On registration of a child link, its :attr:`~Link.name` attribute is also\n set (or overwritten if the link has already been registered to another\n chain).\n\n .. admonition:: Example\n\n This is a simple example of custom chain definition. Chainer itself also\n provides some chains defined under the :mod:`~chainer.links` module.\n They might serve as examples, too.\n\n Consider we want to define a multi-layer perceptron consisting of two\n hidden layers with rectifiers as activation functions. We can use the\n :class:`~chainer.links.Linear` link as a building block::\n\n import chainer\n import chainer.functions as F\n import chainer.links as L\n\n class MultiLayerPerceptron(chainer.Chain):\n\n def __init__(self, n_in, n_hidden, n_out):\n super(MultilayerPerceptron, self).__init__()\n with self.init_scope():\n self.layer1 = L.Linear(n_in, n_hidden)\n self.layer2 = L.Linear(n_hidden, n_hidden)\n self.layer3 = L.Linear(n_hidden, n_out)\n\n def __call__(self, x):\n # Forward propagation\n h1 = F.relu(self.layer1(x))\n h2 = F.relu(self.layer2(h1))\n return self.layer3(h2)\n\n Child links are registered via the assignment within a\n ``with self.init_scope():`` block. The forward propagation is often\n implemented as the ``__call__`` operator as the above example, though\n it is not mandatory.\n\n Args:\n links: Child links. The keywords are used as their names. The names are\n also set to the links.\n\n .. deprecated:: v2.0.0\n\n Assign child links directly to attributes, instead.\n\n \"\"\"\n\n def __init__(self, **links):\n super(Chain, self).__init__()\n self._children = set()\n\n for name, link in six.iteritems(links):\n self.add_link(name, link)\n\n def __getitem__(self, name):\n \"\"\"Equivalent to getattr.\"\"\"\n return getattr(self, name)\n\n def __setattr__(self, name, value):\n if self.within_init_scope and isinstance(value, Link):\n if hasattr(self, name):\n raise AttributeError(\n 'cannot register a new link %s: attribute exists' % name)\n value.name = name\n self._children.add(name)\n super(Chain, self).__setattr__(name, value)\n\n def __delattr__(self, name):\n self._children.discard(name)\n super(Chain, self).__delattr__(name)\n\n def add_link(self, name, link):\n \"\"\"Registers a child link to this chain.\n\n .. deprecated:: v2.0.0\n\n Assign the child link directly to an attribute within\n :meth:`an initialization scope <chainer.Link.init_scope>`, instead.\n For example, the following code\n\n .. code-block:: python\n\n chain.add_link('l1', L.Linear(3, 5))\n\n can be replaced by the following line.\n\n .. 
code-block:: python\n\n with self.init_scope():\n chain.l1 = L.Linear(3, 5)\n\n The latter one is easier for IDEs to keep track of the attribute's\n type.\n\n Args:\n name (str): Name of the child link. This name is also used as the\n attribute name.\n link (Link): The link object to be registered.\n\n \"\"\"\n warnings.warn('''\\\nChild link registeration via Chain.__init__ and Chain.add_link are deprecated.\nAssign a Link object directly to an attribute within a \\\n\"with link.init_scope():\" block instead.\n ''', DeprecationWarning)\n if name in self.__dict__:\n raise AttributeError(\n 'cannot register a new link %s: attribute exists' % name)\n if not isinstance(link, Link):\n raise TypeError('cannot register a non-link object as a child')\n with self.init_scope():\n setattr(self, name, link)\n\n def copy(self):\n ret = super(Chain, self).copy()\n ret._children = set(ret._children)\n d = ret.__dict__\n for name in ret._children:\n # copy child links recursively\n copied = d[name].copy()\n copied.name = name\n d[name] = copied\n return ret\n\n def to_cpu(self):\n super(Chain, self).to_cpu()\n d = self.__dict__\n for name in self._children:\n d[name].to_cpu()\n return self\n\n def to_gpu(self, device=None):\n with cuda._get_device(device):\n super(Chain, self).to_gpu()\n d = self.__dict__\n for name in self._children:\n d[name].to_gpu()\n return self\n\n def params(self, include_uninit=True):\n for param in super(Chain, self).params(include_uninit):\n yield param\n d = self.__dict__\n for name in self._children:\n for param in d[name].params(include_uninit):\n yield param\n\n def namedparams(self, include_uninit=True):\n for ret in super(Chain, self).namedparams(include_uninit):\n yield ret\n d = self.__dict__\n for name in self._children:\n prefix = '/' + name\n for path, param in d[name].namedparams(include_uninit):\n yield prefix + path, param\n\n def links(self, skipself=False):\n if not skipself:\n yield self\n d = self.__dict__\n for name in self._children:\n for link in d[name].links():\n yield link\n\n def namedlinks(self, skipself=False):\n if not skipself:\n yield '/', self\n d = self.__dict__\n for name in self._children:\n child = d[name]\n prefix = '/' + name\n yield prefix, child\n for path, link in d[name].namedlinks(True):\n yield prefix + path, link\n\n def children(self):\n d = self.__dict__\n for name in self._children:\n yield d[name]\n\n def copyparams(self, link):\n super(Chain, self).copyparams(link)\n src = link.__dict__\n dst = self.__dict__\n for name in self._children:\n dst[name].copyparams(src[name])\n\n def addgrads(self, link):\n super(Chain, self).addgrads(link)\n src = link.__dict__\n dst = self.__dict__\n for name in self._children:\n dst[name].addgrads(src[name])\n\n def serialize(self, serializer):\n super(Chain, self).serialize(serializer)\n d = self.__dict__\n for name in self._children:\n d[name].serialize(serializer[name])\n\n\nclass ChainList(Link):\n\n \"\"\"Composable link with list-like interface.\n\n This is another example of compositional link. Unlike :class:`Chain`, this\n class can be used like a list of child links. Each child link is indexed by\n a non-negative integer, and it maintains the current number of registered\n child links. The :meth:`add_link` method inserts a new link at the end of\n the list. It is useful to write a chain with arbitrary number of child\n links, e.g. 
an arbitrarily deep multi-layer perceptron.\n\n Note that this class does not implement all methods of :class:`list`.\n\n Args:\n links: Initial child links.\n\n \"\"\"\n\n def __init__(self, *links):\n super(ChainList, self).__init__()\n self._children = []\n\n for link in links:\n self.add_link(link)\n\n def __setattr__(self, name, value):\n if self.within_init_scope and isinstance(value, Link):\n raise TypeError(\n 'cannot register a new link'\n ' within a \"with chainlist.init_scope():\" block.')\n super(ChainList, self).__setattr__(name, value)\n\n def __getitem__(self, index):\n \"\"\"Returns the child at given index.\n\n Args:\n index (int): Index of the child in the list.\n\n Returns:\n Link: The ``index``-th child link.\n\n \"\"\"\n return self._children[index]\n\n def __iter__(self):\n return iter(self._children)\n\n def __len__(self):\n \"\"\"Returns the number of children.\"\"\"\n return len(self._children)\n\n def append(self, link):\n \"\"\"Registers a child link and adds it to the tail of the list.\n\n This is equivalent to :meth:`add_link`. This method has been added to\n emulate the ``list`` interface.\n\n Args:\n link (Link): The link object to be regsitered.\n\n \"\"\"\n self.add_link(link)\n\n def add_link(self, link):\n \"\"\"Registers a child link and adds it to the tail of the list.\n\n Args:\n link (Link): The link object to be registered.\n\n \"\"\"\n link.name = str(len(self._children))\n self._children.append(link)\n\n def copy(self):\n ret = super(ChainList, self).copy()\n ret._children = list(ret._children) # copy\n children = ret._children\n for i, child in enumerate(children):\n child = child.copy()\n child.name = str(i)\n children[i] = child\n return ret\n\n def to_cpu(self):\n super(ChainList, self).to_cpu()\n for link in self._children:\n link.to_cpu()\n return self\n\n def to_gpu(self, device=None):\n with cuda._get_device(device):\n super(ChainList, self).to_gpu()\n for link in self._children:\n link.to_gpu()\n return self\n\n def params(self, include_uninit=True):\n for param in super(ChainList, self).params(include_uninit):\n yield param\n for link in self._children:\n for param in link.params(include_uninit):\n yield param\n\n def namedparams(self, include_uninit=True):\n for ret in super(ChainList, self).namedparams(include_uninit):\n yield ret\n for idx, link in enumerate(self._children):\n prefix = '/%d' % idx\n for path, param in link.namedparams(include_uninit):\n yield prefix + path, param\n\n def links(self, skipself=False):\n if not skipself:\n yield self\n for child in self._children:\n for link in child.links():\n yield link\n\n def namedlinks(self, skipself=False):\n if not skipself:\n yield '/', self\n for idx, child in enumerate(self._children):\n prefix = '/%d' % idx\n yield prefix, child\n for path, link in child.namedlinks(True):\n yield prefix + path, link\n\n def children(self):\n for child in self._children:\n yield child\n\n def copyparams(self, link):\n super(ChainList, self).copyparams(link)\n for idx, child in enumerate(self._children):\n child.copyparams(link[idx])\n\n def addgrads(self, link):\n super(ChainList, self).addgrads(link)\n for idx, child in enumerate(self._children):\n child.addgrads(link[idx])\n\n def serialize(self, serializer):\n super(ChainList, self).serialize(serializer)\n for idx, child in enumerate(self._children):\n child.serialize(serializer['%d' % idx])\n", "path": "chainer/link.py"}]} |
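As a quick illustration of the `ChainList` interface documented in the record above, here is a hedged usage sketch: it assumes chainer is installed, and the layer sizes are arbitrary.

```python
import chainer
import chainer.links as L

# Children passed to the constructor are registered positionally and
# named '0', '1', ... exactly as add_link describes above.
net = chainer.ChainList(L.Linear(3, 4), L.Linear(4, 2))

print(len(net))          # 2
first = net[0]           # the Linear(3, 4) child
for link in net.children():
    print(link.name)     # '0', then '1'
```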
gh_patches_debug_1611 | rasdani/github-patches | git_diff | fonttools__fonttools-1715 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
ascender and ascent
The [OpenType spec](https://docs.microsoft.com/en-gb/typography/opentype/spec/hhea) calls the first two substantive entries in the `hhea` table "`ascender`" and "`descender`". fonttools calls them "`ascent`" and "`descent`".
This was surprising! Maybe it's too late to change them, but can we at least have an alias?
--- END ISSUE ---
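Before the code segments, here is a minimal sketch of the kind of alias the issue asks for: plain read/write properties delegating to the existing attribute names. The class name is illustrative only; the real table class appears in the file below.

```python
class HheaAliasSketch:
    # Illustrative only: spec-named aliases over fonttools' names.
    ascent = 0
    descent = 0

    @property
    def ascender(self):
        return self.ascent          # read through to the existing name

    @ascender.setter
    def ascender(self, value):
        self.ascent = value         # write through as well

    @property
    def descender(self):
        return self.descent

    @descender.setter
    def descender(self, value):
        self.descent = value
```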
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `Lib/fontTools/ttLib/tables/_h_h_e_a.py`
Content:
```
1 from fontTools.misc.py23 import *
2 from fontTools.misc import sstruct
3 from fontTools.misc.textTools import safeEval
4 from fontTools.misc.fixedTools import (
5 ensureVersionIsLong as fi2ve, versionToFixed as ve2fi)
6 from . import DefaultTable
7 import math
8
9
10 hheaFormat = """
11 > # big endian
12 tableVersion: L
13 ascent: h
14 descent: h
15 lineGap: h
16 advanceWidthMax: H
17 minLeftSideBearing: h
18 minRightSideBearing: h
19 xMaxExtent: h
20 caretSlopeRise: h
21 caretSlopeRun: h
22 caretOffset: h
23 reserved0: h
24 reserved1: h
25 reserved2: h
26 reserved3: h
27 metricDataFormat: h
28 numberOfHMetrics: H
29 """
30
31
32 class table__h_h_e_a(DefaultTable.DefaultTable):
33
34 # Note: Keep in sync with table__v_h_e_a
35
36 dependencies = ['hmtx', 'glyf', 'CFF ']
37
38 def decompile(self, data, ttFont):
39 sstruct.unpack(hheaFormat, data, self)
40
41 def compile(self, ttFont):
42 if ttFont.recalcBBoxes and (ttFont.isLoaded('glyf') or ttFont.isLoaded('CFF ')):
43 self.recalc(ttFont)
44 self.tableVersion = fi2ve(self.tableVersion)
45 return sstruct.pack(hheaFormat, self)
46
47 def recalc(self, ttFont):
48 if 'hmtx' in ttFont:
49 hmtxTable = ttFont['hmtx']
50 self.advanceWidthMax = max(adv for adv, _ in hmtxTable.metrics.values())
51
52 boundsWidthDict = {}
53 if 'glyf' in ttFont:
54 glyfTable = ttFont['glyf']
55 for name in ttFont.getGlyphOrder():
56 g = glyfTable[name]
57 if g.numberOfContours == 0:
58 continue
59 if g.numberOfContours < 0 and not hasattr(g, "xMax"):
60 # Composite glyph without extents set.
61 # Calculate those.
62 g.recalcBounds(glyfTable)
63 boundsWidthDict[name] = g.xMax - g.xMin
64 elif 'CFF ' in ttFont:
65 topDict = ttFont['CFF '].cff.topDictIndex[0]
66 charStrings = topDict.CharStrings
67 for name in ttFont.getGlyphOrder():
68 cs = charStrings[name]
69 bounds = cs.calcBounds(charStrings)
70 if bounds is not None:
71 boundsWidthDict[name] = int(
72 math.ceil(bounds[2]) - math.floor(bounds[0]))
73
74 if boundsWidthDict:
75 minLeftSideBearing = float('inf')
76 minRightSideBearing = float('inf')
77 xMaxExtent = -float('inf')
78 for name, boundsWidth in boundsWidthDict.items():
79 advanceWidth, lsb = hmtxTable[name]
80 rsb = advanceWidth - lsb - boundsWidth
81 extent = lsb + boundsWidth
82 minLeftSideBearing = min(minLeftSideBearing, lsb)
83 minRightSideBearing = min(minRightSideBearing, rsb)
84 xMaxExtent = max(xMaxExtent, extent)
85 self.minLeftSideBearing = minLeftSideBearing
86 self.minRightSideBearing = minRightSideBearing
87 self.xMaxExtent = xMaxExtent
88
89 else: # No glyph has outlines.
90 self.minLeftSideBearing = 0
91 self.minRightSideBearing = 0
92 self.xMaxExtent = 0
93
94 def toXML(self, writer, ttFont):
95 formatstring, names, fixes = sstruct.getformat(hheaFormat)
96 for name in names:
97 value = getattr(self, name)
98 if name == "tableVersion":
99 value = fi2ve(value)
100 value = "0x%08x" % value
101 writer.simpletag(name, value=value)
102 writer.newline()
103
104 def fromXML(self, name, attrs, content, ttFont):
105 if name == "tableVersion":
106 setattr(self, name, ve2fi(attrs["value"]))
107 return
108 setattr(self, name, safeEval(attrs["value"]))
109
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/Lib/fontTools/ttLib/tables/_h_h_e_a.py b/Lib/fontTools/ttLib/tables/_h_h_e_a.py
--- a/Lib/fontTools/ttLib/tables/_h_h_e_a.py
+++ b/Lib/fontTools/ttLib/tables/_h_h_e_a.py
@@ -35,6 +35,19 @@
dependencies = ['hmtx', 'glyf', 'CFF ']
+ # OpenType spec renamed these, add aliases for compatibility
+ @property
+ def ascender(self): return self.ascent
+
+ @ascender.setter
+ def ascender(self,value): self.ascent = value
+
+ @property
+ def descender(self): return self.descent
+
+ @descender.setter
+ def descender(self,value): self.descent = value
+
def decompile(self, data, ttFont):
sstruct.unpack(hheaFormat, data, self)
| {"golden_diff": "diff --git a/Lib/fontTools/ttLib/tables/_h_h_e_a.py b/Lib/fontTools/ttLib/tables/_h_h_e_a.py\n--- a/Lib/fontTools/ttLib/tables/_h_h_e_a.py\n+++ b/Lib/fontTools/ttLib/tables/_h_h_e_a.py\n@@ -35,6 +35,19 @@\n \n \tdependencies = ['hmtx', 'glyf', 'CFF ']\n \n+\t# OpenType spec renamed these, add aliases for compatibility\n+\t@property\n+\tdef ascender(self): return self.ascent\n+\n+\[email protected]\n+\tdef ascender(self,value): self.ascent = value\n+\n+\t@property\n+\tdef descender(self): return self.descent\n+\n+\[email protected]\n+\tdef descender(self,value): self.descent = value\n+\n \tdef decompile(self, data, ttFont):\n \t\tsstruct.unpack(hheaFormat, data, self)\n", "issue": "ascender and ascent\nThe [opentype spec ](https://docs.microsoft.com/en-gb/typography/opentype/spec/hhea) calls the first two substantive entries in the `hhea` table \"`ascender`\" and \"`descender`\". fonttools calls them \"`ascent`\" and \"`descent`\".\r\n\r\nThis was surprising! Maybe it's too late to change then but can we at least have an alias?\n", "before_files": [{"content": "from fontTools.misc.py23 import *\nfrom fontTools.misc import sstruct\nfrom fontTools.misc.textTools import safeEval\nfrom fontTools.misc.fixedTools import (\n\tensureVersionIsLong as fi2ve, versionToFixed as ve2fi)\nfrom . import DefaultTable\nimport math\n\n\nhheaFormat = \"\"\"\n\t\t> # big endian\n\t\ttableVersion: L\n\t\tascent: h\n\t\tdescent: h\n\t\tlineGap: h\n\t\tadvanceWidthMax: H\n\t\tminLeftSideBearing: h\n\t\tminRightSideBearing: h\n\t\txMaxExtent: h\n\t\tcaretSlopeRise: h\n\t\tcaretSlopeRun: h\n\t\tcaretOffset: h\n\t\treserved0: h\n\t\treserved1: h\n\t\treserved2: h\n\t\treserved3: h\n\t\tmetricDataFormat: h\n\t\tnumberOfHMetrics: H\n\"\"\"\n\n\nclass table__h_h_e_a(DefaultTable.DefaultTable):\n\n\t# Note: Keep in sync with table__v_h_e_a\n\n\tdependencies = ['hmtx', 'glyf', 'CFF ']\n\n\tdef decompile(self, data, ttFont):\n\t\tsstruct.unpack(hheaFormat, data, self)\n\n\tdef compile(self, ttFont):\n\t\tif ttFont.recalcBBoxes and (ttFont.isLoaded('glyf') or ttFont.isLoaded('CFF ')):\n\t\t\tself.recalc(ttFont)\n\t\tself.tableVersion = fi2ve(self.tableVersion)\n\t\treturn sstruct.pack(hheaFormat, self)\n\n\tdef recalc(self, ttFont):\n\t\tif 'hmtx' in ttFont:\n\t\t\thmtxTable = ttFont['hmtx']\n\t\t\tself.advanceWidthMax = max(adv for adv, _ in hmtxTable.metrics.values())\n\n\t\tboundsWidthDict = {}\n\t\tif 'glyf' in ttFont:\n\t\t\tglyfTable = ttFont['glyf']\n\t\t\tfor name in ttFont.getGlyphOrder():\n\t\t\t\tg = glyfTable[name]\n\t\t\t\tif g.numberOfContours == 0:\n\t\t\t\t\tcontinue\n\t\t\t\tif g.numberOfContours < 0 and not hasattr(g, \"xMax\"):\n\t\t\t\t\t# Composite glyph without extents set.\n\t\t\t\t\t# Calculate those.\n\t\t\t\t\tg.recalcBounds(glyfTable)\n\t\t\t\tboundsWidthDict[name] = g.xMax - g.xMin\n\t\telif 'CFF ' in ttFont:\n\t\t\ttopDict = ttFont['CFF '].cff.topDictIndex[0]\n\t\t\tcharStrings = topDict.CharStrings\n\t\t\tfor name in ttFont.getGlyphOrder():\n\t\t\t\tcs = charStrings[name]\n\t\t\t\tbounds = cs.calcBounds(charStrings)\n\t\t\t\tif bounds is not None:\n\t\t\t\t\tboundsWidthDict[name] = int(\n\t\t\t\t\t\tmath.ceil(bounds[2]) - math.floor(bounds[0]))\n\n\t\tif boundsWidthDict:\n\t\t\tminLeftSideBearing = float('inf')\n\t\t\tminRightSideBearing = float('inf')\n\t\t\txMaxExtent = -float('inf')\n\t\t\tfor name, boundsWidth in boundsWidthDict.items():\n\t\t\t\tadvanceWidth, lsb = hmtxTable[name]\n\t\t\t\trsb = advanceWidth - lsb - boundsWidth\n\t\t\t\textent = lsb + 
boundsWidth\n\t\t\t\tminLeftSideBearing = min(minLeftSideBearing, lsb)\n\t\t\t\tminRightSideBearing = min(minRightSideBearing, rsb)\n\t\t\t\txMaxExtent = max(xMaxExtent, extent)\n\t\t\tself.minLeftSideBearing = minLeftSideBearing\n\t\t\tself.minRightSideBearing = minRightSideBearing\n\t\t\tself.xMaxExtent = xMaxExtent\n\n\t\telse: # No glyph has outlines.\n\t\t\tself.minLeftSideBearing = 0\n\t\t\tself.minRightSideBearing = 0\n\t\t\tself.xMaxExtent = 0\n\n\tdef toXML(self, writer, ttFont):\n\t\tformatstring, names, fixes = sstruct.getformat(hheaFormat)\n\t\tfor name in names:\n\t\t\tvalue = getattr(self, name)\n\t\t\tif name == \"tableVersion\":\n\t\t\t\tvalue = fi2ve(value)\n\t\t\t\tvalue = \"0x%08x\" % value\n\t\t\twriter.simpletag(name, value=value)\n\t\t\twriter.newline()\n\n\tdef fromXML(self, name, attrs, content, ttFont):\n\t\tif name == \"tableVersion\":\n\t\t\tsetattr(self, name, ve2fi(attrs[\"value\"]))\n\t\t\treturn\n\t\tsetattr(self, name, safeEval(attrs[\"value\"]))\n", "path": "Lib/fontTools/ttLib/tables/_h_h_e_a.py"}], "after_files": [{"content": "from fontTools.misc.py23 import *\nfrom fontTools.misc import sstruct\nfrom fontTools.misc.textTools import safeEval\nfrom fontTools.misc.fixedTools import (\n\tensureVersionIsLong as fi2ve, versionToFixed as ve2fi)\nfrom . import DefaultTable\nimport math\n\n\nhheaFormat = \"\"\"\n\t\t> # big endian\n\t\ttableVersion: L\n\t\tascent: h\n\t\tdescent: h\n\t\tlineGap: h\n\t\tadvanceWidthMax: H\n\t\tminLeftSideBearing: h\n\t\tminRightSideBearing: h\n\t\txMaxExtent: h\n\t\tcaretSlopeRise: h\n\t\tcaretSlopeRun: h\n\t\tcaretOffset: h\n\t\treserved0: h\n\t\treserved1: h\n\t\treserved2: h\n\t\treserved3: h\n\t\tmetricDataFormat: h\n\t\tnumberOfHMetrics: H\n\"\"\"\n\n\nclass table__h_h_e_a(DefaultTable.DefaultTable):\n\n\t# Note: Keep in sync with table__v_h_e_a\n\n\tdependencies = ['hmtx', 'glyf', 'CFF ']\n\n\t# OpenType spec renamed these, add aliases for compatibility\n\t@property\n\tdef ascender(self): return self.ascent\n\n\[email protected]\n\tdef ascender(self,value): self.ascent = value\n\n\t@property\n\tdef descender(self): return self.descent\n\n\[email protected]\n\tdef descender(self,value): self.descent = value\n\n\tdef decompile(self, data, ttFont):\n\t\tsstruct.unpack(hheaFormat, data, self)\n\n\tdef compile(self, ttFont):\n\t\tif ttFont.recalcBBoxes and (ttFont.isLoaded('glyf') or ttFont.isLoaded('CFF ')):\n\t\t\tself.recalc(ttFont)\n\t\tself.tableVersion = fi2ve(self.tableVersion)\n\t\treturn sstruct.pack(hheaFormat, self)\n\n\tdef recalc(self, ttFont):\n\t\tif 'hmtx' in ttFont:\n\t\t\thmtxTable = ttFont['hmtx']\n\t\t\tself.advanceWidthMax = max(adv for adv, _ in hmtxTable.metrics.values())\n\n\t\tboundsWidthDict = {}\n\t\tif 'glyf' in ttFont:\n\t\t\tglyfTable = ttFont['glyf']\n\t\t\tfor name in ttFont.getGlyphOrder():\n\t\t\t\tg = glyfTable[name]\n\t\t\t\tif g.numberOfContours == 0:\n\t\t\t\t\tcontinue\n\t\t\t\tif g.numberOfContours < 0 and not hasattr(g, \"xMax\"):\n\t\t\t\t\t# Composite glyph without extents set.\n\t\t\t\t\t# Calculate those.\n\t\t\t\t\tg.recalcBounds(glyfTable)\n\t\t\t\tboundsWidthDict[name] = g.xMax - g.xMin\n\t\telif 'CFF ' in ttFont:\n\t\t\ttopDict = ttFont['CFF '].cff.topDictIndex[0]\n\t\t\tcharStrings = topDict.CharStrings\n\t\t\tfor name in ttFont.getGlyphOrder():\n\t\t\t\tcs = charStrings[name]\n\t\t\t\tbounds = cs.calcBounds(charStrings)\n\t\t\t\tif bounds is not None:\n\t\t\t\t\tboundsWidthDict[name] = int(\n\t\t\t\t\t\tmath.ceil(bounds[2]) - math.floor(bounds[0]))\n\n\t\tif 
boundsWidthDict:\n\t\t\tminLeftSideBearing = float('inf')\n\t\t\tminRightSideBearing = float('inf')\n\t\t\txMaxExtent = -float('inf')\n\t\t\tfor name, boundsWidth in boundsWidthDict.items():\n\t\t\t\tadvanceWidth, lsb = hmtxTable[name]\n\t\t\t\trsb = advanceWidth - lsb - boundsWidth\n\t\t\t\textent = lsb + boundsWidth\n\t\t\t\tminLeftSideBearing = min(minLeftSideBearing, lsb)\n\t\t\t\tminRightSideBearing = min(minRightSideBearing, rsb)\n\t\t\t\txMaxExtent = max(xMaxExtent, extent)\n\t\t\tself.minLeftSideBearing = minLeftSideBearing\n\t\t\tself.minRightSideBearing = minRightSideBearing\n\t\t\tself.xMaxExtent = xMaxExtent\n\n\t\telse: # No glyph has outlines.\n\t\t\tself.minLeftSideBearing = 0\n\t\t\tself.minRightSideBearing = 0\n\t\t\tself.xMaxExtent = 0\n\n\tdef toXML(self, writer, ttFont):\n\t\tformatstring, names, fixes = sstruct.getformat(hheaFormat)\n\t\tfor name in names:\n\t\t\tvalue = getattr(self, name)\n\t\t\tif name == \"tableVersion\":\n\t\t\t\tvalue = fi2ve(value)\n\t\t\t\tvalue = \"0x%08x\" % value\n\t\t\twriter.simpletag(name, value=value)\n\t\t\twriter.newline()\n\n\tdef fromXML(self, name, attrs, content, ttFont):\n\t\tif name == \"tableVersion\":\n\t\t\tsetattr(self, name, ve2fi(attrs[\"value\"]))\n\t\t\treturn\n\t\tsetattr(self, name, safeEval(attrs[\"value\"]))\n", "path": "Lib/fontTools/ttLib/tables/_h_h_e_a.py"}]} |
gh_patches_debug_1612 | rasdani/github-patches | git_diff | celery__celery-7553 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Use callable in utils.functional.head_from_fun
After the discussion in #3952, we should investigate whether an improvement can be applied by using the built-in `callable` instead of `hasattr(fun, '__call__')`.
https://docs.python.org/2/library/functions.html#callable
--- END ISSUE ---
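The two checks agree for functions and for instances of classes that define `__call__`, but they can disagree: `callable()` looks `__call__` up on the *type*, while `hasattr()` also finds instance attributes, which do not actually make an object callable. A small demonstration (standard Python, no celery needed):

```python
class WithInstanceCall:
    def __init__(self):
        # An instance attribute named __call__ does NOT make the object
        # callable; the interpreter resolves calls on the type.
        self.__call__ = lambda: "hi"

obj = WithInstanceCall()
print(hasattr(obj, '__call__'))   # True  -- misleading
print(callable(obj))              # False -- accurate
# obj()   # would raise TypeError: 'WithInstanceCall' object is not callable
```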
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `celery/utils/functional.py`
Content:
```
1 """Functional-style utilities."""
2 import inspect
3 import sys
4 from collections import UserList
5 from functools import partial
6 from itertools import islice, tee, zip_longest
7
8 from kombu.utils.functional import LRUCache, dictfilter, is_list, lazy, maybe_evaluate, maybe_list, memoize
9 from vine import promise
10
11 __all__ = (
12 'LRUCache', 'is_list', 'maybe_list', 'memoize', 'mlazy', 'noop',
13 'first', 'firstmethod', 'chunks', 'padlist', 'mattrgetter', 'uniq',
14 'regen', 'dictfilter', 'lazy', 'maybe_evaluate', 'head_from_fun',
15 'maybe', 'fun_accepts_kwargs',
16 )
17
18 FUNHEAD_TEMPLATE = """
19 def {fun_name}({fun_args}):
20 return {fun_value}
21 """
22
23
24 class DummyContext:
25
26 def __enter__(self):
27 return self
28
29 def __exit__(self, *exc_info):
30 pass
31
32
33 class mlazy(lazy):
34 """Memoized lazy evaluation.
35
36 The function is only evaluated once, every subsequent access
37 will return the same value.
38 """
39
40 #: Set to :const:`True` after the object has been evaluated.
41 evaluated = False
42 _value = None
43
44 def evaluate(self):
45 if not self.evaluated:
46 self._value = super().evaluate()
47 self.evaluated = True
48 return self._value
49
50
51 def noop(*args, **kwargs):
52 """No operation.
53
54 Takes any arguments/keyword arguments and does nothing.
55 """
56
57
58 def pass1(arg, *args, **kwargs):
59 """Return the first positional argument."""
60 return arg
61
62
63 def evaluate_promises(it):
64 for value in it:
65 if isinstance(value, promise):
66 value = value()
67 yield value
68
69
70 def first(predicate, it):
71 """Return the first element in ``it`` that ``predicate`` accepts.
72
73 If ``predicate`` is None it will return the first item that's not
74 :const:`None`.
75 """
76 return next(
77 (v for v in evaluate_promises(it) if (
78 predicate(v) if predicate is not None else v is not None)),
79 None,
80 )
81
82
83 def firstmethod(method, on_call=None):
84 """Multiple dispatch.
85
86 Return a function that with a list of instances,
87 finds the first instance that gives a value for the given method.
88
89 The list can also contain lazy instances
90 (:class:`~kombu.utils.functional.lazy`.)
91 """
92
93 def _matcher(it, *args, **kwargs):
94 for obj in it:
95 try:
96 meth = getattr(maybe_evaluate(obj), method)
97 reply = (on_call(meth, *args, **kwargs) if on_call
98 else meth(*args, **kwargs))
99 except AttributeError:
100 pass
101 else:
102 if reply is not None:
103 return reply
104
105 return _matcher
106
107
108 def chunks(it, n):
109 """Split an iterator into chunks with `n` elements each.
110
111 Warning:
112 ``it`` must be an actual iterator, if you pass this a
113 concrete sequence will get you repeating elements.
114
115 So ``chunks(iter(range(1000)), 10)`` is fine, but
116 ``chunks(range(1000), 10)`` is not.
117
118 Example:
119 # n == 2
120 >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2)
121 >>> list(x)
122 [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]]
123
124 # n == 3
125 >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3)
126 >>> list(x)
127 [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]]
128 """
129 for item in it:
130 yield [item] + list(islice(it, n - 1))
131
132
133 def padlist(container, size, default=None):
134 """Pad list with default elements.
135
136 Example:
137 >>> first, last, city = padlist(['George', 'Costanza', 'NYC'], 3)
138 ('George', 'Costanza', 'NYC')
139 >>> first, last, city = padlist(['George', 'Costanza'], 3)
140 ('George', 'Costanza', None)
141 >>> first, last, city, planet = padlist(
142 ... ['George', 'Costanza', 'NYC'], 4, default='Earth',
143 ... )
144 ('George', 'Costanza', 'NYC', 'Earth')
145 """
146 return list(container)[:size] + [default] * (size - len(container))
147
148
149 def mattrgetter(*attrs):
150 """Get attributes, ignoring attribute errors.
151
152 Like :func:`operator.itemgetter` but return :const:`None` on missing
153 attributes instead of raising :exc:`AttributeError`.
154 """
155 return lambda obj: {attr: getattr(obj, attr, None) for attr in attrs}
156
157
158 def uniq(it):
159 """Return all unique elements in ``it``, preserving order."""
160 seen = set()
161 return (seen.add(obj) or obj for obj in it if obj not in seen)
162
163
164 def lookahead(it):
165 """Yield pairs of (current, next) items in `it`.
166
167 `next` is None if `current` is the last item.
168 Example:
169 >>> list(lookahead(x for x in range(6)))
170 [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, None)]
171 """
172 a, b = tee(it)
173 next(b, None)
174 return zip_longest(a, b)
175
176
177 def regen(it):
178 """Convert iterator to an object that can be consumed multiple times.
179
180 ``Regen`` takes any iterable, and if the object is an
181 generator it will cache the evaluated list on first access,
182 so that the generator can be "consumed" multiple times.
183 """
184 if isinstance(it, (list, tuple)):
185 return it
186 return _regen(it)
187
188
189 class _regen(UserList, list):
190 # must be subclass of list so that json can encode.
191
192 def __init__(self, it):
193 # pylint: disable=super-init-not-called
194 # UserList creates a new list and sets .data, so we don't
195 # want to call init here.
196 self.__it = it
197 self.__consumed = []
198 self.__done = False
199
200 def __reduce__(self):
201 return list, (self.data,)
202
203 def __length_hint__(self):
204 return self.__it.__length_hint__()
205
206 def __lookahead_consume(self, limit=None):
207 if not self.__done and (limit is None or limit > 0):
208 it = iter(self.__it)
209 try:
210 now = next(it)
211 except StopIteration:
212 return
213 self.__consumed.append(now)
214 # Maintain a single look-ahead to ensure we set `__done` when the
215 # underlying iterator gets exhausted
216 while not self.__done:
217 try:
218 next_ = next(it)
219 self.__consumed.append(next_)
220 except StopIteration:
221 self.__done = True
222 break
223 finally:
224 yield now
225 now = next_
226 # We can break out when `limit` is exhausted
227 if limit is not None:
228 limit -= 1
229 if limit <= 0:
230 break
231
232 def __iter__(self):
233 yield from self.__consumed
234 yield from self.__lookahead_consume()
235
236 def __getitem__(self, index):
237 if index < 0:
238 return self.data[index]
239 # Consume elements up to the desired index prior to attempting to
240 # access it from within `__consumed`
241 consume_count = index - len(self.__consumed) + 1
242 for _ in self.__lookahead_consume(limit=consume_count):
243 pass
244 return self.__consumed[index]
245
246 def __bool__(self):
247 if len(self.__consumed):
248 return True
249
250 try:
251 next(iter(self))
252 except StopIteration:
253 return False
254 else:
255 return True
256
257 @property
258 def data(self):
259 if not self.__done:
260 self.__consumed.extend(self.__it)
261 self.__done = True
262 return self.__consumed
263
264 def __repr__(self):
265 return "<{}: [{}{}]>".format(
266 self.__class__.__name__,
267 ", ".join(repr(e) for e in self.__consumed),
268 "..." if not self.__done else "",
269 )
270
271
272 def _argsfromspec(spec, replace_defaults=True):
273 if spec.defaults:
274 split = len(spec.defaults)
275 defaults = (list(range(len(spec.defaults))) if replace_defaults
276 else spec.defaults)
277 positional = spec.args[:-split]
278 optional = list(zip(spec.args[-split:], defaults))
279 else:
280 positional, optional = spec.args, []
281
282 varargs = spec.varargs
283 varkw = spec.varkw
284 if spec.kwonlydefaults:
285 kwonlyargs = set(spec.kwonlyargs) - set(spec.kwonlydefaults.keys())
286 if replace_defaults:
287 kwonlyargs_optional = [
288 (kw, i) for i, kw in enumerate(spec.kwonlydefaults.keys())
289 ]
290 else:
291 kwonlyargs_optional = list(spec.kwonlydefaults.items())
292 else:
293 kwonlyargs, kwonlyargs_optional = spec.kwonlyargs, []
294
295 return ', '.join(filter(None, [
296 ', '.join(positional),
297 ', '.join(f'{k}={v}' for k, v in optional),
298 f'*{varargs}' if varargs else None,
299 '*' if (kwonlyargs or kwonlyargs_optional) and not varargs else None,
300 ', '.join(kwonlyargs) if kwonlyargs else None,
301 ', '.join(f'{k}="{v}"' for k, v in kwonlyargs_optional),
302 f'**{varkw}' if varkw else None,
303 ]))
304
305
306 def head_from_fun(fun, bound=False, debug=False):
307 """Generate signature function from actual function."""
308 # we could use inspect.Signature here, but that implementation
309 # is very slow since it implements the argument checking
310 # in pure-Python. Instead we use exec to create a new function
311 # with an empty body, meaning it has the same performance as
312 # as just calling a function.
313 is_function = inspect.isfunction(fun)
314 is_callable = hasattr(fun, '__call__')
315 is_cython = fun.__class__.__name__ == 'cython_function_or_method'
316 is_method = inspect.ismethod(fun)
317
318 if not is_function and is_callable and not is_method and not is_cython:
319 name, fun = fun.__class__.__name__, fun.__call__
320 else:
321 name = fun.__name__
322 definition = FUNHEAD_TEMPLATE.format(
323 fun_name=name,
324 fun_args=_argsfromspec(inspect.getfullargspec(fun)),
325 fun_value=1,
326 )
327 if debug: # pragma: no cover
328 print(definition, file=sys.stderr)
329 namespace = {'__name__': fun.__module__}
330 # pylint: disable=exec-used
331 # Tasks are rarely, if ever, created at runtime - exec here is fine.
332 exec(definition, namespace)
333 result = namespace[name]
334 result._source = definition
335 if bound:
336 return partial(result, object())
337 return result
338
339
340 def arity_greater(fun, n):
341 argspec = inspect.getfullargspec(fun)
342 return argspec.varargs or len(argspec.args) > n
343
344
345 def fun_takes_argument(name, fun, position=None):
346 spec = inspect.getfullargspec(fun)
347 return (
348 spec.varkw or spec.varargs or
349 (len(spec.args) >= position if position else name in spec.args)
350 )
351
352
353 def fun_accepts_kwargs(fun):
354 """Return true if function accepts arbitrary keyword arguments."""
355 return any(
356 p for p in inspect.signature(fun).parameters.values()
357 if p.kind == p.VAR_KEYWORD
358 )
359
360
361 def maybe(typ, val):
362 """Call typ on value if val is defined."""
363 return typ(val) if val is not None else val
364
365
366 def seq_concat_item(seq, item):
367 """Return copy of sequence seq with item added.
368
369 Returns:
370 Sequence: if seq is a tuple, the result will be a tuple,
371 otherwise it depends on the implementation of ``__add__``.
372 """
373 return seq + (item,) if isinstance(seq, tuple) else seq + [item]
374
375
376 def seq_concat_seq(a, b):
377 """Concatenate two sequences: ``a + b``.
378
379 Returns:
380 Sequence: The return value will depend on the largest sequence
381 - if b is larger and is a tuple, the return value will be a tuple.
382 - if a is larger and is a list, the return value will be a list,
383 """
384 # find the type of the largest sequence
385 prefer = type(max([a, b], key=len))
386 # convert the smallest list to the type of the largest sequence.
387 if not isinstance(a, prefer):
388 a = prefer(a)
389 if not isinstance(b, prefer):
390 b = prefer(b)
391 return a + b
392
393
394 def is_numeric_value(value):
395 return isinstance(value, (int, float)) and not isinstance(value, bool)
396
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/celery/utils/functional.py b/celery/utils/functional.py
--- a/celery/utils/functional.py
+++ b/celery/utils/functional.py
@@ -311,7 +311,7 @@
# with an empty body, meaning it has the same performance as
# as just calling a function.
is_function = inspect.isfunction(fun)
- is_callable = hasattr(fun, '__call__')
+ is_callable = callable(fun)
is_cython = fun.__class__.__name__ == 'cython_function_or_method'
is_method = inspect.ismethod(fun)
| {"golden_diff": "diff --git a/celery/utils/functional.py b/celery/utils/functional.py\n--- a/celery/utils/functional.py\n+++ b/celery/utils/functional.py\n@@ -311,7 +311,7 @@\n # with an empty body, meaning it has the same performance as\n # as just calling a function.\n is_function = inspect.isfunction(fun)\n- is_callable = hasattr(fun, '__call__')\n+ is_callable = callable(fun)\n is_cython = fun.__class__.__name__ == 'cython_function_or_method'\n is_method = inspect.ismethod(fun)\n", "issue": "Use callable in utils.functional.head_from_fun\nAfter the discussion in #3952, we should investigate whether an improvement can be applied using the builtin `callable` instead of `hasattr(fun, '__call__')`.\n\nhttps://docs.python.org/2/library/functions.html#callable\n\n\n", "before_files": [{"content": "\"\"\"Functional-style utilities.\"\"\"\nimport inspect\nimport sys\nfrom collections import UserList\nfrom functools import partial\nfrom itertools import islice, tee, zip_longest\n\nfrom kombu.utils.functional import LRUCache, dictfilter, is_list, lazy, maybe_evaluate, maybe_list, memoize\nfrom vine import promise\n\n__all__ = (\n 'LRUCache', 'is_list', 'maybe_list', 'memoize', 'mlazy', 'noop',\n 'first', 'firstmethod', 'chunks', 'padlist', 'mattrgetter', 'uniq',\n 'regen', 'dictfilter', 'lazy', 'maybe_evaluate', 'head_from_fun',\n 'maybe', 'fun_accepts_kwargs',\n)\n\nFUNHEAD_TEMPLATE = \"\"\"\ndef {fun_name}({fun_args}):\n return {fun_value}\n\"\"\"\n\n\nclass DummyContext:\n\n def __enter__(self):\n return self\n\n def __exit__(self, *exc_info):\n pass\n\n\nclass mlazy(lazy):\n \"\"\"Memoized lazy evaluation.\n\n The function is only evaluated once, every subsequent access\n will return the same value.\n \"\"\"\n\n #: Set to :const:`True` after the object has been evaluated.\n evaluated = False\n _value = None\n\n def evaluate(self):\n if not self.evaluated:\n self._value = super().evaluate()\n self.evaluated = True\n return self._value\n\n\ndef noop(*args, **kwargs):\n \"\"\"No operation.\n\n Takes any arguments/keyword arguments and does nothing.\n \"\"\"\n\n\ndef pass1(arg, *args, **kwargs):\n \"\"\"Return the first positional argument.\"\"\"\n return arg\n\n\ndef evaluate_promises(it):\n for value in it:\n if isinstance(value, promise):\n value = value()\n yield value\n\n\ndef first(predicate, it):\n \"\"\"Return the first element in ``it`` that ``predicate`` accepts.\n\n If ``predicate`` is None it will return the first item that's not\n :const:`None`.\n \"\"\"\n return next(\n (v for v in evaluate_promises(it) if (\n predicate(v) if predicate is not None else v is not None)),\n None,\n )\n\n\ndef firstmethod(method, on_call=None):\n \"\"\"Multiple dispatch.\n\n Return a function that with a list of instances,\n finds the first instance that gives a value for the given method.\n\n The list can also contain lazy instances\n (:class:`~kombu.utils.functional.lazy`.)\n \"\"\"\n\n def _matcher(it, *args, **kwargs):\n for obj in it:\n try:\n meth = getattr(maybe_evaluate(obj), method)\n reply = (on_call(meth, *args, **kwargs) if on_call\n else meth(*args, **kwargs))\n except AttributeError:\n pass\n else:\n if reply is not None:\n return reply\n\n return _matcher\n\n\ndef chunks(it, n):\n \"\"\"Split an iterator into chunks with `n` elements each.\n\n Warning:\n ``it`` must be an actual iterator, if you pass this a\n concrete sequence will get you repeating elements.\n\n So ``chunks(iter(range(1000)), 10)`` is fine, but\n ``chunks(range(1000), 10)`` is not.\n\n Example:\n # n == 2\n >>> x = 
chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2)\n >>> list(x)\n [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]]\n\n # n == 3\n >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3)\n >>> list(x)\n [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]]\n \"\"\"\n for item in it:\n yield [item] + list(islice(it, n - 1))\n\n\ndef padlist(container, size, default=None):\n \"\"\"Pad list with default elements.\n\n Example:\n >>> first, last, city = padlist(['George', 'Costanza', 'NYC'], 3)\n ('George', 'Costanza', 'NYC')\n >>> first, last, city = padlist(['George', 'Costanza'], 3)\n ('George', 'Costanza', None)\n >>> first, last, city, planet = padlist(\n ... ['George', 'Costanza', 'NYC'], 4, default='Earth',\n ... )\n ('George', 'Costanza', 'NYC', 'Earth')\n \"\"\"\n return list(container)[:size] + [default] * (size - len(container))\n\n\ndef mattrgetter(*attrs):\n \"\"\"Get attributes, ignoring attribute errors.\n\n Like :func:`operator.itemgetter` but return :const:`None` on missing\n attributes instead of raising :exc:`AttributeError`.\n \"\"\"\n return lambda obj: {attr: getattr(obj, attr, None) for attr in attrs}\n\n\ndef uniq(it):\n \"\"\"Return all unique elements in ``it``, preserving order.\"\"\"\n seen = set()\n return (seen.add(obj) or obj for obj in it if obj not in seen)\n\n\ndef lookahead(it):\n \"\"\"Yield pairs of (current, next) items in `it`.\n\n `next` is None if `current` is the last item.\n Example:\n >>> list(lookahead(x for x in range(6)))\n [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, None)]\n \"\"\"\n a, b = tee(it)\n next(b, None)\n return zip_longest(a, b)\n\n\ndef regen(it):\n \"\"\"Convert iterator to an object that can be consumed multiple times.\n\n ``Regen`` takes any iterable, and if the object is an\n generator it will cache the evaluated list on first access,\n so that the generator can be \"consumed\" multiple times.\n \"\"\"\n if isinstance(it, (list, tuple)):\n return it\n return _regen(it)\n\n\nclass _regen(UserList, list):\n # must be subclass of list so that json can encode.\n\n def __init__(self, it):\n # pylint: disable=super-init-not-called\n # UserList creates a new list and sets .data, so we don't\n # want to call init here.\n self.__it = it\n self.__consumed = []\n self.__done = False\n\n def __reduce__(self):\n return list, (self.data,)\n\n def __length_hint__(self):\n return self.__it.__length_hint__()\n\n def __lookahead_consume(self, limit=None):\n if not self.__done and (limit is None or limit > 0):\n it = iter(self.__it)\n try:\n now = next(it)\n except StopIteration:\n return\n self.__consumed.append(now)\n # Maintain a single look-ahead to ensure we set `__done` when the\n # underlying iterator gets exhausted\n while not self.__done:\n try:\n next_ = next(it)\n self.__consumed.append(next_)\n except StopIteration:\n self.__done = True\n break\n finally:\n yield now\n now = next_\n # We can break out when `limit` is exhausted\n if limit is not None:\n limit -= 1\n if limit <= 0:\n break\n\n def __iter__(self):\n yield from self.__consumed\n yield from self.__lookahead_consume()\n\n def __getitem__(self, index):\n if index < 0:\n return self.data[index]\n # Consume elements up to the desired index prior to attempting to\n # access it from within `__consumed`\n consume_count = index - len(self.__consumed) + 1\n for _ in self.__lookahead_consume(limit=consume_count):\n pass\n return self.__consumed[index]\n\n def __bool__(self):\n if len(self.__consumed):\n return True\n\n try:\n next(iter(self))\n except StopIteration:\n return False\n else:\n 
return True\n\n @property\n def data(self):\n if not self.__done:\n self.__consumed.extend(self.__it)\n self.__done = True\n return self.__consumed\n\n def __repr__(self):\n return \"<{}: [{}{}]>\".format(\n self.__class__.__name__,\n \", \".join(repr(e) for e in self.__consumed),\n \"...\" if not self.__done else \"\",\n )\n\n\ndef _argsfromspec(spec, replace_defaults=True):\n if spec.defaults:\n split = len(spec.defaults)\n defaults = (list(range(len(spec.defaults))) if replace_defaults\n else spec.defaults)\n positional = spec.args[:-split]\n optional = list(zip(spec.args[-split:], defaults))\n else:\n positional, optional = spec.args, []\n\n varargs = spec.varargs\n varkw = spec.varkw\n if spec.kwonlydefaults:\n kwonlyargs = set(spec.kwonlyargs) - set(spec.kwonlydefaults.keys())\n if replace_defaults:\n kwonlyargs_optional = [\n (kw, i) for i, kw in enumerate(spec.kwonlydefaults.keys())\n ]\n else:\n kwonlyargs_optional = list(spec.kwonlydefaults.items())\n else:\n kwonlyargs, kwonlyargs_optional = spec.kwonlyargs, []\n\n return ', '.join(filter(None, [\n ', '.join(positional),\n ', '.join(f'{k}={v}' for k, v in optional),\n f'*{varargs}' if varargs else None,\n '*' if (kwonlyargs or kwonlyargs_optional) and not varargs else None,\n ', '.join(kwonlyargs) if kwonlyargs else None,\n ', '.join(f'{k}=\"{v}\"' for k, v in kwonlyargs_optional),\n f'**{varkw}' if varkw else None,\n ]))\n\n\ndef head_from_fun(fun, bound=False, debug=False):\n \"\"\"Generate signature function from actual function.\"\"\"\n # we could use inspect.Signature here, but that implementation\n # is very slow since it implements the argument checking\n # in pure-Python. Instead we use exec to create a new function\n # with an empty body, meaning it has the same performance as\n # as just calling a function.\n is_function = inspect.isfunction(fun)\n is_callable = hasattr(fun, '__call__')\n is_cython = fun.__class__.__name__ == 'cython_function_or_method'\n is_method = inspect.ismethod(fun)\n\n if not is_function and is_callable and not is_method and not is_cython:\n name, fun = fun.__class__.__name__, fun.__call__\n else:\n name = fun.__name__\n definition = FUNHEAD_TEMPLATE.format(\n fun_name=name,\n fun_args=_argsfromspec(inspect.getfullargspec(fun)),\n fun_value=1,\n )\n if debug: # pragma: no cover\n print(definition, file=sys.stderr)\n namespace = {'__name__': fun.__module__}\n # pylint: disable=exec-used\n # Tasks are rarely, if ever, created at runtime - exec here is fine.\n exec(definition, namespace)\n result = namespace[name]\n result._source = definition\n if bound:\n return partial(result, object())\n return result\n\n\ndef arity_greater(fun, n):\n argspec = inspect.getfullargspec(fun)\n return argspec.varargs or len(argspec.args) > n\n\n\ndef fun_takes_argument(name, fun, position=None):\n spec = inspect.getfullargspec(fun)\n return (\n spec.varkw or spec.varargs or\n (len(spec.args) >= position if position else name in spec.args)\n )\n\n\ndef fun_accepts_kwargs(fun):\n \"\"\"Return true if function accepts arbitrary keyword arguments.\"\"\"\n return any(\n p for p in inspect.signature(fun).parameters.values()\n if p.kind == p.VAR_KEYWORD\n )\n\n\ndef maybe(typ, val):\n \"\"\"Call typ on value if val is defined.\"\"\"\n return typ(val) if val is not None else val\n\n\ndef seq_concat_item(seq, item):\n \"\"\"Return copy of sequence seq with item added.\n\n Returns:\n Sequence: if seq is a tuple, the result will be a tuple,\n otherwise it depends on the implementation of ``__add__``.\n \"\"\"\n return seq + 
(item,) if isinstance(seq, tuple) else seq + [item]\n\n\ndef seq_concat_seq(a, b):\n \"\"\"Concatenate two sequences: ``a + b``.\n\n Returns:\n Sequence: The return value will depend on the largest sequence\n - if b is larger and is a tuple, the return value will be a tuple.\n - if a is larger and is a list, the return value will be a list,\n \"\"\"\n # find the type of the largest sequence\n prefer = type(max([a, b], key=len))\n # convert the smallest list to the type of the largest sequence.\n if not isinstance(a, prefer):\n a = prefer(a)\n if not isinstance(b, prefer):\n b = prefer(b)\n return a + b\n\n\ndef is_numeric_value(value):\n return isinstance(value, (int, float)) and not isinstance(value, bool)\n", "path": "celery/utils/functional.py"}], "after_files": [{"content": "\"\"\"Functional-style utilities.\"\"\"\nimport inspect\nimport sys\nfrom collections import UserList\nfrom functools import partial\nfrom itertools import islice, tee, zip_longest\n\nfrom kombu.utils.functional import LRUCache, dictfilter, is_list, lazy, maybe_evaluate, maybe_list, memoize\nfrom vine import promise\n\n__all__ = (\n 'LRUCache', 'is_list', 'maybe_list', 'memoize', 'mlazy', 'noop',\n 'first', 'firstmethod', 'chunks', 'padlist', 'mattrgetter', 'uniq',\n 'regen', 'dictfilter', 'lazy', 'maybe_evaluate', 'head_from_fun',\n 'maybe', 'fun_accepts_kwargs',\n)\n\nFUNHEAD_TEMPLATE = \"\"\"\ndef {fun_name}({fun_args}):\n return {fun_value}\n\"\"\"\n\n\nclass DummyContext:\n\n def __enter__(self):\n return self\n\n def __exit__(self, *exc_info):\n pass\n\n\nclass mlazy(lazy):\n \"\"\"Memoized lazy evaluation.\n\n The function is only evaluated once, every subsequent access\n will return the same value.\n \"\"\"\n\n #: Set to :const:`True` after the object has been evaluated.\n evaluated = False\n _value = None\n\n def evaluate(self):\n if not self.evaluated:\n self._value = super().evaluate()\n self.evaluated = True\n return self._value\n\n\ndef noop(*args, **kwargs):\n \"\"\"No operation.\n\n Takes any arguments/keyword arguments and does nothing.\n \"\"\"\n\n\ndef pass1(arg, *args, **kwargs):\n \"\"\"Return the first positional argument.\"\"\"\n return arg\n\n\ndef evaluate_promises(it):\n for value in it:\n if isinstance(value, promise):\n value = value()\n yield value\n\n\ndef first(predicate, it):\n \"\"\"Return the first element in ``it`` that ``predicate`` accepts.\n\n If ``predicate`` is None it will return the first item that's not\n :const:`None`.\n \"\"\"\n return next(\n (v for v in evaluate_promises(it) if (\n predicate(v) if predicate is not None else v is not None)),\n None,\n )\n\n\ndef firstmethod(method, on_call=None):\n \"\"\"Multiple dispatch.\n\n Return a function that with a list of instances,\n finds the first instance that gives a value for the given method.\n\n The list can also contain lazy instances\n (:class:`~kombu.utils.functional.lazy`.)\n \"\"\"\n\n def _matcher(it, *args, **kwargs):\n for obj in it:\n try:\n meth = getattr(maybe_evaluate(obj), method)\n reply = (on_call(meth, *args, **kwargs) if on_call\n else meth(*args, **kwargs))\n except AttributeError:\n pass\n else:\n if reply is not None:\n return reply\n\n return _matcher\n\n\ndef chunks(it, n):\n \"\"\"Split an iterator into chunks with `n` elements each.\n\n Warning:\n ``it`` must be an actual iterator, if you pass this a\n concrete sequence will get you repeating elements.\n\n So ``chunks(iter(range(1000)), 10)`` is fine, but\n ``chunks(range(1000), 10)`` is not.\n\n Example:\n # n == 2\n >>> x = chunks(iter([0, 1, 2, 
3, 4, 5, 6, 7, 8, 9, 10]), 2)\n >>> list(x)\n [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]]\n\n # n == 3\n >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3)\n >>> list(x)\n [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]]\n \"\"\"\n for item in it:\n yield [item] + list(islice(it, n - 1))\n\n\ndef padlist(container, size, default=None):\n \"\"\"Pad list with default elements.\n\n Example:\n >>> first, last, city = padlist(['George', 'Costanza', 'NYC'], 3)\n ('George', 'Costanza', 'NYC')\n >>> first, last, city = padlist(['George', 'Costanza'], 3)\n ('George', 'Costanza', None)\n >>> first, last, city, planet = padlist(\n ... ['George', 'Costanza', 'NYC'], 4, default='Earth',\n ... )\n ('George', 'Costanza', 'NYC', 'Earth')\n \"\"\"\n return list(container)[:size] + [default] * (size - len(container))\n\n\ndef mattrgetter(*attrs):\n \"\"\"Get attributes, ignoring attribute errors.\n\n Like :func:`operator.itemgetter` but return :const:`None` on missing\n attributes instead of raising :exc:`AttributeError`.\n \"\"\"\n return lambda obj: {attr: getattr(obj, attr, None) for attr in attrs}\n\n\ndef uniq(it):\n \"\"\"Return all unique elements in ``it``, preserving order.\"\"\"\n seen = set()\n return (seen.add(obj) or obj for obj in it if obj not in seen)\n\n\ndef lookahead(it):\n \"\"\"Yield pairs of (current, next) items in `it`.\n\n `next` is None if `current` is the last item.\n Example:\n >>> list(lookahead(x for x in range(6)))\n [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, None)]\n \"\"\"\n a, b = tee(it)\n next(b, None)\n return zip_longest(a, b)\n\n\ndef regen(it):\n \"\"\"Convert iterator to an object that can be consumed multiple times.\n\n ``Regen`` takes any iterable, and if the object is an\n generator it will cache the evaluated list on first access,\n so that the generator can be \"consumed\" multiple times.\n \"\"\"\n if isinstance(it, (list, tuple)):\n return it\n return _regen(it)\n\n\nclass _regen(UserList, list):\n # must be subclass of list so that json can encode.\n\n def __init__(self, it):\n # pylint: disable=super-init-not-called\n # UserList creates a new list and sets .data, so we don't\n # want to call init here.\n self.__it = it\n self.__consumed = []\n self.__done = False\n\n def __reduce__(self):\n return list, (self.data,)\n\n def __length_hint__(self):\n return self.__it.__length_hint__()\n\n def __lookahead_consume(self, limit=None):\n if not self.__done and (limit is None or limit > 0):\n it = iter(self.__it)\n try:\n now = next(it)\n except StopIteration:\n return\n self.__consumed.append(now)\n # Maintain a single look-ahead to ensure we set `__done` when the\n # underlying iterator gets exhausted\n while not self.__done:\n try:\n next_ = next(it)\n self.__consumed.append(next_)\n except StopIteration:\n self.__done = True\n break\n finally:\n yield now\n now = next_\n # We can break out when `limit` is exhausted\n if limit is not None:\n limit -= 1\n if limit <= 0:\n break\n\n def __iter__(self):\n yield from self.__consumed\n yield from self.__lookahead_consume()\n\n def __getitem__(self, index):\n if index < 0:\n return self.data[index]\n # Consume elements up to the desired index prior to attempting to\n # access it from within `__consumed`\n consume_count = index - len(self.__consumed) + 1\n for _ in self.__lookahead_consume(limit=consume_count):\n pass\n return self.__consumed[index]\n\n def __bool__(self):\n if len(self.__consumed):\n return True\n\n try:\n next(iter(self))\n except StopIteration:\n return False\n else:\n return True\n\n 
@property\n def data(self):\n if not self.__done:\n self.__consumed.extend(self.__it)\n self.__done = True\n return self.__consumed\n\n def __repr__(self):\n return \"<{}: [{}{}]>\".format(\n self.__class__.__name__,\n \", \".join(repr(e) for e in self.__consumed),\n \"...\" if not self.__done else \"\",\n )\n\n\ndef _argsfromspec(spec, replace_defaults=True):\n if spec.defaults:\n split = len(spec.defaults)\n defaults = (list(range(len(spec.defaults))) if replace_defaults\n else spec.defaults)\n positional = spec.args[:-split]\n optional = list(zip(spec.args[-split:], defaults))\n else:\n positional, optional = spec.args, []\n\n varargs = spec.varargs\n varkw = spec.varkw\n if spec.kwonlydefaults:\n kwonlyargs = set(spec.kwonlyargs) - set(spec.kwonlydefaults.keys())\n if replace_defaults:\n kwonlyargs_optional = [\n (kw, i) for i, kw in enumerate(spec.kwonlydefaults.keys())\n ]\n else:\n kwonlyargs_optional = list(spec.kwonlydefaults.items())\n else:\n kwonlyargs, kwonlyargs_optional = spec.kwonlyargs, []\n\n return ', '.join(filter(None, [\n ', '.join(positional),\n ', '.join(f'{k}={v}' for k, v in optional),\n f'*{varargs}' if varargs else None,\n '*' if (kwonlyargs or kwonlyargs_optional) and not varargs else None,\n ', '.join(kwonlyargs) if kwonlyargs else None,\n ', '.join(f'{k}=\"{v}\"' for k, v in kwonlyargs_optional),\n f'**{varkw}' if varkw else None,\n ]))\n\n\ndef head_from_fun(fun, bound=False, debug=False):\n \"\"\"Generate signature function from actual function.\"\"\"\n # we could use inspect.Signature here, but that implementation\n # is very slow since it implements the argument checking\n # in pure-Python. Instead we use exec to create a new function\n # with an empty body, meaning it has the same performance as\n # as just calling a function.\n is_function = inspect.isfunction(fun)\n is_callable = callable(fun)\n is_cython = fun.__class__.__name__ == 'cython_function_or_method'\n is_method = inspect.ismethod(fun)\n\n if not is_function and is_callable and not is_method and not is_cython:\n name, fun = fun.__class__.__name__, fun.__call__\n else:\n name = fun.__name__\n definition = FUNHEAD_TEMPLATE.format(\n fun_name=name,\n fun_args=_argsfromspec(inspect.getfullargspec(fun)),\n fun_value=1,\n )\n if debug: # pragma: no cover\n print(definition, file=sys.stderr)\n namespace = {'__name__': fun.__module__}\n # pylint: disable=exec-used\n # Tasks are rarely, if ever, created at runtime - exec here is fine.\n exec(definition, namespace)\n result = namespace[name]\n result._source = definition\n if bound:\n return partial(result, object())\n return result\n\n\ndef arity_greater(fun, n):\n argspec = inspect.getfullargspec(fun)\n return argspec.varargs or len(argspec.args) > n\n\n\ndef fun_takes_argument(name, fun, position=None):\n spec = inspect.getfullargspec(fun)\n return (\n spec.varkw or spec.varargs or\n (len(spec.args) >= position if position else name in spec.args)\n )\n\n\ndef fun_accepts_kwargs(fun):\n \"\"\"Return true if function accepts arbitrary keyword arguments.\"\"\"\n return any(\n p for p in inspect.signature(fun).parameters.values()\n if p.kind == p.VAR_KEYWORD\n )\n\n\ndef maybe(typ, val):\n \"\"\"Call typ on value if val is defined.\"\"\"\n return typ(val) if val is not None else val\n\n\ndef seq_concat_item(seq, item):\n \"\"\"Return copy of sequence seq with item added.\n\n Returns:\n Sequence: if seq is a tuple, the result will be a tuple,\n otherwise it depends on the implementation of ``__add__``.\n \"\"\"\n return seq + (item,) if isinstance(seq, 
tuple) else seq + [item]\n\n\ndef seq_concat_seq(a, b):\n \"\"\"Concatenate two sequences: ``a + b``.\n\n Returns:\n Sequence: The return value will depend on the largest sequence\n - if b is larger and is a tuple, the return value will be a tuple.\n - if a is larger and is a list, the return value will be a list,\n \"\"\"\n # find the type of the largest sequence\n prefer = type(max([a, b], key=len))\n # convert the smallest list to the type of the largest sequence.\n if not isinstance(a, prefer):\n a = prefer(a)\n if not isinstance(b, prefer):\n b = prefer(b)\n return a + b\n\n\ndef is_numeric_value(value):\n return isinstance(value, (int, float)) and not isinstance(value, bool)\n", "path": "celery/utils/functional.py"}]} |
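A small usage sketch of `head_from_fun` itself, assuming the module is importable at the path shown in the file above; the generated head returns a dummy value, so only the argument checking matters:

```python
from celery.utils.functional import head_from_fun

def add(x, y=1, *args, **kwargs):
    return x + y

head = head_from_fun(add)
head(2)         # accepted: same argument checking as calling add
head(2, y=3)    # accepted
# head(z=5)     # TypeError: x is required, exactly as for add itself
```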
gh_patches_debug_1613 | rasdani/github-patches | git_diff | translate__pootle-4270 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
PootleCommand handles obsolete translation projects
The `update_stores` command can mark a TP directory as obsolete many times.
It doesn't affect us much, except that we get extra, unnecessary log messages like this:
```
set(['get_last_updated', 'get_checks', 'get_mtime', 'get_suggestion_count', 'get_last_action', 'get_wordcount_stats']) deleted from /uk/android_announcements_evernote/ cache
```
which tells us that cache for TP has been deleted.
I think no `PootleCommand` should handle TPs with obsolete directories. The `update_stores` command will resurrect the TP directory when the project is added again.
--- END ISSUE ---
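One possible shape of the fix, sketched here as an assumption since the issue only describes the symptom, is to bail out early for TPs whose directory is obsolete. This presumes the directory model exposes an `obsolete` flag, as the issue's wording suggests:

```python
def do_translation_project(self, tp, **options):
    # Hypothetical guard: recursive commands skip obsolete TP
    # directories; `update_stores` resurrects them when the project
    # is added back.
    if tp.directory.obsolete:
        logging.info(u"Skipping obsolete %s", tp)
        return
    # ... existing handling continues as in the file below ...
```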
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pootle/apps/pootle_app/management/commands/__init__.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 #
4 # Copyright (C) Pootle contributors.
5 #
6 # This file is a part of the Pootle project. It is distributed under the GPL3
7 # or later license. See the LICENSE file for a copy of the license and the
8 # AUTHORS file for copyright and authorship information.
9
10 import datetime
11 import logging
12
13 from optparse import make_option
14
15 from django.core.management.base import BaseCommand, NoArgsCommand
16
17 from pootle.runner import set_sync_mode
18 from pootle_project.models import Project
19 from pootle_translationproject.models import TranslationProject
20
21
22 class PootleCommand(NoArgsCommand):
23 """Base class for handling recursive pootle store management commands."""
24
25 shared_option_list = (
26 make_option(
27 '--project',
28 action='append',
29 dest='projects',
30 help='Project to refresh',
31 ),
32 make_option(
33 '--language',
34 action='append',
35 dest='languages',
36 help='Language to refresh',
37 ),
38 make_option(
39 "--noinput",
40 action="store_true",
41 default=False,
42 help=u"Never prompt for input",
43 ),
44 make_option(
45 "--no-rq",
46 action="store_true",
47 default=False,
48 help=(u"Run all jobs in a single process, without "
49 "using rq workers"),
50 ),
51 )
52 option_list = NoArgsCommand.option_list + shared_option_list
53 process_disabled_projects = False
54
55 def __init__(self, *args, **kwargs):
56 self.languages = []
57 self.projects = []
58 super(PootleCommand, self).__init__(*args, **kwargs)
59
60 def do_translation_project(self, tp, **options):
61 process_stores = True
62
63 if hasattr(self, "handle_translation_project"):
64 logging.info(u"Running %s over %s", self.name, tp)
65 try:
66 process_stores = self.handle_translation_project(tp, **options)
67 except Exception:
68 logging.exception(u"Failed to run %s over %s", self.name, tp)
69 return
70
71 if not process_stores:
72 return
73
74 if hasattr(self, "handle_all_stores"):
75 logging.info(u"Running %s over %s's files", self.name, tp)
76 try:
77 self.handle_all_stores(tp, **options)
78 except Exception:
79 logging.exception(u"Failed to run %s over %s's files",
80 self.name, tp)
81 return
82 elif hasattr(self, "handle_store"):
83 store_query = tp.stores.live()
84 for store in store_query.iterator():
85 logging.info(u"Running %s over %s",
86 self.name, store.pootle_path)
87 try:
88 self.handle_store(store, **options)
89 except Exception:
90 logging.exception(u"Failed to run %s over %s",
91 self.name, store.pootle_path)
92
93 def handle_noargs(self, **options):
94 # adjust debug level to the verbosity option
95 verbosity = int(options.get('verbosity', 1))
96 debug_levels = {
97 0: logging.ERROR,
98 1: logging.WARNING,
99 2: logging.INFO,
100 3: logging.DEBUG
101 }
102 debug_level = debug_levels.get(verbosity, logging.DEBUG)
103 logging.getLogger().setLevel(debug_level)
104
105 # reduce size of parse pool early on
106 self.name = self.__class__.__module__.split('.')[-1]
107 from pootle_store.fields import TranslationStoreFieldFile
108 TranslationStoreFieldFile._store_cache.maxsize = 2
109 TranslationStoreFieldFile._store_cache.cullsize = 2
110 TranslationProject._non_db_state_cache.maxsize = 2
111 TranslationProject._non_db_state_cache.cullsize = 2
112
113 self.projects = options.pop('projects', [])
114 self.languages = options.pop('languages', [])
115
116 # info start
117 start = datetime.datetime.now()
118 logging.info('Start running of %s', self.name)
119
120 self.handle_all(**options)
121
122 # info finish
123 end = datetime.datetime.now()
124 logging.info('All done for %s in %s', self.name, end - start)
125
126 def handle_all(self, **options):
127 if options.get("no_rq", False):
128 set_sync_mode(options.get('noinput', False))
129
130 if self.process_disabled_projects:
131 project_query = Project.objects.all()
132 else:
133 project_query = Project.objects.enabled()
134
135 if self.projects:
136 project_query = project_query.filter(code__in=self.projects)
137
138 for project in project_query.iterator():
139 tp_query = project.translationproject_set \
140 .order_by('language__code')
141
142 if self.languages:
143 tp_query = tp_query.filter(language__code__in=self.languages)
144
145 for tp in tp_query.iterator():
146 self.do_translation_project(tp, **options)
147
148
149 class BaseRunCommand(BaseCommand):
150 """Base class to build new server runners.
151
152 Based on code from `django-shoes
153 <https://bitbucket.org/mlzboy/django-shoes/>`_.
154 """
155
156 hostport_option_list = (
157 make_option(
158 '--host',
159 action='store',
160 dest='host',
161 default='127.0.0.1',
162 help='Hostname to listen on.',
163 ),
164 make_option(
165 '--port',
166 action='store',
167 dest='port',
168 default=8000,
169 type=int,
170 help='The TCP port to listen on.',
171 ),
172 )
173
174 option_list = BaseCommand.option_list + hostport_option_list
175
176 def handle(self, *args, **options):
177 return self.serve_forever(*args, **options)
178
179 def get_app(self):
180 from django.contrib.staticfiles.handlers import StaticFilesHandler
181 from django.core.handlers.wsgi import WSGIHandler
182
183 app = StaticFilesHandler(WSGIHandler())
184 return app
185
186 def serve_forever(self, *args, **kwargs):
187 raise NotImplementedError
188
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pootle/apps/pootle_app/management/commands/__init__.py b/pootle/apps/pootle_app/management/commands/__init__.py
--- a/pootle/apps/pootle_app/management/commands/__init__.py
+++ b/pootle/apps/pootle_app/management/commands/__init__.py
@@ -136,7 +136,7 @@
project_query = project_query.filter(code__in=self.projects)
for project in project_query.iterator():
- tp_query = project.translationproject_set \
+ tp_query = project.translationproject_set.live() \
.order_by('language__code')
if self.languages:
| {"golden_diff": "diff --git a/pootle/apps/pootle_app/management/commands/__init__.py b/pootle/apps/pootle_app/management/commands/__init__.py\n--- a/pootle/apps/pootle_app/management/commands/__init__.py\n+++ b/pootle/apps/pootle_app/management/commands/__init__.py\n@@ -136,7 +136,7 @@\n project_query = project_query.filter(code__in=self.projects)\n \n for project in project_query.iterator():\n- tp_query = project.translationproject_set \\\n+ tp_query = project.translationproject_set.live() \\\n .order_by('language__code')\n \n if self.languages:\n", "issue": "PootleCommand handles obsolete translation projects\n`update_stores` command can make obsolete TP directory many times. \nIt doesn't affect us much except we get extra unnecessary log messages like this:\n\n```\nset(['get_last_updated', 'get_checks', 'get_mtime', 'get_suggestion_count', 'get_last_action', 'get_wordcount_stats']) deleted from /uk/android_announcements_evernote/ cache\n```\n\nwhich tells us that cache for TP has been deleted.\n\nI think any `PootleCommand` shouldn't handle TP with obsolete directories. `update_stores` command will resurrect TP directory when it's added.\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport datetime\nimport logging\n\nfrom optparse import make_option\n\nfrom django.core.management.base import BaseCommand, NoArgsCommand\n\nfrom pootle.runner import set_sync_mode\nfrom pootle_project.models import Project\nfrom pootle_translationproject.models import TranslationProject\n\n\nclass PootleCommand(NoArgsCommand):\n \"\"\"Base class for handling recursive pootle store management commands.\"\"\"\n\n shared_option_list = (\n make_option(\n '--project',\n action='append',\n dest='projects',\n help='Project to refresh',\n ),\n make_option(\n '--language',\n action='append',\n dest='languages',\n help='Language to refresh',\n ),\n make_option(\n \"--noinput\",\n action=\"store_true\",\n default=False,\n help=u\"Never prompt for input\",\n ),\n make_option(\n \"--no-rq\",\n action=\"store_true\",\n default=False,\n help=(u\"Run all jobs in a single process, without \"\n \"using rq workers\"),\n ),\n )\n option_list = NoArgsCommand.option_list + shared_option_list\n process_disabled_projects = False\n\n def __init__(self, *args, **kwargs):\n self.languages = []\n self.projects = []\n super(PootleCommand, self).__init__(*args, **kwargs)\n\n def do_translation_project(self, tp, **options):\n process_stores = True\n\n if hasattr(self, \"handle_translation_project\"):\n logging.info(u\"Running %s over %s\", self.name, tp)\n try:\n process_stores = self.handle_translation_project(tp, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s\", self.name, tp)\n return\n\n if not process_stores:\n return\n\n if hasattr(self, \"handle_all_stores\"):\n logging.info(u\"Running %s over %s's files\", self.name, tp)\n try:\n self.handle_all_stores(tp, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s's files\",\n self.name, tp)\n return\n elif hasattr(self, \"handle_store\"):\n store_query = tp.stores.live()\n for store in store_query.iterator():\n logging.info(u\"Running %s over %s\",\n self.name, store.pootle_path)\n try:\n self.handle_store(store, **options)\n except 
Exception:\n logging.exception(u\"Failed to run %s over %s\",\n self.name, store.pootle_path)\n\n def handle_noargs(self, **options):\n # adjust debug level to the verbosity option\n verbosity = int(options.get('verbosity', 1))\n debug_levels = {\n 0: logging.ERROR,\n 1: logging.WARNING,\n 2: logging.INFO,\n 3: logging.DEBUG\n }\n debug_level = debug_levels.get(verbosity, logging.DEBUG)\n logging.getLogger().setLevel(debug_level)\n\n # reduce size of parse pool early on\n self.name = self.__class__.__module__.split('.')[-1]\n from pootle_store.fields import TranslationStoreFieldFile\n TranslationStoreFieldFile._store_cache.maxsize = 2\n TranslationStoreFieldFile._store_cache.cullsize = 2\n TranslationProject._non_db_state_cache.maxsize = 2\n TranslationProject._non_db_state_cache.cullsize = 2\n\n self.projects = options.pop('projects', [])\n self.languages = options.pop('languages', [])\n\n # info start\n start = datetime.datetime.now()\n logging.info('Start running of %s', self.name)\n\n self.handle_all(**options)\n\n # info finish\n end = datetime.datetime.now()\n logging.info('All done for %s in %s', self.name, end - start)\n\n def handle_all(self, **options):\n if options.get(\"no_rq\", False):\n set_sync_mode(options.get('noinput', False))\n\n if self.process_disabled_projects:\n project_query = Project.objects.all()\n else:\n project_query = Project.objects.enabled()\n\n if self.projects:\n project_query = project_query.filter(code__in=self.projects)\n\n for project in project_query.iterator():\n tp_query = project.translationproject_set \\\n .order_by('language__code')\n\n if self.languages:\n tp_query = tp_query.filter(language__code__in=self.languages)\n\n for tp in tp_query.iterator():\n self.do_translation_project(tp, **options)\n\n\nclass BaseRunCommand(BaseCommand):\n \"\"\"Base class to build new server runners.\n\n Based on code from `django-shoes\n <https://bitbucket.org/mlzboy/django-shoes/>`_.\n \"\"\"\n\n hostport_option_list = (\n make_option(\n '--host',\n action='store',\n dest='host',\n default='127.0.0.1',\n help='Hostname to listen on.',\n ),\n make_option(\n '--port',\n action='store',\n dest='port',\n default=8000,\n type=int,\n help='The TCP port to listen on.',\n ),\n )\n\n option_list = BaseCommand.option_list + hostport_option_list\n\n def handle(self, *args, **options):\n return self.serve_forever(*args, **options)\n\n def get_app(self):\n from django.contrib.staticfiles.handlers import StaticFilesHandler\n from django.core.handlers.wsgi import WSGIHandler\n\n app = StaticFilesHandler(WSGIHandler())\n return app\n\n def serve_forever(self, *args, **kwargs):\n raise NotImplementedError\n", "path": "pootle/apps/pootle_app/management/commands/__init__.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright (C) Pootle contributors.\n#\n# This file is a part of the Pootle project. It is distributed under the GPL3\n# or later license. 
See the LICENSE file for a copy of the license and the\n# AUTHORS file for copyright and authorship information.\n\nimport datetime\nimport logging\n\nfrom optparse import make_option\n\nfrom django.core.management.base import BaseCommand, NoArgsCommand\n\nfrom pootle.runner import set_sync_mode\nfrom pootle_project.models import Project\nfrom pootle_translationproject.models import TranslationProject\n\n\nclass PootleCommand(NoArgsCommand):\n \"\"\"Base class for handling recursive pootle store management commands.\"\"\"\n\n shared_option_list = (\n make_option(\n '--project',\n action='append',\n dest='projects',\n help='Project to refresh',\n ),\n make_option(\n '--language',\n action='append',\n dest='languages',\n help='Language to refresh',\n ),\n make_option(\n \"--noinput\",\n action=\"store_true\",\n default=False,\n help=u\"Never prompt for input\",\n ),\n make_option(\n \"--no-rq\",\n action=\"store_true\",\n default=False,\n help=(u\"Run all jobs in a single process, without \"\n \"using rq workers\"),\n ),\n )\n option_list = NoArgsCommand.option_list + shared_option_list\n process_disabled_projects = False\n\n def __init__(self, *args, **kwargs):\n self.languages = []\n self.projects = []\n super(PootleCommand, self).__init__(*args, **kwargs)\n\n def do_translation_project(self, tp, **options):\n process_stores = True\n\n if hasattr(self, \"handle_translation_project\"):\n logging.info(u\"Running %s over %s\", self.name, tp)\n try:\n process_stores = self.handle_translation_project(tp, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s\", self.name, tp)\n return\n\n if not process_stores:\n return\n\n if hasattr(self, \"handle_all_stores\"):\n logging.info(u\"Running %s over %s's files\", self.name, tp)\n try:\n self.handle_all_stores(tp, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s's files\",\n self.name, tp)\n return\n elif hasattr(self, \"handle_store\"):\n store_query = tp.stores.live()\n for store in store_query.iterator():\n logging.info(u\"Running %s over %s\",\n self.name, store.pootle_path)\n try:\n self.handle_store(store, **options)\n except Exception:\n logging.exception(u\"Failed to run %s over %s\",\n self.name, store.pootle_path)\n\n def handle_noargs(self, **options):\n # adjust debug level to the verbosity option\n verbosity = int(options.get('verbosity', 1))\n debug_levels = {\n 0: logging.ERROR,\n 1: logging.WARNING,\n 2: logging.INFO,\n 3: logging.DEBUG\n }\n debug_level = debug_levels.get(verbosity, logging.DEBUG)\n logging.getLogger().setLevel(debug_level)\n\n # reduce size of parse pool early on\n self.name = self.__class__.__module__.split('.')[-1]\n from pootle_store.fields import TranslationStoreFieldFile\n TranslationStoreFieldFile._store_cache.maxsize = 2\n TranslationStoreFieldFile._store_cache.cullsize = 2\n TranslationProject._non_db_state_cache.maxsize = 2\n TranslationProject._non_db_state_cache.cullsize = 2\n\n self.projects = options.pop('projects', [])\n self.languages = options.pop('languages', [])\n\n # info start\n start = datetime.datetime.now()\n logging.info('Start running of %s', self.name)\n\n self.handle_all(**options)\n\n # info finish\n end = datetime.datetime.now()\n logging.info('All done for %s in %s', self.name, end - start)\n\n def handle_all(self, **options):\n if options.get(\"no_rq\", False):\n set_sync_mode(options.get('noinput', False))\n\n if self.process_disabled_projects:\n project_query = Project.objects.all()\n else:\n project_query = 
Project.objects.enabled()\n\n if self.projects:\n project_query = project_query.filter(code__in=self.projects)\n\n for project in project_query.iterator():\n tp_query = project.translationproject_set.live() \\\n .order_by('language__code')\n\n if self.languages:\n tp_query = tp_query.filter(language__code__in=self.languages)\n\n for tp in tp_query.iterator():\n self.do_translation_project(tp, **options)\n\n\nclass BaseRunCommand(BaseCommand):\n \"\"\"Base class to build new server runners.\n\n Based on code from `django-shoes\n <https://bitbucket.org/mlzboy/django-shoes/>`_.\n \"\"\"\n\n hostport_option_list = (\n make_option(\n '--host',\n action='store',\n dest='host',\n default='127.0.0.1',\n help='Hostname to listen on.',\n ),\n make_option(\n '--port',\n action='store',\n dest='port',\n default=8000,\n type=int,\n help='The TCP port to listen on.',\n ),\n )\n\n option_list = BaseCommand.option_list + hostport_option_list\n\n def handle(self, *args, **options):\n return self.serve_forever(*args, **options)\n\n def get_app(self):\n from django.contrib.staticfiles.handlers import StaticFilesHandler\n from django.core.handlers.wsgi import WSGIHandler\n\n app = StaticFilesHandler(WSGIHandler())\n return app\n\n def serve_forever(self, *args, **kwargs):\n raise NotImplementedError\n", "path": "pootle/apps/pootle_app/management/commands/__init__.py"}]} |
gh_patches_debug_1614 | rasdani/github-patches | git_diff | Pylons__pyramid-3076 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`AllPermissionsList.__iter__` returns tuple(). Should return iter(tuple())?
My understanding is that `__iter__` should [return an iterator object](https://docs.python.org/3.5/library/stdtypes.html#container.__iter__).
However, [`AllPermissionsList.__iter__`](https://github.com/Pylons/pyramid/blob/master/pyramid/security.py#L25) returns `()`.
Indeed, this raises a TypeError as expected:
```
In [1]: from pyramid.security import ALL_PERMISSIONS
In [2]: iter(ALL_PERMISSIONS)
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-22-f8a3d5a1d337> in <module>()
----> 1 iter(ALL_PERMISSIONS)
TypeError: iter() returned non-iterator of type 'tuple'
```
Lastly, I don't see where this method is used in Pyramid. Should this code (the `__iter__` method on `AllPermissionsList`) even exist?
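For reference, a minimal sketch of how the method could be corrected while keeping the class's semantics: `__iter__` must return an iterator, and `iter(())` provides one over an empty sequence.

```python
class AllPermissionsList(object):
    """ Stand in 'permission list' to represent all permissions """

    def __iter__(self):
        # iter(()) returns an iterator over an empty tuple, which
        # satisfies the iterator protocol; returning () itself makes
        # iter(ALL_PERMISSIONS) raise TypeError as shown above.
        return iter(())

    def __contains__(self, other):
        return True

    def __eq__(self, other):
        return isinstance(other, self.__class__)
```

With this change, `iter(AllPermissionsList())` yields an empty iterator instead of raising, while `__contains__` still reports membership for every permission.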
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `pyramid/security.py`
Content:
```
1 from zope.deprecation import deprecated
2 from zope.interface import providedBy
3
4 from pyramid.interfaces import (
5 IAuthenticationPolicy,
6 IAuthorizationPolicy,
7 ISecuredView,
8 IView,
9 IViewClassifier,
10 )
11
12 from pyramid.compat import map_
13 from pyramid.threadlocal import get_current_registry
14
15 Everyone = 'system.Everyone'
16 Authenticated = 'system.Authenticated'
17 Allow = 'Allow'
18 Deny = 'Deny'
19
20 _marker = object()
21
22 class AllPermissionsList(object):
23 """ Stand in 'permission list' to represent all permissions """
24 def __iter__(self):
25 return ()
26 def __contains__(self, other):
27 return True
28 def __eq__(self, other):
29 return isinstance(other, self.__class__)
30
31 ALL_PERMISSIONS = AllPermissionsList()
32 DENY_ALL = (Deny, Everyone, ALL_PERMISSIONS)
33
34 NO_PERMISSION_REQUIRED = '__no_permission_required__'
35
36 def _get_registry(request):
37 try:
38 reg = request.registry
39 except AttributeError:
40 reg = get_current_registry() # b/c
41 return reg
42
43 def _get_authentication_policy(request):
44 registry = _get_registry(request)
45 return registry.queryUtility(IAuthenticationPolicy)
46
47 def has_permission(permission, context, request):
48 """
49 A function that calls :meth:`pyramid.request.Request.has_permission`
50 and returns its result.
51
52 .. deprecated:: 1.5
53 Use :meth:`pyramid.request.Request.has_permission` instead.
54
55 .. versionchanged:: 1.5a3
56 If context is None, then attempt to use the context attribute of self;
57 if not set, then the AttributeError is propagated.
58 """
59 return request.has_permission(permission, context)
60
61 deprecated(
62 'has_permission',
63 'As of Pyramid 1.5 the "pyramid.security.has_permission" API is now '
64 'deprecated. It will be removed in Pyramid 1.8. Use the '
65 '"has_permission" method of the Pyramid request instead.'
66 )
67
68
69 def authenticated_userid(request):
70 """
71 A function that returns the value of the property
72 :attr:`pyramid.request.Request.authenticated_userid`.
73
74 .. deprecated:: 1.5
75 Use :attr:`pyramid.request.Request.authenticated_userid` instead.
76 """
77 return request.authenticated_userid
78
79 deprecated(
80 'authenticated_userid',
81 'As of Pyramid 1.5 the "pyramid.security.authenticated_userid" API is now '
82 'deprecated. It will be removed in Pyramid 1.8. Use the '
83 '"authenticated_userid" attribute of the Pyramid request instead.'
84 )
85
86 def unauthenticated_userid(request):
87 """
88 A function that returns the value of the property
89 :attr:`pyramid.request.Request.unauthenticated_userid`.
90
91 .. deprecated:: 1.5
92 Use :attr:`pyramid.request.Request.unauthenticated_userid` instead.
93 """
94 return request.unauthenticated_userid
95
96 deprecated(
97 'unauthenticated_userid',
98 'As of Pyramid 1.5 the "pyramid.security.unauthenticated_userid" API is '
99 'now deprecated. It will be removed in Pyramid 1.8. Use the '
100 '"unauthenticated_userid" attribute of the Pyramid request instead.'
101 )
102
103 def effective_principals(request):
104 """
105 A function that returns the value of the property
106 :attr:`pyramid.request.Request.effective_principals`.
107
108 .. deprecated:: 1.5
109 Use :attr:`pyramid.request.Request.effective_principals` instead.
110 """
111 return request.effective_principals
112
113 deprecated(
114 'effective_principals',
115 'As of Pyramid 1.5 the "pyramid.security.effective_principals" API is '
116 'now deprecated. It will be removed in Pyramid 1.8. Use the '
117 '"effective_principals" attribute of the Pyramid request instead.'
118 )
119
120 def remember(request, userid=_marker, **kw):
121 """
122 Returns a sequence of header tuples (e.g. ``[('Set-Cookie', 'foo=abc')]``)
123 on this request's response.
124 These headers are suitable for 'remembering' a set of credentials
125 implied by the data passed as ``userid`` and ``*kw`` using the
126 current :term:`authentication policy`. Common usage might look
127 like so within the body of a view function (``response`` is
128 assumed to be a :term:`WebOb` -style :term:`response` object
129 computed previously by the view code):
130
131 .. code-block:: python
132
133 from pyramid.security import remember
134 headers = remember(request, 'chrism', password='123', max_age='86400')
135 response = request.response
136 response.headerlist.extend(headers)
137 return response
138
139 If no :term:`authentication policy` is in use, this function will
140 always return an empty sequence. If used, the composition and
141 meaning of ``**kw`` must be agreed upon by the calling code and
142 the effective authentication policy.
143
144 .. deprecated:: 1.6
145 Renamed the ``principal`` argument to ``userid`` to clarify its
146 purpose.
147 """
148 if userid is _marker:
149 principal = kw.pop('principal', _marker)
150 if principal is _marker:
151 raise TypeError(
152 'remember() missing 1 required positional argument: '
153 '\'userid\'')
154 else:
155 deprecated(
156 'principal',
157 'The "principal" argument was deprecated in Pyramid 1.6. '
158 'It will be removed in Pyramid 1.9. Use the "userid" '
159 'argument instead.')
160 userid = principal
161 policy = _get_authentication_policy(request)
162 if policy is None:
163 return []
164 return policy.remember(request, userid, **kw)
165
166 def forget(request):
167 """
168 Return a sequence of header tuples (e.g. ``[('Set-Cookie',
169 'foo=abc')]``) suitable for 'forgetting' the set of credentials
170 possessed by the currently authenticated user. A common usage
171 might look like so within the body of a view function
172 (``response`` is assumed to be an :term:`WebOb` -style
173 :term:`response` object computed previously by the view code):
174
175 .. code-block:: python
176
177 from pyramid.security import forget
178 headers = forget(request)
179 response.headerlist.extend(headers)
180 return response
181
182 If no :term:`authentication policy` is in use, this function will
183 always return an empty sequence.
184 """
185 policy = _get_authentication_policy(request)
186 if policy is None:
187 return []
188 return policy.forget(request)
189
190 def principals_allowed_by_permission(context, permission):
191 """ Provided a ``context`` (a resource object), and a ``permission``
192 (a string or unicode object), if a :term:`authorization policy` is
193 in effect, return a sequence of :term:`principal` ids that possess
194 the permission in the ``context``. If no authorization policy is
195 in effect, this will return a sequence with the single value
196 :mod:`pyramid.security.Everyone` (the special principal
197 identifier representing all principals).
198
199 .. note::
200
201 even if an :term:`authorization policy` is in effect,
202 some (exotic) authorization policies may not implement the
203 required machinery for this function; those will cause a
204 :exc:`NotImplementedError` exception to be raised when this
205 function is invoked.
206 """
207 reg = get_current_registry()
208 policy = reg.queryUtility(IAuthorizationPolicy)
209 if policy is None:
210 return [Everyone]
211 return policy.principals_allowed_by_permission(context, permission)
212
213 def view_execution_permitted(context, request, name=''):
214 """ If the view specified by ``context`` and ``name`` is protected
215 by a :term:`permission`, check the permission associated with the
216 view using the effective authentication/authorization policies and
217 the ``request``. Return a boolean result. If no
218 :term:`authorization policy` is in effect, or if the view is not
219 protected by a permission, return ``True``. If no view can view found,
220 an exception will be raised.
221
222 .. versionchanged:: 1.4a4
223 An exception is raised if no view is found.
224
225 """
226 reg = _get_registry(request)
227 provides = [IViewClassifier] + map_(providedBy, (request, context))
228 # XXX not sure what to do here about using _find_views or analogue;
229 # for now let's just keep it as-is
230 view = reg.adapters.lookup(provides, ISecuredView, name=name)
231 if view is None:
232 view = reg.adapters.lookup(provides, IView, name=name)
233 if view is None:
234 raise TypeError('No registered view satisfies the constraints. '
235 'It would not make sense to claim that this view '
236 '"is" or "is not" permitted.')
237 return Allowed(
238 'Allowed: view name %r in context %r (no permission defined)' %
239 (name, context))
240 return view.__permitted__(context, request)
241
242
243 class PermitsResult(int):
244 def __new__(cls, s, *args):
245 inst = int.__new__(cls, cls.boolval)
246 inst.s = s
247 inst.args = args
248 return inst
249
250 @property
251 def msg(self):
252 return self.s % self.args
253
254 def __str__(self):
255 return self.msg
256
257 def __repr__(self):
258 return '<%s instance at %s with msg %r>' % (self.__class__.__name__,
259 id(self),
260 self.msg)
261
262 class Denied(PermitsResult):
263 """ An instance of ``Denied`` is returned when a security-related
264 API or other :app:`Pyramid` code denies an action unrelated to
265 an ACL check. It evaluates equal to all boolean false types. It
266 has an attribute named ``msg`` describing the circumstances for
267 the deny."""
268 boolval = 0
269
270 class Allowed(PermitsResult):
271 """ An instance of ``Allowed`` is returned when a security-related
272 API or other :app:`Pyramid` code allows an action unrelated to
273 an ACL check. It evaluates equal to all boolean true types. It
274 has an attribute named ``msg`` describing the circumstances for
275 the allow."""
276 boolval = 1
277
278 class ACLPermitsResult(int):
279 def __new__(cls, ace, acl, permission, principals, context):
280 inst = int.__new__(cls, cls.boolval)
281 inst.permission = permission
282 inst.ace = ace
283 inst.acl = acl
284 inst.principals = principals
285 inst.context = context
286 return inst
287
288 @property
289 def msg(self):
290 s = ('%s permission %r via ACE %r in ACL %r on context %r for '
291 'principals %r')
292 return s % (self.__class__.__name__,
293 self.permission,
294 self.ace,
295 self.acl,
296 self.context,
297 self.principals)
298
299 def __str__(self):
300 return self.msg
301
302 def __repr__(self):
303 return '<%s instance at %s with msg %r>' % (self.__class__.__name__,
304 id(self),
305 self.msg)
306
307 class ACLDenied(ACLPermitsResult):
308 """ An instance of ``ACLDenied`` represents that a security check made
309 explicitly against ACL was denied. It evaluates equal to all boolean
310 false types. It also has the following attributes: ``acl``, ``ace``,
311 ``permission``, ``principals``, and ``context``. These attributes
312 indicate the security values involved in the request. Its __str__ method
313 prints a summary of these attributes for debugging purposes. The same
314 summary is available as the ``msg`` attribute."""
315 boolval = 0
316
317 class ACLAllowed(ACLPermitsResult):
318 """ An instance of ``ACLAllowed`` represents that a security check made
319 explicitly against ACL was allowed. It evaluates equal to all boolean
320 true types. It also has the following attributes: ``acl``, ``ace``,
321 ``permission``, ``principals``, and ``context``. These attributes
322 indicate the security values involved in the request. Its __str__ method
323 prints a summary of these attributes for debugging purposes. The same
324 summary is available as the ``msg`` attribute."""
325 boolval = 1
326
327 class AuthenticationAPIMixin(object):
328
329 def _get_authentication_policy(self):
330 reg = _get_registry(self)
331 return reg.queryUtility(IAuthenticationPolicy)
332
333 @property
334 def authenticated_userid(self):
335 """ Return the userid of the currently authenticated user or
336 ``None`` if there is no :term:`authentication policy` in effect or
337 there is no currently authenticated user.
338
339 .. versionadded:: 1.5
340 """
341 policy = self._get_authentication_policy()
342 if policy is None:
343 return None
344 return policy.authenticated_userid(self)
345
346 @property
347 def unauthenticated_userid(self):
348 """ Return an object which represents the *claimed* (not verified) user
349 id of the credentials present in the request. ``None`` if there is no
350 :term:`authentication policy` in effect or there is no user data
351 associated with the current request. This differs from
352 :attr:`~pyramid.request.Request.authenticated_userid`, because the
353 effective authentication policy will not ensure that a record
354 associated with the userid exists in persistent storage.
355
356 .. versionadded:: 1.5
357 """
358 policy = self._get_authentication_policy()
359 if policy is None:
360 return None
361 return policy.unauthenticated_userid(self)
362
363 @property
364 def effective_principals(self):
365 """ Return the list of 'effective' :term:`principal` identifiers
366 for the ``request``. If no :term:`authentication policy` is in effect,
367 this will return a one-element list containing the
368 :data:`pyramid.security.Everyone` principal.
369
370 .. versionadded:: 1.5
371 """
372 policy = self._get_authentication_policy()
373 if policy is None:
374 return [Everyone]
375 return policy.effective_principals(self)
376
377 class AuthorizationAPIMixin(object):
378
379 def has_permission(self, permission, context=None):
380 """ Given a permission and an optional context, returns an instance of
381 :data:`pyramid.security.Allowed` if the permission is granted to this
382 request with the provided context, or the context already associated
383 with the request. Otherwise, returns an instance of
384 :data:`pyramid.security.Denied`. This method delegates to the current
385 authentication and authorization policies. Returns
386 :data:`pyramid.security.Allowed` unconditionally if no authentication
387 policy has been registered for this request. If ``context`` is not
388 supplied or is supplied as ``None``, the context used is the
389 ``request.context`` attribute.
390
391 :param permission: Does this request have the given permission?
392 :type permission: unicode, str
393 :param context: A resource object or ``None``
394 :type context: object
395 :returns: `pyramid.security.PermitsResult`
396
397 .. versionadded:: 1.5
398
399 """
400 if context is None:
401 context = self.context
402 reg = _get_registry(self)
403 authn_policy = reg.queryUtility(IAuthenticationPolicy)
404 if authn_policy is None:
405 return Allowed('No authentication policy in use.')
406 authz_policy = reg.queryUtility(IAuthorizationPolicy)
407 if authz_policy is None:
408 raise ValueError('Authentication policy registered without '
409 'authorization policy') # should never happen
410 principals = authn_policy.effective_principals(self)
411 return authz_policy.permits(context, principals, permission)
412
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/pyramid/security.py b/pyramid/security.py
--- a/pyramid/security.py
+++ b/pyramid/security.py
@@ -21,10 +21,13 @@
class AllPermissionsList(object):
""" Stand in 'permission list' to represent all permissions """
+
def __iter__(self):
- return ()
+ return iter(())
+
def __contains__(self, other):
return True
+
def __eq__(self, other):
return isinstance(other, self.__class__)
| {"golden_diff": "diff --git a/pyramid/security.py b/pyramid/security.py\n--- a/pyramid/security.py\n+++ b/pyramid/security.py\n@@ -21,10 +21,13 @@\n \n class AllPermissionsList(object):\n \"\"\" Stand in 'permission list' to represent all permissions \"\"\"\n+\n def __iter__(self):\n- return ()\n+ return iter(())\n+\n def __contains__(self, other):\n return True\n+\n def __eq__(self, other):\n return isinstance(other, self.__class__)\n", "issue": "`AllPermissionsList.__iter__` returns tuple(). Should return iter(tuple())?\nMy understanding is that __iter__ should [return an iterator object](https://docs.python.org/3.5/library/stdtypes.html#container.__iter__).\r\n\r\nHowever, [`AllPermissionsList.__iter__`](https://github.com/Pylons/pyramid/blob/master/pyramid/security.py#L25) returns `()`.\r\n\r\nIndeed, this raises a TypeError as expected:\r\n```\r\nIn [1]: from pyramid.security import ALL_PERMISSIONS\r\n\r\nIn [2]: iter(ALL_PERMISSIONS)\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n<ipython-input-22-f8a3d5a1d337> in <module>()\r\n----> 1 iter(ALL_PERMISSIONS)\r\n\r\nTypeError: iter() returned non-iterator of type 'tuple'\r\n```\r\n\r\nLastly, I don't see where this method is used in Pyramid. Should this code (the `__iter__` method on `AllPermissionsList`) even exist?\n", "before_files": [{"content": "from zope.deprecation import deprecated\nfrom zope.interface import providedBy\n\nfrom pyramid.interfaces import (\n IAuthenticationPolicy,\n IAuthorizationPolicy,\n ISecuredView,\n IView,\n IViewClassifier,\n )\n\nfrom pyramid.compat import map_\nfrom pyramid.threadlocal import get_current_registry\n\nEveryone = 'system.Everyone'\nAuthenticated = 'system.Authenticated'\nAllow = 'Allow'\nDeny = 'Deny'\n\n_marker = object()\n\nclass AllPermissionsList(object):\n \"\"\" Stand in 'permission list' to represent all permissions \"\"\"\n def __iter__(self):\n return ()\n def __contains__(self, other):\n return True\n def __eq__(self, other):\n return isinstance(other, self.__class__)\n\nALL_PERMISSIONS = AllPermissionsList()\nDENY_ALL = (Deny, Everyone, ALL_PERMISSIONS)\n\nNO_PERMISSION_REQUIRED = '__no_permission_required__'\n\ndef _get_registry(request):\n try:\n reg = request.registry\n except AttributeError:\n reg = get_current_registry() # b/c\n return reg\n\ndef _get_authentication_policy(request):\n registry = _get_registry(request)\n return registry.queryUtility(IAuthenticationPolicy)\n\ndef has_permission(permission, context, request):\n \"\"\"\n A function that calls :meth:`pyramid.request.Request.has_permission`\n and returns its result.\n \n .. deprecated:: 1.5\n Use :meth:`pyramid.request.Request.has_permission` instead.\n\n .. versionchanged:: 1.5a3\n If context is None, then attempt to use the context attribute of self;\n if not set, then the AttributeError is propagated.\n \"\"\" \n return request.has_permission(permission, context)\n\ndeprecated(\n 'has_permission',\n 'As of Pyramid 1.5 the \"pyramid.security.has_permission\" API is now '\n 'deprecated. It will be removed in Pyramid 1.8. Use the '\n '\"has_permission\" method of the Pyramid request instead.'\n )\n\n\ndef authenticated_userid(request):\n \"\"\"\n A function that returns the value of the property\n :attr:`pyramid.request.Request.authenticated_userid`.\n \n .. 
deprecated:: 1.5\n Use :attr:`pyramid.request.Request.authenticated_userid` instead.\n \"\"\" \n return request.authenticated_userid\n\ndeprecated(\n 'authenticated_userid',\n 'As of Pyramid 1.5 the \"pyramid.security.authenticated_userid\" API is now '\n 'deprecated. It will be removed in Pyramid 1.8. Use the '\n '\"authenticated_userid\" attribute of the Pyramid request instead.'\n )\n\ndef unauthenticated_userid(request):\n \"\"\" \n A function that returns the value of the property\n :attr:`pyramid.request.Request.unauthenticated_userid`.\n \n .. deprecated:: 1.5\n Use :attr:`pyramid.request.Request.unauthenticated_userid` instead.\n \"\"\" \n return request.unauthenticated_userid\n\ndeprecated(\n 'unauthenticated_userid',\n 'As of Pyramid 1.5 the \"pyramid.security.unauthenticated_userid\" API is '\n 'now deprecated. It will be removed in Pyramid 1.8. Use the '\n '\"unauthenticated_userid\" attribute of the Pyramid request instead.'\n )\n\ndef effective_principals(request):\n \"\"\"\n A function that returns the value of the property\n :attr:`pyramid.request.Request.effective_principals`.\n \n .. deprecated:: 1.5\n Use :attr:`pyramid.request.Request.effective_principals` instead.\n \"\"\" \n return request.effective_principals\n\ndeprecated(\n 'effective_principals',\n 'As of Pyramid 1.5 the \"pyramid.security.effective_principals\" API is '\n 'now deprecated. It will be removed in Pyramid 1.8. Use the '\n '\"effective_principals\" attribute of the Pyramid request instead.'\n )\n\ndef remember(request, userid=_marker, **kw):\n \"\"\"\n Returns a sequence of header tuples (e.g. ``[('Set-Cookie', 'foo=abc')]``)\n on this request's response.\n These headers are suitable for 'remembering' a set of credentials\n implied by the data passed as ``userid`` and ``*kw`` using the\n current :term:`authentication policy`. Common usage might look\n like so within the body of a view function (``response`` is\n assumed to be a :term:`WebOb` -style :term:`response` object\n computed previously by the view code):\n\n .. code-block:: python\n\n from pyramid.security import remember\n headers = remember(request, 'chrism', password='123', max_age='86400')\n response = request.response\n response.headerlist.extend(headers)\n return response\n\n If no :term:`authentication policy` is in use, this function will\n always return an empty sequence. If used, the composition and\n meaning of ``**kw`` must be agreed upon by the calling code and\n the effective authentication policy.\n \n .. deprecated:: 1.6\n Renamed the ``principal`` argument to ``userid`` to clarify its\n purpose.\n \"\"\"\n if userid is _marker:\n principal = kw.pop('principal', _marker)\n if principal is _marker:\n raise TypeError(\n 'remember() missing 1 required positional argument: '\n '\\'userid\\'')\n else:\n deprecated(\n 'principal',\n 'The \"principal\" argument was deprecated in Pyramid 1.6. '\n 'It will be removed in Pyramid 1.9. Use the \"userid\" '\n 'argument instead.')\n userid = principal\n policy = _get_authentication_policy(request)\n if policy is None:\n return []\n return policy.remember(request, userid, **kw)\n\ndef forget(request):\n \"\"\"\n Return a sequence of header tuples (e.g. ``[('Set-Cookie',\n 'foo=abc')]``) suitable for 'forgetting' the set of credentials\n possessed by the currently authenticated user. A common usage\n might look like so within the body of a view function\n (``response`` is assumed to be an :term:`WebOb` -style\n :term:`response` object computed previously by the view code):\n\n .. 
code-block:: python\n\n from pyramid.security import forget\n headers = forget(request)\n response.headerlist.extend(headers)\n return response\n\n If no :term:`authentication policy` is in use, this function will\n always return an empty sequence.\n \"\"\" \n policy = _get_authentication_policy(request)\n if policy is None:\n return []\n return policy.forget(request)\n\ndef principals_allowed_by_permission(context, permission):\n \"\"\" Provided a ``context`` (a resource object), and a ``permission``\n (a string or unicode object), if a :term:`authorization policy` is\n in effect, return a sequence of :term:`principal` ids that possess\n the permission in the ``context``. If no authorization policy is\n in effect, this will return a sequence with the single value\n :mod:`pyramid.security.Everyone` (the special principal\n identifier representing all principals).\n\n .. note::\n\n even if an :term:`authorization policy` is in effect,\n some (exotic) authorization policies may not implement the\n required machinery for this function; those will cause a\n :exc:`NotImplementedError` exception to be raised when this\n function is invoked.\n \"\"\"\n reg = get_current_registry()\n policy = reg.queryUtility(IAuthorizationPolicy)\n if policy is None:\n return [Everyone]\n return policy.principals_allowed_by_permission(context, permission)\n\ndef view_execution_permitted(context, request, name=''):\n \"\"\" If the view specified by ``context`` and ``name`` is protected\n by a :term:`permission`, check the permission associated with the\n view using the effective authentication/authorization policies and\n the ``request``. Return a boolean result. If no\n :term:`authorization policy` is in effect, or if the view is not\n protected by a permission, return ``True``. If no view can view found,\n an exception will be raised.\n\n .. versionchanged:: 1.4a4\n An exception is raised if no view is found.\n\n \"\"\"\n reg = _get_registry(request)\n provides = [IViewClassifier] + map_(providedBy, (request, context))\n # XXX not sure what to do here about using _find_views or analogue;\n # for now let's just keep it as-is\n view = reg.adapters.lookup(provides, ISecuredView, name=name)\n if view is None:\n view = reg.adapters.lookup(provides, IView, name=name)\n if view is None:\n raise TypeError('No registered view satisfies the constraints. '\n 'It would not make sense to claim that this view '\n '\"is\" or \"is not\" permitted.')\n return Allowed(\n 'Allowed: view name %r in context %r (no permission defined)' %\n (name, context))\n return view.__permitted__(context, request)\n\n\nclass PermitsResult(int):\n def __new__(cls, s, *args):\n inst = int.__new__(cls, cls.boolval)\n inst.s = s\n inst.args = args\n return inst\n\n @property\n def msg(self):\n return self.s % self.args\n\n def __str__(self):\n return self.msg\n\n def __repr__(self):\n return '<%s instance at %s with msg %r>' % (self.__class__.__name__,\n id(self),\n self.msg)\n\nclass Denied(PermitsResult):\n \"\"\" An instance of ``Denied`` is returned when a security-related\n API or other :app:`Pyramid` code denies an action unrelated to\n an ACL check. It evaluates equal to all boolean false types. It\n has an attribute named ``msg`` describing the circumstances for\n the deny.\"\"\"\n boolval = 0\n\nclass Allowed(PermitsResult):\n \"\"\" An instance of ``Allowed`` is returned when a security-related\n API or other :app:`Pyramid` code allows an action unrelated to\n an ACL check. It evaluates equal to all boolean true types. 
It\n has an attribute named ``msg`` describing the circumstances for\n the allow.\"\"\"\n boolval = 1\n\nclass ACLPermitsResult(int):\n def __new__(cls, ace, acl, permission, principals, context):\n inst = int.__new__(cls, cls.boolval)\n inst.permission = permission\n inst.ace = ace\n inst.acl = acl\n inst.principals = principals\n inst.context = context\n return inst\n\n @property\n def msg(self):\n s = ('%s permission %r via ACE %r in ACL %r on context %r for '\n 'principals %r')\n return s % (self.__class__.__name__,\n self.permission,\n self.ace,\n self.acl,\n self.context,\n self.principals)\n\n def __str__(self):\n return self.msg\n\n def __repr__(self):\n return '<%s instance at %s with msg %r>' % (self.__class__.__name__,\n id(self),\n self.msg)\n\nclass ACLDenied(ACLPermitsResult):\n \"\"\" An instance of ``ACLDenied`` represents that a security check made\n explicitly against ACL was denied. It evaluates equal to all boolean\n false types. It also has the following attributes: ``acl``, ``ace``,\n ``permission``, ``principals``, and ``context``. These attributes\n indicate the security values involved in the request. Its __str__ method\n prints a summary of these attributes for debugging purposes. The same\n summary is available as the ``msg`` attribute.\"\"\"\n boolval = 0\n\nclass ACLAllowed(ACLPermitsResult):\n \"\"\" An instance of ``ACLAllowed`` represents that a security check made\n explicitly against ACL was allowed. It evaluates equal to all boolean\n true types. It also has the following attributes: ``acl``, ``ace``,\n ``permission``, ``principals``, and ``context``. These attributes\n indicate the security values involved in the request. Its __str__ method\n prints a summary of these attributes for debugging purposes. The same\n summary is available as the ``msg`` attribute.\"\"\"\n boolval = 1\n\nclass AuthenticationAPIMixin(object):\n\n def _get_authentication_policy(self):\n reg = _get_registry(self)\n return reg.queryUtility(IAuthenticationPolicy)\n\n @property\n def authenticated_userid(self):\n \"\"\" Return the userid of the currently authenticated user or\n ``None`` if there is no :term:`authentication policy` in effect or\n there is no currently authenticated user.\n\n .. versionadded:: 1.5\n \"\"\"\n policy = self._get_authentication_policy()\n if policy is None:\n return None\n return policy.authenticated_userid(self)\n\n @property\n def unauthenticated_userid(self):\n \"\"\" Return an object which represents the *claimed* (not verified) user\n id of the credentials present in the request. ``None`` if there is no\n :term:`authentication policy` in effect or there is no user data\n associated with the current request. This differs from\n :attr:`~pyramid.request.Request.authenticated_userid`, because the\n effective authentication policy will not ensure that a record\n associated with the userid exists in persistent storage.\n\n .. versionadded:: 1.5\n \"\"\"\n policy = self._get_authentication_policy()\n if policy is None:\n return None\n return policy.unauthenticated_userid(self)\n\n @property\n def effective_principals(self):\n \"\"\" Return the list of 'effective' :term:`principal` identifiers\n for the ``request``. If no :term:`authentication policy` is in effect,\n this will return a one-element list containing the\n :data:`pyramid.security.Everyone` principal.\n\n .. 
versionadded:: 1.5\n \"\"\"\n policy = self._get_authentication_policy()\n if policy is None:\n return [Everyone]\n return policy.effective_principals(self)\n\nclass AuthorizationAPIMixin(object):\n\n def has_permission(self, permission, context=None):\n \"\"\" Given a permission and an optional context, returns an instance of\n :data:`pyramid.security.Allowed` if the permission is granted to this\n request with the provided context, or the context already associated\n with the request. Otherwise, returns an instance of\n :data:`pyramid.security.Denied`. This method delegates to the current\n authentication and authorization policies. Returns\n :data:`pyramid.security.Allowed` unconditionally if no authentication\n policy has been registered for this request. If ``context`` is not\n supplied or is supplied as ``None``, the context used is the\n ``request.context`` attribute.\n\n :param permission: Does this request have the given permission?\n :type permission: unicode, str\n :param context: A resource object or ``None``\n :type context: object\n :returns: `pyramid.security.PermitsResult`\n\n .. versionadded:: 1.5\n\n \"\"\"\n if context is None:\n context = self.context\n reg = _get_registry(self)\n authn_policy = reg.queryUtility(IAuthenticationPolicy)\n if authn_policy is None:\n return Allowed('No authentication policy in use.')\n authz_policy = reg.queryUtility(IAuthorizationPolicy)\n if authz_policy is None:\n raise ValueError('Authentication policy registered without '\n 'authorization policy') # should never happen\n principals = authn_policy.effective_principals(self)\n return authz_policy.permits(context, principals, permission)\n", "path": "pyramid/security.py"}], "after_files": [{"content": "from zope.deprecation import deprecated\nfrom zope.interface import providedBy\n\nfrom pyramid.interfaces import (\n IAuthenticationPolicy,\n IAuthorizationPolicy,\n ISecuredView,\n IView,\n IViewClassifier,\n )\n\nfrom pyramid.compat import map_\nfrom pyramid.threadlocal import get_current_registry\n\nEveryone = 'system.Everyone'\nAuthenticated = 'system.Authenticated'\nAllow = 'Allow'\nDeny = 'Deny'\n\n_marker = object()\n\nclass AllPermissionsList(object):\n \"\"\" Stand in 'permission list' to represent all permissions \"\"\"\n\n def __iter__(self):\n return iter(())\n\n def __contains__(self, other):\n return True\n\n def __eq__(self, other):\n return isinstance(other, self.__class__)\n\nALL_PERMISSIONS = AllPermissionsList()\nDENY_ALL = (Deny, Everyone, ALL_PERMISSIONS)\n\nNO_PERMISSION_REQUIRED = '__no_permission_required__'\n\ndef _get_registry(request):\n try:\n reg = request.registry\n except AttributeError:\n reg = get_current_registry() # b/c\n return reg\n\ndef _get_authentication_policy(request):\n registry = _get_registry(request)\n return registry.queryUtility(IAuthenticationPolicy)\n\ndef has_permission(permission, context, request):\n \"\"\"\n A function that calls :meth:`pyramid.request.Request.has_permission`\n and returns its result.\n \n .. deprecated:: 1.5\n Use :meth:`pyramid.request.Request.has_permission` instead.\n\n .. versionchanged:: 1.5a3\n If context is None, then attempt to use the context attribute of self;\n if not set, then the AttributeError is propagated.\n \"\"\" \n return request.has_permission(permission, context)\n\ndeprecated(\n 'has_permission',\n 'As of Pyramid 1.5 the \"pyramid.security.has_permission\" API is now '\n 'deprecated. It will be removed in Pyramid 1.8. 
gh_patches_debug_1615 | rasdani/github-patches | git_diff | conan-io__conan-center-index-789 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[question] Consistency in package and generator names
After the discussion in https://github.com/conan-io/conan/issues/6269#issuecomment-570182130, most packages got normalized to use the format discussed in this comment (https://github.com/conan-io/conan-center-index/pull/690 and related).
If I understand the intention correctly, in CMake the package should always be added with `CONAN_PKG::{name}` (for example `openssl`) and conan will then expand the correct `Find*`-macro using `self.cpp_info.names["cmake_find_package"]` or `self.cpp_info.names["cmake_find_package_multi"]` (in this case `OpenSSL`).
Some recipes now use an additional `self.cpp_info.names['cmake']` (a cursory search found [libcurl](https://github.com/conan-io/conan-center-index/blob/ee20546/recipes/libcurl/all/conanfile.py#L374) and [backward-cpp](https://github.com/conan-io/conan-center-index/blob/ee20546/recipes/backward-cpp/all/conanfile.py#L114)).
This leads to strange behavior:
- the `cmake` generator expects `CONAN_PKG::CURL` and will fail with `CONAN_PKG::libcurl`
- the `cmake_multi` works the opposite way, working with `CONAN_PKG::libcurl` and failing with uppercase `CURL`
In terms of consistency, I tend to say that the behavior of `cmake_multi` is the correct one, but either way, both CMake-variants should at least behave the same way.
I'm not sure if there are any side effects in removing the offending lines.
I didn't check if other generators have a similar behavior for different packages.
It might be a good idea to double-check all occurrences of superfluous or missing `cpp_info.names` ([tcl](https://github.com/conan-io/conan-center-index/blob/ee20546/recipes/tcl/8.6.10/conanfile.py#L198), for example, is missing the _multi_ entry). Is there a specific reason to split `cmake` and `cmake_multi` in this case (maybe a question to move to the main conan repo)?
--- END ISSUE ---
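To make the convention described in the issue concrete, here is a minimal sketch of a recipe that follows it; the package and class names are illustrative, not taken from any recipe in this report. Only the `Find*`-style generators are renamed, so `CONAN_PKG::example` keeps resolving for both `cmake` and `cmake_multi`:

```python
# Hypothetical recipe (illustrative names; not an actual conan-center recipe).
from conans import ConanFile


class ExampleConan(ConanFile):
    name = "example"  # cmake/cmake_multi consumers link CONAN_PKG::example

    def package_info(self):
        # Rename only the find_package-style generators; leaving the plain
        # "cmake" name untouched keeps CONAN_PKG::<name> consistent.
        self.cpp_info.names["cmake_find_package"] = "Example"
        self.cpp_info.names["cmake_find_package_multi"] = "Example"
```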
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `recipes/backward-cpp/all/conanfile.py`
Content:
```
1 from conans import ConanFile, CMake, tools
2 from conans.errors import ConanInvalidConfiguration
3 import os
4
5
6 class BackwardCppConan(ConanFile):
7 name = "backward-cpp"
8 description = "A beautiful stack trace pretty printer for C++"
9 homepage = "https://github.com/bombela/backward-cpp"
10 url = "https://github.com/conan-io/conan-center-index"
11 topics = ("conan", "backward-cpp", "stack-trace")
12 license = "MIT"
13 exports_sources = [ "CMakeLists.txt", "patches/backward-cpp-*.patch" ]
14 generators = "cmake"
15
16 settings = "os", "arch", "compiler", "build_type"
17 options = {
18 "stack_walking" : ["unwind", "backtrace"],
19 "stack_details" : ["dw", "bfd", "dwarf", "backtrace_symbol"],
20 "shared": [True, False],
21 "fPIC": [True, False]
22 }
23 default_options = {
24 "stack_walking": "unwind",
25 "stack_details": "dwarf",
26 "shared": True,
27 "fPIC": True
28 }
29
30 _source_subfolder = "source_subfolder"
31 _build_subfolder = "build_subfolder"
32
33 def _has_stack_walking(self, type):
34 return self.options.stack_walking == type
35
36 def _has_stack_details(self, type):
37 return self.options.stack_details == type
38
39 def configure(self):
40 if self.settings.os not in ["Linux", "Macos", "Android"]:
41 raise ConanInvalidConfiguration("upstream backward-cpp v{0} is not \
42 supported in {1}.".format(self.version, self.settings.os))
43 # windows implementation only available in upstream master branch
44
45 if self.settings.os == "Macos" and \
46 not self._has_stack_details("backtrace_symbol"):
47 raise ConanInvalidConfiguration("only stack_details=backtrace_symbol"
48 " is supported on Macos")
49
50 def requirements(self):
51 if self.settings.os in ["Linux", "Android"] and \
52 self._has_stack_details("dwarf"):
53 self.requires("libdwarf/20191104")
54
55 def system_requirements(self):
56 required_package = None
57 if self.settings.os == "Linux":
58 if self._has_stack_details("dw"):
59 if tools.os_info.linux_distro in ["ubuntu", "debian"]:
60 required_package = "libdw-dev"
61 elif tools.os_info.linux_distro in ["fedora", "centos"]:
62 required_package = "elfutils-libs"
63 elif tools.os_info.linux_distro == "opensuse":
64 required_package = "libdw-devel"
65 elif tools.os_info.linux_distro == "arch":
66 required_package = "libelf"
67
68 if self._has_stack_details("bfd"):
69 if tools.os_info.linux_distro in ["ubuntu", "debian"]:
70 required_package = "binutils-dev"
71 elif tools.os_info.linux_distro in ["fedora", "centos", "opensuse"]:
72 required_package = "binutils-devel"
73 elif tools.os_info.linux_distro == "arch":
74 required_package = "binutils"
75 elif tools.os_info.is_freebsd:
76 required_package = "libbfd"
77
78 if required_package != None:
79 installer = tools.SystemPackageTool()
80 if not installer.installed(required_package):
81 raise ConanInvalidConfiguration("backward-cpp requires {}.".format(required_package))
82
83 def source(self):
84 tools.get(**self.conan_data["sources"][self.version])
85 extracted_dir = self.name + "-" + self.version
86 os.rename(extracted_dir, self._source_subfolder)
87
88 def _configure_cmake(self):
89 cmake = CMake(self)
90 cmake.definitions['STACK_WALKING_UNWIND'] = self._has_stack_walking("unwind")
91 cmake.definitions['STACK_WALKING_BACKTRACE'] = self._has_stack_walking("backtrace")
92 cmake.definitions['STACK_DETAILS_AUTO_DETECT'] = False
93 cmake.definitions['STACK_DETAILS_BACKTRACE_SYMBOL'] = self._has_stack_details("backtrace_symbol")
94 cmake.definitions['STACK_DETAILS_DW'] = self._has_stack_details("dw")
95 cmake.definitions['STACK_DETAILS_BFD'] = self._has_stack_details("bfd")
96 cmake.definitions['STACK_DETAILS_DWARF'] = self._has_stack_details("dwarf")
97 cmake.definitions['BACKWARD_SHARED'] = self.options.shared
98 cmake.configure(build_folder=self._build_subfolder)
99 return cmake
100
101 def build(self):
102 for patch in self.conan_data["patches"][self.version]:
103 tools.patch(**patch)
104 cmake = self._configure_cmake()
105 cmake.build()
106
107 def package(self):
108 cmake = self._configure_cmake()
109 cmake.install()
110 self.copy(pattern="LICENSE*", dst="licenses", src=self._source_subfolder)
111 os.remove(os.path.join(self.package_folder, "lib", "backward", "BackwardConfig.cmake"))
112
113 def package_info(self):
114 self.cpp_info.names["cmake"] = "Backward"
115 self.cpp_info.names["cmake_find_package"] = "Backward"
116 self.cpp_info.names["cmake_find_package_multi"] = "Backward"
117
118 self.cpp_info.defines.append('BACKWARD_HAS_UNWIND={}'.format(int(self._has_stack_walking("unwind"))))
119 self.cpp_info.defines.append('BACKWARD_HAS_BACKTRACE={}'.format(int(self._has_stack_walking("backtrace"))))
120
121 self.cpp_info.defines.append('BACKWARD_HAS_BACKTRACE_SYMBOL={}'.format(int(self._has_stack_details("backtrace_symbol"))))
122 self.cpp_info.defines.append('BACKWARD_HAS_DW={}'.format(int(self._has_stack_details("dw"))))
123 self.cpp_info.defines.append('BACKWARD_HAS_BFD={}'.format(int(self._has_stack_details("bfd"))))
124 self.cpp_info.defines.append('BACKWARD_HAS_DWARF={}'.format(int(self._has_stack_details("dwarf"))))
125
126 self.cpp_info.libs = tools.collect_libs(self)
127 if self.settings.os == "Linux":
128 self.cpp_info.system_libs.extend(["dl"])
129 if self._has_stack_details("dw"):
130 self.cpp_info.system_libs.extend(["dw"])
131 if self._has_stack_details("bfd"):
132 self.cpp_info.system_libs.extend(["bfd"])
133
134
135
136
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/recipes/backward-cpp/all/conanfile.py b/recipes/backward-cpp/all/conanfile.py
--- a/recipes/backward-cpp/all/conanfile.py
+++ b/recipes/backward-cpp/all/conanfile.py
@@ -111,7 +111,6 @@
os.remove(os.path.join(self.package_folder, "lib", "backward", "BackwardConfig.cmake"))
def package_info(self):
- self.cpp_info.names["cmake"] = "Backward"
self.cpp_info.names["cmake_find_package"] = "Backward"
self.cpp_info.names["cmake_find_package_multi"] = "Backward"
| {"golden_diff": "diff --git a/recipes/backward-cpp/all/conanfile.py b/recipes/backward-cpp/all/conanfile.py\n--- a/recipes/backward-cpp/all/conanfile.py\n+++ b/recipes/backward-cpp/all/conanfile.py\n@@ -111,7 +111,6 @@\n os.remove(os.path.join(self.package_folder, \"lib\", \"backward\", \"BackwardConfig.cmake\"))\n \n def package_info(self):\n- self.cpp_info.names[\"cmake\"] = \"Backward\"\n self.cpp_info.names[\"cmake_find_package\"] = \"Backward\"\n self.cpp_info.names[\"cmake_find_package_multi\"] = \"Backward\"\n", "issue": "[question] Consistency in package and generator names\nAfter the discussion in https://github.com/conan-io/conan/issues/6269#issuecomment-570182130, most packages got normalized to use the format discussed in this comment (https://github.com/conan-io/conan-center-index/pull/690 and related).\r\n\r\nIf I understand the intention correctly, in CMake the package should always be added with `CONAN_PKG::{name}` (for example `openssl`) and conan will then expand the correct `Find*`-macro using `self.cpp_info.names[\"cmake_find_package\"]` or `self.cpp_info.names[\"cmake_find_package_multi\"]` (in this case `OpenSSL`).\r\n\r\nSome recipes now use an additonal `self.cpp_info.names['cmake']` (a cursory search found [libcurl](https://github.com/conan-io/conan-center-index/blob/ee20546/recipes/libcurl/all/conanfile.py#L374) and [backward-cpp](https://github.com/conan-io/conan-center-index/blob/ee20546/recipes/backward-cpp/all/conanfile.py#L114)).\r\n\r\nThis leads to strange behavior:\r\n- the `cmake` generator expects `CONAN_PKG::CURL` and will fail with `CONAN_PKG::libcurl`\r\n- the `cmake_multi` works the opposite way, working with `CONAN_PKG::libcurl` and failing with uppercase `CURL`\r\n\r\nIn terms of consistency, I tend to say that the behavior of `cmake_multi` is the correct one, but either way, both CMake-variants should at least behave the same way.\r\n\r\nI'm not sure if there are any side effects in removing the offending lines.\r\n\r\nI didn't check if other generators have a similar behavior for different packages.\r\n\r\nIt might be a good idea to double-check all occurrences of superfluous or missing `cpp_info.names`, ([tcl](https://github.com/conan-io/conan-center-index/blob/ee20546/recipes/tcl/8.6.10/conanfile.py#L198), for example is missing the _multi_ entry). 
Is there a specific reason to split `cmake` and `cmake_multi` in this case (maybe a question to move to the main conan repo)?\n", "before_files": [{"content": "from conans import ConanFile, CMake, tools\nfrom conans.errors import ConanInvalidConfiguration\nimport os\n\n\nclass BackwardCppConan(ConanFile):\n name = \"backward-cpp\"\n description = \"A beautiful stack trace pretty printer for C++\"\n homepage = \"https://github.com/bombela/backward-cpp\"\n url = \"https://github.com/conan-io/conan-center-index\"\n topics = (\"conan\", \"backward-cpp\", \"stack-trace\")\n license = \"MIT\"\n exports_sources = [ \"CMakeLists.txt\", \"patches/backward-cpp-*.patch\" ]\n generators = \"cmake\"\n\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n options = {\n \"stack_walking\" : [\"unwind\", \"backtrace\"],\n \"stack_details\" : [\"dw\", \"bfd\", \"dwarf\", \"backtrace_symbol\"],\n \"shared\": [True, False],\n \"fPIC\": [True, False]\n }\n default_options = {\n \"stack_walking\": \"unwind\",\n \"stack_details\": \"dwarf\",\n \"shared\": True,\n \"fPIC\": True\n }\n\n _source_subfolder = \"source_subfolder\"\n _build_subfolder = \"build_subfolder\"\n\n def _has_stack_walking(self, type):\n return self.options.stack_walking == type\n\n def _has_stack_details(self, type):\n return self.options.stack_details == type\n \n def configure(self):\n if self.settings.os not in [\"Linux\", \"Macos\", \"Android\"]:\n raise ConanInvalidConfiguration(\"upstream backward-cpp v{0} is not \\\n supported in {1}.\".format(self.version, self.settings.os))\n # windows implementation only available in upstream master branch\n\n if self.settings.os == \"Macos\" and \\\n not self._has_stack_details(\"backtrace_symbol\"):\n raise ConanInvalidConfiguration(\"only stack_details=backtrace_symbol\"\n \" is supported on Macos\")\n \n def requirements(self):\n if self.settings.os in [\"Linux\", \"Android\"] and \\\n self._has_stack_details(\"dwarf\"):\n self.requires(\"libdwarf/20191104\")\n \n def system_requirements(self):\n required_package = None\n if self.settings.os == \"Linux\":\n if self._has_stack_details(\"dw\"):\n if tools.os_info.linux_distro in [\"ubuntu\", \"debian\"]:\n required_package = \"libdw-dev\"\n elif tools.os_info.linux_distro in [\"fedora\", \"centos\"]:\n required_package = \"elfutils-libs\"\n elif tools.os_info.linux_distro == \"opensuse\":\n required_package = \"libdw-devel\"\n elif tools.os_info.linux_distro == \"arch\":\n required_package = \"libelf\"\n\n if self._has_stack_details(\"bfd\"):\n if tools.os_info.linux_distro in [\"ubuntu\", \"debian\"]:\n required_package = \"binutils-dev\"\n elif tools.os_info.linux_distro in [\"fedora\", \"centos\", \"opensuse\"]:\n required_package = \"binutils-devel\"\n elif tools.os_info.linux_distro == \"arch\":\n required_package = \"binutils\"\n elif tools.os_info.is_freebsd:\n required_package = \"libbfd\"\n \n if required_package != None:\n installer = tools.SystemPackageTool()\n if not installer.installed(required_package):\n raise ConanInvalidConfiguration(\"backward-cpp requires {}.\".format(required_package))\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n extracted_dir = self.name + \"-\" + self.version\n os.rename(extracted_dir, self._source_subfolder)\n\n def _configure_cmake(self):\n cmake = CMake(self)\n cmake.definitions['STACK_WALKING_UNWIND'] = self._has_stack_walking(\"unwind\")\n cmake.definitions['STACK_WALKING_BACKTRACE'] = self._has_stack_walking(\"backtrace\")\n 
cmake.definitions['STACK_DETAILS_AUTO_DETECT'] = False\n cmake.definitions['STACK_DETAILS_BACKTRACE_SYMBOL'] = self._has_stack_details(\"backtrace_symbol\")\n cmake.definitions['STACK_DETAILS_DW'] = self._has_stack_details(\"dw\")\n cmake.definitions['STACK_DETAILS_BFD'] = self._has_stack_details(\"bfd\")\n cmake.definitions['STACK_DETAILS_DWARF'] = self._has_stack_details(\"dwarf\")\n cmake.definitions['BACKWARD_SHARED'] = self.options.shared\n cmake.configure(build_folder=self._build_subfolder)\n return cmake\n\n def build(self):\n for patch in self.conan_data[\"patches\"][self.version]:\n tools.patch(**patch)\n cmake = self._configure_cmake()\n cmake.build()\n\n def package(self):\n cmake = self._configure_cmake()\n cmake.install()\n self.copy(pattern=\"LICENSE*\", dst=\"licenses\", src=self._source_subfolder)\n os.remove(os.path.join(self.package_folder, \"lib\", \"backward\", \"BackwardConfig.cmake\"))\n\n def package_info(self):\n self.cpp_info.names[\"cmake\"] = \"Backward\"\n self.cpp_info.names[\"cmake_find_package\"] = \"Backward\"\n self.cpp_info.names[\"cmake_find_package_multi\"] = \"Backward\"\n\n self.cpp_info.defines.append('BACKWARD_HAS_UNWIND={}'.format(int(self._has_stack_walking(\"unwind\"))))\n self.cpp_info.defines.append('BACKWARD_HAS_BACKTRACE={}'.format(int(self._has_stack_walking(\"backtrace\"))))\n \n self.cpp_info.defines.append('BACKWARD_HAS_BACKTRACE_SYMBOL={}'.format(int(self._has_stack_details(\"backtrace_symbol\"))))\n self.cpp_info.defines.append('BACKWARD_HAS_DW={}'.format(int(self._has_stack_details(\"dw\"))))\n self.cpp_info.defines.append('BACKWARD_HAS_BFD={}'.format(int(self._has_stack_details(\"bfd\"))))\n self.cpp_info.defines.append('BACKWARD_HAS_DWARF={}'.format(int(self._has_stack_details(\"dwarf\"))))\n\n self.cpp_info.libs = tools.collect_libs(self)\n if self.settings.os == \"Linux\":\n self.cpp_info.system_libs.extend([\"dl\"])\n if self._has_stack_details(\"dw\"):\n self.cpp_info.system_libs.extend([\"dw\"]) \n if self._has_stack_details(\"bfd\"):\n self.cpp_info.system_libs.extend([\"bfd\"])\n\n\n \n", "path": "recipes/backward-cpp/all/conanfile.py"}], "after_files": [{"content": "from conans import ConanFile, CMake, tools\nfrom conans.errors import ConanInvalidConfiguration\nimport os\n\n\nclass BackwardCppConan(ConanFile):\n name = \"backward-cpp\"\n description = \"A beautiful stack trace pretty printer for C++\"\n homepage = \"https://github.com/bombela/backward-cpp\"\n url = \"https://github.com/conan-io/conan-center-index\"\n topics = (\"conan\", \"backward-cpp\", \"stack-trace\")\n license = \"MIT\"\n exports_sources = [ \"CMakeLists.txt\", \"patches/backward-cpp-*.patch\" ]\n generators = \"cmake\"\n\n settings = \"os\", \"arch\", \"compiler\", \"build_type\"\n options = {\n \"stack_walking\" : [\"unwind\", \"backtrace\"],\n \"stack_details\" : [\"dw\", \"bfd\", \"dwarf\", \"backtrace_symbol\"],\n \"shared\": [True, False],\n \"fPIC\": [True, False]\n }\n default_options = {\n \"stack_walking\": \"unwind\",\n \"stack_details\": \"dwarf\",\n \"shared\": True,\n \"fPIC\": True\n }\n\n _source_subfolder = \"source_subfolder\"\n _build_subfolder = \"build_subfolder\"\n\n def _has_stack_walking(self, type):\n return self.options.stack_walking == type\n\n def _has_stack_details(self, type):\n return self.options.stack_details == type\n \n def configure(self):\n if self.settings.os not in [\"Linux\", \"Macos\", \"Android\"]:\n raise ConanInvalidConfiguration(\"upstream backward-cpp v{0} is not \\\n supported in {1}.\".format(self.version, 
self.settings.os))\n # windows implementation only available in upstream master branch\n\n if self.settings.os == \"Macos\" and \\\n not self._has_stack_details(\"backtrace_symbol\"):\n raise ConanInvalidConfiguration(\"only stack_details=backtrace_symbol\"\n \" is supported on Macos\")\n \n def requirements(self):\n if self.settings.os in [\"Linux\", \"Android\"] and \\\n self._has_stack_details(\"dwarf\"):\n self.requires(\"libdwarf/20191104\")\n \n def system_requirements(self):\n required_package = None\n if self.settings.os == \"Linux\":\n if self._has_stack_details(\"dw\"):\n if tools.os_info.linux_distro in [\"ubuntu\", \"debian\"]:\n required_package = \"libdw-dev\"\n elif tools.os_info.linux_distro in [\"fedora\", \"centos\"]:\n required_package = \"elfutils-libs\"\n elif tools.os_info.linux_distro == \"opensuse\":\n required_package = \"libdw-devel\"\n elif tools.os_info.linux_distro == \"arch\":\n required_package = \"libelf\"\n\n if self._has_stack_details(\"bfd\"):\n if tools.os_info.linux_distro in [\"ubuntu\", \"debian\"]:\n required_package = \"binutils-dev\"\n elif tools.os_info.linux_distro in [\"fedora\", \"centos\", \"opensuse\"]:\n required_package = \"binutils-devel\"\n elif tools.os_info.linux_distro == \"arch\":\n required_package = \"binutils\"\n elif tools.os_info.is_freebsd:\n required_package = \"libbfd\"\n \n if required_package != None:\n installer = tools.SystemPackageTool()\n if not installer.installed(required_package):\n raise ConanInvalidConfiguration(\"backward-cpp requires {}.\".format(required_package))\n\n def source(self):\n tools.get(**self.conan_data[\"sources\"][self.version])\n extracted_dir = self.name + \"-\" + self.version\n os.rename(extracted_dir, self._source_subfolder)\n\n def _configure_cmake(self):\n cmake = CMake(self)\n cmake.definitions['STACK_WALKING_UNWIND'] = self._has_stack_walking(\"unwind\")\n cmake.definitions['STACK_WALKING_BACKTRACE'] = self._has_stack_walking(\"backtrace\")\n cmake.definitions['STACK_DETAILS_AUTO_DETECT'] = False\n cmake.definitions['STACK_DETAILS_BACKTRACE_SYMBOL'] = self._has_stack_details(\"backtrace_symbol\")\n cmake.definitions['STACK_DETAILS_DW'] = self._has_stack_details(\"dw\")\n cmake.definitions['STACK_DETAILS_BFD'] = self._has_stack_details(\"bfd\")\n cmake.definitions['STACK_DETAILS_DWARF'] = self._has_stack_details(\"dwarf\")\n cmake.definitions['BACKWARD_SHARED'] = self.options.shared\n cmake.configure(build_folder=self._build_subfolder)\n return cmake\n\n def build(self):\n for patch in self.conan_data[\"patches\"][self.version]:\n tools.patch(**patch)\n cmake = self._configure_cmake()\n cmake.build()\n\n def package(self):\n cmake = self._configure_cmake()\n cmake.install()\n self.copy(pattern=\"LICENSE*\", dst=\"licenses\", src=self._source_subfolder)\n os.remove(os.path.join(self.package_folder, \"lib\", \"backward\", \"BackwardConfig.cmake\"))\n\n def package_info(self):\n self.cpp_info.names[\"cmake_find_package\"] = \"Backward\"\n self.cpp_info.names[\"cmake_find_package_multi\"] = \"Backward\"\n\n self.cpp_info.defines.append('BACKWARD_HAS_UNWIND={}'.format(int(self._has_stack_walking(\"unwind\"))))\n self.cpp_info.defines.append('BACKWARD_HAS_BACKTRACE={}'.format(int(self._has_stack_walking(\"backtrace\"))))\n \n self.cpp_info.defines.append('BACKWARD_HAS_BACKTRACE_SYMBOL={}'.format(int(self._has_stack_details(\"backtrace_symbol\"))))\n self.cpp_info.defines.append('BACKWARD_HAS_DW={}'.format(int(self._has_stack_details(\"dw\"))))\n 
self.cpp_info.defines.append('BACKWARD_HAS_BFD={}'.format(int(self._has_stack_details(\"bfd\"))))\n self.cpp_info.defines.append('BACKWARD_HAS_DWARF={}'.format(int(self._has_stack_details(\"dwarf\"))))\n\n self.cpp_info.libs = tools.collect_libs(self)\n if self.settings.os == \"Linux\":\n self.cpp_info.system_libs.extend([\"dl\"])\n if self._has_stack_details(\"dw\"):\n self.cpp_info.system_libs.extend([\"dw\"]) \n if self._has_stack_details(\"bfd\"):\n self.cpp_info.system_libs.extend([\"bfd\"])\n\n\n \n", "path": "recipes/backward-cpp/all/conanfile.py"}]} |
gh_patches_debug_1616 | rasdani/github-patches | git_diff | openstates__openstates-scrapers-2041 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
OH failing since at least 2017-12-23
OH has been failing since 2017-12-23
Based on automated runs it appears that OH has not run successfully in 2 days (2017-12-23).
```
23:01:27 INFO pupa: save post 85 as post_7fa5619a-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 86 as post_7fa562c6-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 88 as post_7fa56550-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 87 as post_7fa563fc-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 89 as post_7fa56690-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 90 as post_7fa567d0-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 91 as post_7fa56906-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 92 as post_7fa56a32-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 93 as post_7fa56bb8-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 94 as post_7fa56d02-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 95 as post_7fa56e38-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 97 as post_7fa570c2-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 96 as post_7fa56f8c-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 98 as post_7fa57202-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save post 99 as post_7fa57338-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save organization Democratic as organization_7fafd56c-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO pupa: save organization Republican as organization_7fafbd70-e867-11e7-97ee-0242ac110002.json
23:01:27 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/chamber/House/legislators?per_page=100
23:01:27 INFO pupa: no session, using 132
23:01:57 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/chamber/Senate/legislators?per_page=100
23:02:06 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/amendments
23:02:07 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/fiscals
23:02:09 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/synopsiss
23:02:09 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/analysiss
23:02:11 INFO scrapelib: GET - https://www.legislature.ohio.gov/legislation?pageSize=500&start=1&sort=LegislationNumber&dir=asc&statusCode&generalAssemblies=132
23:02:25 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/resolutions/hjr1/
23:02:26 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/resolutions/hjr1/actions
23:02:27 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/resolutions/hjr1/votes
no pupa_settings on path, using defaults
oh (scrape, import)
bills: {}
people: {}
Traceback (most recent call last):
  File "/opt/openstates/venv-pupa//bin/pupa", line 11, in <module>
    load_entry_point('pupa', 'console_scripts', 'pupa')()
  File "/opt/openstates/venv-pupa/src/pupa/pupa/cli/__main__.py", line 67, in main
    subcommands[args.subcommand].handle(args, other)
  File "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 260, in handle
    return self.do_handle(args, other, juris)
  File "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 305, in do_handle
    report['scrape'] = self.do_scrape(juris, args, scrapers)
  File "/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py", line 173, in do_scrape
    report[scraper_name] = scraper.do_scrape(**scrape_args)
  File "/opt/openstates/venv-pupa/src/pupa/pupa/scrape/base.py", line 111, in do_scrape
    for obj in self.scrape(**kwargs) or []:
  File "/opt/openstates/openstates/openstates/oh/bills.py", line 200, in scrape
    chamber_dict, vote_results)
  File "/opt/openstates/openstates/openstates/oh/bills.py", line 415, in process_vote
    vote.yes(legislators[voter_id])
KeyError: 1605
```
Visit http://bobsled.openstates.org for more info.
--- END ISSUE ---
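The `KeyError: 1605` in the traceback points at a key-type mismatch between two API feeds. A minimal reproduction, with made-up sample values, might look like this:

```python
# Illustrative only: the legislator feed is assumed to return med_id as a
# string (or None), while the votes feed reports voter ids as plain ints.
legislators = {"1605": "Jane Example"}  # dict keyed by raw med_id values
voter_id = 1605                         # id as found in a vote's "yeas" list

try:
    legislators[voter_id]               # raises KeyError: 1605, as in the log
except KeyError:
    # Normalizing keys to int up front (and skipping empty ids) avoids this;
    # the golden diff below takes exactly that approach.
    legislators = {int(k): v for k, v in legislators.items() if k}

assert legislators[voter_id] == "Jane Example"
```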
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `openstates/oh/bills.py`
Content:
```
1 import os
2 import datetime
3
4 from pupa.scrape import Scraper, Bill, VoteEvent
5 from pupa.scrape.base import ScrapeError
6
7 import xlrd
8 import scrapelib
9 import lxml.html
10 import pytz
11
12
13 class OHBillScraper(Scraper):
14 _tz = pytz.timezone('US/Eastern')
15
16 def scrape(self, session=None, chambers=None):
17 # Bills endpoint can sometimes take a very long time to load
18 self.timeout = 300
19
20 if not session:
21 session = self.latest_session()
22 self.info('no session, using %s', session)
23
24 if int(session) < 128:
25 raise AssertionError("No data for period {}".format(session))
26
27 elif int(session) < 131:
28 # they changed their data format starting in 131st and added
29 # an undocumented API
30 yield from self.old_scrape(session)
31
32 else:
33 chamber_dict = {"Senate": "upper", "House": "lower",
34 "House of Representatives": "lower",
35 "house": "lower", "senate": "upper"}
36
37             # so presumably not everything passes, but we haven't
38 # seen anything not pass yet, so we'll need to wait
39 # till it fails and get the right language in here
40 vote_results = {"approved": True,
41 "passed": True,
42 "adopted": True,
43 "true": True,
44 "false": False,
45 "failed": False,
46 True: True,
47 False: False}
48
49 action_dict = {"ref_ctte_100": "referral-committee",
50 "intro_100": "introduction",
51 "pass_300": "passage",
52 "intro_110": "reading-1",
53 "refer_210": "referral-committee",
54 "crpt_301": None,
55 "crpt_317": None,
56 "concur_606": "passage",
57 "pass_301": "passage",
58 "refer_220": "referral-committee",
59 "intro_102": ["introduction", "passage"],
60 "intro_105": ["introduction", "passage"],
61 "intro_ref_ctte_100": "referral-committee",
62 "refer_209": None,
63 "intro_108": ["introduction", "passage"],
64 "intro_103": ["introduction", "passage"],
65 "msg_reso_503": "passage",
66 "intro_107": ["introduction", "passage"],
67 "imm_consid_360": "passage",
68 "refer_213": None,
69 "adopt_reso_100": "passage",
70 "msg_507": "amendment-passage",
71 "confer_713": None,
72 "concur_603": None,
73 "confer_712": None,
74 "msg_506": "amendment-failure",
75 "receive_message_100": "passage",
76 "motion_920": None,
77 "concur_611": None,
78 "confer_735": None
79 }
80
81 base_url = "http://search-prod.lis.state.oh.us"
82 first_page = base_url
83 first_page += "/solarapi/v1/general_assembly_{session}/".format(session=session)
84 legislators = self.get_legislator_ids(first_page)
85 all_amendments = self.get_other_data_source(first_page, base_url, "amendments")
86 all_fiscals = self.get_other_data_source(first_page, base_url, "fiscals")
87 all_synopsis = self.get_other_data_source(first_page, base_url, "synopsiss")
88 all_analysis = self.get_other_data_source(first_page, base_url, "analysiss")
89
90 for row in self.get_bill_rows(session):
91 number_link, ga, title, primary_sponsor, status = row.xpath('td')
92
93 bill_id = number_link.text_content()
94 title = title.text_content().strip()
95 chamber = 'lower' if 'H' in bill_id else 'upper'
96 classification = 'bill' if 'B' in bill_id else 'resolution'
97
98 bill = Bill(bill_id, legislative_session=session, chamber=chamber,
99 title=title, classification=classification)
100 bill.add_source(number_link.xpath('a/@href')[0])
101
102 # get bill from API
103 bill_api_url = ('http://search-prod.lis.state.oh.us/solarapi/v1/'
104 'general_assembly_{}/{}/{}/'.format(
105 session,
106 'bills' if 'B' in bill_id else 'resolutions',
107 bill_id.lower().replace(' ', '')
108 ))
109 data = self.get(bill_api_url).json()
110
111 # add title if no short title
112 if not bill.title:
113 bill.title = data['items'][0]['longtitle']
114 bill.add_title(data['items'][0]['longtitle'], 'long title')
115
116 # this stuff is version-specific
117 for version in data['items']:
118 version_name = version["version"]
119 version_link = base_url+version["pdfDownloadLink"]
120 bill.add_version_link(version_name, version_link, media_type='application/pdf')
121
122 # we'll use latest bill_version for everything else
123 bill_version = data['items'][0]
124 bill.add_source(bill_api_url)
125
126 # subjects
127 for subj in bill_version["subjectindexes"]:
128 try:
129 bill.add_subject(subj["primary"])
130 except KeyError:
131 pass
132 try:
133 secondary_subj = subj["secondary"]
134 except KeyError:
135 secondary_subj = ""
136 if secondary_subj:
137 bill.add_subject(secondary_subj)
138
139 # sponsors
140 sponsors = bill_version["sponsors"]
141 for sponsor in sponsors:
142 sponsor_name = self.get_sponsor_name(sponsor)
143 bill.add_sponsorship(
144 sponsor_name,
145 classification='primary',
146 entity_type='person',
147 primary=True
148 )
149
150 cosponsors = bill_version["cosponsors"]
151 for sponsor in cosponsors:
152 sponsor_name = self.get_sponsor_name(sponsor)
153 bill.add_sponsorship(
154 sponsor_name,
155 classification='cosponsor',
156 entity_type='person',
157 primary=False,
158 )
159
160 try:
161 action_doc = self.get(base_url+bill_version["action"][0]["link"])
162 except scrapelib.HTTPError:
163 pass
164 else:
165
166 actions = action_doc.json()
167 for action in reversed(actions["items"]):
168 actor = chamber_dict[action["chamber"]]
169 action_desc = action["description"]
170 try:
171 action_type = action_dict[action["actioncode"]]
172 except KeyError:
173 self.warning("Unknown action {desc} with code {code}."
174 " Add it to the action_dict"
175 ".".format(desc=action_desc,
176 code=action["actioncode"]))
177 action_type = None
178
179 date = self._tz.localize(datetime.datetime.strptime(
180 action["datetime"],
181 "%Y-%m-%dT%H:%M:%S"))
182 date = "{:%Y-%m-%d}".format(date)
183
184 bill.add_action(action_desc,
185 date, chamber=actor,
186 classification=action_type)
187
188 # attach documents gathered earlier
189 self.add_document(all_amendments, bill_id, "amendment", bill, base_url)
190 self.add_document(all_fiscals, bill_id, "fiscal", bill, base_url)
191 self.add_document(all_synopsis, bill_id, "synopsis", bill, base_url)
192 self.add_document(all_analysis, bill_id, "analysis", bill, base_url)
193
194 # votes
195 vote_url = base_url+bill_version["votes"][0]["link"]
196 vote_doc = self.get(vote_url)
197 votes = vote_doc.json()
198 yield from self.process_vote(votes, vote_url,
199 base_url, bill, legislators,
200 chamber_dict, vote_results)
201
202 vote_url = base_url
203 vote_url += bill_version["cmtevotes"][0]["link"]
204 try:
205 vote_doc = self.get(vote_url)
206 except scrapelib.HTTPError:
207 self.warning("Vote page not "
208 "loading; skipping: {}".format(vote_url))
209 continue
210 votes = vote_doc.json()
211 yield from self.process_vote(votes, vote_url,
212 base_url, bill, legislators,
213 chamber_dict, vote_results)
214
215 # we have never seen a veto or a disapprove, but they seem important.
216 # so we'll check and throw an error if we find one
217 # life is fragile. so are our scrapers.
218 if "veto" in bill_version:
219 veto_url = base_url+bill_version["veto"][0]["link"]
220 veto_json = self.get(veto_url).json()
221 if len(veto_json["items"]) > 0:
222 raise AssertionError("Whoa, a veto! We've never"
223 " gotten one before."
224 " Go write some code to deal"
225 " with it: {}".format(veto_url))
226
227 if "disapprove" in bill_version:
228 disapprove_url = base_url+bill_version["disapprove"][0]["link"]
229 disapprove_json = self.get(disapprove_url).json()
230 if len(disapprove_json["items"]) > 0:
231 raise AssertionError("Whoa, a disapprove! We've never"
232 " gotten one before."
233 " Go write some code to deal "
234 "with it: {}".format(disapprove_url))
235
236 yield bill
237
238 def pages(self, base_url, first_page):
239 page = self.get(first_page)
240 page = page.json()
241 yield page
242 while "nextLink" in page:
243 page = self.get(base_url+page["nextLink"])
244 page = page.json()
245 yield page
246
247 def get_bill_rows(self, session, start=1):
248 # bill API endpoint times out so we're now getting this from the normal search
249 bill_url = ('https://www.legislature.ohio.gov/legislation?pageSize=500&start={}&'
250 'sort=LegislationNumber&dir=asc&statusCode&generalAssemblies={}'.format(
251 start, session)
252 )
253 doc = self.get(bill_url)
254 doc = lxml.html.fromstring(doc.text)
255 doc.make_links_absolute(bill_url)
256
257 rows = doc.xpath('//tr')[1:]
258 yield from rows
259 if len(rows) == 500:
260 yield from self.get_bill_rows(session, start+500)
261 # if page is full, get next page - could use pagination info in
262 # //div[id="searchResultsInfo"] to improve this
263
264 def get_other_data_source(self, first_page, base_url, source_name):
265 # produces a dictionary from bill_id to a list of
266 # one of the following:
267 # amendments, analysis, fiscals, synopsis
268 # could pull these by bill, but doing it in bulk
269 # and then matching on our end will get us by with way fewer
270 # api calls
271
272 bill_dict = {}
273 for page in self.pages(base_url, first_page+source_name):
274 for item in page["items"]:
275 billno = item["billno"]
276 if billno not in bill_dict:
277 bill_dict[billno] = []
278 bill_dict[billno].append(item)
279
280 return bill_dict
281
282 def add_document(self, documents, bill_id, type_of_document, bill, base_url):
283 try:
284 documents = documents[bill_id]
285 except KeyError:
286 return
287
288 leg_ver_types = {"IN": "Introduction",
289 "RS": "Reported: Senate",
290 "PS": "Passed: Senate",
291 "RH": "Reported: House",
292 "PH": "Passed: House",
293 "": "",
294 "ICS": "",
295 "IC": "",
296 "RCS": "",
297 "EN": "Enacted",
298 "RCH": "Re-referred",
299 "RRH": "",
300 "PHC": "",
301 "CR": ""
302 }
303
304 for item in documents:
305 if type_of_document == "amendment":
306 name = item["amendnum"] + " " + item["version"]
307 else:
308 name = item["name"] or type_of_document
309 link = base_url+item["link"]+"?format=pdf"
310 try:
311 self.head(link)
312 except scrapelib.HTTPError:
313 self.logger.warning("The link to doc {name}"
314 " does not exist, skipping".format(name=name))
315 continue
316 if "legacyver" in item:
317 try:
318 ver = leg_ver_types[item["legacyver"]]
319 except KeyError:
320 self.logger.warning(
321 "New legacyver; check the type and add it to the "
322 "leg_ver_types dictionary: {} ({})".format(
323 item["legacyver"], item['link']))
324 ver = ""
325 if ver:
326 name = name+": "+ver
327 bill.add_document_link(name, link, media_type="application/pdf")
328
329 def get_legislator_ids(self, base_url):
330 legislators = {}
331 for chamber in ["House", "Senate"]:
332 url = base_url+"chamber/{chamber}/legislators?per_page=100"
333 doc = self.get(url.format(chamber=chamber))
334 leg_json = doc.json()
335 for leg in leg_json["items"]:
336 legislators[leg["med_id"]] = leg["displayname"]
337
338 return legislators
339
340 def get_sponsor_name(self, sponsor):
341 return " ".join([sponsor["firstname"], sponsor["lastname"]])
342
343 def process_vote(self, votes, url, base_url, bill, legislators, chamber_dict, vote_results):
344 for v in votes["items"]:
345 try:
346 v["yeas"]
347 except KeyError:
348 # sometimes the actual vote is buried a second layer deep
349 v = self.get(base_url+v["link"]).json()
350 try:
351 v["yeas"]
352 except KeyError:
353 self.logger.warning("No vote info available, skipping")
354 continue
355
356 try:
357 chamber = chamber_dict[v["chamber"]]
358 except KeyError:
359 chamber = "lower" if "house" in v["apn"] else "upper"
360 try:
361 date = self._tz.localize(datetime.datetime.strptime(v["date"], "%m/%d/%y"))
362 date = "{:%Y-%m-%d}".format(date)
363 except KeyError:
364 try:
365 date = self._tz.localize(datetime.datetime.strptime(v["occurred"], "%m/%d/%y"))
366 date = "{:%Y-%m-%d}".format(date)
367 except KeyError:
368 self.logger.warning("No date found for vote, skipping")
369 continue
370 try:
371 motion = v["action"]
372 except KeyError:
373 motion = v["motiontype"]
374
375 # Sometimes Ohio's SOLAR will only return part of the JSON, so in that case skip
376 if (not motion and isinstance(v['yeas'], str)
377 and isinstance(v['nays'], str)):
378                 warningText = 'Malformed JSON found for vote ("revno" of {}); skipping'
379                 self.warning(warningText.format(v['revno']))
380 continue
381
382 result = v.get("results") or v.get("passed")
383 if result is None:
384 if len(v['yeas']) > len(v['nays']):
385 result = "passed"
386 else:
387 result = "failed"
388
389 passed = vote_results[result.lower()]
390 if "committee" in v:
391 vote = VoteEvent(chamber=chamber,
392 start_date=date,
393 motion_text=motion,
394 result='pass' if passed else 'fail',
395 # organization=v["committee"],
396 bill=bill,
397 classification='passed'
398 )
399 else:
400 vote = VoteEvent(chamber=chamber,
401 start_date=date,
402 motion_text=motion,
403 result='pass' if passed else 'fail',
404 classification='passed',
405 bill=bill
406 )
407 vote.pupa_id = str(v['revno'])
408 # the yea and nay counts are not displayed, but vote totals are
409 # and passage status is.
410 yes_count = 0
411 no_count = 0
412 absent_count = 0
413 excused_count = 0
414 for voter_id in v["yeas"]:
415 vote.yes(legislators[voter_id])
416 yes_count += 1
417 for voter_id in v["nays"]:
418 vote.no(legislators[voter_id])
419 no_count += 1
420 if "absent" in v:
421 for voter_id in v["absent"]:
422 vote.vote('absent', legislators[voter_id])
423 absent_count += 1
424 if "excused" in v:
425 for voter_id in v["excused"]:
426 vote.vote('excused', legislators[voter_id])
427 excused_count += 1
428
429 vote.set_count('yes', yes_count)
430 vote.set_count('no', no_count)
431 vote.set_count('absent', absent_count)
432 vote.set_count('excused', excused_count)
433 # check to see if there are any other things that look
434 # like vote categories, throw a warning if so
435 for key, val in v.items():
436 if (type(val) == list and len(val) > 0 and
437 key not in ["yeas", "nays", "absent", "excused"]):
438 if val[0] in legislators:
439 self.logger.warning("{k} looks like a vote type that's not being counted."
440 " Double check it?".format(k=key))
441 vote.add_source(url)
442
443 yield vote
444
445 def old_scrape(self, session=None):
446 status_report_url = "http://www.legislature.ohio.gov/legislation/status-reports"
447
448         # ssl verification off due to Ohio not correctly implementing SSL
449 if not session:
450 session = self.latest_session()
451 self.info('no session, using %s', session)
452
453 doc = self.get(status_report_url, verify=False).text
454 doc = lxml.html.fromstring(doc)
455 doc.make_links_absolute(status_report_url)
456 xpath = "//div[contains(text(),'{}')]/following-sibling::table"
457 status_table = doc.xpath(xpath.format(session))[0]
458 status_links = status_table.xpath(".//a[contains(text(),'Excel')]/@href")
459
460 for url in status_links:
461
462 try:
463 fname, resp = self.urlretrieve(url)
464 except scrapelib.HTTPError as report:
465 self.logger.warning("Missing report {}".format(report))
466 continue
467
468 sh = xlrd.open_workbook(fname).sheet_by_index(0)
469
470 # once workbook is open, we can remove tempfile
471 os.remove(fname)
472 for rownum in range(1, sh.nrows):
473 bill_id = sh.cell(rownum, 0).value
474
475 bill_type = "resolution" if "R" in bill_id else "bill"
476 chamber = "lower" if "H" in bill_id else "upper"
477
478 bill_title = str(sh.cell(rownum, 3).value)
479
480 bill = Bill(
481 bill_id,
482 legislative_session=session,
483 chamber=chamber,
484 title=bill_title,
485 classification=bill_type
486 )
487 bill.add_source(url)
488 bill.add_sponsor('primary', str(sh.cell(rownum, 1).value))
489
490 # add cosponsor
491 if sh.cell(rownum, 2).value:
492 bill.add_sponsor('cosponsor',
493 str(sh.cell(rownum, 2).value))
494
495 actor = ""
496
497 # Actions start column after bill title
498 for colnum in range(4, sh.ncols - 1):
499 action = str(sh.cell(0, colnum).value)
500 cell = sh.cell(rownum, colnum)
501 date = cell.value
502
503 if len(action) != 0:
504 if action.split()[0] == 'House':
505 actor = "lower"
506 elif action.split()[0] == 'Senate':
507 actor = "upper"
508 elif action.split()[-1] == 'Governor':
509 actor = "executive"
510 elif action.split()[0] == 'Gov.':
511 actor = "executive"
512 elif action.split()[-1] == 'Gov.':
513 actor = "executive"
514
515 if action in ('House Intro. Date', 'Senate Intro. Date'):
516 atype = ['bill:introduced']
517 action = action.replace('Intro. Date', 'Introduced')
518 elif action == '3rd Consideration':
519 atype = ['bill:reading:3', 'bill:passed']
520 elif action == 'Sent to Gov.':
521 atype = ['governor:received']
522 elif action == 'Signed By Governor':
523 atype = ['governor:signed']
524 else:
525 atype = ['other']
526
527 if type(date) == float:
528 date = str(xlrd.xldate_as_tuple(date, 0))
529 date = datetime.datetime.strptime(
530 date, "(%Y, %m, %d, %H, %M, %S)")
531 date = self._tz.localize(date)
532 date = "{:%Y-%m-%d}".format(date)
533 bill.add_action(actor, action, date, type=atype)
534
535 for idx, char in enumerate(bill_id):
536 try:
537 int(char)
538 except ValueError:
539 continue
540
541 underscore_bill = bill_id[:idx]+"_"+bill_id[idx:]
542 break
543
544 yield from self.scrape_votes_old(bill, underscore_bill, session)
545 self.scrape_versions_old(bill, underscore_bill, session)
546 yield bill
547
548 def scrape_versions_old(self, bill, billname, session):
549 base_url = 'http://archives.legislature.state.oh.us/'
550
551 if 'R' in billname:
552 piece = '/res.cfm?ID=%s_%s' % (session, billname)
553 else:
554 piece = '/bills.cfm?ID=%s_%s' % (session, billname)
555
556 def _get_html_or_pdf_version_old(url):
557 doc = lxml.html.fromstring(url)
558 name = doc.xpath('//font[@size="2"]/a/text()')[0]
559 html_links = doc.xpath('//a[text()="(.html format)"]')
560 pdf_links = doc.xpath('//a[text()="(.pdf format)"]')
561 if html_links:
562 link = html_links[0].get('href')
563 bill.add_version_link(name, base_url + link, on_duplicate='use_old',
564 media_type='text/html')
565 elif pdf_links:
566 link = pdf_links[0].get('href')
567 bill.add_version_link(name, base_url + link,
568 media_type='application/pdf')
569
570 html = self.get(base_url + piece).text
571 # pass over missing bills - (unclear why this happens)
572 if 'could not be found.' in html:
573 self.warning('missing page: %s' % base_url + piece)
574 return
575
576 _get_html_or_pdf_version_old(html)
577 doc = lxml.html.fromstring(html)
578 for a in doc.xpath('//a[starts-with(@href, "/bills.cfm")]/@href'):
579 if a != piece:
580 _get_html_or_pdf_version_old(self.get(base_url + a).text)
581 for a in doc.xpath('//a[starts-with(@href, "/res.cfm")]/@href'):
582 if a != piece:
583 _get_html_or_pdf_version_old(self.get(base_url + a).text)
584
585 def scrape_votes_old(self, bill, billname, session):
586 vote_url = ('http://archives.legislature.state.oh.us/bills.cfm?ID=' +
587 session + '_' + billname)
588
589 page = self.get(vote_url).text
590 page = lxml.html.fromstring(page)
591
592 for jlink in page.xpath("//a[contains(@href, 'JournalText')]"):
593 date = self._tz.localize(datetime.datetime.strptime(jlink.text,
594 "%m/%d/%Y")).date()
595 date = "{:%Y-%m-%d}".format(date)
596 details = jlink.xpath("string(../../../td[2])")
597
598 chamber = details.split(" - ")[0]
599 if chamber == 'House':
600 chamber = 'lower'
601 elif chamber == 'Senate':
602 chamber = 'upper'
603 else:
604 raise ScrapeError("Bad chamber: %s" % chamber)
605
606 motion = details.split(" - ")[1].split("\n")[0].strip()
607
608 vote_row = jlink.xpath("../../..")[0].getnext()
609
610 yea_div = vote_row.xpath(
611 "td/font/div[contains(@id, 'Yea')]")[0]
612 yeas = []
613 for td in yea_div.xpath("table/tr/td"):
614 name = td.xpath("string()")
615 if name:
616 yeas.append(name)
617
618 no_div = vote_row.xpath(
619 "td/font/div[contains(@id, 'Nay')]")[0]
620 nays = []
621 for td in no_div.xpath("table/tr/td"):
622 name = td.xpath("string()")
623 if name:
624 nays.append(name)
625
626 yes_count = len(yeas)
627 no_count = len(nays)
628
629 vote = VoteEvent(
630 chamber=chamber,
631 start_date=date,
632 motion_text=motion,
633 result='pass' if yes_count > no_count else 'fail',
634 bill=bill,
635 classification='passed'
636 )
637
638 for yes in yeas:
639 vote.yes(yes)
640 for no in nays:
641 vote.no(no)
642
643 vote.add_source(vote_url)
644
645 yield vote
646
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/openstates/oh/bills.py b/openstates/oh/bills.py
--- a/openstates/oh/bills.py
+++ b/openstates/oh/bills.py
@@ -333,8 +333,8 @@
doc = self.get(url.format(chamber=chamber))
leg_json = doc.json()
for leg in leg_json["items"]:
- legislators[leg["med_id"]] = leg["displayname"]
-
+ if leg["med_id"]:
+ legislators[int(leg["med_id"])] = leg["displayname"]
return legislators
def get_sponsor_name(self, sponsor):
| {"golden_diff": "diff --git a/openstates/oh/bills.py b/openstates/oh/bills.py\n--- a/openstates/oh/bills.py\n+++ b/openstates/oh/bills.py\n@@ -333,8 +333,8 @@\n doc = self.get(url.format(chamber=chamber))\n leg_json = doc.json()\n for leg in leg_json[\"items\"]:\n- legislators[leg[\"med_id\"]] = leg[\"displayname\"]\n-\n+ if leg[\"med_id\"]:\n+ legislators[int(leg[\"med_id\"])] = leg[\"displayname\"]\n return legislators\n \n def get_sponsor_name(self, sponsor):\n", "issue": "OH failing since at least 2017-12-23\nOH has been failing since 2017-12-23\n\nBased on automated runs it appears that OH has not run successfully in 2 days (2017-12-23).\n\n\n```\n 23:01:27 INFO pupa: save post 85 as post_7fa5619a-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 86 as post_7fa562c6-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 88 as post_7fa56550-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 87 as post_7fa563fc-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 89 as post_7fa56690-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 90 as post_7fa567d0-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 91 as post_7fa56906-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 92 as post_7fa56a32-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 93 as post_7fa56bb8-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 94 as post_7fa56d02-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 95 as post_7fa56e38-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 97 as post_7fa570c2-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 96 as post_7fa56f8c-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 98 as post_7fa57202-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save post 99 as post_7fa57338-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save organization Democratic as organization_7fafd56c-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO pupa: save organization Republican as organization_7fafbd70-e867-11e7-97ee-0242ac110002.json\n23:01:27 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/chamber/House/legislators?per_page=100\n23:01:27 INFO pupa: no session, using 132\n23:01:57 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/chamber/Senate/legislators?per_page=100\n23:02:06 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/amendments\n23:02:07 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/fiscals\n23:02:09 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/synopsiss\n23:02:09 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/analysiss\n23:02:11 INFO scrapelib: GET - https://www.legislature.ohio.gov/legislation?pageSize=500&start=1&sort=LegislationNumber&dir=asc&statusCode&generalAssemblies=132\n23:02:25 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/resolutions/hjr1/\n23:02:26 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/resolutions/hjr1/actions\n23:02:27 INFO scrapelib: GET - http://search-prod.lis.state.oh.us/solarapi/v1/general_assembly_132/resolutions/hjr1/votes\nno pupa_settings on path, using defaults\noh (scrape, import)\n bills: {}\n people: {}\n return self.do_handle(args, other, 
juris)\n File \"/opt/openstates/venv-pupa//bin/pupa\", line 11, in <module>\n load_entry_point('pupa', 'console_scripts', 'pupa')()\n File \"/opt/openstates/venv-pupa/src/pupa/pupa/cli/__main__.py\", line 67, in main\n subcommands[args.subcommand].handle(args, other)\n File \"/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py\", line 260, in handle\nTraceback (most recent call last):\n File \"/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py\", line 305, in do_handle\n report['scrape'] = self.do_scrape(juris, args, scrapers)\n File \"/opt/openstates/venv-pupa/src/pupa/pupa/cli/commands/update.py\", line 173, in do_scrape\n report[scraper_name] = scraper.do_scrape(**scrape_args)\n File \"/opt/openstates/venv-pupa/src/pupa/pupa/scrape/base.py\", line 111, in do_scrape\n for obj in self.scrape(**kwargs) or []:\n File \"/opt/openstates/openstates/openstates/oh/bills.py\", line 200, in scrape\n chamber_dict, vote_results)\n File \"/opt/openstates/openstates/openstates/oh/bills.py\", line 415, in process_vote\n vote.yes(legislators[voter_id])\nKeyError: 1605\n```\n\nVisit http://bobsled.openstates.org for more info.\n\n", "before_files": [{"content": "import os\nimport datetime\n\nfrom pupa.scrape import Scraper, Bill, VoteEvent\nfrom pupa.scrape.base import ScrapeError\n\nimport xlrd\nimport scrapelib\nimport lxml.html\nimport pytz\n\n\nclass OHBillScraper(Scraper):\n _tz = pytz.timezone('US/Eastern')\n\n def scrape(self, session=None, chambers=None):\n # Bills endpoint can sometimes take a very long time to load\n self.timeout = 300\n\n if not session:\n session = self.latest_session()\n self.info('no session, using %s', session)\n\n if int(session) < 128:\n raise AssertionError(\"No data for period {}\".format(session))\n\n elif int(session) < 131:\n # they changed their data format starting in 131st and added\n # an undocumented API\n yield from self.old_scrape(session)\n\n else:\n chamber_dict = {\"Senate\": \"upper\", \"House\": \"lower\",\n \"House of Representatives\": \"lower\",\n \"house\": \"lower\", \"senate\": \"upper\"}\n\n # so presumanbly not everything passes, but we haven't\n # seen anything not pass yet, so we'll need to wait\n # till it fails and get the right language in here\n vote_results = {\"approved\": True,\n \"passed\": True,\n \"adopted\": True,\n \"true\": True,\n \"false\": False,\n \"failed\": False,\n True: True,\n False: False}\n\n action_dict = {\"ref_ctte_100\": \"referral-committee\",\n \"intro_100\": \"introduction\",\n \"pass_300\": \"passage\",\n \"intro_110\": \"reading-1\",\n \"refer_210\": \"referral-committee\",\n \"crpt_301\": None,\n \"crpt_317\": None,\n \"concur_606\": \"passage\",\n \"pass_301\": \"passage\",\n \"refer_220\": \"referral-committee\",\n \"intro_102\": [\"introduction\", \"passage\"],\n \"intro_105\": [\"introduction\", \"passage\"],\n \"intro_ref_ctte_100\": \"referral-committee\",\n \"refer_209\": None,\n \"intro_108\": [\"introduction\", \"passage\"],\n \"intro_103\": [\"introduction\", \"passage\"],\n \"msg_reso_503\": \"passage\",\n \"intro_107\": [\"introduction\", \"passage\"],\n \"imm_consid_360\": \"passage\",\n \"refer_213\": None,\n \"adopt_reso_100\": \"passage\",\n \"msg_507\": \"amendment-passage\",\n \"confer_713\": None,\n \"concur_603\": None,\n \"confer_712\": None,\n \"msg_506\": \"amendment-failure\",\n \"receive_message_100\": \"passage\",\n \"motion_920\": None,\n \"concur_611\": None,\n \"confer_735\": None\n }\n\n base_url = \"http://search-prod.lis.state.oh.us\"\n first_page = 
base_url\n first_page += \"/solarapi/v1/general_assembly_{session}/\".format(session=session)\n legislators = self.get_legislator_ids(first_page)\n all_amendments = self.get_other_data_source(first_page, base_url, \"amendments\")\n all_fiscals = self.get_other_data_source(first_page, base_url, \"fiscals\")\n all_synopsis = self.get_other_data_source(first_page, base_url, \"synopsiss\")\n all_analysis = self.get_other_data_source(first_page, base_url, \"analysiss\")\n\n for row in self.get_bill_rows(session):\n number_link, ga, title, primary_sponsor, status = row.xpath('td')\n\n bill_id = number_link.text_content()\n title = title.text_content().strip()\n chamber = 'lower' if 'H' in bill_id else 'upper'\n classification = 'bill' if 'B' in bill_id else 'resolution'\n\n bill = Bill(bill_id, legislative_session=session, chamber=chamber,\n title=title, classification=classification)\n bill.add_source(number_link.xpath('a/@href')[0])\n\n # get bill from API\n bill_api_url = ('http://search-prod.lis.state.oh.us/solarapi/v1/'\n 'general_assembly_{}/{}/{}/'.format(\n session,\n 'bills' if 'B' in bill_id else 'resolutions',\n bill_id.lower().replace(' ', '')\n ))\n data = self.get(bill_api_url).json()\n\n # add title if no short title\n if not bill.title:\n bill.title = data['items'][0]['longtitle']\n bill.add_title(data['items'][0]['longtitle'], 'long title')\n\n # this stuff is version-specific\n for version in data['items']:\n version_name = version[\"version\"]\n version_link = base_url+version[\"pdfDownloadLink\"]\n bill.add_version_link(version_name, version_link, media_type='application/pdf')\n\n # we'll use latest bill_version for everything else\n bill_version = data['items'][0]\n bill.add_source(bill_api_url)\n\n # subjects\n for subj in bill_version[\"subjectindexes\"]:\n try:\n bill.add_subject(subj[\"primary\"])\n except KeyError:\n pass\n try:\n secondary_subj = subj[\"secondary\"]\n except KeyError:\n secondary_subj = \"\"\n if secondary_subj:\n bill.add_subject(secondary_subj)\n\n # sponsors\n sponsors = bill_version[\"sponsors\"]\n for sponsor in sponsors:\n sponsor_name = self.get_sponsor_name(sponsor)\n bill.add_sponsorship(\n sponsor_name,\n classification='primary',\n entity_type='person',\n primary=True\n )\n\n cosponsors = bill_version[\"cosponsors\"]\n for sponsor in cosponsors:\n sponsor_name = self.get_sponsor_name(sponsor)\n bill.add_sponsorship(\n sponsor_name,\n classification='cosponsor',\n entity_type='person',\n primary=False,\n )\n\n try:\n action_doc = self.get(base_url+bill_version[\"action\"][0][\"link\"])\n except scrapelib.HTTPError:\n pass\n else:\n\n actions = action_doc.json()\n for action in reversed(actions[\"items\"]):\n actor = chamber_dict[action[\"chamber\"]]\n action_desc = action[\"description\"]\n try:\n action_type = action_dict[action[\"actioncode\"]]\n except KeyError:\n self.warning(\"Unknown action {desc} with code {code}.\"\n \" Add it to the action_dict\"\n \".\".format(desc=action_desc,\n code=action[\"actioncode\"]))\n action_type = None\n\n date = self._tz.localize(datetime.datetime.strptime(\n action[\"datetime\"],\n \"%Y-%m-%dT%H:%M:%S\"))\n date = \"{:%Y-%m-%d}\".format(date)\n\n bill.add_action(action_desc,\n date, chamber=actor,\n classification=action_type)\n\n # attach documents gathered earlier\n self.add_document(all_amendments, bill_id, \"amendment\", bill, base_url)\n self.add_document(all_fiscals, bill_id, \"fiscal\", bill, base_url)\n self.add_document(all_synopsis, bill_id, \"synopsis\", bill, base_url)\n 
self.add_document(all_analysis, bill_id, \"analysis\", bill, base_url)\n\n # votes\n vote_url = base_url+bill_version[\"votes\"][0][\"link\"]\n vote_doc = self.get(vote_url)\n votes = vote_doc.json()\n yield from self.process_vote(votes, vote_url,\n base_url, bill, legislators,\n chamber_dict, vote_results)\n\n vote_url = base_url\n vote_url += bill_version[\"cmtevotes\"][0][\"link\"]\n try:\n vote_doc = self.get(vote_url)\n except scrapelib.HTTPError:\n self.warning(\"Vote page not \"\n \"loading; skipping: {}\".format(vote_url))\n continue\n votes = vote_doc.json()\n yield from self.process_vote(votes, vote_url,\n base_url, bill, legislators,\n chamber_dict, vote_results)\n\n # we have never seen a veto or a disapprove, but they seem important.\n # so we'll check and throw an error if we find one\n # life is fragile. so are our scrapers.\n if \"veto\" in bill_version:\n veto_url = base_url+bill_version[\"veto\"][0][\"link\"]\n veto_json = self.get(veto_url).json()\n if len(veto_json[\"items\"]) > 0:\n raise AssertionError(\"Whoa, a veto! We've never\"\n \" gotten one before.\"\n \" Go write some code to deal\"\n \" with it: {}\".format(veto_url))\n\n if \"disapprove\" in bill_version:\n disapprove_url = base_url+bill_version[\"disapprove\"][0][\"link\"]\n disapprove_json = self.get(disapprove_url).json()\n if len(disapprove_json[\"items\"]) > 0:\n raise AssertionError(\"Whoa, a disapprove! We've never\"\n \" gotten one before.\"\n \" Go write some code to deal \"\n \"with it: {}\".format(disapprove_url))\n\n yield bill\n\n def pages(self, base_url, first_page):\n page = self.get(first_page)\n page = page.json()\n yield page\n while \"nextLink\" in page:\n page = self.get(base_url+page[\"nextLink\"])\n page = page.json()\n yield page\n\n def get_bill_rows(self, session, start=1):\n # bill API endpoint times out so we're now getting this from the normal search\n bill_url = ('https://www.legislature.ohio.gov/legislation?pageSize=500&start={}&'\n 'sort=LegislationNumber&dir=asc&statusCode&generalAssemblies={}'.format(\n start, session)\n )\n doc = self.get(bill_url)\n doc = lxml.html.fromstring(doc.text)\n doc.make_links_absolute(bill_url)\n\n rows = doc.xpath('//tr')[1:]\n yield from rows\n if len(rows) == 500:\n yield from self.get_bill_rows(session, start+500)\n # if page is full, get next page - could use pagination info in\n # //div[id=\"searchResultsInfo\"] to improve this\n\n def get_other_data_source(self, first_page, base_url, source_name):\n # produces a dictionary from bill_id to a list of\n # one of the following:\n # amendments, analysis, fiscals, synopsis\n # could pull these by bill, but doing it in bulk\n # and then matching on our end will get us by with way fewer\n # api calls\n\n bill_dict = {}\n for page in self.pages(base_url, first_page+source_name):\n for item in page[\"items\"]:\n billno = item[\"billno\"]\n if billno not in bill_dict:\n bill_dict[billno] = []\n bill_dict[billno].append(item)\n\n return bill_dict\n\n def add_document(self, documents, bill_id, type_of_document, bill, base_url):\n try:\n documents = documents[bill_id]\n except KeyError:\n return\n\n leg_ver_types = {\"IN\": \"Introduction\",\n \"RS\": \"Reported: Senate\",\n \"PS\": \"Passed: Senate\",\n \"RH\": \"Reported: House\",\n \"PH\": \"Passed: House\",\n \"\": \"\",\n \"ICS\": \"\",\n \"IC\": \"\",\n \"RCS\": \"\",\n \"EN\": \"Enacted\",\n \"RCH\": \"Re-referred\",\n \"RRH\": \"\",\n \"PHC\": \"\",\n \"CR\": \"\"\n }\n\n for item in documents:\n if type_of_document == \"amendment\":\n name = 
item[\"amendnum\"] + \" \" + item[\"version\"]\n else:\n name = item[\"name\"] or type_of_document\n link = base_url+item[\"link\"]+\"?format=pdf\"\n try:\n self.head(link)\n except scrapelib.HTTPError:\n self.logger.warning(\"The link to doc {name}\"\n \" does not exist, skipping\".format(name=name))\n continue\n if \"legacyver\" in item:\n try:\n ver = leg_ver_types[item[\"legacyver\"]]\n except KeyError:\n self.logger.warning(\n \"New legacyver; check the type and add it to the \"\n \"leg_ver_types dictionary: {} ({})\".format(\n item[\"legacyver\"], item['link']))\n ver = \"\"\n if ver:\n name = name+\": \"+ver\n bill.add_document_link(name, link, media_type=\"application/pdf\")\n\n def get_legislator_ids(self, base_url):\n legislators = {}\n for chamber in [\"House\", \"Senate\"]:\n url = base_url+\"chamber/{chamber}/legislators?per_page=100\"\n doc = self.get(url.format(chamber=chamber))\n leg_json = doc.json()\n for leg in leg_json[\"items\"]:\n legislators[leg[\"med_id\"]] = leg[\"displayname\"]\n\n return legislators\n\n def get_sponsor_name(self, sponsor):\n return \" \".join([sponsor[\"firstname\"], sponsor[\"lastname\"]])\n\n def process_vote(self, votes, url, base_url, bill, legislators, chamber_dict, vote_results):\n for v in votes[\"items\"]:\n try:\n v[\"yeas\"]\n except KeyError:\n # sometimes the actual vote is buried a second layer deep\n v = self.get(base_url+v[\"link\"]).json()\n try:\n v[\"yeas\"]\n except KeyError:\n self.logger.warning(\"No vote info available, skipping\")\n continue\n\n try:\n chamber = chamber_dict[v[\"chamber\"]]\n except KeyError:\n chamber = \"lower\" if \"house\" in v[\"apn\"] else \"upper\"\n try:\n date = self._tz.localize(datetime.datetime.strptime(v[\"date\"], \"%m/%d/%y\"))\n date = \"{:%Y-%m-%d}\".format(date)\n except KeyError:\n try:\n date = self._tz.localize(datetime.datetime.strptime(v[\"occurred\"], \"%m/%d/%y\"))\n date = \"{:%Y-%m-%d}\".format(date)\n except KeyError:\n self.logger.warning(\"No date found for vote, skipping\")\n continue\n try:\n motion = v[\"action\"]\n except KeyError:\n motion = v[\"motiontype\"]\n\n # Sometimes Ohio's SOLAR will only return part of the JSON, so in that case skip\n if (not motion and isinstance(v['yeas'], str)\n and isinstance(v['nays'], str)):\n waringText = 'Malformed JSON found for vote (\"revno\" of {}); skipping'\n self.warning(waringText.format(v['revno']))\n continue\n\n result = v.get(\"results\") or v.get(\"passed\")\n if result is None:\n if len(v['yeas']) > len(v['nays']):\n result = \"passed\"\n else:\n result = \"failed\"\n\n passed = vote_results[result.lower()]\n if \"committee\" in v:\n vote = VoteEvent(chamber=chamber,\n start_date=date,\n motion_text=motion,\n result='pass' if passed else 'fail',\n # organization=v[\"committee\"],\n bill=bill,\n classification='passed'\n )\n else:\n vote = VoteEvent(chamber=chamber,\n start_date=date,\n motion_text=motion,\n result='pass' if passed else 'fail',\n classification='passed',\n bill=bill\n )\n vote.pupa_id = str(v['revno'])\n # the yea and nay counts are not displayed, but vote totals are\n # and passage status is.\n yes_count = 0\n no_count = 0\n absent_count = 0\n excused_count = 0\n for voter_id in v[\"yeas\"]:\n vote.yes(legislators[voter_id])\n yes_count += 1\n for voter_id in v[\"nays\"]:\n vote.no(legislators[voter_id])\n no_count += 1\n if \"absent\" in v:\n for voter_id in v[\"absent\"]:\n vote.vote('absent', legislators[voter_id])\n absent_count += 1\n if \"excused\" in v:\n for voter_id in v[\"excused\"]:\n 
vote.vote('excused', legislators[voter_id])\n excused_count += 1\n\n vote.set_count('yes', yes_count)\n vote.set_count('no', no_count)\n vote.set_count('absent', absent_count)\n vote.set_count('excused', excused_count)\n # check to see if there are any other things that look\n # like vote categories, throw a warning if so\n for key, val in v.items():\n if (type(val) == list and len(val) > 0 and\n key not in [\"yeas\", \"nays\", \"absent\", \"excused\"]):\n if val[0] in legislators:\n self.logger.warning(\"{k} looks like a vote type that's not being counted.\"\n \" Double check it?\".format(k=key))\n vote.add_source(url)\n\n yield vote\n\n def old_scrape(self, session=None):\n status_report_url = \"http://www.legislature.ohio.gov/legislation/status-reports\"\n\n # ssl verification off due Ohio not correctly implementing SSL\n if not session:\n session = self.latest_session()\n self.info('no session, using %s', session)\n\n doc = self.get(status_report_url, verify=False).text\n doc = lxml.html.fromstring(doc)\n doc.make_links_absolute(status_report_url)\n xpath = \"//div[contains(text(),'{}')]/following-sibling::table\"\n status_table = doc.xpath(xpath.format(session))[0]\n status_links = status_table.xpath(\".//a[contains(text(),'Excel')]/@href\")\n\n for url in status_links:\n\n try:\n fname, resp = self.urlretrieve(url)\n except scrapelib.HTTPError as report:\n self.logger.warning(\"Missing report {}\".format(report))\n continue\n\n sh = xlrd.open_workbook(fname).sheet_by_index(0)\n\n # once workbook is open, we can remove tempfile\n os.remove(fname)\n for rownum in range(1, sh.nrows):\n bill_id = sh.cell(rownum, 0).value\n\n bill_type = \"resolution\" if \"R\" in bill_id else \"bill\"\n chamber = \"lower\" if \"H\" in bill_id else \"upper\"\n\n bill_title = str(sh.cell(rownum, 3).value)\n\n bill = Bill(\n bill_id,\n legislative_session=session,\n chamber=chamber,\n title=bill_title,\n classification=bill_type\n )\n bill.add_source(url)\n bill.add_sponsor('primary', str(sh.cell(rownum, 1).value))\n\n # add cosponsor\n if sh.cell(rownum, 2).value:\n bill.add_sponsor('cosponsor',\n str(sh.cell(rownum, 2).value))\n\n actor = \"\"\n\n # Actions start column after bill title\n for colnum in range(4, sh.ncols - 1):\n action = str(sh.cell(0, colnum).value)\n cell = sh.cell(rownum, colnum)\n date = cell.value\n\n if len(action) != 0:\n if action.split()[0] == 'House':\n actor = \"lower\"\n elif action.split()[0] == 'Senate':\n actor = \"upper\"\n elif action.split()[-1] == 'Governor':\n actor = \"executive\"\n elif action.split()[0] == 'Gov.':\n actor = \"executive\"\n elif action.split()[-1] == 'Gov.':\n actor = \"executive\"\n\n if action in ('House Intro. Date', 'Senate Intro. Date'):\n atype = ['bill:introduced']\n action = action.replace('Intro. 
Date', 'Introduced')\n elif action == '3rd Consideration':\n atype = ['bill:reading:3', 'bill:passed']\n elif action == 'Sent to Gov.':\n atype = ['governor:received']\n elif action == 'Signed By Governor':\n atype = ['governor:signed']\n else:\n atype = ['other']\n\n if type(date) == float:\n date = str(xlrd.xldate_as_tuple(date, 0))\n date = datetime.datetime.strptime(\n date, \"(%Y, %m, %d, %H, %M, %S)\")\n date = self._tz.localize(date)\n date = \"{:%Y-%m-%d}\".format(date)\n bill.add_action(actor, action, date, type=atype)\n\n for idx, char in enumerate(bill_id):\n try:\n int(char)\n except ValueError:\n continue\n\n underscore_bill = bill_id[:idx]+\"_\"+bill_id[idx:]\n break\n\n yield from self.scrape_votes_old(bill, underscore_bill, session)\n self.scrape_versions_old(bill, underscore_bill, session)\n yield bill\n\n def scrape_versions_old(self, bill, billname, session):\n base_url = 'http://archives.legislature.state.oh.us/'\n\n if 'R' in billname:\n piece = '/res.cfm?ID=%s_%s' % (session, billname)\n else:\n piece = '/bills.cfm?ID=%s_%s' % (session, billname)\n\n def _get_html_or_pdf_version_old(url):\n doc = lxml.html.fromstring(url)\n name = doc.xpath('//font[@size=\"2\"]/a/text()')[0]\n html_links = doc.xpath('//a[text()=\"(.html format)\"]')\n pdf_links = doc.xpath('//a[text()=\"(.pdf format)\"]')\n if html_links:\n link = html_links[0].get('href')\n bill.add_version_link(name, base_url + link, on_duplicate='use_old',\n media_type='text/html')\n elif pdf_links:\n link = pdf_links[0].get('href')\n bill.add_version_link(name, base_url + link,\n media_type='application/pdf')\n\n html = self.get(base_url + piece).text\n # pass over missing bills - (unclear why this happens)\n if 'could not be found.' in html:\n self.warning('missing page: %s' % base_url + piece)\n return\n\n _get_html_or_pdf_version_old(html)\n doc = lxml.html.fromstring(html)\n for a in doc.xpath('//a[starts-with(@href, \"/bills.cfm\")]/@href'):\n if a != piece:\n _get_html_or_pdf_version_old(self.get(base_url + a).text)\n for a in doc.xpath('//a[starts-with(@href, \"/res.cfm\")]/@href'):\n if a != piece:\n _get_html_or_pdf_version_old(self.get(base_url + a).text)\n\n def scrape_votes_old(self, bill, billname, session):\n vote_url = ('http://archives.legislature.state.oh.us/bills.cfm?ID=' +\n session + '_' + billname)\n\n page = self.get(vote_url).text\n page = lxml.html.fromstring(page)\n\n for jlink in page.xpath(\"//a[contains(@href, 'JournalText')]\"):\n date = self._tz.localize(datetime.datetime.strptime(jlink.text,\n \"%m/%d/%Y\")).date()\n date = \"{:%Y-%m-%d}\".format(date)\n details = jlink.xpath(\"string(../../../td[2])\")\n\n chamber = details.split(\" - \")[0]\n if chamber == 'House':\n chamber = 'lower'\n elif chamber == 'Senate':\n chamber = 'upper'\n else:\n raise ScrapeError(\"Bad chamber: %s\" % chamber)\n\n motion = details.split(\" - \")[1].split(\"\\n\")[0].strip()\n\n vote_row = jlink.xpath(\"../../..\")[0].getnext()\n\n yea_div = vote_row.xpath(\n \"td/font/div[contains(@id, 'Yea')]\")[0]\n yeas = []\n for td in yea_div.xpath(\"table/tr/td\"):\n name = td.xpath(\"string()\")\n if name:\n yeas.append(name)\n\n no_div = vote_row.xpath(\n \"td/font/div[contains(@id, 'Nay')]\")[0]\n nays = []\n for td in no_div.xpath(\"table/tr/td\"):\n name = td.xpath(\"string()\")\n if name:\n nays.append(name)\n\n yes_count = len(yeas)\n no_count = len(nays)\n\n vote = VoteEvent(\n chamber=chamber,\n start_date=date,\n motion_text=motion,\n result='pass' if yes_count > no_count else 'fail',\n bill=bill,\n 
classification='passed'\n )\n\n for yes in yeas:\n vote.yes(yes)\n for no in nays:\n vote.no(no)\n\n vote.add_source(vote_url)\n\n yield vote\n", "path": "openstates/oh/bills.py"}], "after_files": [{"content": "import os\nimport datetime\n\nfrom pupa.scrape import Scraper, Bill, VoteEvent\nfrom pupa.scrape.base import ScrapeError\n\nimport xlrd\nimport scrapelib\nimport lxml.html\nimport pytz\n\n\nclass OHBillScraper(Scraper):\n _tz = pytz.timezone('US/Eastern')\n\n def scrape(self, session=None, chambers=None):\n # Bills endpoint can sometimes take a very long time to load\n self.timeout = 300\n\n if not session:\n session = self.latest_session()\n self.info('no session, using %s', session)\n\n if int(session) < 128:\n raise AssertionError(\"No data for period {}\".format(session))\n\n elif int(session) < 131:\n # they changed their data format starting in 131st and added\n # an undocumented API\n yield from self.old_scrape(session)\n\n else:\n chamber_dict = {\"Senate\": \"upper\", \"House\": \"lower\",\n \"House of Representatives\": \"lower\",\n \"house\": \"lower\", \"senate\": \"upper\"}\n\n # so presumanbly not everything passes, but we haven't\n # seen anything not pass yet, so we'll need to wait\n # till it fails and get the right language in here\n vote_results = {\"approved\": True,\n \"passed\": True,\n \"adopted\": True,\n \"true\": True,\n \"false\": False,\n \"failed\": False,\n True: True,\n False: False}\n\n action_dict = {\"ref_ctte_100\": \"referral-committee\",\n \"intro_100\": \"introduction\",\n \"pass_300\": \"passage\",\n \"intro_110\": \"reading-1\",\n \"refer_210\": \"referral-committee\",\n \"crpt_301\": None,\n \"crpt_317\": None,\n \"concur_606\": \"passage\",\n \"pass_301\": \"passage\",\n \"refer_220\": \"referral-committee\",\n \"intro_102\": [\"introduction\", \"passage\"],\n \"intro_105\": [\"introduction\", \"passage\"],\n \"intro_ref_ctte_100\": \"referral-committee\",\n \"refer_209\": None,\n \"intro_108\": [\"introduction\", \"passage\"],\n \"intro_103\": [\"introduction\", \"passage\"],\n \"msg_reso_503\": \"passage\",\n \"intro_107\": [\"introduction\", \"passage\"],\n \"imm_consid_360\": \"passage\",\n \"refer_213\": None,\n \"adopt_reso_100\": \"passage\",\n \"msg_507\": \"amendment-passage\",\n \"confer_713\": None,\n \"concur_603\": None,\n \"confer_712\": None,\n \"msg_506\": \"amendment-failure\",\n \"receive_message_100\": \"passage\",\n \"motion_920\": None,\n \"concur_611\": None,\n \"confer_735\": None\n }\n\n base_url = \"http://search-prod.lis.state.oh.us\"\n first_page = base_url\n first_page += \"/solarapi/v1/general_assembly_{session}/\".format(session=session)\n legislators = self.get_legislator_ids(first_page)\n all_amendments = self.get_other_data_source(first_page, base_url, \"amendments\")\n all_fiscals = self.get_other_data_source(first_page, base_url, \"fiscals\")\n all_synopsis = self.get_other_data_source(first_page, base_url, \"synopsiss\")\n all_analysis = self.get_other_data_source(first_page, base_url, \"analysiss\")\n\n for row in self.get_bill_rows(session):\n number_link, ga, title, primary_sponsor, status = row.xpath('td')\n\n bill_id = number_link.text_content()\n title = title.text_content().strip()\n chamber = 'lower' if 'H' in bill_id else 'upper'\n classification = 'bill' if 'B' in bill_id else 'resolution'\n\n bill = Bill(bill_id, legislative_session=session, chamber=chamber,\n title=title, classification=classification)\n bill.add_source(number_link.xpath('a/@href')[0])\n\n # get bill from API\n bill_api_url = 
('http://search-prod.lis.state.oh.us/solarapi/v1/'\n 'general_assembly_{}/{}/{}/'.format(\n session,\n 'bills' if 'B' in bill_id else 'resolutions',\n bill_id.lower().replace(' ', '')\n ))\n data = self.get(bill_api_url).json()\n\n # add title if no short title\n if not bill.title:\n bill.title = data['items'][0]['longtitle']\n bill.add_title(data['items'][0]['longtitle'], 'long title')\n\n # this stuff is version-specific\n for version in data['items']:\n version_name = version[\"version\"]\n version_link = base_url+version[\"pdfDownloadLink\"]\n bill.add_version_link(version_name, version_link, media_type='application/pdf')\n\n # we'll use latest bill_version for everything else\n bill_version = data['items'][0]\n bill.add_source(bill_api_url)\n\n # subjects\n for subj in bill_version[\"subjectindexes\"]:\n try:\n bill.add_subject(subj[\"primary\"])\n except KeyError:\n pass\n try:\n secondary_subj = subj[\"secondary\"]\n except KeyError:\n secondary_subj = \"\"\n if secondary_subj:\n bill.add_subject(secondary_subj)\n\n # sponsors\n sponsors = bill_version[\"sponsors\"]\n for sponsor in sponsors:\n sponsor_name = self.get_sponsor_name(sponsor)\n bill.add_sponsorship(\n sponsor_name,\n classification='primary',\n entity_type='person',\n primary=True\n )\n\n cosponsors = bill_version[\"cosponsors\"]\n for sponsor in cosponsors:\n sponsor_name = self.get_sponsor_name(sponsor)\n bill.add_sponsorship(\n sponsor_name,\n classification='cosponsor',\n entity_type='person',\n primary=False,\n )\n\n try:\n action_doc = self.get(base_url+bill_version[\"action\"][0][\"link\"])\n except scrapelib.HTTPError:\n pass\n else:\n\n actions = action_doc.json()\n for action in reversed(actions[\"items\"]):\n actor = chamber_dict[action[\"chamber\"]]\n action_desc = action[\"description\"]\n try:\n action_type = action_dict[action[\"actioncode\"]]\n except KeyError:\n self.warning(\"Unknown action {desc} with code {code}.\"\n \" Add it to the action_dict\"\n \".\".format(desc=action_desc,\n code=action[\"actioncode\"]))\n action_type = None\n\n date = self._tz.localize(datetime.datetime.strptime(\n action[\"datetime\"],\n \"%Y-%m-%dT%H:%M:%S\"))\n date = \"{:%Y-%m-%d}\".format(date)\n\n bill.add_action(action_desc,\n date, chamber=actor,\n classification=action_type)\n\n # attach documents gathered earlier\n self.add_document(all_amendments, bill_id, \"amendment\", bill, base_url)\n self.add_document(all_fiscals, bill_id, \"fiscal\", bill, base_url)\n self.add_document(all_synopsis, bill_id, \"synopsis\", bill, base_url)\n self.add_document(all_analysis, bill_id, \"analysis\", bill, base_url)\n\n # votes\n vote_url = base_url+bill_version[\"votes\"][0][\"link\"]\n vote_doc = self.get(vote_url)\n votes = vote_doc.json()\n yield from self.process_vote(votes, vote_url,\n base_url, bill, legislators,\n chamber_dict, vote_results)\n\n vote_url = base_url\n vote_url += bill_version[\"cmtevotes\"][0][\"link\"]\n try:\n vote_doc = self.get(vote_url)\n except scrapelib.HTTPError:\n self.warning(\"Vote page not \"\n \"loading; skipping: {}\".format(vote_url))\n continue\n votes = vote_doc.json()\n yield from self.process_vote(votes, vote_url,\n base_url, bill, legislators,\n chamber_dict, vote_results)\n\n # we have never seen a veto or a disapprove, but they seem important.\n # so we'll check and throw an error if we find one\n # life is fragile. 
so are our scrapers.\n if \"veto\" in bill_version:\n veto_url = base_url+bill_version[\"veto\"][0][\"link\"]\n veto_json = self.get(veto_url).json()\n if len(veto_json[\"items\"]) > 0:\n raise AssertionError(\"Whoa, a veto! We've never\"\n \" gotten one before.\"\n \" Go write some code to deal\"\n \" with it: {}\".format(veto_url))\n\n if \"disapprove\" in bill_version:\n disapprove_url = base_url+bill_version[\"disapprove\"][0][\"link\"]\n disapprove_json = self.get(disapprove_url).json()\n if len(disapprove_json[\"items\"]) > 0:\n raise AssertionError(\"Whoa, a disapprove! We've never\"\n \" gotten one before.\"\n \" Go write some code to deal \"\n \"with it: {}\".format(disapprove_url))\n\n yield bill\n\n def pages(self, base_url, first_page):\n page = self.get(first_page)\n page = page.json()\n yield page\n while \"nextLink\" in page:\n page = self.get(base_url+page[\"nextLink\"])\n page = page.json()\n yield page\n\n def get_bill_rows(self, session, start=1):\n # bill API endpoint times out so we're now getting this from the normal search\n bill_url = ('https://www.legislature.ohio.gov/legislation?pageSize=500&start={}&'\n 'sort=LegislationNumber&dir=asc&statusCode&generalAssemblies={}'.format(\n start, session)\n )\n doc = self.get(bill_url)\n doc = lxml.html.fromstring(doc.text)\n doc.make_links_absolute(bill_url)\n\n rows = doc.xpath('//tr')[1:]\n yield from rows\n if len(rows) == 500:\n yield from self.get_bill_rows(session, start+500)\n # if page is full, get next page - could use pagination info in\n # //div[id=\"searchResultsInfo\"] to improve this\n\n def get_other_data_source(self, first_page, base_url, source_name):\n # produces a dictionary from bill_id to a list of\n # one of the following:\n # amendments, analysis, fiscals, synopsis\n # could pull these by bill, but doing it in bulk\n # and then matching on our end will get us by with way fewer\n # api calls\n\n bill_dict = {}\n for page in self.pages(base_url, first_page+source_name):\n for item in page[\"items\"]:\n billno = item[\"billno\"]\n if billno not in bill_dict:\n bill_dict[billno] = []\n bill_dict[billno].append(item)\n\n return bill_dict\n\n def add_document(self, documents, bill_id, type_of_document, bill, base_url):\n try:\n documents = documents[bill_id]\n except KeyError:\n return\n\n leg_ver_types = {\"IN\": \"Introduction\",\n \"RS\": \"Reported: Senate\",\n \"PS\": \"Passed: Senate\",\n \"RH\": \"Reported: House\",\n \"PH\": \"Passed: House\",\n \"\": \"\",\n \"ICS\": \"\",\n \"IC\": \"\",\n \"RCS\": \"\",\n \"EN\": \"Enacted\",\n \"RCH\": \"Re-referred\",\n \"RRH\": \"\",\n \"PHC\": \"\",\n \"CR\": \"\"\n }\n\n for item in documents:\n if type_of_document == \"amendment\":\n name = item[\"amendnum\"] + \" \" + item[\"version\"]\n else:\n name = item[\"name\"] or type_of_document\n link = base_url+item[\"link\"]+\"?format=pdf\"\n try:\n self.head(link)\n except scrapelib.HTTPError:\n self.logger.warning(\"The link to doc {name}\"\n \" does not exist, skipping\".format(name=name))\n continue\n if \"legacyver\" in item:\n try:\n ver = leg_ver_types[item[\"legacyver\"]]\n except KeyError:\n self.logger.warning(\n \"New legacyver; check the type and add it to the \"\n \"leg_ver_types dictionary: {} ({})\".format(\n item[\"legacyver\"], item['link']))\n ver = \"\"\n if ver:\n name = name+\": \"+ver\n bill.add_document_link(name, link, media_type=\"application/pdf\")\n\n def get_legislator_ids(self, base_url):\n legislators = {}\n for chamber in [\"House\", \"Senate\"]:\n url = 
base_url+\"chamber/{chamber}/legislators?per_page=100\"\n doc = self.get(url.format(chamber=chamber))\n leg_json = doc.json()\n for leg in leg_json[\"items\"]:\n if leg[\"med_id\"]:\n legislators[int(leg[\"med_id\"])] = leg[\"displayname\"]\n return legislators\n\n def get_sponsor_name(self, sponsor):\n return \" \".join([sponsor[\"firstname\"], sponsor[\"lastname\"]])\n\n def process_vote(self, votes, url, base_url, bill, legislators, chamber_dict, vote_results):\n for v in votes[\"items\"]:\n try:\n v[\"yeas\"]\n except KeyError:\n # sometimes the actual vote is buried a second layer deep\n v = self.get(base_url+v[\"link\"]).json()\n try:\n v[\"yeas\"]\n except KeyError:\n self.logger.warning(\"No vote info available, skipping\")\n continue\n\n try:\n chamber = chamber_dict[v[\"chamber\"]]\n except KeyError:\n chamber = \"lower\" if \"house\" in v[\"apn\"] else \"upper\"\n try:\n date = self._tz.localize(datetime.datetime.strptime(v[\"date\"], \"%m/%d/%y\"))\n date = \"{:%Y-%m-%d}\".format(date)\n except KeyError:\n try:\n date = self._tz.localize(datetime.datetime.strptime(v[\"occurred\"], \"%m/%d/%y\"))\n date = \"{:%Y-%m-%d}\".format(date)\n except KeyError:\n self.logger.warning(\"No date found for vote, skipping\")\n continue\n try:\n motion = v[\"action\"]\n except KeyError:\n motion = v[\"motiontype\"]\n\n # Sometimes Ohio's SOLAR will only return part of the JSON, so in that case skip\n if (not motion and isinstance(v['yeas'], str)\n and isinstance(v['nays'], str)):\n waringText = 'Malformed JSON found for vote (\"revno\" of {}); skipping'\n self.warning(waringText.format(v['revno']))\n continue\n\n result = v.get(\"results\") or v.get(\"passed\")\n if result is None:\n if len(v['yeas']) > len(v['nays']):\n result = \"passed\"\n else:\n result = \"failed\"\n\n passed = vote_results[result.lower()]\n if \"committee\" in v:\n vote = VoteEvent(chamber=chamber,\n start_date=date,\n motion_text=motion,\n result='pass' if passed else 'fail',\n # organization=v[\"committee\"],\n bill=bill,\n classification='passed'\n )\n else:\n vote = VoteEvent(chamber=chamber,\n start_date=date,\n motion_text=motion,\n result='pass' if passed else 'fail',\n classification='passed',\n bill=bill\n )\n vote.pupa_id = str(v['revno'])\n # the yea and nay counts are not displayed, but vote totals are\n # and passage status is.\n yes_count = 0\n no_count = 0\n absent_count = 0\n excused_count = 0\n for voter_id in v[\"yeas\"]:\n vote.yes(legislators[voter_id])\n yes_count += 1\n for voter_id in v[\"nays\"]:\n vote.no(legislators[voter_id])\n no_count += 1\n if \"absent\" in v:\n for voter_id in v[\"absent\"]:\n vote.vote('absent', legislators[voter_id])\n absent_count += 1\n if \"excused\" in v:\n for voter_id in v[\"excused\"]:\n vote.vote('excused', legislators[voter_id])\n excused_count += 1\n\n vote.set_count('yes', yes_count)\n vote.set_count('no', no_count)\n vote.set_count('absent', absent_count)\n vote.set_count('excused', excused_count)\n # check to see if there are any other things that look\n # like vote categories, throw a warning if so\n for key, val in v.items():\n if (type(val) == list and len(val) > 0 and\n key not in [\"yeas\", \"nays\", \"absent\", \"excused\"]):\n if val[0] in legislators:\n self.logger.warning(\"{k} looks like a vote type that's not being counted.\"\n \" Double check it?\".format(k=key))\n vote.add_source(url)\n\n yield vote\n\n def old_scrape(self, session=None):\n status_report_url = \"http://www.legislature.ohio.gov/legislation/status-reports\"\n\n # ssl verification 
off due Ohio not correctly implementing SSL\n if not session:\n session = self.latest_session()\n self.info('no session, using %s', session)\n\n doc = self.get(status_report_url, verify=False).text\n doc = lxml.html.fromstring(doc)\n doc.make_links_absolute(status_report_url)\n xpath = \"//div[contains(text(),'{}')]/following-sibling::table\"\n status_table = doc.xpath(xpath.format(session))[0]\n status_links = status_table.xpath(\".//a[contains(text(),'Excel')]/@href\")\n\n for url in status_links:\n\n try:\n fname, resp = self.urlretrieve(url)\n except scrapelib.HTTPError as report:\n self.logger.warning(\"Missing report {}\".format(report))\n continue\n\n sh = xlrd.open_workbook(fname).sheet_by_index(0)\n\n # once workbook is open, we can remove tempfile\n os.remove(fname)\n for rownum in range(1, sh.nrows):\n bill_id = sh.cell(rownum, 0).value\n\n bill_type = \"resolution\" if \"R\" in bill_id else \"bill\"\n chamber = \"lower\" if \"H\" in bill_id else \"upper\"\n\n bill_title = str(sh.cell(rownum, 3).value)\n\n bill = Bill(\n bill_id,\n legislative_session=session,\n chamber=chamber,\n title=bill_title,\n classification=bill_type\n )\n bill.add_source(url)\n bill.add_sponsor('primary', str(sh.cell(rownum, 1).value))\n\n # add cosponsor\n if sh.cell(rownum, 2).value:\n bill.add_sponsor('cosponsor',\n str(sh.cell(rownum, 2).value))\n\n actor = \"\"\n\n # Actions start column after bill title\n for colnum in range(4, sh.ncols - 1):\n action = str(sh.cell(0, colnum).value)\n cell = sh.cell(rownum, colnum)\n date = cell.value\n\n if len(action) != 0:\n if action.split()[0] == 'House':\n actor = \"lower\"\n elif action.split()[0] == 'Senate':\n actor = \"upper\"\n elif action.split()[-1] == 'Governor':\n actor = \"executive\"\n elif action.split()[0] == 'Gov.':\n actor = \"executive\"\n elif action.split()[-1] == 'Gov.':\n actor = \"executive\"\n\n if action in ('House Intro. Date', 'Senate Intro. Date'):\n atype = ['bill:introduced']\n action = action.replace('Intro. 
Date', 'Introduced')\n elif action == '3rd Consideration':\n atype = ['bill:reading:3', 'bill:passed']\n elif action == 'Sent to Gov.':\n atype = ['governor:received']\n elif action == 'Signed By Governor':\n atype = ['governor:signed']\n else:\n atype = ['other']\n\n if type(date) == float:\n date = str(xlrd.xldate_as_tuple(date, 0))\n date = datetime.datetime.strptime(\n date, \"(%Y, %m, %d, %H, %M, %S)\")\n date = self._tz.localize(date)\n date = \"{:%Y-%m-%d}\".format(date)\n bill.add_action(actor, action, date, type=atype)\n\n for idx, char in enumerate(bill_id):\n try:\n int(char)\n except ValueError:\n continue\n\n underscore_bill = bill_id[:idx]+\"_\"+bill_id[idx:]\n break\n\n yield from self.scrape_votes_old(bill, underscore_bill, session)\n self.scrape_versions_old(bill, underscore_bill, session)\n yield bill\n\n def scrape_versions_old(self, bill, billname, session):\n base_url = 'http://archives.legislature.state.oh.us/'\n\n if 'R' in billname:\n piece = '/res.cfm?ID=%s_%s' % (session, billname)\n else:\n piece = '/bills.cfm?ID=%s_%s' % (session, billname)\n\n def _get_html_or_pdf_version_old(url):\n doc = lxml.html.fromstring(url)\n name = doc.xpath('//font[@size=\"2\"]/a/text()')[0]\n html_links = doc.xpath('//a[text()=\"(.html format)\"]')\n pdf_links = doc.xpath('//a[text()=\"(.pdf format)\"]')\n if html_links:\n link = html_links[0].get('href')\n bill.add_version_link(name, base_url + link, on_duplicate='use_old',\n media_type='text/html')\n elif pdf_links:\n link = pdf_links[0].get('href')\n bill.add_version_link(name, base_url + link,\n media_type='application/pdf')\n\n html = self.get(base_url + piece).text\n # pass over missing bills - (unclear why this happens)\n if 'could not be found.' in html:\n self.warning('missing page: %s' % base_url + piece)\n return\n\n _get_html_or_pdf_version_old(html)\n doc = lxml.html.fromstring(html)\n for a in doc.xpath('//a[starts-with(@href, \"/bills.cfm\")]/@href'):\n if a != piece:\n _get_html_or_pdf_version_old(self.get(base_url + a).text)\n for a in doc.xpath('//a[starts-with(@href, \"/res.cfm\")]/@href'):\n if a != piece:\n _get_html_or_pdf_version_old(self.get(base_url + a).text)\n\n def scrape_votes_old(self, bill, billname, session):\n vote_url = ('http://archives.legislature.state.oh.us/bills.cfm?ID=' +\n session + '_' + billname)\n\n page = self.get(vote_url).text\n page = lxml.html.fromstring(page)\n\n for jlink in page.xpath(\"//a[contains(@href, 'JournalText')]\"):\n date = self._tz.localize(datetime.datetime.strptime(jlink.text,\n \"%m/%d/%Y\")).date()\n date = \"{:%Y-%m-%d}\".format(date)\n details = jlink.xpath(\"string(../../../td[2])\")\n\n chamber = details.split(\" - \")[0]\n if chamber == 'House':\n chamber = 'lower'\n elif chamber == 'Senate':\n chamber = 'upper'\n else:\n raise ScrapeError(\"Bad chamber: %s\" % chamber)\n\n motion = details.split(\" - \")[1].split(\"\\n\")[0].strip()\n\n vote_row = jlink.xpath(\"../../..\")[0].getnext()\n\n yea_div = vote_row.xpath(\n \"td/font/div[contains(@id, 'Yea')]\")[0]\n yeas = []\n for td in yea_div.xpath(\"table/tr/td\"):\n name = td.xpath(\"string()\")\n if name:\n yeas.append(name)\n\n no_div = vote_row.xpath(\n \"td/font/div[contains(@id, 'Nay')]\")[0]\n nays = []\n for td in no_div.xpath(\"table/tr/td\"):\n name = td.xpath(\"string()\")\n if name:\n nays.append(name)\n\n yes_count = len(yeas)\n no_count = len(nays)\n\n vote = VoteEvent(\n chamber=chamber,\n start_date=date,\n motion_text=motion,\n result='pass' if yes_count > no_count else 'fail',\n bill=bill,\n 
classification='passed'\n )\n\n for yes in yeas:\n vote.yes(yes)\n for no in nays:\n vote.no(no)\n\n vote.add_source(vote_url)\n\n yield vote\n", "path": "openstates/oh/bills.py"}]} |
gh_patches_debug_1617 | rasdani/github-patches | git_diff | aio-libs__aiohttp-1431 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Method "read_chunk" of "BodyPartReader" returns zero bytes before eof
## Long story short
I've implemented a multipart file upload handler inspired by the code in the [docs](http://aiohttp.readthedocs.io/en/stable/web.html#file-uploads). My code truncates the part's data. I believe the problem is in the method `_read_chunk_from_stream`, which is used by `read_chunk` of `BodyPartReader`. That method returns a zero-length `bytearray` before the part's EOF. This is the pseudo-code:
reader = await request.multipart()
part = await reader.next()
arr = bytearray()
while True:
chunk = await part.read_chunk() # 8192 bytes by default.
if not chunk:
break
arr.extend(chunk)
## Expected behaviour
The loop ends when all the part's data has been read.
## Actual behaviour
The loop ends before the part's data is exhausted, i.e., `chunk` becomes a zero-length `bytearray` prematurely.
## Steps to reproduce
The code is part of a large web application, so it's hard for me to give reproducible steps, but replacing the break condition with `if not chunk and part._at_eof` made the problem go away.
reader = await request.multipart()
part = await reader.next()
arr = bytearray()
while True:
chunk = await part.read_chunk() # 8192 bytes by default.
        if not chunk and part._at_eof:  # This fixed the problem.
            break
arr.extend(chunk)
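
An equivalent workaround, sketched here with the public `at_eof()` accessor instead of the private `_at_eof` attribute (the same `request` object as above is assumed):

    reader = await request.multipart()
    part = await reader.next()
    arr = bytearray()
    while not part.at_eof():
        chunk = await part.read_chunk()  # 8192 bytes by default.
        arr.extend(chunk)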
## Your environment
Aiohttp 1.1.5
Python 3.5.1 from PSF
macOS Sierra 10.12.1
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `aiohttp/multipart.py`
Content:
```
1 import asyncio
2 import base64
3 import binascii
4 import io
5 import json
6 import mimetypes
7 import os
8 import re
9 import sys
10 import uuid
11 import warnings
12 import zlib
13 from collections import Mapping, Sequence, deque
14 from pathlib import Path
15 from urllib.parse import parse_qsl, quote, unquote, urlencode
16
17 from multidict import CIMultiDict
18
19 from .hdrs import (CONTENT_DISPOSITION, CONTENT_ENCODING, CONTENT_LENGTH,
20 CONTENT_TRANSFER_ENCODING, CONTENT_TYPE)
21 from .helpers import parse_mimetype
22 from .protocol import HttpParser
23
24 __all__ = ('MultipartReader', 'MultipartWriter',
25 'BodyPartReader', 'BodyPartWriter',
26 'BadContentDispositionHeader', 'BadContentDispositionParam',
27 'parse_content_disposition', 'content_disposition_filename')
28
29
30 CHAR = set(chr(i) for i in range(0, 128))
31 CTL = set(chr(i) for i in range(0, 32)) | {chr(127), }
32 SEPARATORS = {'(', ')', '<', '>', '@', ',', ';', ':', '\\', '"', '/', '[', ']',
33 '?', '=', '{', '}', ' ', chr(9)}
34 TOKEN = CHAR ^ CTL ^ SEPARATORS
35
36 PY_35 = sys.version_info >= (3, 5)
37 PY_352 = sys.version_info >= (3, 5, 2)
38
39
40 class BadContentDispositionHeader(RuntimeWarning):
41 pass
42
43
44 class BadContentDispositionParam(RuntimeWarning):
45 pass
46
47
48 def parse_content_disposition(header):
49 def is_token(string):
50 return string and TOKEN >= set(string)
51
52 def is_quoted(string):
53 return string[0] == string[-1] == '"'
54
55 def is_rfc5987(string):
56 return is_token(string) and string.count("'") == 2
57
58 def is_extended_param(string):
59 return string.endswith('*')
60
61 def is_continuous_param(string):
62 pos = string.find('*') + 1
63 if not pos:
64 return False
65 substring = string[pos:-1] if string.endswith('*') else string[pos:]
66 return substring.isdigit()
67
68 def unescape(text, *, chars=''.join(map(re.escape, CHAR))):
69 return re.sub('\\\\([{}])'.format(chars), '\\1', text)
70
71 if not header:
72 return None, {}
73
74 disptype, *parts = header.split(';')
75 if not is_token(disptype):
76 warnings.warn(BadContentDispositionHeader(header))
77 return None, {}
78
79 params = {}
80 for item in parts:
81 if '=' not in item:
82 warnings.warn(BadContentDispositionHeader(header))
83 return None, {}
84
85 key, value = item.split('=', 1)
86 key = key.lower().strip()
87 value = value.lstrip()
88
89 if key in params:
90 warnings.warn(BadContentDispositionHeader(header))
91 return None, {}
92
93 if not is_token(key):
94 warnings.warn(BadContentDispositionParam(item))
95 continue
96
97 elif is_continuous_param(key):
98 if is_quoted(value):
99 value = unescape(value[1:-1])
100 elif not is_token(value):
101 warnings.warn(BadContentDispositionParam(item))
102 continue
103
104 elif is_extended_param(key):
105 if is_rfc5987(value):
106 encoding, _, value = value.split("'", 2)
107 encoding = encoding or 'utf-8'
108 else:
109 warnings.warn(BadContentDispositionParam(item))
110 continue
111
112 try:
113 value = unquote(value, encoding, 'strict')
114 except UnicodeDecodeError: # pragma: nocover
115 warnings.warn(BadContentDispositionParam(item))
116 continue
117
118 else:
119 if is_quoted(value):
120 value = unescape(value[1:-1].lstrip('\\/'))
121 elif not is_token(value):
122 warnings.warn(BadContentDispositionHeader(header))
123 return None, {}
124
125 params[key] = value
126
127 return disptype.lower(), params
128
129
130 def content_disposition_filename(params):
131 if not params:
132 return None
133 elif 'filename*' in params:
134 return params['filename*']
135 elif 'filename' in params:
136 return params['filename']
137 else:
138 parts = []
139 fnparams = sorted((key, value)
140 for key, value in params.items()
141 if key.startswith('filename*'))
142 for num, (key, value) in enumerate(fnparams):
143 _, tail = key.split('*', 1)
144 if tail.endswith('*'):
145 tail = tail[:-1]
146 if tail == str(num):
147 parts.append(value)
148 else:
149 break
150 if not parts:
151 return None
152 value = ''.join(parts)
153 if "'" in value:
154 encoding, _, value = value.split("'", 2)
155 encoding = encoding or 'utf-8'
156 return unquote(value, encoding, 'strict')
157 return value
158
159
160 class MultipartResponseWrapper(object):
161     """Wrapper around the :class:`MultipartBodyReader` that takes care of the
162     underlying connection and closes it when it is no longer needed."""
163
164 def __init__(self, resp, stream):
165 self.resp = resp
166 self.stream = stream
167
168 if PY_35:
169 def __aiter__(self):
170 return self
171
172 if not PY_352: # pragma: no cover
173 __aiter__ = asyncio.coroutine(__aiter__)
174
175 @asyncio.coroutine
176 def __anext__(self):
177 part = yield from self.next()
178 if part is None:
179 raise StopAsyncIteration # NOQA
180 return part
181
182 def at_eof(self):
183         """Returns ``True`` when all response data has been read.
184
185 :rtype: bool
186 """
187 return self.resp.content.at_eof()
188
189 @asyncio.coroutine
190 def next(self):
191 """Emits next multipart reader object."""
192 item = yield from self.stream.next()
193 if self.stream.at_eof():
194 yield from self.release()
195 return item
196
197 @asyncio.coroutine
198 def release(self):
199 """Releases the connection gracefully, reading all the content
200 to the void."""
201 yield from self.resp.release()
202
203
204 class BodyPartReader(object):
205 """Multipart reader for single body part."""
206
207 chunk_size = 8192
208
209 def __init__(self, boundary, headers, content):
210 self.headers = headers
211 self._boundary = boundary
212 self._content = content
213 self._at_eof = False
214 length = self.headers.get(CONTENT_LENGTH, None)
215 self._length = int(length) if length is not None else None
216 self._read_bytes = 0
217 self._unread = deque()
218 self._prev_chunk = None
219 self._content_eof = 0
220
221 if PY_35:
222 def __aiter__(self):
223 return self
224
225 if not PY_352: # pragma: no cover
226 __aiter__ = asyncio.coroutine(__aiter__)
227
228 @asyncio.coroutine
229 def __anext__(self):
230 part = yield from self.next()
231 if part is None:
232 raise StopAsyncIteration # NOQA
233 return part
234
235 @asyncio.coroutine
236 def next(self):
237 item = yield from self.read()
238 if not item:
239 return None
240 return item
241
242 @asyncio.coroutine
243 def read(self, *, decode=False):
244 """Reads body part data.
245
246         :param bool decode: Decodes data following the encoding
247             method from the `Content-Encoding` header. If it is
248             missing, the data remains untouched
249
250 :rtype: bytearray
251 """
252 if self._at_eof:
253 return b''
254 data = bytearray()
255 if self._length is None:
256 while not self._at_eof:
257 data.extend((yield from self.readline()))
258 else:
259 while not self._at_eof:
260 data.extend((yield from self.read_chunk(self.chunk_size)))
261 if decode:
262 return self.decode(data)
263 return data
264
265 @asyncio.coroutine
266 def read_chunk(self, size=chunk_size):
267 """Reads body part content chunk of the specified size.
268
269 :param int size: chunk size
270
271 :rtype: bytearray
272 """
273 if self._at_eof:
274 return b''
275 if self._length:
276 chunk = yield from self._read_chunk_from_length(size)
277 else:
278 chunk = yield from self._read_chunk_from_stream(size)
279
280 self._read_bytes += len(chunk)
281 if self._read_bytes == self._length:
282 self._at_eof = True
283 if self._at_eof:
284 assert b'\r\n' == (yield from self._content.readline()), \
285 'reader did not read all the data or it is malformed'
286 return chunk
287
288 @asyncio.coroutine
289 def _read_chunk_from_length(self, size):
290 """Reads body part content chunk of the specified size.
291         The body part must have a `Content-Length` header with a proper value.
292
293 :param int size: chunk size
294
295 :rtype: bytearray
296 """
297 assert self._length is not None, \
298 'Content-Length required for chunked read'
299 chunk_size = min(size, self._length - self._read_bytes)
300 chunk = yield from self._content.read(chunk_size)
301 return chunk
302
303 @asyncio.coroutine
304 def _read_chunk_from_stream(self, size):
305 """Reads content chunk of body part with unknown length.
306 The `Content-Length` header for body part is not necessary.
307
308 :param int size: chunk size
309
310 :rtype: bytearray
311 """
312 assert size >= len(self._boundary) + 2, \
313             'Chunk size must be greater than or equal to boundary length + 2'
314 first_chunk = self._prev_chunk is None
315 if first_chunk:
316 self._prev_chunk = yield from self._content.read(size)
317
318 chunk = yield from self._content.read(size)
319 self._content_eof += int(self._content.at_eof())
320 assert self._content_eof < 3, "Reading after EOF"
321 window = self._prev_chunk + chunk
322 sub = b'\r\n' + self._boundary
323 if first_chunk:
324 idx = window.find(sub)
325 else:
326 idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
327 if idx >= 0:
328 # pushing boundary back to content
329 self._content.unread_data(window[idx:])
330 if size > idx:
331 self._prev_chunk = self._prev_chunk[:idx]
332 chunk = window[len(self._prev_chunk):idx]
333 if not chunk:
334 self._at_eof = True
335 if 0 < len(chunk) < len(sub) and not self._content_eof:
336 self._prev_chunk += chunk
337 self._at_eof = False
338 return b''
339 result = self._prev_chunk
340 self._prev_chunk = chunk
341 return result
342
343 @asyncio.coroutine
344 def readline(self):
345         """Reads the body part line by line.
346
347 :rtype: bytearray
348 """
349 if self._at_eof:
350 return b''
351
352 if self._unread:
353 line = self._unread.popleft()
354 else:
355 line = yield from self._content.readline()
356
357 if line.startswith(self._boundary):
358 # the very last boundary may not come with \r\n,
359 # so set single rules for everyone
360 sline = line.rstrip(b'\r\n')
361 boundary = self._boundary
362 last_boundary = self._boundary + b'--'
363 # ensure that we read exactly the boundary, not something alike
364 if sline == boundary or sline == last_boundary:
365 self._at_eof = True
366 self._unread.append(line)
367 return b''
368 else:
369 next_line = yield from self._content.readline()
370 if next_line.startswith(self._boundary):
371 line = line[:-2] # strip CRLF but only once
372 self._unread.append(next_line)
373
374 return line
375
376 @asyncio.coroutine
377 def release(self):
378 """Like :meth:`read`, but reads all the data to the void.
379
380 :rtype: None
381 """
382 if self._at_eof:
383 return
384 if self._length is None:
385 while not self._at_eof:
386 yield from self.readline()
387 else:
388 while not self._at_eof:
389 yield from self.read_chunk(self.chunk_size)
390
391 @asyncio.coroutine
392 def text(self, *, encoding=None):
393 """Like :meth:`read`, but assumes that body part contains text data.
394
395 :param str encoding: Custom text encoding. Overrides specified
396 in charset param of `Content-Type` header
397
398 :rtype: str
399 """
400 data = yield from self.read(decode=True)
401 encoding = encoding or self.get_charset(default='latin1')
402 return data.decode(encoding)
403
404 @asyncio.coroutine
405 def json(self, *, encoding=None):
406         """Like :meth:`read`, but assumes that the body part contains JSON data.
407
408 :param str encoding: Custom JSON encoding. Overrides specified
409 in charset param of `Content-Type` header
410 """
411 data = yield from self.read(decode=True)
412 if not data:
413 return None
414 encoding = encoding or self.get_charset(default='utf-8')
415 return json.loads(data.decode(encoding))
416
417 @asyncio.coroutine
418 def form(self, *, encoding=None):
419         """Like :meth:`read`, but assumes that the body part contains
420         form-urlencoded data.
421
422 :param str encoding: Custom form encoding. Overrides specified
423 in charset param of `Content-Type` header
424 """
425 data = yield from self.read(decode=True)
426 if not data:
427 return None
428 encoding = encoding or self.get_charset(default='utf-8')
429 return parse_qsl(data.rstrip().decode(encoding), encoding=encoding)
430
431 def at_eof(self):
432 """Returns ``True`` if the boundary was reached or
433 ``False`` otherwise.
434
435 :rtype: bool
436 """
437 return self._at_eof
438
439 def decode(self, data):
440 """Decodes data according the specified `Content-Encoding`
441 or `Content-Transfer-Encoding` headers value.
442
443 Supports ``gzip``, ``deflate`` and ``identity`` encodings for
444 `Content-Encoding` header.
445
446 Supports ``base64``, ``quoted-printable``, ``binary`` encodings for
447 `Content-Transfer-Encoding` header.
448
449 :param bytearray data: Data to decode.
450
451 :raises: :exc:`RuntimeError` - if encoding is unknown.
452
453 :rtype: bytes
454 """
455 if CONTENT_TRANSFER_ENCODING in self.headers:
456 data = self._decode_content_transfer(data)
457 if CONTENT_ENCODING in self.headers:
458 return self._decode_content(data)
459 return data
460
461 def _decode_content(self, data):
462 encoding = self.headers[CONTENT_ENCODING].lower()
463
464 if encoding == 'deflate':
465 return zlib.decompress(data, -zlib.MAX_WBITS)
466 elif encoding == 'gzip':
467 return zlib.decompress(data, 16 + zlib.MAX_WBITS)
468 elif encoding == 'identity':
469 return data
470 else:
471 raise RuntimeError('unknown content encoding: {}'.format(encoding))
472
473 def _decode_content_transfer(self, data):
474 encoding = self.headers[CONTENT_TRANSFER_ENCODING].lower()
475
476 if encoding == 'base64':
477 return base64.b64decode(data)
478 elif encoding == 'quoted-printable':
479 return binascii.a2b_qp(data)
480 elif encoding == 'binary':
481 return data
482 else:
483 raise RuntimeError('unknown content transfer encoding: {}'
484 ''.format(encoding))
485
486 def get_charset(self, default=None):
487 """Returns charset parameter from ``Content-Type`` header or default.
488 """
489 ctype = self.headers.get(CONTENT_TYPE, '')
490 *_, params = parse_mimetype(ctype)
491 return params.get('charset', default)
492
493 @property
494 def filename(self):
495         """Returns the filename specified in the Content-Disposition header
496         or ``None`` if it is missing or the header is malformed."""
497 _, params = parse_content_disposition(
498 self.headers.get(CONTENT_DISPOSITION))
499 return content_disposition_filename(params)
500
501
502 class MultipartReader(object):
503 """Multipart body reader."""
504
505 #: Response wrapper, used when multipart readers constructs from response.
506 response_wrapper_cls = MultipartResponseWrapper
507 #: Multipart reader class, used to handle multipart/* body parts.
508 #: None points to type(self)
509 multipart_reader_cls = None
510 #: Body part reader class for non multipart/* content types.
511 part_reader_cls = BodyPartReader
512
513 def __init__(self, headers, content):
514 self.headers = headers
515 self._boundary = ('--' + self._get_boundary()).encode()
516 self._content = content
517 self._last_part = None
518 self._at_eof = False
519 self._at_bof = True
520 self._unread = []
521
522 if PY_35:
523 def __aiter__(self):
524 return self
525
526 if not PY_352: # pragma: no cover
527 __aiter__ = asyncio.coroutine(__aiter__)
528
529 @asyncio.coroutine
530 def __anext__(self):
531 part = yield from self.next()
532 if part is None:
533 raise StopAsyncIteration # NOQA
534 return part
535
536 @classmethod
537 def from_response(cls, response):
538 """Constructs reader instance from HTTP response.
539
540 :param response: :class:`~aiohttp.client.ClientResponse` instance
541 """
542 obj = cls.response_wrapper_cls(response, cls(response.headers,
543 response.content))
544 return obj
545
546 def at_eof(self):
547 """Returns ``True`` if the final boundary was reached or
548 ``False`` otherwise.
549
550 :rtype: bool
551 """
552 return self._at_eof
553
554 @asyncio.coroutine
555 def next(self):
556 """Emits the next multipart body part."""
557 # So, if we're at BOF, we need to skip till the boundary.
558 if self._at_eof:
559 return
560 yield from self._maybe_release_last_part()
561 if self._at_bof:
562 yield from self._read_until_first_boundary()
563 self._at_bof = False
564 else:
565 yield from self._read_boundary()
566 if self._at_eof: # we just read the last boundary, nothing to do there
567 return
568 self._last_part = yield from self.fetch_next_part()
569 return self._last_part
570
571 @asyncio.coroutine
572 def release(self):
573 """Reads all the body parts to the void till the final boundary."""
574 while not self._at_eof:
575 item = yield from self.next()
576 if item is None:
577 break
578 yield from item.release()
579
580 @asyncio.coroutine
581 def fetch_next_part(self):
582 """Returns the next body part reader."""
583 headers = yield from self._read_headers()
584 return self._get_part_reader(headers)
585
586 def _get_part_reader(self, headers):
587 """Dispatches the response by the `Content-Type` header, returning
588 suitable reader instance.
589
590 :param dict headers: Response headers
591 """
592 ctype = headers.get(CONTENT_TYPE, '')
593 mtype, *_ = parse_mimetype(ctype)
594 if mtype == 'multipart':
595 if self.multipart_reader_cls is None:
596 return type(self)(headers, self._content)
597 return self.multipart_reader_cls(headers, self._content)
598 else:
599 return self.part_reader_cls(self._boundary, headers, self._content)
600
601 def _get_boundary(self):
602 mtype, *_, params = parse_mimetype(self.headers[CONTENT_TYPE])
603
604 assert mtype == 'multipart', 'multipart/* content type expected'
605
606 if 'boundary' not in params:
607             raise ValueError('boundary missing for Content-Type: %s'
608 % self.headers[CONTENT_TYPE])
609
610 boundary = params['boundary']
611 if len(boundary) > 70:
612 raise ValueError('boundary %r is too long (70 chars max)'
613 % boundary)
614
615 return boundary
616
617 @asyncio.coroutine
618 def _readline(self):
619 if self._unread:
620 return self._unread.pop()
621 return (yield from self._content.readline())
622
623 @asyncio.coroutine
624 def _read_until_first_boundary(self):
625 while True:
626 chunk = yield from self._readline()
627 if chunk == b'':
628 raise ValueError("Could not find starting boundary %r"
629 % (self._boundary))
630 chunk = chunk.rstrip()
631 if chunk == self._boundary:
632 return
633 elif chunk == self._boundary + b'--':
634 self._at_eof = True
635 return
636
637 @asyncio.coroutine
638 def _read_boundary(self):
639 chunk = (yield from self._readline()).rstrip()
640 if chunk == self._boundary:
641 pass
642 elif chunk == self._boundary + b'--':
643 self._at_eof = True
644 else:
645 raise ValueError('Invalid boundary %r, expected %r'
646 % (chunk, self._boundary))
647
648 @asyncio.coroutine
649 def _read_headers(self):
650 lines = [b'']
651 while True:
652 chunk = yield from self._content.readline()
653 chunk = chunk.strip()
654 lines.append(chunk)
655 if not chunk:
656 break
657 parser = HttpParser()
658 headers, *_ = parser.parse_headers(lines)
659 return headers
660
661 @asyncio.coroutine
662 def _maybe_release_last_part(self):
663 """Ensures that the last read body part is read completely."""
664 if self._last_part is not None:
665 if not self._last_part.at_eof():
666 yield from self._last_part.release()
667 self._unread.extend(self._last_part._unread)
668 self._last_part = None
669
670
671 class BodyPartWriter(object):
672 """Multipart writer for single body part."""
673
674 def __init__(self, obj, headers=None, *, chunk_size=8192):
675 if headers is None:
676 headers = CIMultiDict()
677 elif not isinstance(headers, CIMultiDict):
678 headers = CIMultiDict(headers)
679
680 self.obj = obj
681 self.headers = headers
682 self._chunk_size = chunk_size
683 self._fill_headers_with_defaults()
684
685 self._serialize_map = {
686 bytes: self._serialize_bytes,
687 str: self._serialize_str,
688 io.IOBase: self._serialize_io,
689 MultipartWriter: self._serialize_multipart,
690 ('application', 'json'): self._serialize_json,
691 ('application', 'x-www-form-urlencoded'): self._serialize_form
692 }
693
694 def _fill_headers_with_defaults(self):
695 if CONTENT_TYPE not in self.headers:
696 content_type = self._guess_content_type(self.obj)
697 if content_type is not None:
698 self.headers[CONTENT_TYPE] = content_type
699
700 if CONTENT_LENGTH not in self.headers:
701 content_length = self._guess_content_length(self.obj)
702 if content_length is not None:
703 self.headers[CONTENT_LENGTH] = str(content_length)
704
705 if CONTENT_DISPOSITION not in self.headers:
706 filename = self._guess_filename(self.obj)
707 if filename is not None:
708 self.set_content_disposition('attachment', filename=filename)
709
710 def _guess_content_length(self, obj):
711 if isinstance(obj, bytes):
712 return len(obj)
713 elif isinstance(obj, str):
714 *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE))
715 charset = params.get('charset', 'us-ascii')
716 return len(obj.encode(charset))
717 elif isinstance(obj, io.StringIO):
718 *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE))
719 charset = params.get('charset', 'us-ascii')
720 return len(obj.getvalue().encode(charset)) - obj.tell()
721 elif isinstance(obj, io.BytesIO):
722 return len(obj.getvalue()) - obj.tell()
723 elif isinstance(obj, io.IOBase):
724 try:
725 return os.fstat(obj.fileno()).st_size - obj.tell()
726 except (AttributeError, OSError):
727 return None
728 else:
729 return None
730
731 def _guess_content_type(self, obj, default='application/octet-stream'):
732 if hasattr(obj, 'name'):
733 name = getattr(obj, 'name')
734 return mimetypes.guess_type(name)[0]
735 elif isinstance(obj, (str, io.StringIO)):
736 return 'text/plain; charset=utf-8'
737 else:
738 return default
739
740 def _guess_filename(self, obj):
741 if isinstance(obj, io.IOBase):
742 name = getattr(obj, 'name', None)
743 if name is not None:
744 return Path(name).name
745
746 def serialize(self):
747 """Yields byte chunks for body part."""
748
749 has_encoding = (
750 CONTENT_ENCODING in self.headers and
751 self.headers[CONTENT_ENCODING] != 'identity' or
752 CONTENT_TRANSFER_ENCODING in self.headers
753 )
754 if has_encoding:
755             # since we're following a streaming approach that doesn't assume
756             # any intermediate buffers, we cannot calculate the real content
757             # length with the specified content encoding scheme. So, instead
758             # of lying about the content length and causing reading issues,
759             # we have to strip this information.
760 self.headers.pop(CONTENT_LENGTH, None)
761
762 if self.headers:
763 yield b'\r\n'.join(
764 b': '.join(map(lambda i: i.encode('latin1'), item))
765 for item in self.headers.items()
766 )
767 yield b'\r\n\r\n'
768 yield from self._maybe_encode_stream(self._serialize_obj())
769 yield b'\r\n'
770
771 def _serialize_obj(self):
772 obj = self.obj
773 mtype, stype, *_ = parse_mimetype(self.headers.get(CONTENT_TYPE))
774 serializer = self._serialize_map.get((mtype, stype))
775 if serializer is not None:
776 return serializer(obj)
777
778 for key in self._serialize_map:
779 if not isinstance(key, tuple) and isinstance(obj, key):
780 return self._serialize_map[key](obj)
781 return self._serialize_default(obj)
782
783 def _serialize_bytes(self, obj):
784 yield obj
785
786 def _serialize_str(self, obj):
787 *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE))
788 yield obj.encode(params.get('charset', 'us-ascii'))
789
790 def _serialize_io(self, obj):
791 while True:
792 chunk = obj.read(self._chunk_size)
793 if not chunk:
794 break
795 if isinstance(chunk, str):
796 yield from self._serialize_str(chunk)
797 else:
798 yield from self._serialize_bytes(chunk)
799
800 def _serialize_multipart(self, obj):
801 yield from obj.serialize()
802
803 def _serialize_json(self, obj):
804 *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE))
805 yield json.dumps(obj).encode(params.get('charset', 'utf-8'))
806
807 def _serialize_form(self, obj):
808 if isinstance(obj, Mapping):
809 obj = list(obj.items())
810 return self._serialize_str(urlencode(obj, doseq=True))
811
812 def _serialize_default(self, obj):
813 raise TypeError('unknown body part type %r' % type(obj))
814
815 def _maybe_encode_stream(self, stream):
816 if CONTENT_ENCODING in self.headers:
817 stream = self._apply_content_encoding(stream)
818 if CONTENT_TRANSFER_ENCODING in self.headers:
819 stream = self._apply_content_transfer_encoding(stream)
820 yield from stream
821
822 def _apply_content_encoding(self, stream):
823 encoding = self.headers[CONTENT_ENCODING].lower()
824 if encoding == 'identity':
825 yield from stream
826 elif encoding in ('deflate', 'gzip'):
827 if encoding == 'gzip':
828 zlib_mode = 16 + zlib.MAX_WBITS
829 else:
830 zlib_mode = -zlib.MAX_WBITS
831 zcomp = zlib.compressobj(wbits=zlib_mode)
832 for chunk in stream:
833 yield zcomp.compress(chunk)
834 else:
835 yield zcomp.flush()
836 else:
837 raise RuntimeError('unknown content encoding: {}'
838 ''.format(encoding))
839
840 def _apply_content_transfer_encoding(self, stream):
841 encoding = self.headers[CONTENT_TRANSFER_ENCODING].lower()
842 if encoding == 'base64':
843 buffer = bytearray()
844 while True:
845 if buffer:
846 div, mod = divmod(len(buffer), 3)
847 chunk, buffer = buffer[:div * 3], buffer[div * 3:]
848 if chunk:
849 yield base64.b64encode(chunk)
850 chunk = next(stream, None)
851 if not chunk:
852 if buffer:
853 yield base64.b64encode(buffer[:])
854 return
855 buffer.extend(chunk)
856 elif encoding == 'quoted-printable':
857 for chunk in stream:
858 yield binascii.b2a_qp(chunk)
859 elif encoding == 'binary':
860 yield from stream
861 else:
862 raise RuntimeError('unknown content transfer encoding: {}'
863 ''.format(encoding))
864
865 def set_content_disposition(self, disptype, **params):
866 """Sets ``Content-Disposition`` header.
867
868 :param str disptype: Disposition type: inline, attachment, form-data.
869 Should be valid extension token (see RFC 2183)
870 :param dict params: Disposition params
871 """
872 if not disptype or not (TOKEN > set(disptype)):
873 raise ValueError('bad content disposition type {!r}'
874 ''.format(disptype))
875 value = disptype
876 if params:
877 lparams = []
878 for key, val in params.items():
879 if not key or not (TOKEN > set(key)):
880 raise ValueError('bad content disposition parameter'
881 ' {!r}={!r}'.format(key, val))
882 qval = quote(val, '')
883 lparams.append((key, '"%s"' % qval))
884 if key == 'filename':
885 lparams.append(('filename*', "utf-8''" + qval))
886 sparams = '; '.join('='.join(pair) for pair in lparams)
887 value = '; '.join((value, sparams))
888 self.headers[CONTENT_DISPOSITION] = value
889
890 @property
891 def filename(self):
892 """Returns filename specified in Content-Disposition header or ``None``
893         if missing."""
894 _, params = parse_content_disposition(
895 self.headers.get(CONTENT_DISPOSITION))
896 return content_disposition_filename(params)
897
898
899 class MultipartWriter(object):
900 """Multipart body writer."""
901
902 #: Body part reader class for non multipart/* content types.
903 part_writer_cls = BodyPartWriter
904
905 def __init__(self, subtype='mixed', boundary=None):
906 boundary = boundary if boundary is not None else uuid.uuid4().hex
907 try:
908 boundary.encode('us-ascii')
909 except UnicodeEncodeError:
910             raise ValueError('boundary should contain ASCII-only chars')
911 self.headers = CIMultiDict()
912 self.headers[CONTENT_TYPE] = 'multipart/{}; boundary="{}"'.format(
913 subtype, boundary
914 )
915 self.parts = []
916
917 def __enter__(self):
918 return self
919
920 def __exit__(self, exc_type, exc_val, exc_tb):
921 pass
922
923 def __iter__(self):
924 return iter(self.parts)
925
926 def __len__(self):
927 return len(self.parts)
928
929 @property
930 def boundary(self):
931 *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE))
932 return params['boundary'].encode('us-ascii')
933
934 def append(self, obj, headers=None):
935 """Adds a new body part to multipart writer."""
936 if isinstance(obj, self.part_writer_cls):
937 if headers:
938 obj.headers.update(headers)
939 self.parts.append(obj)
940 else:
941 if not headers:
942 headers = CIMultiDict()
943 self.parts.append(self.part_writer_cls(obj, headers))
944 return self.parts[-1]
945
946 def append_json(self, obj, headers=None):
947 """Helper to append JSON part."""
948 if not headers:
949 headers = CIMultiDict()
950 headers[CONTENT_TYPE] = 'application/json'
951 return self.append(obj, headers)
952
953 def append_form(self, obj, headers=None):
954 """Helper to append form urlencoded part."""
955 if not headers:
956 headers = CIMultiDict()
957 headers[CONTENT_TYPE] = 'application/x-www-form-urlencoded'
958 assert isinstance(obj, (Sequence, Mapping))
959 return self.append(obj, headers)
960
961 def serialize(self):
962 """Yields multipart byte chunks."""
963 if not self.parts:
964 yield b''
965 return
966
967 for part in self.parts:
968 yield b'--' + self.boundary + b'\r\n'
969 yield from part.serialize()
970 else:
971 yield b'--' + self.boundary + b'--\r\n'
972
973 yield b''
974
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch in the `git diff` format, fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
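Below is the reference patch. The bug is in `BodyPartReader._read_chunk_from_stream` in `aiohttp/multipart.py`: when the newly read chunk was shorter than the boundary marker (`b'\r\n' + boundary`) and the stream was not yet at EOF, a speculative branch buffered that chunk, reset `self._at_eof` back to `False`, and returned `b''`. That empty return before the real EOF is exactly what the issue reports, so the fix deletes the branch: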
```diff
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py
--- a/aiohttp/multipart.py
+++ b/aiohttp/multipart.py
@@ -332,10 +332,6 @@
chunk = window[len(self._prev_chunk):idx]
if not chunk:
self._at_eof = True
- if 0 < len(chunk) < len(sub) and not self._content_eof:
- self._prev_chunk += chunk
- self._at_eof = False
- return b''
result = self._prev_chunk
self._prev_chunk = chunk
return result
```
So, instead of lying\n # about content length and cause reading issues, we have to strip\n # this information.\n self.headers.pop(CONTENT_LENGTH, None)\n\n if self.headers:\n yield b'\\r\\n'.join(\n b': '.join(map(lambda i: i.encode('latin1'), item))\n for item in self.headers.items()\n )\n yield b'\\r\\n\\r\\n'\n yield from self._maybe_encode_stream(self._serialize_obj())\n yield b'\\r\\n'\n\n def _serialize_obj(self):\n obj = self.obj\n mtype, stype, *_ = parse_mimetype(self.headers.get(CONTENT_TYPE))\n serializer = self._serialize_map.get((mtype, stype))\n if serializer is not None:\n return serializer(obj)\n\n for key in self._serialize_map:\n if not isinstance(key, tuple) and isinstance(obj, key):\n return self._serialize_map[key](obj)\n return self._serialize_default(obj)\n\n def _serialize_bytes(self, obj):\n yield obj\n\n def _serialize_str(self, obj):\n *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE))\n yield obj.encode(params.get('charset', 'us-ascii'))\n\n def _serialize_io(self, obj):\n while True:\n chunk = obj.read(self._chunk_size)\n if not chunk:\n break\n if isinstance(chunk, str):\n yield from self._serialize_str(chunk)\n else:\n yield from self._serialize_bytes(chunk)\n\n def _serialize_multipart(self, obj):\n yield from obj.serialize()\n\n def _serialize_json(self, obj):\n *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE))\n yield json.dumps(obj).encode(params.get('charset', 'utf-8'))\n\n def _serialize_form(self, obj):\n if isinstance(obj, Mapping):\n obj = list(obj.items())\n return self._serialize_str(urlencode(obj, doseq=True))\n\n def _serialize_default(self, obj):\n raise TypeError('unknown body part type %r' % type(obj))\n\n def _maybe_encode_stream(self, stream):\n if CONTENT_ENCODING in self.headers:\n stream = self._apply_content_encoding(stream)\n if CONTENT_TRANSFER_ENCODING in self.headers:\n stream = self._apply_content_transfer_encoding(stream)\n yield from stream\n\n def _apply_content_encoding(self, stream):\n encoding = self.headers[CONTENT_ENCODING].lower()\n if encoding == 'identity':\n yield from stream\n elif encoding in ('deflate', 'gzip'):\n if encoding == 'gzip':\n zlib_mode = 16 + zlib.MAX_WBITS\n else:\n zlib_mode = -zlib.MAX_WBITS\n zcomp = zlib.compressobj(wbits=zlib_mode)\n for chunk in stream:\n yield zcomp.compress(chunk)\n else:\n yield zcomp.flush()\n else:\n raise RuntimeError('unknown content encoding: {}'\n ''.format(encoding))\n\n def _apply_content_transfer_encoding(self, stream):\n encoding = self.headers[CONTENT_TRANSFER_ENCODING].lower()\n if encoding == 'base64':\n buffer = bytearray()\n while True:\n if buffer:\n div, mod = divmod(len(buffer), 3)\n chunk, buffer = buffer[:div * 3], buffer[div * 3:]\n if chunk:\n yield base64.b64encode(chunk)\n chunk = next(stream, None)\n if not chunk:\n if buffer:\n yield base64.b64encode(buffer[:])\n return\n buffer.extend(chunk)\n elif encoding == 'quoted-printable':\n for chunk in stream:\n yield binascii.b2a_qp(chunk)\n elif encoding == 'binary':\n yield from stream\n else:\n raise RuntimeError('unknown content transfer encoding: {}'\n ''.format(encoding))\n\n def set_content_disposition(self, disptype, **params):\n \"\"\"Sets ``Content-Disposition`` header.\n\n :param str disptype: Disposition type: inline, attachment, form-data.\n Should be valid extension token (see RFC 2183)\n :param dict params: Disposition params\n \"\"\"\n if not disptype or not (TOKEN > set(disptype)):\n raise ValueError('bad content disposition type {!r}'\n ''.format(disptype))\n value = 
disptype\n if params:\n lparams = []\n for key, val in params.items():\n if not key or not (TOKEN > set(key)):\n raise ValueError('bad content disposition parameter'\n ' {!r}={!r}'.format(key, val))\n qval = quote(val, '')\n lparams.append((key, '\"%s\"' % qval))\n if key == 'filename':\n lparams.append(('filename*', \"utf-8''\" + qval))\n sparams = '; '.join('='.join(pair) for pair in lparams)\n value = '; '.join((value, sparams))\n self.headers[CONTENT_DISPOSITION] = value\n\n @property\n def filename(self):\n \"\"\"Returns filename specified in Content-Disposition header or ``None``\n if missed.\"\"\"\n _, params = parse_content_disposition(\n self.headers.get(CONTENT_DISPOSITION))\n return content_disposition_filename(params)\n\n\nclass MultipartWriter(object):\n \"\"\"Multipart body writer.\"\"\"\n\n #: Body part reader class for non multipart/* content types.\n part_writer_cls = BodyPartWriter\n\n def __init__(self, subtype='mixed', boundary=None):\n boundary = boundary if boundary is not None else uuid.uuid4().hex\n try:\n boundary.encode('us-ascii')\n except UnicodeEncodeError:\n raise ValueError('boundary should contains ASCII only chars')\n self.headers = CIMultiDict()\n self.headers[CONTENT_TYPE] = 'multipart/{}; boundary=\"{}\"'.format(\n subtype, boundary\n )\n self.parts = []\n\n def __enter__(self):\n return self\n\n def __exit__(self, exc_type, exc_val, exc_tb):\n pass\n\n def __iter__(self):\n return iter(self.parts)\n\n def __len__(self):\n return len(self.parts)\n\n @property\n def boundary(self):\n *_, params = parse_mimetype(self.headers.get(CONTENT_TYPE))\n return params['boundary'].encode('us-ascii')\n\n def append(self, obj, headers=None):\n \"\"\"Adds a new body part to multipart writer.\"\"\"\n if isinstance(obj, self.part_writer_cls):\n if headers:\n obj.headers.update(headers)\n self.parts.append(obj)\n else:\n if not headers:\n headers = CIMultiDict()\n self.parts.append(self.part_writer_cls(obj, headers))\n return self.parts[-1]\n\n def append_json(self, obj, headers=None):\n \"\"\"Helper to append JSON part.\"\"\"\n if not headers:\n headers = CIMultiDict()\n headers[CONTENT_TYPE] = 'application/json'\n return self.append(obj, headers)\n\n def append_form(self, obj, headers=None):\n \"\"\"Helper to append form urlencoded part.\"\"\"\n if not headers:\n headers = CIMultiDict()\n headers[CONTENT_TYPE] = 'application/x-www-form-urlencoded'\n assert isinstance(obj, (Sequence, Mapping))\n return self.append(obj, headers)\n\n def serialize(self):\n \"\"\"Yields multipart byte chunks.\"\"\"\n if not self.parts:\n yield b''\n return\n\n for part in self.parts:\n yield b'--' + self.boundary + b'\\r\\n'\n yield from part.serialize()\n else:\n yield b'--' + self.boundary + b'--\\r\\n'\n\n yield b''\n", "path": "aiohttp/multipart.py"}]} |
gh_patches_debug_1618 | rasdani/github-patches | git_diff | cookiecutter__cookiecutter-608 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
file_handle.close() is never called -- and it causes a bug!!
In https://github.com/audreyr/cookiecutter/blob/master/cookiecutter/generate.py#L90, `file_handle.close()` is never called, and there's no need for it to remain open.
This is the first time in over 10 years of Python programming that I've run into an actual issue with file handles left open, so I felt that the use of two exclamation points in the subject was warranted. I'm removing a temporary template after an unrelated error, and...
```
File "D:\anaconda32\lib\shutil.py", line 250, in rmtree
os.remove(fullname)
WindowsError: [Error 32] The process cannot access the file because it is being
used by another process: '.\\tmpp2duu1\\cookiecutter.json'
```
This change in generate.py:90 keeps Python from stumbling on this particular issue:
```
try:
with open(context_file) as file_handle:
obj = json.load(file_handle, object_pairs_hook=OrderedDict)
except ValueError as e:
# ...
```
--- END ISSUE ---
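A minimal sketch of the loading pattern the reporter proposes (the function name `load_context` is illustrative, not cookiecutter's actual API). The `with` block guarantees the handle is closed even when `json.load` raises, so a later `shutil.rmtree` on Windows no longer trips over an open handle:

```python
import json
from collections import OrderedDict

def load_context(context_file='cookiecutter.json'):
    # The context manager closes the file on success *and* on error,
    # so Windows can delete the template directory afterwards.
    with open(context_file) as file_handle:
        return json.load(file_handle, object_pairs_hook=OrderedDict)
```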
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `cookiecutter/generate.py`
Content:
```
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3
4 """
5 cookiecutter.generate
6 ---------------------
7
8 Functions for generating a project from a project template.
9 """
10 from __future__ import unicode_literals
11 from collections import OrderedDict
12 import fnmatch
13 import io
14 import json
15 import logging
16 import os
17 import shutil
18
19 from jinja2 import FileSystemLoader, Template
20 from jinja2.environment import Environment
21 from jinja2.exceptions import TemplateSyntaxError
22 from binaryornot.check import is_binary
23
24 from .exceptions import (
25 NonTemplatedInputDirException,
26 ContextDecodingException,
27 FailedHookException,
28 OutputDirExistsException
29 )
30 from .find import find_template
31 from .utils import make_sure_path_exists, work_in, rmtree
32 from .hooks import run_hook
33
34
35 def copy_without_render(path, context):
36 """
37 Returns True if `path` matches some pattern in the
38 `_copy_without_render` context setting.
39
40 :param path: A file-system path referring to a file or dir that
41 should be rendered or just copied.
42 :param context: cookiecutter context.
43 """
44 try:
45 for dont_render in context['cookiecutter']['_copy_without_render']:
46 if fnmatch.fnmatch(path, dont_render):
47 return True
48 except KeyError:
49 return False
50
51 return False
52
53
54 def apply_overwrites_to_context(context, overwrite_context):
55 """Modify the given context in place based on the overwrite_context."""
56 for variable, overwrite in overwrite_context.items():
57 if variable not in context:
58 # Do not include variables which are not used in the template
59 continue
60
61 context_value = context[variable]
62
63 if isinstance(context_value, list):
64 # We are dealing with a choice variable
65 if overwrite in context_value:
66 # This overwrite is actually valid for the given context
67 # Let's set it as default (by definition first item in list)
68 # see ``cookiecutter.prompt.prompt_choice_for_config``
69 context_value.remove(overwrite)
70 context_value.insert(0, overwrite)
71 else:
72 # Simply overwrite the value for this variable
73 context[variable] = overwrite
74
75
76 def generate_context(context_file='cookiecutter.json', default_context=None,
77 extra_context=None):
78 """
79 Generates the context for a Cookiecutter project template.
80 Loads the JSON file as a Python object, with key being the JSON filename.
81
82 :param context_file: JSON file containing key/value pairs for populating
83 the cookiecutter's variables.
84 :param default_context: Dictionary containing config to take into account.
85 :param extra_context: Dictionary containing configuration overrides
86 """
87
88 context = {}
89
90 file_handle = open(context_file)
91 try:
92 obj = json.load(file_handle, object_pairs_hook=OrderedDict)
93 except ValueError as e:
94 # JSON decoding error. Let's throw a new exception that is more
95 # friendly for the developer or user.
96 full_fpath = os.path.abspath(context_file)
97 json_exc_message = str(e)
98 our_exc_message = (
99 'JSON decoding error while loading "{0}". Decoding'
100 ' error details: "{1}"'.format(full_fpath, json_exc_message))
101 raise ContextDecodingException(our_exc_message)
102
103 # Add the Python object to the context dictionary
104 file_name = os.path.split(context_file)[1]
105 file_stem = file_name.split('.')[0]
106 context[file_stem] = obj
107
108 # Overwrite context variable defaults with the default context from the
109 # user's global config, if available
110 if default_context:
111 apply_overwrites_to_context(obj, default_context)
112 if extra_context:
113 apply_overwrites_to_context(obj, extra_context)
114
115 logging.debug('Context generated is {0}'.format(context))
116 return context
117
118
119 def generate_file(project_dir, infile, context, env):
120 """
121 1. Render the filename of infile as the name of outfile.
122 2. Deal with infile appropriately:
123
124 a. If infile is a binary file, copy it over without rendering.
125 b. If infile is a text file, render its contents and write the
126 rendered infile to outfile.
127
128 Precondition:
129
130 When calling `generate_file()`, the root template dir must be the
131 current working directory. Using `utils.work_in()` is the recommended
132 way to perform this directory change.
133
134 :param project_dir: Absolute path to the resulting generated project.
135 :param infile: Input file to generate the file from. Relative to the root
136 template dir.
137 :param context: Dict for populating the cookiecutter's variables.
138 :param env: Jinja2 template execution environment.
139 """
140
141 logging.debug('Generating file {0}'.format(infile))
142
143 # Render the path to the output file (not including the root project dir)
144 outfile_tmpl = Template(infile)
145
146 outfile = os.path.join(project_dir, outfile_tmpl.render(**context))
147 file_name_is_empty = os.path.isdir(outfile)
148 if file_name_is_empty:
149 logging.debug('The resulting file name is empty: {0}'.format(outfile))
150 return
151
152 logging.debug('outfile is {0}'.format(outfile))
153
154 # Just copy over binary files. Don't render.
155 logging.debug("Check {0} to see if it's a binary".format(infile))
156 if is_binary(infile):
157 logging.debug('Copying binary {0} to {1} without rendering'
158 .format(infile, outfile))
159 shutil.copyfile(infile, outfile)
160 else:
161 # Force fwd slashes on Windows for get_template
162 # This is a by-design Jinja issue
163 infile_fwd_slashes = infile.replace(os.path.sep, '/')
164
165 # Render the file
166 try:
167 tmpl = env.get_template(infile_fwd_slashes)
168 except TemplateSyntaxError as exception:
169 # Disable translated so that printed exception contains verbose
170 # information about syntax error location
171 exception.translated = False
172 raise
173 rendered_file = tmpl.render(**context)
174
175 logging.debug('Writing {0}'.format(outfile))
176
177 with io.open(outfile, 'w', encoding='utf-8') as fh:
178 fh.write(rendered_file)
179
180 # Apply file permissions to output file
181 shutil.copymode(infile, outfile)
182
183
184 def render_and_create_dir(dirname, context, output_dir,
185 overwrite_if_exists=False):
186 """
187 Renders the name of a directory, creates the directory, and
188 returns its path.
189 """
190
191 name_tmpl = Template(dirname)
192 rendered_dirname = name_tmpl.render(**context)
193 logging.debug('Rendered dir {0} must exist in output_dir {1}'.format(
194 rendered_dirname,
195 output_dir
196 ))
197 dir_to_create = os.path.normpath(
198 os.path.join(output_dir, rendered_dirname)
199 )
200
201 output_dir_exists = os.path.exists(dir_to_create)
202
203 if overwrite_if_exists:
204 if output_dir_exists:
205 logging.debug('Output directory {} already exists,'
206 'overwriting it'.format(dir_to_create))
207 else:
208 if output_dir_exists:
209 msg = 'Error: "{}" directory already exists'.format(dir_to_create)
210 raise OutputDirExistsException(msg)
211
212 make_sure_path_exists(dir_to_create)
213 return dir_to_create
214
215
216 def ensure_dir_is_templated(dirname):
217 """
218 Ensures that dirname is a templated directory name.
219 """
220 if '{{' in dirname and '}}' in dirname:
221 return True
222 else:
223 raise NonTemplatedInputDirException
224
225
226 def _run_hook_from_repo_dir(repo_dir, hook_name, project_dir, context):
227 """
228 Run hook from repo directory, cleaning up project directory if hook fails
229 """
230 with work_in(repo_dir):
231 try:
232 run_hook(hook_name, project_dir, context)
233 except FailedHookException:
234 rmtree(project_dir)
235 logging.error("Stopping generation because %s"
236 " hook script didn't exit sucessfully" % hook_name)
237 raise
238
239
240 def generate_files(repo_dir, context=None, output_dir='.',
241 overwrite_if_exists=False):
242 """
243 Renders the templates and saves them to files.
244
245 :param repo_dir: Project template input directory.
246 :param context: Dict for populating the template's variables.
247 :param output_dir: Where to output the generated project dir into.
248 :param overwrite_if_exists: Overwrite the contents of the output directory
249 if it exists
250 """
251
252 template_dir = find_template(repo_dir)
253 logging.debug('Generating project from {0}...'.format(template_dir))
254 context = context or {}
255
256 unrendered_dir = os.path.split(template_dir)[1]
257 ensure_dir_is_templated(unrendered_dir)
258 project_dir = render_and_create_dir(unrendered_dir,
259 context,
260 output_dir,
261 overwrite_if_exists)
262
263 # We want the Jinja path and the OS paths to match. Consequently, we'll:
264 # + CD to the template folder
265 # + Set Jinja's path to '.'
266 #
267 # In order to build our files to the correct folder(s), we'll use an
268 # absolute path for the target folder (project_dir)
269
270 project_dir = os.path.abspath(project_dir)
271 logging.debug('project_dir is {0}'.format(project_dir))
272
273 _run_hook_from_repo_dir(repo_dir, 'pre_gen_project', project_dir, context)
274
275 with work_in(template_dir):
276 env = Environment(keep_trailing_newline=True)
277 env.loader = FileSystemLoader('.')
278
279 for root, dirs, files in os.walk('.'):
280 # We must separate the two types of dirs into different lists.
281 # The reason is that we don't want ``os.walk`` to go through the
282 # unrendered directories, since they will just be copied.
283 copy_dirs = []
284 render_dirs = []
285
286 for d in dirs:
287 d_ = os.path.normpath(os.path.join(root, d))
288 # We check the full path, because that's how it can be
289 # specified in the ``_copy_without_render`` setting, but
290 # we store just the dir name
291 if copy_without_render(d_, context):
292 copy_dirs.append(d)
293 else:
294 render_dirs.append(d)
295
296 for copy_dir in copy_dirs:
297 indir = os.path.normpath(os.path.join(root, copy_dir))
298 outdir = os.path.normpath(os.path.join(project_dir, indir))
299 logging.debug(
300 'Copying dir {0} to {1} without rendering'
301 ''.format(indir, outdir)
302 )
303 shutil.copytree(indir, outdir)
304
305 # We mutate ``dirs``, because we only want to go through these dirs
306 # recursively
307 dirs[:] = render_dirs
308 for d in dirs:
309 unrendered_dir = os.path.join(project_dir, root, d)
310 render_and_create_dir(unrendered_dir, context, output_dir,
311 overwrite_if_exists)
312
313 for f in files:
314 infile = os.path.normpath(os.path.join(root, f))
315 if copy_without_render(infile, context):
316 outfile_tmpl = Template(infile)
317 outfile_rendered = outfile_tmpl.render(**context)
318 outfile = os.path.join(project_dir, outfile_rendered)
319 logging.debug(
320 'Copying file {0} to {1} without rendering'
321 ''.format(infile, outfile)
322 )
323 shutil.copyfile(infile, outfile)
324 shutil.copymode(infile, outfile)
325 continue
326 logging.debug('f is {0}'.format(f))
327 generate_file(project_dir, infile, context, env)
328
329 _run_hook_from_repo_dir(repo_dir, 'post_gen_project', project_dir, context)
330
331 return project_dir
332
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/cookiecutter/generate.py b/cookiecutter/generate.py
--- a/cookiecutter/generate.py
+++ b/cookiecutter/generate.py
@@ -87,9 +87,9 @@
context = {}
- file_handle = open(context_file)
try:
- obj = json.load(file_handle, object_pairs_hook=OrderedDict)
+ with open(context_file) as file_handle:
+ obj = json.load(file_handle, object_pairs_hook=OrderedDict)
except ValueError as e:
# JSON decoding error. Let's throw a new exception that is more
# friendly for the developer or user.
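A quick standalone check of the patched pattern (the temp directory and JSON contents here are made up for the demo): because the handle is released when the `with` block exits, the follow-up `rmtree` cannot hit the `WindowsError 32` from the traceback above:

```python
import json
import os
import shutil
import tempfile
from collections import OrderedDict

tmp = tempfile.mkdtemp()
path = os.path.join(tmp, 'cookiecutter.json')
with open(path, 'w') as fh:
    json.dump({'project_name': 'demo'}, fh)

# Patched pattern: the handle is closed as soon as the block exits.
with open(path) as file_handle:
    obj = json.load(file_handle, object_pairs_hook=OrderedDict)

shutil.rmtree(tmp)  # no open handle left, so this also works on Windows
print(obj)          # OrderedDict([('project_name', 'demo')])
```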
| {"golden_diff": "diff --git a/cookiecutter/generate.py b/cookiecutter/generate.py\n--- a/cookiecutter/generate.py\n+++ b/cookiecutter/generate.py\n@@ -87,9 +87,9 @@\n \n context = {}\n \n- file_handle = open(context_file)\n try:\n- obj = json.load(file_handle, object_pairs_hook=OrderedDict)\n+ with open(context_file) as file_handle:\n+ obj = json.load(file_handle, object_pairs_hook=OrderedDict)\n except ValueError as e:\n # JSON decoding error. Let's throw a new exception that is more\n # friendly for the developer or user.\n", "issue": "file_handle.close() is never called -- and it causes a bug!!\nIn https://github.com/audreyr/cookiecutter/blob/master/cookiecutter/generate.py#L90, `file_handle.close()` is never called, and there's no need for it to remain open.\n\nThis is the first time in over 10 years of python programming that I've run into an actual issue with file handles left open, so I felt that the use of two exclamation points in the subject was warranted. I'm removing a temporary template after an unrelated error, and...\n\n```\n File \"D:\\anaconda32\\lib\\shutil.py\", line 250, in rmtree\n os.remove(fullname)\nWindowsError: [Error 32] The process cannot access the file because it is being\nused by another process: '.\\\\tmpp2duu1\\\\cookiecutter.json'\n```\n\nThis change in generate.py:90 helps python not stumble on this particular issue:\n\n```\ntry:\n with open(context_file) as file_handle:\n obj = json.load(file_handle, object_pairs_hook=OrderedDict)\nexcept ValueError as e:\n # ...\n```\n\n", "before_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\ncookiecutter.generate\n---------------------\n\nFunctions for generating a project from a project template.\n\"\"\"\nfrom __future__ import unicode_literals\nfrom collections import OrderedDict\nimport fnmatch\nimport io\nimport json\nimport logging\nimport os\nimport shutil\n\nfrom jinja2 import FileSystemLoader, Template\nfrom jinja2.environment import Environment\nfrom jinja2.exceptions import TemplateSyntaxError\nfrom binaryornot.check import is_binary\n\nfrom .exceptions import (\n NonTemplatedInputDirException,\n ContextDecodingException,\n FailedHookException,\n OutputDirExistsException\n)\nfrom .find import find_template\nfrom .utils import make_sure_path_exists, work_in, rmtree\nfrom .hooks import run_hook\n\n\ndef copy_without_render(path, context):\n \"\"\"\n Returns True if `path` matches some pattern in the\n `_copy_without_render` context setting.\n\n :param path: A file-system path referring to a file or dir that\n should be rendered or just copied.\n :param context: cookiecutter context.\n \"\"\"\n try:\n for dont_render in context['cookiecutter']['_copy_without_render']:\n if fnmatch.fnmatch(path, dont_render):\n return True\n except KeyError:\n return False\n\n return False\n\n\ndef apply_overwrites_to_context(context, overwrite_context):\n \"\"\"Modify the given context in place based on the overwrite_context.\"\"\"\n for variable, overwrite in overwrite_context.items():\n if variable not in context:\n # Do not include variables which are not used in the template\n continue\n\n context_value = context[variable]\n\n if isinstance(context_value, list):\n # We are dealing with a choice variable\n if overwrite in context_value:\n # This overwrite is actually valid for the given context\n # Let's set it as default (by definition first item in list)\n # see ``cookiecutter.prompt.prompt_choice_for_config``\n context_value.remove(overwrite)\n context_value.insert(0, overwrite)\n 
else:\n # Simply overwrite the value for this variable\n context[variable] = overwrite\n\n\ndef generate_context(context_file='cookiecutter.json', default_context=None,\n extra_context=None):\n \"\"\"\n Generates the context for a Cookiecutter project template.\n Loads the JSON file as a Python object, with key being the JSON filename.\n\n :param context_file: JSON file containing key/value pairs for populating\n the cookiecutter's variables.\n :param default_context: Dictionary containing config to take into account.\n :param extra_context: Dictionary containing configuration overrides\n \"\"\"\n\n context = {}\n\n file_handle = open(context_file)\n try:\n obj = json.load(file_handle, object_pairs_hook=OrderedDict)\n except ValueError as e:\n # JSON decoding error. Let's throw a new exception that is more\n # friendly for the developer or user.\n full_fpath = os.path.abspath(context_file)\n json_exc_message = str(e)\n our_exc_message = (\n 'JSON decoding error while loading \"{0}\". Decoding'\n ' error details: \"{1}\"'.format(full_fpath, json_exc_message))\n raise ContextDecodingException(our_exc_message)\n\n # Add the Python object to the context dictionary\n file_name = os.path.split(context_file)[1]\n file_stem = file_name.split('.')[0]\n context[file_stem] = obj\n\n # Overwrite context variable defaults with the default context from the\n # user's global config, if available\n if default_context:\n apply_overwrites_to_context(obj, default_context)\n if extra_context:\n apply_overwrites_to_context(obj, extra_context)\n\n logging.debug('Context generated is {0}'.format(context))\n return context\n\n\ndef generate_file(project_dir, infile, context, env):\n \"\"\"\n 1. Render the filename of infile as the name of outfile.\n 2. Deal with infile appropriately:\n\n a. If infile is a binary file, copy it over without rendering.\n b. If infile is a text file, render its contents and write the\n rendered infile to outfile.\n\n Precondition:\n\n When calling `generate_file()`, the root template dir must be the\n current working directory. Using `utils.work_in()` is the recommended\n way to perform this directory change.\n\n :param project_dir: Absolute path to the resulting generated project.\n :param infile: Input file to generate the file from. Relative to the root\n template dir.\n :param context: Dict for populating the cookiecutter's variables.\n :param env: Jinja2 template execution environment.\n \"\"\"\n\n logging.debug('Generating file {0}'.format(infile))\n\n # Render the path to the output file (not including the root project dir)\n outfile_tmpl = Template(infile)\n\n outfile = os.path.join(project_dir, outfile_tmpl.render(**context))\n file_name_is_empty = os.path.isdir(outfile)\n if file_name_is_empty:\n logging.debug('The resulting file name is empty: {0}'.format(outfile))\n return\n\n logging.debug('outfile is {0}'.format(outfile))\n\n # Just copy over binary files. 
Don't render.\n logging.debug(\"Check {0} to see if it's a binary\".format(infile))\n if is_binary(infile):\n logging.debug('Copying binary {0} to {1} without rendering'\n .format(infile, outfile))\n shutil.copyfile(infile, outfile)\n else:\n # Force fwd slashes on Windows for get_template\n # This is a by-design Jinja issue\n infile_fwd_slashes = infile.replace(os.path.sep, '/')\n\n # Render the file\n try:\n tmpl = env.get_template(infile_fwd_slashes)\n except TemplateSyntaxError as exception:\n # Disable translated so that printed exception contains verbose\n # information about syntax error location\n exception.translated = False\n raise\n rendered_file = tmpl.render(**context)\n\n logging.debug('Writing {0}'.format(outfile))\n\n with io.open(outfile, 'w', encoding='utf-8') as fh:\n fh.write(rendered_file)\n\n # Apply file permissions to output file\n shutil.copymode(infile, outfile)\n\n\ndef render_and_create_dir(dirname, context, output_dir,\n overwrite_if_exists=False):\n \"\"\"\n Renders the name of a directory, creates the directory, and\n returns its path.\n \"\"\"\n\n name_tmpl = Template(dirname)\n rendered_dirname = name_tmpl.render(**context)\n logging.debug('Rendered dir {0} must exist in output_dir {1}'.format(\n rendered_dirname,\n output_dir\n ))\n dir_to_create = os.path.normpath(\n os.path.join(output_dir, rendered_dirname)\n )\n\n output_dir_exists = os.path.exists(dir_to_create)\n\n if overwrite_if_exists:\n if output_dir_exists:\n logging.debug('Output directory {} already exists,'\n 'overwriting it'.format(dir_to_create))\n else:\n if output_dir_exists:\n msg = 'Error: \"{}\" directory already exists'.format(dir_to_create)\n raise OutputDirExistsException(msg)\n\n make_sure_path_exists(dir_to_create)\n return dir_to_create\n\n\ndef ensure_dir_is_templated(dirname):\n \"\"\"\n Ensures that dirname is a templated directory name.\n \"\"\"\n if '{{' in dirname and '}}' in dirname:\n return True\n else:\n raise NonTemplatedInputDirException\n\n\ndef _run_hook_from_repo_dir(repo_dir, hook_name, project_dir, context):\n \"\"\"\n Run hook from repo directory, cleaning up project directory if hook fails\n \"\"\"\n with work_in(repo_dir):\n try:\n run_hook(hook_name, project_dir, context)\n except FailedHookException:\n rmtree(project_dir)\n logging.error(\"Stopping generation because %s\"\n \" hook script didn't exit sucessfully\" % hook_name)\n raise\n\n\ndef generate_files(repo_dir, context=None, output_dir='.',\n overwrite_if_exists=False):\n \"\"\"\n Renders the templates and saves them to files.\n\n :param repo_dir: Project template input directory.\n :param context: Dict for populating the template's variables.\n :param output_dir: Where to output the generated project dir into.\n :param overwrite_if_exists: Overwrite the contents of the output directory\n if it exists\n \"\"\"\n\n template_dir = find_template(repo_dir)\n logging.debug('Generating project from {0}...'.format(template_dir))\n context = context or {}\n\n unrendered_dir = os.path.split(template_dir)[1]\n ensure_dir_is_templated(unrendered_dir)\n project_dir = render_and_create_dir(unrendered_dir,\n context,\n output_dir,\n overwrite_if_exists)\n\n # We want the Jinja path and the OS paths to match. 
Consequently, we'll:\n # + CD to the template folder\n # + Set Jinja's path to '.'\n #\n # In order to build our files to the correct folder(s), we'll use an\n # absolute path for the target folder (project_dir)\n\n project_dir = os.path.abspath(project_dir)\n logging.debug('project_dir is {0}'.format(project_dir))\n\n _run_hook_from_repo_dir(repo_dir, 'pre_gen_project', project_dir, context)\n\n with work_in(template_dir):\n env = Environment(keep_trailing_newline=True)\n env.loader = FileSystemLoader('.')\n\n for root, dirs, files in os.walk('.'):\n # We must separate the two types of dirs into different lists.\n # The reason is that we don't want ``os.walk`` to go through the\n # unrendered directories, since they will just be copied.\n copy_dirs = []\n render_dirs = []\n\n for d in dirs:\n d_ = os.path.normpath(os.path.join(root, d))\n # We check the full path, because that's how it can be\n # specified in the ``_copy_without_render`` setting, but\n # we store just the dir name\n if copy_without_render(d_, context):\n copy_dirs.append(d)\n else:\n render_dirs.append(d)\n\n for copy_dir in copy_dirs:\n indir = os.path.normpath(os.path.join(root, copy_dir))\n outdir = os.path.normpath(os.path.join(project_dir, indir))\n logging.debug(\n 'Copying dir {0} to {1} without rendering'\n ''.format(indir, outdir)\n )\n shutil.copytree(indir, outdir)\n\n # We mutate ``dirs``, because we only want to go through these dirs\n # recursively\n dirs[:] = render_dirs\n for d in dirs:\n unrendered_dir = os.path.join(project_dir, root, d)\n render_and_create_dir(unrendered_dir, context, output_dir,\n overwrite_if_exists)\n\n for f in files:\n infile = os.path.normpath(os.path.join(root, f))\n if copy_without_render(infile, context):\n outfile_tmpl = Template(infile)\n outfile_rendered = outfile_tmpl.render(**context)\n outfile = os.path.join(project_dir, outfile_rendered)\n logging.debug(\n 'Copying file {0} to {1} without rendering'\n ''.format(infile, outfile)\n )\n shutil.copyfile(infile, outfile)\n shutil.copymode(infile, outfile)\n continue\n logging.debug('f is {0}'.format(f))\n generate_file(project_dir, infile, context, env)\n\n _run_hook_from_repo_dir(repo_dir, 'post_gen_project', project_dir, context)\n\n return project_dir\n", "path": "cookiecutter/generate.py"}], "after_files": [{"content": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n\"\"\"\ncookiecutter.generate\n---------------------\n\nFunctions for generating a project from a project template.\n\"\"\"\nfrom __future__ import unicode_literals\nfrom collections import OrderedDict\nimport fnmatch\nimport io\nimport json\nimport logging\nimport os\nimport shutil\n\nfrom jinja2 import FileSystemLoader, Template\nfrom jinja2.environment import Environment\nfrom jinja2.exceptions import TemplateSyntaxError\nfrom binaryornot.check import is_binary\n\nfrom .exceptions import (\n NonTemplatedInputDirException,\n ContextDecodingException,\n FailedHookException,\n OutputDirExistsException\n)\nfrom .find import find_template\nfrom .utils import make_sure_path_exists, work_in, rmtree\nfrom .hooks import run_hook\n\n\ndef copy_without_render(path, context):\n \"\"\"\n Returns True if `path` matches some pattern in the\n `_copy_without_render` context setting.\n\n :param path: A file-system path referring to a file or dir that\n should be rendered or just copied.\n :param context: cookiecutter context.\n \"\"\"\n try:\n for dont_render in context['cookiecutter']['_copy_without_render']:\n if fnmatch.fnmatch(path, dont_render):\n return True\n 
except KeyError:\n return False\n\n return False\n\n\ndef apply_overwrites_to_context(context, overwrite_context):\n \"\"\"Modify the given context in place based on the overwrite_context.\"\"\"\n for variable, overwrite in overwrite_context.items():\n if variable not in context:\n # Do not include variables which are not used in the template\n continue\n\n context_value = context[variable]\n\n if isinstance(context_value, list):\n # We are dealing with a choice variable\n if overwrite in context_value:\n # This overwrite is actually valid for the given context\n # Let's set it as default (by definition first item in list)\n # see ``cookiecutter.prompt.prompt_choice_for_config``\n context_value.remove(overwrite)\n context_value.insert(0, overwrite)\n else:\n # Simply overwrite the value for this variable\n context[variable] = overwrite\n\n\ndef generate_context(context_file='cookiecutter.json', default_context=None,\n extra_context=None):\n \"\"\"\n Generates the context for a Cookiecutter project template.\n Loads the JSON file as a Python object, with key being the JSON filename.\n\n :param context_file: JSON file containing key/value pairs for populating\n the cookiecutter's variables.\n :param default_context: Dictionary containing config to take into account.\n :param extra_context: Dictionary containing configuration overrides\n \"\"\"\n\n context = {}\n\n try:\n with open(context_file) as file_handle:\n obj = json.load(file_handle, object_pairs_hook=OrderedDict)\n except ValueError as e:\n # JSON decoding error. Let's throw a new exception that is more\n # friendly for the developer or user.\n full_fpath = os.path.abspath(context_file)\n json_exc_message = str(e)\n our_exc_message = (\n 'JSON decoding error while loading \"{0}\". Decoding'\n ' error details: \"{1}\"'.format(full_fpath, json_exc_message))\n raise ContextDecodingException(our_exc_message)\n\n # Add the Python object to the context dictionary\n file_name = os.path.split(context_file)[1]\n file_stem = file_name.split('.')[0]\n context[file_stem] = obj\n\n # Overwrite context variable defaults with the default context from the\n # user's global config, if available\n if default_context:\n apply_overwrites_to_context(obj, default_context)\n if extra_context:\n apply_overwrites_to_context(obj, extra_context)\n\n logging.debug('Context generated is {0}'.format(context))\n return context\n\n\ndef generate_file(project_dir, infile, context, env):\n \"\"\"\n 1. Render the filename of infile as the name of outfile.\n 2. Deal with infile appropriately:\n\n a. If infile is a binary file, copy it over without rendering.\n b. If infile is a text file, render its contents and write the\n rendered infile to outfile.\n\n Precondition:\n\n When calling `generate_file()`, the root template dir must be the\n current working directory. Using `utils.work_in()` is the recommended\n way to perform this directory change.\n\n :param project_dir: Absolute path to the resulting generated project.\n :param infile: Input file to generate the file from. 
Relative to the root\n template dir.\n :param context: Dict for populating the cookiecutter's variables.\n :param env: Jinja2 template execution environment.\n \"\"\"\n\n logging.debug('Generating file {0}'.format(infile))\n\n # Render the path to the output file (not including the root project dir)\n outfile_tmpl = Template(infile)\n\n outfile = os.path.join(project_dir, outfile_tmpl.render(**context))\n file_name_is_empty = os.path.isdir(outfile)\n if file_name_is_empty:\n logging.debug('The resulting file name is empty: {0}'.format(outfile))\n return\n\n logging.debug('outfile is {0}'.format(outfile))\n\n # Just copy over binary files. Don't render.\n logging.debug(\"Check {0} to see if it's a binary\".format(infile))\n if is_binary(infile):\n logging.debug('Copying binary {0} to {1} without rendering'\n .format(infile, outfile))\n shutil.copyfile(infile, outfile)\n else:\n # Force fwd slashes on Windows for get_template\n # This is a by-design Jinja issue\n infile_fwd_slashes = infile.replace(os.path.sep, '/')\n\n # Render the file\n try:\n tmpl = env.get_template(infile_fwd_slashes)\n except TemplateSyntaxError as exception:\n # Disable translated so that printed exception contains verbose\n # information about syntax error location\n exception.translated = False\n raise\n rendered_file = tmpl.render(**context)\n\n logging.debug('Writing {0}'.format(outfile))\n\n with io.open(outfile, 'w', encoding='utf-8') as fh:\n fh.write(rendered_file)\n\n # Apply file permissions to output file\n shutil.copymode(infile, outfile)\n\n\ndef render_and_create_dir(dirname, context, output_dir,\n overwrite_if_exists=False):\n \"\"\"\n Renders the name of a directory, creates the directory, and\n returns its path.\n \"\"\"\n\n name_tmpl = Template(dirname)\n rendered_dirname = name_tmpl.render(**context)\n logging.debug('Rendered dir {0} must exist in output_dir {1}'.format(\n rendered_dirname,\n output_dir\n ))\n dir_to_create = os.path.normpath(\n os.path.join(output_dir, rendered_dirname)\n )\n\n output_dir_exists = os.path.exists(dir_to_create)\n\n if overwrite_if_exists:\n if output_dir_exists:\n logging.debug('Output directory {} already exists,'\n 'overwriting it'.format(dir_to_create))\n else:\n if output_dir_exists:\n msg = 'Error: \"{}\" directory already exists'.format(dir_to_create)\n raise OutputDirExistsException(msg)\n\n make_sure_path_exists(dir_to_create)\n return dir_to_create\n\n\ndef ensure_dir_is_templated(dirname):\n \"\"\"\n Ensures that dirname is a templated directory name.\n \"\"\"\n if '{{' in dirname and '}}' in dirname:\n return True\n else:\n raise NonTemplatedInputDirException\n\n\ndef _run_hook_from_repo_dir(repo_dir, hook_name, project_dir, context):\n \"\"\"\n Run hook from repo directory, cleaning up project directory if hook fails\n \"\"\"\n with work_in(repo_dir):\n try:\n run_hook(hook_name, project_dir, context)\n except FailedHookException:\n rmtree(project_dir)\n logging.error(\"Stopping generation because %s\"\n \" hook script didn't exit sucessfully\" % hook_name)\n raise\n\n\ndef generate_files(repo_dir, context=None, output_dir='.',\n overwrite_if_exists=False):\n \"\"\"\n Renders the templates and saves them to files.\n\n :param repo_dir: Project template input directory.\n :param context: Dict for populating the template's variables.\n :param output_dir: Where to output the generated project dir into.\n :param overwrite_if_exists: Overwrite the contents of the output directory\n if it exists\n \"\"\"\n\n template_dir = find_template(repo_dir)\n 
logging.debug('Generating project from {0}...'.format(template_dir))\n context = context or {}\n\n unrendered_dir = os.path.split(template_dir)[1]\n ensure_dir_is_templated(unrendered_dir)\n project_dir = render_and_create_dir(unrendered_dir,\n context,\n output_dir,\n overwrite_if_exists)\n\n # We want the Jinja path and the OS paths to match. Consequently, we'll:\n # + CD to the template folder\n # + Set Jinja's path to '.'\n #\n # In order to build our files to the correct folder(s), we'll use an\n # absolute path for the target folder (project_dir)\n\n project_dir = os.path.abspath(project_dir)\n logging.debug('project_dir is {0}'.format(project_dir))\n\n _run_hook_from_repo_dir(repo_dir, 'pre_gen_project', project_dir, context)\n\n with work_in(template_dir):\n env = Environment(keep_trailing_newline=True)\n env.loader = FileSystemLoader('.')\n\n for root, dirs, files in os.walk('.'):\n # We must separate the two types of dirs into different lists.\n # The reason is that we don't want ``os.walk`` to go through the\n # unrendered directories, since they will just be copied.\n copy_dirs = []\n render_dirs = []\n\n for d in dirs:\n d_ = os.path.normpath(os.path.join(root, d))\n # We check the full path, because that's how it can be\n # specified in the ``_copy_without_render`` setting, but\n # we store just the dir name\n if copy_without_render(d_, context):\n copy_dirs.append(d)\n else:\n render_dirs.append(d)\n\n for copy_dir in copy_dirs:\n indir = os.path.normpath(os.path.join(root, copy_dir))\n outdir = os.path.normpath(os.path.join(project_dir, indir))\n logging.debug(\n 'Copying dir {0} to {1} without rendering'\n ''.format(indir, outdir)\n )\n shutil.copytree(indir, outdir)\n\n # We mutate ``dirs``, because we only want to go through these dirs\n # recursively\n dirs[:] = render_dirs\n for d in dirs:\n unrendered_dir = os.path.join(project_dir, root, d)\n render_and_create_dir(unrendered_dir, context, output_dir,\n overwrite_if_exists)\n\n for f in files:\n infile = os.path.normpath(os.path.join(root, f))\n if copy_without_render(infile, context):\n outfile_tmpl = Template(infile)\n outfile_rendered = outfile_tmpl.render(**context)\n outfile = os.path.join(project_dir, outfile_rendered)\n logging.debug(\n 'Copying file {0} to {1} without rendering'\n ''.format(infile, outfile)\n )\n shutil.copyfile(infile, outfile)\n shutil.copymode(infile, outfile)\n continue\n logging.debug('f is {0}'.format(f))\n generate_file(project_dir, infile, context, env)\n\n _run_hook_from_repo_dir(repo_dir, 'post_gen_project', project_dir, context)\n\n return project_dir\n", "path": "cookiecutter/generate.py"}]} |
gh_patches_debug_1619 | rasdani/github-patches | git_diff | openai__gym-1730 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Sampling Bug
Gym Version: 0.15.3
Issue: Box samples numbers above the `high` parameter.
```
from gym.spaces import Box
observation_space = Box(low=-3, high=-1, shape=(9,), dtype='int')
print(observation_space.sample())
>> [ 0 -2 0 -2 0 -1 0 -2 0]
```
The current implementation samples float numbers from a uniform distribution over [`low`, `high`] and then converts the resulting samples to the desired `dtype`. This runs into the problem of sampling the `low` parameter very rarely (and not uniformly) when `dtype` is `int`: converting the floats back to int truncates toward zero, which behaves like a ceil for negative numbers. In the above example, -3 is almost never sampled, because most of the low sampled floats, like -2.85 or -2.9, get converted to -2.
https://github.com/openai/gym/blob/0cd9266d986d470ed9c0dd87a41cd680b65cfe1c/gym/spaces/box.py#L93-L97
--- END ISSUE ---
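A standalone NumPy reproduction of the truncation effect described above, together with the floor-based correction (this mirrors the sampling pattern, not gym's actual code; the sample size is arbitrary):

```python
import numpy as np

rng = np.random.RandomState(0)
low, high = -3, -1

# Buggy pattern: uniform floats in [low, high + 1), then a cast that
# truncates toward zero -- floats in (-1, 0) become 0, which is above `high`.
floats = rng.uniform(low, high + 1, size=100000)
print(floats.astype('int64').max())            # 0
print((floats.astype('int64') == low).mean())  # ~0.0 -- `low` is never drawn

# Fix: floor before casting, so samples land uniformly in {low, ..., high}.
fixed = np.floor(floats).astype('int64')
print(fixed.min(), fixed.max())               # -3 -1
print(np.bincount(fixed - low) / fixed.size)  # ~[1/3, 1/3, 1/3]
```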
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `gym/spaces/box.py`
Content:
```
1 import numpy as np
2
3 from .space import Space
4
5
6 class Box(Space):
7 """
8 A (possibly unbounded) box in R^n. Specifically, a Box represents the
9 Cartesian product of n closed intervals. Each interval has the form of one
10 of [a, b], (-oo, b], [a, oo), or (-oo, oo).
11
12 There are two common use cases:
13
14 * Identical bound for each dimension::
15 >>> Box(low=-1.0, high=2.0, shape=(3, 4), dtype=np.float32)
16 Box(3, 4)
17
18 * Independent bound for each dimension::
19 >>> Box(low=np.array([-1.0, -2.0]), high=np.array([2.0, 4.0]), dtype=np.float32)
20 Box(2,)
21
22 """
23 def __init__(self, low, high, shape=None, dtype=np.float32):
24 assert dtype is not None, 'dtype must be explicitly provided. '
25 self.dtype = np.dtype(dtype)
26
27 if shape is None:
28 assert low.shape == high.shape, 'box dimension mismatch. '
29 self.shape = low.shape
30 self.low = low
31 self.high = high
32 else:
33 assert np.isscalar(low) and np.isscalar(high), 'box requires scalar bounds. '
34 self.shape = tuple(shape)
35 self.low = np.full(self.shape, low)
36 self.high = np.full(self.shape, high)
37
38 self.low = self.low.astype(self.dtype)
39 self.high = self.high.astype(self.dtype)
40
41 # Boolean arrays which indicate the interval type for each coordinate
42 self.bounded_below = -np.inf < self.low
43 self.bounded_above = np.inf > self.high
44
45 super(Box, self).__init__(self.shape, self.dtype)
46
47 def is_bounded(self, manner="both"):
48 below = np.all(self.bounded_below)
49 above = np.all(self.bounded_above)
50 if manner == "both":
51 return below and above
52 elif manner == "below":
53 return below
54 elif manner == "above":
55 return above
56 else:
57 raise ValueError("manner is not in {'below', 'above', 'both'}")
58
59 def sample(self):
60 """
61 Generates a single random sample inside of the Box.
62
63 In creating a sample of the box, each coordinate is sampled according to
64 the form of the interval:
65
66 * [a, b] : uniform distribution
67 * [a, oo) : shifted exponential distribution
68 * (-oo, b] : shifted negative exponential distribution
69 * (-oo, oo) : normal distribution
70 """
71 high = self.high if self.dtype.kind == 'f' \
72 else self.high.astype('int64') + 1
73 sample = np.empty(self.shape)
74
75 # Masking arrays which classify the coordinates according to interval
76 # type
77 unbounded = ~self.bounded_below & ~self.bounded_above
78 upp_bounded = ~self.bounded_below & self.bounded_above
79 low_bounded = self.bounded_below & ~self.bounded_above
80 bounded = self.bounded_below & self.bounded_above
81
82
83 # Vectorized sampling by interval type
84 sample[unbounded] = self.np_random.normal(
85 size=unbounded[unbounded].shape)
86
87 sample[low_bounded] = self.np_random.exponential(
88 size=low_bounded[low_bounded].shape) + self.low[low_bounded]
89
90 sample[upp_bounded] = -self.np_random.exponential(
91 size=upp_bounded[upp_bounded].shape) - self.high[upp_bounded]
92
93 sample[bounded] = self.np_random.uniform(low=self.low[bounded],
94 high=high[bounded],
95 size=bounded[bounded].shape)
96
97 return sample.astype(self.dtype)
98
99 def contains(self, x):
100 if isinstance(x, list):
101 x = np.array(x) # Promote list to array for contains check
102 return x.shape == self.shape and np.all(x >= self.low) and np.all(x <= self.high)
103
104 def to_jsonable(self, sample_n):
105 return np.array(sample_n).tolist()
106
107 def from_jsonable(self, sample_n):
108 return [np.asarray(sample) for sample in sample_n]
109
110 def __repr__(self):
111 return "Box" + str(self.shape)
112
113 def __eq__(self, other):
114 return isinstance(other, Box) and (self.shape == other.shape) and np.allclose(self.low, other.low) and np.allclose(self.high, other.high)
115
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/gym/spaces/box.py b/gym/spaces/box.py
--- a/gym/spaces/box.py
+++ b/gym/spaces/box.py
@@ -93,6 +93,8 @@
sample[bounded] = self.np_random.uniform(low=self.low[bounded],
high=high[bounded],
size=bounded[bounded].shape)
+ if self.dtype.kind == 'i':
+ sample = np.floor(sample)
return sample.astype(self.dtype)
| {"golden_diff": "diff --git a/gym/spaces/box.py b/gym/spaces/box.py\n--- a/gym/spaces/box.py\n+++ b/gym/spaces/box.py\n@@ -93,6 +93,8 @@\n sample[bounded] = self.np_random.uniform(low=self.low[bounded], \n high=high[bounded],\n size=bounded[bounded].shape)\n+ if self.dtype.kind == 'i':\n+ sample = np.floor(sample)\n \n return sample.astype(self.dtype)\n", "issue": "Sampling Bug\nGym Version: 0.15.3\r\nissue: Box samples numbers above the `high` parameter.\r\n\r\n```\r\nfrom gym.spaces import Box\r\nobservation_space = Box(low=-3, high=-1, shape=(9,), dtype='int')\r\nprint(observation_space.sample())\r\n>> [ 0 -2 0 -2 0 -1 0 -2 0]\r\n```\r\nThe current implementation samples float numbers from uniform distribution of [`low`, `high`] and then converts the resulting samples to desired `dtype`. This runs into the problem of sampling `low` parameter very rarely(and not uniformly) when `dtype` is `int`(as we are converting the floats back to int which results in ceil operation in case of negative numbers) i.e in the above example -3 is almost never sampled as most of the low sampled floats like -2.85, -2.9 get converted to -2.\r\nhttps://github.com/openai/gym/blob/0cd9266d986d470ed9c0dd87a41cd680b65cfe1c/gym/spaces/box.py#L93-L97\r\n\n", "before_files": [{"content": "import numpy as np\n\nfrom .space import Space\n\n\nclass Box(Space):\n \"\"\"\n A (possibly unbounded) box in R^n. Specifically, a Box represents the\n Cartesian product of n closed intervals. Each interval has the form of one\n of [a, b], (-oo, b], [a, oo), or (-oo, oo).\n \n There are two common use cases:\n \n * Identical bound for each dimension::\n >>> Box(low=-1.0, high=2.0, shape=(3, 4), dtype=np.float32)\n Box(3, 4)\n \n * Independent bound for each dimension::\n >>> Box(low=np.array([-1.0, -2.0]), high=np.array([2.0, 4.0]), dtype=np.float32)\n Box(2,)\n\n \"\"\"\n def __init__(self, low, high, shape=None, dtype=np.float32):\n assert dtype is not None, 'dtype must be explicitly provided. '\n self.dtype = np.dtype(dtype)\n\n if shape is None:\n assert low.shape == high.shape, 'box dimension mismatch. '\n self.shape = low.shape\n self.low = low\n self.high = high\n else:\n assert np.isscalar(low) and np.isscalar(high), 'box requires scalar bounds. '\n self.shape = tuple(shape)\n self.low = np.full(self.shape, low)\n self.high = np.full(self.shape, high)\n\n self.low = self.low.astype(self.dtype)\n self.high = self.high.astype(self.dtype)\n\n # Boolean arrays which indicate the interval type for each coordinate\n self.bounded_below = -np.inf < self.low\n self.bounded_above = np.inf > self.high\n\n super(Box, self).__init__(self.shape, self.dtype)\n\n def is_bounded(self, manner=\"both\"):\n below = np.all(self.bounded_below)\n above = np.all(self.bounded_above)\n if manner == \"both\":\n return below and above\n elif manner == \"below\":\n return below\n elif manner == \"above\":\n return above\n else:\n raise ValueError(\"manner is not in {'below', 'above', 'both'}\")\n\n def sample(self):\n \"\"\"\n Generates a single random sample inside of the Box. 
\n\n In creating a sample of the box, each coordinate is sampled according to\n the form of the interval:\n \n * [a, b] : uniform distribution \n * [a, oo) : shifted exponential distribution\n * (-oo, b] : shifted negative exponential distribution\n * (-oo, oo) : normal distribution\n \"\"\"\n high = self.high if self.dtype.kind == 'f' \\\n else self.high.astype('int64') + 1\n sample = np.empty(self.shape)\n\n # Masking arrays which classify the coordinates according to interval\n # type\n unbounded = ~self.bounded_below & ~self.bounded_above\n upp_bounded = ~self.bounded_below & self.bounded_above\n low_bounded = self.bounded_below & ~self.bounded_above\n bounded = self.bounded_below & self.bounded_above\n \n\n # Vectorized sampling by interval type\n sample[unbounded] = self.np_random.normal(\n size=unbounded[unbounded].shape)\n\n sample[low_bounded] = self.np_random.exponential(\n size=low_bounded[low_bounded].shape) + self.low[low_bounded]\n \n sample[upp_bounded] = -self.np_random.exponential(\n size=upp_bounded[upp_bounded].shape) - self.high[upp_bounded]\n \n sample[bounded] = self.np_random.uniform(low=self.low[bounded], \n high=high[bounded],\n size=bounded[bounded].shape)\n\n return sample.astype(self.dtype)\n \n def contains(self, x):\n if isinstance(x, list):\n x = np.array(x) # Promote list to array for contains check\n return x.shape == self.shape and np.all(x >= self.low) and np.all(x <= self.high)\n\n def to_jsonable(self, sample_n):\n return np.array(sample_n).tolist()\n\n def from_jsonable(self, sample_n):\n return [np.asarray(sample) for sample in sample_n]\n\n def __repr__(self):\n return \"Box\" + str(self.shape)\n\n def __eq__(self, other):\n return isinstance(other, Box) and (self.shape == other.shape) and np.allclose(self.low, other.low) and np.allclose(self.high, other.high)\n", "path": "gym/spaces/box.py"}], "after_files": [{"content": "import numpy as np\n\nfrom .space import Space\n\n\nclass Box(Space):\n \"\"\"\n A (possibly unbounded) box in R^n. Specifically, a Box represents the\n Cartesian product of n closed intervals. Each interval has the form of one\n of [a, b], (-oo, b], [a, oo), or (-oo, oo).\n \n There are two common use cases:\n \n * Identical bound for each dimension::\n >>> Box(low=-1.0, high=2.0, shape=(3, 4), dtype=np.float32)\n Box(3, 4)\n \n * Independent bound for each dimension::\n >>> Box(low=np.array([-1.0, -2.0]), high=np.array([2.0, 4.0]), dtype=np.float32)\n Box(2,)\n\n \"\"\"\n def __init__(self, low, high, shape=None, dtype=np.float32):\n assert dtype is not None, 'dtype must be explicitly provided. '\n self.dtype = np.dtype(dtype)\n\n if shape is None:\n assert low.shape == high.shape, 'box dimension mismatch. '\n self.shape = low.shape\n self.low = low\n self.high = high\n else:\n assert np.isscalar(low) and np.isscalar(high), 'box requires scalar bounds. 
'\n self.shape = tuple(shape)\n self.low = np.full(self.shape, low)\n self.high = np.full(self.shape, high)\n\n self.low = self.low.astype(self.dtype)\n self.high = self.high.astype(self.dtype)\n\n # Boolean arrays which indicate the interval type for each coordinate\n self.bounded_below = -np.inf < self.low\n self.bounded_above = np.inf > self.high\n\n super(Box, self).__init__(self.shape, self.dtype)\n\n def is_bounded(self, manner=\"both\"):\n below = np.all(self.bounded_below)\n above = np.all(self.bounded_above)\n if manner == \"both\":\n return below and above\n elif manner == \"below\":\n return below\n elif manner == \"above\":\n return above\n else:\n raise ValueError(\"manner is not in {'below', 'above', 'both'}\")\n\n def sample(self):\n \"\"\"\n Generates a single random sample inside of the Box. \n\n In creating a sample of the box, each coordinate is sampled according to\n the form of the interval:\n \n * [a, b] : uniform distribution \n * [a, oo) : shifted exponential distribution\n * (-oo, b] : shifted negative exponential distribution\n * (-oo, oo) : normal distribution\n \"\"\"\n high = self.high if self.dtype.kind == 'f' \\\n else self.high.astype('int64') + 1\n sample = np.empty(self.shape)\n\n # Masking arrays which classify the coordinates according to interval\n # type\n unbounded = ~self.bounded_below & ~self.bounded_above\n upp_bounded = ~self.bounded_below & self.bounded_above\n low_bounded = self.bounded_below & ~self.bounded_above\n bounded = self.bounded_below & self.bounded_above\n \n\n # Vectorized sampling by interval type\n sample[unbounded] = self.np_random.normal(\n size=unbounded[unbounded].shape)\n\n sample[low_bounded] = self.np_random.exponential(\n size=low_bounded[low_bounded].shape) + self.low[low_bounded]\n \n sample[upp_bounded] = -self.np_random.exponential(\n size=upp_bounded[upp_bounded].shape) - self.high[upp_bounded]\n \n sample[bounded] = self.np_random.uniform(low=self.low[bounded], \n high=high[bounded],\n size=bounded[bounded].shape)\n if self.dtype.kind == 'i':\n sample = np.floor(sample)\n\n return sample.astype(self.dtype)\n \n def contains(self, x):\n if isinstance(x, list):\n x = np.array(x) # Promote list to array for contains check\n return x.shape == self.shape and np.all(x >= self.low) and np.all(x <= self.high)\n\n def to_jsonable(self, sample_n):\n return np.array(sample_n).tolist()\n\n def from_jsonable(self, sample_n):\n return [np.asarray(sample) for sample in sample_n]\n\n def __repr__(self):\n return \"Box\" + str(self.shape)\n\n def __eq__(self, other):\n return isinstance(other, Box) and (self.shape == other.shape) and np.allclose(self.low, other.low) and np.allclose(self.high, other.high)\n", "path": "gym/spaces/box.py"}]} |
gh_patches_debug_1620 | rasdani/github-patches | git_diff | urllib3__urllib3-1304 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
RECENT_DATE needs to be bumped
The test suite started failing because RECENT_DATE is not recent enough:
```
[ 62s] def test_recent_date(self):
[ 62s] # This test is to make sure that the RECENT_DATE value
[ 62s] # doesn't get too far behind what the current date is.
[ 62s] # When this test fails update urllib3.connection.RECENT_DATE
[ 62s] # according to the rules defined in that file.
[ 62s] two_years = datetime.timedelta(days=365 * 2)
[ 62s] > assert RECENT_DATE > (datetime.datetime.today() - two_years).date()
[ 62s] E AssertionError: assert datetime.date(2016, 1, 1) > datetime.date(2016, 1, 1)
[ 62s] E + where datetime.date(2016, 1, 1) = <built-in method date of datetime.datetime object at 0x7fb6899be198>()
[ 62s] E + where <built-in method date of datetime.datetime object at 0x7fb6899be198> = (datetime.datetime(2017, 12, 31, 10, 16, 14, 472906) - datetime.timedelta(730)).date
[ 62s] E + where datetime.datetime(2017, 12, 31, 10, 16, 14, 472906) = <built-in method today of type object at 0x7fb68e948d20>()
[ 62s] E + where <built-in method today of type object at 0x7fb68e948d20> = <class 'datetime.datetime'>.today
[ 62s] E + where <class 'datetime.datetime'> = datetime.datetime
```
--- END ISSUE ---
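
To make the failing assertion concrete before the file listings, here is a small standalone snippet; it is not taken from urllib3's test suite, and the variable names are my own. It computes the window a valid RECENT_DATE must fall into: newer than two years ago (the hard bound the test checks) and, per the comment in connection.py, no newer than roughly six months ago.

```python
import datetime

# Standalone sketch of the constraint (names are assumptions, not urllib3's).
today = datetime.date.today()
lower = today - datetime.timedelta(days=365 * 2)  # hard bound from the test
upper = today - datetime.timedelta(days=30 * 6)   # soft "6 months ago" rule

recent_date = datetime.date(2016, 1, 1)           # value currently in the repo
print("valid window:", lower, "to", upper)
print("passes test:", recent_date > lower)        # False at the traceback's date
```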
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `urllib3/connection.py`
Content:
```
1 from __future__ import absolute_import
2 import datetime
3 import logging
4 import os
5 import sys
6 import socket
7 from socket import error as SocketError, timeout as SocketTimeout
8 import warnings
9 from .packages import six
10 from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
11 from .packages.six.moves.http_client import HTTPException # noqa: F401
12
13 try: # Compiled with SSL?
14 import ssl
15 BaseSSLError = ssl.SSLError
16 except (ImportError, AttributeError): # Platform-specific: No SSL.
17 ssl = None
18
19 class BaseSSLError(BaseException):
20 pass
21
22
23 try: # Python 3:
24 # Not a no-op, we're adding this to the namespace so it can be imported.
25 ConnectionError = ConnectionError
26 except NameError: # Python 2:
27 class ConnectionError(Exception):
28 pass
29
30
31 from .exceptions import (
32 NewConnectionError,
33 ConnectTimeoutError,
34 SubjectAltNameWarning,
35 SystemTimeWarning,
36 )
37 from .packages.ssl_match_hostname import match_hostname, CertificateError
38
39 from .util.ssl_ import (
40 resolve_cert_reqs,
41 resolve_ssl_version,
42 assert_fingerprint,
43 create_urllib3_context,
44 ssl_wrap_socket
45 )
46
47
48 from .util import connection
49
50 from ._collections import HTTPHeaderDict
51
52 log = logging.getLogger(__name__)
53
54 port_by_scheme = {
55 'http': 80,
56 'https': 443,
57 }
58
59 # When updating RECENT_DATE, move it to
60 # within two years of the current date, and no
61 # earlier than 6 months ago.
62 RECENT_DATE = datetime.date(2016, 1, 1)
63
64
65 class DummyConnection(object):
66 """Used to detect a failed ConnectionCls import."""
67 pass
68
69
70 class HTTPConnection(_HTTPConnection, object):
71 """
72 Based on httplib.HTTPConnection but provides an extra constructor
73 backwards-compatibility layer between older and newer Pythons.
74
75 Additional keyword parameters are used to configure attributes of the connection.
76 Accepted parameters include:
77
78 - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
79 - ``source_address``: Set the source address for the current connection.
80
81 .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x
82
83 - ``socket_options``: Set specific options on the underlying socket. If not specified, then
84 defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
85 Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
86
87 For example, if you wish to enable TCP Keep Alive in addition to the defaults,
88 you might pass::
89
90 HTTPConnection.default_socket_options + [
91 (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
92 ]
93
94 Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
95 """
96
97 default_port = port_by_scheme['http']
98
99 #: Disable Nagle's algorithm by default.
100 #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
101 default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
102
103 #: Whether this connection verifies the host's certificate.
104 is_verified = False
105
106 def __init__(self, *args, **kw):
107 if six.PY3: # Python 3
108 kw.pop('strict', None)
109
110 # Pre-set source_address in case we have an older Python like 2.6.
111 self.source_address = kw.get('source_address')
112
113 if sys.version_info < (2, 7): # Python 2.6
114 # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
115 # not newer versions. We can still use it when creating a
116 # connection though, so we pop it *after* we have saved it as
117 # self.source_address.
118 kw.pop('source_address', None)
119
120 #: The socket options provided by the user. If no options are
121 #: provided, we use the default options.
122 self.socket_options = kw.pop('socket_options', self.default_socket_options)
123
124 # Superclass also sets self.source_address in Python 2.7+.
125 _HTTPConnection.__init__(self, *args, **kw)
126
127 @property
128 def host(self):
129 """
130 Getter method to remove any trailing dots that indicate the hostname is an FQDN.
131
132 In general, SSL certificates don't include the trailing dot indicating a
133 fully-qualified domain name, and thus, they don't validate properly when
134 checked against a domain name that includes the dot. In addition, some
135 servers may not expect to receive the trailing dot when provided.
136
137 However, the hostname with trailing dot is critical to DNS resolution; doing a
138 lookup with the trailing dot will properly only resolve the appropriate FQDN,
139 whereas a lookup without a trailing dot will search the system's search domain
140 list. Thus, it's important to keep the original host around for use only in
141 those cases where it's appropriate (i.e., when doing DNS lookup to establish the
142 actual TCP connection across which we're going to send HTTP requests).
143 """
144 return self._dns_host.rstrip('.')
145
146 @host.setter
147 def host(self, value):
148 """
149 Setter for the `host` property.
150
151 We assume that only urllib3 uses the _dns_host attribute; httplib itself
152 only uses `host`, and it seems reasonable that other libraries follow suit.
153 """
154 self._dns_host = value
155
156 def _new_conn(self):
157 """ Establish a socket connection and set nodelay settings on it.
158
159 :return: New socket connection.
160 """
161 extra_kw = {}
162 if self.source_address:
163 extra_kw['source_address'] = self.source_address
164
165 if self.socket_options:
166 extra_kw['socket_options'] = self.socket_options
167
168 try:
169 conn = connection.create_connection(
170 (self._dns_host, self.port), self.timeout, **extra_kw)
171
172 except SocketTimeout as e:
173 raise ConnectTimeoutError(
174 self, "Connection to %s timed out. (connect timeout=%s)" %
175 (self.host, self.timeout))
176
177 except SocketError as e:
178 raise NewConnectionError(
179 self, "Failed to establish a new connection: %s" % e)
180
181 return conn
182
183 def _prepare_conn(self, conn):
184 self.sock = conn
185 # the _tunnel_host attribute was added in python 2.6.3 (via
186 # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
187 # not have them.
188 if getattr(self, '_tunnel_host', None):
189 # TODO: Fix tunnel so it doesn't depend on self.sock state.
190 self._tunnel()
191 # Mark this connection as not reusable
192 self.auto_open = 0
193
194 def connect(self):
195 conn = self._new_conn()
196 self._prepare_conn(conn)
197
198 def request_chunked(self, method, url, body=None, headers=None):
199 """
200 Alternative to the common request method, which sends the
201 body with chunked encoding and not as one block
202 """
203 headers = HTTPHeaderDict(headers if headers is not None else {})
204 skip_accept_encoding = 'accept-encoding' in headers
205 skip_host = 'host' in headers
206 self.putrequest(
207 method,
208 url,
209 skip_accept_encoding=skip_accept_encoding,
210 skip_host=skip_host
211 )
212 for header, value in headers.items():
213 self.putheader(header, value)
214 if 'transfer-encoding' not in headers:
215 self.putheader('Transfer-Encoding', 'chunked')
216 self.endheaders()
217
218 if body is not None:
219 stringish_types = six.string_types + (six.binary_type,)
220 if isinstance(body, stringish_types):
221 body = (body,)
222 for chunk in body:
223 if not chunk:
224 continue
225 if not isinstance(chunk, six.binary_type):
226 chunk = chunk.encode('utf8')
227 len_str = hex(len(chunk))[2:]
228 self.send(len_str.encode('utf-8'))
229 self.send(b'\r\n')
230 self.send(chunk)
231 self.send(b'\r\n')
232
233 # After the if clause, to always have a closed body
234 self.send(b'0\r\n\r\n')
235
236
237 class HTTPSConnection(HTTPConnection):
238 default_port = port_by_scheme['https']
239
240 ssl_version = None
241
242 def __init__(self, host, port=None, key_file=None, cert_file=None,
243 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
244 ssl_context=None, **kw):
245
246 HTTPConnection.__init__(self, host, port, strict=strict,
247 timeout=timeout, **kw)
248
249 self.key_file = key_file
250 self.cert_file = cert_file
251 self.ssl_context = ssl_context
252
253 # Required property for Google AppEngine 1.9.0 which otherwise causes
254 # HTTPS requests to go out as HTTP. (See Issue #356)
255 self._protocol = 'https'
256
257 def connect(self):
258 conn = self._new_conn()
259 self._prepare_conn(conn)
260
261 if self.ssl_context is None:
262 self.ssl_context = create_urllib3_context(
263 ssl_version=resolve_ssl_version(None),
264 cert_reqs=resolve_cert_reqs(None),
265 )
266
267 self.sock = ssl_wrap_socket(
268 sock=conn,
269 keyfile=self.key_file,
270 certfile=self.cert_file,
271 ssl_context=self.ssl_context,
272 )
273
274
275 class VerifiedHTTPSConnection(HTTPSConnection):
276 """
277 Based on httplib.HTTPSConnection but wraps the socket with
278 SSL certification.
279 """
280 cert_reqs = None
281 ca_certs = None
282 ca_cert_dir = None
283 ssl_version = None
284 assert_fingerprint = None
285
286 def set_cert(self, key_file=None, cert_file=None,
287 cert_reqs=None, ca_certs=None,
288 assert_hostname=None, assert_fingerprint=None,
289 ca_cert_dir=None):
290 """
291 This method should only be called once, before the connection is used.
292 """
293 # If cert_reqs is not provided, we can try to guess. If the user gave
294 # us a cert database, we assume they want to use it: otherwise, if
295 # they gave us an SSL Context object we should use whatever is set for
296 # it.
297 if cert_reqs is None:
298 if ca_certs or ca_cert_dir:
299 cert_reqs = 'CERT_REQUIRED'
300 elif self.ssl_context is not None:
301 cert_reqs = self.ssl_context.verify_mode
302
303 self.key_file = key_file
304 self.cert_file = cert_file
305 self.cert_reqs = cert_reqs
306 self.assert_hostname = assert_hostname
307 self.assert_fingerprint = assert_fingerprint
308 self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
309 self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
310
311 def connect(self):
312 # Add certificate verification
313 conn = self._new_conn()
314
315 hostname = self.host
316 if getattr(self, '_tunnel_host', None):
317 # _tunnel_host was added in Python 2.6.3
318 # (See: http://hg.python.org/cpython/rev/0f57b30a152f)
319
320 self.sock = conn
321 # Calls self._set_hostport(), so self.host is
322 # self._tunnel_host below.
323 self._tunnel()
324 # Mark this connection as not reusable
325 self.auto_open = 0
326
327 # Override the host with the one we're requesting data from.
328 hostname = self._tunnel_host
329
330 is_time_off = datetime.date.today() < RECENT_DATE
331 if is_time_off:
332 warnings.warn((
333 'System time is way off (before {0}). This will probably '
334 'lead to SSL verification errors').format(RECENT_DATE),
335 SystemTimeWarning
336 )
337
338 # Wrap socket using verification with the root certs in
339 # trusted_root_certs
340 if self.ssl_context is None:
341 self.ssl_context = create_urllib3_context(
342 ssl_version=resolve_ssl_version(self.ssl_version),
343 cert_reqs=resolve_cert_reqs(self.cert_reqs),
344 )
345
346 context = self.ssl_context
347 context.verify_mode = resolve_cert_reqs(self.cert_reqs)
348 self.sock = ssl_wrap_socket(
349 sock=conn,
350 keyfile=self.key_file,
351 certfile=self.cert_file,
352 ca_certs=self.ca_certs,
353 ca_cert_dir=self.ca_cert_dir,
354 server_hostname=hostname,
355 ssl_context=context)
356
357 if self.assert_fingerprint:
358 assert_fingerprint(self.sock.getpeercert(binary_form=True),
359 self.assert_fingerprint)
360 elif context.verify_mode != ssl.CERT_NONE \
361 and not getattr(context, 'check_hostname', False) \
362 and self.assert_hostname is not False:
363 # While urllib3 attempts to always turn off hostname matching from
364 # the TLS library, this cannot always be done. So we check whether
365 # the TLS Library still thinks it's matching hostnames.
366 cert = self.sock.getpeercert()
367 if not cert.get('subjectAltName', ()):
368 warnings.warn((
369 'Certificate for {0} has no `subjectAltName`, falling back to check for a '
370 '`commonName` for now. This feature is being removed by major browsers and '
371 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
372 'for details.)'.format(hostname)),
373 SubjectAltNameWarning
374 )
375 _match_hostname(cert, self.assert_hostname or hostname)
376
377 self.is_verified = (
378 context.verify_mode == ssl.CERT_REQUIRED or
379 self.assert_fingerprint is not None
380 )
381
382
383 def _match_hostname(cert, asserted_hostname):
384 try:
385 match_hostname(cert, asserted_hostname)
386 except CertificateError as e:
387 log.error(
388 'Certificate did not match expected hostname: %s. '
389 'Certificate: %s', asserted_hostname, cert
390 )
391 # Add cert to exception and reraise so client code can inspect
392 # the cert when catching the exception, if they want to
393 e._peer_cert = cert
394 raise
395
396
397 if ssl:
398 # Make a copy for testing.
399 UnverifiedHTTPSConnection = HTTPSConnection
400 HTTPSConnection = VerifiedHTTPSConnection
401 else:
402 HTTPSConnection = DummyConnection
403
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/urllib3/connection.py b/urllib3/connection.py
--- a/urllib3/connection.py
+++ b/urllib3/connection.py
@@ -56,10 +56,11 @@
'https': 443,
}
-# When updating RECENT_DATE, move it to
-# within two years of the current date, and no
-# earlier than 6 months ago.
-RECENT_DATE = datetime.date(2016, 1, 1)
+# When updating RECENT_DATE, move it to within two years of the current date,
+# and not less than 6 months ago.
+# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or
+# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months)
+RECENT_DATE = datetime.date(2017, 6, 30)
class DummyConnection(object):
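
A quick sanity check of the new value is sketched below. It is not part of the patch, and the reference date is an assumption taken from the traceback in the issue rather than anything shipped with urllib3.

```python
import datetime

# Illustrative check: around the time of the report, 2017-06-30 sits inside
# the required window (newer than two years ago, older than six months ago).
RECENT_DATE = datetime.date(2017, 6, 30)
today = datetime.date(2018, 1, 1)  # assumed reference date

assert RECENT_DATE > today - datetime.timedelta(days=365 * 2)  # test bound
assert RECENT_DATE < today - datetime.timedelta(days=30 * 6)   # comment's rule
print("RECENT_DATE satisfies both rules")
```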
| {"golden_diff": "diff --git a/urllib3/connection.py b/urllib3/connection.py\n--- a/urllib3/connection.py\n+++ b/urllib3/connection.py\n@@ -56,10 +56,11 @@\n 'https': 443,\n }\n \n-# When updating RECENT_DATE, move it to\n-# within two years of the current date, and no\n-# earlier than 6 months ago.\n-RECENT_DATE = datetime.date(2016, 1, 1)\n+# When updating RECENT_DATE, move it to within two years of the current date,\n+# and not less than 6 months ago.\n+# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or\n+# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months)\n+RECENT_DATE = datetime.date(2017, 6, 30)\n \n \n class DummyConnection(object):\n", "issue": "RECENT_DATE needs to be bumped\nTest suite started failing, as RECENT_DATE is not recent enough:\r\n\r\n```\r\n[ 62s] def test_recent_date(self):\r\n[ 62s] # This test is to make sure that the RECENT_DATE value\r\n[ 62s] # doesn't get too far behind what the current date is.\r\n[ 62s] # When this test fails update urllib3.connection.RECENT_DATE\r\n[ 62s] # according to the rules defined in that file.\r\n[ 62s] two_years = datetime.timedelta(days=365 * 2)\r\n[ 62s] > assert RECENT_DATE > (datetime.datetime.today() - two_years).date()\r\n[ 62s] E AssertionError: assert datetime.date(2016, 1, 1) > datetime.date(2016, 1, 1)\r\n[ 62s] E + where datetime.date(2016, 1, 1) = <built-in method date of datetime.datetime object at 0x7fb6899be198>()\r\n[ 62s] E + where <built-in method date of datetime.datetime object at 0x7fb6899be198> = (datetime.datetime(2017, 12, 31, 10, 16, 14, 472906) - datetime.timedelta(730)).date\r\n[ 62s] E + where datetime.datetime(2017, 12, 31, 10, 16, 14, 472906) = <built-in method today of type object at 0x7fb68e948d20>()\r\n[ 62s] E + where <built-in method today of type object at 0x7fb68e948d20> = <class 'datetime.datetime'>.today\r\n[ 62s] E + where <class 'datetime.datetime'> = datetime.datetime\r\n```\n", "before_files": [{"content": "from __future__ import absolute_import\nimport datetime\nimport logging\nimport os\nimport sys\nimport socket\nfrom socket import error as SocketError, timeout as SocketTimeout\nimport warnings\nfrom .packages import six\nfrom .packages.six.moves.http_client import HTTPConnection as _HTTPConnection\nfrom .packages.six.moves.http_client import HTTPException # noqa: F401\n\ntry: # Compiled with SSL?\n import ssl\n BaseSSLError = ssl.SSLError\nexcept (ImportError, AttributeError): # Platform-specific: No SSL.\n ssl = None\n\n class BaseSSLError(BaseException):\n pass\n\n\ntry: # Python 3:\n # Not a no-op, we're adding this to the namespace so it can be imported.\n ConnectionError = ConnectionError\nexcept NameError: # Python 2:\n class ConnectionError(Exception):\n pass\n\n\nfrom .exceptions import (\n NewConnectionError,\n ConnectTimeoutError,\n SubjectAltNameWarning,\n SystemTimeWarning,\n)\nfrom .packages.ssl_match_hostname import match_hostname, CertificateError\n\nfrom .util.ssl_ import (\n resolve_cert_reqs,\n resolve_ssl_version,\n assert_fingerprint,\n create_urllib3_context,\n ssl_wrap_socket\n)\n\n\nfrom .util import connection\n\nfrom ._collections import HTTPHeaderDict\n\nlog = logging.getLogger(__name__)\n\nport_by_scheme = {\n 'http': 80,\n 'https': 443,\n}\n\n# When updating RECENT_DATE, move it to\n# within two years of the current date, and no\n# earlier than 6 months ago.\nRECENT_DATE = datetime.date(2016, 1, 1)\n\n\nclass DummyConnection(object):\n \"\"\"Used to detect a failed ConnectionCls import.\"\"\"\n 
pass\n\n\nclass HTTPConnection(_HTTPConnection, object):\n \"\"\"\n Based on httplib.HTTPConnection but provides an extra constructor\n backwards-compatibility layer between older and newer Pythons.\n\n Additional keyword parameters are used to configure attributes of the connection.\n Accepted parameters include:\n\n - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`\n - ``source_address``: Set the source address for the current connection.\n\n .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x\n\n - ``socket_options``: Set specific options on the underlying socket. If not specified, then\n defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling\n Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.\n\n For example, if you wish to enable TCP Keep Alive in addition to the defaults,\n you might pass::\n\n HTTPConnection.default_socket_options + [\n (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),\n ]\n\n Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).\n \"\"\"\n\n default_port = port_by_scheme['http']\n\n #: Disable Nagle's algorithm by default.\n #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``\n default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]\n\n #: Whether this connection verifies the host's certificate.\n is_verified = False\n\n def __init__(self, *args, **kw):\n if six.PY3: # Python 3\n kw.pop('strict', None)\n\n # Pre-set source_address in case we have an older Python like 2.6.\n self.source_address = kw.get('source_address')\n\n if sys.version_info < (2, 7): # Python 2.6\n # _HTTPConnection on Python 2.6 will balk at this keyword arg, but\n # not newer versions. We can still use it when creating a\n # connection though, so we pop it *after* we have saved it as\n # self.source_address.\n kw.pop('source_address', None)\n\n #: The socket options provided by the user. If no options are\n #: provided, we use the default options.\n self.socket_options = kw.pop('socket_options', self.default_socket_options)\n\n # Superclass also sets self.source_address in Python 2.7+.\n _HTTPConnection.__init__(self, *args, **kw)\n\n @property\n def host(self):\n \"\"\"\n Getter method to remove any trailing dots that indicate the hostname is an FQDN.\n\n In general, SSL certificates don't include the trailing dot indicating a\n fully-qualified domain name, and thus, they don't validate properly when\n checked against a domain name that includes the dot. In addition, some\n servers may not expect to receive the trailing dot when provided.\n\n However, the hostname with trailing dot is critical to DNS resolution; doing a\n lookup with the trailing dot will properly only resolve the appropriate FQDN,\n whereas a lookup without a trailing dot will search the system's search domain\n list. 
Thus, it's important to keep the original host around for use only in\n those cases where it's appropriate (i.e., when doing DNS lookup to establish the\n actual TCP connection across which we're going to send HTTP requests).\n \"\"\"\n return self._dns_host.rstrip('.')\n\n @host.setter\n def host(self, value):\n \"\"\"\n Setter for the `host` property.\n\n We assume that only urllib3 uses the _dns_host attribute; httplib itself\n only uses `host`, and it seems reasonable that other libraries follow suit.\n \"\"\"\n self._dns_host = value\n\n def _new_conn(self):\n \"\"\" Establish a socket connection and set nodelay settings on it.\n\n :return: New socket connection.\n \"\"\"\n extra_kw = {}\n if self.source_address:\n extra_kw['source_address'] = self.source_address\n\n if self.socket_options:\n extra_kw['socket_options'] = self.socket_options\n\n try:\n conn = connection.create_connection(\n (self._dns_host, self.port), self.timeout, **extra_kw)\n\n except SocketTimeout as e:\n raise ConnectTimeoutError(\n self, \"Connection to %s timed out. (connect timeout=%s)\" %\n (self.host, self.timeout))\n\n except SocketError as e:\n raise NewConnectionError(\n self, \"Failed to establish a new connection: %s\" % e)\n\n return conn\n\n def _prepare_conn(self, conn):\n self.sock = conn\n # the _tunnel_host attribute was added in python 2.6.3 (via\n # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do\n # not have them.\n if getattr(self, '_tunnel_host', None):\n # TODO: Fix tunnel so it doesn't depend on self.sock state.\n self._tunnel()\n # Mark this connection as not reusable\n self.auto_open = 0\n\n def connect(self):\n conn = self._new_conn()\n self._prepare_conn(conn)\n\n def request_chunked(self, method, url, body=None, headers=None):\n \"\"\"\n Alternative to the common request method, which sends the\n body with chunked encoding and not as one block\n \"\"\"\n headers = HTTPHeaderDict(headers if headers is not None else {})\n skip_accept_encoding = 'accept-encoding' in headers\n skip_host = 'host' in headers\n self.putrequest(\n method,\n url,\n skip_accept_encoding=skip_accept_encoding,\n skip_host=skip_host\n )\n for header, value in headers.items():\n self.putheader(header, value)\n if 'transfer-encoding' not in headers:\n self.putheader('Transfer-Encoding', 'chunked')\n self.endheaders()\n\n if body is not None:\n stringish_types = six.string_types + (six.binary_type,)\n if isinstance(body, stringish_types):\n body = (body,)\n for chunk in body:\n if not chunk:\n continue\n if not isinstance(chunk, six.binary_type):\n chunk = chunk.encode('utf8')\n len_str = hex(len(chunk))[2:]\n self.send(len_str.encode('utf-8'))\n self.send(b'\\r\\n')\n self.send(chunk)\n self.send(b'\\r\\n')\n\n # After the if clause, to always have a closed body\n self.send(b'0\\r\\n\\r\\n')\n\n\nclass HTTPSConnection(HTTPConnection):\n default_port = port_by_scheme['https']\n\n ssl_version = None\n\n def __init__(self, host, port=None, key_file=None, cert_file=None,\n strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n ssl_context=None, **kw):\n\n HTTPConnection.__init__(self, host, port, strict=strict,\n timeout=timeout, **kw)\n\n self.key_file = key_file\n self.cert_file = cert_file\n self.ssl_context = ssl_context\n\n # Required property for Google AppEngine 1.9.0 which otherwise causes\n # HTTPS requests to go out as HTTP. 
(See Issue #356)\n self._protocol = 'https'\n\n def connect(self):\n conn = self._new_conn()\n self._prepare_conn(conn)\n\n if self.ssl_context is None:\n self.ssl_context = create_urllib3_context(\n ssl_version=resolve_ssl_version(None),\n cert_reqs=resolve_cert_reqs(None),\n )\n\n self.sock = ssl_wrap_socket(\n sock=conn,\n keyfile=self.key_file,\n certfile=self.cert_file,\n ssl_context=self.ssl_context,\n )\n\n\nclass VerifiedHTTPSConnection(HTTPSConnection):\n \"\"\"\n Based on httplib.HTTPSConnection but wraps the socket with\n SSL certification.\n \"\"\"\n cert_reqs = None\n ca_certs = None\n ca_cert_dir = None\n ssl_version = None\n assert_fingerprint = None\n\n def set_cert(self, key_file=None, cert_file=None,\n cert_reqs=None, ca_certs=None,\n assert_hostname=None, assert_fingerprint=None,\n ca_cert_dir=None):\n \"\"\"\n This method should only be called once, before the connection is used.\n \"\"\"\n # If cert_reqs is not provided, we can try to guess. If the user gave\n # us a cert database, we assume they want to use it: otherwise, if\n # they gave us an SSL Context object we should use whatever is set for\n # it.\n if cert_reqs is None:\n if ca_certs or ca_cert_dir:\n cert_reqs = 'CERT_REQUIRED'\n elif self.ssl_context is not None:\n cert_reqs = self.ssl_context.verify_mode\n\n self.key_file = key_file\n self.cert_file = cert_file\n self.cert_reqs = cert_reqs\n self.assert_hostname = assert_hostname\n self.assert_fingerprint = assert_fingerprint\n self.ca_certs = ca_certs and os.path.expanduser(ca_certs)\n self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)\n\n def connect(self):\n # Add certificate verification\n conn = self._new_conn()\n\n hostname = self.host\n if getattr(self, '_tunnel_host', None):\n # _tunnel_host was added in Python 2.6.3\n # (See: http://hg.python.org/cpython/rev/0f57b30a152f)\n\n self.sock = conn\n # Calls self._set_hostport(), so self.host is\n # self._tunnel_host below.\n self._tunnel()\n # Mark this connection as not reusable\n self.auto_open = 0\n\n # Override the host with the one we're requesting data from.\n hostname = self._tunnel_host\n\n is_time_off = datetime.date.today() < RECENT_DATE\n if is_time_off:\n warnings.warn((\n 'System time is way off (before {0}). This will probably '\n 'lead to SSL verification errors').format(RECENT_DATE),\n SystemTimeWarning\n )\n\n # Wrap socket using verification with the root certs in\n # trusted_root_certs\n if self.ssl_context is None:\n self.ssl_context = create_urllib3_context(\n ssl_version=resolve_ssl_version(self.ssl_version),\n cert_reqs=resolve_cert_reqs(self.cert_reqs),\n )\n\n context = self.ssl_context\n context.verify_mode = resolve_cert_reqs(self.cert_reqs)\n self.sock = ssl_wrap_socket(\n sock=conn,\n keyfile=self.key_file,\n certfile=self.cert_file,\n ca_certs=self.ca_certs,\n ca_cert_dir=self.ca_cert_dir,\n server_hostname=hostname,\n ssl_context=context)\n\n if self.assert_fingerprint:\n assert_fingerprint(self.sock.getpeercert(binary_form=True),\n self.assert_fingerprint)\n elif context.verify_mode != ssl.CERT_NONE \\\n and not getattr(context, 'check_hostname', False) \\\n and self.assert_hostname is not False:\n # While urllib3 attempts to always turn off hostname matching from\n # the TLS library, this cannot always be done. 
So we check whether\n # the TLS Library still thinks it's matching hostnames.\n cert = self.sock.getpeercert()\n if not cert.get('subjectAltName', ()):\n warnings.warn((\n 'Certificate for {0} has no `subjectAltName`, falling back to check for a '\n '`commonName` for now. This feature is being removed by major browsers and '\n 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '\n 'for details.)'.format(hostname)),\n SubjectAltNameWarning\n )\n _match_hostname(cert, self.assert_hostname or hostname)\n\n self.is_verified = (\n context.verify_mode == ssl.CERT_REQUIRED or\n self.assert_fingerprint is not None\n )\n\n\ndef _match_hostname(cert, asserted_hostname):\n try:\n match_hostname(cert, asserted_hostname)\n except CertificateError as e:\n log.error(\n 'Certificate did not match expected hostname: %s. '\n 'Certificate: %s', asserted_hostname, cert\n )\n # Add cert to exception and reraise so client code can inspect\n # the cert when catching the exception, if they want to\n e._peer_cert = cert\n raise\n\n\nif ssl:\n # Make a copy for testing.\n UnverifiedHTTPSConnection = HTTPSConnection\n HTTPSConnection = VerifiedHTTPSConnection\nelse:\n HTTPSConnection = DummyConnection\n", "path": "urllib3/connection.py"}], "after_files": [{"content": "from __future__ import absolute_import\nimport datetime\nimport logging\nimport os\nimport sys\nimport socket\nfrom socket import error as SocketError, timeout as SocketTimeout\nimport warnings\nfrom .packages import six\nfrom .packages.six.moves.http_client import HTTPConnection as _HTTPConnection\nfrom .packages.six.moves.http_client import HTTPException # noqa: F401\n\ntry: # Compiled with SSL?\n import ssl\n BaseSSLError = ssl.SSLError\nexcept (ImportError, AttributeError): # Platform-specific: No SSL.\n ssl = None\n\n class BaseSSLError(BaseException):\n pass\n\n\ntry: # Python 3:\n # Not a no-op, we're adding this to the namespace so it can be imported.\n ConnectionError = ConnectionError\nexcept NameError: # Python 2:\n class ConnectionError(Exception):\n pass\n\n\nfrom .exceptions import (\n NewConnectionError,\n ConnectTimeoutError,\n SubjectAltNameWarning,\n SystemTimeWarning,\n)\nfrom .packages.ssl_match_hostname import match_hostname, CertificateError\n\nfrom .util.ssl_ import (\n resolve_cert_reqs,\n resolve_ssl_version,\n assert_fingerprint,\n create_urllib3_context,\n ssl_wrap_socket\n)\n\n\nfrom .util import connection\n\nfrom ._collections import HTTPHeaderDict\n\nlog = logging.getLogger(__name__)\n\nport_by_scheme = {\n 'http': 80,\n 'https': 443,\n}\n\n# When updating RECENT_DATE, move it to within two years of the current date,\n# and not less than 6 months ago.\n# Example: if Today is 2018-01-01, then RECENT_DATE should be any date on or\n# after 2016-01-01 (today - 2 years) AND before 2017-07-01 (today - 6 months)\nRECENT_DATE = datetime.date(2017, 6, 30)\n\n\nclass DummyConnection(object):\n \"\"\"Used to detect a failed ConnectionCls import.\"\"\"\n pass\n\n\nclass HTTPConnection(_HTTPConnection, object):\n \"\"\"\n Based on httplib.HTTPConnection but provides an extra constructor\n backwards-compatibility layer between older and newer Pythons.\n\n Additional keyword parameters are used to configure attributes of the connection.\n Accepted parameters include:\n\n - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`\n - ``source_address``: Set the source address for the current connection.\n\n .. note:: This is ignored for Python 2.6. 
It is only applied for 2.7 and 3.x\n\n - ``socket_options``: Set specific options on the underlying socket. If not specified, then\n defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling\n Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.\n\n For example, if you wish to enable TCP Keep Alive in addition to the defaults,\n you might pass::\n\n HTTPConnection.default_socket_options + [\n (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),\n ]\n\n Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).\n \"\"\"\n\n default_port = port_by_scheme['http']\n\n #: Disable Nagle's algorithm by default.\n #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``\n default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]\n\n #: Whether this connection verifies the host's certificate.\n is_verified = False\n\n def __init__(self, *args, **kw):\n if six.PY3: # Python 3\n kw.pop('strict', None)\n\n # Pre-set source_address in case we have an older Python like 2.6.\n self.source_address = kw.get('source_address')\n\n if sys.version_info < (2, 7): # Python 2.6\n # _HTTPConnection on Python 2.6 will balk at this keyword arg, but\n # not newer versions. We can still use it when creating a\n # connection though, so we pop it *after* we have saved it as\n # self.source_address.\n kw.pop('source_address', None)\n\n #: The socket options provided by the user. If no options are\n #: provided, we use the default options.\n self.socket_options = kw.pop('socket_options', self.default_socket_options)\n\n # Superclass also sets self.source_address in Python 2.7+.\n _HTTPConnection.__init__(self, *args, **kw)\n\n @property\n def host(self):\n \"\"\"\n Getter method to remove any trailing dots that indicate the hostname is an FQDN.\n\n In general, SSL certificates don't include the trailing dot indicating a\n fully-qualified domain name, and thus, they don't validate properly when\n checked against a domain name that includes the dot. In addition, some\n servers may not expect to receive the trailing dot when provided.\n\n However, the hostname with trailing dot is critical to DNS resolution; doing a\n lookup with the trailing dot will properly only resolve the appropriate FQDN,\n whereas a lookup without a trailing dot will search the system's search domain\n list. Thus, it's important to keep the original host around for use only in\n those cases where it's appropriate (i.e., when doing DNS lookup to establish the\n actual TCP connection across which we're going to send HTTP requests).\n \"\"\"\n return self._dns_host.rstrip('.')\n\n @host.setter\n def host(self, value):\n \"\"\"\n Setter for the `host` property.\n\n We assume that only urllib3 uses the _dns_host attribute; httplib itself\n only uses `host`, and it seems reasonable that other libraries follow suit.\n \"\"\"\n self._dns_host = value\n\n def _new_conn(self):\n \"\"\" Establish a socket connection and set nodelay settings on it.\n\n :return: New socket connection.\n \"\"\"\n extra_kw = {}\n if self.source_address:\n extra_kw['source_address'] = self.source_address\n\n if self.socket_options:\n extra_kw['socket_options'] = self.socket_options\n\n try:\n conn = connection.create_connection(\n (self._dns_host, self.port), self.timeout, **extra_kw)\n\n except SocketTimeout as e:\n raise ConnectTimeoutError(\n self, \"Connection to %s timed out. 
(connect timeout=%s)\" %\n (self.host, self.timeout))\n\n except SocketError as e:\n raise NewConnectionError(\n self, \"Failed to establish a new connection: %s\" % e)\n\n return conn\n\n def _prepare_conn(self, conn):\n self.sock = conn\n # the _tunnel_host attribute was added in python 2.6.3 (via\n # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do\n # not have them.\n if getattr(self, '_tunnel_host', None):\n # TODO: Fix tunnel so it doesn't depend on self.sock state.\n self._tunnel()\n # Mark this connection as not reusable\n self.auto_open = 0\n\n def connect(self):\n conn = self._new_conn()\n self._prepare_conn(conn)\n\n def request_chunked(self, method, url, body=None, headers=None):\n \"\"\"\n Alternative to the common request method, which sends the\n body with chunked encoding and not as one block\n \"\"\"\n headers = HTTPHeaderDict(headers if headers is not None else {})\n skip_accept_encoding = 'accept-encoding' in headers\n skip_host = 'host' in headers\n self.putrequest(\n method,\n url,\n skip_accept_encoding=skip_accept_encoding,\n skip_host=skip_host\n )\n for header, value in headers.items():\n self.putheader(header, value)\n if 'transfer-encoding' not in headers:\n self.putheader('Transfer-Encoding', 'chunked')\n self.endheaders()\n\n if body is not None:\n stringish_types = six.string_types + (six.binary_type,)\n if isinstance(body, stringish_types):\n body = (body,)\n for chunk in body:\n if not chunk:\n continue\n if not isinstance(chunk, six.binary_type):\n chunk = chunk.encode('utf8')\n len_str = hex(len(chunk))[2:]\n self.send(len_str.encode('utf-8'))\n self.send(b'\\r\\n')\n self.send(chunk)\n self.send(b'\\r\\n')\n\n # After the if clause, to always have a closed body\n self.send(b'0\\r\\n\\r\\n')\n\n\nclass HTTPSConnection(HTTPConnection):\n default_port = port_by_scheme['https']\n\n ssl_version = None\n\n def __init__(self, host, port=None, key_file=None, cert_file=None,\n strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,\n ssl_context=None, **kw):\n\n HTTPConnection.__init__(self, host, port, strict=strict,\n timeout=timeout, **kw)\n\n self.key_file = key_file\n self.cert_file = cert_file\n self.ssl_context = ssl_context\n\n # Required property for Google AppEngine 1.9.0 which otherwise causes\n # HTTPS requests to go out as HTTP. (See Issue #356)\n self._protocol = 'https'\n\n def connect(self):\n conn = self._new_conn()\n self._prepare_conn(conn)\n\n if self.ssl_context is None:\n self.ssl_context = create_urllib3_context(\n ssl_version=resolve_ssl_version(None),\n cert_reqs=resolve_cert_reqs(None),\n )\n\n self.sock = ssl_wrap_socket(\n sock=conn,\n keyfile=self.key_file,\n certfile=self.cert_file,\n ssl_context=self.ssl_context,\n )\n\n\nclass VerifiedHTTPSConnection(HTTPSConnection):\n \"\"\"\n Based on httplib.HTTPSConnection but wraps the socket with\n SSL certification.\n \"\"\"\n cert_reqs = None\n ca_certs = None\n ca_cert_dir = None\n ssl_version = None\n assert_fingerprint = None\n\n def set_cert(self, key_file=None, cert_file=None,\n cert_reqs=None, ca_certs=None,\n assert_hostname=None, assert_fingerprint=None,\n ca_cert_dir=None):\n \"\"\"\n This method should only be called once, before the connection is used.\n \"\"\"\n # If cert_reqs is not provided, we can try to guess. 
If the user gave\n # us a cert database, we assume they want to use it: otherwise, if\n # they gave us an SSL Context object we should use whatever is set for\n # it.\n if cert_reqs is None:\n if ca_certs or ca_cert_dir:\n cert_reqs = 'CERT_REQUIRED'\n elif self.ssl_context is not None:\n cert_reqs = self.ssl_context.verify_mode\n\n self.key_file = key_file\n self.cert_file = cert_file\n self.cert_reqs = cert_reqs\n self.assert_hostname = assert_hostname\n self.assert_fingerprint = assert_fingerprint\n self.ca_certs = ca_certs and os.path.expanduser(ca_certs)\n self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)\n\n def connect(self):\n # Add certificate verification\n conn = self._new_conn()\n\n hostname = self.host\n if getattr(self, '_tunnel_host', None):\n # _tunnel_host was added in Python 2.6.3\n # (See: http://hg.python.org/cpython/rev/0f57b30a152f)\n\n self.sock = conn\n # Calls self._set_hostport(), so self.host is\n # self._tunnel_host below.\n self._tunnel()\n # Mark this connection as not reusable\n self.auto_open = 0\n\n # Override the host with the one we're requesting data from.\n hostname = self._tunnel_host\n\n is_time_off = datetime.date.today() < RECENT_DATE\n if is_time_off:\n warnings.warn((\n 'System time is way off (before {0}). This will probably '\n 'lead to SSL verification errors').format(RECENT_DATE),\n SystemTimeWarning\n )\n\n # Wrap socket using verification with the root certs in\n # trusted_root_certs\n if self.ssl_context is None:\n self.ssl_context = create_urllib3_context(\n ssl_version=resolve_ssl_version(self.ssl_version),\n cert_reqs=resolve_cert_reqs(self.cert_reqs),\n )\n\n context = self.ssl_context\n context.verify_mode = resolve_cert_reqs(self.cert_reqs)\n self.sock = ssl_wrap_socket(\n sock=conn,\n keyfile=self.key_file,\n certfile=self.cert_file,\n ca_certs=self.ca_certs,\n ca_cert_dir=self.ca_cert_dir,\n server_hostname=hostname,\n ssl_context=context)\n\n if self.assert_fingerprint:\n assert_fingerprint(self.sock.getpeercert(binary_form=True),\n self.assert_fingerprint)\n elif context.verify_mode != ssl.CERT_NONE \\\n and not getattr(context, 'check_hostname', False) \\\n and self.assert_hostname is not False:\n # While urllib3 attempts to always turn off hostname matching from\n # the TLS library, this cannot always be done. So we check whether\n # the TLS Library still thinks it's matching hostnames.\n cert = self.sock.getpeercert()\n if not cert.get('subjectAltName', ()):\n warnings.warn((\n 'Certificate for {0} has no `subjectAltName`, falling back to check for a '\n '`commonName` for now. This feature is being removed by major browsers and '\n 'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '\n 'for details.)'.format(hostname)),\n SubjectAltNameWarning\n )\n _match_hostname(cert, self.assert_hostname or hostname)\n\n self.is_verified = (\n context.verify_mode == ssl.CERT_REQUIRED or\n self.assert_fingerprint is not None\n )\n\n\ndef _match_hostname(cert, asserted_hostname):\n try:\n match_hostname(cert, asserted_hostname)\n except CertificateError as e:\n log.error(\n 'Certificate did not match expected hostname: %s. 
'\n 'Certificate: %s', asserted_hostname, cert\n )\n # Add cert to exception and reraise so client code can inspect\n # the cert when catching the exception, if they want to\n e._peer_cert = cert\n raise\n\n\nif ssl:\n # Make a copy for testing.\n UnverifiedHTTPSConnection = HTTPSConnection\n HTTPSConnection = VerifiedHTTPSConnection\nelse:\n HTTPSConnection = DummyConnection\n", "path": "urllib3/connection.py"}]} |
gh_patches_debug_1621 | rasdani/github-patches | git_diff | conan-io__conan-3284 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Consider turning off template directories in Git for conan config install
To help us debug your issue please explain:
- [x] I've read the [CONTRIBUTING guide](https://raw.githubusercontent.com/conan-io/conan/develop/.github/CONTRIBUTING.md).
- [x] I've specified the Conan version, operating system version and any tool that can be relevant.
- [x] I've explained the steps to reproduce the error or the motivation/use case of the question/suggestion.
This is with Conan 1.6.0 on Windows 10.
To see this in progress, have a Git installation set up with automatic ctags generation akin to what's described in [Effortless Ctags with Git](https://tbaggery.com/2011/08/08/effortless-ctags-with-git.html).
When doing `conan config install` with a Git URL, I get error messages like this:
```
Traceback (most recent call last):
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\shutil.py", line 391, in _rmtree_unsafe
os.rmdir(path)
OSError: [WinError 145] The directory is not empty: 'C:\\Users\\kam\\.conan\\tmp_config_install\\config\\.git\\hooks'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\site-packages\conans\client\command.py", line 1230, in run
method(args[0][1:])
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\site-packages\conans\client\command.py", line 379, in config
return self._conan.config_install(args.item, verify_ssl, args.type)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\site-packages\conans\client\conan_api.py", line 79, in wrapper
return f(*args, **kwargs)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\site-packages\conans\client\conan_api.py", line 510, in config_install
return configuration_install(item, self._client_cache, self._user_io.out, verify_ssl, config_type)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\site-packages\conans\client\conf\config_installer.py", line 135, in configuration_install
rmdir(tmp_folder)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\site-packages\conans\util\files.py", line 202, in rmdir
shutil.rmtree(path, onerror=_change_permissions)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\shutil.py", line 494, in rmtree
return _rmtree_unsafe(path, onerror)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\shutil.py", line 384, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\shutil.py", line 384, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\shutil.py", line 384, in _rmtree_unsafe
_rmtree_unsafe(fullname, onerror)
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\shutil.py", line 393, in _rmtree_unsafe
onerror(os.rmdir, path, sys.exc_info())
File "c:\users\kam\.virtualenvs\skel-hv6zqtfh\lib\site-packages\conans\util\files.py", line 197, in _change_permissions
raise OSError("Cannot change permissions for {}! Exception info: {}".format(path, exc_info))
OSError: Cannot change permissions for C:\Users\kam\.conan\tmp_config_install\config\.git\hooks! Exception info: (<class 'OSError'>, OSError(41, 'The directory is not empty'), <traceback object at 0x0000016409078548>)
ERROR: Cannot change permissions for C:\Users\kam\.conan\tmp_config_install\config\.git\hooks! Exception info: (<class 'OSError'>, OSError(41, 'The directory is not empty'), <traceback object at 0x0000016409078548>)
```
The problem boils down to the way files are locked on Windows: an open file cannot be deleted. The hook starts a background process that keeps the script files open, so the cleanup of the directory fails. Of course, a second later, once the files are closed and unlocked, the directory can be deleted.
I've since started reworking my Git template to not start background processes by default, and only run `ctags` in checkouts that I'm actually developing on. This fixes my problem with `conan config install`.
It may make sense, though, to add `-c init.templateDir=` to the `git` command line when working on temporary downloads. This is part of sanitizing the process against user configuration: in a hook, _anything_ can happen, and the resulting errors are obscure and difficult to understand.
`conan config install` is mostly treating a Git repository as a sophisticated form of file archive, and probably doesn't want the user's hooks or other unusual setups from the template.
--- END ISSUE ---
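
To make the suggestion concrete before the file listings: the clone performed by `conan config install` can be isolated from the user's template directory by passing an empty `init.templateDir`. The sketch below is a hedged illustration, not the shipped conan code; the repository URL is hypothetical, and the command shape mirrors the existing clone call in `config_installer.py`.

```python
import subprocess

# Hedged sketch of the proposed sanitization (hypothetical URL). An empty
# init.templateDir means no user hook templates are copied into the clone.
verify_ssl = True
repo_url = "https://example.com/org/conan-config.git"
subprocess.check_output(
    'git -c http.sslVerify=%s -c init.templateDir= clone "%s" config'
    % (verify_ssl, repo_url),
    shell=True,
)
```

Passing the option per invocation, rather than changing the user's global configuration, keeps the sanitization local to the temporary clone.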
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `conans/client/conf/config_installer.py`
Content:
```
1 import os
2 import shutil
3 from six.moves.urllib.parse import urlparse
4
5 from conans.tools import unzip
6 from conans.util.files import rmdir, mkdir
7 from conans.client.remote_registry import RemoteRegistry
8 from conans import tools
9 from conans.errors import ConanException
10 import subprocess
11
12
13 def _hide_password(resource):
14 """
15 Hide password from url/file path
16
17 :param resource: string with url or file path
18 :return: resource with hidden password if present
19 """
20 password = urlparse(resource).password
21 return resource.replace(password, "<hidden>") if password else resource
22
23
24 def _handle_remotes(registry_path, remote_file, output):
25 registry = RemoteRegistry(registry_path, output)
26 new_registry = RemoteRegistry(remote_file, output)
27 registry.define_remotes(new_registry.remotes)
28
29
30 def _handle_profiles(source_folder, target_folder, output):
31 mkdir(target_folder)
32 for root, _, files in os.walk(source_folder):
33 relative_path = os.path.relpath(root, source_folder)
34 if relative_path == ".":
35 relative_path = ""
36 for f in files:
37 profile = os.path.join(relative_path, f)
38 output.info(" Installing profile %s" % profile)
39 shutil.copy(os.path.join(root, f), os.path.join(target_folder, profile))
40
41
42 def _process_git_repo(repo_url, client_cache, output, tmp_folder, verify_ssl):
43 output.info("Trying to clone repo %s" % repo_url)
44
45 with tools.chdir(tmp_folder):
46 try:
47 subprocess.check_output('git -c http.sslVerify=%s clone "%s" config' % (verify_ssl, repo_url),
48 shell=True)
49 output.info("Repo cloned")
50 except Exception as e:
51 raise ConanException("config install error. Can't clone repo: %s" % str(e))
52
53 tmp_folder = os.path.join(tmp_folder, "config")
54 _process_folder(tmp_folder, client_cache, output)
55
56
57 def _process_zip_file(zippath, client_cache, output, tmp_folder, remove=False):
58 unzip(zippath, tmp_folder)
59 if remove:
60 os.unlink(zippath)
61 _process_folder(tmp_folder, client_cache, output)
62
63
64 def _handle_conan_conf(current_conan_conf, new_conan_conf_path):
65 current_conan_conf.read(new_conan_conf_path)
66 with open(current_conan_conf.filename, "w") as f:
67 current_conan_conf.write(f)
68
69
70 def _process_folder(folder, client_cache, output):
71 for root, dirs, files in os.walk(folder):
72 for f in files:
73 if f == "settings.yml":
74 output.info("Installing settings.yml")
75 settings_path = client_cache.settings_path
76 shutil.copy(os.path.join(root, f), settings_path)
77 elif f == "conan.conf":
78 output.info("Processing conan.conf")
79 conan_conf = client_cache.conan_config
80 _handle_conan_conf(conan_conf, os.path.join(root, f))
81 elif f == "remotes.txt":
82 output.info("Defining remotes")
83 registry_path = client_cache.registry
84 _handle_remotes(registry_path, os.path.join(root, f), output)
85 else:
86 relpath = os.path.relpath(root, folder)
87 target_folder = os.path.join(client_cache.conan_folder, relpath)
88 mkdir(target_folder)
89 output.info("Copying file %s to %s" % (f, target_folder))
90 shutil.copy(os.path.join(root, f), target_folder)
91 for d in dirs:
92 if d == "profiles":
93 output.info("Installing profiles")
94 profiles_path = client_cache.profiles_path
95 _handle_profiles(os.path.join(root, d), profiles_path, output)
96 break
97 dirs[:] = [d for d in dirs if d not in ("profiles", ".git")]
98
99
100 def _process_download(item, client_cache, output, tmp_folder, verify_ssl):
101 output.info("Trying to download %s" % _hide_password(item))
102 zippath = os.path.join(tmp_folder, "config.zip")
103 try:
104 tools.download(item, zippath, out=output, verify=verify_ssl)
105 _process_zip_file(zippath, client_cache, output, tmp_folder, remove=True)
106 except Exception as e:
107 raise ConanException("Error while installing config from %s\n%s" % (item, str(e)))
108
109
110 def configuration_install(item, client_cache, output, verify_ssl, config_type=None):
111 tmp_folder = os.path.join(client_cache.conan_folder, "tmp_config_install")
112 # necessary for Mac OSX, where the temp folders in /var/ are symlinks to /private/var/
113 tmp_folder = os.path.realpath(tmp_folder)
114 mkdir(tmp_folder)
115 try:
116 if item is None:
117 try:
118 item = client_cache.conan_config.get_item("general.config_install")
119 except ConanException:
120 raise ConanException("Called config install without arguments and "
121 "'general.config_install' not defined in conan.conf")
122
123 if item.endswith(".git") or config_type == "git":
124 _process_git_repo(item, client_cache, output, tmp_folder, verify_ssl)
125 elif os.path.exists(item):
126 # is a local file
127 _process_zip_file(item, client_cache, output, tmp_folder)
128 elif item.startswith("http"):
129 _process_download(item, client_cache, output, tmp_folder, verify_ssl)
130 else:
131 raise ConanException("I don't know how to process %s" % item)
132 finally:
133 if item:
134 client_cache.conan_config.set_item("general.config_install", item)
135 rmdir(tmp_folder)
136
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/conans/client/conf/config_installer.py b/conans/client/conf/config_installer.py
--- a/conans/client/conf/config_installer.py
+++ b/conans/client/conf/config_installer.py
@@ -44,7 +44,7 @@
with tools.chdir(tmp_folder):
try:
- subprocess.check_output('git -c http.sslVerify=%s clone "%s" config' % (verify_ssl, repo_url),
+ subprocess.check_output('git -c http.sslVerify=%s -c init.templateDir= clone "%s" config' % (verify_ssl, repo_url),
shell=True)
output.info("Repo cloned")
except Exception as e:
| {"golden_diff": "diff --git a/conans/client/conf/config_installer.py b/conans/client/conf/config_installer.py\n--- a/conans/client/conf/config_installer.py\n+++ b/conans/client/conf/config_installer.py\n@@ -44,7 +44,7 @@\n \n with tools.chdir(tmp_folder):\n try:\n- subprocess.check_output('git -c http.sslVerify=%s clone \"%s\" config' % (verify_ssl, repo_url),\n+ subprocess.check_output('git -c http.sslVerify=%s -c init.templateDir= clone \"%s\" config' % (verify_ssl, repo_url),\n shell=True)\n output.info(\"Repo cloned\")\n except Exception as e:\n", "issue": "Consider turning off template directories in Git for conan config install\nTo help us debug your issue please explain:\r\n\r\n- [x] I've read the [CONTRIBUTING guide](https://raw.githubusercontent.com/conan-io/conan/develop/.github/CONTRIBUTING.md).\r\n- [x] I've specified the Conan version, operating system version and any tool that can be relevant.\r\n- [x] I've explained the steps to reproduce the error or the motivation/use case of the question/suggestion.\r\n\r\nThis is with Conan 1.6.0 on Windows 10.\r\n\r\nTo see this in progress, have a Git installation set up with automatic ctags generation akin to what's described in [Effortless Ctags with Git](https://tbaggery.com/2011/08/08/effortless-ctags-with-git.html).\r\n\r\nWhen doing `conan config install` with a Git URL, I get error messages like this:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\shutil.py\", line 391, in _rmtree_unsafe\r\n os.rmdir(path)\r\nOSError: [WinError 145] The directory is not empty: 'C:\\\\Users\\\\kam\\\\.conan\\\\tmp_config_install\\\\config\\\\.git\\\\hooks'\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\site-packages\\conans\\client\\command.py\", line 1230, in run\r\n method(args[0][1:])\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\site-packages\\conans\\client\\command.py\", line 379, in config\r\n return self._conan.config_install(args.item, verify_ssl, args.type)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\site-packages\\conans\\client\\conan_api.py\", line 79, in wrapper\r\n return f(*args, **kwargs)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\site-packages\\conans\\client\\conan_api.py\", line 510, in config_install\r\n return configuration_install(item, self._client_cache, self._user_io.out, verify_ssl, config_type)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\site-packages\\conans\\client\\conf\\config_installer.py\", line 135, in configuration_install\r\n rmdir(tmp_folder)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\site-packages\\conans\\util\\files.py\", line 202, in rmdir\r\n shutil.rmtree(path, onerror=_change_permissions)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\shutil.py\", line 494, in rmtree\r\n return _rmtree_unsafe(path, onerror)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\shutil.py\", line 384, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\shutil.py\", line 384, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\shutil.py\", line 384, in _rmtree_unsafe\r\n _rmtree_unsafe(fullname, onerror)\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\shutil.py\", line 393, in 
_rmtree_unsafe\r\n onerror(os.rmdir, path, sys.exc_info())\r\n File \"c:\\users\\kam\\.virtualenvs\\skel-hv6zqtfh\\lib\\site-packages\\conans\\util\\files.py\", line 197, in _change_permissions\r\n raise OSError(\"Cannot change permissions for {}! Exception info: {}\".format(path, exc_info))\r\nOSError: Cannot change permissions for C:\\Users\\kam\\.conan\\tmp_config_install\\config\\.git\\hooks! Exception info: (<class 'OSError'>, OSError(41, 'The directory is not empty'), <traceback object at 0x0000016409078548>)\r\n\r\nERROR: Cannot change permissions for C:\\Users\\kam\\.conan\\tmp_config_install\\config\\.git\\hooks! Exception info: (<class 'OSError'>, OSError(41, 'The directory is not empty'), <traceback object at 0x0000016409078548>)\r\n```\r\n\r\nThe problem boils down to the way files are locked on Windows. It's not possible to delete open files. But the hook starts a background process which has the script files open, and the cleanup of the directory fails. Of course, a second later, the directory can be deleted as files are closed and unlocked.\r\n\r\nI've since started reworking my Git template to not start background processes by default, and only run `ctags` in checkouts that I'm actually developing on. This fixes my problem with `conan config install`.\r\n\r\nIt may make sense, though, to add `-c init.templateDir=` to the `git` command line when working on temporary downloads. It's part of sanitizing the process against user configuration: In a hook, _anything_ can happen, and the errors are obscure and difficult to understand.\r\n\r\n`conan config install` is mostly treating a Git repository as a sophisticated form of file archive, and probably doesn't want the user's hooks or other unusual setups from the template.\r\n\n", "before_files": [{"content": "import os\nimport shutil\nfrom six.moves.urllib.parse import urlparse\n\nfrom conans.tools import unzip\nfrom conans.util.files import rmdir, mkdir\nfrom conans.client.remote_registry import RemoteRegistry\nfrom conans import tools\nfrom conans.errors import ConanException\nimport subprocess\n\n\ndef _hide_password(resource):\n \"\"\"\n Hide password from url/file path\n\n :param resource: string with url or file path\n :return: resource with hidden password if present\n \"\"\"\n password = urlparse(resource).password\n return resource.replace(password, \"<hidden>\") if password else resource\n\n\ndef _handle_remotes(registry_path, remote_file, output):\n registry = RemoteRegistry(registry_path, output)\n new_registry = RemoteRegistry(remote_file, output)\n registry.define_remotes(new_registry.remotes)\n\n\ndef _handle_profiles(source_folder, target_folder, output):\n mkdir(target_folder)\n for root, _, files in os.walk(source_folder):\n relative_path = os.path.relpath(root, source_folder)\n if relative_path == \".\":\n relative_path = \"\"\n for f in files:\n profile = os.path.join(relative_path, f)\n output.info(\" Installing profile %s\" % profile)\n shutil.copy(os.path.join(root, f), os.path.join(target_folder, profile))\n\n\ndef _process_git_repo(repo_url, client_cache, output, tmp_folder, verify_ssl):\n output.info(\"Trying to clone repo %s\" % repo_url)\n\n with tools.chdir(tmp_folder):\n try:\n subprocess.check_output('git -c http.sslVerify=%s clone \"%s\" config' % (verify_ssl, repo_url),\n shell=True)\n output.info(\"Repo cloned\")\n except Exception as e:\n raise ConanException(\"config install error. 
Can't clone repo: %s\" % str(e))\n\n tmp_folder = os.path.join(tmp_folder, \"config\")\n _process_folder(tmp_folder, client_cache, output)\n\n\ndef _process_zip_file(zippath, client_cache, output, tmp_folder, remove=False):\n unzip(zippath, tmp_folder)\n if remove:\n os.unlink(zippath)\n _process_folder(tmp_folder, client_cache, output)\n\n\ndef _handle_conan_conf(current_conan_conf, new_conan_conf_path):\n current_conan_conf.read(new_conan_conf_path)\n with open(current_conan_conf.filename, \"w\") as f:\n current_conan_conf.write(f)\n\n\ndef _process_folder(folder, client_cache, output):\n for root, dirs, files in os.walk(folder):\n for f in files:\n if f == \"settings.yml\":\n output.info(\"Installing settings.yml\")\n settings_path = client_cache.settings_path\n shutil.copy(os.path.join(root, f), settings_path)\n elif f == \"conan.conf\":\n output.info(\"Processing conan.conf\")\n conan_conf = client_cache.conan_config\n _handle_conan_conf(conan_conf, os.path.join(root, f))\n elif f == \"remotes.txt\":\n output.info(\"Defining remotes\")\n registry_path = client_cache.registry\n _handle_remotes(registry_path, os.path.join(root, f), output)\n else:\n relpath = os.path.relpath(root, folder)\n target_folder = os.path.join(client_cache.conan_folder, relpath)\n mkdir(target_folder)\n output.info(\"Copying file %s to %s\" % (f, target_folder))\n shutil.copy(os.path.join(root, f), target_folder)\n for d in dirs:\n if d == \"profiles\":\n output.info(\"Installing profiles\")\n profiles_path = client_cache.profiles_path\n _handle_profiles(os.path.join(root, d), profiles_path, output)\n break\n dirs[:] = [d for d in dirs if d not in (\"profiles\", \".git\")]\n\n\ndef _process_download(item, client_cache, output, tmp_folder, verify_ssl):\n output.info(\"Trying to download %s\" % _hide_password(item))\n zippath = os.path.join(tmp_folder, \"config.zip\")\n try:\n tools.download(item, zippath, out=output, verify=verify_ssl)\n _process_zip_file(zippath, client_cache, output, tmp_folder, remove=True)\n except Exception as e:\n raise ConanException(\"Error while installing config from %s\\n%s\" % (item, str(e)))\n\n\ndef configuration_install(item, client_cache, output, verify_ssl, config_type=None):\n tmp_folder = os.path.join(client_cache.conan_folder, \"tmp_config_install\")\n # necessary for Mac OSX, where the temp folders in /var/ are symlinks to /private/var/\n tmp_folder = os.path.realpath(tmp_folder)\n mkdir(tmp_folder)\n try:\n if item is None:\n try:\n item = client_cache.conan_config.get_item(\"general.config_install\")\n except ConanException:\n raise ConanException(\"Called config install without arguments and \"\n \"'general.config_install' not defined in conan.conf\")\n\n if item.endswith(\".git\") or config_type == \"git\":\n _process_git_repo(item, client_cache, output, tmp_folder, verify_ssl)\n elif os.path.exists(item):\n # is a local file\n _process_zip_file(item, client_cache, output, tmp_folder)\n elif item.startswith(\"http\"):\n _process_download(item, client_cache, output, tmp_folder, verify_ssl)\n else:\n raise ConanException(\"I don't know how to process %s\" % item)\n finally:\n if item:\n client_cache.conan_config.set_item(\"general.config_install\", item)\n rmdir(tmp_folder)\n", "path": "conans/client/conf/config_installer.py"}], "after_files": [{"content": "import os\nimport shutil\nfrom six.moves.urllib.parse import urlparse\n\nfrom conans.tools import unzip\nfrom conans.util.files import rmdir, mkdir\nfrom conans.client.remote_registry import RemoteRegistry\nfrom conans 
import tools\nfrom conans.errors import ConanException\nimport subprocess\n\n\ndef _hide_password(resource):\n \"\"\"\n Hide password from url/file path\n\n :param resource: string with url or file path\n :return: resource with hidden password if present\n \"\"\"\n password = urlparse(resource).password\n return resource.replace(password, \"<hidden>\") if password else resource\n\n\ndef _handle_remotes(registry_path, remote_file, output):\n registry = RemoteRegistry(registry_path, output)\n new_registry = RemoteRegistry(remote_file, output)\n registry.define_remotes(new_registry.remotes)\n\n\ndef _handle_profiles(source_folder, target_folder, output):\n mkdir(target_folder)\n for root, _, files in os.walk(source_folder):\n relative_path = os.path.relpath(root, source_folder)\n if relative_path == \".\":\n relative_path = \"\"\n for f in files:\n profile = os.path.join(relative_path, f)\n output.info(\" Installing profile %s\" % profile)\n shutil.copy(os.path.join(root, f), os.path.join(target_folder, profile))\n\n\ndef _process_git_repo(repo_url, client_cache, output, tmp_folder, verify_ssl):\n output.info(\"Trying to clone repo %s\" % repo_url)\n\n with tools.chdir(tmp_folder):\n try:\n subprocess.check_output('git -c http.sslVerify=%s -c init.templateDir= clone \"%s\" config' % (verify_ssl, repo_url),\n shell=True)\n output.info(\"Repo cloned\")\n except Exception as e:\n raise ConanException(\"config install error. Can't clone repo: %s\" % str(e))\n\n tmp_folder = os.path.join(tmp_folder, \"config\")\n _process_folder(tmp_folder, client_cache, output)\n\n\ndef _process_zip_file(zippath, client_cache, output, tmp_folder, remove=False):\n unzip(zippath, tmp_folder)\n if remove:\n os.unlink(zippath)\n _process_folder(tmp_folder, client_cache, output)\n\n\ndef _handle_conan_conf(current_conan_conf, new_conan_conf_path):\n current_conan_conf.read(new_conan_conf_path)\n with open(current_conan_conf.filename, \"w\") as f:\n current_conan_conf.write(f)\n\n\ndef _process_folder(folder, client_cache, output):\n for root, dirs, files in os.walk(folder):\n for f in files:\n if f == \"settings.yml\":\n output.info(\"Installing settings.yml\")\n settings_path = client_cache.settings_path\n shutil.copy(os.path.join(root, f), settings_path)\n elif f == \"conan.conf\":\n output.info(\"Processing conan.conf\")\n conan_conf = client_cache.conan_config\n _handle_conan_conf(conan_conf, os.path.join(root, f))\n elif f == \"remotes.txt\":\n output.info(\"Defining remotes\")\n registry_path = client_cache.registry\n _handle_remotes(registry_path, os.path.join(root, f), output)\n else:\n relpath = os.path.relpath(root, folder)\n target_folder = os.path.join(client_cache.conan_folder, relpath)\n mkdir(target_folder)\n output.info(\"Copying file %s to %s\" % (f, target_folder))\n shutil.copy(os.path.join(root, f), target_folder)\n for d in dirs:\n if d == \"profiles\":\n output.info(\"Installing profiles\")\n profiles_path = client_cache.profiles_path\n _handle_profiles(os.path.join(root, d), profiles_path, output)\n break\n dirs[:] = [d for d in dirs if d not in (\"profiles\", \".git\")]\n\n\ndef _process_download(item, client_cache, output, tmp_folder, verify_ssl):\n output.info(\"Trying to download %s\" % _hide_password(item))\n zippath = os.path.join(tmp_folder, \"config.zip\")\n try:\n tools.download(item, zippath, out=output, verify=verify_ssl)\n _process_zip_file(zippath, client_cache, output, tmp_folder, remove=True)\n except Exception as e:\n raise ConanException(\"Error while installing config from 
%s\\n%s\" % (item, str(e)))\n\n\ndef configuration_install(item, client_cache, output, verify_ssl, config_type=None):\n tmp_folder = os.path.join(client_cache.conan_folder, \"tmp_config_install\")\n # necessary for Mac OSX, where the temp folders in /var/ are symlinks to /private/var/\n tmp_folder = os.path.realpath(tmp_folder)\n mkdir(tmp_folder)\n try:\n if item is None:\n try:\n item = client_cache.conan_config.get_item(\"general.config_install\")\n except ConanException:\n raise ConanException(\"Called config install without arguments and \"\n \"'general.config_install' not defined in conan.conf\")\n\n if item.endswith(\".git\") or config_type == \"git\":\n _process_git_repo(item, client_cache, output, tmp_folder, verify_ssl)\n elif os.path.exists(item):\n # is a local file\n _process_zip_file(item, client_cache, output, tmp_folder)\n elif item.startswith(\"http\"):\n _process_download(item, client_cache, output, tmp_folder, verify_ssl)\n else:\n raise ConanException(\"I don't know how to process %s\" % item)\n finally:\n if item:\n client_cache.conan_config.set_item(\"general.config_install\", item)\n rmdir(tmp_folder)\n", "path": "conans/client/conf/config_installer.py"}]} |
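For reference, a minimal sketch of the hardened clone call from the golden diff above; passing an empty `init.templateDir` keeps user-configured Git templates (and their hooks) out of the temporary clone that conan must later delete. The repository URL below is hypothetical:

```python
import subprocess

# Hypothetical inputs for illustration only.
repo_url = "https://example.com/org/conan-config.git"
verify_ssl = True

# An empty init.templateDir overrides any user-level template directory,
# so no hooks (e.g. background ctags generation) are copied into the
# temporary clone and no background process can hold its files open.
subprocess.check_output(
    'git -c http.sslVerify=%s -c init.templateDir= clone "%s" config'
    % (verify_ssl, repo_url),
    shell=True,
)
```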
gh_patches_debug_1622 | rasdani/github-patches | git_diff | googleapis__google-cloud-python-1481 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
pubsub fails if data key is not present
If a message is published with a zero-length string (`topic.publish('', url=url, title=title)`), the received message has no `data` field, and a `KeyError` is thrown when trying to transform the message from the PubSub API representation.
https://github.com/GoogleCloudPlatform/gcloud-python/blob/master/gcloud/pubsub/message.py#L74
```
Traceback (most recent call last):
File "/en_notifications/en_notifications.py", line 51, in <module>
received = PS_SUBSCRIPTION.pull(max_messages=PULL_COUNT)
File "/usr/local/lib/python2.7/dist-packages/gcloud/pubsub/subscription.py", line 212, in pull
    for info in response.get('receivedMessages', ())]
File "/usr/local/lib/python2.7/dist-packages/gcloud/pubsub/message.py", line 74, in from_api_repr
    data = base64.b64decode(api_repr['data'])
KeyError: 'data'
```
--- END ISSUE ---
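A minimal sketch of the failure mode, using a hypothetical API payload: when the published payload is empty, the REST representation omits the `data` key entirely, so a plain dict lookup raises while `dict.get` with a default does not.

```python
import base64

# Hypothetical API representation of a message published with an empty
# payload: the service omits the 'data' key entirely.
api_repr = {"messageId": "123", "attributes": {"url": "http://example.com"}}

try:
    base64.b64decode(api_repr["data"])  # the dict lookup raises first
except KeyError:
    pass  # this is the crash reported above

# The tolerant form, defaulting to empty bytes:
data = base64.b64decode(api_repr.get("data", b""))
assert data == b""
```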
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `gcloud/pubsub/message.py`
Content:
```
1 # Copyright 2015 Google Inc. All rights reserved.
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 """Define API Topics."""
16
17 import base64
18
19 from gcloud._helpers import _rfc3339_to_datetime
20
21
22 class Message(object):
23 """Messages can be published to a topic and received by subscribers.
24
25 See:
26 https://cloud.google.com/pubsub/reference/rest/v1/PubsubMessage
27
28 :type data: bytes
29 :param data: the payload of the message
30
31 :type message_id: string
32 :param message_id: An ID assigned to the message by the API.
33
34 :type attributes: dict or None
35 :param attributes: Extra metadata associated by the publisher with the
36 message.
37 """
38 def __init__(self, data, message_id, attributes=None):
39 self.data = data
40 self.message_id = message_id
41 self._attributes = attributes
42
43 @property
44 def attributes(self):
45 """Lazily-constructed attribute dictionary"""
46 if self._attributes is None:
47 self._attributes = {}
48 return self._attributes
49
50 @property
51 def timestamp(self):
52 """Return sortable timestamp from attributes, if passed.
53
54 Allows sorting messages in publication order (assuming consistent
55 clocks across all publishers).
56
57 :rtype: :class:`datetime.datetime`
58 :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp
59 :raises: ValueError if timestamp not in ``attributes``, or if it does
60 not match the RFC 3339 format.
61 """
62 stamp = self.attributes.get('timestamp')
63 if stamp is None:
64 raise ValueError('No timestamp')
65 return _rfc3339_to_datetime(stamp)
66
67 @classmethod
68 def from_api_repr(cls, api_repr):
69 """Factory: construct message from API representation.
70
71 :type api_repr: dict or None
72 :param api_repr: The API representation of the message
73 """
74 data = base64.b64decode(api_repr['data'])
75 return cls(data=data, message_id=api_repr['messageId'],
76 attributes=api_repr.get('attributes'))
77
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/gcloud/pubsub/message.py b/gcloud/pubsub/message.py
--- a/gcloud/pubsub/message.py
+++ b/gcloud/pubsub/message.py
@@ -71,6 +71,6 @@
:type api_repr: dict or None
:param api_repr: The API representation of the message
"""
- data = base64.b64decode(api_repr['data'])
+ data = base64.b64decode(api_repr.get('data', b''))
return cls(data=data, message_id=api_repr['messageId'],
attributes=api_repr.get('attributes'))
| {"golden_diff": "diff --git a/gcloud/pubsub/message.py b/gcloud/pubsub/message.py\n--- a/gcloud/pubsub/message.py\n+++ b/gcloud/pubsub/message.py\n@@ -71,6 +71,6 @@\n :type api_repr: dict or None\n :param api_repr: The API representation of the message\n \"\"\"\n- data = base64.b64decode(api_repr['data'])\n+ data = base64.b64decode(api_repr.get('data', b''))\n return cls(data=data, message_id=api_repr['messageId'],\n attributes=api_repr.get('attributes'))\n", "issue": "pubsub fails if data key is not present\nIf a message is published with a string of 0 length (`topic.publish( '', url=url, title=title)`) when the message is received there is no data field in the message and a key error is thrown when trying to transform the message from the PubSub API representation.\n\nhttps://github.com/GoogleCloudPlatform/gcloud-python/blob/master/gcloud/pubsub/message.py#L74\n\n```\nTraceback (most recent call last):\nFile \"/en_notifications/en_notifications.py\", line 51, in <module>\nreceived = PS_SUBSCRIPTION.pull(max_messages=PULL_COUNT)\nFile \"/usr/local/lib/python2.7/dist-packages/gcloud/pubsub/subscription.py\", line 212, in pull\nFile \"/usr/local/lib/python2.7/dist-packages/gcloud/pubsub/message.py\", line 74, in from_api_repr\nfor info in response.get('receivedMessages', ())]\ndata = base64.b64decode(api_repr['data'])\nKeyError: 'data'\n```\n\n", "before_files": [{"content": "# Copyright 2015 Google Inc. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Define API Topics.\"\"\"\n\nimport base64\n\nfrom gcloud._helpers import _rfc3339_to_datetime\n\n\nclass Message(object):\n \"\"\"Messages can be published to a topic and received by subscribers.\n\n See:\n https://cloud.google.com/pubsub/reference/rest/v1/PubsubMessage\n\n :type data: bytes\n :param data: the payload of the message\n\n :type message_id: string\n :param message_id: An ID assigned to the message by the API.\n\n :type attributes: dict or None\n :param attributes: Extra metadata associated by the publisher with the\n message.\n \"\"\"\n def __init__(self, data, message_id, attributes=None):\n self.data = data\n self.message_id = message_id\n self._attributes = attributes\n\n @property\n def attributes(self):\n \"\"\"Lazily-constructed attribute dictionary\"\"\"\n if self._attributes is None:\n self._attributes = {}\n return self._attributes\n\n @property\n def timestamp(self):\n \"\"\"Return sortable timestamp from attributes, if passed.\n\n Allows sorting messages in publication order (assuming consistent\n clocks across all publishers).\n\n :rtype: :class:`datetime.datetime`\n :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp\n :raises: ValueError if timestamp not in ``attributes``, or if it does\n not match the RFC 3339 format.\n \"\"\"\n stamp = self.attributes.get('timestamp')\n if stamp is None:\n raise ValueError('No timestamp')\n return _rfc3339_to_datetime(stamp)\n\n @classmethod\n def from_api_repr(cls, api_repr):\n \"\"\"Factory: construct message from API 
representation.\n\n :type api_repr: dict or None\n :param api_repr: The API representation of the message\n \"\"\"\n data = base64.b64decode(api_repr['data'])\n return cls(data=data, message_id=api_repr['messageId'],\n attributes=api_repr.get('attributes'))\n", "path": "gcloud/pubsub/message.py"}], "after_files": [{"content": "# Copyright 2015 Google Inc. All rights reserved.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\"\"\"Define API Topics.\"\"\"\n\nimport base64\n\nfrom gcloud._helpers import _rfc3339_to_datetime\n\n\nclass Message(object):\n \"\"\"Messages can be published to a topic and received by subscribers.\n\n See:\n https://cloud.google.com/pubsub/reference/rest/v1/PubsubMessage\n\n :type data: bytes\n :param data: the payload of the message\n\n :type message_id: string\n :param message_id: An ID assigned to the message by the API.\n\n :type attributes: dict or None\n :param attributes: Extra metadata associated by the publisher with the\n message.\n \"\"\"\n def __init__(self, data, message_id, attributes=None):\n self.data = data\n self.message_id = message_id\n self._attributes = attributes\n\n @property\n def attributes(self):\n \"\"\"Lazily-constructed attribute dictionary\"\"\"\n if self._attributes is None:\n self._attributes = {}\n return self._attributes\n\n @property\n def timestamp(self):\n \"\"\"Return sortable timestamp from attributes, if passed.\n\n Allows sorting messages in publication order (assuming consistent\n clocks across all publishers).\n\n :rtype: :class:`datetime.datetime`\n :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp\n :raises: ValueError if timestamp not in ``attributes``, or if it does\n not match the RFC 3339 format.\n \"\"\"\n stamp = self.attributes.get('timestamp')\n if stamp is None:\n raise ValueError('No timestamp')\n return _rfc3339_to_datetime(stamp)\n\n @classmethod\n def from_api_repr(cls, api_repr):\n \"\"\"Factory: construct message from API representation.\n\n :type api_repr: dict or None\n :param api_repr: The API representation of the message\n \"\"\"\n data = base64.b64decode(api_repr.get('data', b''))\n return cls(data=data, message_id=api_repr['messageId'],\n attributes=api_repr.get('attributes'))\n", "path": "gcloud/pubsub/message.py"}]} |
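With the golden diff above applied, constructing a `Message` from a data-less payload yields empty bytes instead of raising; a small usage sketch (the payload is hypothetical):

```python
from gcloud.pubsub.message import Message

# Hypothetical payload for a message published with an empty string.
api_repr = {"messageId": "123", "attributes": {"title": "hello"}}

message = Message.from_api_repr(api_repr)
assert message.data == b""
assert message.message_id == "123"
assert message.attributes == {"title": "hello"}
```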
gh_patches_debug_1623 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-2093 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
RuntimeError: Requested component 'jaeger' not found in entry points for 'opentelemetry_exporter'
From the exporter selection section of the [spec](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/sdk-environment-variables.md#exporter-selection)
>Known values for OTEL_TRACES_EXPORTER are:
>
>- "otlp": OTLP
>- "jaeger": Jaeger gRPC
>- "zipkin": Zipkin (Defaults to protobuf format)
>- "none": No automatically configured exporter for traces.
We have split up the exporters based on protocol + serialisation to avoid taking unnecessary dependencies, so there is no entry point for `jaeger`. If someone reads the spec and follows that recommendation, they will run into this error. We should either add a `jaeger` entry point or an alias that solves this problem.
--- END ISSUE ---
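A sketch of where the lookup fails, mirroring `_import_tracer_provider_config_components` from the file below; the group and component names are illustrative:

```python
from pkg_resources import iter_entry_points

def load_component(selected, entry_point_name):
    # Mirrors _import_tracer_provider_config_components: build a
    # name -> entry point map, then fail loudly on a missing name.
    eps = {ep.name: ep for ep in iter_entry_points(entry_point_name)}
    entry_point = eps.get(selected)
    if entry_point is None:
        raise RuntimeError(
            f"Requested component '{selected}' not found in entry "
            f"points for '{entry_point_name}'"
        )
    return entry_point.load()

# OTEL_TRACES_EXPORTER=jaeger ends up here; only protocol-specific
# names (e.g. something like 'jaeger_thrift') are actually registered,
# so this raises the RuntimeError quoted in the issue title.
load_component("jaeger", "opentelemetry_exporter")
```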
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14 #
15
16 """
17 OpenTelemetry SDK Configurator for Easy Instrumentation with Distros
18 """
19
20 from os import environ
21 from typing import Sequence, Tuple
22
23 from pkg_resources import iter_entry_points
24
25 from opentelemetry import trace
26 from opentelemetry.environment_variables import (
27 OTEL_PYTHON_ID_GENERATOR,
28 OTEL_TRACES_EXPORTER,
29 )
30 from opentelemetry.instrumentation.configurator import BaseConfigurator
31 from opentelemetry.sdk.trace import TracerProvider
32 from opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter
33 from opentelemetry.sdk.trace.id_generator import IdGenerator
34
35 _EXPORTER_OTLP = "otlp"
36 _EXPORTER_OTLP_SPAN = "otlp_proto_grpc_span"
37
38 _RANDOM_ID_GENERATOR = "random"
39 _DEFAULT_ID_GENERATOR = _RANDOM_ID_GENERATOR
40
41
42 def _get_id_generator() -> str:
43 return environ.get(OTEL_PYTHON_ID_GENERATOR, _DEFAULT_ID_GENERATOR)
44
45
46 def _get_exporter_names() -> Sequence[str]:
47 trace_exporters = environ.get(OTEL_TRACES_EXPORTER)
48
49 exporters = set()
50
51 if trace_exporters and trace_exporters.lower().strip() != "none":
52 exporters.update(
53 {
54 trace_exporter.strip()
55 for trace_exporter in trace_exporters.split(",")
56 }
57 )
58
59 if _EXPORTER_OTLP in exporters:
60 exporters.remove(_EXPORTER_OTLP)
61 exporters.add(_EXPORTER_OTLP_SPAN)
62
63 return list(exporters)
64
65
66 def _init_tracing(
67 exporters: Sequence[SpanExporter], id_generator: IdGenerator
68 ):
69 # if env var OTEL_RESOURCE_ATTRIBUTES is given, it will read the service_name
70 # from the env variable else defaults to "unknown_service"
71 provider = TracerProvider(
72 id_generator=id_generator(),
73 )
74 trace.set_tracer_provider(provider)
75
76 for _, exporter_class in exporters.items():
77 exporter_args = {}
78 provider.add_span_processor(
79 BatchSpanProcessor(exporter_class(**exporter_args))
80 )
81
82
83 def _import_tracer_provider_config_components(
84 selected_components, entry_point_name
85 ) -> Sequence[Tuple[str, object]]:
86 component_entry_points = {
87 ep.name: ep for ep in iter_entry_points(entry_point_name)
88 }
89 component_impls = []
90 for selected_component in selected_components:
91 entry_point = component_entry_points.get(selected_component, None)
92 if not entry_point:
93 raise RuntimeError(
94 f"Requested component '{selected_component}' not found in entry points for '{entry_point_name}'"
95 )
96
97 component_impl = entry_point.load()
98 component_impls.append((selected_component, component_impl))
99
100 return component_impls
101
102
103 def _import_exporters(
104 exporter_names: Sequence[str],
105 ) -> Sequence[SpanExporter]:
106 trace_exporters = {}
107
108 for (
109 exporter_name,
110 exporter_impl,
111 ) in _import_tracer_provider_config_components(
112 exporter_names, "opentelemetry_exporter"
113 ):
114 if issubclass(exporter_impl, SpanExporter):
115 trace_exporters[exporter_name] = exporter_impl
116 else:
117 raise RuntimeError(f"{exporter_name} is not a trace exporter")
118 return trace_exporters
119
120
121 def _import_id_generator(id_generator_name: str) -> IdGenerator:
122 # pylint: disable=unbalanced-tuple-unpacking
123 [
124 (id_generator_name, id_generator_impl)
125 ] = _import_tracer_provider_config_components(
126 [id_generator_name.strip()], "opentelemetry_id_generator"
127 )
128
129 if issubclass(id_generator_impl, IdGenerator):
130 return id_generator_impl
131
132 raise RuntimeError(f"{id_generator_name} is not an IdGenerator")
133
134
135 def _initialize_components():
136 exporter_names = _get_exporter_names()
137 trace_exporters = _import_exporters(exporter_names)
138 id_generator_name = _get_id_generator()
139 id_generator = _import_id_generator(id_generator_name)
140 _init_tracing(trace_exporters, id_generator)
141
142
143 class _OTelSDKConfigurator(BaseConfigurator):
144 """A basic Configurator by OTel Python for initalizing OTel SDK components
145
146 Initializes several crucial OTel SDK components (i.e. TracerProvider,
147 MeterProvider, Processors...) according to a default implementation. Other
148 Configurators can subclass and slightly alter this initialization.
149
150 NOTE: This class should not be instantiated nor should it become an entry
151 point on the `opentelemetry-sdk` package. Instead, distros should subclass
152 this Configurator and enchance it as needed.
153 """
154
155 def _configure(self, **kwargs):
156 _initialize_components()
157
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py
--- a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py
+++ b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py
@@ -109,7 +109,7 @@
exporter_name,
exporter_impl,
) in _import_tracer_provider_config_components(
- exporter_names, "opentelemetry_exporter"
+ exporter_names, "opentelemetry_traces_exporter"
):
if issubclass(exporter_impl, SpanExporter):
trace_exporters[exporter_name] = exporter_impl
| {"golden_diff": "diff --git a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py\n--- a/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py\n+++ b/opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py\n@@ -109,7 +109,7 @@\n exporter_name,\n exporter_impl,\n ) in _import_tracer_provider_config_components(\n- exporter_names, \"opentelemetry_exporter\"\n+ exporter_names, \"opentelemetry_traces_exporter\"\n ):\n if issubclass(exporter_impl, SpanExporter):\n trace_exporters[exporter_name] = exporter_impl\n", "issue": "RuntimeError: Requested component 'jaeger' not found in entry points for 'opentelemetry_exporter'\nFrom the exporters selection of [spec](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/sdk-environment-variables.md#exporter-selection)\r\n\r\n>Known values for OTEL_TRACES_EXPORTER are:\r\n>\r\n>- \"otlp\": OTLP\r\n>- \"jaeger\": Jaeger gRPC\r\n>- \"zipkin\": Zipkin (Defaults to protobuf format)\r\n>- \"none\": No automatically configured exporter for traces.\r\n\r\nWe have split up the exporters based on protocol + serialisation to avoid taking unnecessary dependency so there is no entry point for `jaeger`. If someone reads the spec and follows that recommendation they will run into this error. We should either add an entry point `jaeger` or an alias that solves this problem.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\"\"\"\nOpenTelemetry SDK Configurator for Easy Instrumentation with Distros\n\"\"\"\n\nfrom os import environ\nfrom typing import Sequence, Tuple\n\nfrom pkg_resources import iter_entry_points\n\nfrom opentelemetry import trace\nfrom opentelemetry.environment_variables import (\n OTEL_PYTHON_ID_GENERATOR,\n OTEL_TRACES_EXPORTER,\n)\nfrom opentelemetry.instrumentation.configurator import BaseConfigurator\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter\nfrom opentelemetry.sdk.trace.id_generator import IdGenerator\n\n_EXPORTER_OTLP = \"otlp\"\n_EXPORTER_OTLP_SPAN = \"otlp_proto_grpc_span\"\n\n_RANDOM_ID_GENERATOR = \"random\"\n_DEFAULT_ID_GENERATOR = _RANDOM_ID_GENERATOR\n\n\ndef _get_id_generator() -> str:\n return environ.get(OTEL_PYTHON_ID_GENERATOR, _DEFAULT_ID_GENERATOR)\n\n\ndef _get_exporter_names() -> Sequence[str]:\n trace_exporters = environ.get(OTEL_TRACES_EXPORTER)\n\n exporters = set()\n\n if trace_exporters and trace_exporters.lower().strip() != \"none\":\n exporters.update(\n {\n trace_exporter.strip()\n for trace_exporter in trace_exporters.split(\",\")\n }\n )\n\n if _EXPORTER_OTLP in exporters:\n exporters.remove(_EXPORTER_OTLP)\n exporters.add(_EXPORTER_OTLP_SPAN)\n\n return list(exporters)\n\n\ndef _init_tracing(\n exporters: Sequence[SpanExporter], id_generator: IdGenerator\n):\n # if env var OTEL_RESOURCE_ATTRIBUTES is given, it will 
read the service_name\n # from the env variable else defaults to \"unknown_service\"\n provider = TracerProvider(\n id_generator=id_generator(),\n )\n trace.set_tracer_provider(provider)\n\n for _, exporter_class in exporters.items():\n exporter_args = {}\n provider.add_span_processor(\n BatchSpanProcessor(exporter_class(**exporter_args))\n )\n\n\ndef _import_tracer_provider_config_components(\n selected_components, entry_point_name\n) -> Sequence[Tuple[str, object]]:\n component_entry_points = {\n ep.name: ep for ep in iter_entry_points(entry_point_name)\n }\n component_impls = []\n for selected_component in selected_components:\n entry_point = component_entry_points.get(selected_component, None)\n if not entry_point:\n raise RuntimeError(\n f\"Requested component '{selected_component}' not found in entry points for '{entry_point_name}'\"\n )\n\n component_impl = entry_point.load()\n component_impls.append((selected_component, component_impl))\n\n return component_impls\n\n\ndef _import_exporters(\n exporter_names: Sequence[str],\n) -> Sequence[SpanExporter]:\n trace_exporters = {}\n\n for (\n exporter_name,\n exporter_impl,\n ) in _import_tracer_provider_config_components(\n exporter_names, \"opentelemetry_exporter\"\n ):\n if issubclass(exporter_impl, SpanExporter):\n trace_exporters[exporter_name] = exporter_impl\n else:\n raise RuntimeError(f\"{exporter_name} is not a trace exporter\")\n return trace_exporters\n\n\ndef _import_id_generator(id_generator_name: str) -> IdGenerator:\n # pylint: disable=unbalanced-tuple-unpacking\n [\n (id_generator_name, id_generator_impl)\n ] = _import_tracer_provider_config_components(\n [id_generator_name.strip()], \"opentelemetry_id_generator\"\n )\n\n if issubclass(id_generator_impl, IdGenerator):\n return id_generator_impl\n\n raise RuntimeError(f\"{id_generator_name} is not an IdGenerator\")\n\n\ndef _initialize_components():\n exporter_names = _get_exporter_names()\n trace_exporters = _import_exporters(exporter_names)\n id_generator_name = _get_id_generator()\n id_generator = _import_id_generator(id_generator_name)\n _init_tracing(trace_exporters, id_generator)\n\n\nclass _OTelSDKConfigurator(BaseConfigurator):\n \"\"\"A basic Configurator by OTel Python for initalizing OTel SDK components\n\n Initializes several crucial OTel SDK components (i.e. TracerProvider,\n MeterProvider, Processors...) according to a default implementation. Other\n Configurators can subclass and slightly alter this initialization.\n\n NOTE: This class should not be instantiated nor should it become an entry\n point on the `opentelemetry-sdk` package. 
Instead, distros should subclass\n this Configurator and enchance it as needed.\n \"\"\"\n\n def _configure(self, **kwargs):\n _initialize_components()\n", "path": "opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\"\"\"\nOpenTelemetry SDK Configurator for Easy Instrumentation with Distros\n\"\"\"\n\nfrom os import environ\nfrom typing import Sequence, Tuple\n\nfrom pkg_resources import iter_entry_points\n\nfrom opentelemetry import trace\nfrom opentelemetry.environment_variables import (\n OTEL_PYTHON_ID_GENERATOR,\n OTEL_TRACES_EXPORTER,\n)\nfrom opentelemetry.instrumentation.configurator import BaseConfigurator\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import BatchSpanProcessor, SpanExporter\nfrom opentelemetry.sdk.trace.id_generator import IdGenerator\n\n_EXPORTER_OTLP = \"otlp\"\n_EXPORTER_OTLP_SPAN = \"otlp_proto_grpc_span\"\n\n_RANDOM_ID_GENERATOR = \"random\"\n_DEFAULT_ID_GENERATOR = _RANDOM_ID_GENERATOR\n\n\ndef _get_id_generator() -> str:\n return environ.get(OTEL_PYTHON_ID_GENERATOR, _DEFAULT_ID_GENERATOR)\n\n\ndef _get_exporter_names() -> Sequence[str]:\n trace_exporters = environ.get(OTEL_TRACES_EXPORTER)\n\n exporters = set()\n\n if trace_exporters and trace_exporters.lower().strip() != \"none\":\n exporters.update(\n {\n trace_exporter.strip()\n for trace_exporter in trace_exporters.split(\",\")\n }\n )\n\n if _EXPORTER_OTLP in exporters:\n exporters.remove(_EXPORTER_OTLP)\n exporters.add(_EXPORTER_OTLP_SPAN)\n\n return list(exporters)\n\n\ndef _init_tracing(\n exporters: Sequence[SpanExporter], id_generator: IdGenerator\n):\n # if env var OTEL_RESOURCE_ATTRIBUTES is given, it will read the service_name\n # from the env variable else defaults to \"unknown_service\"\n provider = TracerProvider(\n id_generator=id_generator(),\n )\n trace.set_tracer_provider(provider)\n\n for _, exporter_class in exporters.items():\n exporter_args = {}\n provider.add_span_processor(\n BatchSpanProcessor(exporter_class(**exporter_args))\n )\n\n\ndef _import_tracer_provider_config_components(\n selected_components, entry_point_name\n) -> Sequence[Tuple[str, object]]:\n component_entry_points = {\n ep.name: ep for ep in iter_entry_points(entry_point_name)\n }\n component_impls = []\n for selected_component in selected_components:\n entry_point = component_entry_points.get(selected_component, None)\n if not entry_point:\n raise RuntimeError(\n f\"Requested component '{selected_component}' not found in entry points for '{entry_point_name}'\"\n )\n\n component_impl = entry_point.load()\n component_impls.append((selected_component, component_impl))\n\n return component_impls\n\n\ndef _import_exporters(\n exporter_names: Sequence[str],\n) -> Sequence[SpanExporter]:\n trace_exporters = {}\n\n for (\n exporter_name,\n exporter_impl,\n ) in _import_tracer_provider_config_components(\n exporter_names, 
\"opentelemetry_traces_exporter\"\n ):\n if issubclass(exporter_impl, SpanExporter):\n trace_exporters[exporter_name] = exporter_impl\n else:\n raise RuntimeError(f\"{exporter_name} is not a trace exporter\")\n return trace_exporters\n\n\ndef _import_id_generator(id_generator_name: str) -> IdGenerator:\n # pylint: disable=unbalanced-tuple-unpacking\n [\n (id_generator_name, id_generator_impl)\n ] = _import_tracer_provider_config_components(\n [id_generator_name.strip()], \"opentelemetry_id_generator\"\n )\n\n if issubclass(id_generator_impl, IdGenerator):\n return id_generator_impl\n\n raise RuntimeError(f\"{id_generator_name} is not an IdGenerator\")\n\n\ndef _initialize_components():\n exporter_names = _get_exporter_names()\n trace_exporters = _import_exporters(exporter_names)\n id_generator_name = _get_id_generator()\n id_generator = _import_id_generator(id_generator_name)\n _init_tracing(trace_exporters, id_generator)\n\n\nclass _OTelSDKConfigurator(BaseConfigurator):\n \"\"\"A basic Configurator by OTel Python for initalizing OTel SDK components\n\n Initializes several crucial OTel SDK components (i.e. TracerProvider,\n MeterProvider, Processors...) according to a default implementation. Other\n Configurators can subclass and slightly alter this initialization.\n\n NOTE: This class should not be instantiated nor should it become an entry\n point on the `opentelemetry-sdk` package. Instead, distros should subclass\n this Configurator and enchance it as needed.\n \"\"\"\n\n def _configure(self, **kwargs):\n _initialize_components()\n", "path": "opentelemetry-sdk/src/opentelemetry/sdk/_configuration/__init__.py"}]} |
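The fix queries the `opentelemetry_traces_exporter` group instead. A hypothetical `setup.py` fragment sketching how an exporter distribution would register under that group (package and class names are invented):

```python
# Hypothetical setup.py fragment for an exporter distribution; the group
# name matches the one the patched SDK queries.
from setuptools import setup

setup(
    name="my-exporter-distro",  # invented package name
    entry_points={
        "opentelemetry_traces_exporter": [
            "myexporter = my_exporter.module:MySpanExporter",
        ],
    },
)
```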
gh_patches_debug_1624 | rasdani/github-patches | git_diff | open-telemetry__opentelemetry-python-636 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add a standard way to "reset" a Configuration object for testing
It is a common occurrence in tests that the global `Configuration` object needs to be "reset" between tests. This means that its attributes need to be set back to their original values. Since the `Configuration` object is immutable by design, an additional mechanism that is not available to production code is needed to perform this action.
The need for this feature was mentioned in a [conversation](https://github.com/open-telemetry/opentelemetry-python/pull/630#discussion_r418343720) in #630.
--- END ISSUE ---
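A sketch of the leakage the issue describes, based on the `Configuration` singleton shown below; once the first instantiation reads the environment, later tests see the cached values:

```python
import os

from opentelemetry.configuration import Configuration

os.environ["OPENTELEMETRY_PYTHON_METER_PROVIDER"] = "provider_a"
assert Configuration().meter_provider == "provider_a"

# A later test changes the environment, but the singleton (and the
# class attributes installed on first use) still reflects the first read.
os.environ["OPENTELEMETRY_PYTHON_METER_PROVIDER"] = "provider_b"
assert Configuration().meter_provider == "provider_a"  # stale value
```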
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `opentelemetry-api/src/opentelemetry/configuration/__init__.py`
Content:
```
1 # Copyright The OpenTelemetry Authors
2 #
3 # Licensed under the Apache License, Version 2.0 (the "License");
4 # you may not use this file except in compliance with the License.
5 # You may obtain a copy of the License at
6 #
7 # http://www.apache.org/licenses/LICENSE-2.0
8 #
9 # Unless required by applicable law or agreed to in writing, software
10 # distributed under the License is distributed on an "AS IS" BASIS,
11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 # See the License for the specific language governing permissions and
13 # limitations under the License.
14
15 # FIXME find a better way to avoid all those "Expression has type "Any"" errors
16 # type: ignore
17
18 """
19 Simple configuration manager
20
21 This is a configuration manager for OpenTelemetry. It reads configuration
22 values from environment variables prefixed with ``OPENTELEMETRY_PYTHON_`` whose
23 characters are only alphanumeric characters and unserscores, except for the
24 first character after ``OPENTELEMETRY_PYTHON_`` which must not be a number.
25
26 For example, these environment variables will be read:
27
28 1. ``OPENTELEMETRY_PYTHON_SOMETHING``
29 2. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_``
30 3. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND__ELSE``
31 4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else``
32 4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else2``
33
34 These won't:
35
36 1. ``OPENTELEMETRY_PYTH_SOMETHING``
37 2. ``OPENTELEMETRY_PYTHON_2_SOMETHING_AND__ELSE``
38 3. ``OPENTELEMETRY_PYTHON_SOMETHING_%_ELSE``
39
40 The values stored in the environment variables can be found in an instance of
41 ``opentelemetry.configuration.Configuration``. This class can be instantiated
42 freely because instantiating it returns always the same object.
43
44 For example, if the environment variable
45 ``OPENTELEMETRY_PYTHON_METER_PROVIDER`` value is ``my_meter_provider``, then
46 ``Configuration().meter_provider == "my_meter_provider"`` would be ``True``.
47
48 Non defined attributes will always return ``None``. This is intended to make it
49 easier to use the ``Configuration`` object in actual code, because it won't be
50 necessary to check for the attribute to be defined first.
51
52 Environment variables used by OpenTelemetry
53 -------------------------------------------
54
55 1. OPENTELEMETRY_PYTHON_METER_PROVIDER
56 2. OPENTELEMETRY_PYTHON_TRACER_PROVIDER
57
58 The value of these environment variables should be the name of the entry point
59 that points to the class that implements either provider. This OpenTelemetry
60 API package provides one entry point for each, which can be found in the
61 setup.py file::
62
63 entry_points={
64 ...
65 "opentelemetry_meter_provider": [
66 "default_meter_provider = "
67 "opentelemetry.metrics:DefaultMeterProvider"
68 ],
69 "opentelemetry_tracer_provider": [
70 "default_tracer_provider = "
71 "opentelemetry.trace:DefaultTracerProvider"
72 ],
73 }
74
75 To use the meter provider above, then the
76 ``OPENTELEMETRY_PYTHON_METER_PROVIDER`` should be set to
77 "default_meter_provider" (this is not actually necessary since the
78 OpenTelemetry API provided providers are the default ones used if no
79 configuration is found in the environment variables).
80 """
81
82 from os import environ
83 from re import fullmatch
84
85
86 class Configuration:
87 _instance = None
88
89 __slots__ = []
90
91 def __new__(cls) -> "Configuration":
92 if Configuration._instance is None:
93
94 for key, value in environ.items():
95
96 match = fullmatch(
97 r"OPENTELEMETRY_PYTHON_([A-Za-z_][\w_]*)", key
98 )
99
100 if match is not None:
101
102 key = match.group(1)
103
104 setattr(Configuration, "_{}".format(key), value)
105 setattr(
106 Configuration,
107 key,
108 property(
109 fget=lambda cls, key=key: getattr(
110 cls, "_{}".format(key)
111 )
112 ),
113 )
114
115 Configuration.__slots__.append(key)
116
117 Configuration.__slots__ = tuple(Configuration.__slots__)
118
119 Configuration._instance = object.__new__(cls)
120
121 return cls._instance
122
123 def __getattr__(self, name):
124 return None
125
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/opentelemetry-api/src/opentelemetry/configuration/__init__.py b/opentelemetry-api/src/opentelemetry/configuration/__init__.py
--- a/opentelemetry-api/src/opentelemetry/configuration/__init__.py
+++ b/opentelemetry-api/src/opentelemetry/configuration/__init__.py
@@ -122,3 +122,20 @@
def __getattr__(self, name):
return None
+
+ @classmethod
+ def _reset(cls):
+ """
+ This method "resets" the global configuration attributes
+
+ It is not intended to be used by production code but by testing code
+ only.
+ """
+
+ for slot in cls.__slots__:
+ if slot in cls.__dict__.keys():
+ delattr(cls, slot)
+ delattr(cls, "_{}".format(slot))
+
+ cls.__slots__ = []
+ cls._instance = None
| {"golden_diff": "diff --git a/opentelemetry-api/src/opentelemetry/configuration/__init__.py b/opentelemetry-api/src/opentelemetry/configuration/__init__.py\n--- a/opentelemetry-api/src/opentelemetry/configuration/__init__.py\n+++ b/opentelemetry-api/src/opentelemetry/configuration/__init__.py\n@@ -122,3 +122,20 @@\n \n def __getattr__(self, name):\n return None\n+\n+ @classmethod\n+ def _reset(cls):\n+ \"\"\"\n+ This method \"resets\" the global configuration attributes\n+\n+ It is not intended to be used by production code but by testing code\n+ only.\n+ \"\"\"\n+\n+ for slot in cls.__slots__:\n+ if slot in cls.__dict__.keys():\n+ delattr(cls, slot)\n+ delattr(cls, \"_{}\".format(slot))\n+\n+ cls.__slots__ = []\n+ cls._instance = None\n", "issue": "Add a standard way to \"reset\" a Configuration object for testing\nIt is a common occurrence in tests that the global `Configuration` object needs to be \"reset\" between tests. This means that its attributes need to be set back to their original values. Since the `Configuration` object is immutable by design, some additional, non-production available mechanism is needed to perform this action.\r\n\r\nThe need for this feature was mentioned in a [conversation](https://github.com/open-telemetry/opentelemetry-python/pull/630#discussion_r418343720) in #630.\n", "before_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# FIXME find a better way to avoid all those \"Expression has type \"Any\"\" errors\n# type: ignore\n\n\"\"\"\nSimple configuration manager\n\nThis is a configuration manager for OpenTelemetry. It reads configuration\nvalues from environment variables prefixed with ``OPENTELEMETRY_PYTHON_`` whose\ncharacters are only alphanumeric characters and unserscores, except for the\nfirst character after ``OPENTELEMETRY_PYTHON_`` which must not be a number.\n\nFor example, these environment variables will be read:\n\n1. ``OPENTELEMETRY_PYTHON_SOMETHING``\n2. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_``\n3. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND__ELSE``\n4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else``\n4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else2``\n\nThese won't:\n\n1. ``OPENTELEMETRY_PYTH_SOMETHING``\n2. ``OPENTELEMETRY_PYTHON_2_SOMETHING_AND__ELSE``\n3. ``OPENTELEMETRY_PYTHON_SOMETHING_%_ELSE``\n\nThe values stored in the environment variables can be found in an instance of\n``opentelemetry.configuration.Configuration``. This class can be instantiated\nfreely because instantiating it returns always the same object.\n\nFor example, if the environment variable\n``OPENTELEMETRY_PYTHON_METER_PROVIDER`` value is ``my_meter_provider``, then\n``Configuration().meter_provider == \"my_meter_provider\"`` would be ``True``.\n\nNon defined attributes will always return ``None``. 
This is intended to make it\neasier to use the ``Configuration`` object in actual code, because it won't be\nnecessary to check for the attribute to be defined first.\n\nEnvironment variables used by OpenTelemetry\n-------------------------------------------\n\n1. OPENTELEMETRY_PYTHON_METER_PROVIDER\n2. OPENTELEMETRY_PYTHON_TRACER_PROVIDER\n\nThe value of these environment variables should be the name of the entry point\nthat points to the class that implements either provider. This OpenTelemetry\nAPI package provides one entry point for each, which can be found in the\nsetup.py file::\n\n entry_points={\n ...\n \"opentelemetry_meter_provider\": [\n \"default_meter_provider = \"\n \"opentelemetry.metrics:DefaultMeterProvider\"\n ],\n \"opentelemetry_tracer_provider\": [\n \"default_tracer_provider = \"\n \"opentelemetry.trace:DefaultTracerProvider\"\n ],\n }\n\nTo use the meter provider above, then the\n``OPENTELEMETRY_PYTHON_METER_PROVIDER`` should be set to\n\"default_meter_provider\" (this is not actually necessary since the\nOpenTelemetry API provided providers are the default ones used if no\nconfiguration is found in the environment variables).\n\"\"\"\n\nfrom os import environ\nfrom re import fullmatch\n\n\nclass Configuration:\n _instance = None\n\n __slots__ = []\n\n def __new__(cls) -> \"Configuration\":\n if Configuration._instance is None:\n\n for key, value in environ.items():\n\n match = fullmatch(\n r\"OPENTELEMETRY_PYTHON_([A-Za-z_][\\w_]*)\", key\n )\n\n if match is not None:\n\n key = match.group(1)\n\n setattr(Configuration, \"_{}\".format(key), value)\n setattr(\n Configuration,\n key,\n property(\n fget=lambda cls, key=key: getattr(\n cls, \"_{}\".format(key)\n )\n ),\n )\n\n Configuration.__slots__.append(key)\n\n Configuration.__slots__ = tuple(Configuration.__slots__)\n\n Configuration._instance = object.__new__(cls)\n\n return cls._instance\n\n def __getattr__(self, name):\n return None\n", "path": "opentelemetry-api/src/opentelemetry/configuration/__init__.py"}], "after_files": [{"content": "# Copyright The OpenTelemetry Authors\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n# FIXME find a better way to avoid all those \"Expression has type \"Any\"\" errors\n# type: ignore\n\n\"\"\"\nSimple configuration manager\n\nThis is a configuration manager for OpenTelemetry. It reads configuration\nvalues from environment variables prefixed with ``OPENTELEMETRY_PYTHON_`` whose\ncharacters are only alphanumeric characters and unserscores, except for the\nfirst character after ``OPENTELEMETRY_PYTHON_`` which must not be a number.\n\nFor example, these environment variables will be read:\n\n1. ``OPENTELEMETRY_PYTHON_SOMETHING``\n2. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_``\n3. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND__ELSE``\n4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else``\n4. ``OPENTELEMETRY_PYTHON_SOMETHING_ELSE_AND_else2``\n\nThese won't:\n\n1. ``OPENTELEMETRY_PYTH_SOMETHING``\n2. ``OPENTELEMETRY_PYTHON_2_SOMETHING_AND__ELSE``\n3. 
``OPENTELEMETRY_PYTHON_SOMETHING_%_ELSE``\n\nThe values stored in the environment variables can be found in an instance of\n``opentelemetry.configuration.Configuration``. This class can be instantiated\nfreely because instantiating it returns always the same object.\n\nFor example, if the environment variable\n``OPENTELEMETRY_PYTHON_METER_PROVIDER`` value is ``my_meter_provider``, then\n``Configuration().meter_provider == \"my_meter_provider\"`` would be ``True``.\n\nNon defined attributes will always return ``None``. This is intended to make it\neasier to use the ``Configuration`` object in actual code, because it won't be\nnecessary to check for the attribute to be defined first.\n\nEnvironment variables used by OpenTelemetry\n-------------------------------------------\n\n1. OPENTELEMETRY_PYTHON_METER_PROVIDER\n2. OPENTELEMETRY_PYTHON_TRACER_PROVIDER\n\nThe value of these environment variables should be the name of the entry point\nthat points to the class that implements either provider. This OpenTelemetry\nAPI package provides one entry point for each, which can be found in the\nsetup.py file::\n\n entry_points={\n ...\n \"opentelemetry_meter_provider\": [\n \"default_meter_provider = \"\n \"opentelemetry.metrics:DefaultMeterProvider\"\n ],\n \"opentelemetry_tracer_provider\": [\n \"default_tracer_provider = \"\n \"opentelemetry.trace:DefaultTracerProvider\"\n ],\n }\n\nTo use the meter provider above, then the\n``OPENTELEMETRY_PYTHON_METER_PROVIDER`` should be set to\n\"default_meter_provider\" (this is not actually necessary since the\nOpenTelemetry API provided providers are the default ones used if no\nconfiguration is found in the environment variables).\n\"\"\"\n\nfrom os import environ\nfrom re import fullmatch\n\n\nclass Configuration:\n _instance = None\n\n __slots__ = []\n\n def __new__(cls) -> \"Configuration\":\n if Configuration._instance is None:\n\n for key, value in environ.items():\n\n match = fullmatch(\n r\"OPENTELEMETRY_PYTHON_([A-Za-z_][\\w_]*)\", key\n )\n\n if match is not None:\n\n key = match.group(1)\n\n setattr(Configuration, \"_{}\".format(key), value)\n setattr(\n Configuration,\n key,\n property(\n fget=lambda cls, key=key: getattr(\n cls, \"_{}\".format(key)\n )\n ),\n )\n\n Configuration.__slots__.append(key)\n\n Configuration.__slots__ = tuple(Configuration.__slots__)\n\n Configuration._instance = object.__new__(cls)\n\n return cls._instance\n\n def __getattr__(self, name):\n return None\n\n @classmethod\n def _reset(cls):\n \"\"\"\n This method \"resets\" the global configuration attributes\n\n It is not intended to be used by production code but by testing code\n only.\n \"\"\"\n\n for slot in cls.__slots__:\n if slot in cls.__dict__.keys():\n delattr(cls, slot)\n delattr(cls, \"_{}\".format(slot))\n\n cls.__slots__ = []\n cls._instance = None\n", "path": "opentelemetry-api/src/opentelemetry/configuration/__init__.py"}]} |
gh_patches_debug_1625 | rasdani/github-patches | git_diff | adamchainz__django-mysql-486 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
dangling connection created during system check may become unusable
*Summary*: mysql connection created during system check is not closed timely. This connection might (mysteriously) become unusable afterwards (for reason I could not fully understand for now), which blocks the database access in application logic.
*Description*: I'm using django with celery. Today I noticed that any task that accesses database by Django ORM just hangs forever (i.e. they only start but never finish). After some digging I realized it's database connection that blocks forever and never comes back.
After another really painful debugging process, I figured out that, during django's system checking process, django-mysql created a persistent database connection rather than a temporary one, thus get re-used in application code. But somehow its connection is dropped, which causes any code that implicitly depends on it blocks forever.
Specifically, I think the following code in `django_mysql/checks.py` should use a `BaseDatabaseWrapper.temporary_connection()`, therefore ensure connection is closed cleanly afterwards.
```python
def check_variables(app_configs, **kwargs):
errors = []
for alias, connection in mysql_connections():
with connection.cursor() as cursor: # connection.temporary_connection()
cursor.execute("""SELECT @@sql_mode,
@@innodb_strict_mode,
@@character_set_connection""")
# ...
```
(Sorry if this poor bug report seems in a hurry, I'm really tired (it's 4 AM my brain isn't working...) )
*Django Version*: Django 2.0.6
*Database and version used*: mysqld Ver 5.7.22 for Linux on x86_64 (MySQL Community Server (GPL))
*Version*: Django-MySQL 2.2.2
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `django_mysql/checks.py`
Content:
```
1 # -*- coding:utf-8 -*-
2 from __future__ import (
3 absolute_import, division, print_function, unicode_literals,
4 )
5
6 from django.core.checks import Tags, Warning, register
7 from django.db import DEFAULT_DB_ALIAS, connections
8
9 from django_mysql.utils import collapse_spaces
10
11
12 def register_checks():
13 register(Tags.compatibility)(check_variables)
14
15
16 def check_variables(app_configs, **kwargs):
17 errors = []
18
19 for alias, connection in mysql_connections():
20 with connection.cursor() as cursor:
21 cursor.execute("""SELECT @@sql_mode,
22 @@innodb_strict_mode,
23 @@character_set_connection""")
24 variables = cursor.fetchone()
25 sql_mode, innodb_strict_mode, character_set_connection = variables
26
27 modes = set(sql_mode.split(','))
28 if not (modes & {'STRICT_TRANS_TABLES', 'STRICT_ALL_TABLES'}):
29 errors.append(strict_mode_warning(alias))
30
31 if not innodb_strict_mode:
32 errors.append(innodb_strict_mode_warning(alias))
33
34 if character_set_connection != 'utf8mb4':
35 errors.append(utf8mb4_warning(alias))
36
37 return errors
38
39
40 def strict_mode_warning(alias):
41 message = "MySQL Strict Mode is not set for database connection '{}'"
42 hint = collapse_spaces("""
43 MySQL's Strict Mode fixes many data integrity problems in MySQL, such
44 as data truncation upon insertion, by escalating warnings into errors.
45 It is strongly recommended you activate it. See:
46 https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w001-strict-mode
47 """)
48 return Warning(
49 message.format(alias),
50 hint=hint,
51 id='django_mysql.W001',
52 )
53
54
55 def innodb_strict_mode_warning(alias):
56 message = "InnoDB Strict Mode is not set for database connection '{}'"
57 hint = collapse_spaces("""
58 InnoDB Strict Mode escalates several warnings around InnoDB-specific
59 statements into errors. It's recommended you activate this, but it's
60 not very likely to affect you if you don't. See:
61 https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w002-innodb-strict-mode
62 """)
63
64 return Warning(
65 message.format(alias),
66 hint=hint,
67 id='django_mysql.W002',
68 )
69
70
71 def utf8mb4_warning(alias):
72 message = "The character set is not utf8mb4 for database connection '{}'"
73 hint = collapse_spaces("""
74 The default 'utf8' character set does not include support for all
75 Unicode characters. It's strongly recommended you move to use
76 'utf8mb4'. See:
77 https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w003-utf8mb4
78 """)
79
80 return Warning(
81 message.format(alias),
82 hint=hint,
83 id='django_mysql.W003',
84 )
85
86
87 def mysql_connections():
88 conn_names = [DEFAULT_DB_ALIAS] + list(
89 set(connections) - {DEFAULT_DB_ALIAS},
90 )
91 for alias in conn_names:
92 connection = connections[alias]
93 if not hasattr(connection, 'mysql_version'):
94 continue # pragma: no cover
95
96 yield alias, connection
97
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/django_mysql/checks.py b/django_mysql/checks.py
--- a/django_mysql/checks.py
+++ b/django_mysql/checks.py
@@ -17,7 +17,7 @@
errors = []
for alias, connection in mysql_connections():
- with connection.cursor() as cursor:
+ with connection.temporary_connection() as cursor:
cursor.execute("""SELECT @@sql_mode,
@@innodb_strict_mode,
@@character_set_connection""")
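A quick way to confirm the behavioral difference in a Django shell; the assertion relies on Django's internal `connection` attribute being `None` while disconnected, and assumes no other code has opened the default connection yet:

```python
from django.db import connections
from django_mysql.checks import check_variables

check_variables(app_configs=None)

# With the old cursor() call, the check left a live connection cached on
# the default alias; with temporary_connection() it is closed again.
assert connections['default'].connection is None
```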
| {"golden_diff": "diff --git a/django_mysql/checks.py b/django_mysql/checks.py\n--- a/django_mysql/checks.py\n+++ b/django_mysql/checks.py\n@@ -17,7 +17,7 @@\n errors = []\n \n for alias, connection in mysql_connections():\n- with connection.cursor() as cursor:\n+ with connection.temporary_connection() as cursor:\n cursor.execute(\"\"\"SELECT @@sql_mode,\n @@innodb_strict_mode,\n @@character_set_connection\"\"\")\n", "issue": "dangling connection created during system check may become unusable\n*Summary*: mysql connection created during system check is not closed timely. This connection might (mysteriously) become unusable afterwards (for reason I could not fully understand for now), which blocks the database access in application logic.\r\n\r\n*Description*: I'm using django with celery. Today I noticed that any task that accesses database by Django ORM just hangs forever (i.e. they only start but never finish). After some digging I realized it's database connection that blocks forever and never comes back.\r\n\r\nAfter another really painful debugging process, I figured out that, during django's system checking process, django-mysql created a persistent database connection rather than a temporary one, thus get re-used in application code. But somehow its connection is dropped, which causes any code that implicitly depends on it blocks forever.\r\n\r\nSpecifically, I think the following code in `django_mysql/checks.py` should use a `BaseDatabaseWrapper.temporary_connection()`, therefore ensure connection is closed cleanly afterwards.\r\n\r\n```python\r\ndef check_variables(app_configs, **kwargs):\r\n errors = []\r\n\r\n for alias, connection in mysql_connections():\r\n with connection.cursor() as cursor: # connection.temporary_connection()\r\n cursor.execute(\"\"\"SELECT @@sql_mode,\r\n @@innodb_strict_mode,\r\n @@character_set_connection\"\"\")\r\n # ...\r\n```\r\n\r\n(Sorry if this poor bug report seems in a hurry, I'm really tired (it's 4 AM my brain isn't working...) 
)\r\n\r\n*Django Version*: Django 2.0.6\r\n\r\n*Database and version used*: mysqld Ver 5.7.22 for Linux on x86_64 (MySQL Community Server (GPL))\r\n\r\n*Version*: Django-MySQL 2.2.2\r\n\n", "before_files": [{"content": "# -*- coding:utf-8 -*-\nfrom __future__ import (\n absolute_import, division, print_function, unicode_literals,\n)\n\nfrom django.core.checks import Tags, Warning, register\nfrom django.db import DEFAULT_DB_ALIAS, connections\n\nfrom django_mysql.utils import collapse_spaces\n\n\ndef register_checks():\n register(Tags.compatibility)(check_variables)\n\n\ndef check_variables(app_configs, **kwargs):\n errors = []\n\n for alias, connection in mysql_connections():\n with connection.cursor() as cursor:\n cursor.execute(\"\"\"SELECT @@sql_mode,\n @@innodb_strict_mode,\n @@character_set_connection\"\"\")\n variables = cursor.fetchone()\n sql_mode, innodb_strict_mode, character_set_connection = variables\n\n modes = set(sql_mode.split(','))\n if not (modes & {'STRICT_TRANS_TABLES', 'STRICT_ALL_TABLES'}):\n errors.append(strict_mode_warning(alias))\n\n if not innodb_strict_mode:\n errors.append(innodb_strict_mode_warning(alias))\n\n if character_set_connection != 'utf8mb4':\n errors.append(utf8mb4_warning(alias))\n\n return errors\n\n\ndef strict_mode_warning(alias):\n message = \"MySQL Strict Mode is not set for database connection '{}'\"\n hint = collapse_spaces(\"\"\"\n MySQL's Strict Mode fixes many data integrity problems in MySQL, such\n as data truncation upon insertion, by escalating warnings into errors.\n It is strongly recommended you activate it. See:\n https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w001-strict-mode\n \"\"\")\n return Warning(\n message.format(alias),\n hint=hint,\n id='django_mysql.W001',\n )\n\n\ndef innodb_strict_mode_warning(alias):\n message = \"InnoDB Strict Mode is not set for database connection '{}'\"\n hint = collapse_spaces(\"\"\"\n InnoDB Strict Mode escalates several warnings around InnoDB-specific\n statements into errors. It's recommended you activate this, but it's\n not very likely to affect you if you don't. See:\n https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w002-innodb-strict-mode\n \"\"\")\n\n return Warning(\n message.format(alias),\n hint=hint,\n id='django_mysql.W002',\n )\n\n\ndef utf8mb4_warning(alias):\n message = \"The character set is not utf8mb4 for database connection '{}'\"\n hint = collapse_spaces(\"\"\"\n The default 'utf8' character set does not include support for all\n Unicode characters. It's strongly recommended you move to use\n 'utf8mb4'. 
See:\n https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w003-utf8mb4\n \"\"\")\n\n return Warning(\n message.format(alias),\n hint=hint,\n id='django_mysql.W003',\n )\n\n\ndef mysql_connections():\n conn_names = [DEFAULT_DB_ALIAS] + list(\n set(connections) - {DEFAULT_DB_ALIAS},\n )\n for alias in conn_names:\n connection = connections[alias]\n if not hasattr(connection, 'mysql_version'):\n continue # pragma: no cover\n\n yield alias, connection\n", "path": "django_mysql/checks.py"}], "after_files": [{"content": "# -*- coding:utf-8 -*-\nfrom __future__ import (\n absolute_import, division, print_function, unicode_literals,\n)\n\nfrom django.core.checks import Tags, Warning, register\nfrom django.db import DEFAULT_DB_ALIAS, connections\n\nfrom django_mysql.utils import collapse_spaces\n\n\ndef register_checks():\n register(Tags.compatibility)(check_variables)\n\n\ndef check_variables(app_configs, **kwargs):\n errors = []\n\n for alias, connection in mysql_connections():\n with connection.temporary_connection() as cursor:\n cursor.execute(\"\"\"SELECT @@sql_mode,\n @@innodb_strict_mode,\n @@character_set_connection\"\"\")\n variables = cursor.fetchone()\n sql_mode, innodb_strict_mode, character_set_connection = variables\n\n modes = set(sql_mode.split(','))\n if not (modes & {'STRICT_TRANS_TABLES', 'STRICT_ALL_TABLES'}):\n errors.append(strict_mode_warning(alias))\n\n if not innodb_strict_mode:\n errors.append(innodb_strict_mode_warning(alias))\n\n if character_set_connection != 'utf8mb4':\n errors.append(utf8mb4_warning(alias))\n\n return errors\n\n\ndef strict_mode_warning(alias):\n message = \"MySQL Strict Mode is not set for database connection '{}'\"\n hint = collapse_spaces(\"\"\"\n MySQL's Strict Mode fixes many data integrity problems in MySQL, such\n as data truncation upon insertion, by escalating warnings into errors.\n It is strongly recommended you activate it. See:\n https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w001-strict-mode\n \"\"\")\n return Warning(\n message.format(alias),\n hint=hint,\n id='django_mysql.W001',\n )\n\n\ndef innodb_strict_mode_warning(alias):\n message = \"InnoDB Strict Mode is not set for database connection '{}'\"\n hint = collapse_spaces(\"\"\"\n InnoDB Strict Mode escalates several warnings around InnoDB-specific\n statements into errors. It's recommended you activate this, but it's\n not very likely to affect you if you don't. See:\n https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w002-innodb-strict-mode\n \"\"\")\n\n return Warning(\n message.format(alias),\n hint=hint,\n id='django_mysql.W002',\n )\n\n\ndef utf8mb4_warning(alias):\n message = \"The character set is not utf8mb4 for database connection '{}'\"\n hint = collapse_spaces(\"\"\"\n The default 'utf8' character set does not include support for all\n Unicode characters. It's strongly recommended you move to use\n 'utf8mb4'. See:\n https://django-mysql.readthedocs.io/en/latest/checks.html#django-mysql-w003-utf8mb4\n \"\"\")\n\n return Warning(\n message.format(alias),\n hint=hint,\n id='django_mysql.W003',\n )\n\n\ndef mysql_connections():\n conn_names = [DEFAULT_DB_ALIAS] + list(\n set(connections) - {DEFAULT_DB_ALIAS},\n )\n for alias in conn_names:\n connection = connections[alias]\n if not hasattr(connection, 'mysql_version'):\n continue # pragma: no cover\n\n yield alias, connection\n", "path": "django_mysql/checks.py"}]} |
gh_patches_debug_1626 | rasdani/github-patches | git_diff | napari__napari-1371 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Napari 0.3.4 release crashes when switching to volume rendering
## 🐛 Bug
Napari 0.3.4 (release version) crashes when trying to render volume in 3D.
## To Reproduce
Steps to reproduce the behavior:
1. Load a volume
2. Display and navigate through slices in 2D
3. Switch to 3D -> Crash

```
(stardist) λ napari
WARNING: Error drawing visual <Volume at 0x1b6364c0d48>
10:17:55 WARNING Error drawing visual <Volume at 0x1b6364c0d48>
WARNING: Traceback (most recent call last):
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\OpenGL\latebind.py", line 43, in __call__
return self._finalCall( *args, **named )
TypeError: 'NoneType' object is not callable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\app\backends\_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\util\event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\util\event.py", line 475, in _invoke_callback
self, cb_event=(cb, event))
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\util\event.py", line 471, in _invoke_callback
cb(event)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\scene\canvas.py", line 217, in on_draw
self._draw_scene()
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\scene\canvas.py", line 266, in _draw_scene
self.draw_visual(self.scene)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\scene\canvas.py", line 304, in draw_visual
node.draw()
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\scene\visuals.py", line 99, in draw
self._visual_superclass.draw(self)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\visuals\visual.py", line 443, in draw
self._vshare.index_buffer)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\visuals\shaders\program.py", line 101, in draw
Program.draw(self, *args, **kwargs)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\program.py", line 533, in draw
canvas.context.flush_commands()
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\context.py", line 176, in flush_commands
self.glir.flush(self.shared.parser)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 572, in flush
self._shared.flush(parser)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 494, in flush
parser.parse(self._filter(self.clear(), parser))
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 819, in parse
self._parse(command)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 789, in _parse
ob.set_size(*args) # Texture[1D, 2D, 3D], RenderBuffer
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 1624, in set_size
gl.GL_BYTE, shape[:3])
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 1573, in glTexImage3D
width, height, depth, border, format, type, None)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\OpenGL\latebind.py", line 47, in __call__
return self._finalCall( *args, **named )
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\OpenGL\wrapper.py", line 882, in wrapperCall
result = wrappedOperation( *cArguments )
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\OpenGL\platform\baseplatform.py", line 425, in __call__
self.__name__, self.__name__,
OpenGL.error.NullFunctionError: Attempt to call an undefined function glTexImage3D, check for bool(glTexImage3D) before calling
10:17:56 WARNING Traceback (most recent call last):
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\OpenGL\latebind.py", line 43, in __call__
return self._finalCall( *args, **named )
TypeError: 'NoneType' object is not callable
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\app\backends\_qt.py", line 825, in paintGL
self._vispy_canvas.events.draw(region=None)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\util\event.py", line 455, in __call__
self._invoke_callback(cb, event)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\util\event.py", line 475, in _invoke_callback
self, cb_event=(cb, event))
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\util\event.py", line 471, in _invoke_callback
cb(event)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\scene\canvas.py", line 217, in on_draw
self._draw_scene()
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\scene\canvas.py", line 266, in _draw_scene
self.draw_visual(self.scene)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\scene\canvas.py", line 304, in draw_visual
node.draw()
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\scene\visuals.py", line 99, in draw
self._visual_superclass.draw(self)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\visuals\visual.py", line 443, in draw
self._vshare.index_buffer)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\visuals\shaders\program.py", line 101, in draw
Program.draw(self, *args, **kwargs)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\program.py", line 533, in draw
canvas.context.flush_commands()
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\context.py", line 176, in flush_commands
self.glir.flush(self.shared.parser)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 572, in flush
self._shared.flush(parser)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 494, in flush
parser.parse(self._filter(self.clear(), parser))
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 819, in parse
self._parse(command)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 789, in _parse
ob.set_size(*args) # Texture[1D, 2D, 3D], RenderBuffer
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 1624, in set_size
gl.GL_BYTE, shape[:3])
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\vispy\gloo\glir.py", line 1573, in glTexImage3D
width, height, depth, border, format, type, None)
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\OpenGL\latebind.py", line 47, in __call__
return self._finalCall( *args, **named )
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\OpenGL\wrapper.py", line 882, in wrapperCall
result = wrappedOperation( *cArguments )
File "c:\users\volker\anaconda3\envs\stardist\lib\site-packages\OpenGL\platform\baseplatform.py", line 425, in __call__
self.__name__, self.__name__,
OpenGL.error.NullFunctionError: Attempt to call an undefined function glTexImage3D, check for bool(glTexImage3D) before calling
```
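The final exception points at the diagnostic: in the failing context, `glTexImage3D` resolves to a null function pointer. A minimal probe, assuming PyOpenGL is installed and a GL context is current:

```python
from OpenGL import GL

# bool() on a PyOpenGL function reports whether the entry point resolved
# in the current context; it is False when 3D textures are unavailable.
if not bool(GL.glTexImage3D):
    print("glTexImage3D is not available in this OpenGL context")
```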
## Expected behavior
The same volume renders fine in an earlier version of napari.
## Environment
- Please copy and paste the information at napari info option in help menubar here:
```
napari: 0.3.4
Platform: Windows-10-10.0.18362-SP0
Python: 3.7.7 (default, Apr 15 2020, 05:09:04) [MSC v.1916 64 bit (AMD64)]
Qt: 5.14.2
PyQt5: 5.14.2
NumPy: 1.18.1
SciPy: 1.3.1
Dask: 2.18.0
VisPy: 0.6.4
GL version: 4.6.0 - Build 26.20.100.7812
MAX_TEXTURE_SIZE: 16384
Plugins:
- napari-plugin-engine: 0.1.6
- svg: 0.1.3
```
This was pip installed into an existing conda environment from the PyPI release.
## Additional context
In the same environment, when I open an IPython console before switching to volume rendering (same volume), **napari does not crash!**

Also, the volume renders fine in one of my older napari installations/environments that has the same version of vispy, which rules out my initial thought that this would be a vispy issue. **My current guess is that this may be PyQt-related.** The environment below works. Note that a few things are different, notably PySide instead of PyQt.
```
napari: 0.3.1+12.g0cd943c
Platform: Windows-10-10.0.18362-SP0
Python: 3.7.7 (default, May 6 2020, 11:45:54) [MSC v.1916 64 bit (AMD64)]
Qt: 5.14.2
PySide2: 5.14.2.1
NumPy: 1.18.4
SciPy: 1.4.1
Dask: 2.16.0
VisPy: 0.6.4
GL version: 4.6.0 - Build 26.20.100.7812
MAX_TEXTURE_SIZE: 16384
Plugins:
- napari-plugin-engine: 0.1.5
- svg: 0.1.2
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `napari/__init__.py`
Content:
```
1 try:
2 from ._version import version as __version__
3 except ImportError:
4 __version__ = "not-installed"
5
6 import os
7 from distutils.version import StrictVersion
8 from pathlib import Path
9
10 try:
11 from qtpy import API_NAME
12 except Exception as e:
13 if 'No Qt bindings could be found' in str(e):
14 raise type(e)(
15 "No Qt bindings could be found.\n\nnapari requires either PyQt5 or"
16 " PySide2 to be installed in the environment.\nTo install the "
17 'default backend (currently PyQt5), run "pip install napari[all]"'
18 '\nYou may also use "pip install napari[pyside2]" for Pyside2, '
19 'or "pip install napari[pyqt5]" for PyQt5'
20 ) from e
21 raise
22
23
24 if API_NAME == 'PySide2':
25 # Set plugin path appropriately if using PySide2. This is a bug fix
26 # for when both PyQt5 and Pyside2 are installed
27 import PySide2
28
29 os.environ['QT_PLUGIN_PATH'] = str(
30 Path(PySide2.__file__).parent / 'Qt' / 'plugins'
31 )
32
33 from qtpy import QtCore
34
35 # When QT is not the specific version, we raise a warning:
36 from warnings import warn
37
38 if StrictVersion(QtCore.__version__) < StrictVersion('5.12.3'):
39 warn_message = f"""
40 napari was tested with QT library `>=5.12.3`.
41 The version installed is {QtCore.__version__}. Please report any issues with this
42 specific QT version at https://github.com/Napari/napari/issues.
43 """
44 warn(message=warn_message)
45
46 from vispy import app
47 import logging
48
49 # set vispy application to the appropriate qt backend
50 app.use_app(API_NAME)
51 del app
52 # set vispy logger to show warning and errors only
53 vispy_logger = logging.getLogger('vispy')
54 vispy_logger.setLevel(logging.WARNING)
55
56 from .viewer import Viewer
57 from .plugins.io import save_layers
58
59 # Note that importing _viewer_key_bindings is needed as the Viewer gets
60 # decorated with keybindings during that process, but it is not directly needed
61 # by our users and so is deleted below
62 from . import _viewer_key_bindings # noqa: F401
63 from .view_layers import (
64 view_path,
65 view_image,
66 view_labels,
67 view_surface,
68 view_shapes,
69 view_points,
70 view_vectors,
71 )
72 from ._qt import gui_qt
73 from .utils import sys_info, _magicgui
74
75 # register napari object types with magicgui if it is installed
76 _magicgui.register_types_with_magicgui()
77
78 del _magicgui
79 del _viewer_key_bindings
80
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/napari/__init__.py b/napari/__init__.py
--- a/napari/__init__.py
+++ b/napari/__init__.py
@@ -75,5 +75,14 @@
# register napari object types with magicgui if it is installed
_magicgui.register_types_with_magicgui()
+
+# this unused import is here to fix a very strange bug.
+# there is some mysterious magical goodness in scipy stats that needs
+# to be imported early.
+# see: https://github.com/napari/napari/issues/925
+# see: https://github.com/napari/napari/issues/1347
+from scipy import stats # noqa: F401
+
del _magicgui
+del stats
del _viewer_key_bindings
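The patch bakes the workaround into `napari/__init__.py`; a user stuck on the released 0.3.4 could apply the same trick at the application level. A sketch (the side-effect import is the whole point, and the random volume is just a stand-in for real data):

```python
# Mirror the fix above: force scipy.stats to be imported before any
# 3D rendering happens.
from scipy import stats  # noqa: F401

import numpy as np
import napari

volume = np.random.random((64, 64, 64))  # any 3D array reproduces the setup

with napari.gui_qt():
    napari.view_image(volume)
```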
| {"golden_diff": "diff --git a/napari/__init__.py b/napari/__init__.py\n--- a/napari/__init__.py\n+++ b/napari/__init__.py\n@@ -75,5 +75,14 @@\n # register napari object types with magicgui if it is installed\n _magicgui.register_types_with_magicgui()\n \n+\n+# this unused import is here to fix a very strange bug.\n+# there is some mysterious magical goodness in scipy stats that needs\n+# to be imported early.\n+# see: https://github.com/napari/napari/issues/925\n+# see: https://github.com/napari/napari/issues/1347\n+from scipy import stats # noqa: F401\n+\n del _magicgui\n+del stats\n del _viewer_key_bindings\n", "issue": "Napari 0.3.4 release crashes when switching to volume rendering\n## \ud83d\udc1b Bug\r\n\r\nNapari 0.3.4 (release version) crashes when trying to render volume in 3D.\r\n\r\n\r\n## To Reproduce\r\n\r\nSteps to reproduce the behavior:\r\n\r\n1. Load a volume\r\n2. Display and navigate through slices in 2D\r\n3. Switch to 3D -> Crash\r\n\r\n\r\n\r\n\r\n\r\n```\r\n(stardist) \u03bb napari\r\nWARNING: Error drawing visual <Volume at 0x1b6364c0d48>\r\n10:17:55 WARNING Error drawing visual <Volume at 0x1b6364c0d48>\r\nWARNING: Traceback (most recent call last):\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\OpenGL\\latebind.py\", line 43, in __call__\r\n return self._finalCall( *args, **named )\r\nTypeError: 'NoneType' object is not callable\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\app\\backends\\_qt.py\", line 825, in paintGL\r\n self._vispy_canvas.events.draw(region=None)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\util\\event.py\", line 455, in __call__\r\n self._invoke_callback(cb, event)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\util\\event.py\", line 475, in _invoke_callback\r\n self, cb_event=(cb, event))\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\util\\event.py\", line 471, in _invoke_callback\r\n cb(event)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\scene\\canvas.py\", line 217, in on_draw\r\n\r\n self._draw_scene()\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\scene\\canvas.py\", line 266, in _draw_scene\r\n self.draw_visual(self.scene)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\scene\\canvas.py\", line 304, in draw_visual\r\n node.draw()\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\scene\\visuals.py\", line 99, in draw\r\n self._visual_superclass.draw(self)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\visuals\\visual.py\", line 443, in draw\r\n self._vshare.index_buffer)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\visuals\\shaders\\program.py\", line 101, in draw\r\n Program.draw(self, *args, **kwargs)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\program.py\", line 533, in draw\r\n canvas.context.flush_commands()\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\context.py\", line 176, in flush_commands\r\n self.glir.flush(self.shared.parser)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", 
line 572, in flush\r\n self._shared.flush(parser)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 494, in flush\r\n parser.parse(self._filter(self.clear(), parser))\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 819, in parse\r\n self._parse(command)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 789, in _parse\r\n ob.set_size(*args) # Texture[1D, 2D, 3D], RenderBuffer\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 1624, in set_size\r\n gl.GL_BYTE, shape[:3])\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 1573, in glTexImage3D\r\n width, height, depth, border, format, type, None)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\OpenGL\\latebind.py\", line 47, in __call__\r\n return self._finalCall( *args, **named )\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\OpenGL\\wrapper.py\", line 882, in wrapperCall\r\n\r\n result = wrappedOperation( *cArguments )\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\OpenGL\\platform\\baseplatform.py\", line 425, in __call__\r\n self.__name__, self.__name__,\r\nOpenGL.error.NullFunctionError: Attempt to call an undefined function glTexImage3D, check for bool(glTexImage3D) before calling\r\n10:17:56 WARNING Traceback (most recent call last):\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\OpenGL\\latebind.py\", line 43, in __call__\r\n return self._finalCall( *args, **named )\r\nTypeError: 'NoneType' object is not callable\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\app\\backends\\_qt.py\", line 825, in paintGL\r\n self._vispy_canvas.events.draw(region=None)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\util\\event.py\", line 455, in __call__\r\n self._invoke_callback(cb, event)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\util\\event.py\", line 475, in _invoke_callback\r\n self, cb_event=(cb, event))\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\util\\event.py\", line 471, in _invoke_callback\r\n cb(event)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\scene\\canvas.py\", line 217, in on_draw\r\n\r\n self._draw_scene()\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\scene\\canvas.py\", line 266, in _draw_scene\r\n self.draw_visual(self.scene)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\scene\\canvas.py\", line 304, in draw_visual\r\n node.draw()\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\scene\\visuals.py\", line 99, in draw\r\n self._visual_superclass.draw(self)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\visuals\\visual.py\", line 443, in draw\r\n self._vshare.index_buffer)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\visuals\\shaders\\program.py\", line 101, in draw\r\n Program.draw(self, *args, **kwargs)\r\n File 
\"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\program.py\", line 533, in draw\r\n canvas.context.flush_commands()\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\context.py\", line 176, in flush_commands\r\n self.glir.flush(self.shared.parser)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 572, in flush\r\n self._shared.flush(parser)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 494, in flush\r\n parser.parse(self._filter(self.clear(), parser))\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 819, in parse\r\n self._parse(command)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 789, in _parse\r\n ob.set_size(*args) # Texture[1D, 2D, 3D], RenderBuffer\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 1624, in set_size\r\n gl.GL_BYTE, shape[:3])\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\vispy\\gloo\\glir.py\", line 1573, in glTexImage3D\r\n width, height, depth, border, format, type, None)\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\OpenGL\\latebind.py\", line 47, in __call__\r\n return self._finalCall( *args, **named )\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\OpenGL\\wrapper.py\", line 882, in wrapperCall\r\n\r\n result = wrappedOperation( *cArguments )\r\n File \"c:\\users\\volker\\anaconda3\\envs\\stardist\\lib\\site-packages\\OpenGL\\platform\\baseplatform.py\", line 425, in __call__\r\n self.__name__, self.__name__,\r\nOpenGL.error.NullFunctionError: Attempt to call an undefined function glTexImage3D, check for bool(glTexImage3D) before calling\r\n```\r\n\r\n## Expected behavior\r\n\r\nThe same volume renders fine in an earlier version of napari.\r\n\r\n## Environment\r\n\r\n - Please copy and paste the information at napari info option in help menubar here:\r\n```\r\nnapari: 0.3.4\r\nPlatform: Windows-10-10.0.18362-SP0\r\nPython: 3.7.7 (default, Apr 15 2020, 05:09:04) [MSC v.1916 64 bit (AMD64)]\r\nQt: 5.14.2\r\nPyQt5: 5.14.2\r\nNumPy: 1.18.1\r\nSciPy: 1.3.1\r\nDask: 2.18.0\r\nVisPy: 0.6.4\r\n\r\nGL version: 4.6.0 - Build 26.20.100.7812\r\nMAX_TEXTURE_SIZE: 16384\r\n\r\nPlugins:\r\n- napari-plugin-engine: 0.1.6\r\n- svg: 0.1.3\r\n```\r\n This was pip installed into an existing conda environment from the PyPI release.\r\n\r\n## Additional context\r\n\r\nIn the same environment, when I open an IPython console before switching to volume rendering (same volume), **napari does not crash !**\r\n\r\n\r\n\r\nAlso, the volume renders fine in one of my older napari installations/environments that has the same version of vispy, which rules out my initial thought that this would be a vispy issue. **My current guess is that this may be PyQt-related.** The environment below works. 
Note a few things are different, notably PySide instead of PyQt.\r\n\r\n```\r\nnapari: 0.3.1+12.g0cd943c\r\nPlatform: Windows-10-10.0.18362-SP0\r\nPython: 3.7.7 (default, May 6 2020, 11:45:54) [MSC v.1916 64 bit (AMD64)]\r\nQt: 5.14.2\r\nPySide2: 5.14.2.1\r\nNumPy: 1.18.4\r\nSciPy: 1.4.1\r\nDask: 2.16.0\r\nVisPy: 0.6.4\r\n\r\nGL version: 4.6.0 - Build 26.20.100.7812\r\nMAX_TEXTURE_SIZE: 16384\r\n\r\nPlugins:\r\n- napari-plugin-engine: 0.1.5\r\n- svg: 0.1.2\r\n```\r\n\n", "before_files": [{"content": "try:\n from ._version import version as __version__\nexcept ImportError:\n __version__ = \"not-installed\"\n\nimport os\nfrom distutils.version import StrictVersion\nfrom pathlib import Path\n\ntry:\n from qtpy import API_NAME\nexcept Exception as e:\n if 'No Qt bindings could be found' in str(e):\n raise type(e)(\n \"No Qt bindings could be found.\\n\\nnapari requires either PyQt5 or\"\n \" PySide2 to be installed in the environment.\\nTo install the \"\n 'default backend (currently PyQt5), run \"pip install napari[all]\"'\n '\\nYou may also use \"pip install napari[pyside2]\" for Pyside2, '\n 'or \"pip install napari[pyqt5]\" for PyQt5'\n ) from e\n raise\n\n\nif API_NAME == 'PySide2':\n # Set plugin path appropriately if using PySide2. This is a bug fix\n # for when both PyQt5 and Pyside2 are installed\n import PySide2\n\n os.environ['QT_PLUGIN_PATH'] = str(\n Path(PySide2.__file__).parent / 'Qt' / 'plugins'\n )\n\nfrom qtpy import QtCore\n\n# When QT is not the specific version, we raise a warning:\nfrom warnings import warn\n\nif StrictVersion(QtCore.__version__) < StrictVersion('5.12.3'):\n warn_message = f\"\"\"\n napari was tested with QT library `>=5.12.3`.\n The version installed is {QtCore.__version__}. Please report any issues with this\n specific QT version at https://github.com/Napari/napari/issues.\n \"\"\"\n warn(message=warn_message)\n\nfrom vispy import app\nimport logging\n\n# set vispy application to the appropriate qt backend\napp.use_app(API_NAME)\ndel app\n# set vispy logger to show warning and errors only\nvispy_logger = logging.getLogger('vispy')\nvispy_logger.setLevel(logging.WARNING)\n\nfrom .viewer import Viewer\nfrom .plugins.io import save_layers\n\n# Note that importing _viewer_key_bindings is needed as the Viewer gets\n# decorated with keybindings during that process, but it is not directly needed\n# by our users and so is deleted below\nfrom . 
import _viewer_key_bindings # noqa: F401\nfrom .view_layers import (\n view_path,\n view_image,\n view_labels,\n view_surface,\n view_shapes,\n view_points,\n view_vectors,\n)\nfrom ._qt import gui_qt\nfrom .utils import sys_info, _magicgui\n\n# register napari object types with magicgui if it is installed\n_magicgui.register_types_with_magicgui()\n\ndel _magicgui\ndel _viewer_key_bindings\n", "path": "napari/__init__.py"}], "after_files": [{"content": "try:\n from ._version import version as __version__\nexcept ImportError:\n __version__ = \"not-installed\"\n\nimport os\nfrom distutils.version import StrictVersion\nfrom pathlib import Path\n\ntry:\n from qtpy import API_NAME\nexcept Exception as e:\n if 'No Qt bindings could be found' in str(e):\n raise type(e)(\n \"No Qt bindings could be found.\\n\\nnapari requires either PyQt5 or\"\n \" PySide2 to be installed in the environment.\\nTo install the \"\n 'default backend (currently PyQt5), run \"pip install napari[all]\"'\n '\\nYou may also use \"pip install napari[pyside2]\" for Pyside2, '\n 'or \"pip install napari[pyqt5]\" for PyQt5'\n ) from e\n raise\n\n\nif API_NAME == 'PySide2':\n # Set plugin path appropriately if using PySide2. This is a bug fix\n # for when both PyQt5 and Pyside2 are installed\n import PySide2\n\n os.environ['QT_PLUGIN_PATH'] = str(\n Path(PySide2.__file__).parent / 'Qt' / 'plugins'\n )\n\nfrom qtpy import QtCore\n\n# When QT is not the specific version, we raise a warning:\nfrom warnings import warn\n\nif StrictVersion(QtCore.__version__) < StrictVersion('5.12.3'):\n warn_message = f\"\"\"\n napari was tested with QT library `>=5.12.3`.\n The version installed is {QtCore.__version__}. Please report any issues with this\n specific QT version at https://github.com/Napari/napari/issues.\n \"\"\"\n warn(message=warn_message)\n\nfrom vispy import app\nimport logging\n\n# set vispy application to the appropriate qt backend\napp.use_app(API_NAME)\ndel app\n# set vispy logger to show warning and errors only\nvispy_logger = logging.getLogger('vispy')\nvispy_logger.setLevel(logging.WARNING)\n\nfrom .viewer import Viewer\nfrom .plugins.io import save_layers\n\n# Note that importing _viewer_key_bindings is needed as the Viewer gets\n# decorated with keybindings during that process, but it is not directly needed\n# by our users and so is deleted below\nfrom . import _viewer_key_bindings # noqa: F401\nfrom .view_layers import (\n view_path,\n view_image,\n view_labels,\n view_surface,\n view_shapes,\n view_points,\n view_vectors,\n)\nfrom ._qt import gui_qt\nfrom .utils import sys_info, _magicgui\n\n# register napari object types with magicgui if it is installed\n_magicgui.register_types_with_magicgui()\n\n\n# this unused import is here to fix a very strange bug.\n# there is some mysterious magical goodness in scipy stats that needs\n# to be imported early.\n# see: https://github.com/napari/napari/issues/925\n# see: https://github.com/napari/napari/issues/1347\nfrom scipy import stats # noqa: F401\n\ndel _magicgui\ndel stats\ndel _viewer_key_bindings\n", "path": "napari/__init__.py"}]} |
gh_patches_debug_1627 | rasdani/github-patches | git_diff | ytdl-org__youtube-dl-18343 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Gfycat cajax json endpoint is gone
### Make sure you are using the *latest* version: run `youtube-dl --version` and ensure your version is *2018.11.23*. If it's not, read [this FAQ entry](https://github.com/rg3/youtube-dl/blob/master/README.md#how-do-i-update-youtube-dl) and update. Issues with outdated version will be rejected.
- [x] I've **verified** and **I assure** that I'm running youtube-dl **2018.11.23**
### Before submitting an *issue* make sure you have:
- [x] At least skimmed through the [README](https://github.com/rg3/youtube-dl/blob/master/README.md), **most notably** the [FAQ](https://github.com/rg3/youtube-dl#faq) and [BUGS](https://github.com/rg3/youtube-dl#bugs) sections
- [x] [Searched](https://github.com/rg3/youtube-dl/search?type=Issues) the bugtracker for similar issues including closed ones
- [x] Checked that provided video/audio/playlist URLs (if any) are alive and playable in a browser
### What is the purpose of your *issue*?
- [x] Bug report (encountered problems with youtube-dl)
- [ ] Site support request (request for adding support for a new site)
- [ ] Feature request (request for a new functionality)
- [ ] Question
- [ ] Other
---
### If the purpose of this *issue* is a *bug report*, *site support request* or you are not completely sure provide the full verbose output as follows:
Add the `-v` flag to **your command line** you run youtube-dl with (`youtube-dl -v <your command line>`), copy the **whole** output and insert it here. It should look similar to one below (replace it with **your** log inserted between triple ```):
```
$ youtube-dl https://gfycat.com/belovedsparseamericanbobtail -v
[debug] System config: []
[debug] User config: []
[debug] Custom config: []
[debug] Command-line args: ['https://gfycat.com/belovedsparseamericanbobtail', '-v']
[debug] Encodings: locale UTF-8, fs utf-8, out UTF-8, pref UTF-8
[debug] youtube-dl version 2018.11.23
[debug] Python version 3.6.5 (CPython) - Darwin-16.7.0-x86_64-i386-64bit
[debug] exe versions: ffmpeg 3.2.2, ffprobe 3.2.2
[debug] Proxy map: {}
[Gfycat] belovedsparseamericanbobtail: Downloading video info
ERROR: Unable to download JSON metadata: HTTP Error 404: Not Found (caused by <HTTPError 404: 'Not Found'>); please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; type youtube-dl -U to update. Be sure to call youtube-dl with the --verbose flag and include its complete output.
File "/usr/local/bin/youtube-dl/youtube_dl/extractor/common.py", line 605, in _request_webpage
return self._downloader.urlopen(url_or_request)
File "/usr/local/bin/youtube-dl/youtube_dl/YoutubeDL.py", line 2211, in urlopen
return self._opener.open(req, timeout=self._socket_timeout)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 532, in open
response = meth(req, response)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 642, in http_response
'http', request, response, code, msg, hdrs)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 564, in error
result = self._call_chain(*args)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 504, in _call_chain
result = func(*args)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 756, in http_error_302
return self.parent.open(new, timeout=req.timeout)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 532, in open
response = meth(req, response)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 642, in http_response
'http', request, response, code, msg, hdrs)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 570, in error
return self._call_chain(*args)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 504, in _call_chain
result = func(*args)
File "/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py", line 650, in http_error_default
raise HTTPError(req.full_url, code, msg, hdrs, fp)
```
---
### Description of your *issue*, suggested solution and other information
Gfycat downloads no longer work because Gfycat removed the API youtube-dl uses:
> Yes we announced deprecation of this endpoint 1.5 years ago. Are you using it for an app still?
>
> This is the link to our current API. https://developers.gfycat.com/api/#introduction
https://www.reddit.com/r/gfycat/comments/a17ewc/embeded_gfycats_returning_failed_to_load_resource/eanbmrh/
So if you wanted to get https://gfycat.com/belovedsparseamericanbobtail then `youtube-dl` would look for the metadata at https://gfycat.com/cajax/get/belovedsparseamericanbobtail (you can try the link, it'll 404).
https://github.com/rg3/youtube-dl/blob/d9df8f120b325766181fb474a8c534e51df78f17/youtube_dl/extractor/gfycat.py#L55-L57
It's not obvious to me how to fix this, because the new API uses an OAuth2 bearer token, and AFAIK there isn't a key store built into youtube-dl. The closest thing looks like the `.netrc` file support, or possibly a custom [config file entry](https://github.com/rg3/youtube-dl#configuration).
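That said, the simple per-gfy lookup still appears to work without a token; a sketch against the v1 endpoint (the response shape mirrors the old cajax payload, so `gfyItem` and the `*Url` keys are assumptions carried over from the existing extractor):

```python
import json
from urllib.request import urlopen

video_id = 'belovedsparseamericanbobtail'
url = 'https://api.gfycat.com/v1/gfycats/%s' % video_id

with urlopen(url) as response:
    gfy = json.load(response)['gfyItem']

print(gfy['title'], gfy['mp4Url'])
```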
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `youtube_dl/extractor/gfycat.py`
Content:
```
1 # coding: utf-8
2 from __future__ import unicode_literals
3
4 from .common import InfoExtractor
5 from ..utils import (
6 int_or_none,
7 float_or_none,
8 qualities,
9 ExtractorError,
10 )
11
12
13 class GfycatIE(InfoExtractor):
14 _VALID_URL = r'https?://(?:www\.)?gfycat\.com/(?:ifr/|gifs/detail/)?(?P<id>[^/?#]+)'
15 _TESTS = [{
16 'url': 'http://gfycat.com/DeadlyDecisiveGermanpinscher',
17 'info_dict': {
18 'id': 'DeadlyDecisiveGermanpinscher',
19 'ext': 'mp4',
20 'title': 'Ghost in the Shell',
21 'timestamp': 1410656006,
22 'upload_date': '20140914',
23 'uploader': 'anonymous',
24 'duration': 10.4,
25 'view_count': int,
26 'like_count': int,
27 'dislike_count': int,
28 'categories': list,
29 'age_limit': 0,
30 }
31 }, {
32 'url': 'http://gfycat.com/ifr/JauntyTimelyAmazontreeboa',
33 'info_dict': {
34 'id': 'JauntyTimelyAmazontreeboa',
35 'ext': 'mp4',
36 'title': 'JauntyTimelyAmazontreeboa',
37 'timestamp': 1411720126,
38 'upload_date': '20140926',
39 'uploader': 'anonymous',
40 'duration': 3.52,
41 'view_count': int,
42 'like_count': int,
43 'dislike_count': int,
44 'categories': list,
45 'age_limit': 0,
46 }
47 }, {
48 'url': 'https://gfycat.com/gifs/detail/UnconsciousLankyIvorygull',
49 'only_matching': True
50 }]
51
52 def _real_extract(self, url):
53 video_id = self._match_id(url)
54
55 gfy = self._download_json(
56 'http://gfycat.com/cajax/get/%s' % video_id,
57 video_id, 'Downloading video info')
58 if 'error' in gfy:
59 raise ExtractorError('Gfycat said: ' + gfy['error'], expected=True)
60 gfy = gfy['gfyItem']
61
62 title = gfy.get('title') or gfy['gfyName']
63 description = gfy.get('description')
64 timestamp = int_or_none(gfy.get('createDate'))
65 uploader = gfy.get('userName')
66 view_count = int_or_none(gfy.get('views'))
67 like_count = int_or_none(gfy.get('likes'))
68 dislike_count = int_or_none(gfy.get('dislikes'))
69 age_limit = 18 if gfy.get('nsfw') == '1' else 0
70
71 width = int_or_none(gfy.get('width'))
72 height = int_or_none(gfy.get('height'))
73 fps = int_or_none(gfy.get('frameRate'))
74 num_frames = int_or_none(gfy.get('numFrames'))
75
76 duration = float_or_none(num_frames, fps) if num_frames and fps else None
77
78 categories = gfy.get('tags') or gfy.get('extraLemmas') or []
79
80 FORMATS = ('gif', 'webm', 'mp4')
81 quality = qualities(FORMATS)
82
83 formats = []
84 for format_id in FORMATS:
85 video_url = gfy.get('%sUrl' % format_id)
86 if not video_url:
87 continue
88 filesize = int_or_none(gfy.get('%sSize' % format_id))
89 formats.append({
90 'url': video_url,
91 'format_id': format_id,
92 'width': width,
93 'height': height,
94 'fps': fps,
95 'filesize': filesize,
96 'quality': quality(format_id),
97 })
98 self._sort_formats(formats)
99
100 return {
101 'id': video_id,
102 'title': title,
103 'description': description,
104 'timestamp': timestamp,
105 'uploader': uploader,
106 'duration': duration,
107 'view_count': view_count,
108 'like_count': like_count,
109 'dislike_count': dislike_count,
110 'categories': categories,
111 'age_limit': age_limit,
112 'formats': formats,
113 }
114
```
--- END FILES ---
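One aside on the extractor above: `qualities` (imported from `youtube_dl/utils.py`) turns the preferred-order tuple into a scoring function that `_sort_formats` can rank by. A rough sketch of the idea, simplified from the real helper:

```python
def qualities(quality_ids):
    # higher index in the preference tuple = better quality
    def q(qid):
        try:
            return quality_ids.index(qid)
        except ValueError:
            return -1
    return q

quality = qualities(('gif', 'webm', 'mp4'))
assert quality('mp4') > quality('webm') > quality('gif')
```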
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/youtube_dl/extractor/gfycat.py b/youtube_dl/extractor/gfycat.py
--- a/youtube_dl/extractor/gfycat.py
+++ b/youtube_dl/extractor/gfycat.py
@@ -53,7 +53,7 @@
video_id = self._match_id(url)
gfy = self._download_json(
- 'http://gfycat.com/cajax/get/%s' % video_id,
+ 'https://api.gfycat.com/v1/gfycats/%s' % video_id,
video_id, 'Downloading video info')
if 'error' in gfy:
raise ExtractorError('Gfycat said: ' + gfy['error'], expected=True)
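Note that the patch only swaps the metadata URL (and moves it to HTTPS); everything after `self._download_json` is untouched. That can only work if the v1 response keeps the same `gfyItem` wrapper the old cajax endpoint used, which the patch evidently relies on.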
| {"golden_diff": "diff --git a/youtube_dl/extractor/gfycat.py b/youtube_dl/extractor/gfycat.py\n--- a/youtube_dl/extractor/gfycat.py\n+++ b/youtube_dl/extractor/gfycat.py\n@@ -53,7 +53,7 @@\n video_id = self._match_id(url)\n \n gfy = self._download_json(\n- 'http://gfycat.com/cajax/get/%s' % video_id,\n+ 'https://api.gfycat.com/v1/gfycats/%s' % video_id,\n video_id, 'Downloading video info')\n if 'error' in gfy:\n raise ExtractorError('Gfycat said: ' + gfy['error'], expected=True)\n", "issue": "Gfycat cajax json endpoint is gone\n### Make sure you are using the *latest* version: run `youtube-dl --version` and ensure your version is *2018.11.23*. If it's not, read [this FAQ entry](https://github.com/rg3/youtube-dl/blob/master/README.md#how-do-i-update-youtube-dl) and update. Issues with outdated version will be rejected.\r\n- [x] I've **verified** and **I assure** that I'm running youtube-dl **2018.11.23**\r\n\r\n### Before submitting an *issue* make sure you have:\r\n- [x] At least skimmed through the [README](https://github.com/rg3/youtube-dl/blob/master/README.md), **most notably** the [FAQ](https://github.com/rg3/youtube-dl#faq) and [BUGS](https://github.com/rg3/youtube-dl#bugs) sections\r\n- [x] [Searched](https://github.com/rg3/youtube-dl/search?type=Issues) the bugtracker for similar issues including closed ones\r\n- [x] Checked that provided video/audio/playlist URLs (if any) are alive and playable in a browser\r\n\r\n### What is the purpose of your *issue*?\r\n- [x] Bug report (encountered problems with youtube-dl)\r\n- [ ] Site support request (request for adding support for a new site)\r\n- [ ] Feature request (request for a new functionality)\r\n- [ ] Question\r\n- [ ] Other\r\n\r\n---\r\n\r\n### If the purpose of this *issue* is a *bug report*, *site support request* or you are not completely sure provide the full verbose output as follows:\r\n\r\nAdd the `-v` flag to **your command line** you run youtube-dl with (`youtube-dl -v <your command line>`), copy the **whole** output and insert it here. It should look similar to one below (replace it with **your** log inserted between triple ```):\r\n\r\n```\r\n$ youtube-dl https://gfycat.com/belovedsparseamericanbobtail -v\r\n[debug] System config: []\r\n[debug] User config: []\r\n[debug] Custom config: []\r\n[debug] Command-line args: ['https://gfycat.com/belovedsparseamericanbobtail', '-v']\r\n[debug] Encodings: locale UTF-8, fs utf-8, out UTF-8, pref UTF-8\r\n[debug] youtube-dl version 2018.11.23\r\n[debug] Python version 3.6.5 (CPython) - Darwin-16.7.0-x86_64-i386-64bit\r\n[debug] exe versions: ffmpeg 3.2.2, ffprobe 3.2.2\r\n[debug] Proxy map: {}\r\n[Gfycat] belovedsparseamericanbobtail: Downloading video info\r\nERROR: Unable to download JSON metadata: HTTP Error 404: Not Found (caused by <HTTPError 404: 'Not Found'>); please report this issue on https://yt-dl.org/bug . Make sure you are using the latest version; type youtube-dl -U to update. 
Be sure to call youtube-dl with the --verbose flag and include its complete output.\r\n File \"/usr/local/bin/youtube-dl/youtube_dl/extractor/common.py\", line 605, in _request_webpage\r\n return self._downloader.urlopen(url_or_request)\r\n File \"/usr/local/bin/youtube-dl/youtube_dl/YoutubeDL.py\", line 2211, in urlopen\r\n return self._opener.open(req, timeout=self._socket_timeout)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 532, in open\r\n response = meth(req, response)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 642, in http_response\r\n 'http', request, response, code, msg, hdrs)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 564, in error\r\n result = self._call_chain(*args)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 504, in _call_chain\r\n result = func(*args)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 756, in http_error_302\r\n return self.parent.open(new, timeout=req.timeout)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 532, in open\r\n response = meth(req, response)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 642, in http_response\r\n 'http', request, response, code, msg, hdrs)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 570, in error\r\n return self._call_chain(*args)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 504, in _call_chain\r\n result = func(*args)\r\n File \"/usr/local/Cellar/python/3.6.5/Frameworks/Python.framework/Versions/3.6/lib/python3.6/urllib/request.py\", line 650, in http_error_default\r\n raise HTTPError(req.full_url, code, msg, hdrs, fp)\r\n\r\n```\r\n\r\n---\r\n\r\n### Description of your *issue*, suggested solution and other information\r\n\r\nGfycat downloads no longer work because Gfycat removed the API youtube-dl uses:\r\n\r\n> Yes we announced deprecation of this endpoint 1.5 years ago. Are you using it for an app still?\r\n>\r\n> This is the link to our current API. https://developers.gfycat.com/api/#introduction\r\nhttps://www.reddit.com/r/gfycat/comments/a17ewc/embeded_gfycats_returning_failed_to_load_resource/eanbmrh/\r\n\r\nSo if you wanted to get https://gfycat.com/belovedsparseamericanbobtail then `youtube-dl` would look for the meta at https://gfycat.com/cajax/get/belovedsparseamericanbobtail (you can try the link, it'll 404).\r\nhttps://github.com/rg3/youtube-dl/blob/d9df8f120b325766181fb474a8c534e51df78f17/youtube_dl/extractor/gfycat.py#L55-L57\r\n\r\nIt's not obvious how to me how to fix this because the new api uses an oauth2 bearer token, and there isn't a key store AFAIK built into youtube-dl. 
The closest thing looks like the `.netrc` file support, or possibly a custom [config file entry](https://github.com/rg3/youtube-dl#configuration)\n", "before_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nfrom .common import InfoExtractor\nfrom ..utils import (\n int_or_none,\n float_or_none,\n qualities,\n ExtractorError,\n)\n\n\nclass GfycatIE(InfoExtractor):\n _VALID_URL = r'https?://(?:www\\.)?gfycat\\.com/(?:ifr/|gifs/detail/)?(?P<id>[^/?#]+)'\n _TESTS = [{\n 'url': 'http://gfycat.com/DeadlyDecisiveGermanpinscher',\n 'info_dict': {\n 'id': 'DeadlyDecisiveGermanpinscher',\n 'ext': 'mp4',\n 'title': 'Ghost in the Shell',\n 'timestamp': 1410656006,\n 'upload_date': '20140914',\n 'uploader': 'anonymous',\n 'duration': 10.4,\n 'view_count': int,\n 'like_count': int,\n 'dislike_count': int,\n 'categories': list,\n 'age_limit': 0,\n }\n }, {\n 'url': 'http://gfycat.com/ifr/JauntyTimelyAmazontreeboa',\n 'info_dict': {\n 'id': 'JauntyTimelyAmazontreeboa',\n 'ext': 'mp4',\n 'title': 'JauntyTimelyAmazontreeboa',\n 'timestamp': 1411720126,\n 'upload_date': '20140926',\n 'uploader': 'anonymous',\n 'duration': 3.52,\n 'view_count': int,\n 'like_count': int,\n 'dislike_count': int,\n 'categories': list,\n 'age_limit': 0,\n }\n }, {\n 'url': 'https://gfycat.com/gifs/detail/UnconsciousLankyIvorygull',\n 'only_matching': True\n }]\n\n def _real_extract(self, url):\n video_id = self._match_id(url)\n\n gfy = self._download_json(\n 'http://gfycat.com/cajax/get/%s' % video_id,\n video_id, 'Downloading video info')\n if 'error' in gfy:\n raise ExtractorError('Gfycat said: ' + gfy['error'], expected=True)\n gfy = gfy['gfyItem']\n\n title = gfy.get('title') or gfy['gfyName']\n description = gfy.get('description')\n timestamp = int_or_none(gfy.get('createDate'))\n uploader = gfy.get('userName')\n view_count = int_or_none(gfy.get('views'))\n like_count = int_or_none(gfy.get('likes'))\n dislike_count = int_or_none(gfy.get('dislikes'))\n age_limit = 18 if gfy.get('nsfw') == '1' else 0\n\n width = int_or_none(gfy.get('width'))\n height = int_or_none(gfy.get('height'))\n fps = int_or_none(gfy.get('frameRate'))\n num_frames = int_or_none(gfy.get('numFrames'))\n\n duration = float_or_none(num_frames, fps) if num_frames and fps else None\n\n categories = gfy.get('tags') or gfy.get('extraLemmas') or []\n\n FORMATS = ('gif', 'webm', 'mp4')\n quality = qualities(FORMATS)\n\n formats = []\n for format_id in FORMATS:\n video_url = gfy.get('%sUrl' % format_id)\n if not video_url:\n continue\n filesize = int_or_none(gfy.get('%sSize' % format_id))\n formats.append({\n 'url': video_url,\n 'format_id': format_id,\n 'width': width,\n 'height': height,\n 'fps': fps,\n 'filesize': filesize,\n 'quality': quality(format_id),\n })\n self._sort_formats(formats)\n\n return {\n 'id': video_id,\n 'title': title,\n 'description': description,\n 'timestamp': timestamp,\n 'uploader': uploader,\n 'duration': duration,\n 'view_count': view_count,\n 'like_count': like_count,\n 'dislike_count': dislike_count,\n 'categories': categories,\n 'age_limit': age_limit,\n 'formats': formats,\n }\n", "path": "youtube_dl/extractor/gfycat.py"}], "after_files": [{"content": "# coding: utf-8\nfrom __future__ import unicode_literals\n\nfrom .common import InfoExtractor\nfrom ..utils import (\n int_or_none,\n float_or_none,\n qualities,\n ExtractorError,\n)\n\n\nclass GfycatIE(InfoExtractor):\n _VALID_URL = r'https?://(?:www\\.)?gfycat\\.com/(?:ifr/|gifs/detail/)?(?P<id>[^/?#]+)'\n _TESTS = [{\n 'url': 
'http://gfycat.com/DeadlyDecisiveGermanpinscher',\n 'info_dict': {\n 'id': 'DeadlyDecisiveGermanpinscher',\n 'ext': 'mp4',\n 'title': 'Ghost in the Shell',\n 'timestamp': 1410656006,\n 'upload_date': '20140914',\n 'uploader': 'anonymous',\n 'duration': 10.4,\n 'view_count': int,\n 'like_count': int,\n 'dislike_count': int,\n 'categories': list,\n 'age_limit': 0,\n }\n }, {\n 'url': 'http://gfycat.com/ifr/JauntyTimelyAmazontreeboa',\n 'info_dict': {\n 'id': 'JauntyTimelyAmazontreeboa',\n 'ext': 'mp4',\n 'title': 'JauntyTimelyAmazontreeboa',\n 'timestamp': 1411720126,\n 'upload_date': '20140926',\n 'uploader': 'anonymous',\n 'duration': 3.52,\n 'view_count': int,\n 'like_count': int,\n 'dislike_count': int,\n 'categories': list,\n 'age_limit': 0,\n }\n }, {\n 'url': 'https://gfycat.com/gifs/detail/UnconsciousLankyIvorygull',\n 'only_matching': True\n }]\n\n def _real_extract(self, url):\n video_id = self._match_id(url)\n\n gfy = self._download_json(\n 'https://api.gfycat.com/v1/gfycats/%s' % video_id,\n video_id, 'Downloading video info')\n if 'error' in gfy:\n raise ExtractorError('Gfycat said: ' + gfy['error'], expected=True)\n gfy = gfy['gfyItem']\n\n title = gfy.get('title') or gfy['gfyName']\n description = gfy.get('description')\n timestamp = int_or_none(gfy.get('createDate'))\n uploader = gfy.get('userName')\n view_count = int_or_none(gfy.get('views'))\n like_count = int_or_none(gfy.get('likes'))\n dislike_count = int_or_none(gfy.get('dislikes'))\n age_limit = 18 if gfy.get('nsfw') == '1' else 0\n\n width = int_or_none(gfy.get('width'))\n height = int_or_none(gfy.get('height'))\n fps = int_or_none(gfy.get('frameRate'))\n num_frames = int_or_none(gfy.get('numFrames'))\n\n duration = float_or_none(num_frames, fps) if num_frames and fps else None\n\n categories = gfy.get('tags') or gfy.get('extraLemmas') or []\n\n FORMATS = ('gif', 'webm', 'mp4')\n quality = qualities(FORMATS)\n\n formats = []\n for format_id in FORMATS:\n video_url = gfy.get('%sUrl' % format_id)\n if not video_url:\n continue\n filesize = int_or_none(gfy.get('%sSize' % format_id))\n formats.append({\n 'url': video_url,\n 'format_id': format_id,\n 'width': width,\n 'height': height,\n 'fps': fps,\n 'filesize': filesize,\n 'quality': quality(format_id),\n })\n self._sort_formats(formats)\n\n return {\n 'id': video_id,\n 'title': title,\n 'description': description,\n 'timestamp': timestamp,\n 'uploader': uploader,\n 'duration': duration,\n 'view_count': view_count,\n 'like_count': like_count,\n 'dislike_count': dislike_count,\n 'categories': categories,\n 'age_limit': age_limit,\n 'formats': formats,\n }\n", "path": "youtube_dl/extractor/gfycat.py"}]} |
gh_patches_debug_1628 | rasdani/github-patches | git_diff | watchdogpolska__feder-349 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Friendly message when updating credibility (original title: "Przyjazny komunikat o aktualizacji wiarygodności")

--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `feder/tasks/views.py`
Content:
```
1 from atom.views import (ActionMessageMixin, ActionView, CreateMessageMixin,
2 DeleteMessageMixin, UpdateMessageMixin)
3 from braces.views import (FormValidMessageMixin, PrefetchRelatedMixin,
4 SelectRelatedMixin, UserFormKwargsMixin)
5 from cached_property import cached_property
6 from django.contrib import messages
7 from django.contrib.auth.mixins import LoginRequiredMixin
8 from django.core.urlresolvers import reverse, reverse_lazy
9 from django.shortcuts import get_object_or_404, redirect
10 from django.utils.translation import ugettext_lazy as _
11 from django.views.generic import (CreateView, DeleteView, DetailView, FormView,
12 UpdateView)
13 from django_filters.views import FilterView
14
15 from feder.cases.models import Case
16 from feder.main.mixins import (AttrPermissionRequiredMixin,
17 RaisePermissionRequiredMixin)
18 from .filters import TaskFilter
19 from .forms import AnswerFormSet, SurveyForm, TaskForm
20 from .models import Survey, Task
21
22 DONE_MESSAGE_TEXT = _("Already done the job. If you want to change the answer - delete answers.")
23
24 THANK_TEXT = _("Thank you for your submission. It is approaching us to know the " +
25 "truth, by obtaining reliable data.")
26
27 EXHAUSTED_TEXT = _("Thank you for your help. Unfortunately, all the tasks " +
28 "for you have been exhausted.")
29
30
31 class TaskListView(SelectRelatedMixin, FilterView):
32 filterset_class = TaskFilter
33 model = Task
34 select_related = ['case', 'questionary']
35 paginate_by = 25
36
37 def get_context_data(self, **kwargs):
38 context = super(TaskListView, self).get_context_data(**kwargs)
39 context['stats'] = self.object_list.survey_stats()
40 return context
41
42
43 class TaskDetailView(SelectRelatedMixin, PrefetchRelatedMixin, DetailView):
44 model = Task
45 select_related = ['case__monitoring', 'case__institution', 'questionary']
46 prefetch_related = ['survey_set', 'questionary__question_set']
47
48 def get_user_survey(self):
49 try:
50 return (self.object.survey_set.with_full_answer().
51 of_user(self.request.user, self.request.light_user).get())
52 except Survey.DoesNotExist:
53 return None
54
55 def get_context_data(self, **kwargs):
56 context = super(TaskDetailView, self).get_context_data(**kwargs)
57 context['formset'] = AnswerFormSet(questionary=self.object.questionary)
58 context['user_survey'] = self.get_user_survey()
59 return context
60
61
62 class TaskSurveyView(SelectRelatedMixin, PrefetchRelatedMixin, DetailView):
63 model = Task
64 select_related = ['case__monitoring', 'case__institution', 'questionary', ]
65 prefetch_related = ['questionary__question_set']
66 template_name_suffix = '_survey'
67
68 def get_context_data(self, **kwargs):
69 context = super(TaskSurveyView, self).get_context_data(**kwargs)
70 survey_list = (Survey.objects.for_task(self.object).with_user().with_full_answer().all())
71 context['survey_list'] = survey_list
72 user_survey_list = [x for x in survey_list if x.user == self.request.user] # TODO: Lazy
73 context['user_survey'] = user_survey_list[0] if user_survey_list else None
74 return context
75
76
77 class TaskCreateView(RaisePermissionRequiredMixin, UserFormKwargsMixin,
78 CreateMessageMixin, CreateView):
79 model = Task
80 form_class = TaskForm
81 permission_required = 'monitorings.add_task'
82
83 @cached_property
84 def case(self):
85 return get_object_or_404(Case.objects.select_related('monitoring'),
86 pk=self.kwargs['case'])
87
88 def get_permission_object(self):
89 return self.case.monitoring
90
91 def get_form_kwargs(self):
92 kw = super(TaskCreateView, self).get_form_kwargs()
93 kw['case'] = self.case
94 return kw
95
96 def get_context_data(self, **kwargs):
97 context = super(TaskCreateView, self).get_context_data(**kwargs)
98 context['case'] = self.case
99 return context
100
101
102 class TaskUpdateView(AttrPermissionRequiredMixin, UserFormKwargsMixin,
103 UpdateMessageMixin, FormValidMessageMixin, UpdateView):
104 model = Task
105 form_class = TaskForm
106 permission_required = 'change_task'
107 permission_attribute = 'case__monitoring'
108
109
110 class TaskDeleteView(AttrPermissionRequiredMixin, DeleteMessageMixin, DeleteView):
111 model = Task
112 success_url = reverse_lazy('tasks:list')
113 permission_required = 'delete_task'
114 permission_attribute = 'case__monitoring'
115
116
117 class SurveyDeleteView(LoginRequiredMixin, DeleteMessageMixin, DeleteView):
118 model = Survey
119 slug_url_kwarg = 'task_id'
120 slug_field = 'task_id'
121
122 def get_queryset(self, *args, **kwargs):
123 qs = super(SurveyDeleteView, self).get_queryset()
124 return qs.of_user(self.request.user, self.request.light_user).with_full_answer()
125
126 def get_success_url(self):
127 return self.object.task.get_absolute_url()
128
129
130 class SurveySelectView(AttrPermissionRequiredMixin, ActionMessageMixin,
131 SelectRelatedMixin, ActionView): # TODO: Write test
132 model = Survey
133 template_name_suffix = '_select'
134 select_related = ['task__case__monitoring', ]
135 permission_required = 'monitorings.select_survey'
136 permission_attribute = 'task__case__monitoring'
137 direction = None
138 change = {'up': 1, 'down': -1}
139
140 def action(self, *args, **kwargs):
141 self.object.credibility_update(self.change[self.direction])
142 self.object.save()
143
144 def get_success_message(self):
145 return _("Survey {object} selected!").format(object=self.object)
146
147 def get_success_url(self):
148 return reverse('tasks:survey', kwargs={'pk': self.object.task_id})
149
150
151 class SurveyFillView(FormView):
152 template_name = 'tasks/survey_fill.html'
153 form_class = SurveyForm
154 formset_class = AnswerFormSet
155
156 @cached_property
157 def task(self):
158 return get_object_or_404(Task, pk=self.kwargs['pk'])
159
160 @cached_property
161 def object(self):
162 try:
163 return Survey.objects.filter(task=self.task).of_user(user=self.request.user,
164 light_user=self.request.light_user).all()[0]
165 except IndexError:
166 return None
167
168 def get_form_kwargs(self):
169 kwargs = super(SurveyFillView, self).get_form_kwargs()
170 kwargs['task'] = self.task
171 kwargs['instance'] = self.object
172 return kwargs
173
174 def get_success_url(self):
175 if 'save' in self.request.POST: # only save
176 return self.object.task.get_absolute_url()
177
178 # find next task
179 try:
180 next_task = self.task.get_next_for_user(self.request.user)
181 return next_task.get_absolute_url()
182 except Task.DoesNotExist:
183 messages.success(self.request, EXHAUSTED_TEXT)
184 return self.task.case.monitoring.get_absolute_url()
185
186 @cached_property
187 def formset(self):
188 return self.formset_class(data=self.request.POST or None,
189 survey=self.object,
190 questionary=self.task.questionary)
191
192 def form_valid(self, form):
193 self.object = form.save(commit=False)
194 if self.formset.is_valid():
195 if self.request.user.is_authenticated():
196 self.object.user = self.request.user
197 else:
198 self.object.light_user = self.request.light_user_new
199 self.object.save()
200 self.formset.save()
201 return self.formset_valid(form, self.object, self.formset)
202 return self.render_to_response(self.get_context_data())
203
204 def formset_valid(self, form, obj, formset):
205 messages.success(self.request, THANK_TEXT)
206 obj.save()
207 formset.save()
208 return redirect(self.get_success_url())
209
210 def get_context_data(self, **kwargs):
211 context = super(SurveyFillView, self).get_context_data(**kwargs)
212 context['formset'] = self.formset
213 context['object'] = self.object
214 context['task'] = self.task
215 return context
216
```
--- END FILES ---
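The `direction` attribute on `SurveySelectView` above is a plain class attribute, so each direction presumably gets its own route. The project's `urls.py` is not shown; a hypothetical wiring, using standard Django `as_view()` keyword overrides, might look like:

```python
# Hypothetical urls.py wiring; feder's actual patterns and route names
# may differ. as_view(direction=...) just sets the class attribute.
from django.conf.urls import url
from . import views

urlpatterns = [
    url(r'^survey-(?P<pk>\d+)/up$',
        views.SurveySelectView.as_view(direction='up'), name='survey_up'),
    url(r'^survey-(?P<pk>\d+)/down$',
        views.SurveySelectView.as_view(direction='down'), name='survey_down'),
]
```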
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/feder/tasks/views.py b/feder/tasks/views.py
--- a/feder/tasks/views.py
+++ b/feder/tasks/views.py
@@ -142,7 +142,10 @@
self.object.save()
def get_success_message(self):
- return _("Survey {object} selected!").format(object=self.object)
+ if self.direction == 'up':
+ return _("Survey credibility increased!")
+ else:
+ return _("Survey credibility decreased!")
def get_success_url(self):
return reverse('tasks:survey', kwargs={'pk': self.object.task_id})
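Since the class carries a `# TODO: Write test` note, a minimal check of the new messages could look like the sketch below. It is only a sketch: it instantiates the view directly and skips routing, permissions, and the credibility update itself.

```python
# Hypothetical unit test; Django's View.__init__ accepts **kwargs and
# sets them as attributes, so direction can be injected directly.
from feder.tasks.views import SurveySelectView

def test_success_message_reflects_direction():
    assert 'increased' in str(SurveySelectView(direction='up').get_success_message())
    assert 'decreased' in str(SurveySelectView(direction='down').get_success_message())
```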
| {"golden_diff": "diff --git a/feder/tasks/views.py b/feder/tasks/views.py\n--- a/feder/tasks/views.py\n+++ b/feder/tasks/views.py\n@@ -142,7 +142,10 @@\n self.object.save()\n \n def get_success_message(self):\n- return _(\"Survey {object} selected!\").format(object=self.object)\n+ if self.direction == 'up':\n+ return _(\"Survey credibility increased!\")\n+ else:\n+ return _(\"Survey credibility decreased!\")\n \n def get_success_url(self):\n return reverse('tasks:survey', kwargs={'pk': self.object.task_id})\n", "issue": "Przyjazny komunikat o aktualizacji wiarygodno\u015bci\n\r\n\n", "before_files": [{"content": "from atom.views import (ActionMessageMixin, ActionView, CreateMessageMixin,\n DeleteMessageMixin, UpdateMessageMixin)\nfrom braces.views import (FormValidMessageMixin, PrefetchRelatedMixin,\n SelectRelatedMixin, UserFormKwargsMixin)\nfrom cached_property import cached_property\nfrom django.contrib import messages\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.core.urlresolvers import reverse, reverse_lazy\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.views.generic import (CreateView, DeleteView, DetailView, FormView,\n UpdateView)\nfrom django_filters.views import FilterView\n\nfrom feder.cases.models import Case\nfrom feder.main.mixins import (AttrPermissionRequiredMixin,\n RaisePermissionRequiredMixin)\nfrom .filters import TaskFilter\nfrom .forms import AnswerFormSet, SurveyForm, TaskForm\nfrom .models import Survey, Task\n\nDONE_MESSAGE_TEXT = _(\"Already done the job. If you want to change the answer - delete answers.\")\n\nTHANK_TEXT = _(\"Thank you for your submission. It is approaching us to know the \" +\n \"truth, by obtaining reliable data.\")\n\nEXHAUSTED_TEXT = _(\"Thank you for your help. 
Unfortunately, all the tasks \" +\n \"for you have been exhausted.\")\n\n\nclass TaskListView(SelectRelatedMixin, FilterView):\n filterset_class = TaskFilter\n model = Task\n select_related = ['case', 'questionary']\n paginate_by = 25\n\n def get_context_data(self, **kwargs):\n context = super(TaskListView, self).get_context_data(**kwargs)\n context['stats'] = self.object_list.survey_stats()\n return context\n\n\nclass TaskDetailView(SelectRelatedMixin, PrefetchRelatedMixin, DetailView):\n model = Task\n select_related = ['case__monitoring', 'case__institution', 'questionary']\n prefetch_related = ['survey_set', 'questionary__question_set']\n\n def get_user_survey(self):\n try:\n return (self.object.survey_set.with_full_answer().\n of_user(self.request.user, self.request.light_user).get())\n except Survey.DoesNotExist:\n return None\n\n def get_context_data(self, **kwargs):\n context = super(TaskDetailView, self).get_context_data(**kwargs)\n context['formset'] = AnswerFormSet(questionary=self.object.questionary)\n context['user_survey'] = self.get_user_survey()\n return context\n\n\nclass TaskSurveyView(SelectRelatedMixin, PrefetchRelatedMixin, DetailView):\n model = Task\n select_related = ['case__monitoring', 'case__institution', 'questionary', ]\n prefetch_related = ['questionary__question_set']\n template_name_suffix = '_survey'\n\n def get_context_data(self, **kwargs):\n context = super(TaskSurveyView, self).get_context_data(**kwargs)\n survey_list = (Survey.objects.for_task(self.object).with_user().with_full_answer().all())\n context['survey_list'] = survey_list\n user_survey_list = [x for x in survey_list if x.user == self.request.user] # TODO: Lazy\n context['user_survey'] = user_survey_list[0] if user_survey_list else None\n return context\n\n\nclass TaskCreateView(RaisePermissionRequiredMixin, UserFormKwargsMixin,\n CreateMessageMixin, CreateView):\n model = Task\n form_class = TaskForm\n permission_required = 'monitorings.add_task'\n\n @cached_property\n def case(self):\n return get_object_or_404(Case.objects.select_related('monitoring'),\n pk=self.kwargs['case'])\n\n def get_permission_object(self):\n return self.case.monitoring\n\n def get_form_kwargs(self):\n kw = super(TaskCreateView, self).get_form_kwargs()\n kw['case'] = self.case\n return kw\n\n def get_context_data(self, **kwargs):\n context = super(TaskCreateView, self).get_context_data(**kwargs)\n context['case'] = self.case\n return context\n\n\nclass TaskUpdateView(AttrPermissionRequiredMixin, UserFormKwargsMixin,\n UpdateMessageMixin, FormValidMessageMixin, UpdateView):\n model = Task\n form_class = TaskForm\n permission_required = 'change_task'\n permission_attribute = 'case__monitoring'\n\n\nclass TaskDeleteView(AttrPermissionRequiredMixin, DeleteMessageMixin, DeleteView):\n model = Task\n success_url = reverse_lazy('tasks:list')\n permission_required = 'delete_task'\n permission_attribute = 'case__monitoring'\n\n\nclass SurveyDeleteView(LoginRequiredMixin, DeleteMessageMixin, DeleteView):\n model = Survey\n slug_url_kwarg = 'task_id'\n slug_field = 'task_id'\n\n def get_queryset(self, *args, **kwargs):\n qs = super(SurveyDeleteView, self).get_queryset()\n return qs.of_user(self.request.user, self.request.light_user).with_full_answer()\n\n def get_success_url(self):\n return self.object.task.get_absolute_url()\n\n\nclass SurveySelectView(AttrPermissionRequiredMixin, ActionMessageMixin,\n SelectRelatedMixin, ActionView): # TODO: Write test\n model = Survey\n template_name_suffix = '_select'\n select_related = 
['task__case__monitoring', ]\n permission_required = 'monitorings.select_survey'\n permission_attribute = 'task__case__monitoring'\n direction = None\n change = {'up': 1, 'down': -1}\n\n def action(self, *args, **kwargs):\n self.object.credibility_update(self.change[self.direction])\n self.object.save()\n\n def get_success_message(self):\n return _(\"Survey {object} selected!\").format(object=self.object)\n\n def get_success_url(self):\n return reverse('tasks:survey', kwargs={'pk': self.object.task_id})\n\n\nclass SurveyFillView(FormView):\n template_name = 'tasks/survey_fill.html'\n form_class = SurveyForm\n formset_class = AnswerFormSet\n\n @cached_property\n def task(self):\n return get_object_or_404(Task, pk=self.kwargs['pk'])\n\n @cached_property\n def object(self):\n try:\n return Survey.objects.filter(task=self.task).of_user(user=self.request.user,\n light_user=self.request.light_user).all()[0]\n except IndexError:\n return None\n\n def get_form_kwargs(self):\n kwargs = super(SurveyFillView, self).get_form_kwargs()\n kwargs['task'] = self.task\n kwargs['instance'] = self.object\n return kwargs\n\n def get_success_url(self):\n if 'save' in self.request.POST: # only save\n return self.object.task.get_absolute_url()\n\n # find next task\n try:\n next_task = self.task.get_next_for_user(self.request.user)\n return next_task.get_absolute_url()\n except Task.DoesNotExist:\n messages.success(self.request, EXHAUSTED_TEXT)\n return self.task.case.monitoring.get_absolute_url()\n\n @cached_property\n def formset(self):\n return self.formset_class(data=self.request.POST or None,\n survey=self.object,\n questionary=self.task.questionary)\n\n def form_valid(self, form):\n self.object = form.save(commit=False)\n if self.formset.is_valid():\n if self.request.user.is_authenticated():\n self.object.user = self.request.user\n else:\n self.object.light_user = self.request.light_user_new\n self.object.save()\n self.formset.save()\n return self.formset_valid(form, self.object, self.formset)\n return self.render_to_response(self.get_context_data())\n\n def formset_valid(self, form, obj, formset):\n messages.success(self.request, THANK_TEXT)\n obj.save()\n formset.save()\n return redirect(self.get_success_url())\n\n def get_context_data(self, **kwargs):\n context = super(SurveyFillView, self).get_context_data(**kwargs)\n context['formset'] = self.formset\n context['object'] = self.object\n context['task'] = self.task\n return context\n", "path": "feder/tasks/views.py"}], "after_files": [{"content": "from atom.views import (ActionMessageMixin, ActionView, CreateMessageMixin,\n DeleteMessageMixin, UpdateMessageMixin)\nfrom braces.views import (FormValidMessageMixin, PrefetchRelatedMixin,\n SelectRelatedMixin, UserFormKwargsMixin)\nfrom cached_property import cached_property\nfrom django.contrib import messages\nfrom django.contrib.auth.mixins import LoginRequiredMixin\nfrom django.core.urlresolvers import reverse, reverse_lazy\nfrom django.shortcuts import get_object_or_404, redirect\nfrom django.utils.translation import ugettext_lazy as _\nfrom django.views.generic import (CreateView, DeleteView, DetailView, FormView,\n UpdateView)\nfrom django_filters.views import FilterView\n\nfrom feder.cases.models import Case\nfrom feder.main.mixins import (AttrPermissionRequiredMixin,\n RaisePermissionRequiredMixin)\nfrom .filters import TaskFilter\nfrom .forms import AnswerFormSet, SurveyForm, TaskForm\nfrom .models import Survey, Task\n\nDONE_MESSAGE_TEXT = _(\"Already done the job. 
If you want to change the answer - delete answers.\")\n\nTHANK_TEXT = _(\"Thank you for your submission. It is approaching us to know the \" +\n \"truth, by obtaining reliable data.\")\n\nEXHAUSTED_TEXT = _(\"Thank you for your help. Unfortunately, all the tasks \" +\n \"for you have been exhausted.\")\n\n\nclass TaskListView(SelectRelatedMixin, FilterView):\n filterset_class = TaskFilter\n model = Task\n select_related = ['case', 'questionary']\n paginate_by = 25\n\n def get_context_data(self, **kwargs):\n context = super(TaskListView, self).get_context_data(**kwargs)\n context['stats'] = self.object_list.survey_stats()\n return context\n\n\nclass TaskDetailView(SelectRelatedMixin, PrefetchRelatedMixin, DetailView):\n model = Task\n select_related = ['case__monitoring', 'case__institution', 'questionary']\n prefetch_related = ['survey_set', 'questionary__question_set']\n\n def get_user_survey(self):\n try:\n return (self.object.survey_set.with_full_answer().\n of_user(self.request.user, self.request.light_user).get())\n except Survey.DoesNotExist:\n return None\n\n def get_context_data(self, **kwargs):\n context = super(TaskDetailView, self).get_context_data(**kwargs)\n context['formset'] = AnswerFormSet(questionary=self.object.questionary)\n context['user_survey'] = self.get_user_survey()\n return context\n\n\nclass TaskSurveyView(SelectRelatedMixin, PrefetchRelatedMixin, DetailView):\n model = Task\n select_related = ['case__monitoring', 'case__institution', 'questionary', ]\n prefetch_related = ['questionary__question_set']\n template_name_suffix = '_survey'\n\n def get_context_data(self, **kwargs):\n context = super(TaskSurveyView, self).get_context_data(**kwargs)\n survey_list = (Survey.objects.for_task(self.object).with_user().with_full_answer().all())\n context['survey_list'] = survey_list\n user_survey_list = [x for x in survey_list if x.user == self.request.user] # TODO: Lazy\n context['user_survey'] = user_survey_list[0] if user_survey_list else None\n return context\n\n\nclass TaskCreateView(RaisePermissionRequiredMixin, UserFormKwargsMixin,\n CreateMessageMixin, CreateView):\n model = Task\n form_class = TaskForm\n permission_required = 'monitorings.add_task'\n\n @cached_property\n def case(self):\n return get_object_or_404(Case.objects.select_related('monitoring'),\n pk=self.kwargs['case'])\n\n def get_permission_object(self):\n return self.case.monitoring\n\n def get_form_kwargs(self):\n kw = super(TaskCreateView, self).get_form_kwargs()\n kw['case'] = self.case\n return kw\n\n def get_context_data(self, **kwargs):\n context = super(TaskCreateView, self).get_context_data(**kwargs)\n context['case'] = self.case\n return context\n\n\nclass TaskUpdateView(AttrPermissionRequiredMixin, UserFormKwargsMixin,\n UpdateMessageMixin, FormValidMessageMixin, UpdateView):\n model = Task\n form_class = TaskForm\n permission_required = 'change_task'\n permission_attribute = 'case__monitoring'\n\n\nclass TaskDeleteView(AttrPermissionRequiredMixin, DeleteMessageMixin, DeleteView):\n model = Task\n success_url = reverse_lazy('tasks:list')\n permission_required = 'delete_task'\n permission_attribute = 'case__monitoring'\n\n\nclass SurveyDeleteView(LoginRequiredMixin, DeleteMessageMixin, DeleteView):\n model = Survey\n slug_url_kwarg = 'task_id'\n slug_field = 'task_id'\n\n def get_queryset(self, *args, **kwargs):\n qs = super(SurveyDeleteView, self).get_queryset()\n return qs.of_user(self.request.user, self.request.light_user).with_full_answer()\n\n def get_success_url(self):\n return 
self.object.task.get_absolute_url()\n\n\nclass SurveySelectView(AttrPermissionRequiredMixin, ActionMessageMixin,\n SelectRelatedMixin, ActionView): # TODO: Write test\n model = Survey\n template_name_suffix = '_select'\n select_related = ['task__case__monitoring', ]\n permission_required = 'monitorings.select_survey'\n permission_attribute = 'task__case__monitoring'\n direction = None\n change = {'up': 1, 'down': -1}\n\n def action(self, *args, **kwargs):\n self.object.credibility_update(self.change[self.direction])\n self.object.save()\n\n def get_success_message(self):\n if self.direction == 'up':\n return _(\"Survey credibility increased!\")\n else:\n return _(\"Survey credibility decreased!\")\n\n def get_success_url(self):\n return reverse('tasks:survey', kwargs={'pk': self.object.task_id})\n\n\nclass SurveyFillView(FormView):\n template_name = 'tasks/survey_fill.html'\n form_class = SurveyForm\n formset_class = AnswerFormSet\n\n @cached_property\n def task(self):\n return get_object_or_404(Task, pk=self.kwargs['pk'])\n\n @cached_property\n def object(self):\n try:\n return Survey.objects.filter(task=self.task).of_user(user=self.request.user,\n light_user=self.request.light_user).all()[0]\n except IndexError:\n return None\n\n def get_form_kwargs(self):\n kwargs = super(SurveyFillView, self).get_form_kwargs()\n kwargs['task'] = self.task\n kwargs['instance'] = self.object\n return kwargs\n\n def get_success_url(self):\n if 'save' in self.request.POST: # only save\n return self.object.task.get_absolute_url()\n\n # find next task\n try:\n next_task = self.task.get_next_for_user(self.request.user)\n return next_task.get_absolute_url()\n except Task.DoesNotExist:\n messages.success(self.request, EXHAUSTED_TEXT)\n return self.task.case.monitoring.get_absolute_url()\n\n @cached_property\n def formset(self):\n return self.formset_class(data=self.request.POST or None,\n survey=self.object,\n questionary=self.task.questionary)\n\n def form_valid(self, form):\n self.object = form.save(commit=False)\n if self.formset.is_valid():\n if self.request.user.is_authenticated():\n self.object.user = self.request.user\n else:\n self.object.light_user = self.request.light_user_new\n self.object.save()\n self.formset.save()\n return self.formset_valid(form, self.object, self.formset)\n return self.render_to_response(self.get_context_data())\n\n def formset_valid(self, form, obj, formset):\n messages.success(self.request, THANK_TEXT)\n obj.save()\n formset.save()\n return redirect(self.get_success_url())\n\n def get_context_data(self, **kwargs):\n context = super(SurveyFillView, self).get_context_data(**kwargs)\n context['formset'] = self.formset\n context['object'] = self.object\n context['task'] = self.task\n return context\n", "path": "feder/tasks/views.py"}]} |
gh_patches_debug_1629 | rasdani/github-patches | git_diff | pytorch__vision-810 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`UserWarning` when applying `Normalize`
Hi!
pytorch's version: 1.0.1.post2
torchvision's version: 0.2.2.post3
Sorry for the formatting, but you need to scroll the snippet to the right in order to see the warning.
```python
>>> from torch import tensor
>>> from torchvision.transforms import Normalize
>>> n = Normalize(tensor([1.0, 2.0, 3.0]), tensor([1.0, 2.0, 3.0]))
>>> t = tensor([1.0, 2.0, 3.0]).view(3, 1, 1)
>>> n(t)
/home/ygorishniy/miniconda3/envs/x/lib/python3.6/site-packages/torchvision/transforms/functional.py:206: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).
mean = torch.tensor(mean, dtype=torch.float32)
/home/ygorishniy/miniconda3/envs/x/lib/python3.6/site-packages/torchvision/transforms/functional.py:207: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).
std = torch.tensor(std, dtype=torch.float32)
Out[17]:
tensor([[[0.]],

        [[0.]],

        [[0.]]])
```
--- END ISSUE ---
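The warning is raised because `normalize` in `functional.py` passes already-constructed tensors back through `torch.tensor(...)` (lines 206-207 of the file below). A common remedy (stated here as an assumption about the eventual fix, not something the issue spells out) is `torch.as_tensor`, which reuses a tensor input instead of copy-constructing it:

```python
import torch

mean = torch.tensor([1.0, 2.0, 3.0])
# torch.tensor(mean) would copy-construct and emit the UserWarning;
# torch.as_tensor returns the input unchanged when dtype/device match.
m = torch.as_tensor(mean, dtype=torch.float32)
assert m.data_ptr() == mean.data_ptr()  # same storage, no copy, no warning
```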
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `torchvision/transforms/functional.py`
Content:
```
1 from __future__ import division
2 import torch
3 import sys
4 import math
5 from PIL import Image, ImageOps, ImageEnhance, PILLOW_VERSION
6 try:
7 import accimage
8 except ImportError:
9 accimage = None
10 import numpy as np
11 import numbers
12 import collections
13 import warnings
14
15 if sys.version_info < (3, 3):
16 Sequence = collections.Sequence
17 Iterable = collections.Iterable
18 else:
19 Sequence = collections.abc.Sequence
20 Iterable = collections.abc.Iterable
21
22
23 def _is_pil_image(img):
24 if accimage is not None:
25 return isinstance(img, (Image.Image, accimage.Image))
26 else:
27 return isinstance(img, Image.Image)
28
29
30 def _is_tensor_image(img):
31 return torch.is_tensor(img) and img.ndimension() == 3
32
33
34 def _is_numpy_image(img):
35 return isinstance(img, np.ndarray) and (img.ndim in {2, 3})
36
37
38 def to_tensor(pic):
39 """Convert a ``PIL Image`` or ``numpy.ndarray`` to tensor.
40
41 See ``ToTensor`` for more details.
42
43 Args:
44 pic (PIL Image or numpy.ndarray): Image to be converted to tensor.
45
46 Returns:
47 Tensor: Converted image.
48 """
49 if not(_is_pil_image(pic) or _is_numpy_image(pic)):
50 raise TypeError('pic should be PIL Image or ndarray. Got {}'.format(type(pic)))
51
52 if isinstance(pic, np.ndarray):
53 # handle numpy array
54 if pic.ndim == 2:
55 pic = pic[:, :, None]
56
57 img = torch.from_numpy(pic.transpose((2, 0, 1)))
58 # backward compatibility
59 if isinstance(img, torch.ByteTensor):
60 return img.float().div(255)
61 else:
62 return img
63
64 if accimage is not None and isinstance(pic, accimage.Image):
65 nppic = np.zeros([pic.channels, pic.height, pic.width], dtype=np.float32)
66 pic.copyto(nppic)
67 return torch.from_numpy(nppic)
68
69 # handle PIL Image
70 if pic.mode == 'I':
71 img = torch.from_numpy(np.array(pic, np.int32, copy=False))
72 elif pic.mode == 'I;16':
73 img = torch.from_numpy(np.array(pic, np.int16, copy=False))
74 elif pic.mode == 'F':
75 img = torch.from_numpy(np.array(pic, np.float32, copy=False))
76 elif pic.mode == '1':
77 img = 255 * torch.from_numpy(np.array(pic, np.uint8, copy=False))
78 else:
79 img = torch.ByteTensor(torch.ByteStorage.from_buffer(pic.tobytes()))
80 # PIL image mode: L, LA, P, I, F, RGB, YCbCr, RGBA, CMYK
81 if pic.mode == 'YCbCr':
82 nchannel = 3
83 elif pic.mode == 'I;16':
84 nchannel = 1
85 else:
86 nchannel = len(pic.mode)
87 img = img.view(pic.size[1], pic.size[0], nchannel)
88 # put it from HWC to CHW format
89 # yikes, this transpose takes 80% of the loading time/CPU
90 img = img.transpose(0, 1).transpose(0, 2).contiguous()
91 if isinstance(img, torch.ByteTensor):
92 return img.float().div(255)
93 else:
94 return img
95
96
97 def to_pil_image(pic, mode=None):
98 """Convert a tensor or an ndarray to PIL Image.
99
100 See :class:`~torchvision.transforms.ToPILImage` for more details.
101
102 Args:
103 pic (Tensor or numpy.ndarray): Image to be converted to PIL Image.
104 mode (`PIL.Image mode`_): color space and pixel depth of input data (optional).
105
106 .. _PIL.Image mode: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#concept-modes
107
108 Returns:
109 PIL Image: Image converted to PIL Image.
110 """
111 if not(isinstance(pic, torch.Tensor) or isinstance(pic, np.ndarray)):
112 raise TypeError('pic should be Tensor or ndarray. Got {}.'.format(type(pic)))
113
114 elif isinstance(pic, torch.Tensor):
115 if pic.ndimension() not in {2, 3}:
116 raise ValueError('pic should be 2/3 dimensional. Got {} dimensions.'.format(pic.ndimension()))
117
118 elif pic.ndimension() == 2:
119 # if 2D image, add channel dimension (CHW)
120 pic = pic.unsqueeze(0)
121
122 elif isinstance(pic, np.ndarray):
123 if pic.ndim not in {2, 3}:
124 raise ValueError('pic should be 2/3 dimensional. Got {} dimensions.'.format(pic.ndim))
125
126 elif pic.ndim == 2:
127 # if 2D image, add channel dimension (HWC)
128 pic = np.expand_dims(pic, 2)
129
130 npimg = pic
131 if isinstance(pic, torch.FloatTensor):
132 pic = pic.mul(255).byte()
133 if isinstance(pic, torch.Tensor):
134 npimg = np.transpose(pic.numpy(), (1, 2, 0))
135
136 if not isinstance(npimg, np.ndarray):
137 raise TypeError('Input pic must be a torch.Tensor or NumPy ndarray, ' +
138 'not {}'.format(type(npimg)))
139
140 if npimg.shape[2] == 1:
141 expected_mode = None
142 npimg = npimg[:, :, 0]
143 if npimg.dtype == np.uint8:
144 expected_mode = 'L'
145 elif npimg.dtype == np.int16:
146 expected_mode = 'I;16'
147 elif npimg.dtype == np.int32:
148 expected_mode = 'I'
149 elif npimg.dtype == np.float32:
150 expected_mode = 'F'
151 if mode is not None and mode != expected_mode:
152 raise ValueError("Incorrect mode ({}) supplied for input type {}. Should be {}"
153                                  .format(mode, npimg.dtype, expected_mode))
154 mode = expected_mode
155
156 elif npimg.shape[2] == 2:
157 permitted_2_channel_modes = ['LA']
158 if mode is not None and mode not in permitted_2_channel_modes:
159 raise ValueError("Only modes {} are supported for 2D inputs".format(permitted_2_channel_modes))
160
161 if mode is None and npimg.dtype == np.uint8:
162 mode = 'LA'
163
164 elif npimg.shape[2] == 4:
165 permitted_4_channel_modes = ['RGBA', 'CMYK', 'RGBX']
166 if mode is not None and mode not in permitted_4_channel_modes:
167 raise ValueError("Only modes {} are supported for 4D inputs".format(permitted_4_channel_modes))
168
169 if mode is None and npimg.dtype == np.uint8:
170 mode = 'RGBA'
171 else:
172 permitted_3_channel_modes = ['RGB', 'YCbCr', 'HSV']
173 if mode is not None and mode not in permitted_3_channel_modes:
174 raise ValueError("Only modes {} are supported for 3D inputs".format(permitted_3_channel_modes))
175 if mode is None and npimg.dtype == np.uint8:
176 mode = 'RGB'
177
178 if mode is None:
179 raise TypeError('Input type {} is not supported'.format(npimg.dtype))
180
181 return Image.fromarray(npimg, mode=mode)
182
183
184 def normalize(tensor, mean, std, inplace=False):
185 """Normalize a tensor image with mean and standard deviation.
186
187 .. note::
188         This transform acts out of place by default, i.e., it does not mutate the input tensor.
189
190 See :class:`~torchvision.transforms.Normalize` for more details.
191
192 Args:
193 tensor (Tensor): Tensor image of size (C, H, W) to be normalized.
194 mean (sequence): Sequence of means for each channel.
195         std (sequence): Sequence of standard deviations for each channel.
196
197 Returns:
198 Tensor: Normalized Tensor image.
199 """
200 if not _is_tensor_image(tensor):
201 raise TypeError('tensor is not a torch image.')
202
203 if not inplace:
204 tensor = tensor.clone()
205
206 mean = torch.tensor(mean, dtype=torch.float32, device=tensor.device)
207 std = torch.tensor(std, dtype=torch.float32, device=tensor.device)
208 tensor.sub_(mean[:, None, None]).div_(std[:, None, None])
209 return tensor
210
211
212 def resize(img, size, interpolation=Image.BILINEAR):
213 r"""Resize the input PIL Image to the given size.
214
215 Args:
216 img (PIL Image): Image to be resized.
217 size (sequence or int): Desired output size. If size is a sequence like
218 (h, w), the output size will be matched to this. If size is an int,
219             the smaller edge of the image will be matched to this number maintaining
220 the aspect ratio. i.e, if height > width, then image will be rescaled to
221 :math:`\left(\text{size} \times \frac{\text{height}}{\text{width}}, \text{size}\right)`
222 interpolation (int, optional): Desired interpolation. Default is
223 ``PIL.Image.BILINEAR``
224
225 Returns:
226 PIL Image: Resized image.
227 """
228 if not _is_pil_image(img):
229 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
230 if not (isinstance(size, int) or (isinstance(size, Iterable) and len(size) == 2)):
231 raise TypeError('Got inappropriate size arg: {}'.format(size))
232
233 if isinstance(size, int):
234 w, h = img.size
235 if (w <= h and w == size) or (h <= w and h == size):
236 return img
237 if w < h:
238 ow = size
239 oh = int(size * h / w)
240 return img.resize((ow, oh), interpolation)
241 else:
242 oh = size
243 ow = int(size * w / h)
244 return img.resize((ow, oh), interpolation)
245 else:
246 return img.resize(size[::-1], interpolation)
247
248
249 def scale(*args, **kwargs):
250 warnings.warn("The use of the transforms.Scale transform is deprecated, " +
251 "please use transforms.Resize instead.")
252 return resize(*args, **kwargs)
253
254
255 def pad(img, padding, fill=0, padding_mode='constant'):
256 r"""Pad the given PIL Image on all sides with specified padding mode and fill value.
257
258 Args:
259 img (PIL Image): Image to be padded.
260 padding (int or tuple): Padding on each border. If a single int is provided this
261 is used to pad all borders. If tuple of length 2 is provided this is the padding
262 on left/right and top/bottom respectively. If a tuple of length 4 is provided
263 this is the padding for the left, top, right and bottom borders
264 respectively.
265 fill: Pixel fill value for constant fill. Default is 0. If a tuple of
266 length 3, it is used to fill R, G, B channels respectively.
267 This value is only used when the padding_mode is constant
268 padding_mode: Type of padding. Should be: constant, edge, reflect or symmetric. Default is constant.
269
270 - constant: pads with a constant value, this value is specified with fill
271
272 - edge: pads with the last value on the edge of the image
273
274 - reflect: pads with reflection of image (without repeating the last value on the edge)
275
276 padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode
277 will result in [3, 2, 1, 2, 3, 4, 3, 2]
278
279 - symmetric: pads with reflection of image (repeating the last value on the edge)
280
281 padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode
282 will result in [2, 1, 1, 2, 3, 4, 4, 3]
283
284 Returns:
285 PIL Image: Padded image.
286 """
287 if not _is_pil_image(img):
288 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
289
290 if not isinstance(padding, (numbers.Number, tuple)):
291 raise TypeError('Got inappropriate padding arg')
292 if not isinstance(fill, (numbers.Number, str, tuple)):
293 raise TypeError('Got inappropriate fill arg')
294 if not isinstance(padding_mode, str):
295 raise TypeError('Got inappropriate padding_mode arg')
296
297 if isinstance(padding, Sequence) and len(padding) not in [2, 4]:
298 raise ValueError("Padding must be an int or a 2, or 4 element tuple, not a " +
299 "{} element tuple".format(len(padding)))
300
301 assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric'], \
302 'Padding mode should be either constant, edge, reflect or symmetric'
303
304 if padding_mode == 'constant':
305 if img.mode == 'P':
306 palette = img.getpalette()
307 image = ImageOps.expand(img, border=padding, fill=fill)
308 image.putpalette(palette)
309 return image
310
311 return ImageOps.expand(img, border=padding, fill=fill)
312 else:
313 if isinstance(padding, int):
314 pad_left = pad_right = pad_top = pad_bottom = padding
315 if isinstance(padding, Sequence) and len(padding) == 2:
316 pad_left = pad_right = padding[0]
317 pad_top = pad_bottom = padding[1]
318 if isinstance(padding, Sequence) and len(padding) == 4:
319 pad_left = padding[0]
320 pad_top = padding[1]
321 pad_right = padding[2]
322 pad_bottom = padding[3]
323
324 if img.mode == 'P':
325 palette = img.getpalette()
326 img = np.asarray(img)
327 img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right)), padding_mode)
328 img = Image.fromarray(img)
329 img.putpalette(palette)
330 return img
331
332 img = np.asarray(img)
333 # RGB image
334 if len(img.shape) == 3:
335 img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right), (0, 0)), padding_mode)
336 # Grayscale image
337 if len(img.shape) == 2:
338 img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right)), padding_mode)
339
340 return Image.fromarray(img)
341
342
343 def crop(img, i, j, h, w):
344 """Crop the given PIL Image.
345
346 Args:
347 img (PIL Image): Image to be cropped.
348 i: Upper pixel coordinate.
349 j: Left pixel coordinate.
350 h: Height of the cropped image.
351 w: Width of the cropped image.
352
353 Returns:
354 PIL Image: Cropped image.
355 """
356 if not _is_pil_image(img):
357 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
358
359 return img.crop((j, i, j + w, i + h))
360
361
362 def center_crop(img, output_size):
363 if isinstance(output_size, numbers.Number):
364 output_size = (int(output_size), int(output_size))
365 w, h = img.size
366 th, tw = output_size
367 i = int(round((h - th) / 2.))
368 j = int(round((w - tw) / 2.))
369 return crop(img, i, j, th, tw)
370
371
372 def resized_crop(img, i, j, h, w, size, interpolation=Image.BILINEAR):
373 """Crop the given PIL Image and resize it to desired size.
374
375 Notably used in :class:`~torchvision.transforms.RandomResizedCrop`.
376
377 Args:
378 img (PIL Image): Image to be cropped.
379 i: i in (i,j) i.e coordinates of the upper left corner
380 j: j in (i,j) i.e coordinates of the upper left corner
381 h: Height of the cropped image.
382 w: Width of the cropped image.
383 size (sequence or int): Desired output size. Same semantics as ``resize``.
384 interpolation (int, optional): Desired interpolation. Default is
385 ``PIL.Image.BILINEAR``.
386 Returns:
387 PIL Image: Cropped image.
388 """
389 assert _is_pil_image(img), 'img should be PIL Image'
390 img = crop(img, i, j, h, w)
391 img = resize(img, size, interpolation)
392 return img
393
394
395 def hflip(img):
396 """Horizontally flip the given PIL Image.
397
398 Args:
399 img (PIL Image): Image to be flipped.
400
401 Returns:
402         PIL Image: Horizontally flipped image.
403 """
404 if not _is_pil_image(img):
405 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
406
407 return img.transpose(Image.FLIP_LEFT_RIGHT)
408
409
410 def vflip(img):
411 """Vertically flip the given PIL Image.
412
413 Args:
414 img (PIL Image): Image to be flipped.
415
416 Returns:
417 PIL Image: Vertically flipped image.
418 """
419 if not _is_pil_image(img):
420 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
421
422 return img.transpose(Image.FLIP_TOP_BOTTOM)
423
424
425 def five_crop(img, size):
426 """Crop the given PIL Image into four corners and the central crop.
427
428 .. Note::
429 This transform returns a tuple of images and there may be a
430 mismatch in the number of inputs and targets your ``Dataset`` returns.
431
432 Args:
433 size (sequence or int): Desired output size of the crop. If size is an
434 int instead of sequence like (h, w), a square crop (size, size) is
435 made.
436
437 Returns:
438 tuple: tuple (tl, tr, bl, br, center)
439 Corresponding top left, top right, bottom left, bottom right and center crop.
440 """
441 if isinstance(size, numbers.Number):
442 size = (int(size), int(size))
443 else:
444 assert len(size) == 2, "Please provide only two dimensions (h, w) for size."
445
446 w, h = img.size
447 crop_h, crop_w = size
448 if crop_w > w or crop_h > h:
449 raise ValueError("Requested crop size {} is bigger than input size {}".format(size,
450 (h, w)))
451 tl = img.crop((0, 0, crop_w, crop_h))
452 tr = img.crop((w - crop_w, 0, w, crop_h))
453 bl = img.crop((0, h - crop_h, crop_w, h))
454 br = img.crop((w - crop_w, h - crop_h, w, h))
455 center = center_crop(img, (crop_h, crop_w))
456 return (tl, tr, bl, br, center)
457
458
459 def ten_crop(img, size, vertical_flip=False):
460 r"""Crop the given PIL Image into four corners and the central crop plus the
461 flipped version of these (horizontal flipping is used by default).
462
463 .. Note::
464 This transform returns a tuple of images and there may be a
465 mismatch in the number of inputs and targets your ``Dataset`` returns.
466
467 Args:
468 size (sequence or int): Desired output size of the crop. If size is an
469 int instead of sequence like (h, w), a square crop (size, size) is
470 made.
471 vertical_flip (bool): Use vertical flipping instead of horizontal
472
473 Returns:
474 tuple: tuple (tl, tr, bl, br, center, tl_flip, tr_flip, bl_flip, br_flip, center_flip)
475 Corresponding top left, top right, bottom left, bottom right and center crop
476 and same for the flipped image.
477 """
478 if isinstance(size, numbers.Number):
479 size = (int(size), int(size))
480 else:
481 assert len(size) == 2, "Please provide only two dimensions (h, w) for size."
482
483 first_five = five_crop(img, size)
484
485 if vertical_flip:
486 img = vflip(img)
487 else:
488 img = hflip(img)
489
490 second_five = five_crop(img, size)
491 return first_five + second_five
492
493
494 def adjust_brightness(img, brightness_factor):
495 """Adjust brightness of an Image.
496
497 Args:
498 img (PIL Image): PIL Image to be adjusted.
499 brightness_factor (float): How much to adjust the brightness. Can be
500             any non-negative number. 0 gives a black image, 1 gives the
501 original image while 2 increases the brightness by a factor of 2.
502
503 Returns:
504 PIL Image: Brightness adjusted image.
505 """
506 if not _is_pil_image(img):
507 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
508
509 enhancer = ImageEnhance.Brightness(img)
510 img = enhancer.enhance(brightness_factor)
511 return img
512
513
514 def adjust_contrast(img, contrast_factor):
515 """Adjust contrast of an Image.
516
517 Args:
518 img (PIL Image): PIL Image to be adjusted.
519 contrast_factor (float): How much to adjust the contrast. Can be any
520             non-negative number. 0 gives a solid gray image, 1 gives the
521 original image while 2 increases the contrast by a factor of 2.
522
523 Returns:
524 PIL Image: Contrast adjusted image.
525 """
526 if not _is_pil_image(img):
527 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
528
529 enhancer = ImageEnhance.Contrast(img)
530 img = enhancer.enhance(contrast_factor)
531 return img
532
533
534 def adjust_saturation(img, saturation_factor):
535 """Adjust color saturation of an image.
536
537 Args:
538 img (PIL Image): PIL Image to be adjusted.
539 saturation_factor (float): How much to adjust the saturation. 0 will
540 give a black and white image, 1 will give the original image while
541 2 will enhance the saturation by a factor of 2.
542
543 Returns:
544 PIL Image: Saturation adjusted image.
545 """
546 if not _is_pil_image(img):
547 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
548
549 enhancer = ImageEnhance.Color(img)
550 img = enhancer.enhance(saturation_factor)
551 return img
552
553
554 def adjust_hue(img, hue_factor):
555 """Adjust hue of an image.
556
557 The image hue is adjusted by converting the image to HSV and
558 cyclically shifting the intensities in the hue channel (H).
559 The image is then converted back to original image mode.
560
561 `hue_factor` is the amount of shift in H channel and must be in the
562 interval `[-0.5, 0.5]`.
563
564 See `Hue`_ for more details.
565
566 .. _Hue: https://en.wikipedia.org/wiki/Hue
567
568 Args:
569 img (PIL Image): PIL Image to be adjusted.
570 hue_factor (float): How much to shift the hue channel. Should be in
571 [-0.5, 0.5]. 0.5 and -0.5 give complete reversal of hue channel in
572 HSV space in positive and negative direction respectively.
573 0 means no shift. Therefore, both -0.5 and 0.5 will give an image
574 with complementary colors while 0 gives the original image.
575
576 Returns:
577 PIL Image: Hue adjusted image.
578 """
579     if not (-0.5 <= hue_factor <= 0.5):
580         raise ValueError('hue_factor ({}) is not in [-0.5, 0.5].'.format(hue_factor))
581
582 if not _is_pil_image(img):
583 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
584
585 input_mode = img.mode
586 if input_mode in {'L', '1', 'I', 'F'}:
587 return img
588
589 h, s, v = img.convert('HSV').split()
590
591 np_h = np.array(h, dtype=np.uint8)
592     # uint8 addition takes care of rotation across boundaries
593 with np.errstate(over='ignore'):
594 np_h += np.uint8(hue_factor * 255)
595 h = Image.fromarray(np_h, 'L')
596
597 img = Image.merge('HSV', (h, s, v)).convert(input_mode)
598 return img
599
600
601 def adjust_gamma(img, gamma, gain=1):
602 r"""Perform gamma correction on an image.
603
604 Also known as Power Law Transform. Intensities in RGB mode are adjusted
605 based on the following equation:
606
607 .. math::
608 I_{\text{out}} = 255 \times \text{gain} \times \left(\frac{I_{\text{in}}}{255}\right)^{\gamma}
609
610 See `Gamma Correction`_ for more details.
611
612 .. _Gamma Correction: https://en.wikipedia.org/wiki/Gamma_correction
613
614 Args:
615 img (PIL Image): PIL Image to be adjusted.
616         gamma (float): Non-negative real number, same as :math:`\gamma` in the equation.
617             gamma larger than 1 makes the shadows darker,
618             while gamma smaller than 1 makes dark regions lighter.
619 gain (float): The constant multiplier.
620 """
621 if not _is_pil_image(img):
622 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
623
624 if gamma < 0:
625 raise ValueError('Gamma should be a non-negative real number')
626
627 input_mode = img.mode
628 img = img.convert('RGB')
629
630 gamma_map = [255 * gain * pow(ele / 255., gamma) for ele in range(256)] * 3
631 img = img.point(gamma_map) # use PIL's point-function to accelerate this part
632
633 img = img.convert(input_mode)
634 return img
635
636
637 def rotate(img, angle, resample=False, expand=False, center=None):
638 """Rotate the image by angle.
639
640
641 Args:
642 img (PIL Image): PIL Image to be rotated.
643         angle (float or int): Rotation angle in degrees, counter-clockwise.
644 resample (``PIL.Image.NEAREST`` or ``PIL.Image.BILINEAR`` or ``PIL.Image.BICUBIC``, optional):
645 An optional resampling filter. See `filters`_ for more information.
646 If omitted, or if the image has mode "1" or "P", it is set to ``PIL.Image.NEAREST``.
647 expand (bool, optional): Optional expansion flag.
648 If true, expands the output image to make it large enough to hold the entire rotated image.
649 If false or omitted, make the output image the same size as the input image.
650 Note that the expand flag assumes rotation around the center and no translation.
651 center (2-tuple, optional): Optional center of rotation.
652 Origin is the upper left corner.
653 Default is the center of the image.
654
655 .. _filters: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#filters
656
657 """
658
659 if not _is_pil_image(img):
660 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
661
662 return img.rotate(angle, resample, expand, center)
663
664
665 def _get_inverse_affine_matrix(center, angle, translate, scale, shear):
666 # Helper method to compute inverse matrix for affine transformation
667
668     # As explained in PIL.Image.rotate,
669     # we need to compute the INVERSE of the affine transformation matrix: M = T * C * RSS * C^-1
670 # where T is translation matrix: [1, 0, tx | 0, 1, ty | 0, 0, 1]
671 # C is translation matrix to keep center: [1, 0, cx | 0, 1, cy | 0, 0, 1]
672 # RSS is rotation with scale and shear matrix
673 # RSS(a, scale, shear) = [ cos(a)*scale -sin(a + shear)*scale 0]
674 # [ sin(a)*scale cos(a + shear)*scale 0]
675 # [ 0 0 1]
676 # Thus, the inverse is M^-1 = C * RSS^-1 * C^-1 * T^-1
677
678 angle = math.radians(angle)
679 shear = math.radians(shear)
680 scale = 1.0 / scale
681
682 # Inverted rotation matrix with scale and shear
683 d = math.cos(angle + shear) * math.cos(angle) + math.sin(angle + shear) * math.sin(angle)
684 matrix = [
685 math.cos(angle + shear), math.sin(angle + shear), 0,
686 -math.sin(angle), math.cos(angle), 0
687 ]
688 matrix = [scale / d * m for m in matrix]
689
690 # Apply inverse of translation and of center translation: RSS^-1 * C^-1 * T^-1
691 matrix[2] += matrix[0] * (-center[0] - translate[0]) + matrix[1] * (-center[1] - translate[1])
692 matrix[5] += matrix[3] * (-center[0] - translate[0]) + matrix[4] * (-center[1] - translate[1])
693
694 # Apply center translation: C * RSS^-1 * C^-1 * T^-1
695 matrix[2] += center[0]
696 matrix[5] += center[1]
697 return matrix
698
699
700 def affine(img, angle, translate, scale, shear, resample=0, fillcolor=None):
701 """Apply affine transformation on the image keeping image center invariant
702
703 Args:
704 img (PIL Image): PIL Image to be rotated.
705 angle (float or int): rotation angle in degrees between -180 and 180, clockwise direction.
706 translate (list or tuple of integers): horizontal and vertical translations (post-rotation translation)
707 scale (float): overall scale
708 shear (float): shear angle value in degrees between -180 to 180, clockwise direction.
709 resample (``PIL.Image.NEAREST`` or ``PIL.Image.BILINEAR`` or ``PIL.Image.BICUBIC``, optional):
710 An optional resampling filter.
711 See `filters`_ for more information.
712 If omitted, or if the image has mode "1" or "P", it is set to ``PIL.Image.NEAREST``.
713 fillcolor (int): Optional fill color for the area outside the transform in the output image. (Pillow>=5.0.0)
714 """
715 if not _is_pil_image(img):
716 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
717
718 assert isinstance(translate, (tuple, list)) and len(translate) == 2, \
719 "Argument translate should be a list or tuple of length 2"
720
721 assert scale > 0.0, "Argument scale should be positive"
722
723 output_size = img.size
724 center = (img.size[0] * 0.5 + 0.5, img.size[1] * 0.5 + 0.5)
725 matrix = _get_inverse_affine_matrix(center, angle, translate, scale, shear)
726 kwargs = {"fillcolor": fillcolor} if PILLOW_VERSION[0] == '5' else {}
727 return img.transform(output_size, Image.AFFINE, matrix, resample, **kwargs)
728
729
730 def to_grayscale(img, num_output_channels=1):
731     """Convert the given image to grayscale.
732
733 Args:
734 img (PIL Image): Image to be converted to grayscale.
735
736 Returns:
737 PIL Image: Grayscale version of the image.
738 if num_output_channels = 1 : returned image is single channel
739
740 if num_output_channels = 3 : returned image is 3 channel with r = g = b
741 """
742 if not _is_pil_image(img):
743 raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
744
745 if num_output_channels == 1:
746 img = img.convert('L')
747 elif num_output_channels == 3:
748 img = img.convert('L')
749 np_img = np.array(img, dtype=np.uint8)
750 np_img = np.dstack([np_img, np_img, np_img])
751 img = Image.fromarray(np_img, 'RGB')
752 else:
753 raise ValueError('num_output_channels should be either 1 or 3')
754
755 return img
756
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
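Localizing the bug for this record: the warnings in the issue point at `normalize` in `torchvision/transforms/functional.py` (lines 206-207 of the file), where `mean` and `std` are rebuilt with `torch.tensor(...)`. When the caller already passes tensors, as in the issue's snippet, `torch.tensor` copy-constructs from them and PyTorch emits the `UserWarning` on every call; the reference patch below swaps in `torch.as_tensor`, which reuses the caller's tensor when the requested dtype and device already match.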
| diff --git a/torchvision/transforms/functional.py b/torchvision/transforms/functional.py
--- a/torchvision/transforms/functional.py
+++ b/torchvision/transforms/functional.py
@@ -203,8 +203,8 @@
if not inplace:
tensor = tensor.clone()
- mean = torch.tensor(mean, dtype=torch.float32, device=tensor.device)
- std = torch.tensor(std, dtype=torch.float32, device=tensor.device)
+ mean = torch.as_tensor(mean, dtype=torch.float32, device=tensor.device)
+ std = torch.as_tensor(std, dtype=torch.float32, device=tensor.device)
tensor.sub_(mean[:, None, None]).div_(std[:, None, None])
return tensor
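Why the one-line swap is enough: `torch.tensor` always allocates a new tensor and copies its input, and since PyTorch 1.0 it warns when that input is already a tensor, while `torch.as_tensor` returns the input itself whenever the requested dtype and device already match. A minimal sketch of the difference, assuming PyTorch >= 1.0; the variable names are illustrative only:

```python
import warnings

import torch

mean = torch.tensor([1.0, 2.0, 3.0])  # the caller already holds a tensor, as in the issue

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    copied = torch.tensor(mean, dtype=torch.float32)      # copy-constructs: emits the UserWarning
    aliased = torch.as_tensor(mean, dtype=torch.float32)  # dtype/device match: no copy, no warning

print(len(caught))                            # 1 -> only the torch.tensor call warned
print(copied.data_ptr() == mean.data_ptr())   # False -> torch.tensor made a fresh copy
print(aliased.data_ptr() == mean.data_ptr())  # True  -> torch.as_tensor reused the storage
```

Plain Python lists for `mean` and `std` keep working after the patch as well, since `torch.as_tensor` falls back to constructing a new tensor for non-tensor inputs.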
| {"golden_diff": "diff --git a/torchvision/transforms/functional.py b/torchvision/transforms/functional.py\n--- a/torchvision/transforms/functional.py\n+++ b/torchvision/transforms/functional.py\n@@ -203,8 +203,8 @@\n if not inplace:\n tensor = tensor.clone()\n \n- mean = torch.tensor(mean, dtype=torch.float32, device=tensor.device)\n- std = torch.tensor(std, dtype=torch.float32, device=tensor.device)\n+ mean = torch.as_tensor(mean, dtype=torch.float32, device=tensor.device)\n+ std = torch.as_tensor(std, dtype=torch.float32, device=tensor.device)\n tensor.sub_(mean[:, None, None]).div_(std[:, None, None])\n return tensor\n", "issue": "`UserWarning` when applying `Normalize`\nHi!\r\npytorch's version: 1.0.1.post2\r\ntorchvision's version: 0.2.2.post3\r\n\r\nSorry for the formatting, but you need to scroll the snippet to the right in order to see the warning.\r\n\r\n```python\r\n>>> from torch import tensor\r\n>>> from torchvision.transforms import Normalize\r\n>>> n = Normalize(tensor([1.0, 2.0, 3.0]), tensor([1.0, 2.0, 3.0]))\r\n>>> t = tensor([1.0, 2.0, 3.0]).view(3, 1, 1)\r\n>>> n(t)\r\n/home/ygorishniy/miniconda3/envs/x/lib/python3.6/site-packages/torchvision/transforms/functional.py:206: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\r\n mean = torch.tensor(mean, dtype=torch.float32)\r\n/home/ygorishniy/miniconda3/envs/x/lib/python3.6/site-packages/torchvision/transforms/functional.py:207: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\r\n std = torch.tensor(std, dtype=torch.float32)\r\nOut[17]: \r\ntensor([[[0.]],\r\n [[0.]],\r\n [[0.]]])\r\n```\n", "before_files": [{"content": "from __future__ import division\nimport torch\nimport sys\nimport math\nfrom PIL import Image, ImageOps, ImageEnhance, PILLOW_VERSION\ntry:\n import accimage\nexcept ImportError:\n accimage = None\nimport numpy as np\nimport numbers\nimport collections\nimport warnings\n\nif sys.version_info < (3, 3):\n Sequence = collections.Sequence\n Iterable = collections.Iterable\nelse:\n Sequence = collections.abc.Sequence\n Iterable = collections.abc.Iterable\n\n\ndef _is_pil_image(img):\n if accimage is not None:\n return isinstance(img, (Image.Image, accimage.Image))\n else:\n return isinstance(img, Image.Image)\n\n\ndef _is_tensor_image(img):\n return torch.is_tensor(img) and img.ndimension() == 3\n\n\ndef _is_numpy_image(img):\n return isinstance(img, np.ndarray) and (img.ndim in {2, 3})\n\n\ndef to_tensor(pic):\n \"\"\"Convert a ``PIL Image`` or ``numpy.ndarray`` to tensor.\n\n See ``ToTensor`` for more details.\n\n Args:\n pic (PIL Image or numpy.ndarray): Image to be converted to tensor.\n\n Returns:\n Tensor: Converted image.\n \"\"\"\n if not(_is_pil_image(pic) or _is_numpy_image(pic)):\n raise TypeError('pic should be PIL Image or ndarray. 
Got {}'.format(type(pic)))\n\n if isinstance(pic, np.ndarray):\n # handle numpy array\n if pic.ndim == 2:\n pic = pic[:, :, None]\n\n img = torch.from_numpy(pic.transpose((2, 0, 1)))\n # backward compatibility\n if isinstance(img, torch.ByteTensor):\n return img.float().div(255)\n else:\n return img\n\n if accimage is not None and isinstance(pic, accimage.Image):\n nppic = np.zeros([pic.channels, pic.height, pic.width], dtype=np.float32)\n pic.copyto(nppic)\n return torch.from_numpy(nppic)\n\n # handle PIL Image\n if pic.mode == 'I':\n img = torch.from_numpy(np.array(pic, np.int32, copy=False))\n elif pic.mode == 'I;16':\n img = torch.from_numpy(np.array(pic, np.int16, copy=False))\n elif pic.mode == 'F':\n img = torch.from_numpy(np.array(pic, np.float32, copy=False))\n elif pic.mode == '1':\n img = 255 * torch.from_numpy(np.array(pic, np.uint8, copy=False))\n else:\n img = torch.ByteTensor(torch.ByteStorage.from_buffer(pic.tobytes()))\n # PIL image mode: L, LA, P, I, F, RGB, YCbCr, RGBA, CMYK\n if pic.mode == 'YCbCr':\n nchannel = 3\n elif pic.mode == 'I;16':\n nchannel = 1\n else:\n nchannel = len(pic.mode)\n img = img.view(pic.size[1], pic.size[0], nchannel)\n # put it from HWC to CHW format\n # yikes, this transpose takes 80% of the loading time/CPU\n img = img.transpose(0, 1).transpose(0, 2).contiguous()\n if isinstance(img, torch.ByteTensor):\n return img.float().div(255)\n else:\n return img\n\n\ndef to_pil_image(pic, mode=None):\n \"\"\"Convert a tensor or an ndarray to PIL Image.\n\n See :class:`~torchvision.transforms.ToPILImage` for more details.\n\n Args:\n pic (Tensor or numpy.ndarray): Image to be converted to PIL Image.\n mode (`PIL.Image mode`_): color space and pixel depth of input data (optional).\n\n .. _PIL.Image mode: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#concept-modes\n\n Returns:\n PIL Image: Image converted to PIL Image.\n \"\"\"\n if not(isinstance(pic, torch.Tensor) or isinstance(pic, np.ndarray)):\n raise TypeError('pic should be Tensor or ndarray. Got {}.'.format(type(pic)))\n\n elif isinstance(pic, torch.Tensor):\n if pic.ndimension() not in {2, 3}:\n raise ValueError('pic should be 2/3 dimensional. Got {} dimensions.'.format(pic.ndimension()))\n\n elif pic.ndimension() == 2:\n # if 2D image, add channel dimension (CHW)\n pic = pic.unsqueeze(0)\n\n elif isinstance(pic, np.ndarray):\n if pic.ndim not in {2, 3}:\n raise ValueError('pic should be 2/3 dimensional. Got {} dimensions.'.format(pic.ndim))\n\n elif pic.ndim == 2:\n # if 2D image, add channel dimension (HWC)\n pic = np.expand_dims(pic, 2)\n\n npimg = pic\n if isinstance(pic, torch.FloatTensor):\n pic = pic.mul(255).byte()\n if isinstance(pic, torch.Tensor):\n npimg = np.transpose(pic.numpy(), (1, 2, 0))\n\n if not isinstance(npimg, np.ndarray):\n raise TypeError('Input pic must be a torch.Tensor or NumPy ndarray, ' +\n 'not {}'.format(type(npimg)))\n\n if npimg.shape[2] == 1:\n expected_mode = None\n npimg = npimg[:, :, 0]\n if npimg.dtype == np.uint8:\n expected_mode = 'L'\n elif npimg.dtype == np.int16:\n expected_mode = 'I;16'\n elif npimg.dtype == np.int32:\n expected_mode = 'I'\n elif npimg.dtype == np.float32:\n expected_mode = 'F'\n if mode is not None and mode != expected_mode:\n raise ValueError(\"Incorrect mode ({}) supplied for input type {}. 
Should be {}\"\n .format(mode, np.dtype, expected_mode))\n mode = expected_mode\n\n elif npimg.shape[2] == 2:\n permitted_2_channel_modes = ['LA']\n if mode is not None and mode not in permitted_2_channel_modes:\n raise ValueError(\"Only modes {} are supported for 2D inputs\".format(permitted_2_channel_modes))\n\n if mode is None and npimg.dtype == np.uint8:\n mode = 'LA'\n\n elif npimg.shape[2] == 4:\n permitted_4_channel_modes = ['RGBA', 'CMYK', 'RGBX']\n if mode is not None and mode not in permitted_4_channel_modes:\n raise ValueError(\"Only modes {} are supported for 4D inputs\".format(permitted_4_channel_modes))\n\n if mode is None and npimg.dtype == np.uint8:\n mode = 'RGBA'\n else:\n permitted_3_channel_modes = ['RGB', 'YCbCr', 'HSV']\n if mode is not None and mode not in permitted_3_channel_modes:\n raise ValueError(\"Only modes {} are supported for 3D inputs\".format(permitted_3_channel_modes))\n if mode is None and npimg.dtype == np.uint8:\n mode = 'RGB'\n\n if mode is None:\n raise TypeError('Input type {} is not supported'.format(npimg.dtype))\n\n return Image.fromarray(npimg, mode=mode)\n\n\ndef normalize(tensor, mean, std, inplace=False):\n \"\"\"Normalize a tensor image with mean and standard deviation.\n\n .. note::\n This transform acts out of place by default, i.e., it does not mutates the input tensor.\n\n See :class:`~torchvision.transforms.Normalize` for more details.\n\n Args:\n tensor (Tensor): Tensor image of size (C, H, W) to be normalized.\n mean (sequence): Sequence of means for each channel.\n std (sequence): Sequence of standard deviations for each channely.\n\n Returns:\n Tensor: Normalized Tensor image.\n \"\"\"\n if not _is_tensor_image(tensor):\n raise TypeError('tensor is not a torch image.')\n\n if not inplace:\n tensor = tensor.clone()\n\n mean = torch.tensor(mean, dtype=torch.float32, device=tensor.device)\n std = torch.tensor(std, dtype=torch.float32, device=tensor.device)\n tensor.sub_(mean[:, None, None]).div_(std[:, None, None])\n return tensor\n\n\ndef resize(img, size, interpolation=Image.BILINEAR):\n r\"\"\"Resize the input PIL Image to the given size.\n\n Args:\n img (PIL Image): Image to be resized.\n size (sequence or int): Desired output size. If size is a sequence like\n (h, w), the output size will be matched to this. If size is an int,\n the smaller edge of the image will be matched to this number maintaing\n the aspect ratio. i.e, if height > width, then image will be rescaled to\n :math:`\\left(\\text{size} \\times \\frac{\\text{height}}{\\text{width}}, \\text{size}\\right)`\n interpolation (int, optional): Desired interpolation. Default is\n ``PIL.Image.BILINEAR``\n\n Returns:\n PIL Image: Resized image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. 
Got {}'.format(type(img)))\n if not (isinstance(size, int) or (isinstance(size, Iterable) and len(size) == 2)):\n raise TypeError('Got inappropriate size arg: {}'.format(size))\n\n if isinstance(size, int):\n w, h = img.size\n if (w <= h and w == size) or (h <= w and h == size):\n return img\n if w < h:\n ow = size\n oh = int(size * h / w)\n return img.resize((ow, oh), interpolation)\n else:\n oh = size\n ow = int(size * w / h)\n return img.resize((ow, oh), interpolation)\n else:\n return img.resize(size[::-1], interpolation)\n\n\ndef scale(*args, **kwargs):\n warnings.warn(\"The use of the transforms.Scale transform is deprecated, \" +\n \"please use transforms.Resize instead.\")\n return resize(*args, **kwargs)\n\n\ndef pad(img, padding, fill=0, padding_mode='constant'):\n r\"\"\"Pad the given PIL Image on all sides with specified padding mode and fill value.\n\n Args:\n img (PIL Image): Image to be padded.\n padding (int or tuple): Padding on each border. If a single int is provided this\n is used to pad all borders. If tuple of length 2 is provided this is the padding\n on left/right and top/bottom respectively. If a tuple of length 4 is provided\n this is the padding for the left, top, right and bottom borders\n respectively.\n fill: Pixel fill value for constant fill. Default is 0. If a tuple of\n length 3, it is used to fill R, G, B channels respectively.\n This value is only used when the padding_mode is constant\n padding_mode: Type of padding. Should be: constant, edge, reflect or symmetric. Default is constant.\n\n - constant: pads with a constant value, this value is specified with fill\n\n - edge: pads with the last value on the edge of the image\n\n - reflect: pads with reflection of image (without repeating the last value on the edge)\n\n padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode\n will result in [3, 2, 1, 2, 3, 4, 3, 2]\n\n - symmetric: pads with reflection of image (repeating the last value on the edge)\n\n padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode\n will result in [2, 1, 1, 2, 3, 4, 4, 3]\n\n Returns:\n PIL Image: Padded image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. 
Got {}'.format(type(img)))\n\n if not isinstance(padding, (numbers.Number, tuple)):\n raise TypeError('Got inappropriate padding arg')\n if not isinstance(fill, (numbers.Number, str, tuple)):\n raise TypeError('Got inappropriate fill arg')\n if not isinstance(padding_mode, str):\n raise TypeError('Got inappropriate padding_mode arg')\n\n if isinstance(padding, Sequence) and len(padding) not in [2, 4]:\n raise ValueError(\"Padding must be an int or a 2, or 4 element tuple, not a \" +\n \"{} element tuple\".format(len(padding)))\n\n assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric'], \\\n 'Padding mode should be either constant, edge, reflect or symmetric'\n\n if padding_mode == 'constant':\n if img.mode == 'P':\n palette = img.getpalette()\n image = ImageOps.expand(img, border=padding, fill=fill)\n image.putpalette(palette)\n return image\n\n return ImageOps.expand(img, border=padding, fill=fill)\n else:\n if isinstance(padding, int):\n pad_left = pad_right = pad_top = pad_bottom = padding\n if isinstance(padding, Sequence) and len(padding) == 2:\n pad_left = pad_right = padding[0]\n pad_top = pad_bottom = padding[1]\n if isinstance(padding, Sequence) and len(padding) == 4:\n pad_left = padding[0]\n pad_top = padding[1]\n pad_right = padding[2]\n pad_bottom = padding[3]\n\n if img.mode == 'P':\n palette = img.getpalette()\n img = np.asarray(img)\n img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right)), padding_mode)\n img = Image.fromarray(img)\n img.putpalette(palette)\n return img\n\n img = np.asarray(img)\n # RGB image\n if len(img.shape) == 3:\n img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right), (0, 0)), padding_mode)\n # Grayscale image\n if len(img.shape) == 2:\n img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right)), padding_mode)\n\n return Image.fromarray(img)\n\n\ndef crop(img, i, j, h, w):\n \"\"\"Crop the given PIL Image.\n\n Args:\n img (PIL Image): Image to be cropped.\n i: Upper pixel coordinate.\n j: Left pixel coordinate.\n h: Height of the cropped image.\n w: Width of the cropped image.\n\n Returns:\n PIL Image: Cropped image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n return img.crop((j, i, j + w, i + h))\n\n\ndef center_crop(img, output_size):\n if isinstance(output_size, numbers.Number):\n output_size = (int(output_size), int(output_size))\n w, h = img.size\n th, tw = output_size\n i = int(round((h - th) / 2.))\n j = int(round((w - tw) / 2.))\n return crop(img, i, j, th, tw)\n\n\ndef resized_crop(img, i, j, h, w, size, interpolation=Image.BILINEAR):\n \"\"\"Crop the given PIL Image and resize it to desired size.\n\n Notably used in :class:`~torchvision.transforms.RandomResizedCrop`.\n\n Args:\n img (PIL Image): Image to be cropped.\n i: i in (i,j) i.e coordinates of the upper left corner\n j: j in (i,j) i.e coordinates of the upper left corner\n h: Height of the cropped image.\n w: Width of the cropped image.\n size (sequence or int): Desired output size. Same semantics as ``resize``.\n interpolation (int, optional): Desired interpolation. 
Default is\n ``PIL.Image.BILINEAR``.\n Returns:\n PIL Image: Cropped image.\n \"\"\"\n assert _is_pil_image(img), 'img should be PIL Image'\n img = crop(img, i, j, h, w)\n img = resize(img, size, interpolation)\n return img\n\n\ndef hflip(img):\n \"\"\"Horizontally flip the given PIL Image.\n\n Args:\n img (PIL Image): Image to be flipped.\n\n Returns:\n PIL Image: Horizontall flipped image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n return img.transpose(Image.FLIP_LEFT_RIGHT)\n\n\ndef vflip(img):\n \"\"\"Vertically flip the given PIL Image.\n\n Args:\n img (PIL Image): Image to be flipped.\n\n Returns:\n PIL Image: Vertically flipped image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n return img.transpose(Image.FLIP_TOP_BOTTOM)\n\n\ndef five_crop(img, size):\n \"\"\"Crop the given PIL Image into four corners and the central crop.\n\n .. Note::\n This transform returns a tuple of images and there may be a\n mismatch in the number of inputs and targets your ``Dataset`` returns.\n\n Args:\n size (sequence or int): Desired output size of the crop. If size is an\n int instead of sequence like (h, w), a square crop (size, size) is\n made.\n\n Returns:\n tuple: tuple (tl, tr, bl, br, center)\n Corresponding top left, top right, bottom left, bottom right and center crop.\n \"\"\"\n if isinstance(size, numbers.Number):\n size = (int(size), int(size))\n else:\n assert len(size) == 2, \"Please provide only two dimensions (h, w) for size.\"\n\n w, h = img.size\n crop_h, crop_w = size\n if crop_w > w or crop_h > h:\n raise ValueError(\"Requested crop size {} is bigger than input size {}\".format(size,\n (h, w)))\n tl = img.crop((0, 0, crop_w, crop_h))\n tr = img.crop((w - crop_w, 0, w, crop_h))\n bl = img.crop((0, h - crop_h, crop_w, h))\n br = img.crop((w - crop_w, h - crop_h, w, h))\n center = center_crop(img, (crop_h, crop_w))\n return (tl, tr, bl, br, center)\n\n\ndef ten_crop(img, size, vertical_flip=False):\n r\"\"\"Crop the given PIL Image into four corners and the central crop plus the\n flipped version of these (horizontal flipping is used by default).\n\n .. Note::\n This transform returns a tuple of images and there may be a\n mismatch in the number of inputs and targets your ``Dataset`` returns.\n\n Args:\n size (sequence or int): Desired output size of the crop. If size is an\n int instead of sequence like (h, w), a square crop (size, size) is\n made.\n vertical_flip (bool): Use vertical flipping instead of horizontal\n\n Returns:\n tuple: tuple (tl, tr, bl, br, center, tl_flip, tr_flip, bl_flip, br_flip, center_flip)\n Corresponding top left, top right, bottom left, bottom right and center crop\n and same for the flipped image.\n \"\"\"\n if isinstance(size, numbers.Number):\n size = (int(size), int(size))\n else:\n assert len(size) == 2, \"Please provide only two dimensions (h, w) for size.\"\n\n first_five = five_crop(img, size)\n\n if vertical_flip:\n img = vflip(img)\n else:\n img = hflip(img)\n\n second_five = five_crop(img, size)\n return first_five + second_five\n\n\ndef adjust_brightness(img, brightness_factor):\n \"\"\"Adjust brightness of an Image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n brightness_factor (float): How much to adjust the brightness. Can be\n any non negative number. 
0 gives a black image, 1 gives the\n original image while 2 increases the brightness by a factor of 2.\n\n Returns:\n PIL Image: Brightness adjusted image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n enhancer = ImageEnhance.Brightness(img)\n img = enhancer.enhance(brightness_factor)\n return img\n\n\ndef adjust_contrast(img, contrast_factor):\n \"\"\"Adjust contrast of an Image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n contrast_factor (float): How much to adjust the contrast. Can be any\n non negative number. 0 gives a solid gray image, 1 gives the\n original image while 2 increases the contrast by a factor of 2.\n\n Returns:\n PIL Image: Contrast adjusted image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n enhancer = ImageEnhance.Contrast(img)\n img = enhancer.enhance(contrast_factor)\n return img\n\n\ndef adjust_saturation(img, saturation_factor):\n \"\"\"Adjust color saturation of an image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n saturation_factor (float): How much to adjust the saturation. 0 will\n give a black and white image, 1 will give the original image while\n 2 will enhance the saturation by a factor of 2.\n\n Returns:\n PIL Image: Saturation adjusted image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n enhancer = ImageEnhance.Color(img)\n img = enhancer.enhance(saturation_factor)\n return img\n\n\ndef adjust_hue(img, hue_factor):\n \"\"\"Adjust hue of an image.\n\n The image hue is adjusted by converting the image to HSV and\n cyclically shifting the intensities in the hue channel (H).\n The image is then converted back to original image mode.\n\n `hue_factor` is the amount of shift in H channel and must be in the\n interval `[-0.5, 0.5]`.\n\n See `Hue`_ for more details.\n\n .. _Hue: https://en.wikipedia.org/wiki/Hue\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n hue_factor (float): How much to shift the hue channel. Should be in\n [-0.5, 0.5]. 0.5 and -0.5 give complete reversal of hue channel in\n HSV space in positive and negative direction respectively.\n 0 means no shift. Therefore, both -0.5 and 0.5 will give an image\n with complementary colors while 0 gives the original image.\n\n Returns:\n PIL Image: Hue adjusted image.\n \"\"\"\n if not(-0.5 <= hue_factor <= 0.5):\n raise ValueError('hue_factor is not in [-0.5, 0.5].'.format(hue_factor))\n\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n input_mode = img.mode\n if input_mode in {'L', '1', 'I', 'F'}:\n return img\n\n h, s, v = img.convert('HSV').split()\n\n np_h = np.array(h, dtype=np.uint8)\n # uint8 addition take cares of rotation across boundaries\n with np.errstate(over='ignore'):\n np_h += np.uint8(hue_factor * 255)\n h = Image.fromarray(np_h, 'L')\n\n img = Image.merge('HSV', (h, s, v)).convert(input_mode)\n return img\n\n\ndef adjust_gamma(img, gamma, gain=1):\n r\"\"\"Perform gamma correction on an image.\n\n Also known as Power Law Transform. Intensities in RGB mode are adjusted\n based on the following equation:\n\n .. math::\n I_{\\text{out}} = 255 \\times \\text{gain} \\times \\left(\\frac{I_{\\text{in}}}{255}\\right)^{\\gamma}\n\n See `Gamma Correction`_ for more details.\n\n .. 
_Gamma Correction: https://en.wikipedia.org/wiki/Gamma_correction\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n gamma (float): Non negative real number, same as :math:`\\gamma` in the equation.\n gamma larger than 1 make the shadows darker,\n while gamma smaller than 1 make dark regions lighter.\n gain (float): The constant multiplier.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n if gamma < 0:\n raise ValueError('Gamma should be a non-negative real number')\n\n input_mode = img.mode\n img = img.convert('RGB')\n\n gamma_map = [255 * gain * pow(ele / 255., gamma) for ele in range(256)] * 3\n img = img.point(gamma_map) # use PIL's point-function to accelerate this part\n\n img = img.convert(input_mode)\n return img\n\n\ndef rotate(img, angle, resample=False, expand=False, center=None):\n \"\"\"Rotate the image by angle.\n\n\n Args:\n img (PIL Image): PIL Image to be rotated.\n angle (float or int): In degrees degrees counter clockwise order.\n resample (``PIL.Image.NEAREST`` or ``PIL.Image.BILINEAR`` or ``PIL.Image.BICUBIC``, optional):\n An optional resampling filter. See `filters`_ for more information.\n If omitted, or if the image has mode \"1\" or \"P\", it is set to ``PIL.Image.NEAREST``.\n expand (bool, optional): Optional expansion flag.\n If true, expands the output image to make it large enough to hold the entire rotated image.\n If false or omitted, make the output image the same size as the input image.\n Note that the expand flag assumes rotation around the center and no translation.\n center (2-tuple, optional): Optional center of rotation.\n Origin is the upper left corner.\n Default is the center of the image.\n\n .. _filters: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#filters\n\n \"\"\"\n\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. 
Got {}'.format(type(img)))\n\n return img.rotate(angle, resample, expand, center)\n\n\ndef _get_inverse_affine_matrix(center, angle, translate, scale, shear):\n # Helper method to compute inverse matrix for affine transformation\n\n # As it is explained in PIL.Image.rotate\n # We need compute INVERSE of affine transformation matrix: M = T * C * RSS * C^-1\n # where T is translation matrix: [1, 0, tx | 0, 1, ty | 0, 0, 1]\n # C is translation matrix to keep center: [1, 0, cx | 0, 1, cy | 0, 0, 1]\n # RSS is rotation with scale and shear matrix\n # RSS(a, scale, shear) = [ cos(a)*scale -sin(a + shear)*scale 0]\n # [ sin(a)*scale cos(a + shear)*scale 0]\n # [ 0 0 1]\n # Thus, the inverse is M^-1 = C * RSS^-1 * C^-1 * T^-1\n\n angle = math.radians(angle)\n shear = math.radians(shear)\n scale = 1.0 / scale\n\n # Inverted rotation matrix with scale and shear\n d = math.cos(angle + shear) * math.cos(angle) + math.sin(angle + shear) * math.sin(angle)\n matrix = [\n math.cos(angle + shear), math.sin(angle + shear), 0,\n -math.sin(angle), math.cos(angle), 0\n ]\n matrix = [scale / d * m for m in matrix]\n\n # Apply inverse of translation and of center translation: RSS^-1 * C^-1 * T^-1\n matrix[2] += matrix[0] * (-center[0] - translate[0]) + matrix[1] * (-center[1] - translate[1])\n matrix[5] += matrix[3] * (-center[0] - translate[0]) + matrix[4] * (-center[1] - translate[1])\n\n # Apply center translation: C * RSS^-1 * C^-1 * T^-1\n matrix[2] += center[0]\n matrix[5] += center[1]\n return matrix\n\n\ndef affine(img, angle, translate, scale, shear, resample=0, fillcolor=None):\n \"\"\"Apply affine transformation on the image keeping image center invariant\n\n Args:\n img (PIL Image): PIL Image to be rotated.\n angle (float or int): rotation angle in degrees between -180 and 180, clockwise direction.\n translate (list or tuple of integers): horizontal and vertical translations (post-rotation translation)\n scale (float): overall scale\n shear (float): shear angle value in degrees between -180 to 180, clockwise direction.\n resample (``PIL.Image.NEAREST`` or ``PIL.Image.BILINEAR`` or ``PIL.Image.BICUBIC``, optional):\n An optional resampling filter.\n See `filters`_ for more information.\n If omitted, or if the image has mode \"1\" or \"P\", it is set to ``PIL.Image.NEAREST``.\n fillcolor (int): Optional fill color for the area outside the transform in the output image. (Pillow>=5.0.0)\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n assert isinstance(translate, (tuple, list)) and len(translate) == 2, \\\n \"Argument translate should be a list or tuple of length 2\"\n\n assert scale > 0.0, \"Argument scale should be positive\"\n\n output_size = img.size\n center = (img.size[0] * 0.5 + 0.5, img.size[1] * 0.5 + 0.5)\n matrix = _get_inverse_affine_matrix(center, angle, translate, scale, shear)\n kwargs = {\"fillcolor\": fillcolor} if PILLOW_VERSION[0] == '5' else {}\n return img.transform(output_size, Image.AFFINE, matrix, resample, **kwargs)\n\n\ndef to_grayscale(img, num_output_channels=1):\n \"\"\"Convert image to grayscale version of image.\n\n Args:\n img (PIL Image): Image to be converted to grayscale.\n\n Returns:\n PIL Image: Grayscale version of the image.\n if num_output_channels = 1 : returned image is single channel\n\n if num_output_channels = 3 : returned image is 3 channel with r = g = b\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. 
Got {}'.format(type(img)))\n\n if num_output_channels == 1:\n img = img.convert('L')\n elif num_output_channels == 3:\n img = img.convert('L')\n np_img = np.array(img, dtype=np.uint8)\n np_img = np.dstack([np_img, np_img, np_img])\n img = Image.fromarray(np_img, 'RGB')\n else:\n raise ValueError('num_output_channels should be either 1 or 3')\n\n return img\n", "path": "torchvision/transforms/functional.py"}], "after_files": [{"content": "from __future__ import division\nimport torch\nimport sys\nimport math\nfrom PIL import Image, ImageOps, ImageEnhance, PILLOW_VERSION\ntry:\n import accimage\nexcept ImportError:\n accimage = None\nimport numpy as np\nimport numbers\nimport collections\nimport warnings\n\nif sys.version_info < (3, 3):\n Sequence = collections.Sequence\n Iterable = collections.Iterable\nelse:\n Sequence = collections.abc.Sequence\n Iterable = collections.abc.Iterable\n\n\ndef _is_pil_image(img):\n if accimage is not None:\n return isinstance(img, (Image.Image, accimage.Image))\n else:\n return isinstance(img, Image.Image)\n\n\ndef _is_tensor_image(img):\n return torch.is_tensor(img) and img.ndimension() == 3\n\n\ndef _is_numpy_image(img):\n return isinstance(img, np.ndarray) and (img.ndim in {2, 3})\n\n\ndef to_tensor(pic):\n \"\"\"Convert a ``PIL Image`` or ``numpy.ndarray`` to tensor.\n\n See ``ToTensor`` for more details.\n\n Args:\n pic (PIL Image or numpy.ndarray): Image to be converted to tensor.\n\n Returns:\n Tensor: Converted image.\n \"\"\"\n if not(_is_pil_image(pic) or _is_numpy_image(pic)):\n raise TypeError('pic should be PIL Image or ndarray. Got {}'.format(type(pic)))\n\n if isinstance(pic, np.ndarray):\n # handle numpy array\n if pic.ndim == 2:\n pic = pic[:, :, None]\n\n img = torch.from_numpy(pic.transpose((2, 0, 1)))\n # backward compatibility\n if isinstance(img, torch.ByteTensor):\n return img.float().div(255)\n else:\n return img\n\n if accimage is not None and isinstance(pic, accimage.Image):\n nppic = np.zeros([pic.channels, pic.height, pic.width], dtype=np.float32)\n pic.copyto(nppic)\n return torch.from_numpy(nppic)\n\n # handle PIL Image\n if pic.mode == 'I':\n img = torch.from_numpy(np.array(pic, np.int32, copy=False))\n elif pic.mode == 'I;16':\n img = torch.from_numpy(np.array(pic, np.int16, copy=False))\n elif pic.mode == 'F':\n img = torch.from_numpy(np.array(pic, np.float32, copy=False))\n elif pic.mode == '1':\n img = 255 * torch.from_numpy(np.array(pic, np.uint8, copy=False))\n else:\n img = torch.ByteTensor(torch.ByteStorage.from_buffer(pic.tobytes()))\n # PIL image mode: L, LA, P, I, F, RGB, YCbCr, RGBA, CMYK\n if pic.mode == 'YCbCr':\n nchannel = 3\n elif pic.mode == 'I;16':\n nchannel = 1\n else:\n nchannel = len(pic.mode)\n img = img.view(pic.size[1], pic.size[0], nchannel)\n # put it from HWC to CHW format\n # yikes, this transpose takes 80% of the loading time/CPU\n img = img.transpose(0, 1).transpose(0, 2).contiguous()\n if isinstance(img, torch.ByteTensor):\n return img.float().div(255)\n else:\n return img\n\n\ndef to_pil_image(pic, mode=None):\n \"\"\"Convert a tensor or an ndarray to PIL Image.\n\n See :class:`~torchvision.transforms.ToPILImage` for more details.\n\n Args:\n pic (Tensor or numpy.ndarray): Image to be converted to PIL Image.\n mode (`PIL.Image mode`_): color space and pixel depth of input data (optional).\n\n .. 
_PIL.Image mode: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#concept-modes\n\n Returns:\n PIL Image: Image converted to PIL Image.\n \"\"\"\n if not(isinstance(pic, torch.Tensor) or isinstance(pic, np.ndarray)):\n raise TypeError('pic should be Tensor or ndarray. Got {}.'.format(type(pic)))\n\n elif isinstance(pic, torch.Tensor):\n if pic.ndimension() not in {2, 3}:\n raise ValueError('pic should be 2/3 dimensional. Got {} dimensions.'.format(pic.ndimension()))\n\n elif pic.ndimension() == 2:\n # if 2D image, add channel dimension (CHW)\n pic = pic.unsqueeze(0)\n\n elif isinstance(pic, np.ndarray):\n if pic.ndim not in {2, 3}:\n raise ValueError('pic should be 2/3 dimensional. Got {} dimensions.'.format(pic.ndim))\n\n elif pic.ndim == 2:\n # if 2D image, add channel dimension (HWC)\n pic = np.expand_dims(pic, 2)\n\n npimg = pic\n if isinstance(pic, torch.FloatTensor):\n pic = pic.mul(255).byte()\n if isinstance(pic, torch.Tensor):\n npimg = np.transpose(pic.numpy(), (1, 2, 0))\n\n if not isinstance(npimg, np.ndarray):\n raise TypeError('Input pic must be a torch.Tensor or NumPy ndarray, ' +\n 'not {}'.format(type(npimg)))\n\n if npimg.shape[2] == 1:\n expected_mode = None\n npimg = npimg[:, :, 0]\n if npimg.dtype == np.uint8:\n expected_mode = 'L'\n elif npimg.dtype == np.int16:\n expected_mode = 'I;16'\n elif npimg.dtype == np.int32:\n expected_mode = 'I'\n elif npimg.dtype == np.float32:\n expected_mode = 'F'\n if mode is not None and mode != expected_mode:\n raise ValueError(\"Incorrect mode ({}) supplied for input type {}. Should be {}\"\n .format(mode, np.dtype, expected_mode))\n mode = expected_mode\n\n elif npimg.shape[2] == 2:\n permitted_2_channel_modes = ['LA']\n if mode is not None and mode not in permitted_2_channel_modes:\n raise ValueError(\"Only modes {} are supported for 2D inputs\".format(permitted_2_channel_modes))\n\n if mode is None and npimg.dtype == np.uint8:\n mode = 'LA'\n\n elif npimg.shape[2] == 4:\n permitted_4_channel_modes = ['RGBA', 'CMYK', 'RGBX']\n if mode is not None and mode not in permitted_4_channel_modes:\n raise ValueError(\"Only modes {} are supported for 4D inputs\".format(permitted_4_channel_modes))\n\n if mode is None and npimg.dtype == np.uint8:\n mode = 'RGBA'\n else:\n permitted_3_channel_modes = ['RGB', 'YCbCr', 'HSV']\n if mode is not None and mode not in permitted_3_channel_modes:\n raise ValueError(\"Only modes {} are supported for 3D inputs\".format(permitted_3_channel_modes))\n if mode is None and npimg.dtype == np.uint8:\n mode = 'RGB'\n\n if mode is None:\n raise TypeError('Input type {} is not supported'.format(npimg.dtype))\n\n return Image.fromarray(npimg, mode=mode)\n\n\ndef normalize(tensor, mean, std, inplace=False):\n \"\"\"Normalize a tensor image with mean and standard deviation.\n\n .. 
note::\n This transform acts out of place by default, i.e., it does not mutates the input tensor.\n\n See :class:`~torchvision.transforms.Normalize` for more details.\n\n Args:\n tensor (Tensor): Tensor image of size (C, H, W) to be normalized.\n mean (sequence): Sequence of means for each channel.\n std (sequence): Sequence of standard deviations for each channely.\n\n Returns:\n Tensor: Normalized Tensor image.\n \"\"\"\n if not _is_tensor_image(tensor):\n raise TypeError('tensor is not a torch image.')\n\n if not inplace:\n tensor = tensor.clone()\n\n mean = torch.as_tensor(mean, dtype=torch.float32, device=tensor.device)\n std = torch.as_tensor(std, dtype=torch.float32, device=tensor.device)\n tensor.sub_(mean[:, None, None]).div_(std[:, None, None])\n return tensor\n\n\ndef resize(img, size, interpolation=Image.BILINEAR):\n r\"\"\"Resize the input PIL Image to the given size.\n\n Args:\n img (PIL Image): Image to be resized.\n size (sequence or int): Desired output size. If size is a sequence like\n (h, w), the output size will be matched to this. If size is an int,\n the smaller edge of the image will be matched to this number maintaing\n the aspect ratio. i.e, if height > width, then image will be rescaled to\n :math:`\\left(\\text{size} \\times \\frac{\\text{height}}{\\text{width}}, \\text{size}\\right)`\n interpolation (int, optional): Desired interpolation. Default is\n ``PIL.Image.BILINEAR``\n\n Returns:\n PIL Image: Resized image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n if not (isinstance(size, int) or (isinstance(size, Iterable) and len(size) == 2)):\n raise TypeError('Got inappropriate size arg: {}'.format(size))\n\n if isinstance(size, int):\n w, h = img.size\n if (w <= h and w == size) or (h <= w and h == size):\n return img\n if w < h:\n ow = size\n oh = int(size * h / w)\n return img.resize((ow, oh), interpolation)\n else:\n oh = size\n ow = int(size * w / h)\n return img.resize((ow, oh), interpolation)\n else:\n return img.resize(size[::-1], interpolation)\n\n\ndef scale(*args, **kwargs):\n warnings.warn(\"The use of the transforms.Scale transform is deprecated, \" +\n \"please use transforms.Resize instead.\")\n return resize(*args, **kwargs)\n\n\ndef pad(img, padding, fill=0, padding_mode='constant'):\n r\"\"\"Pad the given PIL Image on all sides with specified padding mode and fill value.\n\n Args:\n img (PIL Image): Image to be padded.\n padding (int or tuple): Padding on each border. If a single int is provided this\n is used to pad all borders. If tuple of length 2 is provided this is the padding\n on left/right and top/bottom respectively. If a tuple of length 4 is provided\n this is the padding for the left, top, right and bottom borders\n respectively.\n fill: Pixel fill value for constant fill. Default is 0. If a tuple of\n length 3, it is used to fill R, G, B channels respectively.\n This value is only used when the padding_mode is constant\n padding_mode: Type of padding. Should be: constant, edge, reflect or symmetric. 
Default is constant.\n\n - constant: pads with a constant value, this value is specified with fill\n\n - edge: pads with the last value on the edge of the image\n\n - reflect: pads with reflection of image (without repeating the last value on the edge)\n\n padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode\n will result in [3, 2, 1, 2, 3, 4, 3, 2]\n\n - symmetric: pads with reflection of image (repeating the last value on the edge)\n\n padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode\n will result in [2, 1, 1, 2, 3, 4, 4, 3]\n\n Returns:\n PIL Image: Padded image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n if not isinstance(padding, (numbers.Number, tuple)):\n raise TypeError('Got inappropriate padding arg')\n if not isinstance(fill, (numbers.Number, str, tuple)):\n raise TypeError('Got inappropriate fill arg')\n if not isinstance(padding_mode, str):\n raise TypeError('Got inappropriate padding_mode arg')\n\n if isinstance(padding, Sequence) and len(padding) not in [2, 4]:\n raise ValueError(\"Padding must be an int or a 2, or 4 element tuple, not a \" +\n \"{} element tuple\".format(len(padding)))\n\n assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric'], \\\n 'Padding mode should be either constant, edge, reflect or symmetric'\n\n if padding_mode == 'constant':\n if img.mode == 'P':\n palette = img.getpalette()\n image = ImageOps.expand(img, border=padding, fill=fill)\n image.putpalette(palette)\n return image\n\n return ImageOps.expand(img, border=padding, fill=fill)\n else:\n if isinstance(padding, int):\n pad_left = pad_right = pad_top = pad_bottom = padding\n if isinstance(padding, Sequence) and len(padding) == 2:\n pad_left = pad_right = padding[0]\n pad_top = pad_bottom = padding[1]\n if isinstance(padding, Sequence) and len(padding) == 4:\n pad_left = padding[0]\n pad_top = padding[1]\n pad_right = padding[2]\n pad_bottom = padding[3]\n\n if img.mode == 'P':\n palette = img.getpalette()\n img = np.asarray(img)\n img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right)), padding_mode)\n img = Image.fromarray(img)\n img.putpalette(palette)\n return img\n\n img = np.asarray(img)\n # RGB image\n if len(img.shape) == 3:\n img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right), (0, 0)), padding_mode)\n # Grayscale image\n if len(img.shape) == 2:\n img = np.pad(img, ((pad_top, pad_bottom), (pad_left, pad_right)), padding_mode)\n\n return Image.fromarray(img)\n\n\ndef crop(img, i, j, h, w):\n \"\"\"Crop the given PIL Image.\n\n Args:\n img (PIL Image): Image to be cropped.\n i: Upper pixel coordinate.\n j: Left pixel coordinate.\n h: Height of the cropped image.\n w: Width of the cropped image.\n\n Returns:\n PIL Image: Cropped image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. 
Got {}'.format(type(img)))\n\n return img.crop((j, i, j + w, i + h))\n\n\ndef center_crop(img, output_size):\n if isinstance(output_size, numbers.Number):\n output_size = (int(output_size), int(output_size))\n w, h = img.size\n th, tw = output_size\n i = int(round((h - th) / 2.))\n j = int(round((w - tw) / 2.))\n return crop(img, i, j, th, tw)\n\n\ndef resized_crop(img, i, j, h, w, size, interpolation=Image.BILINEAR):\n \"\"\"Crop the given PIL Image and resize it to desired size.\n\n Notably used in :class:`~torchvision.transforms.RandomResizedCrop`.\n\n Args:\n img (PIL Image): Image to be cropped.\n i: i in (i,j) i.e coordinates of the upper left corner\n j: j in (i,j) i.e coordinates of the upper left corner\n h: Height of the cropped image.\n w: Width of the cropped image.\n size (sequence or int): Desired output size. Same semantics as ``resize``.\n interpolation (int, optional): Desired interpolation. Default is\n ``PIL.Image.BILINEAR``.\n Returns:\n PIL Image: Cropped image.\n \"\"\"\n assert _is_pil_image(img), 'img should be PIL Image'\n img = crop(img, i, j, h, w)\n img = resize(img, size, interpolation)\n return img\n\n\ndef hflip(img):\n \"\"\"Horizontally flip the given PIL Image.\n\n Args:\n img (PIL Image): Image to be flipped.\n\n Returns:\n PIL Image: Horizontall flipped image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n return img.transpose(Image.FLIP_LEFT_RIGHT)\n\n\ndef vflip(img):\n \"\"\"Vertically flip the given PIL Image.\n\n Args:\n img (PIL Image): Image to be flipped.\n\n Returns:\n PIL Image: Vertically flipped image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n return img.transpose(Image.FLIP_TOP_BOTTOM)\n\n\ndef five_crop(img, size):\n \"\"\"Crop the given PIL Image into four corners and the central crop.\n\n .. Note::\n This transform returns a tuple of images and there may be a\n mismatch in the number of inputs and targets your ``Dataset`` returns.\n\n Args:\n size (sequence or int): Desired output size of the crop. If size is an\n int instead of sequence like (h, w), a square crop (size, size) is\n made.\n\n Returns:\n tuple: tuple (tl, tr, bl, br, center)\n Corresponding top left, top right, bottom left, bottom right and center crop.\n \"\"\"\n if isinstance(size, numbers.Number):\n size = (int(size), int(size))\n else:\n assert len(size) == 2, \"Please provide only two dimensions (h, w) for size.\"\n\n w, h = img.size\n crop_h, crop_w = size\n if crop_w > w or crop_h > h:\n raise ValueError(\"Requested crop size {} is bigger than input size {}\".format(size,\n (h, w)))\n tl = img.crop((0, 0, crop_w, crop_h))\n tr = img.crop((w - crop_w, 0, w, crop_h))\n bl = img.crop((0, h - crop_h, crop_w, h))\n br = img.crop((w - crop_w, h - crop_h, w, h))\n center = center_crop(img, (crop_h, crop_w))\n return (tl, tr, bl, br, center)\n\n\ndef ten_crop(img, size, vertical_flip=False):\n r\"\"\"Crop the given PIL Image into four corners and the central crop plus the\n flipped version of these (horizontal flipping is used by default).\n\n .. Note::\n This transform returns a tuple of images and there may be a\n mismatch in the number of inputs and targets your ``Dataset`` returns.\n\n Args:\n size (sequence or int): Desired output size of the crop. 
If size is an\n int instead of sequence like (h, w), a square crop (size, size) is\n made.\n vertical_flip (bool): Use vertical flipping instead of horizontal\n\n Returns:\n tuple: tuple (tl, tr, bl, br, center, tl_flip, tr_flip, bl_flip, br_flip, center_flip)\n Corresponding top left, top right, bottom left, bottom right and center crop\n and same for the flipped image.\n \"\"\"\n if isinstance(size, numbers.Number):\n size = (int(size), int(size))\n else:\n assert len(size) == 2, \"Please provide only two dimensions (h, w) for size.\"\n\n first_five = five_crop(img, size)\n\n if vertical_flip:\n img = vflip(img)\n else:\n img = hflip(img)\n\n second_five = five_crop(img, size)\n return first_five + second_five\n\n\ndef adjust_brightness(img, brightness_factor):\n \"\"\"Adjust brightness of an Image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n brightness_factor (float): How much to adjust the brightness. Can be\n any non negative number. 0 gives a black image, 1 gives the\n original image while 2 increases the brightness by a factor of 2.\n\n Returns:\n PIL Image: Brightness adjusted image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n enhancer = ImageEnhance.Brightness(img)\n img = enhancer.enhance(brightness_factor)\n return img\n\n\ndef adjust_contrast(img, contrast_factor):\n \"\"\"Adjust contrast of an Image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n contrast_factor (float): How much to adjust the contrast. Can be any\n non negative number. 0 gives a solid gray image, 1 gives the\n original image while 2 increases the contrast by a factor of 2.\n\n Returns:\n PIL Image: Contrast adjusted image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n enhancer = ImageEnhance.Contrast(img)\n img = enhancer.enhance(contrast_factor)\n return img\n\n\ndef adjust_saturation(img, saturation_factor):\n \"\"\"Adjust color saturation of an image.\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n saturation_factor (float): How much to adjust the saturation. 0 will\n give a black and white image, 1 will give the original image while\n 2 will enhance the saturation by a factor of 2.\n\n Returns:\n PIL Image: Saturation adjusted image.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n enhancer = ImageEnhance.Color(img)\n img = enhancer.enhance(saturation_factor)\n return img\n\n\ndef adjust_hue(img, hue_factor):\n \"\"\"Adjust hue of an image.\n\n The image hue is adjusted by converting the image to HSV and\n cyclically shifting the intensities in the hue channel (H).\n The image is then converted back to original image mode.\n\n `hue_factor` is the amount of shift in H channel and must be in the\n interval `[-0.5, 0.5]`.\n\n See `Hue`_ for more details.\n\n .. _Hue: https://en.wikipedia.org/wiki/Hue\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n hue_factor (float): How much to shift the hue channel. Should be in\n [-0.5, 0.5]. 0.5 and -0.5 give complete reversal of hue channel in\n HSV space in positive and negative direction respectively.\n 0 means no shift. 
Therefore, both -0.5 and 0.5 will give an image\n with complementary colors while 0 gives the original image.\n\n Returns:\n PIL Image: Hue adjusted image.\n \"\"\"\n if not(-0.5 <= hue_factor <= 0.5):\n raise ValueError('hue_factor is not in [-0.5, 0.5].'.format(hue_factor))\n\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n input_mode = img.mode\n if input_mode in {'L', '1', 'I', 'F'}:\n return img\n\n h, s, v = img.convert('HSV').split()\n\n np_h = np.array(h, dtype=np.uint8)\n # uint8 addition take cares of rotation across boundaries\n with np.errstate(over='ignore'):\n np_h += np.uint8(hue_factor * 255)\n h = Image.fromarray(np_h, 'L')\n\n img = Image.merge('HSV', (h, s, v)).convert(input_mode)\n return img\n\n\ndef adjust_gamma(img, gamma, gain=1):\n r\"\"\"Perform gamma correction on an image.\n\n Also known as Power Law Transform. Intensities in RGB mode are adjusted\n based on the following equation:\n\n .. math::\n I_{\\text{out}} = 255 \\times \\text{gain} \\times \\left(\\frac{I_{\\text{in}}}{255}\\right)^{\\gamma}\n\n See `Gamma Correction`_ for more details.\n\n .. _Gamma Correction: https://en.wikipedia.org/wiki/Gamma_correction\n\n Args:\n img (PIL Image): PIL Image to be adjusted.\n gamma (float): Non negative real number, same as :math:`\\gamma` in the equation.\n gamma larger than 1 make the shadows darker,\n while gamma smaller than 1 make dark regions lighter.\n gain (float): The constant multiplier.\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n if gamma < 0:\n raise ValueError('Gamma should be a non-negative real number')\n\n input_mode = img.mode\n img = img.convert('RGB')\n\n gamma_map = [255 * gain * pow(ele / 255., gamma) for ele in range(256)] * 3\n img = img.point(gamma_map) # use PIL's point-function to accelerate this part\n\n img = img.convert(input_mode)\n return img\n\n\ndef rotate(img, angle, resample=False, expand=False, center=None):\n \"\"\"Rotate the image by angle.\n\n\n Args:\n img (PIL Image): PIL Image to be rotated.\n angle (float or int): In degrees degrees counter clockwise order.\n resample (``PIL.Image.NEAREST`` or ``PIL.Image.BILINEAR`` or ``PIL.Image.BICUBIC``, optional):\n An optional resampling filter. See `filters`_ for more information.\n If omitted, or if the image has mode \"1\" or \"P\", it is set to ``PIL.Image.NEAREST``.\n expand (bool, optional): Optional expansion flag.\n If true, expands the output image to make it large enough to hold the entire rotated image.\n If false or omitted, make the output image the same size as the input image.\n Note that the expand flag assumes rotation around the center and no translation.\n center (2-tuple, optional): Optional center of rotation.\n Origin is the upper left corner.\n Default is the center of the image.\n\n .. _filters: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#filters\n\n \"\"\"\n\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. 
Got {}'.format(type(img)))\n\n return img.rotate(angle, resample, expand, center)\n\n\ndef _get_inverse_affine_matrix(center, angle, translate, scale, shear):\n # Helper method to compute inverse matrix for affine transformation\n\n # As it is explained in PIL.Image.rotate\n # We need compute INVERSE of affine transformation matrix: M = T * C * RSS * C^-1\n # where T is translation matrix: [1, 0, tx | 0, 1, ty | 0, 0, 1]\n # C is translation matrix to keep center: [1, 0, cx | 0, 1, cy | 0, 0, 1]\n # RSS is rotation with scale and shear matrix\n # RSS(a, scale, shear) = [ cos(a)*scale -sin(a + shear)*scale 0]\n # [ sin(a)*scale cos(a + shear)*scale 0]\n # [ 0 0 1]\n # Thus, the inverse is M^-1 = C * RSS^-1 * C^-1 * T^-1\n\n angle = math.radians(angle)\n shear = math.radians(shear)\n scale = 1.0 / scale\n\n # Inverted rotation matrix with scale and shear\n d = math.cos(angle + shear) * math.cos(angle) + math.sin(angle + shear) * math.sin(angle)\n matrix = [\n math.cos(angle + shear), math.sin(angle + shear), 0,\n -math.sin(angle), math.cos(angle), 0\n ]\n matrix = [scale / d * m for m in matrix]\n\n # Apply inverse of translation and of center translation: RSS^-1 * C^-1 * T^-1\n matrix[2] += matrix[0] * (-center[0] - translate[0]) + matrix[1] * (-center[1] - translate[1])\n matrix[5] += matrix[3] * (-center[0] - translate[0]) + matrix[4] * (-center[1] - translate[1])\n\n # Apply center translation: C * RSS^-1 * C^-1 * T^-1\n matrix[2] += center[0]\n matrix[5] += center[1]\n return matrix\n\n\ndef affine(img, angle, translate, scale, shear, resample=0, fillcolor=None):\n \"\"\"Apply affine transformation on the image keeping image center invariant\n\n Args:\n img (PIL Image): PIL Image to be rotated.\n angle (float or int): rotation angle in degrees between -180 and 180, clockwise direction.\n translate (list or tuple of integers): horizontal and vertical translations (post-rotation translation)\n scale (float): overall scale\n shear (float): shear angle value in degrees between -180 to 180, clockwise direction.\n resample (``PIL.Image.NEAREST`` or ``PIL.Image.BILINEAR`` or ``PIL.Image.BICUBIC``, optional):\n An optional resampling filter.\n See `filters`_ for more information.\n If omitted, or if the image has mode \"1\" or \"P\", it is set to ``PIL.Image.NEAREST``.\n fillcolor (int): Optional fill color for the area outside the transform in the output image. (Pillow>=5.0.0)\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. Got {}'.format(type(img)))\n\n assert isinstance(translate, (tuple, list)) and len(translate) == 2, \\\n \"Argument translate should be a list or tuple of length 2\"\n\n assert scale > 0.0, \"Argument scale should be positive\"\n\n output_size = img.size\n center = (img.size[0] * 0.5 + 0.5, img.size[1] * 0.5 + 0.5)\n matrix = _get_inverse_affine_matrix(center, angle, translate, scale, shear)\n kwargs = {\"fillcolor\": fillcolor} if PILLOW_VERSION[0] == '5' else {}\n return img.transform(output_size, Image.AFFINE, matrix, resample, **kwargs)\n\n\ndef to_grayscale(img, num_output_channels=1):\n \"\"\"Convert image to grayscale version of image.\n\n Args:\n img (PIL Image): Image to be converted to grayscale.\n\n Returns:\n PIL Image: Grayscale version of the image.\n if num_output_channels = 1 : returned image is single channel\n\n if num_output_channels = 3 : returned image is 3 channel with r = g = b\n \"\"\"\n if not _is_pil_image(img):\n raise TypeError('img should be PIL Image. 
Got {}'.format(type(img)))\n\n if num_output_channels == 1:\n img = img.convert('L')\n elif num_output_channels == 3:\n img = img.convert('L')\n np_img = np.array(img, dtype=np.uint8)\n np_img = np.dstack([np_img, np_img, np_img])\n img = Image.fromarray(np_img, 'RGB')\n else:\n raise ValueError('num_output_channels should be either 1 or 3')\n\n return img\n", "path": "torchvision/transforms/functional.py"}]} |
gh_patches_debug_1630 | rasdani/github-patches | git_diff | cowrie__cowrie-1761 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Gcc "compile" file quits cowrie
**Describe the bug**
Running gcc to "compile" a file simply quits cowrie.
**To Reproduce**
Steps to reproduce the behavior:
1. Log in to the honeypot
2. Compile the file with `gcc file.c`
3. See the error; cowrie will quit (`Connection to localhost closed by remote host.`)
**Expected behavior**
The file should "compile" successfully instead of cowrie crashing.
**Server (please complete the following information):**
- OS: Ubuntu 22.04
- Python: Python 3.10
**Additional context**
```
2022-08-25T15:35:01.948821Z [HoneyPotSSHTransport,7728,127.0.0.1] CMD: gcc hi.c -o p
2022-08-25T15:35:01.950607Z [HoneyPotSSHTransport,7728,127.0.0.1] Command found: gcc hi.c -o p
2022-08-25T15:35:01.952849Z [HoneyPotSSHTransport,7728,127.0.0.1] Unhandled Error
Traceback (most recent call last):
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/python/log.py", line 96, in callWithLogger
return callWithContext({"system": lp}, func, *args, **kw)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/python/log.py", line 80, in callWithContext
return context.call({ILogContext: newCtx}, func, *args, **kw)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/python/context.py", line 117, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/python/context.py", line 82, in callWithContext
return func(*args, **kw)
--- <exception caught here> ---
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/internet/posixbase.py", line 683, in _doReadOrWrite
why = selectable.doRead()
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/internet/tcp.py", line 248, in doRead
return self._dataReceived(data)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/internet/tcp.py", line 253, in _dataReceived
rval = self.protocol.dataReceived(data)
File "/home/cowrie/cowrie/src/cowrie/ssh/transport.py", line 144, in dataReceived
self.dispatchMessage(messageNum, packet[1:])
File "/home/cowrie/cowrie/src/cowrie/ssh/transport.py", line 148, in dispatchMessage
transport.SSHServerTransport.dispatchMessage(self, message_num, payload)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/transport.py", line 790, in dispatchMessage
self.service.packetReceived(messageNum, payload)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/service.py", line 50, in packetReceived
return f(packet)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/connection.py", line 265, in ssh_CHANNEL_DATA
channel.dataReceived(data)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/session.py", line 173, in dataReceived
self.client.transport.write(data)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/session.py", line 233, in write
self.proto.dataReceived(data)
File "/home/cowrie/cowrie/src/cowrie/insults/insults.py", line 126, in dataReceived
insults.ServerProtocol.dataReceived(self, data)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/insults/insults.py", line 520, in dataReceived
self.terminalProtocol.keystrokeReceived(ch, None)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/recvline.py", line 435, in keystrokeReceived
m()
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 329, in handle_RETURN
return recvline.RecvLine.handle_RETURN(self)
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/recvline.py", line 493, in handle_RETURN
self.lineReceived(line)
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 190, in lineReceived
self.cmdstack[-1].lineReceived(line)
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 114, in lineReceived
self.runCommand()
File "/home/cowrie/cowrie/src/cowrie/shell/honeypot.py", line 329, in runCommand
self.protocol.call_command(pp, cmdclass, *cmd_array[0]["rargs"])
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 310, in call_command
HoneyPotBaseProtocol.call_command(self, pp, cmd, *args)
File "/home/cowrie/cowrie/src/cowrie/shell/protocol.py", line 199, in call_command
obj.start()
File "/home/cowrie/cowrie/src/cowrie/commands/gcc.py", line 135, in start
self.scheduled = reactor.callLater( # type: ignore[attr-defined]
File "/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/internet/base.py", line 868, in callLater
assert builtins.callable(callable), f"{callable} is not callable"
builtins.AssertionError: None is not callable
```
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/cowrie/commands/gcc.py`
Content:
```
1 # Copyright (c) 2013 Bas Stottelaar <basstottelaar [AT] gmail [DOT] com>
2
3 from __future__ import annotations
4
5 import getopt
6 import os
7 import random
8 import re
9 import time
10
11 from twisted.internet import reactor # type: ignore
12 from twisted.internet.defer import Deferred
13
14 from cowrie.core.config import CowrieConfig
15 from cowrie.shell.command import HoneyPotCommand
16
17 commands = {}
18
19
20 class Command_gcc(HoneyPotCommand):
21 # Name of program. Under OSX, you might consider i686-apple-darwin11-llvm-gcc-X.X
22 APP_NAME = "gcc"
23
24 # GCC verson, used in help, version and the commandline name gcc-X.X
25 APP_VERSION = (4, 7, 2)
26
27 # Random binary data, which looks awesome. You could change this to whatever you want, but this
28 # data will be put in the actual file and thus exposed to our hacker when he\she cats the file.
29 RANDOM_DATA = (
30 b"\x6a\x00\x48\x89\xe5\x48\x83\xe4\xf0\x48\x8b\x7d\x08\x48\x8d\x75\x10\x89\xfa"
31 b"\x83\xc2\x01\xc1\xe2\x03\x48\x01\xf2\x48\x89\xd1\xeb\x04\x48\x83\xc1\x08\x48"
32 b"\x83\x39\x00\x75\xf6\x48\x83\xc1\x08\xe8\x0c\x00\x00\x00\x89\xc7\xe8\xb9\x00"
33 b"\x00\x00\xf4\x90\x90\x90\x90\x55\x48\x89\xe5\x48\x83\xec\x40\x89\x7d\xfc\x48"
34 b"\x89\x75\xf0\x48\x8b\x45\xf0\x48\x8b\x00\x48\x83\xf8\x00\x75\x0c\xb8\x00\x00"
35 b"\x00\x00\x89\xc7\xe8\x8c\x00\x00\x00\x48\x8b\x45\xf0\x48\x8b\x40\x08\x30\xc9"
36 b"\x48\x89\xc7\x88\xc8\xe8\x7e\x00\x00\x00\x89\xc1\x89\x4d\xdc\x48\x8d\x0d\xd8"
37 b"\x01\x00\x00\x48\x89\xcf\x48\x89\x4d\xd0\xe8\x72\x00\x00\x00\x8b\x4d\xdc\x30"
38 b"\xd2\x48\x8d\x3d\xa4\x00\x00\x00\x89\xce\x88\x55\xcf\x48\x89\xc2\x8a\x45\xcf"
39 b"\xe8\x53\x00\x00\x00\x8b\x45\xdc\x88\x05\xc3\x01\x00\x00\x8b\x45\xdc\xc1\xe8"
40 b"\x08\x88\x05\xb8\x01\x00\x00\x8b\x45\xdc\xc1\xe8\x10\x88\x05\xad\x01\x00\x00"
41 b"\x8b\x45\xdc\xc1\xe8\x18\x88\x05\xa2\x01\x00\x00\x48\x8b\x45\xd0\x48\x89\x45"
42 b"\xe0\x48\x8b\x45\xe0\xff\xd0\x8b\x45\xec\x48\x83\xc4\x40\x5d\xc3\xff\x25\x3e"
43 b"\x01\x00\x00\xff\x25\x40\x01\x00\x00\xff\x25\x42\x01\x00\x00\xff\x25\x44\x01"
44 b"\x00\x00\x4c\x8d\x1d\x1d\x01\x00\x00\x41\x53\xff\x25\x0d\x01\x00\x00\x90\x68"
45 b"\x00\x00\x00\x00\xe9\xe6\xff\xff\xff\x68\x0c\x00\x00\x00\xe9\xdc\xff\xff\xff"
46 b"\x68\x1d\x00\x00\x00\xe9\xd2\xff\xff\xff\x68\x2b\x00\x00\x00\xe9\xc8\xff\xff"
47 b"\xff\x01\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x1c\x00\x00\x00\x00\x00"
48 b"\x00\x00\x1c\x00\x00\x00\x02\x00\x00\x00\x00\x0e\x00\x00\x34\x00\x00\x00\x34"
49 b"\x00\x00\x00\xf5\x0e\x00\x00\x00\x00\x00\x00\x34\x00\x00\x00\x03\x00\x00\x00"
50 b"\x0c\x00\x02\x00\x14\x00\x02\x00\x00\x00\x00\x01\x40\x00\x00\x00\x00\x00\x00"
51 b"\x01\x00\x00\x00"
52 )
53
54 scheduled: Deferred
55
56 def start(self):
57 """
58 Parse as much as possible from a GCC syntax and generate the output
59 that is requested. The file that is generated can be read (and will)
60 output garbage from an actual file, but when executed, it will generate
61 a segmentation fault.
62
63 The input files are expected to exists, but can be empty.
64
65 Verified syntaxes, including non-existing files:
66 * gcc test.c
67 * gcc test.c -o program
68 * gcc test1.c test2.c
69 * gcc test1.c test2.c -o program
70 * gcc test.c -o program -lm
71 * gcc -g test.c -o program -lm
72 * gcc test.c -DF_CPU=16000000 -I../etc -o program
73 * gcc test.c -O2 -o optimized_program
74 * gcc test.c -Wstrict-overflow=n -o overflowable_program
75
76 Others:
77 * gcc
78 * gcc -h
79 * gcc -v
80 * gcc --help
81 * gcc --version
82 """
83
84 output_file = None
85 input_files = 0
86 complete = True
87
88 # Parse options or display no files
89 try:
90 opts, args = getopt.gnu_getopt(
91 self.args, "ESchvgo:x:l:I:W:D:X:O:", ["help", "version", "param"]
92 )
93 except getopt.GetoptError:
94 self.no_files()
95 return
96
97 # Parse options
98 for o, a in opts:
99 if o in ("-v"):
100 self.version(short=False)
101 return
102 elif o in ("--version"):
103 self.version(short=True)
104 return
105 elif o in ("-h"):
106 self.arg_missing("-h")
107 return
108 elif o in ("--help"):
109 self.help()
110 return
111 elif o in ("-o"):
112 if len(a) == 0:
113 self.arg_missing("-o")
114 else:
115 output_file = a
116
117 # Check for *.c or *.cpp files
118 for value in args:
119 if ".c" in value.lower():
120 sourcefile = self.fs.resolve_path(value, self.protocol.cwd)
121
122 if self.fs.exists(sourcefile):
123 input_files = input_files + 1
124 else:
125 self.write(
126 f"{Command_gcc.APP_NAME}: {value}: No such file or directory\n"
127 )
128 complete = False
129
130 # To generate, or not
131 if input_files > 0 and complete:
132 timeout = 0.1 + random.random()
133
134 # Schedule call to make it more time consuming and real
135 self.scheduled = reactor.callLater( # type: ignore[attr-defined]
136 timeout, self.generate_file(output_file if output_file else "a.out")
137 )
138 else:
139 self.no_files()
140
141 def handle_CTRL_C(self):
142 """
143 Make sure the scheduled call will be canceled
144 """
145
146 if getattr(self, "scheduled", False):
147 self.scheduled.cancel()
148
149 def no_files(self):
150 """
151 Notify user there are no input files, and exit
152 """
153 self.write(
154 """gcc: fatal error: no input files
155 compilation terminated.\n"""
156 )
157 self.exit()
158
159 def version(self, short):
160 """
161 Print long or short version, and exit
162 """
163
164 # Generate version number
165 version = ".".join([str(v) for v in Command_gcc.APP_VERSION[:3]])
166 version_short = ".".join([str(v) for v in Command_gcc.APP_VERSION[:2]])
167
168 if short:
169 data = """{} (Debian {}-8) {}
170 Copyright (C) 2010 Free Software Foundation, Inc.
171 This is free software; see the source for copying conditions. There is NO
172 warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.""".format(
173 Command_gcc.APP_NAME, version, version
174 )
175 else:
176 data = """Using built-in specs.
177 COLLECT_GCC=gcc
178 COLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/4.7/lto-wrapper
179 Target: x86_64-linux-gnu
180 Configured with: ../src/configure -v --with-pkgversion=\'Debian {}-5\' --with-bugurl=file:///usr/share/doc/gcc-{}/README.Bugs --enable-languages=c,c++,fortran,objc,obj-c++ --prefix=/usr --program-suffix=-{} --enable-shared --enable-multiarch --enable-linker-build-id --with-system-zlib --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --with-gxx-include-dir=/usr/include/c++/{} --libdir=/usr/lib --enable-nls --enable-clocale=gnu --enable-libstdcxx-debug --enable-objc-gc --with-arch-32=i586 --with-tune=generic --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu
181 Thread model: posix
182 gcc version {} (Debian {}-5)""".format(
183 version, version_short, version_short, version_short, version, version
184 )
185
186 # Write
187 self.write(f"{data}\n")
188 self.exit()
189
190 def generate_file(self, outfile):
191 data = b""
192 # TODO: make sure it is written to temp file, not downloads
193 tmp_fname = "{}_{}_{}_{}".format(
194 time.strftime("%Y%m%d%H%M%S"),
195 self.protocol.getProtoTransport().transportId,
196 self.protocol.terminal.transport.session.id,
197 re.sub("[^A-Za-z0-9]", "_", outfile),
198 )
199 safeoutfile = os.path.join(
200 CowrieConfig.get("honeypot", "download_path"), tmp_fname
201 )
202
203 # Data contains random garbage from an actual file, so when
204 # catting the file, you'll see some 'real' compiled data
205 for i in range(random.randint(3, 15)):
206 if random.randint(1, 3) == 1:
207 data = data + Command_gcc.RANDOM_DATA[::-1]
208 else:
209 data = data + Command_gcc.RANDOM_DATA
210
211 # Write random data
212 with open(safeoutfile, "wb") as f:
213 f.write(data)
214
215 # Output file
216 outfile = self.fs.resolve_path(outfile, self.protocol.cwd)
217
218 # Create file for the protocol
219 self.fs.mkfile(outfile, 0, 0, len(data), 33188)
220 self.fs.update_realfile(self.fs.getfile(outfile), safeoutfile)
221 self.fs.chown(outfile, self.protocol.user.uid, self.protocol.user.gid)
222
223 # Segfault command
224 class segfault_command(HoneyPotCommand):
225 def call(self):
226 self.write("Segmentation fault\n")
227
228 # Trick the 'new compiled file' as an segfault
229 self.protocol.commands[outfile] = segfault_command
230
231 # Done
232 self.exit()
233
234 def arg_missing(self, arg):
235 """
236 Print missing argument message, and exit
237 """
238 self.write(f"{Command_gcc.APP_NAME}: argument to '{arg}' is missing\n")
239 self.exit()
240
241 def help(self):
242 """
243 Print help info, and exit
244 """
245
246 self.write(
247 """Usage: gcc [options] file...
248 Options:
249 -pass-exit-codes Exit with highest error code from a phase
250 --help Display this information
251 --target-help Display target specific command line options
252 --help={common|optimizers|params|target|warnings|[^]{joined|separate|undocumented}}[,...]
253 Display specific types of command line options
254 (Use '-v --help' to display command line options of sub-processes)
255 --version Display compiler version information
256 -dumpspecs Display all of the built in spec strings
257 -dumpversion Display the version of the compiler
258 -dumpmachine Display the compiler's target processor
259 -print-search-dirs Display the directories in the compiler's search path
260 -print-libgcc-file-name Display the name of the compiler's companion library
261 -print-file-name=<lib> Display the full path to library <lib>
262 -print-prog-name=<prog> Display the full path to compiler component <prog>
263 -print-multiarch Display the target's normalized GNU triplet, used as
264 a component in the library path
265 -print-multi-directory Display the root directory for versions of libgcc
266 -print-multi-lib Display the mapping between command line options and
267 multiple library search directories
268 -print-multi-os-directory Display the relative path to OS libraries
269 -print-sysroot Display the target libraries directory
270 -print-sysroot-headers-suffix Display the sysroot suffix used to find headers
271 -Wa,<options> Pass comma-separated <options> on to the assembler
272 -Wp,<options> Pass comma-separated <options> on to the preprocessor
273 -Wl,<options> Pass comma-separated <options> on to the linker
274 -Xassembler <arg> Pass <arg> on to the assembler
275 -Xpreprocessor <arg> Pass <arg> on to the preprocessor
276 -Xlinker <arg> Pass <arg> on to the linker
277 -save-temps Do not delete intermediate files
278 -save-temps=<arg> Do not delete intermediate files
279 -no-canonical-prefixes Do not canonicalize paths when building relative
280 prefixes to other gcc components
281 -pipe Use pipes rather than intermediate files
282 -time Time the execution of each subprocess
283 -specs=<file> Override built-in specs with the contents of <file>
284 -std=<standard> Assume that the input sources are for <standard>
285 --sysroot=<directory> Use <directory> as the root directory for headers
286 and libraries
287 -B <directory> Add <directory> to the compiler's search paths
288 -v Display the programs invoked by the compiler
289 -### Like -v but options quoted and commands not executed
290 -E Preprocess only; do not compile, assemble or link
291 -S Compile only; do not assemble or link
292 -c Compile and assemble, but do not link
293 -o <file> Place the output into <file>
294 -pie Create a position independent executable
295 -shared Create a shared library
296 -x <language> Specify the language of the following input files
297 Permissible languages include: c c++ assembler none
298 'none' means revert to the default behavior of
299 guessing the language based on the file's extension
300
301 Options starting with -g, -f, -m, -O, -W, or --param are automatically
302 passed on to the various sub-processes invoked by gcc. In order to pass
303 other options on to these processes the -W<letter> options must be used.
304
305 For bug reporting instructions, please see:
306 <file:///usr/share/doc/gcc-4.7/README.Bugs>.
307 """
308 )
309 self.exit()
310
311
312 commands["/usr/bin/gcc"] = Command_gcc
313 commands["gcc"] = Command_gcc
314 commands[
315 "/usr/bin/gcc-%s" % (".".join([str(v) for v in Command_gcc.APP_VERSION[:2]]))
316 ] = Command_gcc
317
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/cowrie/commands/gcc.py b/src/cowrie/commands/gcc.py
--- a/src/cowrie/commands/gcc.py
+++ b/src/cowrie/commands/gcc.py
@@ -133,7 +133,7 @@
# Schedule call to make it more time consuming and real
self.scheduled = reactor.callLater( # type: ignore[attr-defined]
- timeout, self.generate_file(output_file if output_file else "a.out")
+ timeout, self.generate_file, (output_file if output_file else "a.out")
)
else:
self.no_files()
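
Why the one-line patch works: Twisted's `reactor.callLater(delay, callable, *args)` expects the function object and its arguments as separate parameters. The buggy line wrote `callLater(timeout, self.generate_file(...))`, which invokes `generate_file` immediately and hands `callLater` its return value (`None`), tripping the `None is not callable` assertion from the traceback. A minimal standalone sketch (names assumed; not taken from the Cowrie codebase) demonstrating the difference:

```python
# Sketch showing the two calling conventions. reactor.callLater takes the
# callable and its arguments separately; callLater(delay, f(x)) calls f(x)
# right away and schedules its return value (None) instead of f itself.
from twisted.internet import reactor

def generate_file(outfile):
    print(f"pretending to emit {outfile}")
    reactor.stop()

# Buggy form -- raises AssertionError: None is not callable
# reactor.callLater(0.5, generate_file("a.out"))

# Fixed form -- generate_file runs 0.5 s later with "a.out" as its argument
reactor.callLater(0.5, generate_file, "a.out")
reactor.run()
```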
| {"golden_diff": "diff --git a/src/cowrie/commands/gcc.py b/src/cowrie/commands/gcc.py\n--- a/src/cowrie/commands/gcc.py\n+++ b/src/cowrie/commands/gcc.py\n@@ -133,7 +133,7 @@\n \n # Schedule call to make it more time consuming and real\n self.scheduled = reactor.callLater( # type: ignore[attr-defined]\n- timeout, self.generate_file(output_file if output_file else \"a.out\")\n+ timeout, self.generate_file, (output_file if output_file else \"a.out\")\n )\n else:\n self.no_files()\n", "issue": "Gcc \"compile\" file quits cowrie\n**Describe the bug**\r\nBy running gcc to \"compile\" a file just quits cowrie\r\n\r\n**To Reproduce**\r\nSteps to reproduce the behavior:\r\n1. Login on the honeypot\r\n2. Compile the file with `gcc file.c` \r\n4. See error and cowrie will quit(`Connection to localhost closed by remote host.`)\r\n\r\n\r\n**Expected behavior**\r\nTo compile the file\r\n\r\n**Server (please complete the following information):**\r\n - OS: Ubuntu 22.04\r\n - Python: Python 3.10\r\n\r\n**Additional context**\r\n```\r\n2022-08-25T15:35:01.948821Z [HoneyPotSSHTransport,7728,127.0.0.1] CMD: gcc hi.c -o p\r\n2022-08-25T15:35:01.950607Z [HoneyPotSSHTransport,7728,127.0.0.1] Command found: gcc hi.c -o p\r\n2022-08-25T15:35:01.952849Z [HoneyPotSSHTransport,7728,127.0.0.1] Unhandled Error\r\n\tTraceback (most recent call last):\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/python/log.py\", line 96, in callWithLogger\r\n\t return callWithContext({\"system\": lp}, func, *args, **kw)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/python/log.py\", line 80, in callWithContext\r\n\t return context.call({ILogContext: newCtx}, func, *args, **kw)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/python/context.py\", line 117, in callWithContext\r\n\t return self.currentContext().callWithContext(ctx, func, *args, **kw)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/python/context.py\", line 82, in callWithContext\r\n\t return func(*args, **kw)\r\n\t--- <exception caught here> ---\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/internet/posixbase.py\", line 683, in _doReadOrWrite\r\n\t why = selectable.doRead()\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/internet/tcp.py\", line 248, in doRead\r\n\t return self._dataReceived(data)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/internet/tcp.py\", line 253, in _dataReceived\r\n\t rval = self.protocol.dataReceived(data)\r\n\t File \"/home/cowrie/cowrie/src/cowrie/ssh/transport.py\", line 144, in dataReceived\r\n\t self.dispatchMessage(messageNum, packet[1:])\r\n\t File \"/home/cowrie/cowrie/src/cowrie/ssh/transport.py\", line 148, in dispatchMessage\r\n\t transport.SSHServerTransport.dispatchMessage(self, message_num, payload)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/transport.py\", line 790, in dispatchMessage\r\n\t self.service.packetReceived(messageNum, payload)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/service.py\", line 50, in packetReceived\r\n\t return f(packet)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/connection.py\", line 265, in ssh_CHANNEL_DATA\r\n\t channel.dataReceived(data)\r\n\t File 
\"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/session.py\", line 173, in dataReceived\r\n\t self.client.transport.write(data)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/ssh/session.py\", line 233, in write\r\n\t self.proto.dataReceived(data)\r\n\t File \"/home/cowrie/cowrie/src/cowrie/insults/insults.py\", line 126, in dataReceived\r\n\t insults.ServerProtocol.dataReceived(self, data)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/insults/insults.py\", line 520, in dataReceived\r\n\t self.terminalProtocol.keystrokeReceived(ch, None)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/recvline.py\", line 435, in keystrokeReceived\r\n\t m()\r\n\t File \"/home/cowrie/cowrie/src/cowrie/shell/protocol.py\", line 329, in handle_RETURN\r\n\t return recvline.RecvLine.handle_RETURN(self)\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/conch/recvline.py\", line 493, in handle_RETURN\r\n\t self.lineReceived(line)\r\n\t File \"/home/cowrie/cowrie/src/cowrie/shell/protocol.py\", line 190, in lineReceived\r\n\t self.cmdstack[-1].lineReceived(line)\r\n\t File \"/home/cowrie/cowrie/src/cowrie/shell/honeypot.py\", line 114, in lineReceived\r\n\t self.runCommand()\r\n\t File \"/home/cowrie/cowrie/src/cowrie/shell/honeypot.py\", line 329, in runCommand\r\n\t self.protocol.call_command(pp, cmdclass, *cmd_array[0][\"rargs\"])\r\n\t File \"/home/cowrie/cowrie/src/cowrie/shell/protocol.py\", line 310, in call_command\r\n\t HoneyPotBaseProtocol.call_command(self, pp, cmd, *args)\r\n\t File \"/home/cowrie/cowrie/src/cowrie/shell/protocol.py\", line 199, in call_command\r\n\t obj.start()\r\n\t File \"/home/cowrie/cowrie/src/cowrie/commands/gcc.py\", line 135, in start\r\n\t self.scheduled = reactor.callLater( # type: ignore[attr-defined]\r\n\t File \"/home/cowrie/cowrie/cowrie-env/lib/python3.10/site-packages/twisted/internet/base.py\", line 868, in callLater\r\n\t assert builtins.callable(callable), f\"{callable} is not callable\"\r\n\tbuiltins.AssertionError: None is not callable\r\n```\r\n\n", "before_files": [{"content": "# Copyright (c) 2013 Bas Stottelaar <basstottelaar [AT] gmail [DOT] com>\n\nfrom __future__ import annotations\n\nimport getopt\nimport os\nimport random\nimport re\nimport time\n\nfrom twisted.internet import reactor # type: ignore\nfrom twisted.internet.defer import Deferred\n\nfrom cowrie.core.config import CowrieConfig\nfrom cowrie.shell.command import HoneyPotCommand\n\ncommands = {}\n\n\nclass Command_gcc(HoneyPotCommand):\n # Name of program. Under OSX, you might consider i686-apple-darwin11-llvm-gcc-X.X\n APP_NAME = \"gcc\"\n\n # GCC verson, used in help, version and the commandline name gcc-X.X\n APP_VERSION = (4, 7, 2)\n\n # Random binary data, which looks awesome. 
You could change this to whatever you want, but this\n # data will be put in the actual file and thus exposed to our hacker when he\\she cats the file.\n RANDOM_DATA = (\n b\"\\x6a\\x00\\x48\\x89\\xe5\\x48\\x83\\xe4\\xf0\\x48\\x8b\\x7d\\x08\\x48\\x8d\\x75\\x10\\x89\\xfa\"\n b\"\\x83\\xc2\\x01\\xc1\\xe2\\x03\\x48\\x01\\xf2\\x48\\x89\\xd1\\xeb\\x04\\x48\\x83\\xc1\\x08\\x48\"\n b\"\\x83\\x39\\x00\\x75\\xf6\\x48\\x83\\xc1\\x08\\xe8\\x0c\\x00\\x00\\x00\\x89\\xc7\\xe8\\xb9\\x00\"\n b\"\\x00\\x00\\xf4\\x90\\x90\\x90\\x90\\x55\\x48\\x89\\xe5\\x48\\x83\\xec\\x40\\x89\\x7d\\xfc\\x48\"\n b\"\\x89\\x75\\xf0\\x48\\x8b\\x45\\xf0\\x48\\x8b\\x00\\x48\\x83\\xf8\\x00\\x75\\x0c\\xb8\\x00\\x00\"\n b\"\\x00\\x00\\x89\\xc7\\xe8\\x8c\\x00\\x00\\x00\\x48\\x8b\\x45\\xf0\\x48\\x8b\\x40\\x08\\x30\\xc9\"\n b\"\\x48\\x89\\xc7\\x88\\xc8\\xe8\\x7e\\x00\\x00\\x00\\x89\\xc1\\x89\\x4d\\xdc\\x48\\x8d\\x0d\\xd8\"\n b\"\\x01\\x00\\x00\\x48\\x89\\xcf\\x48\\x89\\x4d\\xd0\\xe8\\x72\\x00\\x00\\x00\\x8b\\x4d\\xdc\\x30\"\n b\"\\xd2\\x48\\x8d\\x3d\\xa4\\x00\\x00\\x00\\x89\\xce\\x88\\x55\\xcf\\x48\\x89\\xc2\\x8a\\x45\\xcf\"\n b\"\\xe8\\x53\\x00\\x00\\x00\\x8b\\x45\\xdc\\x88\\x05\\xc3\\x01\\x00\\x00\\x8b\\x45\\xdc\\xc1\\xe8\"\n b\"\\x08\\x88\\x05\\xb8\\x01\\x00\\x00\\x8b\\x45\\xdc\\xc1\\xe8\\x10\\x88\\x05\\xad\\x01\\x00\\x00\"\n b\"\\x8b\\x45\\xdc\\xc1\\xe8\\x18\\x88\\x05\\xa2\\x01\\x00\\x00\\x48\\x8b\\x45\\xd0\\x48\\x89\\x45\"\n b\"\\xe0\\x48\\x8b\\x45\\xe0\\xff\\xd0\\x8b\\x45\\xec\\x48\\x83\\xc4\\x40\\x5d\\xc3\\xff\\x25\\x3e\"\n b\"\\x01\\x00\\x00\\xff\\x25\\x40\\x01\\x00\\x00\\xff\\x25\\x42\\x01\\x00\\x00\\xff\\x25\\x44\\x01\"\n b\"\\x00\\x00\\x4c\\x8d\\x1d\\x1d\\x01\\x00\\x00\\x41\\x53\\xff\\x25\\x0d\\x01\\x00\\x00\\x90\\x68\"\n b\"\\x00\\x00\\x00\\x00\\xe9\\xe6\\xff\\xff\\xff\\x68\\x0c\\x00\\x00\\x00\\xe9\\xdc\\xff\\xff\\xff\"\n b\"\\x68\\x1d\\x00\\x00\\x00\\xe9\\xd2\\xff\\xff\\xff\\x68\\x2b\\x00\\x00\\x00\\xe9\\xc8\\xff\\xff\"\n b\"\\xff\\x01\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x00\\x00\"\n b\"\\x00\\x00\\x1c\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x00\\x0e\\x00\\x00\\x34\\x00\\x00\\x00\\x34\"\n b\"\\x00\\x00\\x00\\xf5\\x0e\\x00\\x00\\x00\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x03\\x00\\x00\\x00\"\n b\"\\x0c\\x00\\x02\\x00\\x14\\x00\\x02\\x00\\x00\\x00\\x00\\x01\\x40\\x00\\x00\\x00\\x00\\x00\\x00\"\n b\"\\x01\\x00\\x00\\x00\"\n )\n\n scheduled: Deferred\n\n def start(self):\n \"\"\"\n Parse as much as possible from a GCC syntax and generate the output\n that is requested. 
The file that is generated can be read (and will)\n output garbage from an actual file, but when executed, it will generate\n a segmentation fault.\n\n The input files are expected to exists, but can be empty.\n\n Verified syntaxes, including non-existing files:\n * gcc test.c\n * gcc test.c -o program\n * gcc test1.c test2.c\n * gcc test1.c test2.c -o program\n * gcc test.c -o program -lm\n * gcc -g test.c -o program -lm\n * gcc test.c -DF_CPU=16000000 -I../etc -o program\n * gcc test.c -O2 -o optimized_program\n * gcc test.c -Wstrict-overflow=n -o overflowable_program\n\n Others:\n * gcc\n * gcc -h\n * gcc -v\n * gcc --help\n * gcc --version\n \"\"\"\n\n output_file = None\n input_files = 0\n complete = True\n\n # Parse options or display no files\n try:\n opts, args = getopt.gnu_getopt(\n self.args, \"ESchvgo:x:l:I:W:D:X:O:\", [\"help\", \"version\", \"param\"]\n )\n except getopt.GetoptError:\n self.no_files()\n return\n\n # Parse options\n for o, a in opts:\n if o in (\"-v\"):\n self.version(short=False)\n return\n elif o in (\"--version\"):\n self.version(short=True)\n return\n elif o in (\"-h\"):\n self.arg_missing(\"-h\")\n return\n elif o in (\"--help\"):\n self.help()\n return\n elif o in (\"-o\"):\n if len(a) == 0:\n self.arg_missing(\"-o\")\n else:\n output_file = a\n\n # Check for *.c or *.cpp files\n for value in args:\n if \".c\" in value.lower():\n sourcefile = self.fs.resolve_path(value, self.protocol.cwd)\n\n if self.fs.exists(sourcefile):\n input_files = input_files + 1\n else:\n self.write(\n f\"{Command_gcc.APP_NAME}: {value}: No such file or directory\\n\"\n )\n complete = False\n\n # To generate, or not\n if input_files > 0 and complete:\n timeout = 0.1 + random.random()\n\n # Schedule call to make it more time consuming and real\n self.scheduled = reactor.callLater( # type: ignore[attr-defined]\n timeout, self.generate_file(output_file if output_file else \"a.out\")\n )\n else:\n self.no_files()\n\n def handle_CTRL_C(self):\n \"\"\"\n Make sure the scheduled call will be canceled\n \"\"\"\n\n if getattr(self, \"scheduled\", False):\n self.scheduled.cancel()\n\n def no_files(self):\n \"\"\"\n Notify user there are no input files, and exit\n \"\"\"\n self.write(\n \"\"\"gcc: fatal error: no input files\ncompilation terminated.\\n\"\"\"\n )\n self.exit()\n\n def version(self, short):\n \"\"\"\n Print long or short version, and exit\n \"\"\"\n\n # Generate version number\n version = \".\".join([str(v) for v in Command_gcc.APP_VERSION[:3]])\n version_short = \".\".join([str(v) for v in Command_gcc.APP_VERSION[:2]])\n\n if short:\n data = \"\"\"{} (Debian {}-8) {}\nCopyright (C) 2010 Free Software Foundation, Inc.\nThis is free software; see the source for copying conditions. 
There is NO\nwarranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\"\"\".format(\n Command_gcc.APP_NAME, version, version\n )\n else:\n data = \"\"\"Using built-in specs.\nCOLLECT_GCC=gcc\nCOLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/4.7/lto-wrapper\nTarget: x86_64-linux-gnu\nConfigured with: ../src/configure -v --with-pkgversion=\\'Debian {}-5\\' --with-bugurl=file:///usr/share/doc/gcc-{}/README.Bugs --enable-languages=c,c++,fortran,objc,obj-c++ --prefix=/usr --program-suffix=-{} --enable-shared --enable-multiarch --enable-linker-build-id --with-system-zlib --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --with-gxx-include-dir=/usr/include/c++/{} --libdir=/usr/lib --enable-nls --enable-clocale=gnu --enable-libstdcxx-debug --enable-objc-gc --with-arch-32=i586 --with-tune=generic --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu\nThread model: posix\ngcc version {} (Debian {}-5)\"\"\".format(\n version, version_short, version_short, version_short, version, version\n )\n\n # Write\n self.write(f\"{data}\\n\")\n self.exit()\n\n def generate_file(self, outfile):\n data = b\"\"\n # TODO: make sure it is written to temp file, not downloads\n tmp_fname = \"{}_{}_{}_{}\".format(\n time.strftime(\"%Y%m%d%H%M%S\"),\n self.protocol.getProtoTransport().transportId,\n self.protocol.terminal.transport.session.id,\n re.sub(\"[^A-Za-z0-9]\", \"_\", outfile),\n )\n safeoutfile = os.path.join(\n CowrieConfig.get(\"honeypot\", \"download_path\"), tmp_fname\n )\n\n # Data contains random garbage from an actual file, so when\n # catting the file, you'll see some 'real' compiled data\n for i in range(random.randint(3, 15)):\n if random.randint(1, 3) == 1:\n data = data + Command_gcc.RANDOM_DATA[::-1]\n else:\n data = data + Command_gcc.RANDOM_DATA\n\n # Write random data\n with open(safeoutfile, \"wb\") as f:\n f.write(data)\n\n # Output file\n outfile = self.fs.resolve_path(outfile, self.protocol.cwd)\n\n # Create file for the protocol\n self.fs.mkfile(outfile, 0, 0, len(data), 33188)\n self.fs.update_realfile(self.fs.getfile(outfile), safeoutfile)\n self.fs.chown(outfile, self.protocol.user.uid, self.protocol.user.gid)\n\n # Segfault command\n class segfault_command(HoneyPotCommand):\n def call(self):\n self.write(\"Segmentation fault\\n\")\n\n # Trick the 'new compiled file' as an segfault\n self.protocol.commands[outfile] = segfault_command\n\n # Done\n self.exit()\n\n def arg_missing(self, arg):\n \"\"\"\n Print missing argument message, and exit\n \"\"\"\n self.write(f\"{Command_gcc.APP_NAME}: argument to '{arg}' is missing\\n\")\n self.exit()\n\n def help(self):\n \"\"\"\n Print help info, and exit\n \"\"\"\n\n self.write(\n \"\"\"Usage: gcc [options] file...\nOptions:\n -pass-exit-codes Exit with highest error code from a phase\n --help Display this information\n --target-help Display target specific command line options\n --help={common|optimizers|params|target|warnings|[^]{joined|separate|undocumented}}[,...]\n Display specific types of command line options\n (Use '-v --help' to display command line options of sub-processes)\n --version Display compiler version information\n -dumpspecs Display all of the built in spec strings\n -dumpversion Display the version of the compiler\n -dumpmachine Display the compiler's target processor\n -print-search-dirs Display the directories in the compiler's search path\n -print-libgcc-file-name Display the name of the compiler's companion library\n 
-print-file-name=<lib> Display the full path to library <lib>\n -print-prog-name=<prog> Display the full path to compiler component <prog>\n -print-multiarch Display the target's normalized GNU triplet, used as\n a component in the library path\n -print-multi-directory Display the root directory for versions of libgcc\n -print-multi-lib Display the mapping between command line options and\n multiple library search directories\n -print-multi-os-directory Display the relative path to OS libraries\n -print-sysroot Display the target libraries directory\n -print-sysroot-headers-suffix Display the sysroot suffix used to find headers\n -Wa,<options> Pass comma-separated <options> on to the assembler\n -Wp,<options> Pass comma-separated <options> on to the preprocessor\n -Wl,<options> Pass comma-separated <options> on to the linker\n -Xassembler <arg> Pass <arg> on to the assembler\n -Xpreprocessor <arg> Pass <arg> on to the preprocessor\n -Xlinker <arg> Pass <arg> on to the linker\n -save-temps Do not delete intermediate files\n -save-temps=<arg> Do not delete intermediate files\n -no-canonical-prefixes Do not canonicalize paths when building relative\n prefixes to other gcc components\n -pipe Use pipes rather than intermediate files\n -time Time the execution of each subprocess\n -specs=<file> Override built-in specs with the contents of <file>\n -std=<standard> Assume that the input sources are for <standard>\n --sysroot=<directory> Use <directory> as the root directory for headers\n and libraries\n -B <directory> Add <directory> to the compiler's search paths\n -v Display the programs invoked by the compiler\n -### Like -v but options quoted and commands not executed\n -E Preprocess only; do not compile, assemble or link\n -S Compile only; do not assemble or link\n -c Compile and assemble, but do not link\n -o <file> Place the output into <file>\n -pie Create a position independent executable\n -shared Create a shared library\n -x <language> Specify the language of the following input files\n Permissible languages include: c c++ assembler none\n 'none' means revert to the default behavior of\n guessing the language based on the file's extension\n\nOptions starting with -g, -f, -m, -O, -W, or --param are automatically\n passed on to the various sub-processes invoked by gcc. In order to pass\n other options on to these processes the -W<letter> options must be used.\n\nFor bug reporting instructions, please see:\n<file:///usr/share/doc/gcc-4.7/README.Bugs>.\n\"\"\"\n )\n self.exit()\n\n\ncommands[\"/usr/bin/gcc\"] = Command_gcc\ncommands[\"gcc\"] = Command_gcc\ncommands[\n \"/usr/bin/gcc-%s\" % (\".\".join([str(v) for v in Command_gcc.APP_VERSION[:2]]))\n] = Command_gcc\n", "path": "src/cowrie/commands/gcc.py"}], "after_files": [{"content": "# Copyright (c) 2013 Bas Stottelaar <basstottelaar [AT] gmail [DOT] com>\n\nfrom __future__ import annotations\n\nimport getopt\nimport os\nimport random\nimport re\nimport time\n\nfrom twisted.internet import reactor # type: ignore\nfrom twisted.internet.defer import Deferred\n\nfrom cowrie.core.config import CowrieConfig\nfrom cowrie.shell.command import HoneyPotCommand\n\ncommands = {}\n\n\nclass Command_gcc(HoneyPotCommand):\n # Name of program. Under OSX, you might consider i686-apple-darwin11-llvm-gcc-X.X\n APP_NAME = \"gcc\"\n\n # GCC verson, used in help, version and the commandline name gcc-X.X\n APP_VERSION = (4, 7, 2)\n\n # Random binary data, which looks awesome. 
You could change this to whatever you want, but this\n # data will be put in the actual file and thus exposed to our hacker when he\\she cats the file.\n RANDOM_DATA = (\n b\"\\x6a\\x00\\x48\\x89\\xe5\\x48\\x83\\xe4\\xf0\\x48\\x8b\\x7d\\x08\\x48\\x8d\\x75\\x10\\x89\\xfa\"\n b\"\\x83\\xc2\\x01\\xc1\\xe2\\x03\\x48\\x01\\xf2\\x48\\x89\\xd1\\xeb\\x04\\x48\\x83\\xc1\\x08\\x48\"\n b\"\\x83\\x39\\x00\\x75\\xf6\\x48\\x83\\xc1\\x08\\xe8\\x0c\\x00\\x00\\x00\\x89\\xc7\\xe8\\xb9\\x00\"\n b\"\\x00\\x00\\xf4\\x90\\x90\\x90\\x90\\x55\\x48\\x89\\xe5\\x48\\x83\\xec\\x40\\x89\\x7d\\xfc\\x48\"\n b\"\\x89\\x75\\xf0\\x48\\x8b\\x45\\xf0\\x48\\x8b\\x00\\x48\\x83\\xf8\\x00\\x75\\x0c\\xb8\\x00\\x00\"\n b\"\\x00\\x00\\x89\\xc7\\xe8\\x8c\\x00\\x00\\x00\\x48\\x8b\\x45\\xf0\\x48\\x8b\\x40\\x08\\x30\\xc9\"\n b\"\\x48\\x89\\xc7\\x88\\xc8\\xe8\\x7e\\x00\\x00\\x00\\x89\\xc1\\x89\\x4d\\xdc\\x48\\x8d\\x0d\\xd8\"\n b\"\\x01\\x00\\x00\\x48\\x89\\xcf\\x48\\x89\\x4d\\xd0\\xe8\\x72\\x00\\x00\\x00\\x8b\\x4d\\xdc\\x30\"\n b\"\\xd2\\x48\\x8d\\x3d\\xa4\\x00\\x00\\x00\\x89\\xce\\x88\\x55\\xcf\\x48\\x89\\xc2\\x8a\\x45\\xcf\"\n b\"\\xe8\\x53\\x00\\x00\\x00\\x8b\\x45\\xdc\\x88\\x05\\xc3\\x01\\x00\\x00\\x8b\\x45\\xdc\\xc1\\xe8\"\n b\"\\x08\\x88\\x05\\xb8\\x01\\x00\\x00\\x8b\\x45\\xdc\\xc1\\xe8\\x10\\x88\\x05\\xad\\x01\\x00\\x00\"\n b\"\\x8b\\x45\\xdc\\xc1\\xe8\\x18\\x88\\x05\\xa2\\x01\\x00\\x00\\x48\\x8b\\x45\\xd0\\x48\\x89\\x45\"\n b\"\\xe0\\x48\\x8b\\x45\\xe0\\xff\\xd0\\x8b\\x45\\xec\\x48\\x83\\xc4\\x40\\x5d\\xc3\\xff\\x25\\x3e\"\n b\"\\x01\\x00\\x00\\xff\\x25\\x40\\x01\\x00\\x00\\xff\\x25\\x42\\x01\\x00\\x00\\xff\\x25\\x44\\x01\"\n b\"\\x00\\x00\\x4c\\x8d\\x1d\\x1d\\x01\\x00\\x00\\x41\\x53\\xff\\x25\\x0d\\x01\\x00\\x00\\x90\\x68\"\n b\"\\x00\\x00\\x00\\x00\\xe9\\xe6\\xff\\xff\\xff\\x68\\x0c\\x00\\x00\\x00\\xe9\\xdc\\xff\\xff\\xff\"\n b\"\\x68\\x1d\\x00\\x00\\x00\\xe9\\xd2\\xff\\xff\\xff\\x68\\x2b\\x00\\x00\\x00\\xe9\\xc8\\xff\\xff\"\n b\"\\xff\\x01\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x1c\\x00\\x00\\x00\\x00\\x00\"\n b\"\\x00\\x00\\x1c\\x00\\x00\\x00\\x02\\x00\\x00\\x00\\x00\\x0e\\x00\\x00\\x34\\x00\\x00\\x00\\x34\"\n b\"\\x00\\x00\\x00\\xf5\\x0e\\x00\\x00\\x00\\x00\\x00\\x00\\x34\\x00\\x00\\x00\\x03\\x00\\x00\\x00\"\n b\"\\x0c\\x00\\x02\\x00\\x14\\x00\\x02\\x00\\x00\\x00\\x00\\x01\\x40\\x00\\x00\\x00\\x00\\x00\\x00\"\n b\"\\x01\\x00\\x00\\x00\"\n )\n\n scheduled: Deferred\n\n def start(self):\n \"\"\"\n Parse as much as possible from a GCC syntax and generate the output\n that is requested. 
The file that is generated can be read (and will)\n output garbage from an actual file, but when executed, it will generate\n a segmentation fault.\n\n The input files are expected to exists, but can be empty.\n\n Verified syntaxes, including non-existing files:\n * gcc test.c\n * gcc test.c -o program\n * gcc test1.c test2.c\n * gcc test1.c test2.c -o program\n * gcc test.c -o program -lm\n * gcc -g test.c -o program -lm\n * gcc test.c -DF_CPU=16000000 -I../etc -o program\n * gcc test.c -O2 -o optimized_program\n * gcc test.c -Wstrict-overflow=n -o overflowable_program\n\n Others:\n * gcc\n * gcc -h\n * gcc -v\n * gcc --help\n * gcc --version\n \"\"\"\n\n output_file = None\n input_files = 0\n complete = True\n\n # Parse options or display no files\n try:\n opts, args = getopt.gnu_getopt(\n self.args, \"ESchvgo:x:l:I:W:D:X:O:\", [\"help\", \"version\", \"param\"]\n )\n except getopt.GetoptError:\n self.no_files()\n return\n\n # Parse options\n for o, a in opts:\n if o in (\"-v\"):\n self.version(short=False)\n return\n elif o in (\"--version\"):\n self.version(short=True)\n return\n elif o in (\"-h\"):\n self.arg_missing(\"-h\")\n return\n elif o in (\"--help\"):\n self.help()\n return\n elif o in (\"-o\"):\n if len(a) == 0:\n self.arg_missing(\"-o\")\n else:\n output_file = a\n\n # Check for *.c or *.cpp files\n for value in args:\n if \".c\" in value.lower():\n sourcefile = self.fs.resolve_path(value, self.protocol.cwd)\n\n if self.fs.exists(sourcefile):\n input_files = input_files + 1\n else:\n self.write(\n f\"{Command_gcc.APP_NAME}: {value}: No such file or directory\\n\"\n )\n complete = False\n\n # To generate, or not\n if input_files > 0 and complete:\n timeout = 0.1 + random.random()\n\n # Schedule call to make it more time consuming and real\n self.scheduled = reactor.callLater( # type: ignore[attr-defined]\n timeout, self.generate_file, (output_file if output_file else \"a.out\")\n )\n else:\n self.no_files()\n\n def handle_CTRL_C(self):\n \"\"\"\n Make sure the scheduled call will be canceled\n \"\"\"\n\n if getattr(self, \"scheduled\", False):\n self.scheduled.cancel()\n\n def no_files(self):\n \"\"\"\n Notify user there are no input files, and exit\n \"\"\"\n self.write(\n \"\"\"gcc: fatal error: no input files\ncompilation terminated.\\n\"\"\"\n )\n self.exit()\n\n def version(self, short):\n \"\"\"\n Print long or short version, and exit\n \"\"\"\n\n # Generate version number\n version = \".\".join([str(v) for v in Command_gcc.APP_VERSION[:3]])\n version_short = \".\".join([str(v) for v in Command_gcc.APP_VERSION[:2]])\n\n if short:\n data = \"\"\"{} (Debian {}-8) {}\nCopyright (C) 2010 Free Software Foundation, Inc.\nThis is free software; see the source for copying conditions. 
There is NO\nwarranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\"\"\".format(\n Command_gcc.APP_NAME, version, version\n )\n else:\n data = \"\"\"Using built-in specs.\nCOLLECT_GCC=gcc\nCOLLECT_LTO_WRAPPER=/usr/lib/gcc/x86_64-linux-gnu/4.7/lto-wrapper\nTarget: x86_64-linux-gnu\nConfigured with: ../src/configure -v --with-pkgversion=\\'Debian {}-5\\' --with-bugurl=file:///usr/share/doc/gcc-{}/README.Bugs --enable-languages=c,c++,fortran,objc,obj-c++ --prefix=/usr --program-suffix=-{} --enable-shared --enable-multiarch --enable-linker-build-id --with-system-zlib --libexecdir=/usr/lib --without-included-gettext --enable-threads=posix --with-gxx-include-dir=/usr/include/c++/{} --libdir=/usr/lib --enable-nls --enable-clocale=gnu --enable-libstdcxx-debug --enable-objc-gc --with-arch-32=i586 --with-tune=generic --enable-checking=release --build=x86_64-linux-gnu --host=x86_64-linux-gnu --target=x86_64-linux-gnu\nThread model: posix\ngcc version {} (Debian {}-5)\"\"\".format(\n version, version_short, version_short, version_short, version, version\n )\n\n # Write\n self.write(f\"{data}\\n\")\n self.exit()\n\n def generate_file(self, outfile):\n data = b\"\"\n # TODO: make sure it is written to temp file, not downloads\n tmp_fname = \"{}_{}_{}_{}\".format(\n time.strftime(\"%Y%m%d%H%M%S\"),\n self.protocol.getProtoTransport().transportId,\n self.protocol.terminal.transport.session.id,\n re.sub(\"[^A-Za-z0-9]\", \"_\", outfile),\n )\n safeoutfile = os.path.join(\n CowrieConfig.get(\"honeypot\", \"download_path\"), tmp_fname\n )\n\n # Data contains random garbage from an actual file, so when\n # catting the file, you'll see some 'real' compiled data\n for i in range(random.randint(3, 15)):\n if random.randint(1, 3) == 1:\n data = data + Command_gcc.RANDOM_DATA[::-1]\n else:\n data = data + Command_gcc.RANDOM_DATA\n\n # Write random data\n with open(safeoutfile, \"wb\") as f:\n f.write(data)\n\n # Output file\n outfile = self.fs.resolve_path(outfile, self.protocol.cwd)\n\n # Create file for the protocol\n self.fs.mkfile(outfile, 0, 0, len(data), 33188)\n self.fs.update_realfile(self.fs.getfile(outfile), safeoutfile)\n self.fs.chown(outfile, self.protocol.user.uid, self.protocol.user.gid)\n\n # Segfault command\n class segfault_command(HoneyPotCommand):\n def call(self):\n self.write(\"Segmentation fault\\n\")\n\n # Trick the 'new compiled file' as an segfault\n self.protocol.commands[outfile] = segfault_command\n\n # Done\n self.exit()\n\n def arg_missing(self, arg):\n \"\"\"\n Print missing argument message, and exit\n \"\"\"\n self.write(f\"{Command_gcc.APP_NAME}: argument to '{arg}' is missing\\n\")\n self.exit()\n\n def help(self):\n \"\"\"\n Print help info, and exit\n \"\"\"\n\n self.write(\n \"\"\"Usage: gcc [options] file...\nOptions:\n -pass-exit-codes Exit with highest error code from a phase\n --help Display this information\n --target-help Display target specific command line options\n --help={common|optimizers|params|target|warnings|[^]{joined|separate|undocumented}}[,...]\n Display specific types of command line options\n (Use '-v --help' to display command line options of sub-processes)\n --version Display compiler version information\n -dumpspecs Display all of the built in spec strings\n -dumpversion Display the version of the compiler\n -dumpmachine Display the compiler's target processor\n -print-search-dirs Display the directories in the compiler's search path\n -print-libgcc-file-name Display the name of the compiler's companion library\n 
-print-file-name=<lib> Display the full path to library <lib>\n -print-prog-name=<prog> Display the full path to compiler component <prog>\n -print-multiarch Display the target's normalized GNU triplet, used as\n a component in the library path\n -print-multi-directory Display the root directory for versions of libgcc\n -print-multi-lib Display the mapping between command line options and\n multiple library search directories\n -print-multi-os-directory Display the relative path to OS libraries\n -print-sysroot Display the target libraries directory\n -print-sysroot-headers-suffix Display the sysroot suffix used to find headers\n -Wa,<options> Pass comma-separated <options> on to the assembler\n -Wp,<options> Pass comma-separated <options> on to the preprocessor\n -Wl,<options> Pass comma-separated <options> on to the linker\n -Xassembler <arg> Pass <arg> on to the assembler\n -Xpreprocessor <arg> Pass <arg> on to the preprocessor\n -Xlinker <arg> Pass <arg> on to the linker\n -save-temps Do not delete intermediate files\n -save-temps=<arg> Do not delete intermediate files\n -no-canonical-prefixes Do not canonicalize paths when building relative\n prefixes to other gcc components\n -pipe Use pipes rather than intermediate files\n -time Time the execution of each subprocess\n -specs=<file> Override built-in specs with the contents of <file>\n -std=<standard> Assume that the input sources are for <standard>\n --sysroot=<directory> Use <directory> as the root directory for headers\n and libraries\n -B <directory> Add <directory> to the compiler's search paths\n -v Display the programs invoked by the compiler\n -### Like -v but options quoted and commands not executed\n -E Preprocess only; do not compile, assemble or link\n -S Compile only; do not assemble or link\n -c Compile and assemble, but do not link\n -o <file> Place the output into <file>\n -pie Create a position independent executable\n -shared Create a shared library\n -x <language> Specify the language of the following input files\n Permissible languages include: c c++ assembler none\n 'none' means revert to the default behavior of\n guessing the language based on the file's extension\n\nOptions starting with -g, -f, -m, -O, -W, or --param are automatically\n passed on to the various sub-processes invoked by gcc. In order to pass\n other options on to these processes the -W<letter> options must be used.\n\nFor bug reporting instructions, please see:\n<file:///usr/share/doc/gcc-4.7/README.Bugs>.\n\"\"\"\n )\n self.exit()\n\n\ncommands[\"/usr/bin/gcc\"] = Command_gcc\ncommands[\"gcc\"] = Command_gcc\ncommands[\n \"/usr/bin/gcc-%s\" % (\".\".join([str(v) for v in Command_gcc.APP_VERSION[:2]]))\n] = Command_gcc\n", "path": "src/cowrie/commands/gcc.py"}]} |
gh_patches_debug_1631 | rasdani/github-patches | git_diff | facebookresearch__hydra-1887 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[Ray-Plugin] Add support for Python 3.9
Python 3.9 support depends on https://github.com/ray-project/ray/issues/12788
Related to #1062
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `plugins/hydra_ray_launcher/setup.py`
Content:
```
1 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
2 # type: ignore
3 from pathlib import Path
4
5 from read_version import read_version
6 from setuptools import find_namespace_packages, setup
7
8 setup(
9 name="hydra-ray-launcher",
10 version=read_version("hydra_plugins/hydra_ray_launcher", "__init__.py"),
11 author="Jieru Hu",
12 author_email="[email protected]",
13 description="Hydra Ray launcher plugin",
14 long_description=(Path(__file__).parent / "README.md").read_text(),
15 long_description_content_type="text/markdown",
16 url="https://github.com/facebookresearch/hydra/",
17 packages=find_namespace_packages(include=["hydra_plugins.*"]),
18 classifiers=[
19 "License :: OSI Approved :: MIT License",
20 "Programming Language :: Python :: 3.7",
21 "Programming Language :: Python :: 3.8",
22 # "Programming Language :: Python :: 3.9",
23 "Operating System :: MacOS",
24 "Operating System :: POSIX :: Linux",
25 ],
26 install_requires=[
27 "boto3==1.17.17",
28 "hydra-core>=1.1.0.dev7",
29 "ray[default]==1.6.0",
30 # https://github.com/aio-libs/aiohttp/issues/6203
31 "aiohttp!=3.8.0",
32 "cloudpickle==1.6.0",
33 "pickle5==0.0.11",
34 ],
35 include_package_data=True,
36 )
37
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/plugins/hydra_ray_launcher/setup.py b/plugins/hydra_ray_launcher/setup.py
--- a/plugins/hydra_ray_launcher/setup.py
+++ b/plugins/hydra_ray_launcher/setup.py
@@ -19,7 +19,7 @@
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
- # "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.9",
"Operating System :: MacOS",
"Operating System :: POSIX :: Linux",
],
| {"golden_diff": "diff --git a/plugins/hydra_ray_launcher/setup.py b/plugins/hydra_ray_launcher/setup.py\n--- a/plugins/hydra_ray_launcher/setup.py\n+++ b/plugins/hydra_ray_launcher/setup.py\n@@ -19,7 +19,7 @@\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n- # \"Programming Language :: Python :: 3.9\",\n+ \"Programming Language :: Python :: 3.9\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX :: Linux\",\n ],\n", "issue": "[Ray-Plugin] Add support for Python 3.9\nPython 3.9 support depends on https://github.com/ray-project/ray/issues/12788\r\n\r\nRelated to #1062 \n", "before_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# type: ignore\nfrom pathlib import Path\n\nfrom read_version import read_version\nfrom setuptools import find_namespace_packages, setup\n\nsetup(\n name=\"hydra-ray-launcher\",\n version=read_version(\"hydra_plugins/hydra_ray_launcher\", \"__init__.py\"),\n author=\"Jieru Hu\",\n author_email=\"[email protected]\",\n description=\"Hydra Ray launcher plugin\",\n long_description=(Path(__file__).parent / \"README.md\").read_text(),\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/facebookresearch/hydra/\",\n packages=find_namespace_packages(include=[\"hydra_plugins.*\"]),\n classifiers=[\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n # \"Programming Language :: Python :: 3.9\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX :: Linux\",\n ],\n install_requires=[\n \"boto3==1.17.17\",\n \"hydra-core>=1.1.0.dev7\",\n \"ray[default]==1.6.0\",\n # https://github.com/aio-libs/aiohttp/issues/6203\n \"aiohttp!=3.8.0\",\n \"cloudpickle==1.6.0\",\n \"pickle5==0.0.11\",\n ],\n include_package_data=True,\n)\n", "path": "plugins/hydra_ray_launcher/setup.py"}], "after_files": [{"content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# type: ignore\nfrom pathlib import Path\n\nfrom read_version import read_version\nfrom setuptools import find_namespace_packages, setup\n\nsetup(\n name=\"hydra-ray-launcher\",\n version=read_version(\"hydra_plugins/hydra_ray_launcher\", \"__init__.py\"),\n author=\"Jieru Hu\",\n author_email=\"[email protected]\",\n description=\"Hydra Ray launcher plugin\",\n long_description=(Path(__file__).parent / \"README.md\").read_text(),\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/facebookresearch/hydra/\",\n packages=find_namespace_packages(include=[\"hydra_plugins.*\"]),\n classifiers=[\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n \"Programming Language :: Python :: 3.9\",\n \"Operating System :: MacOS\",\n \"Operating System :: POSIX :: Linux\",\n ],\n install_requires=[\n \"boto3==1.17.17\",\n \"hydra-core>=1.1.0.dev7\",\n \"ray[default]==1.6.0\",\n # https://github.com/aio-libs/aiohttp/issues/6203\n \"aiohttp!=3.8.0\",\n \"cloudpickle==1.6.0\",\n \"pickle5==0.0.11\",\n ],\n include_package_data=True,\n)\n", "path": "plugins/hydra_ray_launcher/setup.py"}]} |
gh_patches_debug_1632 | rasdani/github-patches | git_diff | mitmproxy__mitmproxy-6127 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
`view.flows.add` command does not exist but the examples reference it
#### Problem Description
The `view.flows.add` command does not exist but the example `duplicate-modify-replay.py` shows this command being used.
`replay.client` seems to perform both the "add to view" and "replay" function.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `examples/addons/duplicate-modify-replay.py`
Content:
```
1 """Take incoming HTTP requests and replay them with modified parameters."""
2 from mitmproxy import ctx
3
4
5 def request(flow):
6 # Avoid an infinite loop by not replaying already replayed requests
7 if flow.is_replay == "request":
8 return
9 flow = flow.copy()
10 # Only interactive tools have a view. If we have one, add a duplicate entry
11 # for our flow.
12 if "view" in ctx.master.addons:
13 ctx.master.commands.call("view.flows.add", [flow])
14 flow.request.path = "/changed"
15 ctx.master.commands.call("replay.client", [flow])
16
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/examples/addons/duplicate-modify-replay.py b/examples/addons/duplicate-modify-replay.py
--- a/examples/addons/duplicate-modify-replay.py
+++ b/examples/addons/duplicate-modify-replay.py
@@ -10,6 +10,6 @@
# Only interactive tools have a view. If we have one, add a duplicate entry
# for our flow.
if "view" in ctx.master.addons:
- ctx.master.commands.call("view.flows.add", [flow])
+ ctx.master.commands.call("view.flows.duplicate", [flow])
flow.request.path = "/changed"
ctx.master.commands.call("replay.client", [flow])
| {"golden_diff": "diff --git a/examples/addons/duplicate-modify-replay.py b/examples/addons/duplicate-modify-replay.py\n--- a/examples/addons/duplicate-modify-replay.py\n+++ b/examples/addons/duplicate-modify-replay.py\n@@ -10,6 +10,6 @@\n # Only interactive tools have a view. If we have one, add a duplicate entry\n # for our flow.\n if \"view\" in ctx.master.addons:\n- ctx.master.commands.call(\"view.flows.add\", [flow])\n+ ctx.master.commands.call(\"view.flows.duplicate\", [flow])\n flow.request.path = \"/changed\"\n ctx.master.commands.call(\"replay.client\", [flow])\n", "issue": "`view.flows.add` command does not exist but the examples reference it\n#### Problem Description\r\n\r\nThe `view.flows.add` command does not exist but the example `duplicate-modify-replay.py` shows this command being used.\r\n\r\n`replay.client` seems to perform both the \"add to view\" and \"replay\" function.\n", "before_files": [{"content": "\"\"\"Take incoming HTTP requests and replay them with modified parameters.\"\"\"\nfrom mitmproxy import ctx\n\n\ndef request(flow):\n # Avoid an infinite loop by not replaying already replayed requests\n if flow.is_replay == \"request\":\n return\n flow = flow.copy()\n # Only interactive tools have a view. If we have one, add a duplicate entry\n # for our flow.\n if \"view\" in ctx.master.addons:\n ctx.master.commands.call(\"view.flows.add\", [flow])\n flow.request.path = \"/changed\"\n ctx.master.commands.call(\"replay.client\", [flow])\n", "path": "examples/addons/duplicate-modify-replay.py"}], "after_files": [{"content": "\"\"\"Take incoming HTTP requests and replay them with modified parameters.\"\"\"\nfrom mitmproxy import ctx\n\n\ndef request(flow):\n # Avoid an infinite loop by not replaying already replayed requests\n if flow.is_replay == \"request\":\n return\n flow = flow.copy()\n # Only interactive tools have a view. If we have one, add a duplicate entry\n # for our flow.\n if \"view\" in ctx.master.addons:\n ctx.master.commands.call(\"view.flows.duplicate\", [flow])\n flow.request.path = \"/changed\"\n ctx.master.commands.call(\"replay.client\", [flow])\n", "path": "examples/addons/duplicate-modify-replay.py"}]} |
gh_patches_debug_1633 | rasdani/github-patches | git_diff | pyodide__pyodide-55 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Make work on Chrome
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `tools/buildpkg.py`
Content:
```
1 #!/usr/bin/env python3
2
3 """
4 Builds a Pyodide package.
5 """
6
7 import argparse
8 import hashlib
9 import os
10 import shutil
11 import subprocess
12
13
14 import common
15
16
17 ROOTDIR = os.path.abspath(os.path.dirname(__file__))
18
19
20 def check_checksum(path, pkg):
21 """
22 Checks that a tarball matches the checksum in the package metadata.
23 """
24 if 'md5' not in pkg['source']:
25 return
26 checksum = pkg['source']['md5']
27 CHUNK_SIZE = 1 << 16
28 h = hashlib.md5()
29 with open(path, 'rb') as fd:
30 while True:
31 chunk = fd.read(CHUNK_SIZE)
32 h.update(chunk)
33 if len(chunk) < CHUNK_SIZE:
34 break
35 if h.hexdigest() != checksum:
36 raise ValueError("Invalid checksum")
37
38
39 def download_and_extract(buildpath, packagedir, pkg, args):
40 tarballpath = os.path.join(
41 buildpath, os.path.basename(pkg['source']['url']))
42 if not os.path.isfile(tarballpath):
43 subprocess.run([
44 'wget', '-q', '-O', tarballpath, pkg['source']['url']
45 ], check=True)
46 check_checksum(tarballpath, pkg)
47 srcpath = os.path.join(buildpath, packagedir)
48 if not os.path.isdir(srcpath):
49 shutil.unpack_archive(tarballpath, buildpath)
50 return srcpath
51
52
53 def patch(path, srcpath, pkg, args):
54 if os.path.isfile(os.path.join(srcpath, '.patched')):
55 return
56
57 # Apply all of the patches
58 orig_dir = os.getcwd()
59 pkgdir = os.path.abspath(os.path.dirname(path))
60 os.chdir(srcpath)
61 try:
62 for patch in pkg['source'].get('patches', []):
63 subprocess.run([
64 'patch', '-p1', '--binary', '-i', os.path.join(pkgdir, patch)
65 ], check=True)
66 finally:
67 os.chdir(orig_dir)
68
69 # Add any extra files
70 for src, dst in pkg['source'].get('extras', []):
71 shutil.copyfile(os.path.join(pkgdir, src), os.path.join(srcpath, dst))
72
73 with open(os.path.join(srcpath, '.patched'), 'wb') as fd:
74 fd.write(b'\n')
75
76
77 def get_libdir(srcpath, args):
78 # Get the name of the build/lib.XXX directory that distutils wrote its
79 # output to
80 slug = subprocess.check_output([
81 os.path.join(args.host, 'bin', 'python3'),
82 '-c',
83 'import sysconfig, sys; '
84 'print("{}-{}.{}".format('
85 'sysconfig.get_platform(), '
86 'sys.version_info[0], '
87 'sys.version_info[1]))']).decode('ascii').strip()
88 purelib = os.path.join(srcpath, 'build', 'lib')
89 if os.path.isdir(purelib):
90 libdir = purelib
91 else:
92 libdir = os.path.join(srcpath, 'build', 'lib.' + slug)
93 return libdir
94
95
96 def compile(path, srcpath, pkg, args):
97 if os.path.isfile(os.path.join(srcpath, '.built')):
98 return
99
100 orig_dir = os.getcwd()
101 os.chdir(srcpath)
102 try:
103 subprocess.run([
104 os.path.join(args.host, 'bin', 'python3'),
105 os.path.join(ROOTDIR, 'pywasmcross'),
106 '--cflags',
107 args.cflags + ' ' +
108 pkg.get('build', {}).get('cflags', ''),
109 '--ldflags',
110 args.ldflags + ' ' +
111 pkg.get('build', {}).get('ldflags', ''),
112 '--host', args.host,
113 '--target', args.target], check=True)
114 finally:
115 os.chdir(orig_dir)
116
117 post = pkg.get('build', {}).get('post')
118 if post is not None:
119 libdir = get_libdir(srcpath, args)
120 pkgdir = os.path.abspath(os.path.dirname(path))
121 env = {
122 'BUILD': libdir,
123 'PKGDIR': pkgdir
124 }
125 subprocess.run([
126 'bash', '-c', post], env=env, check=True)
127
128 with open(os.path.join(srcpath, '.built'), 'wb') as fd:
129 fd.write(b'\n')
130
131
132 def package_files(buildpath, srcpath, pkg, args):
133 if os.path.isfile(os.path.join(buildpath, '.packaged')):
134 return
135
136 name = pkg['package']['name']
137 libdir = get_libdir(srcpath, args)
138 subprocess.run([
139 'python2',
140 os.path.join(os.environ['EMSCRIPTEN'], 'tools', 'file_packager.py'),
141 os.path.join(buildpath, name + '.data'),
142 '--preload',
143 '{}@/lib/python3.6/site-packages'.format(libdir),
144 '--js-output={}'.format(os.path.join(buildpath, name + '.js')),
145 '--export-name=pyodide',
146 '--exclude', '*.wasm.pre',
147 '--exclude', '__pycache__'], check=True)
148 subprocess.run([
149 'uglifyjs',
150 os.path.join(buildpath, name + '.js'),
151 '-o',
152 os.path.join(buildpath, name + '.js')], check=True)
153
154 with open(os.path.join(buildpath, '.packaged'), 'wb') as fd:
155 fd.write(b'\n')
156
157
158 def build_package(path, args):
159 pkg = common.parse_package(path)
160 packagedir = pkg['package']['name'] + '-' + pkg['package']['version']
161 dirpath = os.path.dirname(path)
162 orig_path = os.getcwd()
163 os.chdir(dirpath)
164 try:
165 buildpath = os.path.join(dirpath, 'build')
166 if not os.path.exists(buildpath):
167 os.makedirs(buildpath)
168 srcpath = download_and_extract(buildpath, packagedir, pkg, args)
169 patch(path, srcpath, pkg, args)
170 compile(path, srcpath, pkg, args)
171 package_files(buildpath, srcpath, pkg, args)
172 finally:
173 os.chdir(orig_path)
174
175
176 def parse_args():
177 parser = argparse.ArgumentParser('Build a pyodide package.')
178 parser.add_argument(
179 'package', type=str, nargs=1,
180 help="Path to meta.yaml package description")
181 parser.add_argument(
182 '--cflags', type=str, nargs='?', default=common.DEFAULTCFLAGS,
183 help='Extra compiling flags')
184 parser.add_argument(
185 '--ldflags', type=str, nargs='?', default=common.DEFAULTLDFLAGS,
186 help='Extra linking flags')
187 parser.add_argument(
188 '--host', type=str, nargs='?', default=common.HOSTPYTHON,
189 help='The path to the host Python installation')
190 parser.add_argument(
191 '--target', type=str, nargs='?', default=common.TARGETPYTHON,
192 help='The path to the target Python installation')
193 return parser.parse_args()
194
195
196 def main(args):
197 path = os.path.abspath(args.package[0])
198 build_package(path, args)
199
200
201 if __name__ == '__main__':
202 args = parse_args()
203 main(args)
204
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/tools/buildpkg.py b/tools/buildpkg.py
--- a/tools/buildpkg.py
+++ b/tools/buildpkg.py
@@ -144,7 +144,8 @@
'--js-output={}'.format(os.path.join(buildpath, name + '.js')),
'--export-name=pyodide',
'--exclude', '*.wasm.pre',
- '--exclude', '__pycache__'], check=True)
+ '--exclude', '__pycache__',
+ '--use-preload-plugins'], check=True)
subprocess.run([
'uglifyjs',
os.path.join(buildpath, name + '.js'),
| {"golden_diff": "diff --git a/tools/buildpkg.py b/tools/buildpkg.py\n--- a/tools/buildpkg.py\n+++ b/tools/buildpkg.py\n@@ -144,7 +144,8 @@\n '--js-output={}'.format(os.path.join(buildpath, name + '.js')),\n '--export-name=pyodide',\n '--exclude', '*.wasm.pre',\n- '--exclude', '__pycache__'], check=True)\n+ '--exclude', '__pycache__',\n+ '--use-preload-plugins'], check=True)\n subprocess.run([\n 'uglifyjs',\n os.path.join(buildpath, name + '.js'),\n", "issue": "Make work on Chrome\n\nMake work on Chrome\n\n", "before_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"\nBuilds a Pyodide package.\n\"\"\"\n\nimport argparse\nimport hashlib\nimport os\nimport shutil\nimport subprocess\n\n\nimport common\n\n\nROOTDIR = os.path.abspath(os.path.dirname(__file__))\n\n\ndef check_checksum(path, pkg):\n \"\"\"\n Checks that a tarball matches the checksum in the package metadata.\n \"\"\"\n if 'md5' not in pkg['source']:\n return\n checksum = pkg['source']['md5']\n CHUNK_SIZE = 1 << 16\n h = hashlib.md5()\n with open(path, 'rb') as fd:\n while True:\n chunk = fd.read(CHUNK_SIZE)\n h.update(chunk)\n if len(chunk) < CHUNK_SIZE:\n break\n if h.hexdigest() != checksum:\n raise ValueError(\"Invalid checksum\")\n\n\ndef download_and_extract(buildpath, packagedir, pkg, args):\n tarballpath = os.path.join(\n buildpath, os.path.basename(pkg['source']['url']))\n if not os.path.isfile(tarballpath):\n subprocess.run([\n 'wget', '-q', '-O', tarballpath, pkg['source']['url']\n ], check=True)\n check_checksum(tarballpath, pkg)\n srcpath = os.path.join(buildpath, packagedir)\n if not os.path.isdir(srcpath):\n shutil.unpack_archive(tarballpath, buildpath)\n return srcpath\n\n\ndef patch(path, srcpath, pkg, args):\n if os.path.isfile(os.path.join(srcpath, '.patched')):\n return\n\n # Apply all of the patches\n orig_dir = os.getcwd()\n pkgdir = os.path.abspath(os.path.dirname(path))\n os.chdir(srcpath)\n try:\n for patch in pkg['source'].get('patches', []):\n subprocess.run([\n 'patch', '-p1', '--binary', '-i', os.path.join(pkgdir, patch)\n ], check=True)\n finally:\n os.chdir(orig_dir)\n\n # Add any extra files\n for src, dst in pkg['source'].get('extras', []):\n shutil.copyfile(os.path.join(pkgdir, src), os.path.join(srcpath, dst))\n\n with open(os.path.join(srcpath, '.patched'), 'wb') as fd:\n fd.write(b'\\n')\n\n\ndef get_libdir(srcpath, args):\n # Get the name of the build/lib.XXX directory that distutils wrote its\n # output to\n slug = subprocess.check_output([\n os.path.join(args.host, 'bin', 'python3'),\n '-c',\n 'import sysconfig, sys; '\n 'print(\"{}-{}.{}\".format('\n 'sysconfig.get_platform(), '\n 'sys.version_info[0], '\n 'sys.version_info[1]))']).decode('ascii').strip()\n purelib = os.path.join(srcpath, 'build', 'lib')\n if os.path.isdir(purelib):\n libdir = purelib\n else:\n libdir = os.path.join(srcpath, 'build', 'lib.' 
+ slug)\n return libdir\n\n\ndef compile(path, srcpath, pkg, args):\n if os.path.isfile(os.path.join(srcpath, '.built')):\n return\n\n orig_dir = os.getcwd()\n os.chdir(srcpath)\n try:\n subprocess.run([\n os.path.join(args.host, 'bin', 'python3'),\n os.path.join(ROOTDIR, 'pywasmcross'),\n '--cflags',\n args.cflags + ' ' +\n pkg.get('build', {}).get('cflags', ''),\n '--ldflags',\n args.ldflags + ' ' +\n pkg.get('build', {}).get('ldflags', ''),\n '--host', args.host,\n '--target', args.target], check=True)\n finally:\n os.chdir(orig_dir)\n\n post = pkg.get('build', {}).get('post')\n if post is not None:\n libdir = get_libdir(srcpath, args)\n pkgdir = os.path.abspath(os.path.dirname(path))\n env = {\n 'BUILD': libdir,\n 'PKGDIR': pkgdir\n }\n subprocess.run([\n 'bash', '-c', post], env=env, check=True)\n\n with open(os.path.join(srcpath, '.built'), 'wb') as fd:\n fd.write(b'\\n')\n\n\ndef package_files(buildpath, srcpath, pkg, args):\n if os.path.isfile(os.path.join(buildpath, '.packaged')):\n return\n\n name = pkg['package']['name']\n libdir = get_libdir(srcpath, args)\n subprocess.run([\n 'python2',\n os.path.join(os.environ['EMSCRIPTEN'], 'tools', 'file_packager.py'),\n os.path.join(buildpath, name + '.data'),\n '--preload',\n '{}@/lib/python3.6/site-packages'.format(libdir),\n '--js-output={}'.format(os.path.join(buildpath, name + '.js')),\n '--export-name=pyodide',\n '--exclude', '*.wasm.pre',\n '--exclude', '__pycache__'], check=True)\n subprocess.run([\n 'uglifyjs',\n os.path.join(buildpath, name + '.js'),\n '-o',\n os.path.join(buildpath, name + '.js')], check=True)\n\n with open(os.path.join(buildpath, '.packaged'), 'wb') as fd:\n fd.write(b'\\n')\n\n\ndef build_package(path, args):\n pkg = common.parse_package(path)\n packagedir = pkg['package']['name'] + '-' + pkg['package']['version']\n dirpath = os.path.dirname(path)\n orig_path = os.getcwd()\n os.chdir(dirpath)\n try:\n buildpath = os.path.join(dirpath, 'build')\n if not os.path.exists(buildpath):\n os.makedirs(buildpath)\n srcpath = download_and_extract(buildpath, packagedir, pkg, args)\n patch(path, srcpath, pkg, args)\n compile(path, srcpath, pkg, args)\n package_files(buildpath, srcpath, pkg, args)\n finally:\n os.chdir(orig_path)\n\n\ndef parse_args():\n parser = argparse.ArgumentParser('Build a pyodide package.')\n parser.add_argument(\n 'package', type=str, nargs=1,\n help=\"Path to meta.yaml package description\")\n parser.add_argument(\n '--cflags', type=str, nargs='?', default=common.DEFAULTCFLAGS,\n help='Extra compiling flags')\n parser.add_argument(\n '--ldflags', type=str, nargs='?', default=common.DEFAULTLDFLAGS,\n help='Extra linking flags')\n parser.add_argument(\n '--host', type=str, nargs='?', default=common.HOSTPYTHON,\n help='The path to the host Python installation')\n parser.add_argument(\n '--target', type=str, nargs='?', default=common.TARGETPYTHON,\n help='The path to the target Python installation')\n return parser.parse_args()\n\n\ndef main(args):\n path = os.path.abspath(args.package[0])\n build_package(path, args)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)\n", "path": "tools/buildpkg.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n\n\"\"\"\nBuilds a Pyodide package.\n\"\"\"\n\nimport argparse\nimport hashlib\nimport os\nimport shutil\nimport subprocess\n\n\nimport common\n\n\nROOTDIR = os.path.abspath(os.path.dirname(__file__))\n\n\ndef check_checksum(path, pkg):\n \"\"\"\n Checks that a tarball matches the checksum in the package metadata.\n \"\"\"\n if 'md5' 
not in pkg['source']:\n return\n checksum = pkg['source']['md5']\n CHUNK_SIZE = 1 << 16\n h = hashlib.md5()\n with open(path, 'rb') as fd:\n while True:\n chunk = fd.read(CHUNK_SIZE)\n h.update(chunk)\n if len(chunk) < CHUNK_SIZE:\n break\n if h.hexdigest() != checksum:\n raise ValueError(\"Invalid checksum\")\n\n\ndef download_and_extract(buildpath, packagedir, pkg, args):\n tarballpath = os.path.join(\n buildpath, os.path.basename(pkg['source']['url']))\n if not os.path.isfile(tarballpath):\n subprocess.run([\n 'wget', '-q', '-O', tarballpath, pkg['source']['url']\n ], check=True)\n check_checksum(tarballpath, pkg)\n srcpath = os.path.join(buildpath, packagedir)\n if not os.path.isdir(srcpath):\n shutil.unpack_archive(tarballpath, buildpath)\n return srcpath\n\n\ndef patch(path, srcpath, pkg, args):\n if os.path.isfile(os.path.join(srcpath, '.patched')):\n return\n\n # Apply all of the patches\n orig_dir = os.getcwd()\n pkgdir = os.path.abspath(os.path.dirname(path))\n os.chdir(srcpath)\n try:\n for patch in pkg['source'].get('patches', []):\n subprocess.run([\n 'patch', '-p1', '--binary', '-i', os.path.join(pkgdir, patch)\n ], check=True)\n finally:\n os.chdir(orig_dir)\n\n # Add any extra files\n for src, dst in pkg['source'].get('extras', []):\n shutil.copyfile(os.path.join(pkgdir, src), os.path.join(srcpath, dst))\n\n with open(os.path.join(srcpath, '.patched'), 'wb') as fd:\n fd.write(b'\\n')\n\n\ndef get_libdir(srcpath, args):\n # Get the name of the build/lib.XXX directory that distutils wrote its\n # output to\n slug = subprocess.check_output([\n os.path.join(args.host, 'bin', 'python3'),\n '-c',\n 'import sysconfig, sys; '\n 'print(\"{}-{}.{}\".format('\n 'sysconfig.get_platform(), '\n 'sys.version_info[0], '\n 'sys.version_info[1]))']).decode('ascii').strip()\n purelib = os.path.join(srcpath, 'build', 'lib')\n if os.path.isdir(purelib):\n libdir = purelib\n else:\n libdir = os.path.join(srcpath, 'build', 'lib.' 
+ slug)\n return libdir\n\n\ndef compile(path, srcpath, pkg, args):\n if os.path.isfile(os.path.join(srcpath, '.built')):\n return\n\n orig_dir = os.getcwd()\n os.chdir(srcpath)\n try:\n subprocess.run([\n os.path.join(args.host, 'bin', 'python3'),\n os.path.join(ROOTDIR, 'pywasmcross'),\n '--cflags',\n args.cflags + ' ' +\n pkg.get('build', {}).get('cflags', ''),\n '--ldflags',\n args.ldflags + ' ' +\n pkg.get('build', {}).get('ldflags', ''),\n '--host', args.host,\n '--target', args.target], check=True)\n finally:\n os.chdir(orig_dir)\n\n post = pkg.get('build', {}).get('post')\n if post is not None:\n libdir = get_libdir(srcpath, args)\n pkgdir = os.path.abspath(os.path.dirname(path))\n env = {\n 'BUILD': libdir,\n 'PKGDIR': pkgdir\n }\n subprocess.run([\n 'bash', '-c', post], env=env, check=True)\n\n with open(os.path.join(srcpath, '.built'), 'wb') as fd:\n fd.write(b'\\n')\n\n\ndef package_files(buildpath, srcpath, pkg, args):\n if os.path.isfile(os.path.join(buildpath, '.packaged')):\n return\n\n name = pkg['package']['name']\n libdir = get_libdir(srcpath, args)\n subprocess.run([\n 'python2',\n os.path.join(os.environ['EMSCRIPTEN'], 'tools', 'file_packager.py'),\n os.path.join(buildpath, name + '.data'),\n '--preload',\n '{}@/lib/python3.6/site-packages'.format(libdir),\n '--js-output={}'.format(os.path.join(buildpath, name + '.js')),\n '--export-name=pyodide',\n '--exclude', '*.wasm.pre',\n '--exclude', '__pycache__',\n '--use-preload-plugins'], check=True)\n subprocess.run([\n 'uglifyjs',\n os.path.join(buildpath, name + '.js'),\n '-o',\n os.path.join(buildpath, name + '.js')], check=True)\n\n with open(os.path.join(buildpath, '.packaged'), 'wb') as fd:\n fd.write(b'\\n')\n\n\ndef build_package(path, args):\n pkg = common.parse_package(path)\n packagedir = pkg['package']['name'] + '-' + pkg['package']['version']\n dirpath = os.path.dirname(path)\n orig_path = os.getcwd()\n os.chdir(dirpath)\n try:\n buildpath = os.path.join(dirpath, 'build')\n if not os.path.exists(buildpath):\n os.makedirs(buildpath)\n srcpath = download_and_extract(buildpath, packagedir, pkg, args)\n patch(path, srcpath, pkg, args)\n compile(path, srcpath, pkg, args)\n package_files(buildpath, srcpath, pkg, args)\n finally:\n os.chdir(orig_path)\n\n\ndef parse_args():\n parser = argparse.ArgumentParser('Build a pyodide package.')\n parser.add_argument(\n 'package', type=str, nargs=1,\n help=\"Path to meta.yaml package description\")\n parser.add_argument(\n '--cflags', type=str, nargs='?', default=common.DEFAULTCFLAGS,\n help='Extra compiling flags')\n parser.add_argument(\n '--ldflags', type=str, nargs='?', default=common.DEFAULTLDFLAGS,\n help='Extra linking flags')\n parser.add_argument(\n '--host', type=str, nargs='?', default=common.HOSTPYTHON,\n help='The path to the host Python installation')\n parser.add_argument(\n '--target', type=str, nargs='?', default=common.TARGETPYTHON,\n help='The path to the target Python installation')\n return parser.parse_args()\n\n\ndef main(args):\n path = os.path.abspath(args.package[0])\n build_package(path, args)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)\n", "path": "tools/buildpkg.py"}]} |
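Editor's note: the golden diff above passes Emscripten's `--use-preload-plugins` flag to `file_packager.py`, which is what made the generated packages load in Chrome. A hedged sketch of the patched invocation, factored into a hypothetical helper (the function name is invented; the arguments mirror the record):

```python
import os


def packager_cmd(buildpath: str, name: str, libdir: str) -> list:
    """Build the file_packager.py argument list as patched (hypothetical helper)."""
    return [
        "python2",
        os.path.join(os.environ["EMSCRIPTEN"], "tools", "file_packager.py"),
        os.path.join(buildpath, name + ".data"),
        "--preload", "{}@/lib/python3.6/site-packages".format(libdir),
        "--js-output={}".format(os.path.join(buildpath, name + ".js")),
        "--export-name=pyodide",
        "--exclude", "*.wasm.pre",
        "--exclude", "__pycache__",
        "--use-preload-plugins",  # the flag the golden diff adds for Chrome support
    ]
```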
gh_patches_debug_1634 | rasdani/github-patches | git_diff | pallets__click-1587 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
click.prompt(type=int, hide_input=True) outputs rejected input
Despite asking `click.prompt()` to hide input, it will still output the input if it rejects it. I get the same behavior for `7.0` from PyPI, `7.x` from Git, and `master` from Git.
```bash
altendky@p1:~$ venv/bin/python -c 'import click; click.prompt("prompt", type=int, hide_input=True)'
prompt:
Error: wait... i get to see this? is not a valid integer
prompt:
```
If you specify a type (let's say you are inputting a numeric pin) then the same can happen with `click.password_option()`.
```python3
import click
@click.command()
@click.password_option(type=int)
def cli(password):
pass
cli()
```
```bash
altendky@p1:~$ venv/bin/python x.py
Password:
Error: lkjsaf is not a valid integer
Password:
```
The workaround for `click.prompt()`, I guess, is to not specify a type and to implement the rejection manually.
If there is agreement that this is an issue that should be fixed then I can try to put together a PR for it.
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/click/termui.py`
Content:
```
1 import inspect
2 import io
3 import itertools
4 import os
5 import struct
6 import sys
7
8 from ._compat import DEFAULT_COLUMNS
9 from ._compat import get_winterm_size
10 from ._compat import isatty
11 from ._compat import strip_ansi
12 from ._compat import WIN
13 from .exceptions import Abort
14 from .exceptions import UsageError
15 from .globals import resolve_color_default
16 from .types import Choice
17 from .types import convert_type
18 from .types import Path
19 from .utils import echo
20 from .utils import LazyFile
21
22 # The prompt functions to use. The doc tools currently override these
23 # functions to customize how they work.
24 visible_prompt_func = input
25
26 _ansi_colors = {
27 "black": 30,
28 "red": 31,
29 "green": 32,
30 "yellow": 33,
31 "blue": 34,
32 "magenta": 35,
33 "cyan": 36,
34 "white": 37,
35 "reset": 39,
36 "bright_black": 90,
37 "bright_red": 91,
38 "bright_green": 92,
39 "bright_yellow": 93,
40 "bright_blue": 94,
41 "bright_magenta": 95,
42 "bright_cyan": 96,
43 "bright_white": 97,
44 }
45 _ansi_reset_all = "\033[0m"
46
47
48 def hidden_prompt_func(prompt):
49 import getpass
50
51 return getpass.getpass(prompt)
52
53
54 def _build_prompt(
55 text, suffix, show_default=False, default=None, show_choices=True, type=None
56 ):
57 prompt = text
58 if type is not None and show_choices and isinstance(type, Choice):
59 prompt += f" ({', '.join(map(str, type.choices))})"
60 if default is not None and show_default:
61 prompt = f"{prompt} [{_format_default(default)}]"
62 return f"{prompt}{suffix}"
63
64
65 def _format_default(default):
66 if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"):
67 return default.name
68
69 return default
70
71
72 def prompt(
73 text,
74 default=None,
75 hide_input=False,
76 confirmation_prompt=False,
77 type=None,
78 value_proc=None,
79 prompt_suffix=": ",
80 show_default=True,
81 err=False,
82 show_choices=True,
83 ):
84 """Prompts a user for input. This is a convenience function that can
85 be used to prompt a user for input later.
86
87 If the user aborts the input by sending a interrupt signal, this
88 function will catch it and raise a :exc:`Abort` exception.
89
90 .. versionadded:: 7.0
91 Added the show_choices parameter.
92
93 .. versionadded:: 6.0
94 Added unicode support for cmd.exe on Windows.
95
96 .. versionadded:: 4.0
97 Added the `err` parameter.
98
99 :param text: the text to show for the prompt.
100 :param default: the default value to use if no input happens. If this
101 is not given it will prompt until it's aborted.
102 :param hide_input: if this is set to true then the input value will
103 be hidden.
104 :param confirmation_prompt: asks for confirmation for the value.
105 :param type: the type to use to check the value against.
106 :param value_proc: if this parameter is provided it's a function that
107 is invoked instead of the type conversion to
108 convert a value.
109 :param prompt_suffix: a suffix that should be added to the prompt.
110 :param show_default: shows or hides the default value in the prompt.
111 :param err: if set to true the file defaults to ``stderr`` instead of
112 ``stdout``, the same as with echo.
113 :param show_choices: Show or hide choices if the passed type is a Choice.
114 For example if type is a Choice of either day or week,
115 show_choices is true and text is "Group by" then the
116 prompt will be "Group by (day, week): ".
117 """
118 result = None
119
120 def prompt_func(text):
121 f = hidden_prompt_func if hide_input else visible_prompt_func
122 try:
123 # Write the prompt separately so that we get nice
124 # coloring through colorama on Windows
125 echo(text, nl=False, err=err)
126 return f("")
127 except (KeyboardInterrupt, EOFError):
128 # getpass doesn't print a newline if the user aborts input with ^C.
129 # Allegedly this behavior is inherited from getpass(3).
130 # A doc bug has been filed at https://bugs.python.org/issue24711
131 if hide_input:
132 echo(None, err=err)
133 raise Abort()
134
135 if value_proc is None:
136 value_proc = convert_type(type, default)
137
138 prompt = _build_prompt(
139 text, prompt_suffix, show_default, default, show_choices, type
140 )
141
142 while 1:
143 while 1:
144 value = prompt_func(prompt)
145 if value:
146 break
147 elif default is not None:
148 if isinstance(value_proc, Path):
149 # validate Path default value(exists, dir_okay etc.)
150 value = default
151 break
152 return default
153 try:
154 result = value_proc(value)
155 except UsageError as e:
156 echo(f"Error: {e.message}", err=err) # noqa: B306
157 continue
158 if not confirmation_prompt:
159 return result
160 while 1:
161 value2 = prompt_func("Repeat for confirmation: ")
162 if value2:
163 break
164 if value == value2:
165 return result
166 echo("Error: the two entered values do not match", err=err)
167
168
169 def confirm(
170 text, default=False, abort=False, prompt_suffix=": ", show_default=True, err=False
171 ):
172 """Prompts for confirmation (yes/no question).
173
174 If the user aborts the input by sending a interrupt signal this
175 function will catch it and raise a :exc:`Abort` exception.
176
177 .. versionadded:: 4.0
178 Added the `err` parameter.
179
180 :param text: the question to ask.
181 :param default: the default for the prompt.
182 :param abort: if this is set to `True` a negative answer aborts the
183 exception by raising :exc:`Abort`.
184 :param prompt_suffix: a suffix that should be added to the prompt.
185 :param show_default: shows or hides the default value in the prompt.
186 :param err: if set to true the file defaults to ``stderr`` instead of
187 ``stdout``, the same as with echo.
188 """
189 prompt = _build_prompt(
190 text, prompt_suffix, show_default, "Y/n" if default else "y/N"
191 )
192 while 1:
193 try:
194 # Write the prompt separately so that we get nice
195 # coloring through colorama on Windows
196 echo(prompt, nl=False, err=err)
197 value = visible_prompt_func("").lower().strip()
198 except (KeyboardInterrupt, EOFError):
199 raise Abort()
200 if value in ("y", "yes"):
201 rv = True
202 elif value in ("n", "no"):
203 rv = False
204 elif value == "":
205 rv = default
206 else:
207 echo("Error: invalid input", err=err)
208 continue
209 break
210 if abort and not rv:
211 raise Abort()
212 return rv
213
214
215 def get_terminal_size():
216 """Returns the current size of the terminal as tuple in the form
217 ``(width, height)`` in columns and rows.
218 """
219 import shutil
220
221 if hasattr(shutil, "get_terminal_size"):
222 return shutil.get_terminal_size()
223
224 # We provide a sensible default for get_winterm_size() when being invoked
225 # inside a subprocess. Without this, it would not provide a useful input.
226 if get_winterm_size is not None:
227 size = get_winterm_size()
228 if size == (0, 0):
229 return (79, 24)
230 else:
231 return size
232
233 def ioctl_gwinsz(fd):
234 try:
235 import fcntl
236 import termios
237
238 cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
239 except Exception:
240 return
241 return cr
242
243 cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
244 if not cr:
245 try:
246 fd = os.open(os.ctermid(), os.O_RDONLY)
247 try:
248 cr = ioctl_gwinsz(fd)
249 finally:
250 os.close(fd)
251 except Exception:
252 pass
253 if not cr or not cr[0] or not cr[1]:
254 cr = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", DEFAULT_COLUMNS))
255 return int(cr[1]), int(cr[0])
256
257
258 def echo_via_pager(text_or_generator, color=None):
259 """This function takes a text and shows it via an environment specific
260 pager on stdout.
261
262 .. versionchanged:: 3.0
263 Added the `color` flag.
264
265 :param text_or_generator: the text to page, or alternatively, a
266 generator emitting the text to page.
267 :param color: controls if the pager supports ANSI colors or not. The
268 default is autodetection.
269 """
270 color = resolve_color_default(color)
271
272 if inspect.isgeneratorfunction(text_or_generator):
273 i = text_or_generator()
274 elif isinstance(text_or_generator, str):
275 i = [text_or_generator]
276 else:
277 i = iter(text_or_generator)
278
279 # convert every element of i to a text type if necessary
280 text_generator = (el if isinstance(el, str) else str(el) for el in i)
281
282 from ._termui_impl import pager
283
284 return pager(itertools.chain(text_generator, "\n"), color)
285
286
287 def progressbar(
288 iterable=None,
289 length=None,
290 label=None,
291 show_eta=True,
292 show_percent=None,
293 show_pos=False,
294 item_show_func=None,
295 fill_char="#",
296 empty_char="-",
297 bar_template="%(label)s [%(bar)s] %(info)s",
298 info_sep=" ",
299 width=36,
300 file=None,
301 color=None,
302 ):
303 """This function creates an iterable context manager that can be used
304 to iterate over something while showing a progress bar. It will
305 either iterate over the `iterable` or `length` items (that are counted
306 up). While iteration happens, this function will print a rendered
307 progress bar to the given `file` (defaults to stdout) and will attempt
308 to calculate remaining time and more. By default, this progress bar
309 will not be rendered if the file is not a terminal.
310
311 The context manager creates the progress bar. When the context
312 manager is entered the progress bar is already created. With every
313 iteration over the progress bar, the iterable passed to the bar is
314 advanced and the bar is updated. When the context manager exits,
315 a newline is printed and the progress bar is finalized on screen.
316
317 Note: The progress bar is currently designed for use cases where the
318 total progress can be expected to take at least several seconds.
319 Because of this, the ProgressBar class object won't display
320 progress that is considered too fast, and progress where the time
321 between steps is less than a second.
322
323 No printing must happen or the progress bar will be unintentionally
324 destroyed.
325
326 Example usage::
327
328 with progressbar(items) as bar:
329 for item in bar:
330 do_something_with(item)
331
332 Alternatively, if no iterable is specified, one can manually update the
333 progress bar through the `update()` method instead of directly
334 iterating over the progress bar. The update method accepts the number
335 of steps to increment the bar with::
336
337 with progressbar(length=chunks.total_bytes) as bar:
338 for chunk in chunks:
339 process_chunk(chunk)
340 bar.update(chunks.bytes)
341
342 The ``update()`` method also takes an optional value specifying the
343 ``current_item`` at the new position. This is useful when used
344 together with ``item_show_func`` to customize the output for each
345 manual step::
346
347 with click.progressbar(
348 length=total_size,
349 label='Unzipping archive',
350 item_show_func=lambda a: a.filename
351 ) as bar:
352 for archive in zip_file:
353 archive.extract()
354 bar.update(archive.size, archive)
355
356 .. versionadded:: 2.0
357
358 .. versionadded:: 4.0
359 Added the `color` parameter. Added a `update` method to the
360 progressbar object.
361
362 :param iterable: an iterable to iterate over. If not provided the length
363 is required.
364 :param length: the number of items to iterate over. By default the
365 progressbar will attempt to ask the iterator about its
366 length, which might or might not work. If an iterable is
367 also provided this parameter can be used to override the
368 length. If an iterable is not provided the progress bar
369 will iterate over a range of that length.
370 :param label: the label to show next to the progress bar.
371 :param show_eta: enables or disables the estimated time display. This is
372 automatically disabled if the length cannot be
373 determined.
374 :param show_percent: enables or disables the percentage display. The
375 default is `True` if the iterable has a length or
376 `False` if not.
377 :param show_pos: enables or disables the absolute position display. The
378 default is `False`.
379 :param item_show_func: a function called with the current item which
380 can return a string to show the current item
381 next to the progress bar. Note that the current
382 item can be `None`!
383 :param fill_char: the character to use to show the filled part of the
384 progress bar.
385 :param empty_char: the character to use to show the non-filled part of
386 the progress bar.
387 :param bar_template: the format string to use as template for the bar.
388 The parameters in it are ``label`` for the label,
389 ``bar`` for the progress bar and ``info`` for the
390 info section.
391 :param info_sep: the separator between multiple info items (eta etc.)
392 :param width: the width of the progress bar in characters, 0 means full
393 terminal width
394 :param file: the file to write to. If this is not a terminal then
395 only the label is printed.
396 :param color: controls if the terminal supports ANSI colors or not. The
397 default is autodetection. This is only needed if ANSI
398 codes are included anywhere in the progress bar output
399 which is not the case by default.
400 """
401 from ._termui_impl import ProgressBar
402
403 color = resolve_color_default(color)
404 return ProgressBar(
405 iterable=iterable,
406 length=length,
407 show_eta=show_eta,
408 show_percent=show_percent,
409 show_pos=show_pos,
410 item_show_func=item_show_func,
411 fill_char=fill_char,
412 empty_char=empty_char,
413 bar_template=bar_template,
414 info_sep=info_sep,
415 file=file,
416 label=label,
417 width=width,
418 color=color,
419 )
420
421
422 def clear():
423 """Clears the terminal screen. This will have the effect of clearing
424 the whole visible space of the terminal and moving the cursor to the
425 top left. This does not do anything if not connected to a terminal.
426
427 .. versionadded:: 2.0
428 """
429 if not isatty(sys.stdout):
430 return
431 # If we're on Windows and we don't have colorama available, then we
432 # clear the screen by shelling out. Otherwise we can use an escape
433 # sequence.
434 if WIN:
435 os.system("cls")
436 else:
437 sys.stdout.write("\033[2J\033[1;1H")
438
439
440 def style(
441 text,
442 fg=None,
443 bg=None,
444 bold=None,
445 dim=None,
446 underline=None,
447 blink=None,
448 reverse=None,
449 reset=True,
450 ):
451 """Styles a text with ANSI styles and returns the new string. By
452 default the styling is self contained which means that at the end
453 of the string a reset code is issued. This can be prevented by
454 passing ``reset=False``.
455
456 Examples::
457
458 click.echo(click.style('Hello World!', fg='green'))
459 click.echo(click.style('ATTENTION!', blink=True))
460 click.echo(click.style('Some things', reverse=True, fg='cyan'))
461
462 Supported color names:
463
464 * ``black`` (might be a gray)
465 * ``red``
466 * ``green``
467 * ``yellow`` (might be an orange)
468 * ``blue``
469 * ``magenta``
470 * ``cyan``
471 * ``white`` (might be light gray)
472 * ``bright_black``
473 * ``bright_red``
474 * ``bright_green``
475 * ``bright_yellow``
476 * ``bright_blue``
477 * ``bright_magenta``
478 * ``bright_cyan``
479 * ``bright_white``
480 * ``reset`` (reset the color code only)
481
482 .. versionadded:: 2.0
483
484 .. versionadded:: 7.0
485 Added support for bright colors.
486
487 :param text: the string to style with ansi codes.
488 :param fg: if provided this will become the foreground color.
489 :param bg: if provided this will become the background color.
490 :param bold: if provided this will enable or disable bold mode.
491 :param dim: if provided this will enable or disable dim mode. This is
492 badly supported.
493 :param underline: if provided this will enable or disable underline.
494 :param blink: if provided this will enable or disable blinking.
495 :param reverse: if provided this will enable or disable inverse
496 rendering (foreground becomes background and the
497 other way round).
498 :param reset: by default a reset-all code is added at the end of the
499 string which means that styles do not carry over. This
500 can be disabled to compose styles.
501 """
502 bits = []
503 if fg:
504 try:
505 bits.append(f"\033[{_ansi_colors[fg]}m")
506 except KeyError:
507 raise TypeError(f"Unknown color {fg!r}")
508 if bg:
509 try:
510 bits.append(f"\033[{_ansi_colors[bg] + 10}m")
511 except KeyError:
512 raise TypeError(f"Unknown color {bg!r}")
513 if bold is not None:
514 bits.append(f"\033[{1 if bold else 22}m")
515 if dim is not None:
516 bits.append(f"\033[{2 if dim else 22}m")
517 if underline is not None:
518 bits.append(f"\033[{4 if underline else 24}m")
519 if blink is not None:
520 bits.append(f"\033[{5 if blink else 25}m")
521 if reverse is not None:
522 bits.append(f"\033[{7 if reverse else 27}m")
523 bits.append(text)
524 if reset:
525 bits.append(_ansi_reset_all)
526 return "".join(bits)
527
528
529 def unstyle(text):
530 """Removes ANSI styling information from a string. Usually it's not
531 necessary to use this function as Click's echo function will
532 automatically remove styling if necessary.
533
534 .. versionadded:: 2.0
535
536 :param text: the text to remove style information from.
537 """
538 return strip_ansi(text)
539
540
541 def secho(message=None, file=None, nl=True, err=False, color=None, **styles):
542 """This function combines :func:`echo` and :func:`style` into one
543 call. As such the following two calls are the same::
544
545 click.secho('Hello World!', fg='green')
546 click.echo(click.style('Hello World!', fg='green'))
547
548 All keyword arguments are forwarded to the underlying functions
549 depending on which one they go with.
550
551 .. versionadded:: 2.0
552 """
553 if message is not None:
554 message = style(message, **styles)
555 return echo(message, file=file, nl=nl, err=err, color=color)
556
557
558 def edit(
559 text=None, editor=None, env=None, require_save=True, extension=".txt", filename=None
560 ):
561 r"""Edits the given text in the defined editor. If an editor is given
562 (should be the full path to the executable but the regular operating
563 system search path is used for finding the executable) it overrides
564 the detected editor. Optionally, some environment variables can be
565 used. If the editor is closed without changes, `None` is returned. In
566 case a file is edited directly the return value is always `None` and
567 `require_save` and `extension` are ignored.
568
569 If the editor cannot be opened a :exc:`UsageError` is raised.
570
571 Note for Windows: to simplify cross-platform usage, the newlines are
572 automatically converted from POSIX to Windows and vice versa. As such,
573 the message here will have ``\n`` as newline markers.
574
575 :param text: the text to edit.
576 :param editor: optionally the editor to use. Defaults to automatic
577 detection.
578 :param env: environment variables to forward to the editor.
579 :param require_save: if this is true, then not saving in the editor
580 will make the return value become `None`.
581 :param extension: the extension to tell the editor about. This defaults
582 to `.txt` but changing this might change syntax
583 highlighting.
584 :param filename: if provided it will edit this file instead of the
585 provided text contents. It will not use a temporary
586 file as an indirection in that case.
587 """
588 from ._termui_impl import Editor
589
590 editor = Editor(
591 editor=editor, env=env, require_save=require_save, extension=extension
592 )
593 if filename is None:
594 return editor.edit(text)
595 editor.edit_file(filename)
596
597
598 def launch(url, wait=False, locate=False):
599 """This function launches the given URL (or filename) in the default
600 viewer application for this file type. If this is an executable, it
601 might launch the executable in a new session. The return value is
602 the exit code of the launched application. Usually, ``0`` indicates
603 success.
604
605 Examples::
606
607 click.launch('https://click.palletsprojects.com/')
608 click.launch('/my/downloaded/file', locate=True)
609
610 .. versionadded:: 2.0
611
612 :param url: URL or filename of the thing to launch.
613 :param wait: waits for the program to stop.
614 :param locate: if this is set to `True` then instead of launching the
615 application associated with the URL it will attempt to
616 launch a file manager with the file located. This
617 might have weird effects if the URL does not point to
618 the filesystem.
619 """
620 from ._termui_impl import open_url
621
622 return open_url(url, wait=wait, locate=locate)
623
624
625 # If this is provided, getchar() calls into this instead. This is used
626 # for unittesting purposes.
627 _getchar = None
628
629
630 def getchar(echo=False):
631 """Fetches a single character from the terminal and returns it. This
632 will always return a unicode character and under certain rare
633 circumstances this might return more than one character. The
634 situations which more than one character is returned is when for
635 whatever reason multiple characters end up in the terminal buffer or
636 standard input was not actually a terminal.
637
638 Note that this will always read from the terminal, even if something
639 is piped into the standard input.
640
641 Note for Windows: in rare cases when typing non-ASCII characters, this
642 function might wait for a second character and then return both at once.
643 This is because certain Unicode characters look like special-key markers.
644
645 .. versionadded:: 2.0
646
647 :param echo: if set to `True`, the character read will also show up on
648 the terminal. The default is to not show it.
649 """
650 f = _getchar
651 if f is None:
652 from ._termui_impl import getchar as f
653 return f(echo)
654
655
656 def raw_terminal():
657 from ._termui_impl import raw_terminal as f
658
659 return f()
660
661
662 def pause(info="Press any key to continue ...", err=False):
663 """This command stops execution and waits for the user to press any
664 key to continue. This is similar to the Windows batch "pause"
665 command. If the program is not run through a terminal, this command
666 will instead do nothing.
667
668 .. versionadded:: 2.0
669
670 .. versionadded:: 4.0
671 Added the `err` parameter.
672
673 :param info: the info string to print before pausing.
674 :param err: if set to message goes to ``stderr`` instead of
675 ``stdout``, the same as with echo.
676 """
677 if not isatty(sys.stdin) or not isatty(sys.stdout):
678 return
679 try:
680 if info:
681 echo(info, nl=False, err=err)
682 try:
683 getchar()
684 except (KeyboardInterrupt, EOFError):
685 pass
686 finally:
687 if info:
688 echo(err=err)
689
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/src/click/termui.py b/src/click/termui.py
--- a/src/click/termui.py
+++ b/src/click/termui.py
@@ -153,7 +153,10 @@
try:
result = value_proc(value)
except UsageError as e:
- echo(f"Error: {e.message}", err=err) # noqa: B306
+ if hide_input:
+ echo("Error: the value you entered was invalid", err=err)
+ else:
+ echo(f"Error: {e.message}", err=err) # noqa: B306
continue
if not confirmation_prompt:
return result
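Editor's note: the patch suppresses the rejected raw value whenever `hide_input` is set, so hidden input (e.g., a numeric PIN) is never echoed back on a type-conversion failure. A minimal hedged sketch of that branch as a standalone helper — the function is hypothetical, but the two messages mirror the golden diff:

```python
from click import echo


def report_invalid(message: str, hide_input: bool, err: bool = False) -> None:
    """Echo a validation error without leaking hidden input (hypothetical helper)."""
    if hide_input:
        # Do not reveal what the user typed.
        echo("Error: the value you entered was invalid", err=err)
    else:
        echo(f"Error: {message}", err=err)
```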
| {"golden_diff": "diff --git a/src/click/termui.py b/src/click/termui.py\n--- a/src/click/termui.py\n+++ b/src/click/termui.py\n@@ -153,7 +153,10 @@\n try:\n result = value_proc(value)\n except UsageError as e:\n- echo(f\"Error: {e.message}\", err=err) # noqa: B306\n+ if hide_input:\n+ echo(\"Error: the value you entered was invalid\", err=err)\n+ else:\n+ echo(f\"Error: {e.message}\", err=err) # noqa: B306\n continue\n if not confirmation_prompt:\n return result\n", "issue": "click.prompt(type=int, hide_input=True) outputs rejected input\nDespite asking `click.prompt()` to hide input it will still outputs the input if it rejects it. I get the same behavior for `7.0` from PyPI, `7.x` from Git, and `master` from Git.\r\n\r\n```bash\r\naltendky@p1:~$ venv/bin/python -c 'import click; click.prompt(\"prompt\", type=int, hide_input=True)'\r\nprompt: \r\nError: wait... i get to see this? is not a valid integer\r\nprompt:\r\n```\r\n\r\nIf you specify a type (let's say you are inputting a numeric pin) then the same can happen with `click.password_option()`.\r\n\r\n```python3\r\nimport click\r\n\r\n\r\[email protected]()\r\[email protected]_option(type=int)\r\ndef cli(password):\r\n pass\r\n\r\n\r\ncli()\r\n```\r\n\r\n```bash\r\naltendky@p1:~$ venv/bin/python x.py\r\nPassword: \r\nError: lkjsaf is not a valid integer\r\nPassword:\r\n```\r\n\r\nThe workaround for `click.prompt()` anyways I guess is to not specify a type and to implement the rejection manually.\r\n\r\nIf there is agreement that this is an issue that should be fixed then I can try to put together a PR for it.\nclick.prompt(type=int, hide_input=True) outputs rejected input\nDespite asking `click.prompt()` to hide input it will still outputs the input if it rejects it. I get the same behavior for `7.0` from PyPI, `7.x` from Git, and `master` from Git.\r\n\r\n```bash\r\naltendky@p1:~$ venv/bin/python -c 'import click; click.prompt(\"prompt\", type=int, hide_input=True)'\r\nprompt: \r\nError: wait... i get to see this? is not a valid integer\r\nprompt:\r\n```\r\n\r\nIf you specify a type (let's say you are inputting a numeric pin) then the same can happen with `click.password_option()`.\r\n\r\n```python3\r\nimport click\r\n\r\n\r\[email protected]()\r\[email protected]_option(type=int)\r\ndef cli(password):\r\n pass\r\n\r\n\r\ncli()\r\n```\r\n\r\n```bash\r\naltendky@p1:~$ venv/bin/python x.py\r\nPassword: \r\nError: lkjsaf is not a valid integer\r\nPassword:\r\n```\r\n\r\nThe workaround for `click.prompt()` anyways I guess is to not specify a type and to implement the rejection manually.\r\n\r\nIf there is agreement that this is an issue that should be fixed then I can try to put together a PR for it.\n", "before_files": [{"content": "import inspect\nimport io\nimport itertools\nimport os\nimport struct\nimport sys\n\nfrom ._compat import DEFAULT_COLUMNS\nfrom ._compat import get_winterm_size\nfrom ._compat import isatty\nfrom ._compat import strip_ansi\nfrom ._compat import WIN\nfrom .exceptions import Abort\nfrom .exceptions import UsageError\nfrom .globals import resolve_color_default\nfrom .types import Choice\nfrom .types import convert_type\nfrom .types import Path\nfrom .utils import echo\nfrom .utils import LazyFile\n\n# The prompt functions to use. 
The doc tools currently override these\n# functions to customize how they work.\nvisible_prompt_func = input\n\n_ansi_colors = {\n \"black\": 30,\n \"red\": 31,\n \"green\": 32,\n \"yellow\": 33,\n \"blue\": 34,\n \"magenta\": 35,\n \"cyan\": 36,\n \"white\": 37,\n \"reset\": 39,\n \"bright_black\": 90,\n \"bright_red\": 91,\n \"bright_green\": 92,\n \"bright_yellow\": 93,\n \"bright_blue\": 94,\n \"bright_magenta\": 95,\n \"bright_cyan\": 96,\n \"bright_white\": 97,\n}\n_ansi_reset_all = \"\\033[0m\"\n\n\ndef hidden_prompt_func(prompt):\n import getpass\n\n return getpass.getpass(prompt)\n\n\ndef _build_prompt(\n text, suffix, show_default=False, default=None, show_choices=True, type=None\n):\n prompt = text\n if type is not None and show_choices and isinstance(type, Choice):\n prompt += f\" ({', '.join(map(str, type.choices))})\"\n if default is not None and show_default:\n prompt = f\"{prompt} [{_format_default(default)}]\"\n return f\"{prompt}{suffix}\"\n\n\ndef _format_default(default):\n if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, \"name\"):\n return default.name\n\n return default\n\n\ndef prompt(\n text,\n default=None,\n hide_input=False,\n confirmation_prompt=False,\n type=None,\n value_proc=None,\n prompt_suffix=\": \",\n show_default=True,\n err=False,\n show_choices=True,\n):\n \"\"\"Prompts a user for input. This is a convenience function that can\n be used to prompt a user for input later.\n\n If the user aborts the input by sending a interrupt signal, this\n function will catch it and raise a :exc:`Abort` exception.\n\n .. versionadded:: 7.0\n Added the show_choices parameter.\n\n .. versionadded:: 6.0\n Added unicode support for cmd.exe on Windows.\n\n .. versionadded:: 4.0\n Added the `err` parameter.\n\n :param text: the text to show for the prompt.\n :param default: the default value to use if no input happens. 
If this\n is not given it will prompt until it's aborted.\n :param hide_input: if this is set to true then the input value will\n be hidden.\n :param confirmation_prompt: asks for confirmation for the value.\n :param type: the type to use to check the value against.\n :param value_proc: if this parameter is provided it's a function that\n is invoked instead of the type conversion to\n convert a value.\n :param prompt_suffix: a suffix that should be added to the prompt.\n :param show_default: shows or hides the default value in the prompt.\n :param err: if set to true the file defaults to ``stderr`` instead of\n ``stdout``, the same as with echo.\n :param show_choices: Show or hide choices if the passed type is a Choice.\n For example if type is a Choice of either day or week,\n show_choices is true and text is \"Group by\" then the\n prompt will be \"Group by (day, week): \".\n \"\"\"\n result = None\n\n def prompt_func(text):\n f = hidden_prompt_func if hide_input else visible_prompt_func\n try:\n # Write the prompt separately so that we get nice\n # coloring through colorama on Windows\n echo(text, nl=False, err=err)\n return f(\"\")\n except (KeyboardInterrupt, EOFError):\n # getpass doesn't print a newline if the user aborts input with ^C.\n # Allegedly this behavior is inherited from getpass(3).\n # A doc bug has been filed at https://bugs.python.org/issue24711\n if hide_input:\n echo(None, err=err)\n raise Abort()\n\n if value_proc is None:\n value_proc = convert_type(type, default)\n\n prompt = _build_prompt(\n text, prompt_suffix, show_default, default, show_choices, type\n )\n\n while 1:\n while 1:\n value = prompt_func(prompt)\n if value:\n break\n elif default is not None:\n if isinstance(value_proc, Path):\n # validate Path default value(exists, dir_okay etc.)\n value = default\n break\n return default\n try:\n result = value_proc(value)\n except UsageError as e:\n echo(f\"Error: {e.message}\", err=err) # noqa: B306\n continue\n if not confirmation_prompt:\n return result\n while 1:\n value2 = prompt_func(\"Repeat for confirmation: \")\n if value2:\n break\n if value == value2:\n return result\n echo(\"Error: the two entered values do not match\", err=err)\n\n\ndef confirm(\n text, default=False, abort=False, prompt_suffix=\": \", show_default=True, err=False\n):\n \"\"\"Prompts for confirmation (yes/no question).\n\n If the user aborts the input by sending a interrupt signal this\n function will catch it and raise a :exc:`Abort` exception.\n\n .. 
versionadded:: 4.0\n Added the `err` parameter.\n\n :param text: the question to ask.\n :param default: the default for the prompt.\n :param abort: if this is set to `True` a negative answer aborts the\n exception by raising :exc:`Abort`.\n :param prompt_suffix: a suffix that should be added to the prompt.\n :param show_default: shows or hides the default value in the prompt.\n :param err: if set to true the file defaults to ``stderr`` instead of\n ``stdout``, the same as with echo.\n \"\"\"\n prompt = _build_prompt(\n text, prompt_suffix, show_default, \"Y/n\" if default else \"y/N\"\n )\n while 1:\n try:\n # Write the prompt separately so that we get nice\n # coloring through colorama on Windows\n echo(prompt, nl=False, err=err)\n value = visible_prompt_func(\"\").lower().strip()\n except (KeyboardInterrupt, EOFError):\n raise Abort()\n if value in (\"y\", \"yes\"):\n rv = True\n elif value in (\"n\", \"no\"):\n rv = False\n elif value == \"\":\n rv = default\n else:\n echo(\"Error: invalid input\", err=err)\n continue\n break\n if abort and not rv:\n raise Abort()\n return rv\n\n\ndef get_terminal_size():\n \"\"\"Returns the current size of the terminal as tuple in the form\n ``(width, height)`` in columns and rows.\n \"\"\"\n import shutil\n\n if hasattr(shutil, \"get_terminal_size\"):\n return shutil.get_terminal_size()\n\n # We provide a sensible default for get_winterm_size() when being invoked\n # inside a subprocess. Without this, it would not provide a useful input.\n if get_winterm_size is not None:\n size = get_winterm_size()\n if size == (0, 0):\n return (79, 24)\n else:\n return size\n\n def ioctl_gwinsz(fd):\n try:\n import fcntl\n import termios\n\n cr = struct.unpack(\"hh\", fcntl.ioctl(fd, termios.TIOCGWINSZ, \"1234\"))\n except Exception:\n return\n return cr\n\n cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)\n if not cr:\n try:\n fd = os.open(os.ctermid(), os.O_RDONLY)\n try:\n cr = ioctl_gwinsz(fd)\n finally:\n os.close(fd)\n except Exception:\n pass\n if not cr or not cr[0] or not cr[1]:\n cr = (os.environ.get(\"LINES\", 25), os.environ.get(\"COLUMNS\", DEFAULT_COLUMNS))\n return int(cr[1]), int(cr[0])\n\n\ndef echo_via_pager(text_or_generator, color=None):\n \"\"\"This function takes a text and shows it via an environment specific\n pager on stdout.\n\n .. versionchanged:: 3.0\n Added the `color` flag.\n\n :param text_or_generator: the text to page, or alternatively, a\n generator emitting the text to page.\n :param color: controls if the pager supports ANSI colors or not. The\n default is autodetection.\n \"\"\"\n color = resolve_color_default(color)\n\n if inspect.isgeneratorfunction(text_or_generator):\n i = text_or_generator()\n elif isinstance(text_or_generator, str):\n i = [text_or_generator]\n else:\n i = iter(text_or_generator)\n\n # convert every element of i to a text type if necessary\n text_generator = (el if isinstance(el, str) else str(el) for el in i)\n\n from ._termui_impl import pager\n\n return pager(itertools.chain(text_generator, \"\\n\"), color)\n\n\ndef progressbar(\n iterable=None,\n length=None,\n label=None,\n show_eta=True,\n show_percent=None,\n show_pos=False,\n item_show_func=None,\n fill_char=\"#\",\n empty_char=\"-\",\n bar_template=\"%(label)s [%(bar)s] %(info)s\",\n info_sep=\" \",\n width=36,\n file=None,\n color=None,\n):\n \"\"\"This function creates an iterable context manager that can be used\n to iterate over something while showing a progress bar. 
It will\n either iterate over the `iterable` or `length` items (that are counted\n up). While iteration happens, this function will print a rendered\n progress bar to the given `file` (defaults to stdout) and will attempt\n to calculate remaining time and more. By default, this progress bar\n will not be rendered if the file is not a terminal.\n\n The context manager creates the progress bar. When the context\n manager is entered the progress bar is already created. With every\n iteration over the progress bar, the iterable passed to the bar is\n advanced and the bar is updated. When the context manager exits,\n a newline is printed and the progress bar is finalized on screen.\n\n Note: The progress bar is currently designed for use cases where the\n total progress can be expected to take at least several seconds.\n Because of this, the ProgressBar class object won't display\n progress that is considered too fast, and progress where the time\n between steps is less than a second.\n\n No printing must happen or the progress bar will be unintentionally\n destroyed.\n\n Example usage::\n\n with progressbar(items) as bar:\n for item in bar:\n do_something_with(item)\n\n Alternatively, if no iterable is specified, one can manually update the\n progress bar through the `update()` method instead of directly\n iterating over the progress bar. The update method accepts the number\n of steps to increment the bar with::\n\n with progressbar(length=chunks.total_bytes) as bar:\n for chunk in chunks:\n process_chunk(chunk)\n bar.update(chunks.bytes)\n\n The ``update()`` method also takes an optional value specifying the\n ``current_item`` at the new position. This is useful when used\n together with ``item_show_func`` to customize the output for each\n manual step::\n\n with click.progressbar(\n length=total_size,\n label='Unzipping archive',\n item_show_func=lambda a: a.filename\n ) as bar:\n for archive in zip_file:\n archive.extract()\n bar.update(archive.size, archive)\n\n .. versionadded:: 2.0\n\n .. versionadded:: 4.0\n Added the `color` parameter. Added a `update` method to the\n progressbar object.\n\n :param iterable: an iterable to iterate over. If not provided the length\n is required.\n :param length: the number of items to iterate over. By default the\n progressbar will attempt to ask the iterator about its\n length, which might or might not work. If an iterable is\n also provided this parameter can be used to override the\n length. If an iterable is not provided the progress bar\n will iterate over a range of that length.\n :param label: the label to show next to the progress bar.\n :param show_eta: enables or disables the estimated time display. This is\n automatically disabled if the length cannot be\n determined.\n :param show_percent: enables or disables the percentage display. The\n default is `True` if the iterable has a length or\n `False` if not.\n :param show_pos: enables or disables the absolute position display. The\n default is `False`.\n :param item_show_func: a function called with the current item which\n can return a string to show the current item\n next to the progress bar. 
Note that the current\n item can be `None`!\n :param fill_char: the character to use to show the filled part of the\n progress bar.\n :param empty_char: the character to use to show the non-filled part of\n the progress bar.\n :param bar_template: the format string to use as template for the bar.\n The parameters in it are ``label`` for the label,\n ``bar`` for the progress bar and ``info`` for the\n info section.\n :param info_sep: the separator between multiple info items (eta etc.)\n :param width: the width of the progress bar in characters, 0 means full\n terminal width\n :param file: the file to write to. If this is not a terminal then\n only the label is printed.\n :param color: controls if the terminal supports ANSI colors or not. The\n default is autodetection. This is only needed if ANSI\n codes are included anywhere in the progress bar output\n which is not the case by default.\n \"\"\"\n from ._termui_impl import ProgressBar\n\n color = resolve_color_default(color)\n return ProgressBar(\n iterable=iterable,\n length=length,\n show_eta=show_eta,\n show_percent=show_percent,\n show_pos=show_pos,\n item_show_func=item_show_func,\n fill_char=fill_char,\n empty_char=empty_char,\n bar_template=bar_template,\n info_sep=info_sep,\n file=file,\n label=label,\n width=width,\n color=color,\n )\n\n\ndef clear():\n \"\"\"Clears the terminal screen. This will have the effect of clearing\n the whole visible space of the terminal and moving the cursor to the\n top left. This does not do anything if not connected to a terminal.\n\n .. versionadded:: 2.0\n \"\"\"\n if not isatty(sys.stdout):\n return\n # If we're on Windows and we don't have colorama available, then we\n # clear the screen by shelling out. Otherwise we can use an escape\n # sequence.\n if WIN:\n os.system(\"cls\")\n else:\n sys.stdout.write(\"\\033[2J\\033[1;1H\")\n\n\ndef style(\n text,\n fg=None,\n bg=None,\n bold=None,\n dim=None,\n underline=None,\n blink=None,\n reverse=None,\n reset=True,\n):\n \"\"\"Styles a text with ANSI styles and returns the new string. By\n default the styling is self contained which means that at the end\n of the string a reset code is issued. This can be prevented by\n passing ``reset=False``.\n\n Examples::\n\n click.echo(click.style('Hello World!', fg='green'))\n click.echo(click.style('ATTENTION!', blink=True))\n click.echo(click.style('Some things', reverse=True, fg='cyan'))\n\n Supported color names:\n\n * ``black`` (might be a gray)\n * ``red``\n * ``green``\n * ``yellow`` (might be an orange)\n * ``blue``\n * ``magenta``\n * ``cyan``\n * ``white`` (might be light gray)\n * ``bright_black``\n * ``bright_red``\n * ``bright_green``\n * ``bright_yellow``\n * ``bright_blue``\n * ``bright_magenta``\n * ``bright_cyan``\n * ``bright_white``\n * ``reset`` (reset the color code only)\n\n .. versionadded:: 2.0\n\n .. versionadded:: 7.0\n Added support for bright colors.\n\n :param text: the string to style with ansi codes.\n :param fg: if provided this will become the foreground color.\n :param bg: if provided this will become the background color.\n :param bold: if provided this will enable or disable bold mode.\n :param dim: if provided this will enable or disable dim mode. 
This is\n badly supported.\n :param underline: if provided this will enable or disable underline.\n :param blink: if provided this will enable or disable blinking.\n :param reverse: if provided this will enable or disable inverse\n rendering (foreground becomes background and the\n other way round).\n :param reset: by default a reset-all code is added at the end of the\n string which means that styles do not carry over. This\n can be disabled to compose styles.\n \"\"\"\n bits = []\n if fg:\n try:\n bits.append(f\"\\033[{_ansi_colors[fg]}m\")\n except KeyError:\n raise TypeError(f\"Unknown color {fg!r}\")\n if bg:\n try:\n bits.append(f\"\\033[{_ansi_colors[bg] + 10}m\")\n except KeyError:\n raise TypeError(f\"Unknown color {bg!r}\")\n if bold is not None:\n bits.append(f\"\\033[{1 if bold else 22}m\")\n if dim is not None:\n bits.append(f\"\\033[{2 if dim else 22}m\")\n if underline is not None:\n bits.append(f\"\\033[{4 if underline else 24}m\")\n if blink is not None:\n bits.append(f\"\\033[{5 if blink else 25}m\")\n if reverse is not None:\n bits.append(f\"\\033[{7 if reverse else 27}m\")\n bits.append(text)\n if reset:\n bits.append(_ansi_reset_all)\n return \"\".join(bits)\n\n\ndef unstyle(text):\n \"\"\"Removes ANSI styling information from a string. Usually it's not\n necessary to use this function as Click's echo function will\n automatically remove styling if necessary.\n\n .. versionadded:: 2.0\n\n :param text: the text to remove style information from.\n \"\"\"\n return strip_ansi(text)\n\n\ndef secho(message=None, file=None, nl=True, err=False, color=None, **styles):\n \"\"\"This function combines :func:`echo` and :func:`style` into one\n call. As such the following two calls are the same::\n\n click.secho('Hello World!', fg='green')\n click.echo(click.style('Hello World!', fg='green'))\n\n All keyword arguments are forwarded to the underlying functions\n depending on which one they go with.\n\n .. versionadded:: 2.0\n \"\"\"\n if message is not None:\n message = style(message, **styles)\n return echo(message, file=file, nl=nl, err=err, color=color)\n\n\ndef edit(\n text=None, editor=None, env=None, require_save=True, extension=\".txt\", filename=None\n):\n r\"\"\"Edits the given text in the defined editor. If an editor is given\n (should be the full path to the executable but the regular operating\n system search path is used for finding the executable) it overrides\n the detected editor. Optionally, some environment variables can be\n used. If the editor is closed without changes, `None` is returned. In\n case a file is edited directly the return value is always `None` and\n `require_save` and `extension` are ignored.\n\n If the editor cannot be opened a :exc:`UsageError` is raised.\n\n Note for Windows: to simplify cross-platform usage, the newlines are\n automatically converted from POSIX to Windows and vice versa. As such,\n the message here will have ``\\n`` as newline markers.\n\n :param text: the text to edit.\n :param editor: optionally the editor to use. Defaults to automatic\n detection.\n :param env: environment variables to forward to the editor.\n :param require_save: if this is true, then not saving in the editor\n will make the return value become `None`.\n :param extension: the extension to tell the editor about. This defaults\n to `.txt` but changing this might change syntax\n highlighting.\n :param filename: if provided it will edit this file instead of the\n provided text contents. 
It will not use a temporary\n file as an indirection in that case.\n \"\"\"\n from ._termui_impl import Editor\n\n editor = Editor(\n editor=editor, env=env, require_save=require_save, extension=extension\n )\n if filename is None:\n return editor.edit(text)\n editor.edit_file(filename)\n\n\ndef launch(url, wait=False, locate=False):\n \"\"\"This function launches the given URL (or filename) in the default\n viewer application for this file type. If this is an executable, it\n might launch the executable in a new session. The return value is\n the exit code of the launched application. Usually, ``0`` indicates\n success.\n\n Examples::\n\n click.launch('https://click.palletsprojects.com/')\n click.launch('/my/downloaded/file', locate=True)\n\n .. versionadded:: 2.0\n\n :param url: URL or filename of the thing to launch.\n :param wait: waits for the program to stop.\n :param locate: if this is set to `True` then instead of launching the\n application associated with the URL it will attempt to\n launch a file manager with the file located. This\n might have weird effects if the URL does not point to\n the filesystem.\n \"\"\"\n from ._termui_impl import open_url\n\n return open_url(url, wait=wait, locate=locate)\n\n\n# If this is provided, getchar() calls into this instead. This is used\n# for unittesting purposes.\n_getchar = None\n\n\ndef getchar(echo=False):\n \"\"\"Fetches a single character from the terminal and returns it. This\n will always return a unicode character and under certain rare\n circumstances this might return more than one character. The\n situations which more than one character is returned is when for\n whatever reason multiple characters end up in the terminal buffer or\n standard input was not actually a terminal.\n\n Note that this will always read from the terminal, even if something\n is piped into the standard input.\n\n Note for Windows: in rare cases when typing non-ASCII characters, this\n function might wait for a second character and then return both at once.\n This is because certain Unicode characters look like special-key markers.\n\n .. versionadded:: 2.0\n\n :param echo: if set to `True`, the character read will also show up on\n the terminal. The default is to not show it.\n \"\"\"\n f = _getchar\n if f is None:\n from ._termui_impl import getchar as f\n return f(echo)\n\n\ndef raw_terminal():\n from ._termui_impl import raw_terminal as f\n\n return f()\n\n\ndef pause(info=\"Press any key to continue ...\", err=False):\n \"\"\"This command stops execution and waits for the user to press any\n key to continue. This is similar to the Windows batch \"pause\"\n command. If the program is not run through a terminal, this command\n will instead do nothing.\n\n .. versionadded:: 2.0\n\n .. 
versionadded:: 4.0\n Added the `err` parameter.\n\n :param info: the info string to print before pausing.\n :param err: if set to message goes to ``stderr`` instead of\n ``stdout``, the same as with echo.\n \"\"\"\n if not isatty(sys.stdin) or not isatty(sys.stdout):\n return\n try:\n if info:\n echo(info, nl=False, err=err)\n try:\n getchar()\n except (KeyboardInterrupt, EOFError):\n pass\n finally:\n if info:\n echo(err=err)\n", "path": "src/click/termui.py"}], "after_files": [{"content": "import inspect\nimport io\nimport itertools\nimport os\nimport struct\nimport sys\n\nfrom ._compat import DEFAULT_COLUMNS\nfrom ._compat import get_winterm_size\nfrom ._compat import isatty\nfrom ._compat import strip_ansi\nfrom ._compat import WIN\nfrom .exceptions import Abort\nfrom .exceptions import UsageError\nfrom .globals import resolve_color_default\nfrom .types import Choice\nfrom .types import convert_type\nfrom .types import Path\nfrom .utils import echo\nfrom .utils import LazyFile\n\n# The prompt functions to use. The doc tools currently override these\n# functions to customize how they work.\nvisible_prompt_func = input\n\n_ansi_colors = {\n \"black\": 30,\n \"red\": 31,\n \"green\": 32,\n \"yellow\": 33,\n \"blue\": 34,\n \"magenta\": 35,\n \"cyan\": 36,\n \"white\": 37,\n \"reset\": 39,\n \"bright_black\": 90,\n \"bright_red\": 91,\n \"bright_green\": 92,\n \"bright_yellow\": 93,\n \"bright_blue\": 94,\n \"bright_magenta\": 95,\n \"bright_cyan\": 96,\n \"bright_white\": 97,\n}\n_ansi_reset_all = \"\\033[0m\"\n\n\ndef hidden_prompt_func(prompt):\n import getpass\n\n return getpass.getpass(prompt)\n\n\ndef _build_prompt(\n text, suffix, show_default=False, default=None, show_choices=True, type=None\n):\n prompt = text\n if type is not None and show_choices and isinstance(type, Choice):\n prompt += f\" ({', '.join(map(str, type.choices))})\"\n if default is not None and show_default:\n prompt = f\"{prompt} [{_format_default(default)}]\"\n return f\"{prompt}{suffix}\"\n\n\ndef _format_default(default):\n if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, \"name\"):\n return default.name\n\n return default\n\n\ndef prompt(\n text,\n default=None,\n hide_input=False,\n confirmation_prompt=False,\n type=None,\n value_proc=None,\n prompt_suffix=\": \",\n show_default=True,\n err=False,\n show_choices=True,\n):\n \"\"\"Prompts a user for input. This is a convenience function that can\n be used to prompt a user for input later.\n\n If the user aborts the input by sending a interrupt signal, this\n function will catch it and raise a :exc:`Abort` exception.\n\n .. versionadded:: 7.0\n Added the show_choices parameter.\n\n .. versionadded:: 6.0\n Added unicode support for cmd.exe on Windows.\n\n .. versionadded:: 4.0\n Added the `err` parameter.\n\n :param text: the text to show for the prompt.\n :param default: the default value to use if no input happens. 
If this\n is not given it will prompt until it's aborted.\n :param hide_input: if this is set to true then the input value will\n be hidden.\n :param confirmation_prompt: asks for confirmation for the value.\n :param type: the type to use to check the value against.\n :param value_proc: if this parameter is provided it's a function that\n is invoked instead of the type conversion to\n convert a value.\n :param prompt_suffix: a suffix that should be added to the prompt.\n :param show_default: shows or hides the default value in the prompt.\n :param err: if set to true the file defaults to ``stderr`` instead of\n ``stdout``, the same as with echo.\n :param show_choices: Show or hide choices if the passed type is a Choice.\n For example if type is a Choice of either day or week,\n show_choices is true and text is \"Group by\" then the\n prompt will be \"Group by (day, week): \".\n \"\"\"\n result = None\n\n def prompt_func(text):\n f = hidden_prompt_func if hide_input else visible_prompt_func\n try:\n # Write the prompt separately so that we get nice\n # coloring through colorama on Windows\n echo(text, nl=False, err=err)\n return f(\"\")\n except (KeyboardInterrupt, EOFError):\n # getpass doesn't print a newline if the user aborts input with ^C.\n # Allegedly this behavior is inherited from getpass(3).\n # A doc bug has been filed at https://bugs.python.org/issue24711\n if hide_input:\n echo(None, err=err)\n raise Abort()\n\n if value_proc is None:\n value_proc = convert_type(type, default)\n\n prompt = _build_prompt(\n text, prompt_suffix, show_default, default, show_choices, type\n )\n\n while 1:\n while 1:\n value = prompt_func(prompt)\n if value:\n break\n elif default is not None:\n if isinstance(value_proc, Path):\n # validate Path default value(exists, dir_okay etc.)\n value = default\n break\n return default\n try:\n result = value_proc(value)\n except UsageError as e:\n if hide_input:\n echo(\"Error: the value you entered was invalid\", err=err)\n else:\n echo(f\"Error: {e.message}\", err=err) # noqa: B306\n continue\n if not confirmation_prompt:\n return result\n while 1:\n value2 = prompt_func(\"Repeat for confirmation: \")\n if value2:\n break\n if value == value2:\n return result\n echo(\"Error: the two entered values do not match\", err=err)\n\n\ndef confirm(\n text, default=False, abort=False, prompt_suffix=\": \", show_default=True, err=False\n):\n \"\"\"Prompts for confirmation (yes/no question).\n\n If the user aborts the input by sending a interrupt signal this\n function will catch it and raise a :exc:`Abort` exception.\n\n .. 
versionadded:: 4.0\n Added the `err` parameter.\n\n :param text: the question to ask.\n :param default: the default for the prompt.\n :param abort: if this is set to `True` a negative answer aborts the\n exception by raising :exc:`Abort`.\n :param prompt_suffix: a suffix that should be added to the prompt.\n :param show_default: shows or hides the default value in the prompt.\n :param err: if set to true the file defaults to ``stderr`` instead of\n ``stdout``, the same as with echo.\n \"\"\"\n prompt = _build_prompt(\n text, prompt_suffix, show_default, \"Y/n\" if default else \"y/N\"\n )\n while 1:\n try:\n # Write the prompt separately so that we get nice\n # coloring through colorama on Windows\n echo(prompt, nl=False, err=err)\n value = visible_prompt_func(\"\").lower().strip()\n except (KeyboardInterrupt, EOFError):\n raise Abort()\n if value in (\"y\", \"yes\"):\n rv = True\n elif value in (\"n\", \"no\"):\n rv = False\n elif value == \"\":\n rv = default\n else:\n echo(\"Error: invalid input\", err=err)\n continue\n break\n if abort and not rv:\n raise Abort()\n return rv\n\n\ndef get_terminal_size():\n \"\"\"Returns the current size of the terminal as tuple in the form\n ``(width, height)`` in columns and rows.\n \"\"\"\n import shutil\n\n if hasattr(shutil, \"get_terminal_size\"):\n return shutil.get_terminal_size()\n\n # We provide a sensible default for get_winterm_size() when being invoked\n # inside a subprocess. Without this, it would not provide a useful input.\n if get_winterm_size is not None:\n size = get_winterm_size()\n if size == (0, 0):\n return (79, 24)\n else:\n return size\n\n def ioctl_gwinsz(fd):\n try:\n import fcntl\n import termios\n\n cr = struct.unpack(\"hh\", fcntl.ioctl(fd, termios.TIOCGWINSZ, \"1234\"))\n except Exception:\n return\n return cr\n\n cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)\n if not cr:\n try:\n fd = os.open(os.ctermid(), os.O_RDONLY)\n try:\n cr = ioctl_gwinsz(fd)\n finally:\n os.close(fd)\n except Exception:\n pass\n if not cr or not cr[0] or not cr[1]:\n cr = (os.environ.get(\"LINES\", 25), os.environ.get(\"COLUMNS\", DEFAULT_COLUMNS))\n return int(cr[1]), int(cr[0])\n\n\ndef echo_via_pager(text_or_generator, color=None):\n \"\"\"This function takes a text and shows it via an environment specific\n pager on stdout.\n\n .. versionchanged:: 3.0\n Added the `color` flag.\n\n :param text_or_generator: the text to page, or alternatively, a\n generator emitting the text to page.\n :param color: controls if the pager supports ANSI colors or not. The\n default is autodetection.\n \"\"\"\n color = resolve_color_default(color)\n\n if inspect.isgeneratorfunction(text_or_generator):\n i = text_or_generator()\n elif isinstance(text_or_generator, str):\n i = [text_or_generator]\n else:\n i = iter(text_or_generator)\n\n # convert every element of i to a text type if necessary\n text_generator = (el if isinstance(el, str) else str(el) for el in i)\n\n from ._termui_impl import pager\n\n return pager(itertools.chain(text_generator, \"\\n\"), color)\n\n\ndef progressbar(\n iterable=None,\n length=None,\n label=None,\n show_eta=True,\n show_percent=None,\n show_pos=False,\n item_show_func=None,\n fill_char=\"#\",\n empty_char=\"-\",\n bar_template=\"%(label)s [%(bar)s] %(info)s\",\n info_sep=\" \",\n width=36,\n file=None,\n color=None,\n):\n \"\"\"This function creates an iterable context manager that can be used\n to iterate over something while showing a progress bar. 
It will\n either iterate over the `iterable` or `length` items (that are counted\n up). While iteration happens, this function will print a rendered\n progress bar to the given `file` (defaults to stdout) and will attempt\n to calculate remaining time and more. By default, this progress bar\n will not be rendered if the file is not a terminal.\n\n The context manager creates the progress bar. When the context\n manager is entered the progress bar is already created. With every\n iteration over the progress bar, the iterable passed to the bar is\n advanced and the bar is updated. When the context manager exits,\n a newline is printed and the progress bar is finalized on screen.\n\n Note: The progress bar is currently designed for use cases where the\n total progress can be expected to take at least several seconds.\n Because of this, the ProgressBar class object won't display\n progress that is considered too fast, and progress where the time\n between steps is less than a second.\n\n No printing must happen or the progress bar will be unintentionally\n destroyed.\n\n Example usage::\n\n with progressbar(items) as bar:\n for item in bar:\n do_something_with(item)\n\n Alternatively, if no iterable is specified, one can manually update the\n progress bar through the `update()` method instead of directly\n iterating over the progress bar. The update method accepts the number\n of steps to increment the bar with::\n\n with progressbar(length=chunks.total_bytes) as bar:\n for chunk in chunks:\n process_chunk(chunk)\n bar.update(chunks.bytes)\n\n The ``update()`` method also takes an optional value specifying the\n ``current_item`` at the new position. This is useful when used\n together with ``item_show_func`` to customize the output for each\n manual step::\n\n with click.progressbar(\n length=total_size,\n label='Unzipping archive',\n item_show_func=lambda a: a.filename\n ) as bar:\n for archive in zip_file:\n archive.extract()\n bar.update(archive.size, archive)\n\n .. versionadded:: 2.0\n\n .. versionadded:: 4.0\n Added the `color` parameter. Added a `update` method to the\n progressbar object.\n\n :param iterable: an iterable to iterate over. If not provided the length\n is required.\n :param length: the number of items to iterate over. By default the\n progressbar will attempt to ask the iterator about its\n length, which might or might not work. If an iterable is\n also provided this parameter can be used to override the\n length. If an iterable is not provided the progress bar\n will iterate over a range of that length.\n :param label: the label to show next to the progress bar.\n :param show_eta: enables or disables the estimated time display. This is\n automatically disabled if the length cannot be\n determined.\n :param show_percent: enables or disables the percentage display. The\n default is `True` if the iterable has a length or\n `False` if not.\n :param show_pos: enables or disables the absolute position display. The\n default is `False`.\n :param item_show_func: a function called with the current item which\n can return a string to show the current item\n next to the progress bar. 
Note that the current\n item can be `None`!\n :param fill_char: the character to use to show the filled part of the\n progress bar.\n :param empty_char: the character to use to show the non-filled part of\n the progress bar.\n :param bar_template: the format string to use as template for the bar.\n The parameters in it are ``label`` for the label,\n ``bar`` for the progress bar and ``info`` for the\n info section.\n :param info_sep: the separator between multiple info items (eta etc.)\n :param width: the width of the progress bar in characters, 0 means full\n terminal width\n :param file: the file to write to. If this is not a terminal then\n only the label is printed.\n :param color: controls if the terminal supports ANSI colors or not. The\n default is autodetection. This is only needed if ANSI\n codes are included anywhere in the progress bar output\n which is not the case by default.\n \"\"\"\n from ._termui_impl import ProgressBar\n\n color = resolve_color_default(color)\n return ProgressBar(\n iterable=iterable,\n length=length,\n show_eta=show_eta,\n show_percent=show_percent,\n show_pos=show_pos,\n item_show_func=item_show_func,\n fill_char=fill_char,\n empty_char=empty_char,\n bar_template=bar_template,\n info_sep=info_sep,\n file=file,\n label=label,\n width=width,\n color=color,\n )\n\n\ndef clear():\n \"\"\"Clears the terminal screen. This will have the effect of clearing\n the whole visible space of the terminal and moving the cursor to the\n top left. This does not do anything if not connected to a terminal.\n\n .. versionadded:: 2.0\n \"\"\"\n if not isatty(sys.stdout):\n return\n # If we're on Windows and we don't have colorama available, then we\n # clear the screen by shelling out. Otherwise we can use an escape\n # sequence.\n if WIN:\n os.system(\"cls\")\n else:\n sys.stdout.write(\"\\033[2J\\033[1;1H\")\n\n\ndef style(\n text,\n fg=None,\n bg=None,\n bold=None,\n dim=None,\n underline=None,\n blink=None,\n reverse=None,\n reset=True,\n):\n \"\"\"Styles a text with ANSI styles and returns the new string. By\n default the styling is self contained which means that at the end\n of the string a reset code is issued. This can be prevented by\n passing ``reset=False``.\n\n Examples::\n\n click.echo(click.style('Hello World!', fg='green'))\n click.echo(click.style('ATTENTION!', blink=True))\n click.echo(click.style('Some things', reverse=True, fg='cyan'))\n\n Supported color names:\n\n * ``black`` (might be a gray)\n * ``red``\n * ``green``\n * ``yellow`` (might be an orange)\n * ``blue``\n * ``magenta``\n * ``cyan``\n * ``white`` (might be light gray)\n * ``bright_black``\n * ``bright_red``\n * ``bright_green``\n * ``bright_yellow``\n * ``bright_blue``\n * ``bright_magenta``\n * ``bright_cyan``\n * ``bright_white``\n * ``reset`` (reset the color code only)\n\n .. versionadded:: 2.0\n\n .. versionadded:: 7.0\n Added support for bright colors.\n\n :param text: the string to style with ansi codes.\n :param fg: if provided this will become the foreground color.\n :param bg: if provided this will become the background color.\n :param bold: if provided this will enable or disable bold mode.\n :param dim: if provided this will enable or disable dim mode. 
This is\n badly supported.\n :param underline: if provided this will enable or disable underline.\n :param blink: if provided this will enable or disable blinking.\n :param reverse: if provided this will enable or disable inverse\n rendering (foreground becomes background and the\n other way round).\n :param reset: by default a reset-all code is added at the end of the\n string which means that styles do not carry over. This\n can be disabled to compose styles.\n \"\"\"\n bits = []\n if fg:\n try:\n bits.append(f\"\\033[{_ansi_colors[fg]}m\")\n except KeyError:\n raise TypeError(f\"Unknown color {fg!r}\")\n if bg:\n try:\n bits.append(f\"\\033[{_ansi_colors[bg] + 10}m\")\n except KeyError:\n raise TypeError(f\"Unknown color {bg!r}\")\n if bold is not None:\n bits.append(f\"\\033[{1 if bold else 22}m\")\n if dim is not None:\n bits.append(f\"\\033[{2 if dim else 22}m\")\n if underline is not None:\n bits.append(f\"\\033[{4 if underline else 24}m\")\n if blink is not None:\n bits.append(f\"\\033[{5 if blink else 25}m\")\n if reverse is not None:\n bits.append(f\"\\033[{7 if reverse else 27}m\")\n bits.append(text)\n if reset:\n bits.append(_ansi_reset_all)\n return \"\".join(bits)\n\n\ndef unstyle(text):\n \"\"\"Removes ANSI styling information from a string. Usually it's not\n necessary to use this function as Click's echo function will\n automatically remove styling if necessary.\n\n .. versionadded:: 2.0\n\n :param text: the text to remove style information from.\n \"\"\"\n return strip_ansi(text)\n\n\ndef secho(message=None, file=None, nl=True, err=False, color=None, **styles):\n \"\"\"This function combines :func:`echo` and :func:`style` into one\n call. As such the following two calls are the same::\n\n click.secho('Hello World!', fg='green')\n click.echo(click.style('Hello World!', fg='green'))\n\n All keyword arguments are forwarded to the underlying functions\n depending on which one they go with.\n\n .. versionadded:: 2.0\n \"\"\"\n if message is not None:\n message = style(message, **styles)\n return echo(message, file=file, nl=nl, err=err, color=color)\n\n\ndef edit(\n text=None, editor=None, env=None, require_save=True, extension=\".txt\", filename=None\n):\n r\"\"\"Edits the given text in the defined editor. If an editor is given\n (should be the full path to the executable but the regular operating\n system search path is used for finding the executable) it overrides\n the detected editor. Optionally, some environment variables can be\n used. If the editor is closed without changes, `None` is returned. In\n case a file is edited directly the return value is always `None` and\n `require_save` and `extension` are ignored.\n\n If the editor cannot be opened a :exc:`UsageError` is raised.\n\n Note for Windows: to simplify cross-platform usage, the newlines are\n automatically converted from POSIX to Windows and vice versa. As such,\n the message here will have ``\\n`` as newline markers.\n\n :param text: the text to edit.\n :param editor: optionally the editor to use. Defaults to automatic\n detection.\n :param env: environment variables to forward to the editor.\n :param require_save: if this is true, then not saving in the editor\n will make the return value become `None`.\n :param extension: the extension to tell the editor about. This defaults\n to `.txt` but changing this might change syntax\n highlighting.\n :param filename: if provided it will edit this file instead of the\n provided text contents. 
It will not use a temporary\n file as an indirection in that case.\n \"\"\"\n from ._termui_impl import Editor\n\n editor = Editor(\n editor=editor, env=env, require_save=require_save, extension=extension\n )\n if filename is None:\n return editor.edit(text)\n editor.edit_file(filename)\n\n\ndef launch(url, wait=False, locate=False):\n \"\"\"This function launches the given URL (or filename) in the default\n viewer application for this file type. If this is an executable, it\n might launch the executable in a new session. The return value is\n the exit code of the launched application. Usually, ``0`` indicates\n success.\n\n Examples::\n\n click.launch('https://click.palletsprojects.com/')\n click.launch('/my/downloaded/file', locate=True)\n\n .. versionadded:: 2.0\n\n :param url: URL or filename of the thing to launch.\n :param wait: waits for the program to stop.\n :param locate: if this is set to `True` then instead of launching the\n application associated with the URL it will attempt to\n launch a file manager with the file located. This\n might have weird effects if the URL does not point to\n the filesystem.\n \"\"\"\n from ._termui_impl import open_url\n\n return open_url(url, wait=wait, locate=locate)\n\n\n# If this is provided, getchar() calls into this instead. This is used\n# for unittesting purposes.\n_getchar = None\n\n\ndef getchar(echo=False):\n \"\"\"Fetches a single character from the terminal and returns it. This\n will always return a unicode character and under certain rare\n circumstances this might return more than one character. The\n situations which more than one character is returned is when for\n whatever reason multiple characters end up in the terminal buffer or\n standard input was not actually a terminal.\n\n Note that this will always read from the terminal, even if something\n is piped into the standard input.\n\n Note for Windows: in rare cases when typing non-ASCII characters, this\n function might wait for a second character and then return both at once.\n This is because certain Unicode characters look like special-key markers.\n\n .. versionadded:: 2.0\n\n :param echo: if set to `True`, the character read will also show up on\n the terminal. The default is to not show it.\n \"\"\"\n f = _getchar\n if f is None:\n from ._termui_impl import getchar as f\n return f(echo)\n\n\ndef raw_terminal():\n from ._termui_impl import raw_terminal as f\n\n return f()\n\n\ndef pause(info=\"Press any key to continue ...\", err=False):\n \"\"\"This command stops execution and waits for the user to press any\n key to continue. This is similar to the Windows batch \"pause\"\n command. If the program is not run through a terminal, this command\n will instead do nothing.\n\n .. versionadded:: 2.0\n\n .. versionadded:: 4.0\n Added the `err` parameter.\n\n :param info: the info string to print before pausing.\n :param err: if set to message goes to ``stderr`` instead of\n ``stdout``, the same as with echo.\n \"\"\"\n if not isatty(sys.stdin) or not isatty(sys.stdout):\n return\n try:\n if info:\n echo(info, nl=False, err=err)\n try:\n getchar()\n except (KeyboardInterrupt, EOFError):\n pass\n finally:\n if info:\n echo(err=err)\n", "path": "src/click/termui.py"}]} |
gh_patches_debug_1635 | rasdani/github-patches | git_diff | chainer__chainer-271 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
FunctionSet.copy_parameters_from()
Hi all!
The code in 'FunctionSet.copy_parameters_from()' does not work when 'src' and 'dst' are both numpy.ndarrays.
``` python
if isinstance(dst, numpy.ndarray):
    if isinstance(src, numpy.ndarray):
        dst.copy(src) # this gives a ValueError
```
I think this should read
``` python
if isinstance(dst, numpy.ndarray):
    if isinstance(src, numpy.ndarray):
        numpy.copyto(dst, src)
```
My numpy.version.full_version is 1.9.2; the 'copyto' function has existed since numpy 1.7.0.
Cheers,
-r
--- END ISSUE ---
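Editorial note: the report is straightforward to confirm, since `ndarray.copy()` takes a memory-layout `order` argument and returns a new array; it can neither accept a source array nor write into `dst` in place. The sketch below is not part of the original report and only assumes numpy >= 1.7 for `copyto`.

``` python
import numpy

dst = numpy.zeros(3)
src = numpy.arange(3, dtype=float)

try:
    dst.copy(src)  # `src` is misread as the `order` flag of ndarray.copy
except (TypeError, ValueError) as exc:
    print("dst.copy(src) fails:", exc)

numpy.copyto(dst, src)  # copies src into the existing dst buffer in place
assert (dst == src).all()
```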
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `chainer/function_set.py`
Content:
```
1 import numpy
2 import six
3
4 from chainer import cuda
5
6
7 class FunctionSet(object):
8
9 """Set of objects with ``parameters`` and ``gradients`` properties.
10
11 :class:`FunctionSet` is useful to collect parameters and gradients of
12 multiple parameterized :class:`Function` objects. :class:`FunctionSet`
13 itself also implements :attr:`~FunctionSet.parameters` and
14 :attr:`~FunctionSet.gradients`, so it can be nested in another
15 :class:`FunctionSet` object.
16
17 Function registration is done by just adding an attribute to
18 :class:`FunctionSet` object.
19
20 """
21
22 def __init__(self, **functions):
23 """Initializes the function set by given functions.
24
25 Args:
26 **functions: ``dict`` of ``str`` key and :class:`Function` values.
27 The key-value pairs are just set to the :class:`FunctionSet`
28 object as attributes.
29
30 """
31 for name, func in six.iteritems(functions):
32 setattr(self, name, func)
33
34 def collect_parameters(self):
35 """Returns a tuple of parameters and gradients.
36
37 Returns:
38 Tuple (pair) of two tuples. The first element is a tuple of
39 parameter arrays, and the second is a tuple of gradient arrays.
40
41 """
42 return self.parameters, self.gradients
43
44 def to_gpu(self, device=None):
45 """Migrates all parameters and gradients onto GPU.
46
47 This method calls ``to_gpu`` method of each registered object.
48
49 Args:
50 device (int or :class:`pycuda.driver.Device` or ``None``): Device
51 ID of GPU. If ``None`` is given, it uses the current device.
52
53 Returns:
54 self
55
56 """
57 for func in six.itervalues(self.__dict__):
58 func.to_gpu(device=device)
59 return self
60
61 def to_cpu(self):
62 """Migrates all parameters and gradients onto CPU.
63
64 This method calls ``to_cpu`` method of each registered object.
65
66 Returns:
67 self
68
69 """
70 for func in six.itervalues(self.__dict__):
71 func.to_cpu()
72 return self
73
74 def copy_parameters_from(self, params):
75 """Copies parameters from another source without reallocation.
76
77 Args:
78 params (Iterable): Iterable of parameter arrays.
79
80 """
81 for dst, src in zip(self.parameters, params):
82 if isinstance(dst, numpy.ndarray):
83 if isinstance(src, numpy.ndarray):
84 dst.copy(src)
85 else:
86 src.get(dst)
87 elif isinstance(src, numpy.ndarray):
88 dst.set(src)
89 else:
90 cuda.copy(src, out=dst)
91
92 @property
93 def parameters(self):
94 """Tuple of parameter arrays of all registered functions.
95
96 The order of parameters is consistent with :meth:`gradients` property.
97
98 """
99 return sum((func.parameters for _, func in self._get_sorted_funcs()),
100 ())
101
102 @parameters.setter
103 def parameters(self, params):
104 param_iter = iter(params)
105 for _, func in self._get_sorted_funcs():
106 func.parameters = param_iter
107
108 @property
109 def gradients(self):
110 """Tuple of gradient arrays of all registered functions.
111
112 The order of gradients is consistent with :meth:`parameters` property.
113
114 """
115 return sum((func.gradients for _, func in self._get_sorted_funcs()),
116 ())
117
118 @gradients.setter
119 def gradients(self, grads):
120 grad_iter = iter(grads)
121 for _, func in self._get_sorted_funcs():
122 func.gradients = grad_iter
123
124 def _get_sorted_funcs(self):
125 return sorted(six.iteritems(self.__dict__))
126
```
--- END FILES ---
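For orientation before patching: the only suspect branch is the CPU-to-CPU case at content line 84 (`dst.copy(src)`); the other branches already copy in place across devices via `src.get`, `dst.set`, and `cuda.copy`. The usage sketch below shows how the method is typically called; the layer names and the `chainer.functions.Linear` constructor reflect this era of the API but are illustrative, not taken from the issue.

```python
import chainer.functions as F
from chainer import FunctionSet

model = FunctionSet(l1=F.Linear(4, 3), l2=F.Linear(3, 2))
snapshot = FunctionSet(l1=F.Linear(4, 3), l2=F.Linear(3, 2))

# Overwrite snapshot's arrays in place with model's current values; on
# CPU, every (dst, src) pair hits the `dst.copy(src)` branch at line 84.
snapshot.copy_parameters_from(model.parameters)
```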
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/chainer/function_set.py b/chainer/function_set.py
--- a/chainer/function_set.py
+++ b/chainer/function_set.py
@@ -81,7 +81,7 @@
for dst, src in zip(self.parameters, params):
if isinstance(dst, numpy.ndarray):
if isinstance(src, numpy.ndarray):
- dst.copy(src)
+ numpy.copyto(dst, src)
else:
src.get(dst)
elif isinstance(src, numpy.ndarray):
| {"golden_diff": "diff --git a/chainer/function_set.py b/chainer/function_set.py\n--- a/chainer/function_set.py\n+++ b/chainer/function_set.py\n@@ -81,7 +81,7 @@\n for dst, src in zip(self.parameters, params):\n if isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n- dst.copy(src)\n+ numpy.copyto(dst, src)\n else:\n src.get(dst)\n elif isinstance(src, numpy.ndarray):\n", "issue": "FunctionSet.copy_parameters_from()\nHi all!\n\nThe code in 'FunctionSet.copy_parameters_from()' does not work, when 'src' and 'dst' are both numpy.ndarrays?\n\n``` python\nif isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n dst.copy(src) # this gives a ValueError\n```\n\nI think this should read\n\n``` python\nif isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n numpy.copyto(dst, src)\n```\n\nMy numpy.version.full_version is 1.9.2, the 'copyto' method exists since 1.7.0.\n\nCheers,\n-r\n\n", "before_files": [{"content": "import numpy\nimport six\n\nfrom chainer import cuda\n\n\nclass FunctionSet(object):\n\n \"\"\"Set of objects with ``parameters`` and ``gradients`` properties.\n\n :class:`FunctionSet` is useful to collect parameters and gradients of\n multiple parameterized :class:`Function` objects. :class:`FunctionSet`\n itself also implements :attr:`~FunctionSet.parameters` and\n :attr:`~FunctionSet.gradients`, so it can be nested in another\n :class:`FunctionSet` object.\n\n Function registration is done by just adding an attribute to\n :class:`FunctionSet` object.\n\n \"\"\"\n\n def __init__(self, **functions):\n \"\"\"Initializes the function set by given functions.\n\n Args:\n **functions: ``dict`` of ``str`` key and :class:`Function` values.\n The key-value pairs are just set to the :class:`FunctionSet`\n object as attributes.\n\n \"\"\"\n for name, func in six.iteritems(functions):\n setattr(self, name, func)\n\n def collect_parameters(self):\n \"\"\"Returns a tuple of parameters and gradients.\n\n Returns:\n Tuple (pair) of two tuples. The first element is a tuple of\n parameter arrays, and the second is a tuple of gradient arrays.\n\n \"\"\"\n return self.parameters, self.gradients\n\n def to_gpu(self, device=None):\n \"\"\"Migrates all parameters and gradients onto GPU.\n\n This method calls ``to_gpu`` method of each registered object.\n\n Args:\n device (int or :class:`pycuda.driver.Device` or ``None``): Device\n ID of GPU. 
If ``None`` is given, it uses the current device.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n func.to_gpu(device=device)\n return self\n\n def to_cpu(self):\n \"\"\"Migrates all parameters and gradients onto CPU.\n\n This method calls ``to_cpu`` method of each registered object.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n func.to_cpu()\n return self\n\n def copy_parameters_from(self, params):\n \"\"\"Copies parameters from another source without reallocation.\n\n Args:\n params (Iterable): Iterable of parameter arrays.\n\n \"\"\"\n for dst, src in zip(self.parameters, params):\n if isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n dst.copy(src)\n else:\n src.get(dst)\n elif isinstance(src, numpy.ndarray):\n dst.set(src)\n else:\n cuda.copy(src, out=dst)\n\n @property\n def parameters(self):\n \"\"\"Tuple of parameter arrays of all registered functions.\n\n The order of parameters is consistent with :meth:`gradients` property.\n\n \"\"\"\n return sum((func.parameters for _, func in self._get_sorted_funcs()),\n ())\n\n @parameters.setter\n def parameters(self, params):\n param_iter = iter(params)\n for _, func in self._get_sorted_funcs():\n func.parameters = param_iter\n\n @property\n def gradients(self):\n \"\"\"Tuple of gradient arrays of all registered functions.\n\n The order of gradients is consistent with :meth:`parameters` property.\n\n \"\"\"\n return sum((func.gradients for _, func in self._get_sorted_funcs()),\n ())\n\n @gradients.setter\n def gradients(self, grads):\n grad_iter = iter(grads)\n for _, func in self._get_sorted_funcs():\n func.gradients = grad_iter\n\n def _get_sorted_funcs(self):\n return sorted(six.iteritems(self.__dict__))\n", "path": "chainer/function_set.py"}], "after_files": [{"content": "import numpy\nimport six\n\nfrom chainer import cuda\n\n\nclass FunctionSet(object):\n\n \"\"\"Set of objects with ``parameters`` and ``gradients`` properties.\n\n :class:`FunctionSet` is useful to collect parameters and gradients of\n multiple parameterized :class:`Function` objects. :class:`FunctionSet`\n itself also implements :attr:`~FunctionSet.parameters` and\n :attr:`~FunctionSet.gradients`, so it can be nested in another\n :class:`FunctionSet` object.\n\n Function registration is done by just adding an attribute to\n :class:`FunctionSet` object.\n\n \"\"\"\n\n def __init__(self, **functions):\n \"\"\"Initializes the function set by given functions.\n\n Args:\n **functions: ``dict`` of ``str`` key and :class:`Function` values.\n The key-value pairs are just set to the :class:`FunctionSet`\n object as attributes.\n\n \"\"\"\n for name, func in six.iteritems(functions):\n setattr(self, name, func)\n\n def collect_parameters(self):\n \"\"\"Returns a tuple of parameters and gradients.\n\n Returns:\n Tuple (pair) of two tuples. The first element is a tuple of\n parameter arrays, and the second is a tuple of gradient arrays.\n\n \"\"\"\n return self.parameters, self.gradients\n\n def to_gpu(self, device=None):\n \"\"\"Migrates all parameters and gradients onto GPU.\n\n This method calls ``to_gpu`` method of each registered object.\n\n Args:\n device (int or :class:`pycuda.driver.Device` or ``None``): Device\n ID of GPU. 
If ``None`` is given, it uses the current device.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n func.to_gpu(device=device)\n return self\n\n def to_cpu(self):\n \"\"\"Migrates all parameters and gradients onto CPU.\n\n This method calls ``to_cpu`` method of each registered object.\n\n Returns:\n self\n\n \"\"\"\n for func in six.itervalues(self.__dict__):\n func.to_cpu()\n return self\n\n def copy_parameters_from(self, params):\n \"\"\"Copies parameters from another source without reallocation.\n\n Args:\n params (Iterable): Iterable of parameter arrays.\n\n \"\"\"\n for dst, src in zip(self.parameters, params):\n if isinstance(dst, numpy.ndarray):\n if isinstance(src, numpy.ndarray):\n numpy.copyto(dst, src)\n else:\n src.get(dst)\n elif isinstance(src, numpy.ndarray):\n dst.set(src)\n else:\n cuda.copy(src, out=dst)\n\n @property\n def parameters(self):\n \"\"\"Tuple of parameter arrays of all registered functions.\n\n The order of parameters is consistent with :meth:`gradients` property.\n\n \"\"\"\n return sum((func.parameters for _, func in self._get_sorted_funcs()),\n ())\n\n @parameters.setter\n def parameters(self, params):\n param_iter = iter(params)\n for _, func in self._get_sorted_funcs():\n func.parameters = param_iter\n\n @property\n def gradients(self):\n \"\"\"Tuple of gradient arrays of all registered functions.\n\n The order of gradients is consistent with :meth:`parameters` property.\n\n \"\"\"\n return sum((func.gradients for _, func in self._get_sorted_funcs()),\n ())\n\n @gradients.setter\n def gradients(self, grads):\n grad_iter = iter(grads)\n for _, func in self._get_sorted_funcs():\n func.gradients = grad_iter\n\n def _get_sorted_funcs(self):\n return sorted(six.iteritems(self.__dict__))\n", "path": "chainer/function_set.py"}]} |
gh_patches_debug_1636 | rasdani/github-patches | git_diff | Qiskit__qiskit-1024 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
_matches_coupling_map seems to check single-qubit ops too and fails
### Information
- **Qiskit Terra version**: 0.6.0
- **Python version**: 3.6
- **Operating system**: macOS
### What is the current behavior?
Using _matches_coupling_map breaks and returns False at the first single-qubit op, because single qubits are not present in the coupling map.
### Steps to reproduce the problem
Run the function on a DAG that contains single-qubit ops.
### What is the expected behavior?
Ignore single-qubit ops.
### Suggested solutions
Check the number of qubits on each op. I have a fix and a pull request ready to go if that's OK. 👍
--- END ISSUE ---
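Editorial note: the suggested fix amounts to skipping any op that acts on fewer than two qubits before consulting the coupling map. The helper below is a self-contained sketch of that guard; it is not the actual qiskit-terra 0.6 `_matches_coupling_map`, which walks the circuit's DAG, and the names and input format are illustrative.

```python
def matches_coupling_map(op_qubit_lists, coupling_map):
    """Sketch: True when every multi-qubit op fits the coupling map."""
    allowed = {tuple(pair) for pair in coupling_map}
    for qubits in op_qubit_lists:
        if len(qubits) < 2:  # single-qubit ops cannot violate the map
            continue
        if tuple(qubits) not in allowed:
            return False
    return True

print(matches_coupling_map([[0], [0, 1]], [[0, 1], [1, 2]]))  # True
print(matches_coupling_map([[2], [2, 0]], [[0, 1], [1, 2]]))  # False
```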
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `qiskit/transpiler/_transpiler.py`
Content:
```
1 # -*- coding: utf-8 -*-
2
3 # Copyright 2018, IBM.
4 #
5 # This source code is licensed under the Apache License, Version 2.0 found in
6 # the LICENSE.txt file in the root directory of this source tree.
7
8 """Tools for compiling a batch of quantum circuits."""
9 from copy import deepcopy
10 import logging
11 import uuid
12 import numpy as np
13 import scipy.sparse as sp
14 import scipy.sparse.csgraph as cs
15
16 from qiskit.transpiler._transpilererror import TranspilerError
17 from qiskit._qiskiterror import QISKitError
18 from qiskit import QuantumCircuit
19 from qiskit.dagcircuit import DAGCircuit
20 from qiskit.unroll import DagUnroller, DAGBackend, JsonBackend
21 from qiskit.mapper import (Coupling, optimize_1q_gates, coupling_list2dict, swap_mapper,
22 cx_cancellation, direction_mapper,
23 remove_last_measurements, return_last_measurements)
24 from qiskit.qobj import Qobj, QobjConfig, QobjExperiment, QobjItem, QobjHeader
25 from ._parallel import parallel_map
26
27 logger = logging.getLogger(__name__)
28
29
30 # pylint: disable=redefined-builtin
31 def compile(circuits, backend,
32 config=None, basis_gates=None, coupling_map=None, initial_layout=None,
33 shots=1024, max_credits=10, seed=None, qobj_id=None, hpc=None,
34 pass_manager=None):
35 """Compile a list of circuits into a qobj.
36
37 Args:
38 circuits (QuantumCircuit or list[QuantumCircuit]): circuits to compile
39 backend (BaseBackend): a backend to compile for
40 config (dict): dictionary of parameters (e.g. noise) used by runner
41 basis_gates (str): comma-separated basis gate set to compile to
42 coupling_map (list): coupling map (perhaps custom) to target in mapping
43 initial_layout (list): initial layout of qubits in mapping
44 shots (int): number of repetitions of each circuit, for sampling
45 max_credits (int): maximum credits to use
46 seed (int): random seed for simulators
47 qobj_id (int): identifier for the generated qobj
48 hpc (dict): HPC simulator parameters
49 pass_manager (PassManager): a pass_manager for the transpiler stage
50
51 Returns:
52 QobjExperiment: Experiment to be wrapped in a Qobj.
53
54 Raises:
55 TranspilerError: in case of bad compile options, e.g. the hpc options.
56 """
57 if isinstance(circuits, QuantumCircuit):
58 circuits = [circuits]
59
60 # FIXME: THIS NEEDS TO BE CLEANED UP -- some things to decide for list of circuits:
61 # 1. do all circuits have same coupling map?
62 # 2. do all circuit have the same basis set?
63 # 3. do they all have same registers etc?
64 backend_conf = backend.configuration()
65 backend_name = backend_conf['name']
66 # Check for valid parameters for the experiments.
67 if hpc is not None and \
68 not all(key in hpc for key in ('multi_shot_optimization', 'omp_num_threads')):
69 raise TranspilerError('Unknown HPC parameter format!')
70 basis_gates = basis_gates or backend_conf['basis_gates']
71 coupling_map = coupling_map or backend_conf['coupling_map']
72
73 # step 1: Making the list of dag circuits
74 dags = _circuits_2_dags(circuits)
75
76 # step 2: Transpile all the dags
77
78 # FIXME: Work-around for transpiling multiple circuits with different qreg names.
79 # Make compile take a list of initial_layouts.
80 _initial_layout = initial_layout
81
82 # Pick a good initial layout if coupling_map is not already satisfied
83 # otherwise keep it as q[i]->q[i].
84 # TODO: move this inside mapper pass.
85 initial_layouts = []
86 for dag in dags:
87 if (initial_layout is None and not backend.configuration()['simulator']
88 and not _matches_coupling_map(dag, coupling_map)):
89 _initial_layout = _pick_best_layout(dag, backend)
90 initial_layouts.append(_initial_layout)
91 dags = _transpile_dags(dags, basis_gates=basis_gates, coupling_map=coupling_map,
92 initial_layouts=initial_layouts, seed=seed,
93 pass_manager=pass_manager)
94
95 # step 3: Making a qobj
96 qobj = _dags_2_qobj(dags, backend_name=backend_name,
97 config=config, shots=shots, max_credits=max_credits,
98 qobj_id=qobj_id, basis_gates=basis_gates,
99 coupling_map=coupling_map, seed=seed)
100
101 return qobj
102
103
104 def _circuits_2_dags(circuits):
105 """Convert a list of circuits into a list of dags.
106
107 Args:
108 circuits (list[QuantumCircuit]): circuit to compile
109
110 Returns:
111 list[DAGCircuit]: the dag representation of the circuits
112 to be used in the transpiler
113 """
114 dags = parallel_map(DAGCircuit.fromQuantumCircuit, circuits)
115 return dags
116
117
118 def _transpile_dags(dags, basis_gates='u1,u2,u3,cx,id', coupling_map=None,
119 initial_layouts=None, seed=None, pass_manager=None):
120 """Transform multiple dags through a sequence of passes.
121
122 Args:
123 dags (list[DAGCircuit]): dag circuits to transform
124         basis_gates (str): a comma-separated string for the target basis gates
125 coupling_map (list): A graph of coupling
126 initial_layouts (list[dict]): A mapping of qubit to qubit for each dag
127 seed (int): random seed for the swap mapper
128         pass_manager (PassManager): pass manager instance for the transpilation process
129 If None, a default set of passes are run.
130 Otherwise, the passes defined in it will run.
131 If contains no passes in it, no dag transformations occur.
132
133 Returns:
134 list[DAGCircuit]: the dag circuits after going through transpilation
135
136 Raises:
137 TranspilerError: if the format is not valid.
138 """
139
140 index = list(range(len(dags)))
141 final_dags = parallel_map(_transpile_dags_parallel, index,
142 task_args=(dags, initial_layouts),
143 task_kwargs={'basis_gates': basis_gates,
144 'coupling_map': coupling_map,
145 'seed': seed,
146 'pass_manager': pass_manager})
147 return final_dags
148
149
150 def _transpile_dags_parallel(idx, dags, initial_layouts, basis_gates='u1,u2,u3,cx,id',
151 coupling_map=None, seed=None, pass_manager=None):
152 """Helper function for transpiling in parallel (if available).
153
154 Args:
155 idx (int): Index for dag of interest
156 dags (list): List of dags
157 initial_layouts (list): List of initial layouts
158         basis_gates (str): a comma-separated string for the target basis gates
159 coupling_map (list): A graph of coupling
160 seed (int): random seed for the swap mapper
161         pass_manager (PassManager): pass manager instance for the transpilation process
162 If None, a default set of passes are run.
163 Otherwise, the passes defined in it will run.
164 If contains no passes in it, no dag transformations occur.
165 Returns:
166 DAGCircuit: DAG circuit after going through transpilation.
167 """
168 dag = dags[idx]
169 initial_layout = initial_layouts[idx]
170 final_dag, final_layout = transpile(
171 dag,
172 basis_gates=basis_gates,
173 coupling_map=coupling_map,
174 initial_layout=initial_layout,
175 get_layout=True,
176 seed=seed,
177 pass_manager=pass_manager)
178 final_dag.layout = [[k, v]
179 for k, v in final_layout.items()] if final_layout else None
180 return final_dag
181
182
183 def _dags_2_qobj(dags, backend_name, config=None, shots=None,
184 max_credits=None, qobj_id=None, basis_gates=None, coupling_map=None,
185 seed=None):
186 """Convert a list of dags into a qobj.
187
188 Args:
189 dags (list[DAGCircuit]): dags to compile
190 backend_name (str): name of runner backend
191 config (dict): dictionary of parameters (e.g. noise) used by runner
192 shots (int): number of repetitions of each circuit, for sampling
193 max_credits (int): maximum credits to use
194 qobj_id (int): identifier for the generated qobj
195 basis_gates (list[str])): basis gates for the experiment
196 coupling_map (list): coupling map (perhaps custom) to target in mapping
197 seed (int): random seed for simulators
198
199 Returns:
200 Qobj: the Qobj to be run on the backends
201 """
202 # TODO: the following will be removed from qobj and thus removed here:
203 # `basis_gates`, `coupling_map`
204
205 # Step 1: create the Qobj, with empty experiments.
206 # Copy the configuration: the values in `config` have preference
207 qobj_config = deepcopy(config or {})
208 # TODO: "memory_slots" is required by the qobj schema in the top-level
209 # qobj.config, and is user-defined. At the moment is set to the maximum
210 # number of *register* slots for the circuits, in order to have `measure`
211 # behave properly until the transition is over; and each circuit stores
212 # its memory_slots in its configuration.
213 qobj_config.update({'shots': shots,
214 'max_credits': max_credits,
215 'memory_slots': 0})
216
217 qobj = Qobj(qobj_id=qobj_id or str(uuid.uuid4()),
218 config=QobjConfig(**qobj_config),
219 experiments=[],
220 header=QobjHeader(backend_name=backend_name))
221 if seed:
222 qobj.config.seed = seed
223
224 qobj.experiments = parallel_map(_dags_2_qobj_parallel, dags,
225 task_kwargs={'basis_gates': basis_gates,
226 'config': config,
227 'coupling_map': coupling_map})
228
229 # Update the `memory_slots` value.
230 # TODO: remove when `memory_slots` can be provided by the user.
231 qobj.config.memory_slots = max(experiment.config.memory_slots for
232 experiment in qobj.experiments)
233
234 # Update the `n_qubits` global value.
235 # TODO: num_qubits is not part of the qobj specification, but needed
236 # for the simulator.
237 qobj.config.n_qubits = max(experiment.config.n_qubits for
238 experiment in qobj.experiments)
239
240 return qobj
241
242
243 def _dags_2_qobj_parallel(dag, config=None, basis_gates=None,
244 coupling_map=None):
245 """Helper function for dags to qobj in parallel (if available).
246
247 Args:
248 dag (DAGCircuit): DAG to compile
249 config (dict): dictionary of parameters (e.g. noise) used by runner
250 basis_gates (list[str])): basis gates for the experiment
251 coupling_map (list): coupling map (perhaps custom) to target in mapping
252
253 Returns:
254 Qobj: Qobj to be run on the backends
255 """
256 json_circuit = DagUnroller(dag, JsonBackend(dag.basis)).execute()
257 # Step 3a: create the Experiment based on json_circuit
258 experiment = QobjExperiment.from_dict(json_circuit)
259 # Step 3b: populate the Experiment configuration and header
260 experiment.header.name = dag.name
261 # TODO: place in header or config?
262 experiment_config = deepcopy(config or {})
263 experiment_config.update({
264 'coupling_map': coupling_map,
265 'basis_gates': basis_gates,
266 'layout': dag.layout,
267 'memory_slots': sum(dag.cregs.values()),
268 # TODO: `n_qubits` is not part of the qobj spec, but needed for the simulator.
269 'n_qubits': sum(dag.qregs.values())})
270 experiment.config = QobjItem(**experiment_config)
271
272 # set eval_symbols=True to evaluate each symbolic expression
273 # TODO: after transition to qobj, we can drop this
274 experiment.header.compiled_circuit_qasm = dag.qasm(
275 qeflag=True, eval_symbols=True)
276 # Step 3c: add the Experiment to the Qobj
277 return experiment
278
279
280 def transpile(dag, basis_gates='u1,u2,u3,cx,id', coupling_map=None,
281 initial_layout=None, get_layout=False,
282 format='dag', seed=None, pass_manager=None):
283 """Transform a dag circuit into another dag circuit (transpile), through
284 consecutive passes on the dag.
285
286 Args:
287 dag (DAGCircuit): dag circuit to transform via transpilation
288         basis_gates (str): a comma-separated string for the target basis gates
289 coupling_map (list): A graph of coupling::
290
291 [
292 [control0(int), target0(int)],
293 [control1(int), target1(int)],
294 ]
295
296             eg. [[0, 2], [1, 2], [1, 3], [3, 4]]
297
298 initial_layout (dict): A mapping of qubit to qubit::
299
300 {
301 ("q", start(int)): ("q", final(int)),
302 ...
303 }
304 eg.
305 {
306 ("q", 0): ("q", 0),
307 ("q", 1): ("q", 1),
308 ("q", 2): ("q", 2),
309 ("q", 3): ("q", 3)
310 }
311 get_layout (bool): flag for returning the final layout after mapping
312 format (str): The target format of the compilation:
313 {'dag', 'json', 'qasm'}
314 seed (int): random seed for the swap mapper
315         pass_manager (PassManager): pass manager instance for the transpilation process
316 If None, a default set of passes are run.
317 Otherwise, the passes defined in it will run.
318 If contains no passes in it, no dag transformations occur.
319
320 Returns:
321 DAGCircuit: transformed dag
322 DAGCircuit, dict: transformed dag along with the final layout on backend qubits
323
324 Raises:
325 TranspilerError: if the format is not valid.
326 """
327 # TODO: `basis_gates` will be removed after we have the unroller pass.
328 # TODO: `coupling_map`, `initial_layout`, `get_layout`, `seed` removed after mapper pass.
329
330 # TODO: move this to the mapper pass
331 num_qubits = sum(dag.qregs.values())
332 if num_qubits == 1 or coupling_map == "all-to-all":
333 coupling_map = None
334
335 final_layout = None
336
337 if pass_manager:
338 # run the passes specified by the pass manager
339 for pass_ in pass_manager.passes():
340 pass_.run(dag)
341 else:
342 # default set of passes
343 # TODO: move each step here to a pass, and use a default passmanager below
344 basis = basis_gates.split(',') if basis_gates else []
345 dag_unroller = DagUnroller(dag, DAGBackend(basis))
346 dag = dag_unroller.expand_gates()
347 # if a coupling map is given compile to the map
348 if coupling_map:
349 logger.info("pre-mapping properties: %s",
350 dag.property_summary())
351 # Insert swap gates
352 coupling = Coupling(coupling_list2dict(coupling_map))
353 removed_meas = remove_last_measurements(dag)
354 logger.info("measurements moved: %s", removed_meas)
355 logger.info("initial layout: %s", initial_layout)
356 dag, final_layout, last_layout = swap_mapper(
357 dag, coupling, initial_layout, trials=20, seed=seed)
358 logger.info("final layout: %s", final_layout)
359 # Expand swaps
360 dag_unroller = DagUnroller(dag, DAGBackend(basis))
361 dag = dag_unroller.expand_gates()
362 # Change cx directions
363 dag = direction_mapper(dag, coupling)
364 # Simplify cx gates
365 cx_cancellation(dag)
366 # Simplify single qubit gates
367 dag = optimize_1q_gates(dag)
368 return_last_measurements(dag, removed_meas,
369 last_layout)
370 logger.info("post-mapping properties: %s",
371 dag.property_summary())
372
373 # choose output format
374 # TODO: do we need all of these formats, or just the dag?
375 if format == 'dag':
376 compiled_circuit = dag
377 elif format == 'json':
378 # FIXME: JsonBackend is wrongly taking an ordered dict as basis, not list
379 dag_unroller = DagUnroller(dag, JsonBackend(dag.basis))
380 compiled_circuit = dag_unroller.execute()
381 elif format == 'qasm':
382 compiled_circuit = dag.qasm()
383 else:
384 raise TranspilerError('unrecognized circuit format')
385
386 if get_layout:
387 return compiled_circuit, final_layout
388 return compiled_circuit
389
390
391 def _best_subset(backend, n_qubits):
392 """Computes the qubit mapping with the best
393 connectivity.
394
395 Parameters:
396 backend (Qiskit.BaseBackend): A QISKit backend instance.
397 n_qubits (int): Number of subset qubits to consider.
398
399 Returns:
400 ndarray: Array of qubits to use for best
401 connectivity mapping.
402
403 Raises:
404 QISKitError: Wrong number of qubits given.
405 """
406 if n_qubits == 1:
407 return np.array([0])
408 elif n_qubits <= 0:
409 raise QISKitError('Number of qubits <= 0.')
410
411 device_qubits = backend.configuration()['n_qubits']
412 if n_qubits > device_qubits:
413 raise QISKitError('Number of qubits greater than device.')
414
415 cmap = np.asarray(backend.configuration()['coupling_map'])
416 data = np.ones_like(cmap[:, 0])
417 sp_cmap = sp.coo_matrix((data, (cmap[:, 0], cmap[:, 1])),
418 shape=(device_qubits, device_qubits)).tocsr()
419 best = 0
420 best_map = None
421 # do bfs with each node as starting point
422 for k in range(sp_cmap.shape[0]):
423 bfs = cs.breadth_first_order(sp_cmap, i_start=k, directed=False,
424 return_predecessors=False)
425
426 connection_count = 0
427 for i in range(n_qubits):
428 node_idx = bfs[i]
429 for j in range(sp_cmap.indptr[node_idx],
430 sp_cmap.indptr[node_idx + 1]):
431 node = sp_cmap.indices[j]
432 for counter in range(n_qubits):
433 if node == bfs[counter]:
434 connection_count += 1
435 break
436
437 if connection_count > best:
438 best = connection_count
439 best_map = bfs[0:n_qubits]
440 return best_map
441
442
443 def _matches_coupling_map(dag, coupling_map):
444 """Iterate over circuit gates to check if all multi-qubit couplings
445 match the qubit coupling graph in the backend.
446
447 Parameters:
448 dag (DAGCircuit): DAG representation of circuit.
449 coupling_map (list): Backend coupling map, represented as an adjacency list.
450
451 Returns:
452 bool: True if all gates readily fit the backend coupling graph.
453 False if there's at least one gate that uses multiple qubits
454 which does not match the backend couplings.
455 """
456 match = True
457 for _, data in dag.multi_graph.nodes(data=True):
458 if data['type'] == 'op':
459 gate_map = [qr[1] for qr in data['qargs']]
460 if gate_map not in coupling_map:
461 match = False
462 break
463 return match
464
465
466 def _pick_best_layout(dag, backend):
467 """Pick a convenient layout depending on the best matching qubit connectivity
468
469 Parameters:
470 dag (DAGCircuit): DAG representation of circuit.
471 backend (BaseBackend) : The backend with the coupling_map for searching
472
473 Returns:
474 dict: A special ordered initial_layout
475
476 """
477 num_qubits = sum(dag.qregs.values())
478 best_sub = _best_subset(backend, num_qubits)
479 layout = {}
480 map_iter = 0
481 for key, value in dag.qregs.items():
482 for i in range(value):
483 layout[(key, i)] = ('q', best_sub[map_iter])
484 map_iter += 1
485 return layout
486
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/qiskit/transpiler/_transpiler.py b/qiskit/transpiler/_transpiler.py
--- a/qiskit/transpiler/_transpiler.py
+++ b/qiskit/transpiler/_transpiler.py
@@ -457,9 +457,10 @@
for _, data in dag.multi_graph.nodes(data=True):
if data['type'] == 'op':
gate_map = [qr[1] for qr in data['qargs']]
- if gate_map not in coupling_map:
- match = False
- break
+ if len(gate_map) > 1:
+ if gate_map not in coupling_map:
+ match = False
+ break
return match
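
As a quick offline sanity check of the patched logic, the helper below mirrors the loop body rather than calling the real `_matches_coupling_map` (which needs a `DAGCircuit`); the gate maps are made-up:
```python
def matches(gate_maps, coupling_map):
    # Mirrors the patched loop: single-qubit gate maps are skipped,
    # multi-qubit ones must appear in the coupling map.
    for gate_map in gate_maps:
        if len(gate_map) > 1 and gate_map not in coupling_map:
            return False
    return True

coupling_map = [[0, 2], [1, 2], [1, 3], [3, 4]]
assert matches([[0], [1, 2]], coupling_map)      # single-qubit op no longer trips the check
assert not matches([[0], [0, 4]], coupling_map)  # an unmapped coupling still fails
```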
| {"golden_diff": "diff --git a/qiskit/transpiler/_transpiler.py b/qiskit/transpiler/_transpiler.py\n--- a/qiskit/transpiler/_transpiler.py\n+++ b/qiskit/transpiler/_transpiler.py\n@@ -457,9 +457,10 @@\n for _, data in dag.multi_graph.nodes(data=True):\n if data['type'] == 'op':\n gate_map = [qr[1] for qr in data['qargs']]\n- if gate_map not in coupling_map:\n- match = False\n- break\n+ if len(gate_map) > 1:\n+ if gate_map not in coupling_map:\n+ match = False\n+ break\n return match\n", "issue": "_matches_coupling_map seems to check single qubit ops too and fails\n<!-- \u26a0\ufe0f If you do not respect this template, your issue will be closed -->\r\n<!-- \u26a0\ufe0f Make sure to browse the opened and closed issues -->\r\n\r\n### Informations\r\n\r\n- **Qiskit Terra version**: 0.6.0\r\n- **Python version**: 3.6\r\n- **Operating system**: macos\r\n\r\n### What is the current behavior?\r\n\r\nUsing _matches_coupling_map breaks and returns false at first single qubit op as single qubits are not in the coupling map\r\n\r\n### Steps to reproduce the problem\r\n\r\nRun the function on a dag \r\n\r\n### What is the expected behavior?\r\n\r\nIgnore single qubits ops\r\n\r\n### Suggested solutions\r\nCheck no of qubits. Have fixed and pull request ready to go if ok. \ud83d\udc4d \r\n\r\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright 2018, IBM.\n#\n# This source code is licensed under the Apache License, Version 2.0 found in\n# the LICENSE.txt file in the root directory of this source tree.\n\n\"\"\"Tools for compiling a batch of quantum circuits.\"\"\"\nfrom copy import deepcopy\nimport logging\nimport uuid\nimport numpy as np\nimport scipy.sparse as sp\nimport scipy.sparse.csgraph as cs\n\nfrom qiskit.transpiler._transpilererror import TranspilerError\nfrom qiskit._qiskiterror import QISKitError\nfrom qiskit import QuantumCircuit\nfrom qiskit.dagcircuit import DAGCircuit\nfrom qiskit.unroll import DagUnroller, DAGBackend, JsonBackend\nfrom qiskit.mapper import (Coupling, optimize_1q_gates, coupling_list2dict, swap_mapper,\n cx_cancellation, direction_mapper,\n remove_last_measurements, return_last_measurements)\nfrom qiskit.qobj import Qobj, QobjConfig, QobjExperiment, QobjItem, QobjHeader\nfrom ._parallel import parallel_map\n\nlogger = logging.getLogger(__name__)\n\n\n# pylint: disable=redefined-builtin\ndef compile(circuits, backend,\n config=None, basis_gates=None, coupling_map=None, initial_layout=None,\n shots=1024, max_credits=10, seed=None, qobj_id=None, hpc=None,\n pass_manager=None):\n \"\"\"Compile a list of circuits into a qobj.\n\n Args:\n circuits (QuantumCircuit or list[QuantumCircuit]): circuits to compile\n backend (BaseBackend): a backend to compile for\n config (dict): dictionary of parameters (e.g. noise) used by runner\n basis_gates (str): comma-separated basis gate set to compile to\n coupling_map (list): coupling map (perhaps custom) to target in mapping\n initial_layout (list): initial layout of qubits in mapping\n shots (int): number of repetitions of each circuit, for sampling\n max_credits (int): maximum credits to use\n seed (int): random seed for simulators\n qobj_id (int): identifier for the generated qobj\n hpc (dict): HPC simulator parameters\n pass_manager (PassManager): a pass_manager for the transpiler stage\n\n Returns:\n QobjExperiment: Experiment to be wrapped in a Qobj.\n\n Raises:\n TranspilerError: in case of bad compile options, e.g. 
the hpc options.\n \"\"\"\n if isinstance(circuits, QuantumCircuit):\n circuits = [circuits]\n\n # FIXME: THIS NEEDS TO BE CLEANED UP -- some things to decide for list of circuits:\n # 1. do all circuits have same coupling map?\n # 2. do all circuit have the same basis set?\n # 3. do they all have same registers etc?\n backend_conf = backend.configuration()\n backend_name = backend_conf['name']\n # Check for valid parameters for the experiments.\n if hpc is not None and \\\n not all(key in hpc for key in ('multi_shot_optimization', 'omp_num_threads')):\n raise TranspilerError('Unknown HPC parameter format!')\n basis_gates = basis_gates or backend_conf['basis_gates']\n coupling_map = coupling_map or backend_conf['coupling_map']\n\n # step 1: Making the list of dag circuits\n dags = _circuits_2_dags(circuits)\n\n # step 2: Transpile all the dags\n\n # FIXME: Work-around for transpiling multiple circuits with different qreg names.\n # Make compile take a list of initial_layouts.\n _initial_layout = initial_layout\n\n # Pick a good initial layout if coupling_map is not already satisfied\n # otherwise keep it as q[i]->q[i].\n # TODO: move this inside mapper pass.\n initial_layouts = []\n for dag in dags:\n if (initial_layout is None and not backend.configuration()['simulator']\n and not _matches_coupling_map(dag, coupling_map)):\n _initial_layout = _pick_best_layout(dag, backend)\n initial_layouts.append(_initial_layout)\n dags = _transpile_dags(dags, basis_gates=basis_gates, coupling_map=coupling_map,\n initial_layouts=initial_layouts, seed=seed,\n pass_manager=pass_manager)\n\n # step 3: Making a qobj\n qobj = _dags_2_qobj(dags, backend_name=backend_name,\n config=config, shots=shots, max_credits=max_credits,\n qobj_id=qobj_id, basis_gates=basis_gates,\n coupling_map=coupling_map, seed=seed)\n\n return qobj\n\n\ndef _circuits_2_dags(circuits):\n \"\"\"Convert a list of circuits into a list of dags.\n\n Args:\n circuits (list[QuantumCircuit]): circuit to compile\n\n Returns:\n list[DAGCircuit]: the dag representation of the circuits\n to be used in the transpiler\n \"\"\"\n dags = parallel_map(DAGCircuit.fromQuantumCircuit, circuits)\n return dags\n\n\ndef _transpile_dags(dags, basis_gates='u1,u2,u3,cx,id', coupling_map=None,\n initial_layouts=None, seed=None, pass_manager=None):\n \"\"\"Transform multiple dags through a sequence of passes.\n\n Args:\n dags (list[DAGCircuit]): dag circuits to transform\n basis_gates (str): a comma seperated string for the target basis gates\n coupling_map (list): A graph of coupling\n initial_layouts (list[dict]): A mapping of qubit to qubit for each dag\n seed (int): random seed for the swap mapper\n pass_manager (PassManager): pass manager instance for the tranpilation process\n If None, a default set of passes are run.\n Otherwise, the passes defined in it will run.\n If contains no passes in it, no dag transformations occur.\n\n Returns:\n list[DAGCircuit]: the dag circuits after going through transpilation\n\n Raises:\n TranspilerError: if the format is not valid.\n \"\"\"\n\n index = list(range(len(dags)))\n final_dags = parallel_map(_transpile_dags_parallel, index,\n task_args=(dags, initial_layouts),\n task_kwargs={'basis_gates': basis_gates,\n 'coupling_map': coupling_map,\n 'seed': seed,\n 'pass_manager': pass_manager})\n return final_dags\n\n\ndef _transpile_dags_parallel(idx, dags, initial_layouts, basis_gates='u1,u2,u3,cx,id',\n coupling_map=None, seed=None, pass_manager=None):\n \"\"\"Helper function for transpiling in parallel (if 
available).\n\n Args:\n idx (int): Index for dag of interest\n dags (list): List of dags\n initial_layouts (list): List of initial layouts\n basis_gates (str): a comma seperated string for the target basis gates\n coupling_map (list): A graph of coupling\n seed (int): random seed for the swap mapper\n pass_manager (PassManager): pass manager instance for the tranpilation process\n If None, a default set of passes are run.\n Otherwise, the passes defined in it will run.\n If contains no passes in it, no dag transformations occur.\n Returns:\n DAGCircuit: DAG circuit after going through transpilation.\n \"\"\"\n dag = dags[idx]\n initial_layout = initial_layouts[idx]\n final_dag, final_layout = transpile(\n dag,\n basis_gates=basis_gates,\n coupling_map=coupling_map,\n initial_layout=initial_layout,\n get_layout=True,\n seed=seed,\n pass_manager=pass_manager)\n final_dag.layout = [[k, v]\n for k, v in final_layout.items()] if final_layout else None\n return final_dag\n\n\ndef _dags_2_qobj(dags, backend_name, config=None, shots=None,\n max_credits=None, qobj_id=None, basis_gates=None, coupling_map=None,\n seed=None):\n \"\"\"Convert a list of dags into a qobj.\n\n Args:\n dags (list[DAGCircuit]): dags to compile\n backend_name (str): name of runner backend\n config (dict): dictionary of parameters (e.g. noise) used by runner\n shots (int): number of repetitions of each circuit, for sampling\n max_credits (int): maximum credits to use\n qobj_id (int): identifier for the generated qobj\n basis_gates (list[str])): basis gates for the experiment\n coupling_map (list): coupling map (perhaps custom) to target in mapping\n seed (int): random seed for simulators\n\n Returns:\n Qobj: the Qobj to be run on the backends\n \"\"\"\n # TODO: the following will be removed from qobj and thus removed here:\n # `basis_gates`, `coupling_map`\n\n # Step 1: create the Qobj, with empty experiments.\n # Copy the configuration: the values in `config` have preference\n qobj_config = deepcopy(config or {})\n # TODO: \"memory_slots\" is required by the qobj schema in the top-level\n # qobj.config, and is user-defined. At the moment is set to the maximum\n # number of *register* slots for the circuits, in order to have `measure`\n # behave properly until the transition is over; and each circuit stores\n # its memory_slots in its configuration.\n qobj_config.update({'shots': shots,\n 'max_credits': max_credits,\n 'memory_slots': 0})\n\n qobj = Qobj(qobj_id=qobj_id or str(uuid.uuid4()),\n config=QobjConfig(**qobj_config),\n experiments=[],\n header=QobjHeader(backend_name=backend_name))\n if seed:\n qobj.config.seed = seed\n\n qobj.experiments = parallel_map(_dags_2_qobj_parallel, dags,\n task_kwargs={'basis_gates': basis_gates,\n 'config': config,\n 'coupling_map': coupling_map})\n\n # Update the `memory_slots` value.\n # TODO: remove when `memory_slots` can be provided by the user.\n qobj.config.memory_slots = max(experiment.config.memory_slots for\n experiment in qobj.experiments)\n\n # Update the `n_qubits` global value.\n # TODO: num_qubits is not part of the qobj specification, but needed\n # for the simulator.\n qobj.config.n_qubits = max(experiment.config.n_qubits for\n experiment in qobj.experiments)\n\n return qobj\n\n\ndef _dags_2_qobj_parallel(dag, config=None, basis_gates=None,\n coupling_map=None):\n \"\"\"Helper function for dags to qobj in parallel (if available).\n\n Args:\n dag (DAGCircuit): DAG to compile\n config (dict): dictionary of parameters (e.g. 
noise) used by runner\n basis_gates (list[str])): basis gates for the experiment\n coupling_map (list): coupling map (perhaps custom) to target in mapping\n\n Returns:\n Qobj: Qobj to be run on the backends\n \"\"\"\n json_circuit = DagUnroller(dag, JsonBackend(dag.basis)).execute()\n # Step 3a: create the Experiment based on json_circuit\n experiment = QobjExperiment.from_dict(json_circuit)\n # Step 3b: populate the Experiment configuration and header\n experiment.header.name = dag.name\n # TODO: place in header or config?\n experiment_config = deepcopy(config or {})\n experiment_config.update({\n 'coupling_map': coupling_map,\n 'basis_gates': basis_gates,\n 'layout': dag.layout,\n 'memory_slots': sum(dag.cregs.values()),\n # TODO: `n_qubits` is not part of the qobj spec, but needed for the simulator.\n 'n_qubits': sum(dag.qregs.values())})\n experiment.config = QobjItem(**experiment_config)\n\n # set eval_symbols=True to evaluate each symbolic expression\n # TODO: after transition to qobj, we can drop this\n experiment.header.compiled_circuit_qasm = dag.qasm(\n qeflag=True, eval_symbols=True)\n # Step 3c: add the Experiment to the Qobj\n return experiment\n\n\ndef transpile(dag, basis_gates='u1,u2,u3,cx,id', coupling_map=None,\n initial_layout=None, get_layout=False,\n format='dag', seed=None, pass_manager=None):\n \"\"\"Transform a dag circuit into another dag circuit (transpile), through\n consecutive passes on the dag.\n\n Args:\n dag (DAGCircuit): dag circuit to transform via transpilation\n basis_gates (str): a comma seperated string for the target basis gates\n coupling_map (list): A graph of coupling::\n\n [\n [control0(int), target0(int)],\n [control1(int), target1(int)],\n ]\n\n eg. [[0, 2], [1, 2], [1, 3], [3, 4]}\n\n initial_layout (dict): A mapping of qubit to qubit::\n\n {\n (\"q\", start(int)): (\"q\", final(int)),\n ...\n }\n eg.\n {\n (\"q\", 0): (\"q\", 0),\n (\"q\", 1): (\"q\", 1),\n (\"q\", 2): (\"q\", 2),\n (\"q\", 3): (\"q\", 3)\n }\n get_layout (bool): flag for returning the final layout after mapping\n format (str): The target format of the compilation:\n {'dag', 'json', 'qasm'}\n seed (int): random seed for the swap mapper\n pass_manager (PassManager): pass manager instance for the tranpilation process\n If None, a default set of passes are run.\n Otherwise, the passes defined in it will run.\n If contains no passes in it, no dag transformations occur.\n\n Returns:\n DAGCircuit: transformed dag\n DAGCircuit, dict: transformed dag along with the final layout on backend qubits\n\n Raises:\n TranspilerError: if the format is not valid.\n \"\"\"\n # TODO: `basis_gates` will be removed after we have the unroller pass.\n # TODO: `coupling_map`, `initial_layout`, `get_layout`, `seed` removed after mapper pass.\n\n # TODO: move this to the mapper pass\n num_qubits = sum(dag.qregs.values())\n if num_qubits == 1 or coupling_map == \"all-to-all\":\n coupling_map = None\n\n final_layout = None\n\n if pass_manager:\n # run the passes specified by the pass manager\n for pass_ in pass_manager.passes():\n pass_.run(dag)\n else:\n # default set of passes\n # TODO: move each step here to a pass, and use a default passmanager below\n basis = basis_gates.split(',') if basis_gates else []\n dag_unroller = DagUnroller(dag, DAGBackend(basis))\n dag = dag_unroller.expand_gates()\n # if a coupling map is given compile to the map\n if coupling_map:\n logger.info(\"pre-mapping properties: %s\",\n dag.property_summary())\n # Insert swap gates\n coupling = 
Coupling(coupling_list2dict(coupling_map))\n removed_meas = remove_last_measurements(dag)\n logger.info(\"measurements moved: %s\", removed_meas)\n logger.info(\"initial layout: %s\", initial_layout)\n dag, final_layout, last_layout = swap_mapper(\n dag, coupling, initial_layout, trials=20, seed=seed)\n logger.info(\"final layout: %s\", final_layout)\n # Expand swaps\n dag_unroller = DagUnroller(dag, DAGBackend(basis))\n dag = dag_unroller.expand_gates()\n # Change cx directions\n dag = direction_mapper(dag, coupling)\n # Simplify cx gates\n cx_cancellation(dag)\n # Simplify single qubit gates\n dag = optimize_1q_gates(dag)\n return_last_measurements(dag, removed_meas,\n last_layout)\n logger.info(\"post-mapping properties: %s\",\n dag.property_summary())\n\n # choose output format\n # TODO: do we need all of these formats, or just the dag?\n if format == 'dag':\n compiled_circuit = dag\n elif format == 'json':\n # FIXME: JsonBackend is wrongly taking an ordered dict as basis, not list\n dag_unroller = DagUnroller(dag, JsonBackend(dag.basis))\n compiled_circuit = dag_unroller.execute()\n elif format == 'qasm':\n compiled_circuit = dag.qasm()\n else:\n raise TranspilerError('unrecognized circuit format')\n\n if get_layout:\n return compiled_circuit, final_layout\n return compiled_circuit\n\n\ndef _best_subset(backend, n_qubits):\n \"\"\"Computes the qubit mapping with the best\n connectivity.\n\n Parameters:\n backend (Qiskit.BaseBackend): A QISKit backend instance.\n n_qubits (int): Number of subset qubits to consider.\n\n Returns:\n ndarray: Array of qubits to use for best\n connectivity mapping.\n\n Raises:\n QISKitError: Wrong number of qubits given.\n \"\"\"\n if n_qubits == 1:\n return np.array([0])\n elif n_qubits <= 0:\n raise QISKitError('Number of qubits <= 0.')\n\n device_qubits = backend.configuration()['n_qubits']\n if n_qubits > device_qubits:\n raise QISKitError('Number of qubits greater than device.')\n\n cmap = np.asarray(backend.configuration()['coupling_map'])\n data = np.ones_like(cmap[:, 0])\n sp_cmap = sp.coo_matrix((data, (cmap[:, 0], cmap[:, 1])),\n shape=(device_qubits, device_qubits)).tocsr()\n best = 0\n best_map = None\n # do bfs with each node as starting point\n for k in range(sp_cmap.shape[0]):\n bfs = cs.breadth_first_order(sp_cmap, i_start=k, directed=False,\n return_predecessors=False)\n\n connection_count = 0\n for i in range(n_qubits):\n node_idx = bfs[i]\n for j in range(sp_cmap.indptr[node_idx],\n sp_cmap.indptr[node_idx + 1]):\n node = sp_cmap.indices[j]\n for counter in range(n_qubits):\n if node == bfs[counter]:\n connection_count += 1\n break\n\n if connection_count > best:\n best = connection_count\n best_map = bfs[0:n_qubits]\n return best_map\n\n\ndef _matches_coupling_map(dag, coupling_map):\n \"\"\"Iterate over circuit gates to check if all multi-qubit couplings\n match the qubit coupling graph in the backend.\n\n Parameters:\n dag (DAGCircuit): DAG representation of circuit.\n coupling_map (list): Backend coupling map, represented as an adjacency list.\n\n Returns:\n bool: True if all gates readily fit the backend coupling graph.\n False if there's at least one gate that uses multiple qubits\n which does not match the backend couplings.\n \"\"\"\n match = True\n for _, data in dag.multi_graph.nodes(data=True):\n if data['type'] == 'op':\n gate_map = [qr[1] for qr in data['qargs']]\n if gate_map not in coupling_map:\n match = False\n break\n return match\n\n\ndef _pick_best_layout(dag, backend):\n \"\"\"Pick a convenient layout depending on the 
best matching qubit connectivity\n\n Parameters:\n dag (DAGCircuit): DAG representation of circuit.\n backend (BaseBackend) : The backend with the coupling_map for searching\n\n Returns:\n dict: A special ordered initial_layout\n\n \"\"\"\n num_qubits = sum(dag.qregs.values())\n best_sub = _best_subset(backend, num_qubits)\n layout = {}\n map_iter = 0\n for key, value in dag.qregs.items():\n for i in range(value):\n layout[(key, i)] = ('q', best_sub[map_iter])\n map_iter += 1\n return layout\n", "path": "qiskit/transpiler/_transpiler.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n\n# Copyright 2018, IBM.\n#\n# This source code is licensed under the Apache License, Version 2.0 found in\n# the LICENSE.txt file in the root directory of this source tree.\n\n\"\"\"Tools for compiling a batch of quantum circuits.\"\"\"\nfrom copy import deepcopy\nimport logging\nimport uuid\nimport numpy as np\nimport scipy.sparse as sp\nimport scipy.sparse.csgraph as cs\n\nfrom qiskit.transpiler._transpilererror import TranspilerError\nfrom qiskit._qiskiterror import QISKitError\nfrom qiskit import QuantumCircuit\nfrom qiskit.dagcircuit import DAGCircuit\nfrom qiskit.unroll import DagUnroller, DAGBackend, JsonBackend\nfrom qiskit.mapper import (Coupling, optimize_1q_gates, coupling_list2dict, swap_mapper,\n cx_cancellation, direction_mapper,\n remove_last_measurements, return_last_measurements)\nfrom qiskit.qobj import Qobj, QobjConfig, QobjExperiment, QobjItem, QobjHeader\nfrom ._parallel import parallel_map\n\nlogger = logging.getLogger(__name__)\n\n\n# pylint: disable=redefined-builtin\ndef compile(circuits, backend,\n config=None, basis_gates=None, coupling_map=None, initial_layout=None,\n shots=1024, max_credits=10, seed=None, qobj_id=None, hpc=None,\n pass_manager=None):\n \"\"\"Compile a list of circuits into a qobj.\n\n Args:\n circuits (QuantumCircuit or list[QuantumCircuit]): circuits to compile\n backend (BaseBackend): a backend to compile for\n config (dict): dictionary of parameters (e.g. noise) used by runner\n basis_gates (str): comma-separated basis gate set to compile to\n coupling_map (list): coupling map (perhaps custom) to target in mapping\n initial_layout (list): initial layout of qubits in mapping\n shots (int): number of repetitions of each circuit, for sampling\n max_credits (int): maximum credits to use\n seed (int): random seed for simulators\n qobj_id (int): identifier for the generated qobj\n hpc (dict): HPC simulator parameters\n pass_manager (PassManager): a pass_manager for the transpiler stage\n\n Returns:\n QobjExperiment: Experiment to be wrapped in a Qobj.\n\n Raises:\n TranspilerError: in case of bad compile options, e.g. the hpc options.\n \"\"\"\n if isinstance(circuits, QuantumCircuit):\n circuits = [circuits]\n\n # FIXME: THIS NEEDS TO BE CLEANED UP -- some things to decide for list of circuits:\n # 1. do all circuits have same coupling map?\n # 2. do all circuit have the same basis set?\n # 3. 
do they all have same registers etc?\n backend_conf = backend.configuration()\n backend_name = backend_conf['name']\n # Check for valid parameters for the experiments.\n if hpc is not None and \\\n not all(key in hpc for key in ('multi_shot_optimization', 'omp_num_threads')):\n raise TranspilerError('Unknown HPC parameter format!')\n basis_gates = basis_gates or backend_conf['basis_gates']\n coupling_map = coupling_map or backend_conf['coupling_map']\n\n # step 1: Making the list of dag circuits\n dags = _circuits_2_dags(circuits)\n\n # step 2: Transpile all the dags\n\n # FIXME: Work-around for transpiling multiple circuits with different qreg names.\n # Make compile take a list of initial_layouts.\n _initial_layout = initial_layout\n\n # Pick a good initial layout if coupling_map is not already satisfied\n # otherwise keep it as q[i]->q[i].\n # TODO: move this inside mapper pass.\n initial_layouts = []\n for dag in dags:\n if (initial_layout is None and not backend.configuration()['simulator']\n and not _matches_coupling_map(dag, coupling_map)):\n _initial_layout = _pick_best_layout(dag, backend)\n initial_layouts.append(_initial_layout)\n dags = _transpile_dags(dags, basis_gates=basis_gates, coupling_map=coupling_map,\n initial_layouts=initial_layouts, seed=seed,\n pass_manager=pass_manager)\n\n # step 3: Making a qobj\n qobj = _dags_2_qobj(dags, backend_name=backend_name,\n config=config, shots=shots, max_credits=max_credits,\n qobj_id=qobj_id, basis_gates=basis_gates,\n coupling_map=coupling_map, seed=seed)\n\n return qobj\n\n\ndef _circuits_2_dags(circuits):\n \"\"\"Convert a list of circuits into a list of dags.\n\n Args:\n circuits (list[QuantumCircuit]): circuit to compile\n\n Returns:\n list[DAGCircuit]: the dag representation of the circuits\n to be used in the transpiler\n \"\"\"\n dags = parallel_map(DAGCircuit.fromQuantumCircuit, circuits)\n return dags\n\n\ndef _transpile_dags(dags, basis_gates='u1,u2,u3,cx,id', coupling_map=None,\n initial_layouts=None, seed=None, pass_manager=None):\n \"\"\"Transform multiple dags through a sequence of passes.\n\n Args:\n dags (list[DAGCircuit]): dag circuits to transform\n basis_gates (str): a comma seperated string for the target basis gates\n coupling_map (list): A graph of coupling\n initial_layouts (list[dict]): A mapping of qubit to qubit for each dag\n seed (int): random seed for the swap mapper\n pass_manager (PassManager): pass manager instance for the tranpilation process\n If None, a default set of passes are run.\n Otherwise, the passes defined in it will run.\n If contains no passes in it, no dag transformations occur.\n\n Returns:\n list[DAGCircuit]: the dag circuits after going through transpilation\n\n Raises:\n TranspilerError: if the format is not valid.\n \"\"\"\n\n index = list(range(len(dags)))\n final_dags = parallel_map(_transpile_dags_parallel, index,\n task_args=(dags, initial_layouts),\n task_kwargs={'basis_gates': basis_gates,\n 'coupling_map': coupling_map,\n 'seed': seed,\n 'pass_manager': pass_manager})\n return final_dags\n\n\ndef _transpile_dags_parallel(idx, dags, initial_layouts, basis_gates='u1,u2,u3,cx,id',\n coupling_map=None, seed=None, pass_manager=None):\n \"\"\"Helper function for transpiling in parallel (if available).\n\n Args:\n idx (int): Index for dag of interest\n dags (list): List of dags\n initial_layouts (list): List of initial layouts\n basis_gates (str): a comma seperated string for the target basis gates\n coupling_map (list): A graph of coupling\n seed (int): random seed for the swap 
mapper\n pass_manager (PassManager): pass manager instance for the tranpilation process\n If None, a default set of passes are run.\n Otherwise, the passes defined in it will run.\n If contains no passes in it, no dag transformations occur.\n Returns:\n DAGCircuit: DAG circuit after going through transpilation.\n \"\"\"\n dag = dags[idx]\n initial_layout = initial_layouts[idx]\n final_dag, final_layout = transpile(\n dag,\n basis_gates=basis_gates,\n coupling_map=coupling_map,\n initial_layout=initial_layout,\n get_layout=True,\n seed=seed,\n pass_manager=pass_manager)\n final_dag.layout = [[k, v]\n for k, v in final_layout.items()] if final_layout else None\n return final_dag\n\n\ndef _dags_2_qobj(dags, backend_name, config=None, shots=None,\n max_credits=None, qobj_id=None, basis_gates=None, coupling_map=None,\n seed=None):\n \"\"\"Convert a list of dags into a qobj.\n\n Args:\n dags (list[DAGCircuit]): dags to compile\n backend_name (str): name of runner backend\n config (dict): dictionary of parameters (e.g. noise) used by runner\n shots (int): number of repetitions of each circuit, for sampling\n max_credits (int): maximum credits to use\n qobj_id (int): identifier for the generated qobj\n basis_gates (list[str])): basis gates for the experiment\n coupling_map (list): coupling map (perhaps custom) to target in mapping\n seed (int): random seed for simulators\n\n Returns:\n Qobj: the Qobj to be run on the backends\n \"\"\"\n # TODO: the following will be removed from qobj and thus removed here:\n # `basis_gates`, `coupling_map`\n\n # Step 1: create the Qobj, with empty experiments.\n # Copy the configuration: the values in `config` have preference\n qobj_config = deepcopy(config or {})\n # TODO: \"memory_slots\" is required by the qobj schema in the top-level\n # qobj.config, and is user-defined. At the moment is set to the maximum\n # number of *register* slots for the circuits, in order to have `measure`\n # behave properly until the transition is over; and each circuit stores\n # its memory_slots in its configuration.\n qobj_config.update({'shots': shots,\n 'max_credits': max_credits,\n 'memory_slots': 0})\n\n qobj = Qobj(qobj_id=qobj_id or str(uuid.uuid4()),\n config=QobjConfig(**qobj_config),\n experiments=[],\n header=QobjHeader(backend_name=backend_name))\n if seed:\n qobj.config.seed = seed\n\n qobj.experiments = parallel_map(_dags_2_qobj_parallel, dags,\n task_kwargs={'basis_gates': basis_gates,\n 'config': config,\n 'coupling_map': coupling_map})\n\n # Update the `memory_slots` value.\n # TODO: remove when `memory_slots` can be provided by the user.\n qobj.config.memory_slots = max(experiment.config.memory_slots for\n experiment in qobj.experiments)\n\n # Update the `n_qubits` global value.\n # TODO: num_qubits is not part of the qobj specification, but needed\n # for the simulator.\n qobj.config.n_qubits = max(experiment.config.n_qubits for\n experiment in qobj.experiments)\n\n return qobj\n\n\ndef _dags_2_qobj_parallel(dag, config=None, basis_gates=None,\n coupling_map=None):\n \"\"\"Helper function for dags to qobj in parallel (if available).\n\n Args:\n dag (DAGCircuit): DAG to compile\n config (dict): dictionary of parameters (e.g. 
noise) used by runner\n basis_gates (list[str])): basis gates for the experiment\n coupling_map (list): coupling map (perhaps custom) to target in mapping\n\n Returns:\n Qobj: Qobj to be run on the backends\n \"\"\"\n json_circuit = DagUnroller(dag, JsonBackend(dag.basis)).execute()\n # Step 3a: create the Experiment based on json_circuit\n experiment = QobjExperiment.from_dict(json_circuit)\n # Step 3b: populate the Experiment configuration and header\n experiment.header.name = dag.name\n # TODO: place in header or config?\n experiment_config = deepcopy(config or {})\n experiment_config.update({\n 'coupling_map': coupling_map,\n 'basis_gates': basis_gates,\n 'layout': dag.layout,\n 'memory_slots': sum(dag.cregs.values()),\n # TODO: `n_qubits` is not part of the qobj spec, but needed for the simulator.\n 'n_qubits': sum(dag.qregs.values())})\n experiment.config = QobjItem(**experiment_config)\n\n # set eval_symbols=True to evaluate each symbolic expression\n # TODO: after transition to qobj, we can drop this\n experiment.header.compiled_circuit_qasm = dag.qasm(\n qeflag=True, eval_symbols=True)\n # Step 3c: add the Experiment to the Qobj\n return experiment\n\n\ndef transpile(dag, basis_gates='u1,u2,u3,cx,id', coupling_map=None,\n initial_layout=None, get_layout=False,\n format='dag', seed=None, pass_manager=None):\n \"\"\"Transform a dag circuit into another dag circuit (transpile), through\n consecutive passes on the dag.\n\n Args:\n dag (DAGCircuit): dag circuit to transform via transpilation\n basis_gates (str): a comma seperated string for the target basis gates\n coupling_map (list): A graph of coupling::\n\n [\n [control0(int), target0(int)],\n [control1(int), target1(int)],\n ]\n\n eg. [[0, 2], [1, 2], [1, 3], [3, 4]}\n\n initial_layout (dict): A mapping of qubit to qubit::\n\n {\n (\"q\", start(int)): (\"q\", final(int)),\n ...\n }\n eg.\n {\n (\"q\", 0): (\"q\", 0),\n (\"q\", 1): (\"q\", 1),\n (\"q\", 2): (\"q\", 2),\n (\"q\", 3): (\"q\", 3)\n }\n get_layout (bool): flag for returning the final layout after mapping\n format (str): The target format of the compilation:\n {'dag', 'json', 'qasm'}\n seed (int): random seed for the swap mapper\n pass_manager (PassManager): pass manager instance for the tranpilation process\n If None, a default set of passes are run.\n Otherwise, the passes defined in it will run.\n If contains no passes in it, no dag transformations occur.\n\n Returns:\n DAGCircuit: transformed dag\n DAGCircuit, dict: transformed dag along with the final layout on backend qubits\n\n Raises:\n TranspilerError: if the format is not valid.\n \"\"\"\n # TODO: `basis_gates` will be removed after we have the unroller pass.\n # TODO: `coupling_map`, `initial_layout`, `get_layout`, `seed` removed after mapper pass.\n\n # TODO: move this to the mapper pass\n num_qubits = sum(dag.qregs.values())\n if num_qubits == 1 or coupling_map == \"all-to-all\":\n coupling_map = None\n\n final_layout = None\n\n if pass_manager:\n # run the passes specified by the pass manager\n for pass_ in pass_manager.passes():\n pass_.run(dag)\n else:\n # default set of passes\n # TODO: move each step here to a pass, and use a default passmanager below\n basis = basis_gates.split(',') if basis_gates else []\n dag_unroller = DagUnroller(dag, DAGBackend(basis))\n dag = dag_unroller.expand_gates()\n # if a coupling map is given compile to the map\n if coupling_map:\n logger.info(\"pre-mapping properties: %s\",\n dag.property_summary())\n # Insert swap gates\n coupling = 
Coupling(coupling_list2dict(coupling_map))\n removed_meas = remove_last_measurements(dag)\n logger.info(\"measurements moved: %s\", removed_meas)\n logger.info(\"initial layout: %s\", initial_layout)\n dag, final_layout, last_layout = swap_mapper(\n dag, coupling, initial_layout, trials=20, seed=seed)\n logger.info(\"final layout: %s\", final_layout)\n # Expand swaps\n dag_unroller = DagUnroller(dag, DAGBackend(basis))\n dag = dag_unroller.expand_gates()\n # Change cx directions\n dag = direction_mapper(dag, coupling)\n # Simplify cx gates\n cx_cancellation(dag)\n # Simplify single qubit gates\n dag = optimize_1q_gates(dag)\n return_last_measurements(dag, removed_meas,\n last_layout)\n logger.info(\"post-mapping properties: %s\",\n dag.property_summary())\n\n # choose output format\n # TODO: do we need all of these formats, or just the dag?\n if format == 'dag':\n compiled_circuit = dag\n elif format == 'json':\n # FIXME: JsonBackend is wrongly taking an ordered dict as basis, not list\n dag_unroller = DagUnroller(dag, JsonBackend(dag.basis))\n compiled_circuit = dag_unroller.execute()\n elif format == 'qasm':\n compiled_circuit = dag.qasm()\n else:\n raise TranspilerError('unrecognized circuit format')\n\n if get_layout:\n return compiled_circuit, final_layout\n return compiled_circuit\n\n\ndef _best_subset(backend, n_qubits):\n \"\"\"Computes the qubit mapping with the best\n connectivity.\n\n Parameters:\n backend (Qiskit.BaseBackend): A QISKit backend instance.\n n_qubits (int): Number of subset qubits to consider.\n\n Returns:\n ndarray: Array of qubits to use for best\n connectivity mapping.\n\n Raises:\n QISKitError: Wrong number of qubits given.\n \"\"\"\n if n_qubits == 1:\n return np.array([0])\n elif n_qubits <= 0:\n raise QISKitError('Number of qubits <= 0.')\n\n device_qubits = backend.configuration()['n_qubits']\n if n_qubits > device_qubits:\n raise QISKitError('Number of qubits greater than device.')\n\n cmap = np.asarray(backend.configuration()['coupling_map'])\n data = np.ones_like(cmap[:, 0])\n sp_cmap = sp.coo_matrix((data, (cmap[:, 0], cmap[:, 1])),\n shape=(device_qubits, device_qubits)).tocsr()\n best = 0\n best_map = None\n # do bfs with each node as starting point\n for k in range(sp_cmap.shape[0]):\n bfs = cs.breadth_first_order(sp_cmap, i_start=k, directed=False,\n return_predecessors=False)\n\n connection_count = 0\n for i in range(n_qubits):\n node_idx = bfs[i]\n for j in range(sp_cmap.indptr[node_idx],\n sp_cmap.indptr[node_idx + 1]):\n node = sp_cmap.indices[j]\n for counter in range(n_qubits):\n if node == bfs[counter]:\n connection_count += 1\n break\n\n if connection_count > best:\n best = connection_count\n best_map = bfs[0:n_qubits]\n return best_map\n\n\ndef _matches_coupling_map(dag, coupling_map):\n \"\"\"Iterate over circuit gates to check if all multi-qubit couplings\n match the qubit coupling graph in the backend.\n\n Parameters:\n dag (DAGCircuit): DAG representation of circuit.\n coupling_map (list): Backend coupling map, represented as an adjacency list.\n\n Returns:\n bool: True if all gates readily fit the backend coupling graph.\n False if there's at least one gate that uses multiple qubits\n which does not match the backend couplings.\n \"\"\"\n match = True\n for _, data in dag.multi_graph.nodes(data=True):\n if data['type'] == 'op':\n gate_map = [qr[1] for qr in data['qargs']]\n if len(gate_map) > 1:\n if gate_map not in coupling_map:\n match = False\n break\n return match\n\n\ndef _pick_best_layout(dag, backend):\n \"\"\"Pick a convenient 
layout depending on the best matching qubit connectivity\n\n Parameters:\n dag (DAGCircuit): DAG representation of circuit.\n backend (BaseBackend) : The backend with the coupling_map for searching\n\n Returns:\n dict: A special ordered initial_layout\n\n \"\"\"\n num_qubits = sum(dag.qregs.values())\n best_sub = _best_subset(backend, num_qubits)\n layout = {}\n map_iter = 0\n for key, value in dag.qregs.items():\n for i in range(value):\n layout[(key, i)] = ('q', best_sub[map_iter])\n map_iter += 1\n return layout\n", "path": "qiskit/transpiler/_transpiler.py"}]} |
gh_patches_debug_1637 | rasdani/github-patches | git_diff | ARM-DOE__ACT-396 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Add Google Analytics ID
Add a Google Analytics ID to the `conf.py` file used by sphinx. For those interested in having access the analytics, you will need to send over your gmail address
Fixes #396
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `docs/source/conf.py`
Content:
```
1 #!/usr/bin/env python3
2 # -*- coding: utf-8 -*-
3 #
4 # Atmospheric data Community Toolkit documentation build configuration file, created by
5 # sphinx-quickstart on Thu Jun 28 12:35:56 2018.
6 #
7 # This file is execfile()d with the current directory set to its
8 # containing dir.
9 #
10 # Note that not all possible configuration values are present in this
11 # autogenerated file.
12 #
13 # All configuration values have a default; values that are commented out
14 # serve to show the default.
15
16 # If extensions (or modules to document with autodoc) are in another directory,
17 # add these directories to sys.path here. If the directory is relative to the
18 # documentation root, use os.path.abspath to make it absolute, like shown here.
19 #
20 # import os
21 # import sys
22 # sys.path.insert(0, os.path.abspath('.'))
23
24
25 # -- General configuration ------------------------------------------------
26
27 # If your documentation needs a minimal Sphinx version, state it here.
28 #
29 # needs_sphinx = '1.0'
30
31 # Add any Sphinx extension module names here, as strings. They can be
32 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
33 # ones.
34 extensions = [
35 'sphinx.ext.autodoc',
36 'sphinx.ext.autosummary',
37 'sphinx.ext.githubpages',
38 'sphinx.ext.intersphinx',
39 'sphinx.ext.mathjax',
40 'sphinx.ext.viewcode',
41 'IPython.sphinxext.ipython_directive',
42 'IPython.sphinxext.ipython_console_highlighting',
43 'matplotlib.sphinxext.plot_directive',
44 'sphinx_copybutton',
45 'sphinx_gallery.gen_gallery',
46 'sphinx.ext.napoleon',
47 ]
48
49 exclude_patterns = ['_build', '**.ipynb_checkpoints']
50 sphinx_gallery_conf = {
51 'examples_dirs': '../../examples',
52 'gallery_dirs': 'source/auto_examples'
53 }
54
55 # Configuration options for plot_directive. See:
56 # https://github.com/matplotlib/matplotlib/blob/f3ed922d935751e08494e5fb5311d3050a3b637b/lib/matplotlib/sphinxext/plot_directive.py#L81
57 plot_html_show_source_link = False
58 plot_html_show_formats = False
59
60 # Generate the API documentation when building
61 autoclass_content = "both"
62 autosummary_generate = True
63 autosummary_imported_members = True
64
65 # Otherwise, the Return parameter list looks different from the Parameter list
66 napoleon_use_rtype = False
67 napoleon_use_ivar = True
68 napoleon_include_init_with_doc = False
69 napoleon_use_param = False
70
71 # Add any paths that contain templates here, relative to this directory.
72 templates_path = ['_templates']
73
74 # The suffix(es) of source filenames.
75 # You can specify multiple suffix as a list of string:
76 #
77 # source_suffix = ['.rst', '.md']
78 source_suffix = '.rst'
79
80 # The master toctree document.
81 master_doc = 'index'
82
83 # General information about the project.
84 project = 'Atmospheric data Community Toolkit'
85 copyright = '2018, ACT Developers'
86 author = 'ACT Developers'
87
88 # The version info for the project you're documenting, acts as replacement for
89 # |version| and |release|, also used in various other places throughout the
90 # built documents.
91 #
92 import act
93 # The short X.Y version.
94 version = act.__version__
95 # The full version, including alpha/beta/rc tags.
96 release = act.__version__
97
98 # The language for content autogenerated by Sphinx. Refer to documentation
99 # for a list of supported languages.
100 #
101 # This is also used if you do content translation via gettext catalogs.
102 # Usually you set "language" from the command line for these cases.
103 language = None
104
105 # List of patterns, relative to source directory, that match files and
106 # directories to ignore when looking for source files.
107 # These patterns also affect html_static_path and html_extra_path
108
109 # The name of the Pygments (syntax highlighting) style to use.
110 pygments_style = 'sphinx'
111
112 # If true, `todo` and `todoList` produce output, else they produce nothing.
113 todo_include_todos = False
114
115
116 # -- Options for HTML output ----------------------------------------------
117
118 # The theme to use for HTML and HTML Help pages. See the documentation for
119 # a list of builtin themes.
120 #
121 html_theme = 'sphinx_rtd_theme'
122 import sphinx_rtd_theme
123 html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
124
125 # Theme options are theme-specific and customize the look and feel of a theme
126 # further. For a list of options available for each theme, see the
127 # documentation.
128 #
129 # html_theme_options = {}
130
131 # Add any paths that contain custom static files (such as style sheets) here,
132 # relative to this directory. They are copied after the builtin static files,
133 # so a file named "default.css" will overwrite the builtin "default.css".
134 html_static_path = ['_static']
135
136 # Custom sidebar templates, must be a dictionary that maps document names
137 # to template names.
138 #
139 # This is required for the alabaster theme
140 # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
141 html_sidebars = {
142 '**': [
143 'relations.html', # needs 'show_related': True theme option to display
144 'searchbox.html',
145 ]
146 }
147
148
149 # -- Options for HTMLHelp output ------------------------------------------
150
151 # Output file base name for HTML help builder.
152 htmlhelp_basename = 'act'
153
154
155 # -- Options for LaTeX output ---------------------------------------------
156
157 latex_elements = {
158 # The paper size ('letterpaper' or 'a4paper').
159 #
160 # 'papersize': 'letterpaper',
161
162 # The font size ('10pt', '11pt' or '12pt').
163 #
164 # 'pointsize': '10pt',
165
166 # Additional stuff for the LaTeX preamble.
167 #
168 # 'preamble': '',
169
170 # Latex figure (float) alignment
171 #
172 # 'figure_align': 'htbp',
173 }
174
175 # Grouping the document tree into LaTeX files. List of tuples
176 # (source start file, target name, title,
177 # author, documentclass [howto, manual, or own class]).
178 latex_documents = [
179 (master_doc, 'act.tex', 'Atmospheric data Community Toolkit Documentation',
180 'Contributors', 'manual'),
181 ]
182
183
184 # -- Options for manual page output ---------------------------------------
185
186 # One entry per manual page. List of tuples
187 # (source start file, name, description, authors, manual section).
188 man_pages = [
189 (master_doc, 'act', 'Atmospheric data Community Toolkit Documentation',
190 [author], 1)
191 ]
192
193
194 # -- Options for Texinfo output -------------------------------------------
195
196 # Grouping the document tree into Texinfo files. List of tuples
197 # (source start file, target name, title, author,
198 # dir menu entry, description, category)
199 texinfo_documents = [
200 (master_doc, 'act', 'Atmospheric data Community Toolkit Documentation',
201 author, 'act', 'Package for connecting users to the data',
202 'Miscellaneous'),
203 ]
204
205
206
207
208 # Example configuration for intersphinx: refer to the Python standard library.
209 intersphinx_mapping = {
210 'python': ('https://docs.python.org/3/', None),
211 'numpy': ('https://docs.scipy.org/doc/numpy/', None),
212 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),
213 'pandas': ('https://pandas.pydata.org/pandas-docs/stable', None),
214 'matplotlib': ('https://matplotlib.org', None),
215 }
216
```
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/docs/source/conf.py b/docs/source/conf.py
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -126,7 +126,9 @@
# further. For a list of options available for each theme, see the
# documentation.
#
-# html_theme_options = {}
+html_theme_options = {
+ 'google_analytics_id': 'UA-179020619-3',
+}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
| {"golden_diff": "diff --git a/docs/source/conf.py b/docs/source/conf.py\n--- a/docs/source/conf.py\n+++ b/docs/source/conf.py\n@@ -126,7 +126,9 @@\n # further. For a list of options available for each theme, see the\n # documentation.\n #\n-# html_theme_options = {}\n+html_theme_options = {\n+ 'google_analytics_id': 'UA-179020619-3',\n+}\n \n # Add any paths that contain custom static files (such as style sheets) here,\n # relative to this directory. They are copied after the builtin static files,\n", "issue": "Add Google Analytics ID\nAdd a Google Analytics ID to the `conf.py` file used by sphinx. For those interested in having access the analytics, you will need to send over your gmail address\r\n\r\nFixes #396 \n", "before_files": [{"content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n#\n# Atmospheric data Community Toolkit documentation build configuration file, created by\n# sphinx-quickstart on Thu Jun 28 12:35:56 2018.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\n# import os\n# import sys\n# sys.path.insert(0, os.path.abspath('.'))\n\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.autodoc',\n 'sphinx.ext.autosummary',\n 'sphinx.ext.githubpages',\n 'sphinx.ext.intersphinx',\n 'sphinx.ext.mathjax',\n 'sphinx.ext.viewcode',\n 'IPython.sphinxext.ipython_directive',\n 'IPython.sphinxext.ipython_console_highlighting',\n 'matplotlib.sphinxext.plot_directive',\n 'sphinx_copybutton',\n 'sphinx_gallery.gen_gallery',\n 'sphinx.ext.napoleon',\n]\n\nexclude_patterns = ['_build', '**.ipynb_checkpoints']\nsphinx_gallery_conf = {\n 'examples_dirs': '../../examples',\n 'gallery_dirs': 'source/auto_examples'\n}\n\n# Configuration options for plot_directive. 
See:\n# https://github.com/matplotlib/matplotlib/blob/f3ed922d935751e08494e5fb5311d3050a3b637b/lib/matplotlib/sphinxext/plot_directive.py#L81\nplot_html_show_source_link = False\nplot_html_show_formats = False\n\n# Generate the API documentation when building\nautoclass_content = \"both\"\nautosummary_generate = True\nautosummary_imported_members = True\n\n# Otherwise, the Return parameter list looks different from the Parameter list\nnapoleon_use_rtype = False\nnapoleon_use_ivar = True\nnapoleon_include_init_with_doc = False\nnapoleon_use_param = False\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = 'Atmospheric data Community Toolkit'\ncopyright = '2018, ACT Developers'\nauthor = 'ACT Developers'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\nimport act\n# The short X.Y version.\nversion = act.__version__\n# The full version, including alpha/beta/rc tags.\nrelease = act.__version__\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_rtd_theme'\nimport sphinx_rtd_theme\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#\n# html_theme_options = {}\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. 
They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\n#\n# This is required for the alabaster theme\n# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars\nhtml_sidebars = {\n '**': [\n 'relations.html', # needs 'show_related': True theme option to display\n 'searchbox.html',\n ]\n}\n\n\n# -- Options for HTMLHelp output ------------------------------------------\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'act'\n\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #\n # 'papersize': 'letterpaper',\n\n # The font size ('10pt', '11pt' or '12pt').\n #\n # 'pointsize': '10pt',\n\n # Additional stuff for the LaTeX preamble.\n #\n # 'preamble': '',\n\n # Latex figure (float) alignment\n #\n # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (master_doc, 'act.tex', 'Atmospheric data Community Toolkit Documentation',\n 'Contributors', 'manual'),\n]\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n (master_doc, 'act', 'Atmospheric data Community Toolkit Documentation',\n [author], 1)\n]\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (master_doc, 'act', 'Atmospheric data Community Toolkit Documentation',\n author, 'act', 'Package for connecting users to the data',\n 'Miscellaneous'),\n]\n\n\n\n\n# Example configuration for intersphinx: refer to the Python standard library.\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3/', None),\n 'numpy': ('https://docs.scipy.org/doc/numpy/', None),\n 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),\n 'pandas': ('https://pandas.pydata.org/pandas-docs/stable', None),\n 'matplotlib': ('https://matplotlib.org', None),\n}\n", "path": "docs/source/conf.py"}], "after_files": [{"content": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n#\n# Atmospheric data Community Toolkit documentation build configuration file, created by\n# sphinx-quickstart on Thu Jun 28 12:35:56 2018.\n#\n# This file is execfile()d with the current directory set to its\n# containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. 
If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\n# import os\n# import sys\n# sys.path.insert(0, os.path.abspath('.'))\n\n\n# -- General configuration ------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\n# needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n 'sphinx.ext.autodoc',\n 'sphinx.ext.autosummary',\n 'sphinx.ext.githubpages',\n 'sphinx.ext.intersphinx',\n 'sphinx.ext.mathjax',\n 'sphinx.ext.viewcode',\n 'IPython.sphinxext.ipython_directive',\n 'IPython.sphinxext.ipython_console_highlighting',\n 'matplotlib.sphinxext.plot_directive',\n 'sphinx_copybutton',\n 'sphinx_gallery.gen_gallery',\n 'sphinx.ext.napoleon',\n]\n\nexclude_patterns = ['_build', '**.ipynb_checkpoints']\nsphinx_gallery_conf = {\n 'examples_dirs': '../../examples',\n 'gallery_dirs': 'source/auto_examples'\n}\n\n# Configuration options for plot_directive. See:\n# https://github.com/matplotlib/matplotlib/blob/f3ed922d935751e08494e5fb5311d3050a3b637b/lib/matplotlib/sphinxext/plot_directive.py#L81\nplot_html_show_source_link = False\nplot_html_show_formats = False\n\n# Generate the API documentation when building\nautoclass_content = \"both\"\nautosummary_generate = True\nautosummary_imported_members = True\n\n# Otherwise, the Return parameter list looks different from the Parameter list\nnapoleon_use_rtype = False\nnapoleon_use_ivar = True\nnapoleon_include_init_with_doc = False\nnapoleon_use_param = False\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\n# source_suffix = ['.rst', '.md']\nsource_suffix = '.rst'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = 'Atmospheric data Community Toolkit'\ncopyright = '2018, ACT Developers'\nauthor = 'ACT Developers'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\nimport act\n# The short X.Y version.\nversion = act.__version__\n# The full version, including alpha/beta/rc tags.\nrelease = act.__version__\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This patterns also effect to html_static_path and html_extra_path\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'sphinx'\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = False\n\n\n# -- Options for HTML output ----------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. 
See the documentation for\n# a list of builtin themes.\n#\nhtml_theme = 'sphinx_rtd_theme'\nimport sphinx_rtd_theme\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#\nhtml_theme_options = {\n 'google_analytics_id': 'UA-179020619-3',\n}\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\n#\n# This is required for the alabaster theme\n# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars\nhtml_sidebars = {\n '**': [\n 'relations.html', # needs 'show_related': True theme option to display\n 'searchbox.html',\n ]\n}\n\n\n# -- Options for HTMLHelp output ------------------------------------------\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'act'\n\n\n# -- Options for LaTeX output ---------------------------------------------\n\nlatex_elements = {\n # The paper size ('letterpaper' or 'a4paper').\n #\n # 'papersize': 'letterpaper',\n\n # The font size ('10pt', '11pt' or '12pt').\n #\n # 'pointsize': '10pt',\n\n # Additional stuff for the LaTeX preamble.\n #\n # 'preamble': '',\n\n # Latex figure (float) alignment\n #\n # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n# author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n (master_doc, 'act.tex', 'Atmospheric data Community Toolkit Documentation',\n 'Contributors', 'manual'),\n]\n\n\n# -- Options for manual page output ---------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [\n (master_doc, 'act', 'Atmospheric data Community Toolkit Documentation',\n [author], 1)\n]\n\n\n# -- Options for Texinfo output -------------------------------------------\n\n# Grouping the document tree into Texinfo files. List of tuples\n# (source start file, target name, title, author,\n# dir menu entry, description, category)\ntexinfo_documents = [\n (master_doc, 'act', 'Atmospheric data Community Toolkit Documentation',\n author, 'act', 'Package for connecting users to the data',\n 'Miscellaneous'),\n]\n\n\n\n\n# Example configuration for intersphinx: refer to the Python standard library.\nintersphinx_mapping = {\n 'python': ('https://docs.python.org/3/', None),\n 'numpy': ('https://docs.scipy.org/doc/numpy/', None),\n 'scipy': ('https://docs.scipy.org/doc/scipy/reference/', None),\n 'pandas': ('https://pandas.pydata.org/pandas-docs/stable', None),\n 'matplotlib': ('https://matplotlib.org', None),\n}\n", "path": "docs/source/conf.py"}]} |
gh_patches_debug_1638 | rasdani/github-patches | git_diff | evennia__evennia-2748 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
[BUG - Develop] `get_by_tag` category list fails on None
#### Describe the bug
When using a list for the `category` kwarg that contains an entry of `None` (which is a valid category), the call fails with a traceback, presumably due to the use of `sorted` here: <https://github.com/evennia/evennia/blob/develop/evennia/typeclasses/managers.py#L289>
#### To Reproduce
Steps to reproduce the behavior:
1. Add tags to an object (e.g. a room), both with categories and without. Let's say:
```
tag here = test
tag here = tag1:cat1
tag here = tag2:cat2
```
2. `Room.objects.get_by_tag(key=["tag1","tag2"], category=["cat1","cat2"])` returns the current room
3. `Room.objects.get_by_tag(key=["tag1","test"], category=["cat1",None])` _should_ return the current room as well, but instead it raises a traceback.
#### Expected behavior
Since `None` is a valid tag category, the function should be able to search for tags whose categories are strings _and_ tags whose category is `None`.
#### Develop-branch commit
7f4769bd9
#### Additional context
Traceback:
```
File "./evennia/evennia/typeclasses/managers.py", line 289, in get_by_tag
unique_categories = sorted(set(categories))
TypeError: '<' not supported between instances of 'NoneType' and 'str'
```
--- END ISSUE ---
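The failure reproduces in plain Python, no Evennia required: Python 3 refuses to order `None` against `str`, so `sorted()` over a mixed set raises, while a bare `set()` keeps `None` as a member without ever comparing it. A minimal sketch (the final `sorted` call with a key function is only an illustration of an alternative, not what the project uses):

```python
categories = {"cat1", None}  # None is a legitimate tag category

try:
    sorted(categories)  # mirrors sorted(set(categories)) at managers.py line 289
except TypeError as err:
    print(err)  # e.g. '<' not supported between instances of 'NoneType' and 'str'

# get_by_tag only needs the *unique* categories, not an ordered list, so a
# plain set sidesteps the comparison entirely, which is what the fix does:
unique_categories = set(categories)
print(len(unique_categories))  # 2

# If an ordering were ever wanted, a None-safe sort key would also work:
print(sorted(categories, key=lambda c: (c is not None, c or "")))  # [None, 'cat1']
```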
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `evennia/typeclasses/managers.py`
Content:
```
1 """
2 This implements the common managers that are used by the
3 abstract models in dbobjects.py (and which are thus shared by
4 all Attributes and TypedObjects).
5
6 """
7 import shlex
8 from django.db.models import F, Q, Count, ExpressionWrapper, FloatField
9 from django.db.models.functions import Cast
10 from evennia.utils import idmapper
11 from evennia.utils.utils import make_iter, variable_from_module
12 from evennia.typeclasses.attributes import Attribute
13 from evennia.typeclasses.tags import Tag
14
15 __all__ = ("TypedObjectManager",)
16 _GA = object.__getattribute__
17 _Tag = None
18
19
20 # Managers
21
22
23 class TypedObjectManager(idmapper.manager.SharedMemoryManager):
24 """
25 Common ObjectManager for all dbobjects.
26
27 """
28
29 # common methods for all typed managers. These are used
30 # in other methods. Returns querysets.
31
32 # Attribute manager methods
33 def get_attribute(
34 self, key=None, category=None, value=None, strvalue=None, obj=None, attrtype=None, **kwargs
35 ):
36 """
37 Return Attribute objects by key, by category, by value, by strvalue, by
38 object (it is stored on) or with a combination of those criteria.
39
40 Args:
41 key (str, optional): The attribute's key to search for
42 category (str, optional): The category of the attribute(s) to search for.
43 value (str, optional): The attribute value to search for.
44 Note that this is not a very efficient operation since it
45 will query for a pickled entity. Mutually exclusive to
46 `strvalue`.
47 strvalue (str, optional): The str-value to search for.
48 Most Attributes will not have strvalue set. This is
49 mutually exclusive to the `value` keyword and will take
50 precedence if given.
51 obj (Object, optional): On which object the Attribute to
52 search for is.
53             attrtype (str, optional): An attribute-type to search for.
54 By default this is either `None` (normal Attributes) or
55 `"nick"`.
56 **kwargs (any): Currently unused. Reserved for future use.
57
58 Returns:
59 list: The matching Attributes.
60
61 """
62 dbmodel = self.model.__dbclass__.__name__.lower()
63 query = [("attribute__db_attrtype", attrtype), ("attribute__db_model", dbmodel)]
64 if obj:
65 query.append(("%s__id" % self.model.__dbclass__.__name__.lower(), obj.id))
66 if key:
67 query.append(("attribute__db_key", key))
68 if category:
69 query.append(("attribute__db_category", category))
70 if strvalue:
71 query.append(("attribute__db_strvalue", strvalue))
72 if value:
73 # no reason to make strvalue/value mutually exclusive at this level
74 query.append(("attribute__db_value", value))
75 return Attribute.objects.filter(
76 pk__in=self.model.db_attributes.through.objects.filter(**dict(query)).values_list(
77 "attribute_id", flat=True
78 )
79 )
80
81 def get_nick(self, key=None, category=None, value=None, strvalue=None, obj=None):
82 """
83 Get a nick, in parallel to `get_attribute`.
84
85 Args:
86 key (str, optional): The nicks's key to search for
87 category (str, optional): The category of the nicks(s) to search for.
88 value (str, optional): The attribute value to search for. Note that this
89 is not a very efficient operation since it will query for a pickled
90 entity. Mutually exclusive to `strvalue`.
91 strvalue (str, optional): The str-value to search for. Most Attributes
92 will not have strvalue set. This is mutually exclusive to the `value`
93 keyword and will take precedence if given.
94 obj (Object, optional): On which object the Attribute to search for is.
95
96 Returns:
97 nicks (list): The matching Nicks.
98
99 """
100 return self.get_attribute(
101 key=key, category=category, value=value, strvalue=strvalue, obj=obj
102 )
103
104 def get_by_attribute(
105 self, key=None, category=None, value=None, strvalue=None, attrtype=None, **kwargs
106 ):
107 """
108 Return objects having attributes with the given key, category,
109 value, strvalue or combination of those criteria.
110
111 Args:
112 key (str, optional): The attribute's key to search for
113 category (str, optional): The category of the attribute
114 to search for.
115 value (str, optional): The attribute value to search for.
116 Note that this is not a very efficient operation since it
117 will query for a pickled entity. Mutually exclusive to
118 `strvalue`.
119 strvalue (str, optional): The str-value to search for.
120 Most Attributes will not have strvalue set. This is
121 mutually exclusive to the `value` keyword and will take
122 precedence if given.
123             attrtype (str, optional): An attribute-type to search for.
124 By default this is either `None` (normal Attributes) or
125 `"nick"`.
126 kwargs (any): Currently unused. Reserved for future use.
127
128 Returns:
129 obj (list): Objects having the matching Attributes.
130
131 """
132 dbmodel = self.model.__dbclass__.__name__.lower()
133 query = [
134 ("db_attributes__db_attrtype", attrtype),
135 ("db_attributes__db_model", dbmodel),
136 ]
137 if key:
138 query.append(("db_attributes__db_key", key))
139 if category:
140 query.append(("db_attributes__db_category", category))
141 if strvalue:
142 query.append(("db_attributes__db_strvalue", strvalue))
143 elif value:
144 # strvalue and value are mutually exclusive
145 query.append(("db_attributes__db_value", value))
146 return self.filter(**dict(query))
147
148 def get_by_nick(self, key=None, nick=None, category="inputline"):
149 """
150 Get object based on its key or nick.
151
152 Args:
153 key (str, optional): The attribute's key to search for
154 nick (str, optional): The nickname to search for
155 category (str, optional): The category of the nick
156 to search for.
157
158 Returns:
159 obj (list): Objects having the matching Nicks.
160
161 """
162 return self.get_by_attribute(key=key, category=category, strvalue=nick, attrtype="nick")
163
164 # Tag manager methods
165
166 def get_tag(self, key=None, category=None, obj=None, tagtype=None, global_search=False):
167 """
168 Return Tag objects by key, by category, by object (it is
169 stored on) or with a combination of those criteria.
170
171 Args:
172 key (str, optional): The Tag's key to search for
173 category (str, optional): The Tag of the attribute(s)
174 to search for.
175 obj (Object, optional): On which object the Tag to
176 search for is.
177 tagtype (str, optional): One of `None` (normal tags),
178 "alias" or "permission"
179 global_search (bool, optional): Include all possible tags,
180 not just tags on this object
181
182 Returns:
183 tag (list): The matching Tags.
184
185 """
186 global _Tag
187 if not _Tag:
188 from evennia.typeclasses.models import Tag as _Tag
189 dbmodel = self.model.__dbclass__.__name__.lower()
190 if global_search:
191 # search all tags using the Tag model
192 query = [("db_tagtype", tagtype), ("db_model", dbmodel)]
193 if obj:
194 query.append(("id", obj.id))
195 if key:
196 query.append(("db_key", key))
197 if category:
198 query.append(("db_category", category))
199 return _Tag.objects.filter(**dict(query))
200 else:
201             # search only among tags stored on this model
202 query = [("tag__db_tagtype", tagtype), ("tag__db_model", dbmodel)]
203 if obj:
204 query.append(("%s__id" % self.model.__name__.lower(), obj.id))
205 if key:
206 query.append(("tag__db_key", key))
207 if category:
208 query.append(("tag__db_category", category))
209 return Tag.objects.filter(
210 pk__in=self.model.db_tags.through.objects.filter(**dict(query)).values_list(
211 "tag_id", flat=True
212 )
213 )
214
215 def get_permission(self, key=None, category=None, obj=None):
216 """
217 Get a permission from the database.
218
219 Args:
220 key (str, optional): The permission's identifier.
221 category (str, optional): The permission's category.
222 obj (object, optional): The object on which this Tag is set.
223
224 Returns:
225 permission (list): Permission objects.
226
227 """
228 return self.get_tag(key=key, category=category, obj=obj, tagtype="permission")
229
230 def get_alias(self, key=None, category=None, obj=None):
231 """
232 Get an alias from the database.
233
234 Args:
235 key (str, optional): The permission's identifier.
236 category (str, optional): The permission's category.
237 obj (object, optional): The object on which this Tag is set.
238
239 Returns:
240 alias (list): Alias objects.
241
242 """
243 return self.get_tag(key=key, category=category, obj=obj, tagtype="alias")
244
245 def get_by_tag(self, key=None, category=None, tagtype=None, **kwargs):
246 """
247 Return objects having tags with a given key or category or combination of the two.
248 Also accepts multiple tags/category/tagtype
249
250 Args:
251 key (str or list, optional): Tag key or list of keys. Not case sensitive.
252 category (str or list, optional): Tag category. Not case sensitive.
253 If `key` is a list, a single category can either apply to all
254 keys in that list or this must be a list matching the `key`
255 list element by element. If no `key` is given, all objects with
256 tags of this category are returned.
257 tagtype (str, optional): 'type' of Tag, by default
258 this is either `None` (a normal Tag), `alias` or
259 `permission`. This always apply to all queried tags.
260
261 Keyword Args:
262 match (str): "all" (default) or "any"; determines whether the
263 target object must be tagged with ALL of the provided
264 tags/categories or ANY single one. ANY will perform a weighted
265 sort, so objects with more tag matches will outrank those with
266 fewer tag matches.
267
268 Returns:
269 objects (list): Objects with matching tag.
270
271 Raises:
272 IndexError: If `key` and `category` are both lists and `category` is shorter
273 than `key`.
274
275 """
276 if not (key or category):
277 return []
278
279 global _Tag
280 if not _Tag:
281 from evennia.typeclasses.models import Tag as _Tag
282
283 anymatch = "any" == kwargs.get("match", "all").lower().strip()
284
285 keys = make_iter(key) if key else []
286 categories = make_iter(category) if category else []
287 n_keys = len(keys)
288 n_categories = len(categories)
289 unique_categories = sorted(set(categories))
290 n_unique_categories = len(unique_categories)
291
292 dbmodel = self.model.__dbclass__.__name__.lower()
293 query = (
294 self.filter(db_tags__db_tagtype__iexact=tagtype, db_tags__db_model__iexact=dbmodel)
295 .distinct()
296 .order_by("id")
297 )
298
299 if n_keys > 0:
300 # keys and/or categories given
301 if n_categories == 0:
302 categories = [None for _ in range(n_keys)]
303 elif n_categories == 1 and n_keys > 1:
304 cat = categories[0]
305 categories = [cat for _ in range(n_keys)]
306 elif 1 < n_categories < n_keys:
307 raise IndexError(
308 "get_by_tag needs a single category or a list of categories "
309 "the same length as the list of tags."
310 )
311 clauses = Q()
312 for ikey, key in enumerate(keys):
313 # ANY mode; must match any one of the given tags/categories
314 clauses |= Q(db_key__iexact=key, db_category__iexact=categories[ikey])
315 else:
316 # only one or more categories given
317 clauses = Q()
318 # ANY mode; must match any one of them
319 for category in unique_categories:
320 clauses |= Q(db_category__iexact=category)
321
322 tags = _Tag.objects.filter(clauses)
323 query = query.filter(db_tags__in=tags).annotate(
324 matches=Count("db_tags__pk", filter=Q(db_tags__in=tags), distinct=True)
325 )
326
327 if anymatch:
328 # ANY: Match any single tag, ordered by weight
329 query = query.order_by("-matches")
330 else:
331 # Default ALL: Match all of the tags and optionally more
332 n_req_tags = n_keys if n_keys > 0 else n_unique_categories
333 query = query.filter(matches__gte=n_req_tags)
334
335 return query
336
337 def get_by_permission(self, key=None, category=None):
338 """
339 Return objects having permissions with a given key or category or
340 combination of the two.
341
342 Args:
343 key (str, optional): Permissions key. Not case sensitive.
344 category (str, optional): Permission category. Not case sensitive.
345 Returns:
346 objects (list): Objects with matching permission.
347 """
348 return self.get_by_tag(key=key, category=category, tagtype="permission")
349
350 def get_by_alias(self, key=None, category=None):
351 """
352 Return objects having aliases with a given key or category or
353 combination of the two.
354
355 Args:
356 key (str, optional): Alias key. Not case sensitive.
357 category (str, optional): Alias category. Not case sensitive.
358 Returns:
359 objects (list): Objects with matching alias.
360 """
361 return self.get_by_tag(key=key, category=category, tagtype="alias")
362
363 def create_tag(self, key=None, category=None, data=None, tagtype=None):
364 """
365 Create a new Tag of the base type associated with this
366 object. This makes sure to create case-insensitive tags.
367 If the exact same tag configuration (key+category+tagtype+dbmodel)
368 exists on the model, a new tag will not be created, but an old
369 one returned.
370
371
372 Args:
373 key (str, optional): Tag key. Not case sensitive.
374 category (str, optional): Tag category. Not case sensitive.
375 data (str, optional): Extra information about the tag.
376 tagtype (str or None, optional): 'type' of Tag, by default
377 this is either `None` (a normal Tag), `alias` or
378 `permission`.
379 Notes:
380 The `data` field is not part of the uniqueness of the tag:
381 Setting `data` on an existing tag will overwrite the old
382 data field. It is intended only as a way to carry
383 information about the tag (like a help text), not to carry
384 any information about the tagged objects themselves.
385
386 """
387 data = str(data) if data is not None else None
388 # try to get old tag
389
390 dbmodel = self.model.__dbclass__.__name__.lower()
391 tag = self.get_tag(key=key, category=category, tagtype=tagtype, global_search=True)
392 if tag and data is not None:
393 # get tag from list returned by get_tag
394 tag = tag[0]
395 # overload data on tag
396 tag.db_data = data
397 tag.save()
398 elif not tag:
399 # create a new tag
400 global _Tag
401 if not _Tag:
402 from evennia.typeclasses.models import Tag as _Tag
403 tag = _Tag.objects.create(
404 db_key=key.strip().lower() if key is not None else None,
405 db_category=category.strip().lower() if category and key is not None else None,
406 db_data=data,
407 db_model=dbmodel,
408 db_tagtype=tagtype.strip().lower() if tagtype is not None else None,
409 )
410 tag.save()
411 return make_iter(tag)[0]
412
413 def dbref(self, dbref, reqhash=True):
414 """
415         Determine if input is a valid dbref.
416
417 Args:
418 dbref (str or int): A possible dbref.
419 reqhash (bool, optional): If the "#" is required for this
420 to be considered a valid hash.
421
422 Returns:
423 dbref (int or None): The integer part of the dbref.
424
425 Notes:
426 Valid forms of dbref (database reference number) are
427 either a string '#N' or an integer N.
428
429 """
430 if reqhash and not (isinstance(dbref, str) and dbref.startswith("#")):
431 return None
432 if isinstance(dbref, str):
433 dbref = dbref.lstrip("#")
434 try:
435 if int(dbref) < 0:
436 return None
437 except Exception:
438 return None
439 return dbref
440
441 def get_id(self, dbref):
442 """
443 Find object with given dbref.
444
445 Args:
446 dbref (str or int): The id to search for.
447
448 Returns:
449 object (TypedObject): The matched object.
450
451 """
452 dbref = self.dbref(dbref, reqhash=False)
453 try:
454 return self.get(id=dbref)
455 except self.model.DoesNotExist:
456 pass
457 return None
458
459 def dbref_search(self, dbref):
460 """
461 Alias to get_id.
462
463 Args:
464 dbref (str or int): The id to search for.
465
466 Returns:
467 Queryset: Queryset with 0 or 1 match.
468
469 """
470 dbref = self.dbref(dbref, reqhash=False)
471 if dbref:
472 return self.filter(id=dbref)
473 return self.none()
474
475 def get_dbref_range(self, min_dbref=None, max_dbref=None):
476 """
477 Get objects within a certain range of dbrefs.
478
479 Args:
480 min_dbref (int): Start of dbref range.
481 max_dbref (int): End of dbref range (inclusive)
482
483 Returns:
484 objects (list): TypedObjects with dbrefs within
485 the given dbref ranges.
486
487 """
488 retval = super().all()
489 if min_dbref is not None:
490 retval = retval.filter(id__gte=self.dbref(min_dbref, reqhash=False))
491 if max_dbref is not None:
492 retval = retval.filter(id__lte=self.dbref(max_dbref, reqhash=False))
493 return retval
494
495 def get_typeclass_totals(self, *args, **kwargs) -> object:
496 """
497 Returns a queryset of typeclass composition statistics.
498
499 Returns:
500 qs (Queryset): A queryset of dicts containing the typeclass (name),
501 the count of objects with that typeclass and a float representing
502 the percentage of objects associated with the typeclass.
503
504 """
505 return (
506 self.values("db_typeclass_path")
507 .distinct()
508 .annotate(
509 # Get count of how many objects for each typeclass exist
510 count=Count("db_typeclass_path")
511 )
512 .annotate(
513 # Rename db_typeclass_path field to something more human
514 typeclass=F("db_typeclass_path"),
515 # Calculate this class' percentage of total composition
516 percent=ExpressionWrapper(
517 ((F("count") / float(self.count())) * 100.0),
518 output_field=FloatField(),
519 ),
520 )
521 .values("typeclass", "count", "percent")
522 )
523
524 def object_totals(self):
525 """
526 Get info about database statistics.
527
528 Returns:
529 census (dict): A dictionary `{typeclass_path: number, ...}` with
530 all the typeclasses active in-game as well as the number
531 of such objects defined (i.e. the number of database
532 object having that typeclass set on themselves).
533
534 """
535 stats = self.get_typeclass_totals().order_by("typeclass")
536 return {x.get("typeclass"): x.get("count") for x in stats}
537
538 def typeclass_search(self, typeclass, include_children=False, include_parents=False):
539 """
540         Searches through all objects, returning those that have a
541 certain typeclass. If location is set, limit search to objects
542 in that location.
543
544 Args:
545 typeclass (str or class): A typeclass class or a python path to a typeclass.
546 include_children (bool, optional): Return objects with
547 given typeclass *and* all children inheriting from this
548                 typeclass. Mutually exclusive to `include_parents`.
549 include_parents (bool, optional): Return objects with
550 given typeclass *and* all parents to this typeclass.
551 Mutually exclusive to `include_children`.
552
553 Returns:
554 objects (list): The objects found with the given typeclasses.
555
556 """
557
558 if callable(typeclass):
559 cls = typeclass.__class__
560 typeclass = "%s.%s" % (cls.__module__, cls.__name__)
561 elif not isinstance(typeclass, str) and hasattr(typeclass, "path"):
562 typeclass = typeclass.path
563
564 # query objects of exact typeclass
565 query = Q(db_typeclass_path__exact=typeclass)
566
567 if include_children:
568 # build requests for child typeclass objects
569 clsmodule, clsname = typeclass.rsplit(".", 1)
570 cls = variable_from_module(clsmodule, clsname)
571 subclasses = cls.__subclasses__()
572 if subclasses:
573 for child in (child for child in subclasses if hasattr(child, "path")):
574 query = query | Q(db_typeclass_path__exact=child.path)
575 elif include_parents:
576 # build requests for parent typeclass objects
577 clsmodule, clsname = typeclass.rsplit(".", 1)
578 cls = variable_from_module(clsmodule, clsname)
579 parents = cls.__mro__
580 if parents:
581 for parent in (parent for parent in parents if hasattr(parent, "path")):
582 query = query | Q(db_typeclass_path__exact=parent.path)
583 # actually query the database
584 return super().filter(query)
585
586
587 class TypeclassManager(TypedObjectManager):
588 """
589 Manager for the typeclasses. The main purpose of this manager is
590 to limit database queries to the given typeclass despite all
591 typeclasses technically being defined in the same core database
592 model.
593
594 """
595
596 # object-manager methods
597 def smart_search(self, query):
598 """
599 Search by supplying a string with optional extra search criteria to aid the query.
600
601 Args:
602             query (str): A search string that accepts extra search criteria on the following
603 forms:
604
605 [key|alias|#dbref...]
606 [tag==<tagstr>[:category]...]
607 [attr==<key>:<value>:category...]
608
609 All three can be combined in the same query, separated by spaces.
610
611 Returns:
612 matches (queryset): A queryset result matching all queries exactly. If wanting to use
613 spaces or ==, != in tags or attributes, enclose them in quotes.
614
615 Example:
616 house = smart_search("key=foo alias=bar tag=house:building tag=magic attr=color:red")
617
618 Note:
619 The flexibility of this method is limited by the input line format. Tag/attribute
620 matching only works for matching primitives. For even more complex queries, such as
621 'in' operations or object field matching, use the full django query language.
622
623 """
624 # shlex splits by spaces unless escaped by quotes
625 querysplit = shlex.split(query)
626 queries, plustags, plusattrs, negtags, negattrs = [], [], [], [], []
627 for ipart, part in enumerate(querysplit):
628 key, rest = part, ""
629 if ":" in part:
630 key, rest = part.split(":", 1)
631 # tags are on the form tag or tag:category
632 if key.startswith("tag=="):
633 plustags.append((key[5:], rest))
634 continue
635 elif key.startswith("tag!="):
636 negtags.append((key[5:], rest))
637 continue
638 # attrs are on the form attr:value or attr:value:category
639 elif rest:
640 value, category = rest, ""
641 if ":" in rest:
642 value, category = rest.split(":", 1)
643 if key.startswith("attr=="):
644 plusattrs.append((key[7:], value, category))
645 continue
646 elif key.startswith("attr!="):
647 negattrs.append((key[7:], value, category))
648 continue
649 # if we get here, we are entering a key search criterion which
650 # we assume is one word.
651 queries.append(part)
652 # build query from components
653 query = " ".join(queries)
654 # TODO
655
656 def get(self, *args, **kwargs):
657 """
658 Overload the standard get. This will limit itself to only
659 return the current typeclass.
660
661 Args:
662 args (any): These are passed on as arguments to the default
663 django get method.
664 Keyword Args:
665 kwargs (any): These are passed on as normal arguments
666 to the default django get method
667 Returns:
668 object (object): The object found.
669
670 Raises:
671 ObjectNotFound: The exact name of this exception depends
672 on the model base used.
673
674 """
675 kwargs.update({"db_typeclass_path": self.model.path})
676 return super().get(**kwargs)
677
678 def filter(self, *args, **kwargs):
679 """
680 Overload of the standard filter function. This filter will
681 limit itself to only the current typeclass.
682
683 Args:
684 args (any): These are passed on as arguments to the default
685 django filter method.
686 Keyword Args:
687 kwargs (any): These are passed on as normal arguments
688 to the default django filter method.
689 Returns:
690 objects (queryset): The objects found.
691
692 """
693 kwargs.update({"db_typeclass_path": self.model.path})
694 return super().filter(*args, **kwargs)
695
696 def all(self):
697 """
698 Overload method to return all matches, filtering for typeclass.
699
700 Returns:
701 objects (queryset): The objects found.
702
703 """
704 return super().all().filter(db_typeclass_path=self.model.path)
705
706 def first(self):
707 """
708 Overload method to return first match, filtering for typeclass.
709
710 Returns:
711 object (object): The object found.
712
713 Raises:
714 ObjectNotFound: The exact name of this exception depends
715 on the model base used.
716
717 """
718 return super().filter(db_typeclass_path=self.model.path).first()
719
720 def last(self):
721 """
722 Overload method to return last match, filtering for typeclass.
723
724 Returns:
725 object (object): The object found.
726
727 Raises:
728 ObjectNotFound: The exact name of this exception depends
729 on the model base used.
730
731 """
732 return super().filter(db_typeclass_path=self.model.path).last()
733
734 def count(self):
735 """
736 Overload method to return number of matches, filtering for typeclass.
737
738 Returns:
739 integer : Number of objects found.
740
741 """
742 return super().filter(db_typeclass_path=self.model.path).count()
743
744 def annotate(self, *args, **kwargs):
745 """
746 Overload annotate method to filter on typeclass before annotating.
747 Args:
748 *args (any): Positional arguments passed along to queryset annotate method.
749 **kwargs (any): Keyword arguments passed along to queryset annotate method.
750
751 Returns:
752 Annotated queryset.
753 """
754 return super().filter(db_typeclass_path=self.model.path).annotate(*args, **kwargs)
755
756 def values(self, *args, **kwargs):
757 """
758 Overload values method to filter on typeclass first.
759 Args:
760 *args (any): Positional arguments passed along to values method.
761 **kwargs (any): Keyword arguments passed along to values method.
762
763 Returns:
764 Queryset of values dictionaries, just filtered by typeclass first.
765 """
766 return super().filter(db_typeclass_path=self.model.path).values(*args, **kwargs)
767
768 def values_list(self, *args, **kwargs):
769 """
770 Overload values method to filter on typeclass first.
771 Args:
772 *args (any): Positional arguments passed along to values_list method.
773 **kwargs (any): Keyword arguments passed along to values_list method.
774
775 Returns:
776 Queryset of value_list tuples, just filtered by typeclass first.
777 """
778 return super().filter(db_typeclass_path=self.model.path).values_list(*args, **kwargs)
779
780 def _get_subclasses(self, cls):
781 """
782 Recursively get all subclasses to a class.
783
784 Args:
785             cls (classobject): A class to get subclasses from.
786 """
787 all_subclasses = cls.__subclasses__()
788 for subclass in all_subclasses:
789 all_subclasses.extend(self._get_subclasses(subclass))
790 return all_subclasses
791
792 def get_family(self, *args, **kwargs):
793 """
794 Variation of get that not only returns the current typeclass
795 but also all subclasses of that typeclass.
796
797 Keyword Args:
798 kwargs (any): These are passed on as normal arguments
799 to the default django get method.
800 Returns:
801 objects (list): The objects found.
802
803 Raises:
804 ObjectNotFound: The exact name of this exception depends
805 on the model base used.
806
807 """
808 paths = [self.model.path] + [
809 "%s.%s" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)
810 ]
811 kwargs.update({"db_typeclass_path__in": paths})
812 return super().get(*args, **kwargs)
813
814 def filter_family(self, *args, **kwargs):
815 """
816 Variation of filter that allows results both from typeclass
817 and from subclasses of typeclass
818
819 Args:
820 args (any): These are passed on as arguments to the default
821 django filter method.
822 Keyword Args:
823 kwargs (any): These are passed on as normal arguments
824 to the default django filter method.
825 Returns:
826 objects (list): The objects found.
827
828 """
829 # query, including all subclasses
830 paths = [self.model.path] + [
831 "%s.%s" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)
832 ]
833 kwargs.update({"db_typeclass_path__in": paths})
834 return super().filter(*args, **kwargs)
835
836 def all_family(self):
837 """
838 Return all matches, allowing matches from all subclasses of
839 the typeclass.
840
841 Returns:
842 objects (list): The objects found.
843
844 """
845 paths = [self.model.path] + [
846 "%s.%s" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)
847 ]
848 return super().all().filter(db_typeclass_path__in=paths)
849
```
--- END FILES ---
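As an aside, the `match` keyword documented on `get_by_tag` above is easiest to see with a concrete call. A purely hypothetical usage sketch: `Room`, `room_a` and `room_b` are invented for illustration, with `room_a` tagged `tag1:cat1` and `tag2:cat2`, and `room_b` tagged `tag1:cat1` only:

```python
# Default ALL mode: an object must carry every requested tag/category pair.
Room.objects.get_by_tag(key=["tag1", "tag2"], category=["cat1", "cat2"])
# -> queryset containing room_a only

# ANY mode: one matching tag suffices, and results are weight-sorted by the
# Count(...) annotation, so room_a (two hits) ranks ahead of room_b (one hit).
Room.objects.get_by_tag(key=["tag1", "tag2"], category=["cat1", "cat2"], match="any")
# -> queryset ordered [room_a, room_b]
```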
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
| diff --git a/evennia/typeclasses/managers.py b/evennia/typeclasses/managers.py
--- a/evennia/typeclasses/managers.py
+++ b/evennia/typeclasses/managers.py
@@ -286,7 +286,7 @@
categories = make_iter(category) if category else []
n_keys = len(keys)
n_categories = len(categories)
- unique_categories = sorted(set(categories))
+ unique_categories = set(categories)
n_unique_categories = len(unique_categories)
dbmodel = self.model.__dbclass__.__name__.lower()
| {"golden_diff": "diff --git a/evennia/typeclasses/managers.py b/evennia/typeclasses/managers.py\n--- a/evennia/typeclasses/managers.py\n+++ b/evennia/typeclasses/managers.py\n@@ -286,7 +286,7 @@\n categories = make_iter(category) if category else []\n n_keys = len(keys)\n n_categories = len(categories)\n- unique_categories = sorted(set(categories))\n+ unique_categories = set(categories)\n n_unique_categories = len(unique_categories)\n \n dbmodel = self.model.__dbclass__.__name__.lower()\n", "issue": "[BUG - Develop] `get_by_tag` category list fails on None\n#### Describe the bug\r\nWhen using a list for the `category` kwarg that contains an entry of `None` (which is a valid category), it fails with a traceback. Presumably due to the use of `sorted` here <https://github.com/evennia/evennia/blob/develop/evennia/typeclasses/managers.py#L289>\r\n\r\n#### To Reproduce\r\nSteps to reproduce the behavior:\r\n1. Add tags to an object (e.g. a room), both with categories and without. Let's say:\r\n```\r\ntag here = test\r\ntag here = tag1:cat1\r\ntag here = tag2:cat2\r\n```\r\n2. `Room.objects.get_by_tag(key=[\"tag1\",\"tag2\"], category=[\"cat1\",\"cat2\"])` returns the current room\r\n3. `Room.objects.get_by_tag(key=[\"tag1\",\"test\"], category=[\"cat1\",None])` _should_ return the current room as well, but instead will traceback.\r\n\r\n#### Expected behavior\r\nSince `None` is a valid tag category, the function should be capable of searching for tags with categories of strings _and_ of `None`.\r\n\r\n#### Develop-branch commit\r\n7f4769bd9\r\n\r\n#### Additional context\r\nTraceback:\r\n```\r\nFile \"./evennia/evennia/typeclasses/managers.py\", line 289, in get_by_tag\r\n unique_categories = sorted(set(categories))\r\nTypeError: '<' not supported between instances of 'NoneType' and 'str'\r\n```\n", "before_files": [{"content": "\"\"\"\nThis implements the common managers that are used by the\nabstract models in dbobjects.py (and which are thus shared by\nall Attributes and TypedObjects).\n\n\"\"\"\nimport shlex\nfrom django.db.models import F, Q, Count, ExpressionWrapper, FloatField\nfrom django.db.models.functions import Cast\nfrom evennia.utils import idmapper\nfrom evennia.utils.utils import make_iter, variable_from_module\nfrom evennia.typeclasses.attributes import Attribute\nfrom evennia.typeclasses.tags import Tag\n\n__all__ = (\"TypedObjectManager\",)\n_GA = object.__getattribute__\n_Tag = None\n\n\n# Managers\n\n\nclass TypedObjectManager(idmapper.manager.SharedMemoryManager):\n \"\"\"\n Common ObjectManager for all dbobjects.\n\n \"\"\"\n\n # common methods for all typed managers. These are used\n # in other methods. Returns querysets.\n\n # Attribute manager methods\n def get_attribute(\n self, key=None, category=None, value=None, strvalue=None, obj=None, attrtype=None, **kwargs\n ):\n \"\"\"\n Return Attribute objects by key, by category, by value, by strvalue, by\n object (it is stored on) or with a combination of those criteria.\n\n Args:\n key (str, optional): The attribute's key to search for\n category (str, optional): The category of the attribute(s) to search for.\n value (str, optional): The attribute value to search for.\n Note that this is not a very efficient operation since it\n will query for a pickled entity. Mutually exclusive to\n `strvalue`.\n strvalue (str, optional): The str-value to search for.\n Most Attributes will not have strvalue set. 
This is\n mutually exclusive to the `value` keyword and will take\n precedence if given.\n obj (Object, optional): On which object the Attribute to\n search for is.\n attrype (str, optional): An attribute-type to search for.\n By default this is either `None` (normal Attributes) or\n `\"nick\"`.\n **kwargs (any): Currently unused. Reserved for future use.\n\n Returns:\n list: The matching Attributes.\n\n \"\"\"\n dbmodel = self.model.__dbclass__.__name__.lower()\n query = [(\"attribute__db_attrtype\", attrtype), (\"attribute__db_model\", dbmodel)]\n if obj:\n query.append((\"%s__id\" % self.model.__dbclass__.__name__.lower(), obj.id))\n if key:\n query.append((\"attribute__db_key\", key))\n if category:\n query.append((\"attribute__db_category\", category))\n if strvalue:\n query.append((\"attribute__db_strvalue\", strvalue))\n if value:\n # no reason to make strvalue/value mutually exclusive at this level\n query.append((\"attribute__db_value\", value))\n return Attribute.objects.filter(\n pk__in=self.model.db_attributes.through.objects.filter(**dict(query)).values_list(\n \"attribute_id\", flat=True\n )\n )\n\n def get_nick(self, key=None, category=None, value=None, strvalue=None, obj=None):\n \"\"\"\n Get a nick, in parallel to `get_attribute`.\n\n Args:\n key (str, optional): The nicks's key to search for\n category (str, optional): The category of the nicks(s) to search for.\n value (str, optional): The attribute value to search for. Note that this\n is not a very efficient operation since it will query for a pickled\n entity. Mutually exclusive to `strvalue`.\n strvalue (str, optional): The str-value to search for. Most Attributes\n will not have strvalue set. This is mutually exclusive to the `value`\n keyword and will take precedence if given.\n obj (Object, optional): On which object the Attribute to search for is.\n\n Returns:\n nicks (list): The matching Nicks.\n\n \"\"\"\n return self.get_attribute(\n key=key, category=category, value=value, strvalue=strvalue, obj=obj\n )\n\n def get_by_attribute(\n self, key=None, category=None, value=None, strvalue=None, attrtype=None, **kwargs\n ):\n \"\"\"\n Return objects having attributes with the given key, category,\n value, strvalue or combination of those criteria.\n\n Args:\n key (str, optional): The attribute's key to search for\n category (str, optional): The category of the attribute\n to search for.\n value (str, optional): The attribute value to search for.\n Note that this is not a very efficient operation since it\n will query for a pickled entity. Mutually exclusive to\n `strvalue`.\n strvalue (str, optional): The str-value to search for.\n Most Attributes will not have strvalue set. This is\n mutually exclusive to the `value` keyword and will take\n precedence if given.\n attrype (str, optional): An attribute-type to search for.\n By default this is either `None` (normal Attributes) or\n `\"nick\"`.\n kwargs (any): Currently unused. 
Reserved for future use.\n\n Returns:\n obj (list): Objects having the matching Attributes.\n\n \"\"\"\n dbmodel = self.model.__dbclass__.__name__.lower()\n query = [\n (\"db_attributes__db_attrtype\", attrtype),\n (\"db_attributes__db_model\", dbmodel),\n ]\n if key:\n query.append((\"db_attributes__db_key\", key))\n if category:\n query.append((\"db_attributes__db_category\", category))\n if strvalue:\n query.append((\"db_attributes__db_strvalue\", strvalue))\n elif value:\n # strvalue and value are mutually exclusive\n query.append((\"db_attributes__db_value\", value))\n return self.filter(**dict(query))\n\n def get_by_nick(self, key=None, nick=None, category=\"inputline\"):\n \"\"\"\n Get object based on its key or nick.\n\n Args:\n key (str, optional): The attribute's key to search for\n nick (str, optional): The nickname to search for\n category (str, optional): The category of the nick\n to search for.\n\n Returns:\n obj (list): Objects having the matching Nicks.\n\n \"\"\"\n return self.get_by_attribute(key=key, category=category, strvalue=nick, attrtype=\"nick\")\n\n # Tag manager methods\n\n def get_tag(self, key=None, category=None, obj=None, tagtype=None, global_search=False):\n \"\"\"\n Return Tag objects by key, by category, by object (it is\n stored on) or with a combination of those criteria.\n\n Args:\n key (str, optional): The Tag's key to search for\n category (str, optional): The Tag of the attribute(s)\n to search for.\n obj (Object, optional): On which object the Tag to\n search for is.\n tagtype (str, optional): One of `None` (normal tags),\n \"alias\" or \"permission\"\n global_search (bool, optional): Include all possible tags,\n not just tags on this object\n\n Returns:\n tag (list): The matching Tags.\n\n \"\"\"\n global _Tag\n if not _Tag:\n from evennia.typeclasses.models import Tag as _Tag\n dbmodel = self.model.__dbclass__.__name__.lower()\n if global_search:\n # search all tags using the Tag model\n query = [(\"db_tagtype\", tagtype), (\"db_model\", dbmodel)]\n if obj:\n query.append((\"id\", obj.id))\n if key:\n query.append((\"db_key\", key))\n if category:\n query.append((\"db_category\", category))\n return _Tag.objects.filter(**dict(query))\n else:\n # search only among tags stored on on this model\n query = [(\"tag__db_tagtype\", tagtype), (\"tag__db_model\", dbmodel)]\n if obj:\n query.append((\"%s__id\" % self.model.__name__.lower(), obj.id))\n if key:\n query.append((\"tag__db_key\", key))\n if category:\n query.append((\"tag__db_category\", category))\n return Tag.objects.filter(\n pk__in=self.model.db_tags.through.objects.filter(**dict(query)).values_list(\n \"tag_id\", flat=True\n )\n )\n\n def get_permission(self, key=None, category=None, obj=None):\n \"\"\"\n Get a permission from the database.\n\n Args:\n key (str, optional): The permission's identifier.\n category (str, optional): The permission's category.\n obj (object, optional): The object on which this Tag is set.\n\n Returns:\n permission (list): Permission objects.\n\n \"\"\"\n return self.get_tag(key=key, category=category, obj=obj, tagtype=\"permission\")\n\n def get_alias(self, key=None, category=None, obj=None):\n \"\"\"\n Get an alias from the database.\n\n Args:\n key (str, optional): The permission's identifier.\n category (str, optional): The permission's category.\n obj (object, optional): The object on which this Tag is set.\n\n Returns:\n alias (list): Alias objects.\n\n \"\"\"\n return self.get_tag(key=key, category=category, obj=obj, tagtype=\"alias\")\n\n def 
get_by_tag(self, key=None, category=None, tagtype=None, **kwargs):\n \"\"\"\n Return objects having tags with a given key or category or combination of the two.\n Also accepts multiple tags/category/tagtype\n\n Args:\n key (str or list, optional): Tag key or list of keys. Not case sensitive.\n category (str or list, optional): Tag category. Not case sensitive.\n If `key` is a list, a single category can either apply to all\n keys in that list or this must be a list matching the `key`\n list element by element. If no `key` is given, all objects with\n tags of this category are returned.\n tagtype (str, optional): 'type' of Tag, by default\n this is either `None` (a normal Tag), `alias` or\n `permission`. This always apply to all queried tags.\n\n Keyword Args:\n match (str): \"all\" (default) or \"any\"; determines whether the\n target object must be tagged with ALL of the provided\n tags/categories or ANY single one. ANY will perform a weighted\n sort, so objects with more tag matches will outrank those with\n fewer tag matches.\n\n Returns:\n objects (list): Objects with matching tag.\n\n Raises:\n IndexError: If `key` and `category` are both lists and `category` is shorter\n than `key`.\n\n \"\"\"\n if not (key or category):\n return []\n\n global _Tag\n if not _Tag:\n from evennia.typeclasses.models import Tag as _Tag\n\n anymatch = \"any\" == kwargs.get(\"match\", \"all\").lower().strip()\n\n keys = make_iter(key) if key else []\n categories = make_iter(category) if category else []\n n_keys = len(keys)\n n_categories = len(categories)\n unique_categories = sorted(set(categories))\n n_unique_categories = len(unique_categories)\n\n dbmodel = self.model.__dbclass__.__name__.lower()\n query = (\n self.filter(db_tags__db_tagtype__iexact=tagtype, db_tags__db_model__iexact=dbmodel)\n .distinct()\n .order_by(\"id\")\n )\n\n if n_keys > 0:\n # keys and/or categories given\n if n_categories == 0:\n categories = [None for _ in range(n_keys)]\n elif n_categories == 1 and n_keys > 1:\n cat = categories[0]\n categories = [cat for _ in range(n_keys)]\n elif 1 < n_categories < n_keys:\n raise IndexError(\n \"get_by_tag needs a single category or a list of categories \"\n \"the same length as the list of tags.\"\n )\n clauses = Q()\n for ikey, key in enumerate(keys):\n # ANY mode; must match any one of the given tags/categories\n clauses |= Q(db_key__iexact=key, db_category__iexact=categories[ikey])\n else:\n # only one or more categories given\n clauses = Q()\n # ANY mode; must match any one of them\n for category in unique_categories:\n clauses |= Q(db_category__iexact=category)\n\n tags = _Tag.objects.filter(clauses)\n query = query.filter(db_tags__in=tags).annotate(\n matches=Count(\"db_tags__pk\", filter=Q(db_tags__in=tags), distinct=True)\n )\n\n if anymatch:\n # ANY: Match any single tag, ordered by weight\n query = query.order_by(\"-matches\")\n else:\n # Default ALL: Match all of the tags and optionally more\n n_req_tags = n_keys if n_keys > 0 else n_unique_categories\n query = query.filter(matches__gte=n_req_tags)\n\n return query\n\n def get_by_permission(self, key=None, category=None):\n \"\"\"\n Return objects having permissions with a given key or category or\n combination of the two.\n\n Args:\n key (str, optional): Permissions key. Not case sensitive.\n category (str, optional): Permission category. 
Not case sensitive.\n Returns:\n objects (list): Objects with matching permission.\n \"\"\"\n return self.get_by_tag(key=key, category=category, tagtype=\"permission\")\n\n def get_by_alias(self, key=None, category=None):\n \"\"\"\n Return objects having aliases with a given key or category or\n combination of the two.\n\n Args:\n key (str, optional): Alias key. Not case sensitive.\n category (str, optional): Alias category. Not case sensitive.\n Returns:\n objects (list): Objects with matching alias.\n \"\"\"\n return self.get_by_tag(key=key, category=category, tagtype=\"alias\")\n\n def create_tag(self, key=None, category=None, data=None, tagtype=None):\n \"\"\"\n Create a new Tag of the base type associated with this\n object. This makes sure to create case-insensitive tags.\n If the exact same tag configuration (key+category+tagtype+dbmodel)\n exists on the model, a new tag will not be created, but an old\n one returned.\n\n\n Args:\n key (str, optional): Tag key. Not case sensitive.\n category (str, optional): Tag category. Not case sensitive.\n data (str, optional): Extra information about the tag.\n tagtype (str or None, optional): 'type' of Tag, by default\n this is either `None` (a normal Tag), `alias` or\n `permission`.\n Notes:\n The `data` field is not part of the uniqueness of the tag:\n Setting `data` on an existing tag will overwrite the old\n data field. It is intended only as a way to carry\n information about the tag (like a help text), not to carry\n any information about the tagged objects themselves.\n\n \"\"\"\n data = str(data) if data is not None else None\n # try to get old tag\n\n dbmodel = self.model.__dbclass__.__name__.lower()\n tag = self.get_tag(key=key, category=category, tagtype=tagtype, global_search=True)\n if tag and data is not None:\n # get tag from list returned by get_tag\n tag = tag[0]\n # overload data on tag\n tag.db_data = data\n tag.save()\n elif not tag:\n # create a new tag\n global _Tag\n if not _Tag:\n from evennia.typeclasses.models import Tag as _Tag\n tag = _Tag.objects.create(\n db_key=key.strip().lower() if key is not None else None,\n db_category=category.strip().lower() if category and key is not None else None,\n db_data=data,\n db_model=dbmodel,\n db_tagtype=tagtype.strip().lower() if tagtype is not None else None,\n )\n tag.save()\n return make_iter(tag)[0]\n\n def dbref(self, dbref, reqhash=True):\n \"\"\"\n Determing if input is a valid dbref.\n\n Args:\n dbref (str or int): A possible dbref.\n reqhash (bool, optional): If the \"#\" is required for this\n to be considered a valid hash.\n\n Returns:\n dbref (int or None): The integer part of the dbref.\n\n Notes:\n Valid forms of dbref (database reference number) are\n either a string '#N' or an integer N.\n\n \"\"\"\n if reqhash and not (isinstance(dbref, str) and dbref.startswith(\"#\")):\n return None\n if isinstance(dbref, str):\n dbref = dbref.lstrip(\"#\")\n try:\n if int(dbref) < 0:\n return None\n except Exception:\n return None\n return dbref\n\n def get_id(self, dbref):\n \"\"\"\n Find object with given dbref.\n\n Args:\n dbref (str or int): The id to search for.\n\n Returns:\n object (TypedObject): The matched object.\n\n \"\"\"\n dbref = self.dbref(dbref, reqhash=False)\n try:\n return self.get(id=dbref)\n except self.model.DoesNotExist:\n pass\n return None\n\n def dbref_search(self, dbref):\n \"\"\"\n Alias to get_id.\n\n Args:\n dbref (str or int): The id to search for.\n\n Returns:\n Queryset: Queryset with 0 or 1 match.\n\n \"\"\"\n dbref = self.dbref(dbref, 
reqhash=False)\n if dbref:\n return self.filter(id=dbref)\n return self.none()\n\n def get_dbref_range(self, min_dbref=None, max_dbref=None):\n \"\"\"\n Get objects within a certain range of dbrefs.\n\n Args:\n min_dbref (int): Start of dbref range.\n max_dbref (int): End of dbref range (inclusive)\n\n Returns:\n objects (list): TypedObjects with dbrefs within\n the given dbref ranges.\n\n \"\"\"\n retval = super().all()\n if min_dbref is not None:\n retval = retval.filter(id__gte=self.dbref(min_dbref, reqhash=False))\n if max_dbref is not None:\n retval = retval.filter(id__lte=self.dbref(max_dbref, reqhash=False))\n return retval\n\n def get_typeclass_totals(self, *args, **kwargs) -> object:\n \"\"\"\n Returns a queryset of typeclass composition statistics.\n\n Returns:\n qs (Queryset): A queryset of dicts containing the typeclass (name),\n the count of objects with that typeclass and a float representing\n the percentage of objects associated with the typeclass.\n\n \"\"\"\n return (\n self.values(\"db_typeclass_path\")\n .distinct()\n .annotate(\n # Get count of how many objects for each typeclass exist\n count=Count(\"db_typeclass_path\")\n )\n .annotate(\n # Rename db_typeclass_path field to something more human\n typeclass=F(\"db_typeclass_path\"),\n # Calculate this class' percentage of total composition\n percent=ExpressionWrapper(\n ((F(\"count\") / float(self.count())) * 100.0),\n output_field=FloatField(),\n ),\n )\n .values(\"typeclass\", \"count\", \"percent\")\n )\n\n def object_totals(self):\n \"\"\"\n Get info about database statistics.\n\n Returns:\n census (dict): A dictionary `{typeclass_path: number, ...}` with\n all the typeclasses active in-game as well as the number\n of such objects defined (i.e. the number of database\n object having that typeclass set on themselves).\n\n \"\"\"\n stats = self.get_typeclass_totals().order_by(\"typeclass\")\n return {x.get(\"typeclass\"): x.get(\"count\") for x in stats}\n\n def typeclass_search(self, typeclass, include_children=False, include_parents=False):\n \"\"\"\n Searches through all objects returning those which has a\n certain typeclass. If location is set, limit search to objects\n in that location.\n\n Args:\n typeclass (str or class): A typeclass class or a python path to a typeclass.\n include_children (bool, optional): Return objects with\n given typeclass *and* all children inheriting from this\n typeclass. 
Mutuall exclusive to `include_parents`.\n include_parents (bool, optional): Return objects with\n given typeclass *and* all parents to this typeclass.\n Mutually exclusive to `include_children`.\n\n Returns:\n objects (list): The objects found with the given typeclasses.\n\n \"\"\"\n\n if callable(typeclass):\n cls = typeclass.__class__\n typeclass = \"%s.%s\" % (cls.__module__, cls.__name__)\n elif not isinstance(typeclass, str) and hasattr(typeclass, \"path\"):\n typeclass = typeclass.path\n\n # query objects of exact typeclass\n query = Q(db_typeclass_path__exact=typeclass)\n\n if include_children:\n # build requests for child typeclass objects\n clsmodule, clsname = typeclass.rsplit(\".\", 1)\n cls = variable_from_module(clsmodule, clsname)\n subclasses = cls.__subclasses__()\n if subclasses:\n for child in (child for child in subclasses if hasattr(child, \"path\")):\n query = query | Q(db_typeclass_path__exact=child.path)\n elif include_parents:\n # build requests for parent typeclass objects\n clsmodule, clsname = typeclass.rsplit(\".\", 1)\n cls = variable_from_module(clsmodule, clsname)\n parents = cls.__mro__\n if parents:\n for parent in (parent for parent in parents if hasattr(parent, \"path\")):\n query = query | Q(db_typeclass_path__exact=parent.path)\n # actually query the database\n return super().filter(query)\n\n\nclass TypeclassManager(TypedObjectManager):\n \"\"\"\n Manager for the typeclasses. The main purpose of this manager is\n to limit database queries to the given typeclass despite all\n typeclasses technically being defined in the same core database\n model.\n\n \"\"\"\n\n # object-manager methods\n def smart_search(self, query):\n \"\"\"\n Search by supplying a string with optional extra search criteria to aid the query.\n\n Args:\n query (str): A search criteria that accepts extra search criteria on the following\n forms:\n\n [key|alias|#dbref...]\n [tag==<tagstr>[:category]...]\n [attr==<key>:<value>:category...]\n\n All three can be combined in the same query, separated by spaces.\n\n Returns:\n matches (queryset): A queryset result matching all queries exactly. If wanting to use\n spaces or ==, != in tags or attributes, enclose them in quotes.\n\n Example:\n house = smart_search(\"key=foo alias=bar tag=house:building tag=magic attr=color:red\")\n\n Note:\n The flexibility of this method is limited by the input line format. Tag/attribute\n matching only works for matching primitives. 
For even more complex queries, such as\n 'in' operations or object field matching, use the full django query language.\n\n \"\"\"\n # shlex splits by spaces unless escaped by quotes\n querysplit = shlex.split(query)\n queries, plustags, plusattrs, negtags, negattrs = [], [], [], [], []\n for ipart, part in enumerate(querysplit):\n key, rest = part, \"\"\n if \":\" in part:\n key, rest = part.split(\":\", 1)\n # tags are on the form tag or tag:category\n if key.startswith(\"tag==\"):\n plustags.append((key[5:], rest))\n continue\n elif key.startswith(\"tag!=\"):\n negtags.append((key[5:], rest))\n continue\n # attrs are on the form attr:value or attr:value:category\n elif rest:\n value, category = rest, \"\"\n if \":\" in rest:\n value, category = rest.split(\":\", 1)\n if key.startswith(\"attr==\"):\n plusattrs.append((key[7:], value, category))\n continue\n elif key.startswith(\"attr!=\"):\n negattrs.append((key[7:], value, category))\n continue\n # if we get here, we are entering a key search criterion which\n # we assume is one word.\n queries.append(part)\n # build query from components\n query = \" \".join(queries)\n # TODO\n\n def get(self, *args, **kwargs):\n \"\"\"\n Overload the standard get. This will limit itself to only\n return the current typeclass.\n\n Args:\n args (any): These are passed on as arguments to the default\n django get method.\n Keyword Args:\n kwargs (any): These are passed on as normal arguments\n to the default django get method\n Returns:\n object (object): The object found.\n\n Raises:\n ObjectNotFound: The exact name of this exception depends\n on the model base used.\n\n \"\"\"\n kwargs.update({\"db_typeclass_path\": self.model.path})\n return super().get(**kwargs)\n\n def filter(self, *args, **kwargs):\n \"\"\"\n Overload of the standard filter function. 
This filter will\n limit itself to only the current typeclass.\n\n Args:\n args (any): These are passed on as arguments to the default\n django filter method.\n Keyword Args:\n kwargs (any): These are passed on as normal arguments\n to the default django filter method.\n Returns:\n objects (queryset): The objects found.\n\n \"\"\"\n kwargs.update({\"db_typeclass_path\": self.model.path})\n return super().filter(*args, **kwargs)\n\n def all(self):\n \"\"\"\n Overload method to return all matches, filtering for typeclass.\n\n Returns:\n objects (queryset): The objects found.\n\n \"\"\"\n return super().all().filter(db_typeclass_path=self.model.path)\n\n def first(self):\n \"\"\"\n Overload method to return first match, filtering for typeclass.\n\n Returns:\n object (object): The object found.\n\n Raises:\n ObjectNotFound: The exact name of this exception depends\n on the model base used.\n\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).first()\n\n def last(self):\n \"\"\"\n Overload method to return last match, filtering for typeclass.\n\n Returns:\n object (object): The object found.\n\n Raises:\n ObjectNotFound: The exact name of this exception depends\n on the model base used.\n\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).last()\n\n def count(self):\n \"\"\"\n Overload method to return number of matches, filtering for typeclass.\n\n Returns:\n integer : Number of objects found.\n\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).count()\n\n def annotate(self, *args, **kwargs):\n \"\"\"\n Overload annotate method to filter on typeclass before annotating.\n Args:\n *args (any): Positional arguments passed along to queryset annotate method.\n **kwargs (any): Keyword arguments passed along to queryset annotate method.\n\n Returns:\n Annotated queryset.\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).annotate(*args, **kwargs)\n\n def values(self, *args, **kwargs):\n \"\"\"\n Overload values method to filter on typeclass first.\n Args:\n *args (any): Positional arguments passed along to values method.\n **kwargs (any): Keyword arguments passed along to values method.\n\n Returns:\n Queryset of values dictionaries, just filtered by typeclass first.\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).values(*args, **kwargs)\n\n def values_list(self, *args, **kwargs):\n \"\"\"\n Overload values method to filter on typeclass first.\n Args:\n *args (any): Positional arguments passed along to values_list method.\n **kwargs (any): Keyword arguments passed along to values_list method.\n\n Returns:\n Queryset of value_list tuples, just filtered by typeclass first.\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).values_list(*args, **kwargs)\n\n def _get_subclasses(self, cls):\n \"\"\"\n Recursively get all subclasses to a class.\n\n Args:\n cls (classoject): A class to get subclasses from.\n \"\"\"\n all_subclasses = cls.__subclasses__()\n for subclass in all_subclasses:\n all_subclasses.extend(self._get_subclasses(subclass))\n return all_subclasses\n\n def get_family(self, *args, **kwargs):\n \"\"\"\n Variation of get that not only returns the current typeclass\n but also all subclasses of that typeclass.\n\n Keyword Args:\n kwargs (any): These are passed on as normal arguments\n to the default django get method.\n Returns:\n objects (list): The objects found.\n\n Raises:\n ObjectNotFound: The exact name of this exception depends\n on the model base used.\n\n \"\"\"\n paths = 
[self.model.path] + [\n \"%s.%s\" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)\n ]\n kwargs.update({\"db_typeclass_path__in\": paths})\n return super().get(*args, **kwargs)\n\n def filter_family(self, *args, **kwargs):\n \"\"\"\n Variation of filter that allows results both from typeclass\n and from subclasses of typeclass\n\n Args:\n args (any): These are passed on as arguments to the default\n django filter method.\n Keyword Args:\n kwargs (any): These are passed on as normal arguments\n to the default django filter method.\n Returns:\n objects (list): The objects found.\n\n \"\"\"\n # query, including all subclasses\n paths = [self.model.path] + [\n \"%s.%s\" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)\n ]\n kwargs.update({\"db_typeclass_path__in\": paths})\n return super().filter(*args, **kwargs)\n\n def all_family(self):\n \"\"\"\n Return all matches, allowing matches from all subclasses of\n the typeclass.\n\n Returns:\n objects (list): The objects found.\n\n \"\"\"\n paths = [self.model.path] + [\n \"%s.%s\" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)\n ]\n return super().all().filter(db_typeclass_path__in=paths)\n", "path": "evennia/typeclasses/managers.py"}], "after_files": [{"content": "\"\"\"\nThis implements the common managers that are used by the\nabstract models in dbobjects.py (and which are thus shared by\nall Attributes and TypedObjects).\n\n\"\"\"\nimport shlex\nfrom django.db.models import F, Q, Count, ExpressionWrapper, FloatField\nfrom django.db.models.functions import Cast\nfrom evennia.utils import idmapper\nfrom evennia.utils.utils import make_iter, variable_from_module\nfrom evennia.typeclasses.attributes import Attribute\nfrom evennia.typeclasses.tags import Tag\n\n__all__ = (\"TypedObjectManager\",)\n_GA = object.__getattribute__\n_Tag = None\n\n\n# Managers\n\n\nclass TypedObjectManager(idmapper.manager.SharedMemoryManager):\n \"\"\"\n Common ObjectManager for all dbobjects.\n\n \"\"\"\n\n # common methods for all typed managers. These are used\n # in other methods. Returns querysets.\n\n # Attribute manager methods\n def get_attribute(\n self, key=None, category=None, value=None, strvalue=None, obj=None, attrtype=None, **kwargs\n ):\n \"\"\"\n Return Attribute objects by key, by category, by value, by strvalue, by\n object (it is stored on) or with a combination of those criteria.\n\n Args:\n key (str, optional): The attribute's key to search for\n category (str, optional): The category of the attribute(s) to search for.\n value (str, optional): The attribute value to search for.\n Note that this is not a very efficient operation since it\n will query for a pickled entity. Mutually exclusive to\n `strvalue`.\n strvalue (str, optional): The str-value to search for.\n Most Attributes will not have strvalue set. This is\n mutually exclusive to the `value` keyword and will take\n precedence if given.\n obj (Object, optional): On which object the Attribute to\n search for is.\n attrype (str, optional): An attribute-type to search for.\n By default this is either `None` (normal Attributes) or\n `\"nick\"`.\n **kwargs (any): Currently unused. 
Reserved for future use.\n\n Returns:\n list: The matching Attributes.\n\n \"\"\"\n dbmodel = self.model.__dbclass__.__name__.lower()\n query = [(\"attribute__db_attrtype\", attrtype), (\"attribute__db_model\", dbmodel)]\n if obj:\n query.append((\"%s__id\" % self.model.__dbclass__.__name__.lower(), obj.id))\n if key:\n query.append((\"attribute__db_key\", key))\n if category:\n query.append((\"attribute__db_category\", category))\n if strvalue:\n query.append((\"attribute__db_strvalue\", strvalue))\n if value:\n # no reason to make strvalue/value mutually exclusive at this level\n query.append((\"attribute__db_value\", value))\n return Attribute.objects.filter(\n pk__in=self.model.db_attributes.through.objects.filter(**dict(query)).values_list(\n \"attribute_id\", flat=True\n )\n )\n\n def get_nick(self, key=None, category=None, value=None, strvalue=None, obj=None):\n \"\"\"\n Get a nick, in parallel to `get_attribute`.\n\n Args:\n key (str, optional): The nicks's key to search for\n category (str, optional): The category of the nicks(s) to search for.\n value (str, optional): The attribute value to search for. Note that this\n is not a very efficient operation since it will query for a pickled\n entity. Mutually exclusive to `strvalue`.\n strvalue (str, optional): The str-value to search for. Most Attributes\n will not have strvalue set. This is mutually exclusive to the `value`\n keyword and will take precedence if given.\n obj (Object, optional): On which object the Attribute to search for is.\n\n Returns:\n nicks (list): The matching Nicks.\n\n \"\"\"\n return self.get_attribute(\n key=key, category=category, value=value, strvalue=strvalue, obj=obj\n )\n\n def get_by_attribute(\n self, key=None, category=None, value=None, strvalue=None, attrtype=None, **kwargs\n ):\n \"\"\"\n Return objects having attributes with the given key, category,\n value, strvalue or combination of those criteria.\n\n Args:\n key (str, optional): The attribute's key to search for\n category (str, optional): The category of the attribute\n to search for.\n value (str, optional): The attribute value to search for.\n Note that this is not a very efficient operation since it\n will query for a pickled entity. Mutually exclusive to\n `strvalue`.\n strvalue (str, optional): The str-value to search for.\n Most Attributes will not have strvalue set. This is\n mutually exclusive to the `value` keyword and will take\n precedence if given.\n attrype (str, optional): An attribute-type to search for.\n By default this is either `None` (normal Attributes) or\n `\"nick\"`.\n kwargs (any): Currently unused. 
Reserved for future use.\n\n Returns:\n obj (list): Objects having the matching Attributes.\n\n \"\"\"\n dbmodel = self.model.__dbclass__.__name__.lower()\n query = [\n (\"db_attributes__db_attrtype\", attrtype),\n (\"db_attributes__db_model\", dbmodel),\n ]\n if key:\n query.append((\"db_attributes__db_key\", key))\n if category:\n query.append((\"db_attributes__db_category\", category))\n if strvalue:\n query.append((\"db_attributes__db_strvalue\", strvalue))\n elif value:\n # strvalue and value are mutually exclusive\n query.append((\"db_attributes__db_value\", value))\n return self.filter(**dict(query))\n\n def get_by_nick(self, key=None, nick=None, category=\"inputline\"):\n \"\"\"\n Get object based on its key or nick.\n\n Args:\n key (str, optional): The attribute's key to search for\n nick (str, optional): The nickname to search for\n category (str, optional): The category of the nick\n to search for.\n\n Returns:\n obj (list): Objects having the matching Nicks.\n\n \"\"\"\n return self.get_by_attribute(key=key, category=category, strvalue=nick, attrtype=\"nick\")\n\n # Tag manager methods\n\n def get_tag(self, key=None, category=None, obj=None, tagtype=None, global_search=False):\n \"\"\"\n Return Tag objects by key, by category, by object (it is\n stored on) or with a combination of those criteria.\n\n Args:\n key (str, optional): The Tag's key to search for\n category (str, optional): The Tag of the attribute(s)\n to search for.\n obj (Object, optional): On which object the Tag to\n search for is.\n tagtype (str, optional): One of `None` (normal tags),\n \"alias\" or \"permission\"\n global_search (bool, optional): Include all possible tags,\n not just tags on this object\n\n Returns:\n tag (list): The matching Tags.\n\n \"\"\"\n global _Tag\n if not _Tag:\n from evennia.typeclasses.models import Tag as _Tag\n dbmodel = self.model.__dbclass__.__name__.lower()\n if global_search:\n # search all tags using the Tag model\n query = [(\"db_tagtype\", tagtype), (\"db_model\", dbmodel)]\n if obj:\n query.append((\"id\", obj.id))\n if key:\n query.append((\"db_key\", key))\n if category:\n query.append((\"db_category\", category))\n return _Tag.objects.filter(**dict(query))\n else:\n # search only among tags stored on on this model\n query = [(\"tag__db_tagtype\", tagtype), (\"tag__db_model\", dbmodel)]\n if obj:\n query.append((\"%s__id\" % self.model.__name__.lower(), obj.id))\n if key:\n query.append((\"tag__db_key\", key))\n if category:\n query.append((\"tag__db_category\", category))\n return Tag.objects.filter(\n pk__in=self.model.db_tags.through.objects.filter(**dict(query)).values_list(\n \"tag_id\", flat=True\n )\n )\n\n def get_permission(self, key=None, category=None, obj=None):\n \"\"\"\n Get a permission from the database.\n\n Args:\n key (str, optional): The permission's identifier.\n category (str, optional): The permission's category.\n obj (object, optional): The object on which this Tag is set.\n\n Returns:\n permission (list): Permission objects.\n\n \"\"\"\n return self.get_tag(key=key, category=category, obj=obj, tagtype=\"permission\")\n\n def get_alias(self, key=None, category=None, obj=None):\n \"\"\"\n Get an alias from the database.\n\n Args:\n key (str, optional): The permission's identifier.\n category (str, optional): The permission's category.\n obj (object, optional): The object on which this Tag is set.\n\n Returns:\n alias (list): Alias objects.\n\n \"\"\"\n return self.get_tag(key=key, category=category, obj=obj, tagtype=\"alias\")\n\n def 
get_by_tag(self, key=None, category=None, tagtype=None, **kwargs):\n \"\"\"\n Return objects having tags with a given key or category or combination of the two.\n Also accepts multiple tags/category/tagtype\n\n Args:\n key (str or list, optional): Tag key or list of keys. Not case sensitive.\n category (str or list, optional): Tag category. Not case sensitive.\n If `key` is a list, a single category can either apply to all\n keys in that list or this must be a list matching the `key`\n list element by element. If no `key` is given, all objects with\n tags of this category are returned.\n tagtype (str, optional): 'type' of Tag, by default\n this is either `None` (a normal Tag), `alias` or\n `permission`. This always apply to all queried tags.\n\n Keyword Args:\n match (str): \"all\" (default) or \"any\"; determines whether the\n target object must be tagged with ALL of the provided\n tags/categories or ANY single one. ANY will perform a weighted\n sort, so objects with more tag matches will outrank those with\n fewer tag matches.\n\n Returns:\n objects (list): Objects with matching tag.\n\n Raises:\n IndexError: If `key` and `category` are both lists and `category` is shorter\n than `key`.\n\n \"\"\"\n if not (key or category):\n return []\n\n global _Tag\n if not _Tag:\n from evennia.typeclasses.models import Tag as _Tag\n\n anymatch = \"any\" == kwargs.get(\"match\", \"all\").lower().strip()\n\n keys = make_iter(key) if key else []\n categories = make_iter(category) if category else []\n n_keys = len(keys)\n n_categories = len(categories)\n unique_categories = set(categories)\n n_unique_categories = len(unique_categories)\n\n dbmodel = self.model.__dbclass__.__name__.lower()\n query = (\n self.filter(db_tags__db_tagtype__iexact=tagtype, db_tags__db_model__iexact=dbmodel)\n .distinct()\n .order_by(\"id\")\n )\n\n if n_keys > 0:\n # keys and/or categories given\n if n_categories == 0:\n categories = [None for _ in range(n_keys)]\n elif n_categories == 1 and n_keys > 1:\n cat = categories[0]\n categories = [cat for _ in range(n_keys)]\n elif 1 < n_categories < n_keys:\n raise IndexError(\n \"get_by_tag needs a single category or a list of categories \"\n \"the same length as the list of tags.\"\n )\n clauses = Q()\n for ikey, key in enumerate(keys):\n # ANY mode; must match any one of the given tags/categories\n clauses |= Q(db_key__iexact=key, db_category__iexact=categories[ikey])\n else:\n # only one or more categories given\n clauses = Q()\n # ANY mode; must match any one of them\n for category in unique_categories:\n clauses |= Q(db_category__iexact=category)\n\n tags = _Tag.objects.filter(clauses)\n query = query.filter(db_tags__in=tags).annotate(\n matches=Count(\"db_tags__pk\", filter=Q(db_tags__in=tags), distinct=True)\n )\n\n if anymatch:\n # ANY: Match any single tag, ordered by weight\n query = query.order_by(\"-matches\")\n else:\n # Default ALL: Match all of the tags and optionally more\n n_req_tags = n_keys if n_keys > 0 else n_unique_categories\n query = query.filter(matches__gte=n_req_tags)\n\n return query\n\n def get_by_permission(self, key=None, category=None):\n \"\"\"\n Return objects having permissions with a given key or category or\n combination of the two.\n\n Args:\n key (str, optional): Permissions key. Not case sensitive.\n category (str, optional): Permission category. 
Not case sensitive.\n Returns:\n objects (list): Objects with matching permission.\n \"\"\"\n return self.get_by_tag(key=key, category=category, tagtype=\"permission\")\n\n def get_by_alias(self, key=None, category=None):\n \"\"\"\n Return objects having aliases with a given key or category or\n combination of the two.\n\n Args:\n key (str, optional): Alias key. Not case sensitive.\n category (str, optional): Alias category. Not case sensitive.\n Returns:\n objects (list): Objects with matching alias.\n \"\"\"\n return self.get_by_tag(key=key, category=category, tagtype=\"alias\")\n\n def create_tag(self, key=None, category=None, data=None, tagtype=None):\n \"\"\"\n Create a new Tag of the base type associated with this\n object. This makes sure to create case-insensitive tags.\n If the exact same tag configuration (key+category+tagtype+dbmodel)\n exists on the model, a new tag will not be created, but an old\n one returned.\n\n\n Args:\n key (str, optional): Tag key. Not case sensitive.\n category (str, optional): Tag category. Not case sensitive.\n data (str, optional): Extra information about the tag.\n tagtype (str or None, optional): 'type' of Tag, by default\n this is either `None` (a normal Tag), `alias` or\n `permission`.\n Notes:\n The `data` field is not part of the uniqueness of the tag:\n Setting `data` on an existing tag will overwrite the old\n data field. It is intended only as a way to carry\n information about the tag (like a help text), not to carry\n any information about the tagged objects themselves.\n\n \"\"\"\n data = str(data) if data is not None else None\n # try to get old tag\n\n dbmodel = self.model.__dbclass__.__name__.lower()\n tag = self.get_tag(key=key, category=category, tagtype=tagtype, global_search=True)\n if tag and data is not None:\n # get tag from list returned by get_tag\n tag = tag[0]\n # overload data on tag\n tag.db_data = data\n tag.save()\n elif not tag:\n # create a new tag\n global _Tag\n if not _Tag:\n from evennia.typeclasses.models import Tag as _Tag\n tag = _Tag.objects.create(\n db_key=key.strip().lower() if key is not None else None,\n db_category=category.strip().lower() if category and key is not None else None,\n db_data=data,\n db_model=dbmodel,\n db_tagtype=tagtype.strip().lower() if tagtype is not None else None,\n )\n tag.save()\n return make_iter(tag)[0]\n\n def dbref(self, dbref, reqhash=True):\n \"\"\"\n Determing if input is a valid dbref.\n\n Args:\n dbref (str or int): A possible dbref.\n reqhash (bool, optional): If the \"#\" is required for this\n to be considered a valid hash.\n\n Returns:\n dbref (int or None): The integer part of the dbref.\n\n Notes:\n Valid forms of dbref (database reference number) are\n either a string '#N' or an integer N.\n\n \"\"\"\n if reqhash and not (isinstance(dbref, str) and dbref.startswith(\"#\")):\n return None\n if isinstance(dbref, str):\n dbref = dbref.lstrip(\"#\")\n try:\n if int(dbref) < 0:\n return None\n except Exception:\n return None\n return dbref\n\n def get_id(self, dbref):\n \"\"\"\n Find object with given dbref.\n\n Args:\n dbref (str or int): The id to search for.\n\n Returns:\n object (TypedObject): The matched object.\n\n \"\"\"\n dbref = self.dbref(dbref, reqhash=False)\n try:\n return self.get(id=dbref)\n except self.model.DoesNotExist:\n pass\n return None\n\n def dbref_search(self, dbref):\n \"\"\"\n Alias to get_id.\n\n Args:\n dbref (str or int): The id to search for.\n\n Returns:\n Queryset: Queryset with 0 or 1 match.\n\n \"\"\"\n dbref = self.dbref(dbref, 
reqhash=False)\n if dbref:\n return self.filter(id=dbref)\n return self.none()\n\n def get_dbref_range(self, min_dbref=None, max_dbref=None):\n \"\"\"\n Get objects within a certain range of dbrefs.\n\n Args:\n min_dbref (int): Start of dbref range.\n max_dbref (int): End of dbref range (inclusive)\n\n Returns:\n objects (list): TypedObjects with dbrefs within\n the given dbref ranges.\n\n \"\"\"\n retval = super().all()\n if min_dbref is not None:\n retval = retval.filter(id__gte=self.dbref(min_dbref, reqhash=False))\n if max_dbref is not None:\n retval = retval.filter(id__lte=self.dbref(max_dbref, reqhash=False))\n return retval\n\n def get_typeclass_totals(self, *args, **kwargs) -> object:\n \"\"\"\n Returns a queryset of typeclass composition statistics.\n\n Returns:\n qs (Queryset): A queryset of dicts containing the typeclass (name),\n the count of objects with that typeclass and a float representing\n the percentage of objects associated with the typeclass.\n\n \"\"\"\n return (\n self.values(\"db_typeclass_path\")\n .distinct()\n .annotate(\n # Get count of how many objects for each typeclass exist\n count=Count(\"db_typeclass_path\")\n )\n .annotate(\n # Rename db_typeclass_path field to something more human\n typeclass=F(\"db_typeclass_path\"),\n # Calculate this class' percentage of total composition\n percent=ExpressionWrapper(\n ((F(\"count\") / float(self.count())) * 100.0),\n output_field=FloatField(),\n ),\n )\n .values(\"typeclass\", \"count\", \"percent\")\n )\n\n def object_totals(self):\n \"\"\"\n Get info about database statistics.\n\n Returns:\n census (dict): A dictionary `{typeclass_path: number, ...}` with\n all the typeclasses active in-game as well as the number\n of such objects defined (i.e. the number of database\n object having that typeclass set on themselves).\n\n \"\"\"\n stats = self.get_typeclass_totals().order_by(\"typeclass\")\n return {x.get(\"typeclass\"): x.get(\"count\") for x in stats}\n\n def typeclass_search(self, typeclass, include_children=False, include_parents=False):\n \"\"\"\n Searches through all objects returning those which has a\n certain typeclass. If location is set, limit search to objects\n in that location.\n\n Args:\n typeclass (str or class): A typeclass class or a python path to a typeclass.\n include_children (bool, optional): Return objects with\n given typeclass *and* all children inheriting from this\n typeclass. 
Mutuall exclusive to `include_parents`.\n include_parents (bool, optional): Return objects with\n given typeclass *and* all parents to this typeclass.\n Mutually exclusive to `include_children`.\n\n Returns:\n objects (list): The objects found with the given typeclasses.\n\n \"\"\"\n\n if callable(typeclass):\n cls = typeclass.__class__\n typeclass = \"%s.%s\" % (cls.__module__, cls.__name__)\n elif not isinstance(typeclass, str) and hasattr(typeclass, \"path\"):\n typeclass = typeclass.path\n\n # query objects of exact typeclass\n query = Q(db_typeclass_path__exact=typeclass)\n\n if include_children:\n # build requests for child typeclass objects\n clsmodule, clsname = typeclass.rsplit(\".\", 1)\n cls = variable_from_module(clsmodule, clsname)\n subclasses = cls.__subclasses__()\n if subclasses:\n for child in (child for child in subclasses if hasattr(child, \"path\")):\n query = query | Q(db_typeclass_path__exact=child.path)\n elif include_parents:\n # build requests for parent typeclass objects\n clsmodule, clsname = typeclass.rsplit(\".\", 1)\n cls = variable_from_module(clsmodule, clsname)\n parents = cls.__mro__\n if parents:\n for parent in (parent for parent in parents if hasattr(parent, \"path\")):\n query = query | Q(db_typeclass_path__exact=parent.path)\n # actually query the database\n return super().filter(query)\n\n\nclass TypeclassManager(TypedObjectManager):\n \"\"\"\n Manager for the typeclasses. The main purpose of this manager is\n to limit database queries to the given typeclass despite all\n typeclasses technically being defined in the same core database\n model.\n\n \"\"\"\n\n # object-manager methods\n def smart_search(self, query):\n \"\"\"\n Search by supplying a string with optional extra search criteria to aid the query.\n\n Args:\n query (str): A search criteria that accepts extra search criteria on the following\n forms:\n\n [key|alias|#dbref...]\n [tag==<tagstr>[:category]...]\n [attr==<key>:<value>:category...]\n\n All three can be combined in the same query, separated by spaces.\n\n Returns:\n matches (queryset): A queryset result matching all queries exactly. If wanting to use\n spaces or ==, != in tags or attributes, enclose them in quotes.\n\n Example:\n house = smart_search(\"key=foo alias=bar tag=house:building tag=magic attr=color:red\")\n\n Note:\n The flexibility of this method is limited by the input line format. Tag/attribute\n matching only works for matching primitives. 
For even more complex queries, such as\n 'in' operations or object field matching, use the full django query language.\n\n \"\"\"\n # shlex splits by spaces unless escaped by quotes\n querysplit = shlex.split(query)\n queries, plustags, plusattrs, negtags, negattrs = [], [], [], [], []\n for ipart, part in enumerate(querysplit):\n key, rest = part, \"\"\n if \":\" in part:\n key, rest = part.split(\":\", 1)\n # tags are on the form tag or tag:category\n if key.startswith(\"tag==\"):\n plustags.append((key[5:], rest))\n continue\n elif key.startswith(\"tag!=\"):\n negtags.append((key[5:], rest))\n continue\n # attrs are on the form attr:value or attr:value:category\n elif rest:\n value, category = rest, \"\"\n if \":\" in rest:\n value, category = rest.split(\":\", 1)\n if key.startswith(\"attr==\"):\n plusattrs.append((key[7:], value, category))\n continue\n elif key.startswith(\"attr!=\"):\n negattrs.append((key[7:], value, category))\n continue\n # if we get here, we are entering a key search criterion which\n # we assume is one word.\n queries.append(part)\n # build query from components\n query = \" \".join(queries)\n # TODO\n\n def get(self, *args, **kwargs):\n \"\"\"\n Overload the standard get. This will limit itself to only\n return the current typeclass.\n\n Args:\n args (any): These are passed on as arguments to the default\n django get method.\n Keyword Args:\n kwargs (any): These are passed on as normal arguments\n to the default django get method\n Returns:\n object (object): The object found.\n\n Raises:\n ObjectNotFound: The exact name of this exception depends\n on the model base used.\n\n \"\"\"\n kwargs.update({\"db_typeclass_path\": self.model.path})\n return super().get(**kwargs)\n\n def filter(self, *args, **kwargs):\n \"\"\"\n Overload of the standard filter function. 
This filter will\n limit itself to only the current typeclass.\n\n Args:\n args (any): These are passed on as arguments to the default\n django filter method.\n Keyword Args:\n kwargs (any): These are passed on as normal arguments\n to the default django filter method.\n Returns:\n objects (queryset): The objects found.\n\n \"\"\"\n kwargs.update({\"db_typeclass_path\": self.model.path})\n return super().filter(*args, **kwargs)\n\n def all(self):\n \"\"\"\n Overload method to return all matches, filtering for typeclass.\n\n Returns:\n objects (queryset): The objects found.\n\n \"\"\"\n return super().all().filter(db_typeclass_path=self.model.path)\n\n def first(self):\n \"\"\"\n Overload method to return first match, filtering for typeclass.\n\n Returns:\n object (object): The object found.\n\n Raises:\n ObjectNotFound: The exact name of this exception depends\n on the model base used.\n\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).first()\n\n def last(self):\n \"\"\"\n Overload method to return last match, filtering for typeclass.\n\n Returns:\n object (object): The object found.\n\n Raises:\n ObjectNotFound: The exact name of this exception depends\n on the model base used.\n\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).last()\n\n def count(self):\n \"\"\"\n Overload method to return number of matches, filtering for typeclass.\n\n Returns:\n integer : Number of objects found.\n\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).count()\n\n def annotate(self, *args, **kwargs):\n \"\"\"\n Overload annotate method to filter on typeclass before annotating.\n Args:\n *args (any): Positional arguments passed along to queryset annotate method.\n **kwargs (any): Keyword arguments passed along to queryset annotate method.\n\n Returns:\n Annotated queryset.\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).annotate(*args, **kwargs)\n\n def values(self, *args, **kwargs):\n \"\"\"\n Overload values method to filter on typeclass first.\n Args:\n *args (any): Positional arguments passed along to values method.\n **kwargs (any): Keyword arguments passed along to values method.\n\n Returns:\n Queryset of values dictionaries, just filtered by typeclass first.\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).values(*args, **kwargs)\n\n def values_list(self, *args, **kwargs):\n \"\"\"\n Overload values method to filter on typeclass first.\n Args:\n *args (any): Positional arguments passed along to values_list method.\n **kwargs (any): Keyword arguments passed along to values_list method.\n\n Returns:\n Queryset of value_list tuples, just filtered by typeclass first.\n \"\"\"\n return super().filter(db_typeclass_path=self.model.path).values_list(*args, **kwargs)\n\n def _get_subclasses(self, cls):\n \"\"\"\n Recursively get all subclasses to a class.\n\n Args:\n cls (classoject): A class to get subclasses from.\n \"\"\"\n all_subclasses = cls.__subclasses__()\n for subclass in all_subclasses:\n all_subclasses.extend(self._get_subclasses(subclass))\n return all_subclasses\n\n def get_family(self, *args, **kwargs):\n \"\"\"\n Variation of get that not only returns the current typeclass\n but also all subclasses of that typeclass.\n\n Keyword Args:\n kwargs (any): These are passed on as normal arguments\n to the default django get method.\n Returns:\n objects (list): The objects found.\n\n Raises:\n ObjectNotFound: The exact name of this exception depends\n on the model base used.\n\n \"\"\"\n paths = 
[self.model.path] + [\n \"%s.%s\" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)\n ]\n kwargs.update({\"db_typeclass_path__in\": paths})\n return super().get(*args, **kwargs)\n\n def filter_family(self, *args, **kwargs):\n \"\"\"\n Variation of filter that allows results both from typeclass\n and from subclasses of typeclass\n\n Args:\n args (any): These are passed on as arguments to the default\n django filter method.\n Keyword Args:\n kwargs (any): These are passed on as normal arguments\n to the default django filter method.\n Returns:\n objects (list): The objects found.\n\n \"\"\"\n # query, including all subclasses\n paths = [self.model.path] + [\n \"%s.%s\" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)\n ]\n kwargs.update({\"db_typeclass_path__in\": paths})\n return super().filter(*args, **kwargs)\n\n def all_family(self):\n \"\"\"\n Return all matches, allowing matches from all subclasses of\n the typeclass.\n\n Returns:\n objects (list): The objects found.\n\n \"\"\"\n paths = [self.model.path] + [\n \"%s.%s\" % (cls.__module__, cls.__name__) for cls in self._get_subclasses(self.model)\n ]\n return super().all().filter(db_typeclass_path__in=paths)\n", "path": "evennia/typeclasses/managers.py"}]} |
gh_patches_debug_1639 | rasdani/github-patches | git_diff | openfun__richie-1960 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
MultipleObjectsReturned error when syncing a course from LMS to Richie
## Bug Report
**Problematic Behavior**
An error is raised when synchronizing a course from the LMS to Richie after adding a course code.
**Expected behavior/code**
The course run should sync without errors; instead, the Richie API raises:
```
get() returned more than one Course -- it returned 2!
ERROR 2023-04-06 17:06:39,973 log 42 140255949047616 Internal Server Error: /api/v1.0/course-runs-sync/
Traceback (most recent call last):
File "/usr/local/lib/python3.10/site-packages/django/core/handlers/exception.py", line 47, in inner
response = get_response(request)
File "/usr/local/lib/python3.10/site-packages/django/core/handlers/base.py", line 181, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "/usr/local/lib/python3.10/site-packages/django/views/decorators/csrf.py", line 54, in wrapped_view
return view_func(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/django/views/generic/base.py", line 70, in view
return self.dispatch(request, *args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/rest_framework/views.py", line 509, in dispatch
response = self.handle_exception(exc)
File "/usr/local/lib/python3.10/site-packages/rest_framework/views.py", line 469, in handle_exception
self.raise_uncaught_exception(exc)
File "/usr/local/lib/python3.10/site-packages/rest_framework/views.py", line 480, in raise_uncaught_exception
raise exc
File "/usr/local/lib/python3.10/site-packages/rest_framework/views.py", line 506, in dispatch
response = handler(request, *args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/rest_framework/decorators.py", line 50, in handler
return func(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/richie/apps/courses/api.py", line 270, in sync_course_runs_from_request
sync_course_run(request.data)
File "/usr/local/lib/python3.10/site-packages/richie/apps/courses/api.py", line 163, in sync_course_run
course = Course.objects.get(
File "/usr/local/lib/python3.10/site-packages/django/db/models/manager.py", line 85, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/django/db/models/query.py", line 439, in get
raise self.model.MultipleObjectsReturned(
richie.apps.courses.models.course.Course.MultipleObjectsReturned: get() returned more than one Course -- it returned 2!
```
**Steps to Reproduce**
1. Create a new course
2. Add a code to the course
3. Wait for the LMS to call the course run sync web hook (`/api/v1.0/course-runs-sync/`), and then the bug happens! (A sketch of how to trigger the call by hand follows below.)
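To trigger step 3 without waiting for the LMS, one can post a signed payload to the web hook by hand. This is a hypothetical sketch only: the URL, course key and secret are placeholders, the payload fields mirror the `sync_course_run` docstring in `api.py` below, and the `SIG-HMAC-SHA256` authorization scheme is an assumption that should be checked against the Richie docs:
```python
import hashlib
import hmac
import json

import requests

payload = {
    "resource_link": "https://lms.example.com/courses/course-v1:NAU+FF_PP+2023/course/",
    "start": "2023-04-01T00:00:00Z",
    "end": "2023-06-30T00:00:00Z",
    "enrollment_start": "2023-03-01T00:00:00Z",
    "enrollment_end": "2023-05-01T00:00:00Z",
    "languages": ["pt"],
    "enrollment_count": 42,
    "catalog_visibility": "course_and_search",
}
body = json.dumps(payload).encode("utf-8")
# The web hook authenticates the caller with an HMAC of the raw request body.
signature = hmac.new(b"shared-secret", body, hashlib.sha256).hexdigest()
response = requests.post(
    "https://richie.example.com/api/v1.0/course-runs-sync/",
    data=body,
    headers={
        "Content-Type": "application/json",
        "Authorization": f"SIG-HMAC-SHA256 {signature}",
    },
    timeout=10,
)
print(response.status_code, response.text)
```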
**Environment**
- Richie version: `2.21.0`
- Platform: Linux
**Additional context/Screenshots**
When I run this in the Python shell (`python manage.py shell`):
```python
>>> from richie.apps.courses.models import Course
>>> course_code='FF_PP'
>>> Course.objects.get(code=course_code, extended_object__publisher_is_draft=True, extended_object__node__parent__cms_pages__course__isnull=True, )
Traceback (most recent call last):
File "<console>", line 1, in <module>
File "/usr/local/lib/python3.10/site-packages/django/db/models/manager.py", line 85, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.10/site-packages/django/db/models/query.py", line 439, in get
raise self.model.MultipleObjectsReturned(
richie.apps.courses.models.course.Course.MultipleObjectsReturned: get() returned more than one Course -- it returned 2!
```
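For anyone wanting to check what the duplicates are, here is a hypothetical diagnostic sketch using the same models and lookup as above (`FF_PP` is just my course code):
```python
from richie.apps.courses.models import Course

# List every draft Course matched by the snapshot-exclusion lookup, to check
# whether the two results are different pages or the same page duplicated by
# the cms_pages join.
qs = Course.objects.filter(
    code="FF_PP",
    extended_object__publisher_is_draft=True,
    extended_object__node__parent__cms_pages__course__isnull=True,
)
for course in qs:
    print(course.pk, course.extended_object.get_title())
print("distinct:", qs.distinct().count())
```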
URL: https://www.nau.edu.pt/pt/curso/primeiros-passos-em-folha-de-calculo-formulas-e-funcoes-nivel-1/
Nevertheless, I found out that the synchronization is actually running: the number of course enrollments has been updated automatically.
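If the two matches turn out to be the same course page duplicated by the join, a possible workaround is to collapse the duplicates before calling `get()`. This is an untested sketch of a drop-in replacement for the lookup at lines 163-168 of `api.py`, not necessarily the fix the maintainers will choose:
```python
# Hypothetical variant of the lookup in sync_course_run: .distinct() collapses
# duplicate rows introduced by the cms_pages join in the snapshot-exclusion
# filter, so .get() sees a single Course again.
course = (
    Course.objects.filter(
        code=course_code,
        extended_object__publisher_is_draft=True,
        extended_object__node__parent__cms_pages__course__isnull=True,
    )
    .distinct()
    .get()
)
```
Note this only helps when the duplication comes from the join; if two distinct draft pages really shared the code `FF_PP`, `get()` would still legitimately fail.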
--- END ISSUE ---
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `src/richie/apps/courses/api.py`
Content:
```
1 """
2 API endpoints for the courses app.
3 """
4 from django.conf import settings
5 from django.db.models import Q
6
7 from cms import signals as cms_signals
8 from cms.models import Page
9 from rest_framework.decorators import api_view
10 from rest_framework.exceptions import ValidationError
11 from rest_framework.permissions import BasePermission
12 from rest_framework.response import Response
13 from rest_framework.serializers import as_serializer_error
14 from rest_framework.viewsets import ModelViewSet
15
16 from .exceptions import MissingResourceLinkError
17 from .lms import LMSHandler
18 from .models import Course, CourseRun, CourseRunSyncMode
19 from .serializers import CourseRunSerializer
20 from .utils import get_signature, normalize_code
21
22
23 class NotAllowed(BasePermission):
24 """
25 Utility permission class to deny all requests. This is used as a default to close
26 requests to unsupported actions.
27 """
28
29 def has_permission(self, request, view):
30 """
31 Always deny permission.
32 """
33 return False
34
35
36 class CourseRunsViewSet(ModelViewSet):
37 """
38 API endpoints to access and perform actions on course runs.
39 """
40
41 permission_classes = [NotAllowed]
42 queryset = CourseRun.objects.all()
43 serializer_class = CourseRunSerializer
44
45 def get_permissions(self):
46 """
47 Manage permissions for builtin DRF methods on ViewSets.
48 """
49 if self.action == "retrieve":
50 permission_classes = []
51 else:
52 try:
53 permission_classes = getattr(self, self.action).kwargs.get(
54 "permission_classes"
55 )
56 except AttributeError:
57 permission_classes = self.permission_classes
58
59 return [permission() for permission in permission_classes]
60
61
62 # pylint: disable=too-many-locals,too-many-branches
63 def sync_course_run(data):
64     """
65 Synchronize a course run from its data.
66
67 Parameters
68 ----------
69 data : dict
70 A dictionary describing a course run of the form:
71 {
72 "resource_link": "http://example.edx:8073/courses/course-v1:edX+DemoX+01/course/",
73 "start": "2020-12-09T09:31:59.417817Z",
74 "end": "2021-03-14T09:31:59.417895Z",
75 "enrollment_start": "2020-11-09T09:31:59.417936Z",
76 "enrollment_end": "2020-12-24T09:31:59.417972Z",
77 "languages": ["en", "fr"],
78 "enrollment_count": 42,
79 "catalog_visibility": "course_and_search",
80 }
81
82 Returns
83 -------
84 None or raises:
85 MissingResourceLinkError: the data dictionary is missing a "resource_link" key
86 ValidationError: something is wrong in the data. The error dict describes the error.
87
88 """
89 # Select LMS from resource link
90 resource_link = data.get("resource_link")
91 if not resource_link:
92 raise MissingResourceLinkError()
93
94 lms = LMSHandler.select_lms(resource_link)
95 if lms is None:
96 raise ValidationError(
97 {"resource_link": ["No LMS configuration found for this resource link."]}
98 )
99 sync_mode = lms.default_course_run_sync_mode
100
101 target_course_runs = CourseRun.objects.filter(resource_link=resource_link)
102 draft_course_runs = target_course_runs.filter(draft_course_run__isnull=True)
103
104     # Clean data before instantiating a serializer with it
105 cleaned_data = lms.clean_course_run_data(data)
106 serializer = lms.get_course_run_serializer(
107 cleaned_data, partial=bool(draft_course_runs)
108 )
109
110 if serializer.is_valid() is not True:
111 raise ValidationError(serializer.errors)
112 validated_data = serializer.validated_data
113
114 if draft_course_runs:
115 # Remove fields that are protected for update
116 validated_data = {
117 key: value
118 for (key, value) in validated_data.items()
119 if key not in lms.configuration.get("COURSE_RUN_SYNC_NO_UPDATE_FIELDS", [])
120 }
121
122 for course_run in draft_course_runs.filter(
123 sync_mode__in=[
124 CourseRunSyncMode.SYNC_TO_DRAFT,
125 CourseRunSyncMode.SYNC_TO_PUBLIC,
126 ]
127 ):
128 nb_updated = CourseRun.objects.filter(
129 Q(pk=course_run.pk)
130 | Q(
131 draft_course_run__sync_mode=CourseRunSyncMode.SYNC_TO_PUBLIC,
132 draft_course_run=course_run,
133 )
134 ).update(**validated_data)
135
136 public_course = course_run.direct_course.public_extension
137 if course_run.sync_mode == CourseRunSyncMode.SYNC_TO_PUBLIC:
138 if public_course:
139 # If the public course run did not exist yet it has to be created
140 if nb_updated == 1:
141 public_course.copy_relations(course_run.direct_course)
142
143 # What we did has changed the public course page. We must reindex it
144 cms_signals.post_publish.send(
145 sender=Page,
146 instance=course_run.direct_course.extended_object,
147 language=None,
148 )
149 else:
150 course_run.refresh_from_db()
151 course_run.mark_course_dirty()
152 return
153
154 # We need to create a new course run
155 if lms.default_course_run_sync_mode == CourseRunSyncMode.MANUAL:
156 raise ValidationError(
157 {"resource_link": ["Unknown course run when creation is deactivated."]}
158 )
159
160 # Look for the course targeted by the resource link
161 course_code = normalize_code(lms.extract_course_code(data))
162 try:
163 course = Course.objects.get(
164 code=course_code,
165 extended_object__publisher_is_draft=True,
166 # Exclude snapshots
167 extended_object__node__parent__cms_pages__course__isnull=True,
168 )
169 except Course.DoesNotExist as exc:
170 # Create the course page in draft
171 raise ValidationError(
172 {"resource_link": [f"Unknown course: {course_code:s}."]}
173 ) from exc
174
175 # Instantiate a new draft course run
176 draft_course_run = CourseRun(
177 direct_course=course, sync_mode=sync_mode, **validated_data
178 )
179
180 # Create the related public course run if necessary
181 if sync_mode == CourseRunSyncMode.SYNC_TO_PUBLIC:
182 # Don't mark the related course page dirty and directly add
183 # the course run to the corresponding public course page
184 draft_course_run.save()
185 if course.public_extension_id:
186 public_course_run = CourseRun(
187 direct_course=course.public_extension,
188 draft_course_run=draft_course_run,
189 sync_mode=sync_mode,
190 **validated_data,
191 )
192 public_course_run.save()
193
194 # What we did has changed the public course page. We must reindex it
195 cms_signals.post_publish.send(
196 sender=Page, instance=course.extended_object, language=None
197 )
198 else:
199 # Save the draft course run marking the course page dirty
200 draft_course_run.save()
201 draft_course_run.mark_course_dirty()
202
203
204 # pylint: disable=too-many-return-statements,unused-argument, too-many-locals,too-many-branches
205 @api_view(["POST"])
206 def sync_course_runs_from_request(request, version):
207 """View for the web hook to create or update course runs based on their resource link.
208
209 - An existing course run is updated only if its "sync_mode" field is set to something else
210 than "manual".
211
212 - The public version of a course run is updated only if its "sync_mode" field is set to
213 "sync_to_public". Otherwise, only the draft version is updated and the related course
214 is marked dirty.
215
216 - A new course run is created only if the "DEFAULT_COURSE_RUN_SYNC_MODE" parameter is set
217 to something else than "manual" in the lms configuration (or the setting
218 "RICHIE_DEFAULT_COURSE_RUN_SYNC_MODE" in the absence of LMS preference). Otherwise, only
219 existing course runs are updated.
220
221 - A new public course run is created only if the "DEFAULT_COURSE_RUN_SYNC_MODE" parameter
222 is set to "sync_to_public" in the lms configuration (or the setting
223 "RICHIE_DEFAULT_COURSE_RUN_SYNC_MODE" in the absence of LMS preference). Otherwise, only
224 the draft course run is created and the related course is marked dirty.
225
226 Parameters
227 ----------
228 request : Type[django.http.request.HttpRequest]
229 The request on the API endpoint, it should contain a payload with course run fields.
230
231 Returns
232 -------
233 Type[rest_framework.response.Response]
234 HttpResponse acknowledging the success or failure of the synchronization operation.
235 """
236 message = request.body.decode("utf-8")
237
238 # Check if the provided signature is valid against any secret in our list
239 #
240 # We need to do this to support 2 or more versions of our infrastructure at the same time.
241 # It then enables us to do updates and change the secret without incurring downtime.
242 authorization_header = request.headers.get("Authorization")
243 if not authorization_header:
244 return Response("Missing authentication.", status=403)
245
246 signature_is_valid = any(
247 authorization_header == get_signature(message, secret)
248 for secret in getattr(settings, "RICHIE_COURSE_RUN_SYNC_SECRETS", [])
249 )
250
251 if not signature_is_valid:
252 return Response("Invalid authentication.", status=401)
253
254 if isinstance(request.data, (list, tuple)):
255 result = {}
256 status = 200
257 if not all("resource_link" in d for d in request.data):
258 return Response({"resource_link": ["This field is required."]}, status=400)
259 for data in request.data:
260 try:
261 sync_course_run(data)
262 except ValidationError as error:
263 result[data["resource_link"]] = as_serializer_error(error)
264 status = 400
265 else:
266 result[data["resource_link"]] = {"success": True}
267 return Response(result, status=status)
268
269 try:
270 sync_course_run(request.data)
271 except MissingResourceLinkError:
272 return Response({"resource_link": ["This field is required."]}, status=400)
273
274 return Response({"success": True})
275
```
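The signature check in `sync_course_runs_from_request` compares the `Authorization` header against `get_signature(message, secret)` for every secret in `RICHIE_COURSE_RUN_SYNC_SECRETS`, which is what allows rotating a secret without downtime. As a sketch of what such a helper can look like, the implementation below is an assumption rather than richie's actual `get_signature` (which is imported from its utils module), including the `SIG-HMAC-SHA256` prefix:

```python
import hashlib
import hmac


def get_signature(message, secret):
    # Hex-encoded HMAC-SHA256 over the raw request body, prefixed so the
    # result can be compared verbatim with the Authorization header.
    digest = hmac.new(
        secret.encode("utf-8"), message.encode("utf-8"), hashlib.sha256
    ).hexdigest()
    return "SIG-HMAC-SHA256 {:s}".format(digest)
```

A webhook caller would compute this value over the exact JSON body it posts and send it as the `Authorization` header.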
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
diff --git a/src/richie/apps/courses/api.py b/src/richie/apps/courses/api.py
--- a/src/richie/apps/courses/api.py
+++ b/src/richie/apps/courses/api.py
@@ -160,7 +160,7 @@
# Look for the course targeted by the resource link
course_code = normalize_code(lms.extract_course_code(data))
try:
- course = Course.objects.get(
+ course = Course.objects.distinct().get(
code=course_code,
extended_object__publisher_is_draft=True,
# Exclude snapshots
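The one-line fix works because the lookup `extended_object__node__parent__cms_pages__course__isnull=True` traverses a to-many relation: as soon as the parent node is reachable through several `cms_pages` rows, the SQL join repeats the same draft course once per row and `get()` raises `MultipleObjectsReturned`; `.distinct()` collapses the duplicates before the single-row check. A minimal sketch of the effect, using an invented one-to-many schema rather than richie's real models:

```python
from django.db import models


class Author(models.Model):
    name = models.CharField(max_length=50)


class Book(models.Model):  # many Book rows can point at one Author
    author = models.ForeignKey(Author, on_delete=models.CASCADE)
    title = models.CharField(max_length=50)


# If an author has two matching books, the join yields the author twice:
#   Author.objects.get(book__title__startswith="A")             # may raise
#   Author.objects.distinct().get(book__title__startswith="A")  # one row
```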
| {"golden_diff": "diff --git a/src/richie/apps/courses/api.py b/src/richie/apps/courses/api.py\n--- a/src/richie/apps/courses/api.py\n+++ b/src/richie/apps/courses/api.py\n@@ -160,7 +160,7 @@\n # Look for the course targeted by the resource link\n course_code = normalize_code(lms.extract_course_code(data))\n try:\n- course = Course.objects.get(\n+ course = Course.objects.distinct().get(\n code=course_code,\n extended_object__publisher_is_draft=True,\n # Exclude snapshots\n", "issue": "MultipleObjectsReturned error during sync course from LMS to Richie\n## Bug Report\r\n\r\n**Problematic Behavior**\r\nError synchronizing course from LMS to Richie after adding a course code.\r\n\r\n**Expected behavior/code**\r\nRichie API raises:\r\n```\r\nget() returned more than one Course -- it returned 2!\r\nERROR 2023-04-06 17:06:39,973 log 42 140255949047616 Internal Server Error: /api/v1.0/course-runs-sync/\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.10/site-packages/django/core/handlers/exception.py\", line 47, in inner\r\n response = get_response(request)\r\n File \"/usr/local/lib/python3.10/site-packages/django/core/handlers/base.py\", line 181, in _get_response\r\n response = wrapped_callback(request, *callback_args, **callback_kwargs)\r\n File \"/usr/local/lib/python3.10/site-packages/django/views/decorators/csrf.py\", line 54, in wrapped_view\r\n return view_func(*args, **kwargs)\r\n File \"/usr/local/lib/python3.10/site-packages/django/views/generic/base.py\", line 70, in view\r\n return self.dispatch(request, *args, **kwargs)\r\n File \"/usr/local/lib/python3.10/site-packages/rest_framework/views.py\", line 509, in dispatch\r\n response = self.handle_exception(exc)\r\n File \"/usr/local/lib/python3.10/site-packages/rest_framework/views.py\", line 469, in handle_exception\r\n self.raise_uncaught_exception(exc)\r\n File \"/usr/local/lib/python3.10/site-packages/rest_framework/views.py\", line 480, in raise_uncaught_exception\r\n raise exc\r\n File \"/usr/local/lib/python3.10/site-packages/rest_framework/views.py\", line 506, in dispatch\r\n response = handler(request, *args, **kwargs)\r\n File \"/usr/local/lib/python3.10/site-packages/rest_framework/decorators.py\", line 50, in handler\r\n return func(*args, **kwargs)\r\n File \"/usr/local/lib/python3.10/site-packages/richie/apps/courses/api.py\", line 270, in sync_course_runs_from_request\r\n sync_course_run(request.data)\r\n File \"/usr/local/lib/python3.10/site-packages/richie/apps/courses/api.py\", line 163, in sync_course_run\r\n course = Course.objects.get(\r\n File \"/usr/local/lib/python3.10/site-packages/django/db/models/manager.py\", line 85, in manager_method\r\n return getattr(self.get_queryset(), name)(*args, **kwargs)\r\n File \"/usr/local/lib/python3.10/site-packages/django/db/models/query.py\", line 439, in get\r\n raise self.model.MultipleObjectsReturned(\r\nrichie.apps.courses.models.course.Course.MultipleObjectsReturned: get() returned more than one Course -- it returned 2!\r\n``` \r\n\r\n**Steps to Reproduce**\r\n1. Create a new course\r\n2. Add a code to the course\r\n3. 
And then the bug happens!\r\n\r\n**Environment**\r\n- Richie version: `2.21.0`\r\n- Platform: Linux\r\n\r\n**Additional context/Screenshots**\r\nWhen I run this on python shell - `python manage.py shell`:\r\n```python\r\n>>> from richie.apps.courses.models import Course\r\n>>> course_code='FF_PP'\r\n>>> Course.objects.get(code=course_code, extended_object__publisher_is_draft=True, extended_object__node__parent__cms_pages__course__isnull=True, )\r\nTraceback (most recent call last):\r\n File \"<console>\", line 1, in <module>\r\n File \"/usr/local/lib/python3.10/site-packages/django/db/models/manager.py\", line 85, in manager_method\r\n return getattr(self.get_queryset(), name)(*args, **kwargs)\r\n File \"/usr/local/lib/python3.10/site-packages/django/db/models/query.py\", line 439, in get\r\n raise self.model.MultipleObjectsReturned(\r\nrichie.apps.courses.models.course.Course.MultipleObjectsReturned: get() returned more than one Course -- it returned 2!\r\n``` \r\n\r\nURL: https://www.nau.edu.pt/pt/curso/primeiros-passos-em-folha-de-calculo-formulas-e-funcoes-nivel-1/\r\n\r\nNevertheless, I found out that the synchronization is running and the number of course enrollments have been updated automatically.\n", "before_files": [{"content": "\"\"\"\nAPI endpoints for the courses app.\n\"\"\"\nfrom django.conf import settings\nfrom django.db.models import Q\n\nfrom cms import signals as cms_signals\nfrom cms.models import Page\nfrom rest_framework.decorators import api_view\nfrom rest_framework.exceptions import ValidationError\nfrom rest_framework.permissions import BasePermission\nfrom rest_framework.response import Response\nfrom rest_framework.serializers import as_serializer_error\nfrom rest_framework.viewsets import ModelViewSet\n\nfrom .exceptions import MissingResourceLinkError\nfrom .lms import LMSHandler\nfrom .models import Course, CourseRun, CourseRunSyncMode\nfrom .serializers import CourseRunSerializer\nfrom .utils import get_signature, normalize_code\n\n\nclass NotAllowed(BasePermission):\n \"\"\"\n Utility permission class to deny all requests. 
This is used as a default to close\n requests to unsupported actions.\n \"\"\"\n\n def has_permission(self, request, view):\n \"\"\"\n Always deny permission.\n \"\"\"\n return False\n\n\nclass CourseRunsViewSet(ModelViewSet):\n \"\"\"\n API endpoints to access and perform actions on course runs.\n \"\"\"\n\n permission_classes = [NotAllowed]\n queryset = CourseRun.objects.all()\n serializer_class = CourseRunSerializer\n\n def get_permissions(self):\n \"\"\"\n Manage permissions for builtin DRF methods on ViewSets.\n \"\"\"\n if self.action == \"retrieve\":\n permission_classes = []\n else:\n try:\n permission_classes = getattr(self, self.action).kwargs.get(\n \"permission_classes\"\n )\n except AttributeError:\n permission_classes = self.permission_classes\n\n return [permission() for permission in permission_classes]\n\n\n# pylint: disable=too-many-locals,too-many-branches\ndef sync_course_run(data):\n \"\"\" \"\n Synchronize a course run from its data.\n\n Parameters\n ----------\n data : dict\n A dictionary describing a course run of the form:\n {\n \"resource_link\": \"http://example.edx:8073/courses/course-v1:edX+DemoX+01/course/\",\n \"start\": \"2020-12-09T09:31:59.417817Z\",\n \"end\": \"2021-03-14T09:31:59.417895Z\",\n \"enrollment_start\": \"2020-11-09T09:31:59.417936Z\",\n \"enrollment_end\": \"2020-12-24T09:31:59.417972Z\",\n \"languages\": [\"en\", \"fr\"],\n \"enrollment_count\": 42,\n \"catalog_visibility\": \"course_and_search\",\n }\n\n Returns\n -------\n None or raises:\n MissingResourceLinkError: the data dictionary is missing a \"resource_link\" key\n ValidationError: something is wrong in the data. The error dict describes the error.\n\n \"\"\"\n # Select LMS from resource link\n resource_link = data.get(\"resource_link\")\n if not resource_link:\n raise MissingResourceLinkError()\n\n lms = LMSHandler.select_lms(resource_link)\n if lms is None:\n raise ValidationError(\n {\"resource_link\": [\"No LMS configuration found for this resource link.\"]}\n )\n sync_mode = lms.default_course_run_sync_mode\n\n target_course_runs = CourseRun.objects.filter(resource_link=resource_link)\n draft_course_runs = target_course_runs.filter(draft_course_run__isnull=True)\n\n # Clean data before instiating a serializer with it\n cleaned_data = lms.clean_course_run_data(data)\n serializer = lms.get_course_run_serializer(\n cleaned_data, partial=bool(draft_course_runs)\n )\n\n if serializer.is_valid() is not True:\n raise ValidationError(serializer.errors)\n validated_data = serializer.validated_data\n\n if draft_course_runs:\n # Remove fields that are protected for update\n validated_data = {\n key: value\n for (key, value) in validated_data.items()\n if key not in lms.configuration.get(\"COURSE_RUN_SYNC_NO_UPDATE_FIELDS\", [])\n }\n\n for course_run in draft_course_runs.filter(\n sync_mode__in=[\n CourseRunSyncMode.SYNC_TO_DRAFT,\n CourseRunSyncMode.SYNC_TO_PUBLIC,\n ]\n ):\n nb_updated = CourseRun.objects.filter(\n Q(pk=course_run.pk)\n | Q(\n draft_course_run__sync_mode=CourseRunSyncMode.SYNC_TO_PUBLIC,\n draft_course_run=course_run,\n )\n ).update(**validated_data)\n\n public_course = course_run.direct_course.public_extension\n if course_run.sync_mode == CourseRunSyncMode.SYNC_TO_PUBLIC:\n if public_course:\n # If the public course run did not exist yet it has to be created\n if nb_updated == 1:\n public_course.copy_relations(course_run.direct_course)\n\n # What we did has changed the public course page. 
We must reindex it\n cms_signals.post_publish.send(\n sender=Page,\n instance=course_run.direct_course.extended_object,\n language=None,\n )\n else:\n course_run.refresh_from_db()\n course_run.mark_course_dirty()\n return\n\n # We need to create a new course run\n if lms.default_course_run_sync_mode == CourseRunSyncMode.MANUAL:\n raise ValidationError(\n {\"resource_link\": [\"Unknown course run when creation is deactivated.\"]}\n )\n\n # Look for the course targeted by the resource link\n course_code = normalize_code(lms.extract_course_code(data))\n try:\n course = Course.objects.get(\n code=course_code,\n extended_object__publisher_is_draft=True,\n # Exclude snapshots\n extended_object__node__parent__cms_pages__course__isnull=True,\n )\n except Course.DoesNotExist as exc:\n # Create the course page in draft\n raise ValidationError(\n {\"resource_link\": [f\"Unknown course: {course_code:s}.\"]}\n ) from exc\n\n # Instantiate a new draft course run\n draft_course_run = CourseRun(\n direct_course=course, sync_mode=sync_mode, **validated_data\n )\n\n # Create the related public course run if necessary\n if sync_mode == CourseRunSyncMode.SYNC_TO_PUBLIC:\n # Don't mark the related course page dirty and directly add\n # the course run to the corresponding public course page\n draft_course_run.save()\n if course.public_extension_id:\n public_course_run = CourseRun(\n direct_course=course.public_extension,\n draft_course_run=draft_course_run,\n sync_mode=sync_mode,\n **validated_data,\n )\n public_course_run.save()\n\n # What we did has changed the public course page. We must reindex it\n cms_signals.post_publish.send(\n sender=Page, instance=course.extended_object, language=None\n )\n else:\n # Save the draft course run marking the course page dirty\n draft_course_run.save()\n draft_course_run.mark_course_dirty()\n\n\n# pylint: disable=too-many-return-statements,unused-argument, too-many-locals,too-many-branches\n@api_view([\"POST\"])\ndef sync_course_runs_from_request(request, version):\n \"\"\"View for the web hook to create or update course runs based on their resource link.\n\n - An existing course run is updated only if its \"sync_mode\" field is set to something else\n than \"manual\".\n\n - The public version of a course run is updated only if its \"sync_mode\" field is set to\n \"sync_to_public\". Otherwise, only the draft version is updated and the related course\n is marked dirty.\n\n - A new course run is created only if the \"DEFAULT_COURSE_RUN_SYNC_MODE\" parameter is set\n to something else than \"manual\" in the lms configuration (or the setting\n \"RICHIE_DEFAULT_COURSE_RUN_SYNC_MODE\" in the absence of LMS preference). Otherwise, only\n existing course runs are updated.\n\n - A new public course run is created only if the \"DEFAULT_COURSE_RUN_SYNC_MODE\" parameter\n is set to \"sync_to_public\" in the lms configuration (or the setting\n \"RICHIE_DEFAULT_COURSE_RUN_SYNC_MODE\" in the absence of LMS preference). 
Otherwise, only\n the draft course run is created and the related course is marked dirty.\n\n Parameters\n ----------\n request : Type[django.http.request.HttpRequest]\n The request on the API endpoint, it should contain a payload with course run fields.\n\n Returns\n -------\n Type[rest_framework.response.Response]\n HttpResponse acknowledging the success or failure of the synchronization operation.\n \"\"\"\n message = request.body.decode(\"utf-8\")\n\n # Check if the provided signature is valid against any secret in our list\n #\n # We need to do this to support 2 or more versions of our infrastructure at the same time.\n # It then enables us to do updates and change the secret without incurring downtime.\n authorization_header = request.headers.get(\"Authorization\")\n if not authorization_header:\n return Response(\"Missing authentication.\", status=403)\n\n signature_is_valid = any(\n authorization_header == get_signature(message, secret)\n for secret in getattr(settings, \"RICHIE_COURSE_RUN_SYNC_SECRETS\", [])\n )\n\n if not signature_is_valid:\n return Response(\"Invalid authentication.\", status=401)\n\n if isinstance(request.data, (list, tuple)):\n result = {}\n status = 200\n if not all(\"resource_link\" in d for d in request.data):\n return Response({\"resource_link\": [\"This field is required.\"]}, status=400)\n for data in request.data:\n try:\n sync_course_run(data)\n except ValidationError as error:\n result[data[\"resource_link\"]] = as_serializer_error(error)\n status = 400\n else:\n result[data[\"resource_link\"]] = {\"success\": True}\n return Response(result, status=status)\n\n try:\n sync_course_run(request.data)\n except MissingResourceLinkError:\n return Response({\"resource_link\": [\"This field is required.\"]}, status=400)\n\n return Response({\"success\": True})\n", "path": "src/richie/apps/courses/api.py"}], "after_files": [{"content": "\"\"\"\nAPI endpoints for the courses app.\n\"\"\"\nfrom django.conf import settings\nfrom django.db.models import Q\n\nfrom cms import signals as cms_signals\nfrom cms.models import Page\nfrom rest_framework.decorators import api_view\nfrom rest_framework.exceptions import ValidationError\nfrom rest_framework.permissions import BasePermission\nfrom rest_framework.response import Response\nfrom rest_framework.serializers import as_serializer_error\nfrom rest_framework.viewsets import ModelViewSet\n\nfrom .exceptions import MissingResourceLinkError\nfrom .lms import LMSHandler\nfrom .models import Course, CourseRun, CourseRunSyncMode\nfrom .serializers import CourseRunSerializer\nfrom .utils import get_signature, normalize_code\n\n\nclass NotAllowed(BasePermission):\n \"\"\"\n Utility permission class to deny all requests. 
This is used as a default to close\n requests to unsupported actions.\n \"\"\"\n\n def has_permission(self, request, view):\n \"\"\"\n Always deny permission.\n \"\"\"\n return False\n\n\nclass CourseRunsViewSet(ModelViewSet):\n \"\"\"\n API endpoints to access and perform actions on course runs.\n \"\"\"\n\n permission_classes = [NotAllowed]\n queryset = CourseRun.objects.all()\n serializer_class = CourseRunSerializer\n\n def get_permissions(self):\n \"\"\"\n Manage permissions for builtin DRF methods on ViewSets.\n \"\"\"\n if self.action == \"retrieve\":\n permission_classes = []\n else:\n try:\n permission_classes = getattr(self, self.action).kwargs.get(\n \"permission_classes\"\n )\n except AttributeError:\n permission_classes = self.permission_classes\n\n return [permission() for permission in permission_classes]\n\n\n# pylint: disable=too-many-locals,too-many-branches\ndef sync_course_run(data):\n \"\"\" \"\n Synchronize a course run from its data.\n\n Parameters\n ----------\n data : dict\n A dictionary describing a course run of the form:\n {\n \"resource_link\": \"http://example.edx:8073/courses/course-v1:edX+DemoX+01/course/\",\n \"start\": \"2020-12-09T09:31:59.417817Z\",\n \"end\": \"2021-03-14T09:31:59.417895Z\",\n \"enrollment_start\": \"2020-11-09T09:31:59.417936Z\",\n \"enrollment_end\": \"2020-12-24T09:31:59.417972Z\",\n \"languages\": [\"en\", \"fr\"],\n \"enrollment_count\": 42,\n \"catalog_visibility\": \"course_and_search\",\n }\n\n Returns\n -------\n None or raises:\n MissingResourceLinkError: the data dictionary is missing a \"resource_link\" key\n ValidationError: something is wrong in the data. The error dict describes the error.\n\n \"\"\"\n # Select LMS from resource link\n resource_link = data.get(\"resource_link\")\n if not resource_link:\n raise MissingResourceLinkError()\n\n lms = LMSHandler.select_lms(resource_link)\n if lms is None:\n raise ValidationError(\n {\"resource_link\": [\"No LMS configuration found for this resource link.\"]}\n )\n sync_mode = lms.default_course_run_sync_mode\n\n target_course_runs = CourseRun.objects.filter(resource_link=resource_link)\n draft_course_runs = target_course_runs.filter(draft_course_run__isnull=True)\n\n # Clean data before instiating a serializer with it\n cleaned_data = lms.clean_course_run_data(data)\n serializer = lms.get_course_run_serializer(\n cleaned_data, partial=bool(draft_course_runs)\n )\n\n if serializer.is_valid() is not True:\n raise ValidationError(serializer.errors)\n validated_data = serializer.validated_data\n\n if draft_course_runs:\n # Remove fields that are protected for update\n validated_data = {\n key: value\n for (key, value) in validated_data.items()\n if key not in lms.configuration.get(\"COURSE_RUN_SYNC_NO_UPDATE_FIELDS\", [])\n }\n\n for course_run in draft_course_runs.filter(\n sync_mode__in=[\n CourseRunSyncMode.SYNC_TO_DRAFT,\n CourseRunSyncMode.SYNC_TO_PUBLIC,\n ]\n ):\n nb_updated = CourseRun.objects.filter(\n Q(pk=course_run.pk)\n | Q(\n draft_course_run__sync_mode=CourseRunSyncMode.SYNC_TO_PUBLIC,\n draft_course_run=course_run,\n )\n ).update(**validated_data)\n\n public_course = course_run.direct_course.public_extension\n if course_run.sync_mode == CourseRunSyncMode.SYNC_TO_PUBLIC:\n if public_course:\n # If the public course run did not exist yet it has to be created\n if nb_updated == 1:\n public_course.copy_relations(course_run.direct_course)\n\n # What we did has changed the public course page. 
We must reindex it\n cms_signals.post_publish.send(\n sender=Page,\n instance=course_run.direct_course.extended_object,\n language=None,\n )\n else:\n course_run.refresh_from_db()\n course_run.mark_course_dirty()\n return\n\n # We need to create a new course run\n if lms.default_course_run_sync_mode == CourseRunSyncMode.MANUAL:\n raise ValidationError(\n {\"resource_link\": [\"Unknown course run when creation is deactivated.\"]}\n )\n\n # Look for the course targeted by the resource link\n course_code = normalize_code(lms.extract_course_code(data))\n try:\n course = Course.objects.distinct().get(\n code=course_code,\n extended_object__publisher_is_draft=True,\n # Exclude snapshots\n extended_object__node__parent__cms_pages__course__isnull=True,\n )\n except Course.DoesNotExist as exc:\n # Create the course page in draft\n raise ValidationError(\n {\"resource_link\": [f\"Unknown course: {course_code:s}.\"]}\n ) from exc\n\n # Instantiate a new draft course run\n draft_course_run = CourseRun(\n direct_course=course, sync_mode=sync_mode, **validated_data\n )\n\n # Create the related public course run if necessary\n if sync_mode == CourseRunSyncMode.SYNC_TO_PUBLIC:\n # Don't mark the related course page dirty and directly add\n # the course run to the corresponding public course page\n draft_course_run.save()\n if course.public_extension_id:\n public_course_run = CourseRun(\n direct_course=course.public_extension,\n draft_course_run=draft_course_run,\n sync_mode=sync_mode,\n **validated_data,\n )\n public_course_run.save()\n\n # What we did has changed the public course page. We must reindex it\n cms_signals.post_publish.send(\n sender=Page, instance=course.extended_object, language=None\n )\n else:\n # Save the draft course run marking the course page dirty\n draft_course_run.save()\n draft_course_run.mark_course_dirty()\n\n\n# pylint: disable=too-many-return-statements,unused-argument, too-many-locals,too-many-branches\n@api_view([\"POST\"])\ndef sync_course_runs_from_request(request, version):\n \"\"\"View for the web hook to create or update course runs based on their resource link.\n\n - An existing course run is updated only if its \"sync_mode\" field is set to something else\n than \"manual\".\n\n - The public version of a course run is updated only if its \"sync_mode\" field is set to\n \"sync_to_public\". Otherwise, only the draft version is updated and the related course\n is marked dirty.\n\n - A new course run is created only if the \"DEFAULT_COURSE_RUN_SYNC_MODE\" parameter is set\n to something else than \"manual\" in the lms configuration (or the setting\n \"RICHIE_DEFAULT_COURSE_RUN_SYNC_MODE\" in the absence of LMS preference). Otherwise, only\n existing course runs are updated.\n\n - A new public course run is created only if the \"DEFAULT_COURSE_RUN_SYNC_MODE\" parameter\n is set to \"sync_to_public\" in the lms configuration (or the setting\n \"RICHIE_DEFAULT_COURSE_RUN_SYNC_MODE\" in the absence of LMS preference). 
Otherwise, only\n the draft course run is created and the related course is marked dirty.\n\n Parameters\n ----------\n request : Type[django.http.request.HttpRequest]\n The request on the API endpoint, it should contain a payload with course run fields.\n\n Returns\n -------\n Type[rest_framework.response.Response]\n HttpResponse acknowledging the success or failure of the synchronization operation.\n \"\"\"\n message = request.body.decode(\"utf-8\")\n\n # Check if the provided signature is valid against any secret in our list\n #\n # We need to do this to support 2 or more versions of our infrastructure at the same time.\n # It then enables us to do updates and change the secret without incurring downtime.\n authorization_header = request.headers.get(\"Authorization\")\n if not authorization_header:\n return Response(\"Missing authentication.\", status=403)\n\n signature_is_valid = any(\n authorization_header == get_signature(message, secret)\n for secret in getattr(settings, \"RICHIE_COURSE_RUN_SYNC_SECRETS\", [])\n )\n\n if not signature_is_valid:\n return Response(\"Invalid authentication.\", status=401)\n\n if isinstance(request.data, (list, tuple)):\n result = {}\n status = 200\n if not all(\"resource_link\" in d for d in request.data):\n return Response({\"resource_link\": [\"This field is required.\"]}, status=400)\n for data in request.data:\n try:\n sync_course_run(data)\n except ValidationError as error:\n result[data[\"resource_link\"]] = as_serializer_error(error)\n status = 400\n else:\n result[data[\"resource_link\"]] = {\"success\": True}\n return Response(result, status=status)\n\n try:\n sync_course_run(request.data)\n except MissingResourceLinkError:\n return Response({\"resource_link\": [\"This field is required.\"]}, status=400)\n\n return Response({\"success\": True})\n", "path": "src/richie/apps/courses/api.py"}]} |
gh_patches_debug_1640 | rasdani/github-patches | git_diff | spotify__luigi-880 | We are currently solving the following issue within our repository. Here is the issue text:
--- BEGIN ISSUE ---
Fix external dynamic deps
Since running tasks communicate with the worker via a queue, all dynamic dependencies that they yield must be serialized and then deserialized back. This doesn't work if a task has `run = NotImplemented`, since there was a specific check for that in `Register`, for no clear reason.
This PR adds a test case to reproduce the issue and fixes it by removing the check.
--- END ISSUE ---
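To make the failure mode concrete, the sketch below shows the round-trip the issue describes: a dynamic dependency yielded from `run()` travels through the worker queue and has to be re-instantiated by name on the other side. `luigi.ExternalTask` is the standard way to get a task whose `run` stays `NotImplemented`; the file paths and task names here are invented:

```python
import luigi


class ExternalData(luigi.ExternalTask):
    # An ExternalTask leaves run = NotImplemented: the target is produced
    # outside luigi and we only declare a dependency on it.
    date = luigi.Parameter()

    def output(self):
        return luigi.LocalTarget("/tmp/data-%s.csv" % self.date)


class Consumer(luigi.Task):
    def output(self):
        return luigi.LocalTarget("/tmp/consumed.csv")

    def run(self):
        # A dynamic dependency: this instance is serialized onto the worker
        # queue and deserialized back via the task register, which used to
        # fail for tasks with run == NotImplemented.
        external = yield ExternalData(date="2015-06-01")
        with external.open("r") as source, self.output().open("w") as sink:
            sink.write(source.read())
```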
Below are some code segments, each from a relevant file. One or more of these files may contain bugs.
--- BEGIN FILES ---
Path: `luigi/task_register.py`
Content:
```
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright 2012-2015 Spotify AB
4 #
5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing, software
12 # distributed under the License is distributed on an "AS IS" BASIS,
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 # See the License for the specific language governing permissions and
15 # limitations under the License.
16 #
17 """
18 Define the centralized register of all :class:`~luigi.task.Task` classes.
19 """
20
21 import abc
22 try:
23 from collections import OrderedDict
24 except ImportError:
25 from ordereddict import OrderedDict
26
27 from luigi import six
28 import logging
29 logger = logging.getLogger('luigi-interface')
30
31
32 class TaskClassException(Exception):
33 pass
34
35
36 class Register(abc.ABCMeta):
37 """
38 The Metaclass of :py:class:`Task`.
39
40 Acts as a global registry of Tasks with the following properties:
41
42 1. Cache instances of objects so that eg. ``X(1, 2, 3)`` always returns the
43 same object.
44 2. Keep track of all subclasses of :py:class:`Task` and expose them.
45 """
46 __instance_cache = {}
47 _default_namespace = None
48 _reg = []
49 AMBIGUOUS_CLASS = object() # Placeholder denoting an error
50 """If this value is returned by :py:meth:`__get_reg` then there is an
51 ambiguous task name (two :py:class:`Task` have the same name). This denotes
52 an error."""
53
54 def __new__(metacls, classname, bases, classdict):
55 """
56 Custom class creation for namespacing.
57
58 Also register all subclasses.
59
60 Set the task namespace to whatever the currently declared namespace is.
61 """
62 if "task_namespace" not in classdict:
63 classdict["task_namespace"] = metacls._default_namespace
64
65 cls = super(Register, metacls).__new__(metacls, classname, bases, classdict)
66 metacls._reg.append(cls)
67
68 return cls
69
70 def __call__(cls, *args, **kwargs):
71 """
72 Custom class instantiation utilizing instance cache.
73
74 If a Task has already been instantiated with the same parameters,
75 the previous instance is returned to reduce number of object instances.
76 """
77 def instantiate():
78 return super(Register, cls).__call__(*args, **kwargs)
79
80 h = cls.__instance_cache
81
82 if h is None: # disabled
83 return instantiate()
84
85 params = cls.get_params()
86 param_values = cls.get_param_values(params, args, kwargs)
87
88 k = (cls, tuple(param_values))
89
90 try:
91 hash(k)
92 except TypeError:
93 logger.debug("Not all parameter values are hashable so instance isn't coming from the cache")
94 return instantiate() # unhashable types in parameters
95
96 if k not in h:
97 h[k] = instantiate()
98
99 return h[k]
100
101 @classmethod
102 def clear_instance_cache(cls):
103 """
104 Clear/Reset the instance cache.
105 """
106 cls.__instance_cache = {}
107
108 @classmethod
109 def disable_instance_cache(cls):
110 """
111 Disables the instance cache.
112 """
113 cls.__instance_cache = None
114
115 @property
116 def task_family(cls):
117 """
118 The task family for the given class.
119
120 If ``cls.task_namespace is None`` then it's the name of the class.
121 Otherwise, ``<task_namespace>.`` is prefixed to the class name.
122 """
123 if cls.task_namespace is None:
124 return cls.__name__
125 else:
126 return "%s.%s" % (cls.task_namespace, cls.__name__)
127
128 @classmethod
129 def __get_reg(cls):
130 """Return all of the registered classes.
131
132         :return: a ``collections.OrderedDict`` of task_family -> class
133 """
134 # We have to do this on-demand in case task names have changed later
135 # We return this in a topologically sorted list of inheritance: this is useful in some cases (#822)
136 reg = OrderedDict()
137 for cls in cls._reg:
138 if cls.run == NotImplemented:
139 continue
140 name = cls.task_family
141
142 if name in reg and reg[name] != cls and \
143 reg[name] != cls.AMBIGUOUS_CLASS and \
144 not issubclass(cls, reg[name]):
145 # Registering two different classes - this means we can't instantiate them by name
146 # The only exception is if one class is a subclass of the other. In that case, we
147 # instantiate the most-derived class (this fixes some issues with decorator wrappers).
148 reg[name] = cls.AMBIGUOUS_CLASS
149 else:
150 reg[name] = cls
151
152 return reg
153
154 @classmethod
155 def task_names(cls):
156 """
157 List of task names as strings
158 """
159 return sorted(cls.__get_reg().keys())
160
161 @classmethod
162 def tasks_str(cls):
163 """
164 Human-readable register contents dump.
165 """
166 return ','.join(cls.task_names())
167
168 @classmethod
169 def get_task_cls(cls, name):
170 """
171 Returns an unambiguous class or raises an exception.
172 """
173 task_cls = cls.__get_reg().get(name)
174 if not task_cls:
175 raise TaskClassException('Task %r not found. Candidates are: %s' % (name, cls.tasks_str()))
176
177 if task_cls == cls.AMBIGUOUS_CLASS:
178 raise TaskClassException('Task %r is ambiguous' % name)
179 return task_cls
180
181 @classmethod
182 def get_all_params(cls):
183 """
184 Compiles and returns all parameters for all :py:class:`Task`.
185
186 :return: a generator of tuples (TODO: we should make this more elegant)
187 """
188 for task_name, task_cls in six.iteritems(cls.__get_reg()):
189 if task_cls == cls.AMBIGUOUS_CLASS:
190 continue
191 for param_name, param_obj in task_cls.get_params():
192 yield task_name, (not task_cls.use_cmdline_section), param_name, param_obj
193
194
195 def load_task(module, task_name, params_str):
196 """
197 Imports task dynamically given a module and a task name.
198 """
199 if module is not None:
200 __import__(module)
201 task_cls = Register.get_task_cls(task_name)
202 return task_cls.from_str_params(params_str)
203
```
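Because `Register.__call__` caches instances keyed on the class plus its parameter values, constructing a task twice with the same hashable parameters returns the very same object. A quick illustration with an invented task:

```python
import luigi


class MyTask(luigi.Task):
    n = luigi.IntParameter()


assert MyTask(n=1) is MyTask(n=1)      # served from the instance cache
assert MyTask(n=1) is not MyTask(n=2)  # new parameter values, new instance
```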
--- END FILES ---
Please first localize the bug based on the issue statement, and then generate a patch according to the `git diff` format fenced by three backticks.
Here is an example:
```diff
diff --git a/examples/server_async.py b/examples/server_async.py
--- a/examples/server_async.py
+++ b/examples/server_async.py
@@ -313,4 +313,4 @@
if __name__ == "__main__":
- asyncio.run(run_async_server("."), debug=True)
+ asyncio.run(run_async_server(), debug=True)
diff --git a/examples/server_sync.py b/examples/server_sync.py
--- a/examples/server_sync.py
+++ b/examples/server_sync.py
@@ -313,5 +313,5 @@
if __name__ == "__main__":
- server = run_sync_server(".")
+ server = run_sync_server()
server.shutdown()
```
diff --git a/luigi/task_register.py b/luigi/task_register.py
--- a/luigi/task_register.py
+++ b/luigi/task_register.py
@@ -135,8 +135,6 @@
# We return this in a topologically sorted list of inheritance: this is useful in some cases (#822)
reg = OrderedDict()
for cls in cls._reg:
- if cls.run == NotImplemented:
- continue
name = cls.task_family
if name in reg and reg[name] != cls and \
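With the check removed, `__get_reg()` also indexes run-less tasks, so `load_task` can resolve them by name when a worker deserializes a dynamic dependency. For example, with the invented module and task from the issue sketch above:

```python
from luigi.task_register import load_task

# Re-instantiate the dependency the same way the worker queue does; before
# this patch, a run-less task was missing from the register and this lookup
# raised TaskClassException.
task = load_task("mypackage.tasks", "ExternalData", {"date": "2015-06-01"})
```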
| {"golden_diff": "diff --git a/luigi/task_register.py b/luigi/task_register.py\n--- a/luigi/task_register.py\n+++ b/luigi/task_register.py\n@@ -135,8 +135,6 @@\n # We return this in a topologically sorted list of inheritance: this is useful in some cases (#822)\n reg = OrderedDict()\n for cls in cls._reg:\n- if cls.run == NotImplemented:\n- continue\n name = cls.task_family\n \n if name in reg and reg[name] != cls and \\\n", "issue": "Fix external dynamic deps\nSince running tasks communicate with worker via a queue, all dynamic dependencies that they yield must be serialized and then deserialized back. This doesn't work if a task has `run = NotImplemented`, since there was a specific check for that in Register for unclear reason.\n\nThis PR adds a test case to reproduce the issue and fixes it by removing the check.\n\n", "before_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright 2012-2015 Spotify AB\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\"\"\"\nDefine the centralized register of all :class:`~luigi.task.Task` classes.\n\"\"\"\n\nimport abc\ntry:\n from collections import OrderedDict\nexcept ImportError:\n from ordereddict import OrderedDict\n\nfrom luigi import six\nimport logging\nlogger = logging.getLogger('luigi-interface')\n\n\nclass TaskClassException(Exception):\n pass\n\n\nclass Register(abc.ABCMeta):\n \"\"\"\n The Metaclass of :py:class:`Task`.\n\n Acts as a global registry of Tasks with the following properties:\n\n 1. Cache instances of objects so that eg. ``X(1, 2, 3)`` always returns the\n same object.\n 2. Keep track of all subclasses of :py:class:`Task` and expose them.\n \"\"\"\n __instance_cache = {}\n _default_namespace = None\n _reg = []\n AMBIGUOUS_CLASS = object() # Placeholder denoting an error\n \"\"\"If this value is returned by :py:meth:`__get_reg` then there is an\n ambiguous task name (two :py:class:`Task` have the same name). 
This denotes\n an error.\"\"\"\n\n def __new__(metacls, classname, bases, classdict):\n \"\"\"\n Custom class creation for namespacing.\n\n Also register all subclasses.\n\n Set the task namespace to whatever the currently declared namespace is.\n \"\"\"\n if \"task_namespace\" not in classdict:\n classdict[\"task_namespace\"] = metacls._default_namespace\n\n cls = super(Register, metacls).__new__(metacls, classname, bases, classdict)\n metacls._reg.append(cls)\n\n return cls\n\n def __call__(cls, *args, **kwargs):\n \"\"\"\n Custom class instantiation utilizing instance cache.\n\n If a Task has already been instantiated with the same parameters,\n the previous instance is returned to reduce number of object instances.\n \"\"\"\n def instantiate():\n return super(Register, cls).__call__(*args, **kwargs)\n\n h = cls.__instance_cache\n\n if h is None: # disabled\n return instantiate()\n\n params = cls.get_params()\n param_values = cls.get_param_values(params, args, kwargs)\n\n k = (cls, tuple(param_values))\n\n try:\n hash(k)\n except TypeError:\n logger.debug(\"Not all parameter values are hashable so instance isn't coming from the cache\")\n return instantiate() # unhashable types in parameters\n\n if k not in h:\n h[k] = instantiate()\n\n return h[k]\n\n @classmethod\n def clear_instance_cache(cls):\n \"\"\"\n Clear/Reset the instance cache.\n \"\"\"\n cls.__instance_cache = {}\n\n @classmethod\n def disable_instance_cache(cls):\n \"\"\"\n Disables the instance cache.\n \"\"\"\n cls.__instance_cache = None\n\n @property\n def task_family(cls):\n \"\"\"\n The task family for the given class.\n\n If ``cls.task_namespace is None`` then it's the name of the class.\n Otherwise, ``<task_namespace>.`` is prefixed to the class name.\n \"\"\"\n if cls.task_namespace is None:\n return cls.__name__\n else:\n return \"%s.%s\" % (cls.task_namespace, cls.__name__)\n\n @classmethod\n def __get_reg(cls):\n \"\"\"Return all of the registered classes.\n\n :return: an ``collections.OrderedDict`` of task_family -> class\n \"\"\"\n # We have to do this on-demand in case task names have changed later\n # We return this in a topologically sorted list of inheritance: this is useful in some cases (#822)\n reg = OrderedDict()\n for cls in cls._reg:\n if cls.run == NotImplemented:\n continue\n name = cls.task_family\n\n if name in reg and reg[name] != cls and \\\n reg[name] != cls.AMBIGUOUS_CLASS and \\\n not issubclass(cls, reg[name]):\n # Registering two different classes - this means we can't instantiate them by name\n # The only exception is if one class is a subclass of the other. In that case, we\n # instantiate the most-derived class (this fixes some issues with decorator wrappers).\n reg[name] = cls.AMBIGUOUS_CLASS\n else:\n reg[name] = cls\n\n return reg\n\n @classmethod\n def task_names(cls):\n \"\"\"\n List of task names as strings\n \"\"\"\n return sorted(cls.__get_reg().keys())\n\n @classmethod\n def tasks_str(cls):\n \"\"\"\n Human-readable register contents dump.\n \"\"\"\n return ','.join(cls.task_names())\n\n @classmethod\n def get_task_cls(cls, name):\n \"\"\"\n Returns an unambiguous class or raises an exception.\n \"\"\"\n task_cls = cls.__get_reg().get(name)\n if not task_cls:\n raise TaskClassException('Task %r not found. 
Candidates are: %s' % (name, cls.tasks_str()))\n\n if task_cls == cls.AMBIGUOUS_CLASS:\n raise TaskClassException('Task %r is ambiguous' % name)\n return task_cls\n\n @classmethod\n def get_all_params(cls):\n \"\"\"\n Compiles and returns all parameters for all :py:class:`Task`.\n\n :return: a generator of tuples (TODO: we should make this more elegant)\n \"\"\"\n for task_name, task_cls in six.iteritems(cls.__get_reg()):\n if task_cls == cls.AMBIGUOUS_CLASS:\n continue\n for param_name, param_obj in task_cls.get_params():\n yield task_name, (not task_cls.use_cmdline_section), param_name, param_obj\n\n\ndef load_task(module, task_name, params_str):\n \"\"\"\n Imports task dynamically given a module and a task name.\n \"\"\"\n if module is not None:\n __import__(module)\n task_cls = Register.get_task_cls(task_name)\n return task_cls.from_str_params(params_str)\n", "path": "luigi/task_register.py"}], "after_files": [{"content": "# -*- coding: utf-8 -*-\n#\n# Copyright 2012-2015 Spotify AB\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\"\"\"\nDefine the centralized register of all :class:`~luigi.task.Task` classes.\n\"\"\"\n\nimport abc\ntry:\n from collections import OrderedDict\nexcept ImportError:\n from ordereddict import OrderedDict\n\nfrom luigi import six\nimport logging\nlogger = logging.getLogger('luigi-interface')\n\n\nclass TaskClassException(Exception):\n pass\n\n\nclass Register(abc.ABCMeta):\n \"\"\"\n The Metaclass of :py:class:`Task`.\n\n Acts as a global registry of Tasks with the following properties:\n\n 1. Cache instances of objects so that eg. ``X(1, 2, 3)`` always returns the\n same object.\n 2. Keep track of all subclasses of :py:class:`Task` and expose them.\n \"\"\"\n __instance_cache = {}\n _default_namespace = None\n _reg = []\n AMBIGUOUS_CLASS = object() # Placeholder denoting an error\n \"\"\"If this value is returned by :py:meth:`__get_reg` then there is an\n ambiguous task name (two :py:class:`Task` have the same name). 
This denotes\n an error.\"\"\"\n\n def __new__(metacls, classname, bases, classdict):\n \"\"\"\n Custom class creation for namespacing.\n\n Also register all subclasses.\n\n Set the task namespace to whatever the currently declared namespace is.\n \"\"\"\n if \"task_namespace\" not in classdict:\n classdict[\"task_namespace\"] = metacls._default_namespace\n\n cls = super(Register, metacls).__new__(metacls, classname, bases, classdict)\n metacls._reg.append(cls)\n\n return cls\n\n def __call__(cls, *args, **kwargs):\n \"\"\"\n Custom class instantiation utilizing instance cache.\n\n If a Task has already been instantiated with the same parameters,\n the previous instance is returned to reduce number of object instances.\n \"\"\"\n def instantiate():\n return super(Register, cls).__call__(*args, **kwargs)\n\n h = cls.__instance_cache\n\n if h is None: # disabled\n return instantiate()\n\n params = cls.get_params()\n param_values = cls.get_param_values(params, args, kwargs)\n\n k = (cls, tuple(param_values))\n\n try:\n hash(k)\n except TypeError:\n logger.debug(\"Not all parameter values are hashable so instance isn't coming from the cache\")\n return instantiate() # unhashable types in parameters\n\n if k not in h:\n h[k] = instantiate()\n\n return h[k]\n\n @classmethod\n def clear_instance_cache(cls):\n \"\"\"\n Clear/Reset the instance cache.\n \"\"\"\n cls.__instance_cache = {}\n\n @classmethod\n def disable_instance_cache(cls):\n \"\"\"\n Disables the instance cache.\n \"\"\"\n cls.__instance_cache = None\n\n @property\n def task_family(cls):\n \"\"\"\n The task family for the given class.\n\n If ``cls.task_namespace is None`` then it's the name of the class.\n Otherwise, ``<task_namespace>.`` is prefixed to the class name.\n \"\"\"\n if cls.task_namespace is None:\n return cls.__name__\n else:\n return \"%s.%s\" % (cls.task_namespace, cls.__name__)\n\n @classmethod\n def __get_reg(cls):\n \"\"\"Return all of the registered classes.\n\n :return: an ``collections.OrderedDict`` of task_family -> class\n \"\"\"\n # We have to do this on-demand in case task names have changed later\n # We return this in a topologically sorted list of inheritance: this is useful in some cases (#822)\n reg = OrderedDict()\n for cls in cls._reg:\n name = cls.task_family\n\n if name in reg and reg[name] != cls and \\\n reg[name] != cls.AMBIGUOUS_CLASS and \\\n not issubclass(cls, reg[name]):\n # Registering two different classes - this means we can't instantiate them by name\n # The only exception is if one class is a subclass of the other. In that case, we\n # instantiate the most-derived class (this fixes some issues with decorator wrappers).\n reg[name] = cls.AMBIGUOUS_CLASS\n else:\n reg[name] = cls\n\n return reg\n\n @classmethod\n def task_names(cls):\n \"\"\"\n List of task names as strings\n \"\"\"\n return sorted(cls.__get_reg().keys())\n\n @classmethod\n def tasks_str(cls):\n \"\"\"\n Human-readable register contents dump.\n \"\"\"\n return ','.join(cls.task_names())\n\n @classmethod\n def get_task_cls(cls, name):\n \"\"\"\n Returns an unambiguous class or raises an exception.\n \"\"\"\n task_cls = cls.__get_reg().get(name)\n if not task_cls:\n raise TaskClassException('Task %r not found. 
Candidates are: %s' % (name, cls.tasks_str()))\n\n if task_cls == cls.AMBIGUOUS_CLASS:\n raise TaskClassException('Task %r is ambiguous' % name)\n return task_cls\n\n @classmethod\n def get_all_params(cls):\n \"\"\"\n Compiles and returns all parameters for all :py:class:`Task`.\n\n :return: a generator of tuples (TODO: we should make this more elegant)\n \"\"\"\n for task_name, task_cls in six.iteritems(cls.__get_reg()):\n if task_cls == cls.AMBIGUOUS_CLASS:\n continue\n for param_name, param_obj in task_cls.get_params():\n yield task_name, (not task_cls.use_cmdline_section), param_name, param_obj\n\n\ndef load_task(module, task_name, params_str):\n \"\"\"\n Imports task dynamically given a module and a task name.\n \"\"\"\n if module is not None:\n __import__(module)\n task_cls = Register.get_task_cls(task_name)\n return task_cls.from_str_params(params_str)\n", "path": "luigi/task_register.py"}]} |